Repository: drizzle-team/drizzle-orm Branch: main Commit: 4aa6ecfee4b4 Files: 1373 Total size: 10.4 MB Directory structure: gitextract_jk7qoxoz/ ├── .eslintignore ├── .eslintrc.yaml ├── .github/ │ ├── FUNDING.yml │ ├── ISSUE_TEMPLATE/ │ │ ├── bug-template.yaml │ │ ├── config.yml │ │ ├── docs-template.yaml │ │ └── feature-template.yaml │ └── workflows/ │ ├── codeql.yml │ ├── release-feature-branch.yaml │ ├── release-latest.yaml │ ├── router.yaml │ └── unpublish-release-feature-branch.yaml ├── .gitignore ├── .markdownlint.yaml ├── .npmrc ├── .nvmrc ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── SECURITY.md ├── changelogs/ │ ├── README.md │ ├── drizzle-arktype/ │ │ ├── 0.1.2.md │ │ └── 0.1.3.md │ ├── drizzle-kit/ │ │ ├── 0.23.2.md │ │ ├── 0.24.0.md │ │ ├── 0.24.1.md │ │ ├── 0.24.2.md │ │ ├── 0.25.0.md │ │ ├── 0.26.0.md │ │ ├── 0.26.1.md │ │ ├── 0.26.2.md │ │ ├── 0.27.0.md │ │ ├── 0.27.1.md │ │ ├── 0.27.2.md │ │ ├── 0.28.0.md │ │ ├── 0.28.1.md │ │ ├── 0.29.0.md │ │ ├── 0.29.1.md │ │ ├── 0.30.0.md │ │ ├── 0.30.1.md │ │ ├── 0.30.2.md │ │ ├── 0.30.3.md │ │ ├── 0.30.4.md │ │ ├── 0.30.5.md │ │ ├── 0.30.6.md │ │ ├── 0.31.0.md │ │ ├── 0.31.1.md │ │ ├── 0.31.10.md │ │ ├── 0.31.2.md │ │ ├── 0.31.3.md │ │ ├── 0.31.4.md │ │ ├── 0.31.5.md │ │ ├── 0.31.6.md │ │ ├── 0.31.7.md │ │ ├── 0.31.8.md │ │ └── 0.31.9.md │ ├── drizzle-orm/ │ │ ├── 0.12.0-beta.23.md │ │ ├── 0.12.0-beta.24.md │ │ ├── 0.13.0.md │ │ ├── 0.13.1.md │ │ ├── 0.14.1.md │ │ ├── 0.14.2.md │ │ ├── 0.15.0.md │ │ ├── 0.15.1.md │ │ ├── 0.15.2.md │ │ ├── 0.15.3.md │ │ ├── 0.16.0.md │ │ ├── 0.16.1.md │ │ ├── 0.17.0.md │ │ ├── 0.17.1.md │ │ ├── 0.17.2.md │ │ ├── 0.17.3.md │ │ ├── 0.17.4.md │ │ ├── 0.17.5.md │ │ ├── 0.17.6.md │ │ ├── 0.17.7.md │ │ ├── 0.18.0.md │ │ ├── 0.19.0.md │ │ ├── 0.19.1.md │ │ ├── 0.20.0.md │ │ ├── 0.20.1.md │ │ ├── 0.20.2.md │ │ ├── 0.20.3.md │ │ ├── 0.21.0.md │ │ ├── 0.21.1.md │ │ ├── 0.22.0.md │ │ ├── 0.23.0.md │ │ ├── 0.23.1.md │ │ ├── 0.23.10.md │ │ ├── 0.23.11.md │ │ ├── 0.23.12.md │ │ ├── 0.23.13.md │ │ ├── 0.23.2.md │ │ ├── 0.23.3.md │ │ ├── 0.23.4.md │ │ ├── 0.23.5.md │ │ ├── 0.23.6.md │ │ ├── 0.23.7.md │ │ ├── 0.23.8.md │ │ ├── 0.23.9.md │ │ ├── 0.24.0.md │ │ ├── 0.24.1.md │ │ ├── 0.24.2.md │ │ ├── 0.24.3.md │ │ ├── 0.24.4.md │ │ ├── 0.24.5.md │ │ ├── 0.25.0.md │ │ ├── 0.25.1.md │ │ ├── 0.25.2.md │ │ ├── 0.25.3.md │ │ ├── 0.25.4.md │ │ ├── 0.26.0.md │ │ ├── 0.26.1.md │ │ ├── 0.26.2.md │ │ ├── 0.26.3.md │ │ ├── 0.26.4.md │ │ ├── 0.26.5.md │ │ ├── 0.27.0.md │ │ ├── 0.27.1.md │ │ ├── 0.27.2.md │ │ ├── 0.28.0.md │ │ ├── 0.28.1.md │ │ ├── 0.28.2.md │ │ ├── 0.28.3.md │ │ ├── 0.28.4.md │ │ ├── 0.28.5.md │ │ ├── 0.28.6.md │ │ ├── 0.29.0.md │ │ ├── 0.29.1.md │ │ ├── 0.29.2.md │ │ ├── 0.29.3.md │ │ ├── 0.29.4.md │ │ ├── 0.29.5.md │ │ ├── 0.30.0.md │ │ ├── 0.30.1.md │ │ ├── 0.30.10.md │ │ ├── 0.30.2.md │ │ ├── 0.30.3.md │ │ ├── 0.30.4.md │ │ ├── 0.30.5.md │ │ ├── 0.30.6.md │ │ ├── 0.30.7-preview.md │ │ ├── 0.30.7.md │ │ ├── 0.30.8.md │ │ ├── 0.30.9.md │ │ ├── 0.31.0-beta.md │ │ ├── 0.31.0.md │ │ ├── 0.31.1.md │ │ ├── 0.31.2.md │ │ ├── 0.31.3.md │ │ ├── 0.31.4.md │ │ ├── 0.32.0-beta.md │ │ ├── 0.32.0.md │ │ ├── 0.32.1.md │ │ ├── 0.32.2.md │ │ ├── 0.33.0.md │ │ ├── 0.34.0.md │ │ ├── 0.34.1.md │ │ ├── 0.35.0.md │ │ ├── 0.35.1.md │ │ ├── 0.35.2.md │ │ ├── 0.35.3.md │ │ ├── 0.36.0.md │ │ ├── 0.36.1.md │ │ ├── 0.36.2.md │ │ ├── 0.36.3.md │ │ ├── 0.36.4.md │ │ ├── 0.37.0.md │ │ ├── 0.38.0.md │ │ ├── 0.38.1.md │ │ ├── 0.38.2.md │ │ ├── 0.38.3.md │ │ ├── 0.38.4.md │ │ ├── 0.39.0.md │ │ ├── 0.39.1.md │ │ ├── 0.39.2.md │ │ ├── 0.39.3.md │ │ ├── 0.40.0.md │ │ ├── 0.40.1.md │ │ ├── 0.41.0.md │ │ ├── 0.42.0.md │ │ ├── 0.43.0.md │ │ ├── 0.43.1.md │ │ ├── 0.44.0.md │ │ ├── 0.44.1.md │ │ ├── 0.44.2.md │ │ ├── 0.44.3.md │ │ ├── 0.44.4.md │ │ ├── 0.44.5.md │ │ ├── 0.44.6.md │ │ ├── 0.44.7.md │ │ ├── 0.45.0.md │ │ └── 0.45.1.md │ ├── drizzle-orm-mysql/ │ │ ├── 0.14.1.md │ │ ├── 0.14.2.md │ │ ├── 0.14.3.md │ │ ├── 0.15.0.md │ │ ├── 0.15.1.md │ │ ├── 0.15.2.md │ │ ├── 0.15.3.md │ │ ├── 0.16.0.md │ │ ├── 0.16.1.md │ │ └── 0.16.2.md │ ├── drizzle-orm-pg/ │ │ ├── 0.12.0-beta.40.md │ │ ├── 0.13.0.md │ │ ├── 0.13.1.md │ │ ├── 0.13.2.md │ │ ├── 0.13.3.md │ │ ├── 0.13.4.md │ │ ├── 0.14.0.md │ │ ├── 0.14.1.md │ │ ├── 0.14.2.md │ │ ├── 0.14.3.md │ │ ├── 0.14.4.md │ │ ├── 0.15.0.md │ │ ├── 0.15.1.md │ │ ├── 0.15.2.md │ │ ├── 0.15.3.md │ │ ├── 0.16.0.md │ │ ├── 0.16.1.md │ │ ├── 0.16.2.md │ │ └── 0.16.3.md │ ├── drizzle-orm-sqlite/ │ │ ├── 0.12.0-beta.17.md │ │ ├── 0.12.0-beta.18.md │ │ ├── 0.12.0-beta.19.md │ │ ├── 0.12.0-beta.20.md │ │ ├── 0.12.0-beta.21.md │ │ ├── 0.13.0.md │ │ ├── 0.14.1.md │ │ ├── 0.14.2.md │ │ ├── 0.14.3.md │ │ ├── 0.14.4.md │ │ ├── 0.14.5.md │ │ ├── 0.15.0.md │ │ ├── 0.15.2.md │ │ ├── 0.15.3.md │ │ ├── 0.15.4.md │ │ ├── 0.16.0.md │ │ └── 0.16.1.md │ ├── drizzle-seed/ │ │ ├── 0.1.1.md │ │ ├── 0.1.2.md │ │ ├── 0.1.3.md │ │ ├── 0.2.1.md │ │ ├── 0.3.0.md │ │ └── 0.3.1.md │ ├── drizzle-typebox/ │ │ ├── 0.1.0.md │ │ ├── 0.1.1.md │ │ ├── 0.2.0.md │ │ ├── 0.2.1.md │ │ ├── 0.3.0.md │ │ ├── 0.3.1.md │ │ ├── 0.3.2.md │ │ └── 0.3.3.md │ ├── drizzle-valibot/ │ │ ├── 0.1.0.md │ │ ├── 0.1.1.md │ │ ├── 0.2.0.md │ │ ├── 0.3.0.md │ │ ├── 0.3.1.md │ │ ├── 0.4.0.md │ │ ├── 0.4.1.md │ │ └── 0.4.2.md │ ├── drizzle-zod/ │ │ ├── 0.1.0.md │ │ ├── 0.1.1.md │ │ ├── 0.1.2.md │ │ ├── 0.1.3.md │ │ ├── 0.1.4.md │ │ ├── 0.2.0.md │ │ ├── 0.2.1.md │ │ ├── 0.3.0.md │ │ ├── 0.3.1.md │ │ ├── 0.3.2.md │ │ ├── 0.4.0.md │ │ ├── 0.4.1.md │ │ ├── 0.4.2.md │ │ ├── 0.4.3.md │ │ ├── 0.4.4.md │ │ ├── 0.5.0.md │ │ ├── 0.5.1.md │ │ ├── 0.6.0.md │ │ ├── 0.6.1.md │ │ ├── 0.7.0.md │ │ ├── 0.7.1.md │ │ ├── 0.8.0.md │ │ ├── 0.8.1.md │ │ ├── 0.8.2.md │ │ └── 0.8.3.md │ └── eslint-plugin-drizzle/ │ ├── 0.2.0.md │ ├── 0.2.1.md │ ├── 0.2.2.md │ └── 0.2.3.md ├── docs/ │ ├── custom-types.lite.md │ ├── custom-types.md │ ├── joins.md │ └── table-introspect-api.md ├── dprint.json ├── drizzle-arktype/ │ ├── README.md │ ├── benchmarks/ │ │ └── types.ts │ ├── package.json │ ├── rollup.config.ts │ ├── scripts/ │ │ ├── build.ts │ │ └── fix-imports.ts │ ├── src/ │ │ ├── column.ts │ │ ├── column.types.ts │ │ ├── constants.ts │ │ ├── index.ts │ │ ├── schema.ts │ │ ├── schema.types.internal.ts │ │ ├── schema.types.ts │ │ └── utils.ts │ ├── tests/ │ │ ├── mysql.test.ts │ │ ├── pg.test.ts │ │ ├── singlestore.test.ts │ │ ├── sqlite.test.ts │ │ ├── tsconfig.json │ │ └── utils.ts │ ├── tsconfig.build.json │ ├── tsconfig.json │ └── vitest.config.ts ├── drizzle-kit/ │ ├── .gitignore │ ├── README.md │ ├── build.dev.ts │ ├── build.ts │ ├── imports-checker/ │ │ ├── analyze.ts │ │ ├── checker.ts │ │ ├── grammar/ │ │ │ ├── grammar.ohm │ │ │ ├── grammar.ohm-bundle.d.ts │ │ │ └── grammar.ohm-bundle.js │ │ └── index.ts │ ├── package.json │ ├── patches/ │ │ └── difflib@0.2.4.patch │ ├── src/ │ │ ├── @types/ │ │ │ └── utils.ts │ │ ├── api.ts │ │ ├── cli/ │ │ │ ├── commands/ │ │ │ │ ├── _es5.ts │ │ │ │ ├── check.ts │ │ │ │ ├── drop.ts │ │ │ │ ├── introspect.ts │ │ │ │ ├── libSqlPushUtils.ts │ │ │ │ ├── migrate.ts │ │ │ │ ├── mysqlIntrospect.ts │ │ │ │ ├── mysqlPushUtils.ts │ │ │ │ ├── mysqlUp.ts │ │ │ │ ├── pgIntrospect.ts │ │ │ │ ├── pgPushUtils.ts │ │ │ │ ├── pgUp.ts │ │ │ │ ├── push.ts │ │ │ │ ├── singlestoreIntrospect.ts │ │ │ │ ├── singlestorePushUtils.ts │ │ │ │ ├── singlestoreUp.ts │ │ │ │ ├── sqliteIntrospect.ts │ │ │ │ ├── sqlitePushUtils.ts │ │ │ │ ├── sqliteUp.ts │ │ │ │ └── utils.ts │ │ │ ├── connections.ts │ │ │ ├── index.ts │ │ │ ├── schema.ts │ │ │ ├── selector-ui.ts │ │ │ ├── utils.ts │ │ │ ├── validations/ │ │ │ │ ├── cli.ts │ │ │ │ ├── common.ts │ │ │ │ ├── gel.ts │ │ │ │ ├── libsql.ts │ │ │ │ ├── mysql.ts │ │ │ │ ├── outputs.ts │ │ │ │ ├── postgres.ts │ │ │ │ ├── singlestore.ts │ │ │ │ ├── sqlite.ts │ │ │ │ └── studio.ts │ │ │ └── views.ts │ │ ├── extensions/ │ │ │ ├── getTablesFilterByExtensions.ts │ │ │ └── vector.ts │ │ ├── global.ts │ │ ├── index.ts │ │ ├── introspect-gel.ts │ │ ├── introspect-mysql.ts │ │ ├── introspect-pg.ts │ │ ├── introspect-singlestore.ts │ │ ├── introspect-sqlite.ts │ │ ├── jsonDiffer.js │ │ ├── jsonStatements.ts │ │ ├── loader.mjs │ │ ├── migrationPreparator.ts │ │ ├── schemaValidator.ts │ │ ├── serializer/ │ │ │ ├── gelSchema.ts │ │ │ ├── gelSerializer.ts │ │ │ ├── index.ts │ │ │ ├── mysqlImports.ts │ │ │ ├── mysqlSchema.ts │ │ │ ├── mysqlSerializer.ts │ │ │ ├── pgImports.ts │ │ │ ├── pgSchema.ts │ │ │ ├── pgSerializer.ts │ │ │ ├── singlestoreImports.ts │ │ │ ├── singlestoreSchema.ts │ │ │ ├── singlestoreSerializer.ts │ │ │ ├── sqliteImports.ts │ │ │ ├── sqliteSchema.ts │ │ │ ├── sqliteSerializer.ts │ │ │ ├── studio.ts │ │ │ └── utils.ts │ │ ├── simulator.ts │ │ ├── snapshotsDiffer.ts │ │ ├── sqlgenerator.ts │ │ ├── statementCombiner.ts │ │ ├── utils/ │ │ │ ├── certs.ts │ │ │ └── words.ts │ │ └── utils.ts │ ├── tests/ │ │ ├── bin.test.ts │ │ ├── cli/ │ │ │ ├── d1http.config.ts │ │ │ ├── drizzle.config.ts │ │ │ ├── durable-sqlite.config.ts │ │ │ ├── expo.config.ts │ │ │ ├── postgres.config.ts │ │ │ ├── postgres2.config.ts │ │ │ ├── schema.ts │ │ │ └── turso.config.ts │ │ ├── cli-export.test.ts │ │ ├── cli-generate.test.ts │ │ ├── cli-migrate.test.ts │ │ ├── cli-push.test.ts │ │ ├── common.ts │ │ ├── indexes/ │ │ │ ├── common.ts │ │ │ └── pg.test.ts │ │ ├── introspect/ │ │ │ ├── gel.ext.test.ts │ │ │ ├── gel.test.ts │ │ │ ├── libsql.test.ts │ │ │ ├── mysql.test.ts │ │ │ ├── pg.test.ts │ │ │ ├── singlestore.test.ts │ │ │ └── sqlite.test.ts │ │ ├── libsql-checks.test.ts │ │ ├── libsql-statements.test.ts │ │ ├── libsql-views.test.ts │ │ ├── migrate/ │ │ │ ├── libsq-schema.ts │ │ │ ├── libsql-migrate.test.ts │ │ │ └── migrations/ │ │ │ ├── 0000_little_blizzard.sql │ │ │ ├── 0001_nebulous_storm.sql │ │ │ └── meta/ │ │ │ ├── 0000_snapshot.json │ │ │ ├── 0001_snapshot.json │ │ │ └── _journal.json │ │ ├── mysql-checks.test.ts │ │ ├── mysql-generated.test.ts │ │ ├── mysql-schemas.test.ts │ │ ├── mysql-views.test.ts │ │ ├── mysql.test.ts │ │ ├── pg-array.test.ts │ │ ├── pg-checks.test.ts │ │ ├── pg-columns.test.ts │ │ ├── pg-enums.test.ts │ │ ├── pg-generated.test.ts │ │ ├── pg-identity.test.ts │ │ ├── pg-schemas.test.ts │ │ ├── pg-sequences.test.ts │ │ ├── pg-tables.test.ts │ │ ├── pg-views.test.ts │ │ ├── push/ │ │ │ ├── common.ts │ │ │ ├── libsql.test.ts │ │ │ ├── mysql-push.test.ts │ │ │ ├── mysql.test.ts │ │ │ ├── pg.test.ts │ │ │ ├── singlestore-push.test.ts │ │ │ ├── singlestore.test.ts │ │ │ └── sqlite.test.ts │ │ ├── rls/ │ │ │ ├── pg-policy.test.ts │ │ │ └── pg-role.test.ts │ │ ├── schemaDiffer.ts │ │ ├── singlestore-generated.test.ts │ │ ├── singlestore-schemas.test.ts │ │ ├── singlestore.test.ts │ │ ├── sqlite-checks.test.ts │ │ ├── sqlite-columns.test.ts │ │ ├── sqlite-generated.test.ts │ │ ├── sqlite-tables.test.ts │ │ ├── sqlite-views.test.ts │ │ ├── statements-combiner/ │ │ │ ├── libsql-statements-combiner.test.ts │ │ │ ├── singlestore-statements-combiner.test.ts │ │ │ └── sqlite-statements-combiner.test.ts │ │ ├── test/ │ │ │ └── sqlite.test.ts │ │ ├── testsinglestore.ts │ │ ├── validations.test.ts │ │ └── wrap-param.test.ts │ ├── tsconfig.build.json │ ├── tsconfig.cli-types.json │ ├── tsconfig.json │ └── vitest.config.ts ├── drizzle-orm/ │ ├── .madgerc │ ├── package.json │ ├── scripts/ │ │ ├── build.ts │ │ └── fix-imports.ts │ ├── src/ │ │ ├── alias.ts │ │ ├── aws-data-api/ │ │ │ ├── common/ │ │ │ │ └── index.ts │ │ │ └── pg/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── batch.ts │ │ ├── better-sqlite3/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── bun-sql/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── bun-sqlite/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── cache/ │ │ │ ├── core/ │ │ │ │ ├── cache.ts │ │ │ │ ├── index.ts │ │ │ │ └── types.ts │ │ │ ├── readme.md │ │ │ └── upstash/ │ │ │ ├── cache.ts │ │ │ └── index.ts │ │ ├── casing.ts │ │ ├── column-builder.ts │ │ ├── column.ts │ │ ├── d1/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── durable-sqlite/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── entity.ts │ │ ├── errors.ts │ │ ├── expo-sqlite/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ ├── query.ts │ │ │ └── session.ts │ │ ├── gel/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── gel-core/ │ │ │ ├── alias.ts │ │ │ ├── checks.ts │ │ │ ├── columns/ │ │ │ │ ├── all.ts │ │ │ │ ├── bigint.ts │ │ │ │ ├── bigintT.ts │ │ │ │ ├── boolean.ts │ │ │ │ ├── bytes.ts │ │ │ │ ├── common.ts │ │ │ │ ├── custom.ts │ │ │ │ ├── date-duration.ts │ │ │ │ ├── date.common.ts │ │ │ │ ├── decimal.ts │ │ │ │ ├── double-precision.ts │ │ │ │ ├── duration.ts │ │ │ │ ├── index.ts │ │ │ │ ├── int.common.ts │ │ │ │ ├── integer.ts │ │ │ │ ├── json.ts │ │ │ │ ├── localdate.ts │ │ │ │ ├── localtime.ts │ │ │ │ ├── real.ts │ │ │ │ ├── relative-duration.ts │ │ │ │ ├── smallint.ts │ │ │ │ ├── text.ts │ │ │ │ ├── timestamp.ts │ │ │ │ ├── timestamptz.ts │ │ │ │ └── uuid.ts │ │ │ ├── db.ts │ │ │ ├── dialect.ts │ │ │ ├── expressions.ts │ │ │ ├── foreign-keys.ts │ │ │ ├── index.ts │ │ │ ├── indexes.ts │ │ │ ├── policies.ts │ │ │ ├── primary-keys.ts │ │ │ ├── query-builders/ │ │ │ │ ├── count.ts │ │ │ │ ├── delete.ts │ │ │ │ ├── index.ts │ │ │ │ ├── insert.ts │ │ │ │ ├── query-builder.ts │ │ │ │ ├── query.ts │ │ │ │ ├── raw.ts │ │ │ │ ├── refresh-materialized-view.ts │ │ │ │ ├── select.ts │ │ │ │ ├── select.types.ts │ │ │ │ └── update.ts │ │ │ ├── roles.ts │ │ │ ├── schema.ts │ │ │ ├── sequence.ts │ │ │ ├── session.ts │ │ │ ├── subquery.ts │ │ │ ├── table.ts │ │ │ ├── unique-constraint.ts │ │ │ ├── utils.ts │ │ │ ├── view-base.ts │ │ │ ├── view-common.ts │ │ │ └── view.ts │ │ ├── index.ts │ │ ├── knex/ │ │ │ ├── README.md │ │ │ └── index.ts │ │ ├── kysely/ │ │ │ ├── README.md │ │ │ └── index.ts │ │ ├── libsql/ │ │ │ ├── driver-core.ts │ │ │ ├── driver.ts │ │ │ ├── http/ │ │ │ │ └── index.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ ├── node/ │ │ │ │ └── index.ts │ │ │ ├── session.ts │ │ │ ├── sqlite3/ │ │ │ │ └── index.ts │ │ │ ├── wasm/ │ │ │ │ └── index.ts │ │ │ ├── web/ │ │ │ │ └── index.ts │ │ │ └── ws/ │ │ │ └── index.ts │ │ ├── logger.ts │ │ ├── migrator.ts │ │ ├── mysql-core/ │ │ │ ├── alias.ts │ │ │ ├── checks.ts │ │ │ ├── columns/ │ │ │ │ ├── all.ts │ │ │ │ ├── bigint.ts │ │ │ │ ├── binary.ts │ │ │ │ ├── boolean.ts │ │ │ │ ├── char.ts │ │ │ │ ├── common.ts │ │ │ │ ├── custom.ts │ │ │ │ ├── date.common.ts │ │ │ │ ├── date.ts │ │ │ │ ├── datetime.ts │ │ │ │ ├── decimal.ts │ │ │ │ ├── double.ts │ │ │ │ ├── enum.ts │ │ │ │ ├── float.ts │ │ │ │ ├── index.ts │ │ │ │ ├── int.ts │ │ │ │ ├── json.ts │ │ │ │ ├── mediumint.ts │ │ │ │ ├── real.ts │ │ │ │ ├── serial.ts │ │ │ │ ├── smallint.ts │ │ │ │ ├── text.ts │ │ │ │ ├── time.ts │ │ │ │ ├── timestamp.ts │ │ │ │ ├── tinyint.ts │ │ │ │ ├── varbinary.ts │ │ │ │ ├── varchar.ts │ │ │ │ └── year.ts │ │ │ ├── db.ts │ │ │ ├── dialect.ts │ │ │ ├── expressions.ts │ │ │ ├── foreign-keys.ts │ │ │ ├── index.ts │ │ │ ├── indexes.ts │ │ │ ├── primary-keys.ts │ │ │ ├── query-builders/ │ │ │ │ ├── count.ts │ │ │ │ ├── delete.ts │ │ │ │ ├── index.ts │ │ │ │ ├── insert.ts │ │ │ │ ├── query-builder.ts │ │ │ │ ├── query.ts │ │ │ │ ├── select.ts │ │ │ │ ├── select.types.ts │ │ │ │ └── update.ts │ │ │ ├── schema.ts │ │ │ ├── session.ts │ │ │ ├── subquery.ts │ │ │ ├── table.ts │ │ │ ├── unique-constraint.ts │ │ │ ├── utils.ts │ │ │ ├── view-base.ts │ │ │ ├── view-common.ts │ │ │ └── view.ts │ │ ├── mysql-proxy/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── mysql2/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── neon/ │ │ │ ├── index.ts │ │ │ ├── neon-auth.ts │ │ │ └── rls.ts │ │ ├── neon-http/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── neon-serverless/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── node-postgres/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── op-sqlite/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── operations.ts │ │ ├── pg-core/ │ │ │ ├── alias.ts │ │ │ ├── checks.ts │ │ │ ├── columns/ │ │ │ │ ├── all.ts │ │ │ │ ├── bigint.ts │ │ │ │ ├── bigserial.ts │ │ │ │ ├── boolean.ts │ │ │ │ ├── char.ts │ │ │ │ ├── cidr.ts │ │ │ │ ├── common.ts │ │ │ │ ├── custom.ts │ │ │ │ ├── date.common.ts │ │ │ │ ├── date.ts │ │ │ │ ├── double-precision.ts │ │ │ │ ├── enum.ts │ │ │ │ ├── index.ts │ │ │ │ ├── inet.ts │ │ │ │ ├── int.common.ts │ │ │ │ ├── integer.ts │ │ │ │ ├── interval.ts │ │ │ │ ├── json.ts │ │ │ │ ├── jsonb.ts │ │ │ │ ├── line.ts │ │ │ │ ├── macaddr.ts │ │ │ │ ├── macaddr8.ts │ │ │ │ ├── numeric.ts │ │ │ │ ├── point.ts │ │ │ │ ├── postgis_extension/ │ │ │ │ │ ├── geometry.ts │ │ │ │ │ └── utils.ts │ │ │ │ ├── real.ts │ │ │ │ ├── serial.ts │ │ │ │ ├── smallint.ts │ │ │ │ ├── smallserial.ts │ │ │ │ ├── text.ts │ │ │ │ ├── time.ts │ │ │ │ ├── timestamp.ts │ │ │ │ ├── uuid.ts │ │ │ │ ├── varchar.ts │ │ │ │ └── vector_extension/ │ │ │ │ ├── bit.ts │ │ │ │ ├── halfvec.ts │ │ │ │ ├── sparsevec.ts │ │ │ │ └── vector.ts │ │ │ ├── db.ts │ │ │ ├── dialect.ts │ │ │ ├── expressions.ts │ │ │ ├── foreign-keys.ts │ │ │ ├── index.ts │ │ │ ├── indexes.ts │ │ │ ├── policies.ts │ │ │ ├── primary-keys.ts │ │ │ ├── query-builders/ │ │ │ │ ├── count.ts │ │ │ │ ├── delete.ts │ │ │ │ ├── index.ts │ │ │ │ ├── insert.ts │ │ │ │ ├── query-builder.ts │ │ │ │ ├── query.ts │ │ │ │ ├── raw.ts │ │ │ │ ├── refresh-materialized-view.ts │ │ │ │ ├── select.ts │ │ │ │ ├── select.types.ts │ │ │ │ └── update.ts │ │ │ ├── roles.ts │ │ │ ├── schema.ts │ │ │ ├── sequence.ts │ │ │ ├── session.ts │ │ │ ├── subquery.ts │ │ │ ├── table.ts │ │ │ ├── unique-constraint.ts │ │ │ ├── utils/ │ │ │ │ ├── array.ts │ │ │ │ └── index.ts │ │ │ ├── utils.ts │ │ │ ├── view-base.ts │ │ │ ├── view-common.ts │ │ │ └── view.ts │ │ ├── pg-proxy/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── pglite/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── planetscale-serverless/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── postgres-js/ │ │ │ ├── README.md │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── primary-key.ts │ │ ├── prisma/ │ │ │ ├── mysql/ │ │ │ │ ├── driver.ts │ │ │ │ ├── index.ts │ │ │ │ └── session.ts │ │ │ ├── pg/ │ │ │ │ ├── driver.ts │ │ │ │ ├── index.ts │ │ │ │ └── session.ts │ │ │ ├── schema.prisma │ │ │ └── sqlite/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ └── session.ts │ │ ├── query-builders/ │ │ │ ├── query-builder.ts │ │ │ └── select.types.ts │ │ ├── query-promise.ts │ │ ├── relations.ts │ │ ├── runnable-query.ts │ │ ├── selection-proxy.ts │ │ ├── session.ts │ │ ├── singlestore/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── singlestore-core/ │ │ │ ├── alias.ts │ │ │ ├── columns/ │ │ │ │ ├── all.ts │ │ │ │ ├── bigint.ts │ │ │ │ ├── binary.ts │ │ │ │ ├── boolean.ts │ │ │ │ ├── char.ts │ │ │ │ ├── common.ts │ │ │ │ ├── custom.ts │ │ │ │ ├── date.common.ts │ │ │ │ ├── date.ts │ │ │ │ ├── datetime.ts │ │ │ │ ├── decimal.ts │ │ │ │ ├── double.ts │ │ │ │ ├── enum.ts │ │ │ │ ├── float.ts │ │ │ │ ├── index.ts │ │ │ │ ├── int.ts │ │ │ │ ├── json.ts │ │ │ │ ├── mediumint.ts │ │ │ │ ├── real.ts │ │ │ │ ├── serial.ts │ │ │ │ ├── smallint.ts │ │ │ │ ├── text.ts │ │ │ │ ├── time.ts │ │ │ │ ├── timestamp.ts │ │ │ │ ├── tinyint.ts │ │ │ │ ├── varbinary.ts │ │ │ │ ├── varchar.ts │ │ │ │ ├── vector.ts │ │ │ │ └── year.ts │ │ │ ├── db.ts │ │ │ ├── dialect.ts │ │ │ ├── expressions.ts │ │ │ ├── index.ts │ │ │ ├── indexes.ts │ │ │ ├── primary-keys.ts │ │ │ ├── query-builders/ │ │ │ │ ├── count.ts │ │ │ │ ├── delete.ts │ │ │ │ ├── index.ts │ │ │ │ ├── insert.ts │ │ │ │ ├── query-builder.ts │ │ │ │ ├── query.ts │ │ │ │ ├── select.ts │ │ │ │ ├── select.types.ts │ │ │ │ └── update.ts │ │ │ ├── schema.ts │ │ │ ├── session.ts │ │ │ ├── subquery.ts │ │ │ ├── table.ts │ │ │ ├── unique-constraint.ts │ │ │ ├── utils.ts │ │ │ ├── view-base.ts │ │ │ ├── view-common.ts │ │ │ └── view.ts │ │ ├── singlestore-proxy/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── sql/ │ │ │ ├── expressions/ │ │ │ │ ├── conditions.ts │ │ │ │ ├── index.ts │ │ │ │ └── select.ts │ │ │ ├── functions/ │ │ │ │ ├── aggregate.ts │ │ │ │ ├── index.ts │ │ │ │ └── vector.ts │ │ │ ├── index.ts │ │ │ └── sql.ts │ │ ├── sql-js/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── sqlite-core/ │ │ │ ├── README.md │ │ │ ├── alias.ts │ │ │ ├── checks.ts │ │ │ ├── columns/ │ │ │ │ ├── all.ts │ │ │ │ ├── blob.ts │ │ │ │ ├── common.ts │ │ │ │ ├── custom.ts │ │ │ │ ├── index.ts │ │ │ │ ├── integer.ts │ │ │ │ ├── numeric.ts │ │ │ │ ├── real.ts │ │ │ │ └── text.ts │ │ │ ├── db.ts │ │ │ ├── dialect.ts │ │ │ ├── expressions.ts │ │ │ ├── foreign-keys.ts │ │ │ ├── index.ts │ │ │ ├── indexes.ts │ │ │ ├── primary-keys.ts │ │ │ ├── query-builders/ │ │ │ │ ├── count.ts │ │ │ │ ├── delete.ts │ │ │ │ ├── index.ts │ │ │ │ ├── insert.ts │ │ │ │ ├── query-builder.ts │ │ │ │ ├── query.ts │ │ │ │ ├── raw.ts │ │ │ │ ├── select.ts │ │ │ │ ├── select.types.ts │ │ │ │ └── update.ts │ │ │ ├── session.ts │ │ │ ├── subquery.ts │ │ │ ├── table.ts │ │ │ ├── unique-constraint.ts │ │ │ ├── utils.ts │ │ │ ├── view-base.ts │ │ │ ├── view-common.ts │ │ │ └── view.ts │ │ ├── sqlite-proxy/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── subquery.ts │ │ ├── supabase/ │ │ │ ├── index.ts │ │ │ └── rls.ts │ │ ├── table.ts │ │ ├── table.utils.ts │ │ ├── tidb-serverless/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── tracing-utils.ts │ │ ├── tracing.ts │ │ ├── utils.ts │ │ ├── vercel-postgres/ │ │ │ ├── driver.ts │ │ │ ├── index.ts │ │ │ ├── migrator.ts │ │ │ └── session.ts │ │ ├── version.ts │ │ ├── view-common.ts │ │ └── xata-http/ │ │ ├── driver.ts │ │ ├── index.ts │ │ ├── migrator.ts │ │ └── session.ts │ ├── tests/ │ │ ├── casing/ │ │ │ ├── casing.test.ts │ │ │ ├── mysql-to-camel.test.ts │ │ │ ├── mysql-to-snake.test.ts │ │ │ ├── pg-to-camel.test.ts │ │ │ ├── pg-to-snake.test.ts │ │ │ ├── sqlite-to-camel.test.ts │ │ │ └── sqlite-to-snake.test.ts │ │ ├── exports.test.ts │ │ ├── is.test.ts │ │ ├── makePgArray.test.ts │ │ ├── parsePgArray.test.ts │ │ ├── relation.test.ts │ │ ├── tsconfig.json │ │ └── type-hints.test.ts │ ├── tsconfig.build.json │ ├── tsconfig.dts.json │ ├── tsconfig.json │ ├── tsup.config.ts │ ├── type-tests/ │ │ ├── common/ │ │ │ └── aliased-table.ts │ │ ├── geldb/ │ │ │ ├── 1-to-1-fk.ts │ │ │ ├── array.ts │ │ │ ├── count.ts │ │ │ ├── db-rel.ts │ │ │ ├── db.ts │ │ │ ├── delete.ts │ │ │ ├── generated-columns.ts │ │ │ ├── insert.ts │ │ │ ├── no-strict-null-checks/ │ │ │ │ ├── test.ts │ │ │ │ └── tsconfig.json │ │ │ ├── other.ts │ │ │ ├── select.ts │ │ │ ├── set-operators.ts │ │ │ ├── subquery.ts │ │ │ ├── tables-rel.ts │ │ │ ├── tables.ts │ │ │ ├── update.ts │ │ │ └── with.ts │ │ ├── knex/ │ │ │ └── index.ts │ │ ├── kysely/ │ │ │ └── index.ts │ │ ├── mysql/ │ │ │ ├── 1-to-1-fk.ts │ │ │ ├── 1000columns.ts │ │ │ ├── count.ts │ │ │ ├── db-rel.ts │ │ │ ├── db.ts │ │ │ ├── delete.ts │ │ │ ├── generated-columns.ts │ │ │ ├── insert.ts │ │ │ ├── no-strict-null-checks/ │ │ │ │ ├── test.ts │ │ │ │ └── tsconfig.json │ │ │ ├── select.ts │ │ │ ├── set-operators.ts │ │ │ ├── subquery.ts │ │ │ ├── tables-rel.ts │ │ │ ├── tables.ts │ │ │ ├── update.ts │ │ │ └── with.ts │ │ ├── pg/ │ │ │ ├── 1-to-1-fk.ts │ │ │ ├── array.ts │ │ │ ├── count.ts │ │ │ ├── db-rel.ts │ │ │ ├── db.ts │ │ │ ├── delete.ts │ │ │ ├── generated-columns.ts │ │ │ ├── insert.ts │ │ │ ├── no-strict-null-checks/ │ │ │ │ ├── test.ts │ │ │ │ └── tsconfig.json │ │ │ ├── other.ts │ │ │ ├── select.ts │ │ │ ├── set-operators.ts │ │ │ ├── subquery.ts │ │ │ ├── tables-rel.ts │ │ │ ├── tables.ts │ │ │ ├── update.ts │ │ │ └── with.ts │ │ ├── singlestore/ │ │ │ ├── 1000columns.ts │ │ │ ├── count.ts │ │ │ ├── db.ts │ │ │ ├── delete.ts │ │ │ ├── insert.ts │ │ │ ├── no-strict-null-checks/ │ │ │ │ ├── test.ts │ │ │ │ └── tsconfig.json │ │ │ ├── select.ts │ │ │ ├── set-operators.ts │ │ │ ├── subquery.ts │ │ │ ├── tables.ts │ │ │ ├── update.ts │ │ │ └── with.ts │ │ ├── sqlite/ │ │ │ ├── .gitignore │ │ │ ├── count.ts │ │ │ ├── db.ts │ │ │ ├── delete.ts │ │ │ ├── generated-columns.ts │ │ │ ├── insert.ts │ │ │ ├── no-strict-null-checks/ │ │ │ │ ├── test.ts │ │ │ │ └── tsconfig.json │ │ │ ├── other.ts │ │ │ ├── select.ts │ │ │ ├── set-operators.ts │ │ │ ├── subquery.ts │ │ │ ├── tables.ts │ │ │ ├── update.ts │ │ │ └── with.ts │ │ ├── tsconfig.json │ │ ├── utils/ │ │ │ └── neon-auth-token.ts │ │ └── utils.ts │ └── vitest.config.ts ├── drizzle-seed/ │ ├── README.md │ ├── package.json │ ├── rollup.config.ts │ ├── scripts/ │ │ └── build.ts │ ├── src/ │ │ ├── datasets/ │ │ │ ├── adjectives.ts │ │ │ ├── cityNames.ts │ │ │ ├── companyNameSuffixes.ts │ │ │ ├── countries.ts │ │ │ ├── emailDomains.ts │ │ │ ├── firstNames.ts │ │ │ ├── jobsTitles.ts │ │ │ ├── lastNames.ts │ │ │ ├── loremIpsumSentences.ts │ │ │ ├── phonesInfo.ts │ │ │ ├── states.ts │ │ │ └── streetSuffix.ts │ │ ├── index.ts │ │ ├── services/ │ │ │ ├── GeneratorFuncs.ts │ │ │ ├── Generators.ts │ │ │ ├── SeedService.ts │ │ │ ├── apiVersion.ts │ │ │ ├── utils.ts │ │ │ └── versioning/ │ │ │ └── v2.ts │ │ └── types/ │ │ ├── drizzleStudio.ts │ │ ├── seedService.ts │ │ └── tables.ts │ ├── tests/ │ │ ├── benchmarks/ │ │ │ └── generatorsBenchmark.ts │ │ ├── mysql/ │ │ │ ├── allDataTypesTest/ │ │ │ │ ├── mysqlSchema.ts │ │ │ │ └── mysql_all_data_types.test.ts │ │ │ ├── cyclicTables/ │ │ │ │ ├── cyclicTables.test.ts │ │ │ │ └── mysqlSchema.ts │ │ │ ├── generatorsTest/ │ │ │ │ ├── generators.test.ts │ │ │ │ └── mysqlSchema.ts │ │ │ ├── mysql.test.ts │ │ │ ├── mysqlSchema.ts │ │ │ └── softRelationsTest/ │ │ │ ├── mysqlSchema.ts │ │ │ └── softRelations.test.ts │ │ ├── northwind/ │ │ │ ├── mysqlSchema.ts │ │ │ ├── mysqlTest.ts │ │ │ ├── pgSchema.ts │ │ │ ├── pgTest.ts │ │ │ ├── sqliteSchema.ts │ │ │ └── sqliteTest.ts │ │ ├── pg/ │ │ │ ├── allDataTypesTest/ │ │ │ │ ├── pgSchema.ts │ │ │ │ └── pg_all_data_types.test.ts │ │ │ ├── cyclicTables/ │ │ │ │ ├── cyclicTables.test.ts │ │ │ │ └── pgSchema.ts │ │ │ ├── generatorsTest/ │ │ │ │ ├── generators.test.ts │ │ │ │ └── pgSchema.ts │ │ │ ├── pg.test.ts │ │ │ ├── pgSchema.ts │ │ │ └── softRelationsTest/ │ │ │ ├── pgSchema.ts │ │ │ └── softRelations.test.ts │ │ └── sqlite/ │ │ ├── allDataTypesTest/ │ │ │ ├── sqliteSchema.ts │ │ │ └── sqlite_all_data_types.test.ts │ │ ├── cyclicTables/ │ │ │ ├── cyclicTables.test.ts │ │ │ └── sqliteSchema.ts │ │ ├── softRelationsTest/ │ │ │ ├── softRelations.test.ts │ │ │ └── sqliteSchema.ts │ │ ├── sqlite.test.ts │ │ └── sqliteSchema.ts │ ├── tsconfig.build.json │ ├── tsconfig.json │ ├── type-tests/ │ │ ├── mysql.ts │ │ ├── pg.ts │ │ ├── sqlite.ts │ │ └── tsconfig.json │ └── vitest.config.ts ├── drizzle-typebox/ │ ├── README.md │ ├── package.json │ ├── rollup.config.ts │ ├── scripts/ │ │ ├── build.ts │ │ └── fix-imports.ts │ ├── src/ │ │ ├── column.ts │ │ ├── column.types.ts │ │ ├── constants.ts │ │ ├── index.ts │ │ ├── schema.ts │ │ ├── schema.types.internal.ts │ │ ├── schema.types.ts │ │ └── utils.ts │ ├── tests/ │ │ ├── mysql.test.ts │ │ ├── pg.test.ts │ │ ├── singlestore.test.ts │ │ ├── sqlite.test.ts │ │ ├── tsconfig.json │ │ └── utils.ts │ ├── tsconfig.build.json │ ├── tsconfig.json │ └── vitest.config.ts ├── drizzle-valibot/ │ ├── README.md │ ├── package.json │ ├── rollup.config.ts │ ├── scripts/ │ │ ├── build.ts │ │ └── fix-imports.ts │ ├── src/ │ │ ├── column.ts │ │ ├── column.types.ts │ │ ├── constants.ts │ │ ├── index.ts │ │ ├── schema.ts │ │ ├── schema.types.internal.ts │ │ ├── schema.types.ts │ │ └── utils.ts │ ├── tests/ │ │ ├── mysql.test.ts │ │ ├── pg.test.ts │ │ ├── singlestore.test.ts │ │ ├── sqlite.test.ts │ │ ├── tsconfig.json │ │ └── utils.ts │ ├── tsconfig.build.json │ ├── tsconfig.json │ └── vitest.config.ts ├── drizzle-zod/ │ ├── README.md │ ├── package.json │ ├── rollup.config.ts │ ├── scripts/ │ │ ├── build.ts │ │ └── fix-imports.ts │ ├── src/ │ │ ├── column.ts │ │ ├── column.types.ts │ │ ├── constants.ts │ │ ├── index.ts │ │ ├── schema.ts │ │ ├── schema.types.internal.ts │ │ ├── schema.types.ts │ │ └── utils.ts │ ├── tests/ │ │ ├── mysql.test.ts │ │ ├── pg.test.ts │ │ ├── singlestore.test.ts │ │ ├── sqlite.test.ts │ │ ├── tsconfig.json │ │ └── utils.ts │ ├── tsconfig.build.json │ ├── tsconfig.json │ └── vitest.config.ts ├── eslint/ │ └── eslint-plugin-drizzle-internal/ │ └── index.js ├── eslint-plugin-drizzle/ │ ├── .gitignore │ ├── package.json │ ├── readme.md │ ├── src/ │ │ ├── configs/ │ │ │ ├── all.ts │ │ │ └── recommended.ts │ │ ├── enforce-delete-with-where.ts │ │ ├── enforce-update-with-where.ts │ │ ├── index.ts │ │ └── utils/ │ │ ├── ast.ts │ │ └── options.ts │ ├── tests/ │ │ ├── delete.test.ts │ │ └── update.test.ts │ ├── tsconfig.json │ └── vitest.config.ts ├── integration-tests/ │ ├── .gitignore │ ├── .xata/ │ │ ├── migrations/ │ │ │ └── .ledger │ │ └── version/ │ │ └── compatibility.json │ ├── .xatarc │ ├── docker-neon.yml │ ├── drizzle2/ │ │ ├── mysql/ │ │ │ ├── 0000_nostalgic_carnage.sql │ │ │ └── meta/ │ │ │ ├── 0000_snapshot.json │ │ │ └── _journal.json │ │ ├── mysql-proxy/ │ │ │ ├── first/ │ │ │ │ ├── 0000_nostalgic_carnage.sql │ │ │ │ └── meta/ │ │ │ │ ├── 0000_snapshot.json │ │ │ │ └── _journal.json │ │ │ └── second/ │ │ │ ├── 0000_nostalgic_carnage.sql │ │ │ ├── 0001_test.sql │ │ │ └── meta/ │ │ │ ├── 0000_snapshot.json │ │ │ ├── 0001_snapshot.json │ │ │ └── _journal.json │ │ ├── pg/ │ │ │ ├── 0000_puzzling_flatman.sql │ │ │ ├── 0001_test.sql │ │ │ └── meta/ │ │ │ ├── 0000_snapshot.json │ │ │ └── _journal.json │ │ ├── pg-proxy/ │ │ │ ├── first/ │ │ │ │ ├── 0000_puzzling_flatman.sql │ │ │ │ └── meta/ │ │ │ │ ├── 0000_snapshot.json │ │ │ │ └── _journal.json │ │ │ └── second/ │ │ │ ├── 0000_puzzling_flatman.sql │ │ │ ├── 0001_test.sql │ │ │ └── meta/ │ │ │ ├── 0000_snapshot.json │ │ │ ├── 0001_snapshot.json │ │ │ └── _journal.json │ │ ├── planetscale/ │ │ │ ├── 0000_nostalgic_carnage.sql │ │ │ └── meta/ │ │ │ ├── 0000_snapshot.json │ │ │ └── _journal.json │ │ ├── singlestore/ │ │ │ ├── 0000_nostalgic_carnage.sql │ │ │ └── meta/ │ │ │ ├── 0000_snapshot.json │ │ │ └── _journal.json │ │ └── sqlite/ │ │ ├── 0000_fancy_bug.sql │ │ └── meta/ │ │ ├── 0000_snapshot.json │ │ └── _journal.json │ ├── js-tests/ │ │ └── driver-init/ │ │ ├── commonjs/ │ │ │ ├── better-sqlite3.test.cjs │ │ │ ├── libsql.test.cjs │ │ │ ├── mysql2.test.cjs │ │ │ ├── neon-http.test.cjs │ │ │ ├── neon-ws.test.cjs │ │ │ ├── node-pg.test.cjs │ │ │ ├── pglite.test.cjs │ │ │ ├── planetscale.test.cjs │ │ │ ├── postgres-js.test.cjs │ │ │ ├── schema.cjs │ │ │ ├── tidb.test.cjs │ │ │ └── vercel.test.cjs │ │ └── module/ │ │ ├── better-sqlite3.test.mjs │ │ ├── libsql.test.mjs │ │ ├── mysql2.test.mjs │ │ ├── neon-http.test.mjs │ │ ├── neon-ws.test.mjs │ │ ├── node-pg.test.mjs │ │ ├── pglite.test.mjs │ │ ├── planetscale.test.mjs │ │ ├── postgres-js.test.mjs │ │ ├── schema.mjs │ │ ├── tidb.test.mjs │ │ └── vercel.test.mjs │ ├── package.json │ ├── sst-env.d.ts │ ├── sst.config.ts │ ├── tests/ │ │ ├── awsdatapi.alltypes.test.ts │ │ ├── bun/ │ │ │ ├── bun-sql.test.ts │ │ │ ├── sqlite-nw.test.ts │ │ │ └── sqlite.test.ts │ │ ├── common.ts │ │ ├── extensions/ │ │ │ ├── postgis/ │ │ │ │ ├── pg.test.ts │ │ │ │ └── postgres.test.ts │ │ │ └── vectors/ │ │ │ ├── pg.test.ts │ │ │ └── postgres.test.ts │ │ ├── gel/ │ │ │ ├── cache.ts │ │ │ ├── createInstance.ts │ │ │ ├── gel-custom.test.ts │ │ │ ├── gel-ext.test.ts │ │ │ └── gel.test.ts │ │ ├── imports/ │ │ │ └── index.test.ts │ │ ├── mysql/ │ │ │ ├── mysql-common-cache.ts │ │ │ ├── mysql-common.ts │ │ │ ├── mysql-custom.test.ts │ │ │ ├── mysql-planetscale.test.ts │ │ │ ├── mysql-prefixed.test.ts │ │ │ ├── mysql-proxy.test.ts │ │ │ ├── mysql.test.ts │ │ │ └── tidb-serverless.test.ts │ │ ├── mysql-returning.test.ts │ │ ├── pg/ │ │ │ ├── awsdatapi.test.ts │ │ │ ├── neon-http-batch.test.ts │ │ │ ├── neon-http-batch.ts │ │ │ ├── neon-http.test.ts │ │ │ ├── neon-serverless.test.ts │ │ │ ├── node-postgres.test.ts │ │ │ ├── pg-common-cache.ts │ │ │ ├── pg-common.ts │ │ │ ├── pg-custom.test.ts │ │ │ ├── pg-proxy.test.ts │ │ │ ├── pglite.test.ts │ │ │ ├── postgres-js.test.ts │ │ │ ├── rls/ │ │ │ │ └── rls.definition.test.ts │ │ │ ├── vercel-pg.test.ts │ │ │ └── xata-http.test.ts │ │ ├── relational/ │ │ │ ├── bettersqlite.test.ts │ │ │ ├── db.ts │ │ │ ├── issues-schemas/ │ │ │ │ ├── duplicates/ │ │ │ │ │ ├── mysql/ │ │ │ │ │ │ ├── mysql.duplicates.test.ts │ │ │ │ │ │ └── mysql.duplicates.ts │ │ │ │ │ └── pg/ │ │ │ │ │ ├── pg.duplicates.test.ts │ │ │ │ │ └── pg.duplicates.ts │ │ │ │ └── wrong-mapping/ │ │ │ │ ├── pg.schema.ts │ │ │ │ └── pg.test.ts │ │ │ ├── mysql.planetscale.test.ts │ │ │ ├── mysql.schema.ts │ │ │ ├── mysql.test.ts │ │ │ ├── pg.postgresjs.test.ts │ │ │ ├── pg.schema.ts │ │ │ ├── pg.test.ts │ │ │ ├── singlestore.schema.ts │ │ │ ├── singlestore.test.ts │ │ │ ├── sqlite.schema.ts │ │ │ ├── tables.ts │ │ │ ├── turso.test.ts │ │ │ └── vercel.test.ts │ │ ├── replicas/ │ │ │ ├── mysql.test.ts │ │ │ ├── postgres.test.ts │ │ │ ├── singlestore.test.ts │ │ │ └── sqlite.test.ts │ │ ├── seeder/ │ │ │ ├── mysql.test.ts │ │ │ ├── mysqlSchema.ts │ │ │ ├── pg.test.ts │ │ │ ├── pgSchema.ts │ │ │ ├── sqlite.test.ts │ │ │ └── sqliteSchema.ts │ │ ├── singlestore/ │ │ │ ├── singlestore-cache.ts │ │ │ ├── singlestore-common.ts │ │ │ ├── singlestore-custom.test.ts │ │ │ ├── singlestore-prefixed.test.ts │ │ │ ├── singlestore-proxy.test.ts │ │ │ └── singlestore.test.ts │ │ ├── sqlite/ │ │ │ ├── better-sqlite.test.ts │ │ │ ├── d1-batch.test.ts │ │ │ ├── d1.test.ts │ │ │ ├── durable-objects/ │ │ │ │ ├── drizzle/ │ │ │ │ │ ├── 0000_cuddly_black_bolt.sql │ │ │ │ │ ├── meta/ │ │ │ │ │ │ ├── 0000_snapshot.json │ │ │ │ │ │ └── _journal.json │ │ │ │ │ └── migrations.js │ │ │ │ ├── index.ts │ │ │ │ ├── worker-configuration.d.ts │ │ │ │ └── wrangler.toml │ │ │ ├── libsql-batch.test.ts │ │ │ ├── libsql-http.test.ts │ │ │ ├── libsql-node.test.ts │ │ │ ├── libsql-sqlite3.test.ts │ │ │ ├── libsql-ws.test.ts │ │ │ ├── libsql.test.ts │ │ │ ├── sql-js.test.ts │ │ │ ├── sqlite-common-cache.ts │ │ │ ├── sqlite-common.ts │ │ │ ├── sqlite-proxy-batch.test.ts │ │ │ └── sqlite-proxy.test.ts │ │ ├── utils/ │ │ │ └── is-config.test.ts │ │ ├── utils.ts │ │ ├── version.test.ts │ │ └── xata/ │ │ └── xata.ts │ ├── tsconfig.json │ ├── type-tests/ │ │ └── join-nodenext/ │ │ ├── gel.ts │ │ ├── mysql.ts │ │ ├── package.json │ │ ├── pg.ts │ │ ├── singlestore.ts │ │ ├── sqlite.ts │ │ └── tsconfig.json │ ├── vitest-ci.config.ts │ └── vitest.config.ts ├── package.json ├── patches/ │ └── typescript@5.6.3.patch ├── pnpm-workspace.yaml ├── tsconfig.json └── turbo.json ================================================ FILE CONTENTS ================================================ ================================================ FILE: .eslintignore ================================================ node_modules dist dist-dts examples **/*.js **/*.mjs **/*.cjs **/playground integration-tests/tests/prisma/*/client integration-tests/tests/prisma/*/drizzle drizzle-kit/* ================================================ FILE: .eslintrc.yaml ================================================ root: true extends: - 'eslint:recommended' - 'plugin:@typescript-eslint/recommended' - 'plugin:unicorn/recommended' parser: '@typescript-eslint/parser' parserOptions: project: './tsconfig.json' plugins: - import - unused-imports - no-instanceof - drizzle-internal overrides: - files: - '**/tests/**/*.ts' - '**/type-tests/**/*.ts' rules: import/extensions: 'off' no-instanceof: 'off' - files: 'eslint-plugin-drizzle/**/*' rules: import/extensions: 'off' rules: '@typescript-eslint/consistent-type-imports': - error - disallowTypeAnnotations: false fixStyle: separate-type-imports '@typescript-eslint/no-import-type-side-effects': 'error' import/no-cycle: error import/no-self-import: error import/no-empty-named-blocks: error unused-imports/no-unused-imports: error import/no-useless-path-segments: error import/newline-after-import: error import/no-duplicates: error import/extensions: - error - always - ignorePackages: true '@typescript-eslint/no-explicit-any': 'off' '@typescript-eslint/no-non-null-assertion': 'off' '@typescript-eslint/no-namespace': 'off' '@typescript-eslint/no-unused-vars': - error - argsIgnorePattern: '^_' varsIgnorePattern: '^_' '@typescript-eslint/ban-types': - error - extendDefaults: true types: '{}' : false '@typescript-eslint/no-this-alias': 'off' '@typescript-eslint/no-var-requires': 'off' 'unicorn/prefer-node-protocol': 'off' 'unicorn/prefer-top-level-await': 'off' 'unicorn/prevent-abbreviations': 'off' 'unicorn/prefer-switch': 'off' 'unicorn/catch-error-name': 'off' 'unicorn/no-null': 'off' 'unicorn/numeric-separators-style': 'off' 'unicorn/explicit-length-check': 'off' 'unicorn/filename-case': 'off' 'unicorn/prefer-module': 'off' 'unicorn/no-array-reduce': 'off' 'unicorn/no-nested-ternary': 'off' 'unicorn/no-useless-undefined': - error - checkArguments: false 'unicorn/no-this-assignment': 'off' 'unicorn/empty-brace-spaces': 'off' 'unicorn/no-thenable': 'off' 'unicorn/consistent-function-scoping': 'off' 'unicorn/prefer-type-error': 'off' 'unicorn/relative-url-style': 'off' 'eqeqeq': 'error' 'no-instanceof/no-instanceof': 'error' 'drizzle-internal/require-entity-kind': 'error' 'unicorn/prefer-string-replace-all': 'off' 'unicorn/no-process-exit': 'off' '@typescript-eslint/ban-ts-comment': 'off' '@typescript-eslint/no-empty-interface': 'off' '@typescript-eslint/no-unsafe-declaration-merging': 'off' 'no-inner-declarations': 'off' ================================================ FILE: .github/FUNDING.yml ================================================ # These are supported funding model platforms github: drizzle-team patreon: # Replace with a single Patreon username open_collective: # Replace with a single Open Collective username ko_fi: # Replace with a single Ko-fi username tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry liberapay: # Replace with a single Liberapay username issuehunt: # Replace with a single IssueHunt username otechie: # Replace with a single Otechie username lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] ================================================ FILE: .github/ISSUE_TEMPLATE/bug-template.yaml ================================================ name: "Bug Report" description: Report an issue or possible bug title: "[BUG]:" labels: ["bug"] body: - type: markdown attributes: value: | Thank you for taking the time to file a bug report! Please provide as much information as possible. - type: checkboxes id: verified attributes: label: Report hasn't been filed before. options: - label: I have verified that the bug I'm about to report hasn't been filed before. required: true - type: input attributes: label: What version of `drizzle-orm` are you using? description: You can check the version by opening the `package.json` file in your project. placeholder: 0.0.0 validations: required: true - type: input attributes: label: What version of `drizzle-kit` are you using? description: You can check the version by opening the `package.json` file in your project. placeholder: 0.0.0 validations: required: true - type: input attributes: label: Other packages description: If this bug is related to one of the other first-party packages we maintain, please list them here alongside their version. placeholder: drizzle-zod@0.0.0, drizzle-valibot@0.0.0 validations: required: false - type: textarea attributes: label: Describe the Bug description: | To fill this field, please answer the following: - What is the undesired behavior? - What are the steps to reproduce it? - What is the desired result? If the issue is more specific, consider answering the following questions if you think they may be relevant: - What database engine are you using? Are you using a specific cloud provider? Which one? - Do you think this bug pertains to a specific database driver? Which one? - Are you working in a monorepo? - If this is a bug related to types: What Typescript version are you using? What's the content of your tsconfig.json file? - If you're using a runtime that isn't Node.js: Which one? What version? Have you verified that this isn't an issue with the runtime itself? validations: required: true ================================================ FILE: .github/ISSUE_TEMPLATE/config.yml ================================================ blank_issues_enabled: true contact_links: - name: Ask a question url: https://discord.gg/JGrkEU4Scj about: Ask questions and discuss with other community members in Discord ================================================ FILE: .github/ISSUE_TEMPLATE/docs-template.yaml ================================================ name: "Documentation Enhancement" description: Suggest documentation improvements title: "[DOCS]:" labels: ["docs"] body: - type: checkboxes id: verified attributes: label: Enhancement hasn't been filed before. options: - label: I have verified this enhancement I'm about to request hasn't been suggested before. required: true - type: textarea attributes: label: Describe the enhancement you want to request description: What do you want to change or add to the documentation? validations: required: true ================================================ FILE: .github/ISSUE_TEMPLATE/feature-template.yaml ================================================ name: "Feature Request" description: Suggest new feature title: "[FEATURE]:" labels: ["enhancement"] body: - type: checkboxes id: verified attributes: label: Feature hasn't been suggested before. options: - label: I have verified this feature I'm about to request hasn't been suggested before. required: true - type: textarea attributes: label: Describe the enhancement you want to request description: What do you want to change or add? What are the benefits of implementing this? validations: required: true ================================================ FILE: .github/workflows/codeql.yml ================================================ name: "CodeQL" on: push: branches: [ 'main', 'beta' ] pull_request: # The branches below must be a subset of the branches above branches: [ 'main' ] schedule: - cron: '44 16 * * 0' jobs: analyze: name: Analyze runs-on: ubuntu-22.04 permissions: actions: read contents: read security-events: write strategy: fail-fast: false matrix: language: [ 'javascript' ] # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] # Use only 'java' to analyze code written in Java, Kotlin or both # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support steps: - name: Checkout repository uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs # queries: security-extended,security-and-quality # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild uses: github/codeql-action/autobuild@v3 # ℹ️ Command-line programs to run using the OS shell. # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun # If the Autobuild fails above, remove it and uncomment the following three lines. # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. # - run: | # echo "Run, Build Application using script" # ./location_of_script_within_repo/buildscript.sh - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v3 with: category: "/language:${{matrix.language}}" ================================================ FILE: .github/workflows/release-feature-branch.yaml ================================================ name: Release (feature branch) on: workflow_call: secrets: PLANETSCALE_CONNECTION_STRING: required: true NEON_CONNECTION_STRING: required: true # NEON_HTTP_CONNECTION_STRING: # required: true TIDB_CONNECTION_STRING: required: true XATA_API_KEY: required: true XATA_BRANCH: required: true LIBSQL_REMOTE_URL: required: true LIBSQL_REMOTE_TOKEN: required: true jobs: test: # only run on all pushes or pull requests from forks if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository strategy: matrix: shard: - gel - planetscale - singlestore-core - singlestore-proxy - singlestore-prefixed - singlestore-custom - neon-http - neon-serverless - drizzle-orm - drizzle-kit - drizzle-zod - drizzle-seed - drizzle-typebox - drizzle-valibot - drizzle-arktype - other runs-on: ubuntu-22.04 services: postgres-postgis: image: postgis/postgis:16-3.4 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: drizzle options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 54322:5432 postgres-vector: image: pgvector/pgvector:pg16 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: drizzle options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 54321:5432 postgres: image: postgres:14 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: drizzle options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 55433:5432 mysql: image: mysql:8 env: MYSQL_ROOT_PASSWORD: root MYSQL_DATABASE: drizzle options: >- --health-cmd "mysqladmin ping" --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 33306:3306 singlestore: image: ghcr.io/singlestore-labs/singlestoredb-dev:latest env: ROOT_PASSWORD: singlestore ports: - 33307:3306 steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: node-version: '20.19' registry-url: 'https://registry.npmjs.org' - uses: pnpm/action-setup@v3 name: Install pnpm id: pnpm-install with: version: latest run_install: false - name: Get pnpm store directory id: pnpm-cache shell: bash run: | echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT - uses: actions/cache@v4 name: Setup pnpm cache with: path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} restore-keys: | ${{ runner.os }}-pnpm-store- - name: Install dependencies run: pnpm install - name: Build Prisma client working-directory: drizzle-orm run: pnpm prisma generate --schema src/prisma/schema.prisma - name: Build run: pnpm build - name: Run tests env: PG_CONNECTION_STRING: postgres://postgres:postgres@localhost:55433/drizzle PG_VECTOR_CONNECTION_STRING: postgres://postgres:postgres@localhost:54321/drizzle PG_POSTGIS_CONNECTION_STRING: postgres://postgres:postgres@localhost:54322/drizzle MYSQL_CONNECTION_STRING: mysql://root:root@localhost:33306/drizzle PLANETSCALE_CONNECTION_STRING: ${{ secrets.PLANETSCALE_CONNECTION_STRING }} NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} # NEON_HTTP_CONNECTION_STRING: postgres://postgres:postgres@db.localtest.me:5432/postgres NEON_HTTP_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} NEON_SERVERLESS_CONNECTION_STRING: postgres://postgres:postgres@localhost:5445/postgres TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }} XATA_API_KEY: ${{ secrets.XATA_API_KEY }} XATA_BRANCH: ${{ secrets.XATA_BRANCH }} LIBSQL_URL: file:local.db LIBSQL_REMOTE_URL: ${{ secrets.LIBSQL_REMOTE_URL }} LIBSQL_REMOTE_TOKEN: ${{ secrets.LIBSQL_REMOTE_TOKEN }} SINGLESTORE_CONNECTION_STRING: singlestore://root:singlestore@localhost:33307/ working-directory: integration-tests run: | if [[ ${{ github.event_name }} != "push" && "${{ github.event.pull_request.head.repo.full_name }}" != "${{ github.repository }}" ]]; then export SKIP_EXTERNAL_DB_TESTS=1 fi case ${{ matrix.shard }} in gel) if [[ -z "$SKIP_EXTERNAL_DB_TESTS" ]]; then pnpm vitest run tests/gel fi ;; planetscale) if [[ -z "$SKIP_EXTERNAL_DB_TESTS" ]]; then pnpm vitest run \ tests/mysql/mysql-planetscale.test.ts \ tests/relational/mysql.planetscale-v1.test.ts \ tests/relational/mysql.planetscale.test.ts fi ;; singlestore-core) pnpm vitest run tests/singlestore/singlestore.test.ts ;; singlestore-proxy) pnpm vitest run tests/singlestore/singlestore-proxy.test.ts ;; singlestore-prefixed) pnpm vitest run tests/singlestore/singlestore-prefixed.test.ts ;; singlestore-custom) pnpm vitest run tests/singlestore/singlestore-custom.test.ts ;; neon-http) if [[ -z "$SKIP_EXTERNAL_DB_TESTS" ]]; then pnpm vitest run tests/pg/neon-http.test.ts tests/pg/neon-http-batch.test.ts fi ;; neon-serverless) docker compose -f docker-neon.yml up -d pnpm vitest run --config=./vitest-ci.config.ts tests/pg/neon-serverless.test.ts docker compose -f docker-neon.yml down ;; drizzle-orm|drizzle-kit|drizzle-zod|drizzle-seed|drizzle-typebox|drizzle-valibot|drizzle-arktype) (cd .. && pnpm test --filter ${{ matrix.shard }}) ;; other) pnpm vitest run \ --exclude tests/gel \ --exclude tests/mysql/mysql-planetscale.test.ts \ --exclude tests/relational/mysql.planetscale-v1.test.ts \ --exclude tests/relational/mysql.planetscale.test.ts \ --exclude tests/singlestore/singlestore.test.ts \ --exclude tests/singlestore/singlestore-proxy.test.ts \ --exclude tests/singlestore/singlestore-prefixed.test.ts \ --exclude tests/singlestore/singlestore-custom.test.ts \ --exclude tests/pg/neon-http.test.ts \ --exclude tests/pg/neon-http-batch.test.ts \ --exclude tests/pg/neon-serverless.test.ts ;; esac attw: # only run on all pushes or pull requests from forks if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository strategy: matrix: package: - drizzle-orm - drizzle-kit - drizzle-zod - drizzle-seed - drizzle-typebox - drizzle-valibot - drizzle-arktype - eslint-plugin-drizzle runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: node-version: '22' registry-url: 'https://registry.npmjs.org' - uses: pnpm/action-setup@v3 name: Install pnpm id: pnpm-install with: version: latest run_install: false - name: Get pnpm store directory id: pnpm-cache shell: bash run: | echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT - uses: actions/cache@v4 name: Setup pnpm cache with: path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} restore-keys: | ${{ runner.os }}-pnpm-store- - name: Install dependencies run: pnpm install - name: Install Bun uses: oven-sh/setup-bun@v2 - name: Check preconditions id: checks shell: bash working-directory: ${{ matrix.package }} run: | old_version="$(jq -r .version package.json)" version="$old_version-$(git rev-parse --short HEAD)" npm version $version tag="${{ github.ref_name }}" is_version_published="$(npm view ${{ matrix.package }} versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')" if [[ "$is_version_published" == "true" ]]; then echo "\`${{ matrix.package }}$version\` already published, adding tag \`$tag\`" >> $GITHUB_STEP_SUMMARY npm dist-tag add ${{ matrix.package }}@$version $tag else { echo "version=$version" echo "tag=$tag" echo "has_new_release=true" } >> $GITHUB_OUTPUT fi - name: Build Prisma client if: steps.checks.outputs.has_new_release == 'true' working-directory: drizzle-orm run: pnpm prisma generate --schema src/prisma/schema.prisma - name: Build if: steps.checks.outputs.has_new_release == 'true' run: pnpm build - name: Pack if: steps.checks.outputs.has_new_release == 'true' working-directory: ${{ matrix.package }} run: npm run pack - name: Run @arethetypeswrong/cli if: steps.checks.outputs.has_new_release == 'true' working-directory: ${{ matrix.package }} run: bunx attw package.tgz release: # only run on all pushes or pull requests from forks if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository needs: - test - attw strategy: matrix: package: - drizzle-orm - drizzle-kit - drizzle-zod - drizzle-seed - drizzle-typebox - drizzle-valibot - drizzle-arktype - eslint-plugin-drizzle runs-on: ubuntu-22.04 permissions: contents: read id-token: write # for OIDC # force empty so npm can use OIDC env: NODE_AUTH_TOKEN: "" NPM_TOKEN: "" steps: - uses: actions/checkout@v5 - uses: pnpm/action-setup@v4 with: { run_install: false } - uses: actions/setup-node@v6 with: { node-version: '24', cache: 'pnpm', cache-dependency-path: pnpm-lock.yaml } - run: pnpm install --frozen-lockfile --prefer-offline # >= 11.5.1 for trusted publishing - name: Update NPM run: npm install -g npm@latest # nuke, so npm can use OIDC - name: Remove temp npmrc run: rm -f "$NPM_CONFIG_USERCONFIG" - name: Check preconditions id: checks shell: bash working-directory: ${{ matrix.package }} run: | old_version="$(jq -r .version package.json)" version="$old_version-$(git rev-parse --short HEAD)" npm version $version tag="${{ github.ref_name }}" is_version_published="$(npm view ${{ matrix.package }} versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')" if [[ "$is_version_published" == "true" ]]; then echo "\`${{ matrix.package }}$version\` already published, adding tag \`$tag\`" >> $GITHUB_STEP_SUMMARY else { echo "version=$version" echo "tag=$tag" echo "has_new_release=true" } >> $GITHUB_OUTPUT fi - name: Build Prisma client working-directory: drizzle-orm run: pnpm prisma generate --schema src/prisma/schema.prisma - name: Build if: steps.checks.outputs.has_new_release == 'true' run: pnpm build - name: Pack if: steps.checks.outputs.has_new_release == 'true' working-directory: ${{ matrix.package }} shell: bash run: npm run pack - name: Publish if: github.event_name == 'push' && steps.checks.outputs.has_new_release == 'true' working-directory: ${{ matrix.package }} shell: bash run: | tag="${{ steps.checks.outputs.tag }}" version="${{ steps.checks.outputs.version }}" echo "Publishing ${{ matrix.package }}@$tag using version $version" npm run publish -- --tag $tag echo "npm: \`${{ matrix.package }}@$tag | ${{ matrix.package }}@$version\`" >> $GITHUB_STEP_SUMMARY # Post release message to Discord # curl -X POST -H "Content-Type: application/json" -d "{\"embeds\": [{\"title\": \"New \`${{ matrix.package }}\` release! 🎉\", \"url\": \"https://www.npmjs.com/package/${{ matrix.package }}/v/$version\", \"color\": \"12907856\", \"fields\": [{\"name\": \"Version\", \"value\": \"\`$version\`\"}, {\"name\": \"Tag\", \"value\": \"\`$tag\`\"}]}]}" ${{ secrets.DISCORD_DEV_RELEASE_WEBHOOK_URL }} ================================================ FILE: .github/workflows/release-latest.yaml ================================================ name: Release (latest) on: workflow_call: secrets: PLANETSCALE_CONNECTION_STRING: required: true NEON_CONNECTION_STRING: required: true # NEON_HTTP_CONNECTION_STRING: # required: true TIDB_CONNECTION_STRING: required: true XATA_API_KEY: required: true XATA_BRANCH: required: true LIBSQL_REMOTE_URL: required: true LIBSQL_REMOTE_TOKEN: required: true jobs: test: strategy: matrix: shard: - gel - planetscale - singlestore-core - singlestore-proxy - singlestore-prefixed - singlestore-custom - neon-http - neon-serverless - drizzle-orm - drizzle-kit - drizzle-zod - drizzle-seed - drizzle-typebox - drizzle-valibot - drizzle-arktype - other runs-on: ubuntu-22.04 services: postgres-postgis: image: postgis/postgis:16-3.4 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: drizzle options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 54322:5432 postgres-vector: image: pgvector/pgvector:pg16 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: drizzle options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 54321:5432 postgres: image: postgres:14 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: drizzle options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 55433:5432 mysql: image: mysql:8 env: MYSQL_ROOT_PASSWORD: root MYSQL_DATABASE: drizzle options: >- --health-cmd "mysqladmin ping" --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 33306:3306 singlestore: image: ghcr.io/singlestore-labs/singlestoredb-dev:latest env: ROOT_PASSWORD: singlestore ports: - 33307:3306 steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: node-version: '20.19' registry-url: 'https://registry.npmjs.org' - uses: pnpm/action-setup@v3 name: Install pnpm id: pnpm-install with: version: latest run_install: false - name: Get pnpm store directory id: pnpm-cache shell: bash run: | echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT - uses: actions/cache@v4 name: Setup pnpm cache with: path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} restore-keys: | ${{ runner.os }}-pnpm-store- - name: Install dependencies run: pnpm install - name: Build Prisma client working-directory: drizzle-orm run: pnpm prisma generate --schema src/prisma/schema.prisma - name: Build run: pnpm build - name: Run tests env: PG_CONNECTION_STRING: postgres://postgres:postgres@localhost:55433/drizzle PG_VECTOR_CONNECTION_STRING: postgres://postgres:postgres@localhost:54321/drizzle PG_POSTGIS_CONNECTION_STRING: postgres://postgres:postgres@localhost:54322/drizzle MYSQL_CONNECTION_STRING: mysql://root:root@localhost:33306/drizzle PLANETSCALE_CONNECTION_STRING: ${{ secrets.PLANETSCALE_CONNECTION_STRING }} NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} # NEON_HTTP_CONNECTION_STRING: postgres://postgres:postgres@db.localtest.me:5432/postgres NEON_HTTP_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} NEON_SERVERLESS_CONNECTION_STRING: postgres://postgres:postgres@localhost:5445/postgres TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }} XATA_API_KEY: ${{ secrets.XATA_API_KEY }} XATA_BRANCH: ${{ secrets.XATA_BRANCH }} LIBSQL_URL: file:local.db LIBSQL_REMOTE_URL: ${{ secrets.LIBSQL_REMOTE_URL }} LIBSQL_REMOTE_TOKEN: ${{ secrets.LIBSQL_REMOTE_TOKEN }} SINGLESTORE_CONNECTION_STRING: singlestore://root:singlestore@localhost:33307/ working-directory: integration-tests run: | case ${{ matrix.shard }} in gel) pnpm vitest run tests/gel ;; planetscale) pnpm vitest run \ tests/mysql/mysql-planetscale.test.ts \ tests/relational/mysql.planetscale-v1.test.ts \ tests/relational/mysql.planetscale.test.ts ;; singlestore-core) pnpm vitest run tests/singlestore/singlestore.test.ts ;; singlestore-proxy) pnpm vitest run tests/singlestore/singlestore-proxy.test.ts ;; singlestore-prefixed) pnpm vitest run tests/singlestore/singlestore-prefixed.test.ts ;; singlestore-custom) pnpm vitest run tests/singlestore/singlestore-custom.test.ts ;; neon-http) pnpm vitest run tests/pg/neon-http.test.ts tests/pg/neon-http-batch.test.ts ;; neon-serverless) docker compose -f docker-neon.yml up -d pnpm vitest run tests/pg/neon-serverless.test.ts docker compose -f docker-neon.yml down ;; drizzle-orm|drizzle-kit|drizzle-zod|drizzle-seed|drizzle-typebox|drizzle-valibot|drizzle-arktype) (cd .. && pnpm test --filter ${{ matrix.shard }}) ;; other) pnpm vitest run \ --exclude tests/gel \ --exclude tests/mysql/mysql-planetscale.test.ts \ --exclude tests/relational/mysql.planetscale-v1.test.ts \ --exclude tests/relational/mysql.planetscale.test.ts \ --exclude tests/singlestore/singlestore.test.ts \ --exclude tests/singlestore/singlestore-proxy.test.ts \ --exclude tests/singlestore/singlestore-prefixed.test.ts \ --exclude tests/singlestore/singlestore-custom.test.ts \ --exclude tests/pg/neon-http.test.ts \ --exclude tests/pg/neon-http-batch.test.ts \ --exclude tests/pg/neon-serverless.test.ts ;; esac attw: strategy: matrix: package: - drizzle-orm - drizzle-kit - drizzle-zod - drizzle-seed - drizzle-typebox - drizzle-valibot - drizzle-arktype - eslint-plugin-drizzle runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: node-version: '18.18' registry-url: 'https://registry.npmjs.org' - uses: pnpm/action-setup@v3 name: Install pnpm id: pnpm-install with: version: latest run_install: false - name: Get pnpm store directory id: pnpm-cache shell: bash run: | echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_OUTPUT - uses: actions/cache@v4 name: Setup pnpm cache with: path: ${{ steps.pnpm-cache.outputs.STORE_PATH }} key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} restore-keys: | ${{ runner.os }}-pnpm-store- - name: Install dependencies run: pnpm install - name: Install Bun uses: oven-sh/setup-bun@v2 - name: Check preconditions id: checks shell: bash working-directory: ${{ matrix.package }} run: | latest="$(npm view --json ${{ matrix.package }} dist-tags.latest | jq -r)" version="$(jq -r .version package.json)" is_version_published="$(npm view ${{ matrix.package }} versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')" if [[ "$is_version_published" == "false" && "$latest" != "$version" ]]; then { echo "version=$version" echo "has_new_release=true" echo "changelog_path=$changelogPath" } >> $GITHUB_OUTPUT fi - name: Build Prisma client if: steps.checks.outputs.has_new_release == 'true' working-directory: drizzle-orm run: pnpm prisma generate --schema src/prisma/schema.prisma - name: Build if: steps.checks.outputs.has_new_release == 'true' run: pnpm build - name: Pack if: steps.checks.outputs.has_new_release == 'true' working-directory: ${{ matrix.package }} run: npm run pack - name: Run @arethetypeswrong/cli if: steps.checks.outputs.has_new_release == 'true' working-directory: ${{ matrix.package }} run: bunx attw package.tgz release: permissions: contents: write # for creating GitHub releases id-token: write # for OIDC needs: - test - attw strategy: fail-fast: false matrix: package: - drizzle-orm - drizzle-kit - drizzle-zod - drizzle-seed - drizzle-typebox - drizzle-valibot - drizzle-arktype - eslint-plugin-drizzle runs-on: ubuntu-22.04 # force empty so npm can use OIDC env: NODE_AUTH_TOKEN: "" NPM_TOKEN: "" services: postgres-postgis: image: postgis/postgis:16-3.4 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: drizzle options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 54322:5432 postgres-vector: image: pgvector/pgvector:pg16 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: drizzle options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 54321:5432 postgres: image: postgres:14 env: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: drizzle options: >- --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 55433:5432 mysql: image: mysql:8 env: MYSQL_ROOT_PASSWORD: root MYSQL_DATABASE: drizzle options: >- --health-cmd "mysqladmin ping" --health-interval 10s --health-timeout 5s --health-retries 5 ports: - 33306:3306 steps: - uses: actions/checkout@v5 - uses: pnpm/action-setup@v4 with: { run_install: false } - uses: actions/setup-node@v6 with: { node-version: '24', cache: 'pnpm', cache-dependency-path: pnpm-lock.yaml } - run: pnpm install --frozen-lockfile --prefer-offline # >= 11.5.1 for trusted publishing - name: Update NPM run: npm install -g npm@latest # nuke, so npm can use OIDC - name: Remove temp npmrc run: rm -f "$NPM_CONFIG_USERCONFIG" - name: Check preconditions id: checks shell: bash working-directory: ${{ matrix.package }} run: | latest="$(npm view --json ${{ matrix.package }} dist-tags.latest | jq -r)" version="$(jq -r .version package.json)" is_version_published="$(npm view ${{ matrix.package }} versions --json | jq -r '.[] | select(. == "'$version'") | . == "'$version'"')" if [[ "$is_version_published" == "true" ]]; then echo "\`${{ matrix.package }}@$version\` already published, adding tag \`latest\`" >> $GITHUB_STEP_SUMMARY elif [[ "$latest" != "$version" ]]; then echo "Latest: $latest" echo "Current: $version" changelogPath=$(node -e "console.log(require('path').resolve('..', 'changelogs', '${{ matrix.package }}', '$version.md'))") if [[ ! -f "$changelogPath" ]]; then echo "::error::Changelog for version $version not found: $changelogPath" exit 1 fi { echo "version=$version" echo "has_new_release=true" echo "changelog_path=$changelogPath" } >> $GITHUB_OUTPUT else echo "Already up to date: $version" echo "\`$version\` is already latest on NPM" >> $GITHUB_STEP_SUMMARY fi - name: Build Prisma client if: steps.checks.outputs.has_new_release == 'true' working-directory: drizzle-orm run: pnpm prisma generate --schema src/prisma/schema.prisma - name: Build if: steps.checks.outputs.has_new_release == 'true' run: pnpm build - name: Pack if: steps.checks.outputs.has_new_release == 'true' working-directory: ${{ matrix.package }} shell: bash run: npm run pack - name: Publish if: steps.checks.outputs.has_new_release == 'true' working-directory: ${{ matrix.package }} shell: bash run: | version="${{ steps.checks.outputs.version }}" echo "Publishing ${{ matrix.package }}@$version" npm run publish echo "npm: \`+ ${{ matrix.package }}@$version\`" >> $GITHUB_STEP_SUMMARY # Post release message to Discord # curl -X POST -H "Content-Type: application/json" -d "{\"embeds\": [{\"title\": \"New \`${{ matrix.package }}\` release! 🎉\", \"url\": \"https://www.npmjs.com/package/${{ matrix.package }}\", \"color\": \"12907856\", \"fields\": [{\"name\": \"Tag\", \"value\": \"\`$tag\`\"}]}]}" ${{ secrets.DISCORD_RELEASE_WEBHOOK_URL }} - name: Create GitHub release for ORM package uses: actions/github-script@v6 if: matrix.package == 'drizzle-orm' && steps.checks.outputs.has_new_release == 'true' with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | try { const fs = require("fs"); const path = require("path"); const version = "${{ steps.checks.outputs.version }}"; const changelog = fs.readFileSync("${{ steps.checks.outputs.changelog_path }}", "utf8"); const release = await github.rest.repos.createRelease({ owner: context.repo.owner, repo: context.repo.repo, tag_name: `${version}`, name: `${version}`, body: changelog, }); await github.rest.repos.uploadReleaseAsset({ owner: context.repo.owner, repo: context.repo.repo, release_id: release.data.id, name: `${{ matrix.package }}-${version}-dist.tgz`, data: fs.readFileSync(path.resolve("${{ matrix.package }}", "package.tgz")), }); } catch (e) { core.setFailed(e.message); } - name: Create GitHub release for KIT package uses: actions/github-script@v6 if: matrix.package == 'drizzle-kit' && steps.checks.outputs.has_new_release == 'true' with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | try { const fs = require("fs"); const path = require("path"); const version = "${{ steps.checks.outputs.version }}"; const changelog = fs.readFileSync("${{ steps.checks.outputs.changelog_path }}", "utf8"); const release = await github.rest.repos.createRelease({ owner: context.repo.owner, repo: context.repo.repo, tag_name: `drizzle-kit@${version}`, name: `drizzle-kit@${version}`, body: changelog, }); await github.rest.repos.uploadReleaseAsset({ owner: context.repo.owner, repo: context.repo.repo, release_id: release.data.id, name: `${{ matrix.package }}-${version}-dist.tgz`, data: fs.readFileSync(path.resolve("${{ matrix.package }}", "package.tgz")), }); } catch (e) { core.setFailed(e.message); } ================================================ FILE: .github/workflows/router.yaml ================================================ name: Release Router on: push: branches-ignore: - main pull_request: workflow_dispatch: jobs: switch: runs-on: ubuntu-24.04 outputs: target: ${{ steps.route.outputs.target }} steps: - name: Route release id: route shell: bash run: | HEAD_REPO="${{ github.event.pull_request.head.repo.full_name }}" if [[ "$GITHUB_EVENT_NAME" == "workflow_dispatch" && "${GITHUB_REF##*/}" == "main" ]]; then echo "target=latest" >> $GITHUB_OUTPUT # only run on all pushes or pull requests from forks elif [[ "$GITHUB_EVENT_NAME" == "push" ]] || [[ "$HEAD_REPO" != "$GITHUB_REPOSITORY" ]]; then echo "target=feature" >> $GITHUB_OUTPUT else echo "target=skip" >> $GITHUB_OUTPUT fi run-feature: needs: switch if: needs.switch.outputs.target == 'feature' uses: ./.github/workflows/release-feature-branch.yaml secrets: PLANETSCALE_CONNECTION_STRING: ${{ secrets.PLANETSCALE_CONNECTION_STRING }} NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} # NEON_HTTP_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }} XATA_API_KEY: ${{ secrets.XATA_API_KEY }} XATA_BRANCH: ${{ secrets.XATA_BRANCH }} LIBSQL_REMOTE_URL: ${{ secrets.LIBSQL_REMOTE_URL }} LIBSQL_REMOTE_TOKEN: ${{ secrets.LIBSQL_REMOTE_TOKEN }} run-latest: needs: switch if: needs.switch.outputs.target == 'latest' uses: ./.github/workflows/release-latest.yaml secrets: PLANETSCALE_CONNECTION_STRING: ${{ secrets.PLANETSCALE_CONNECTION_STRING }} NEON_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} # NEON_HTTP_CONNECTION_STRING: ${{ secrets.NEON_CONNECTION_STRING }} TIDB_CONNECTION_STRING: ${{ secrets.TIDB_CONNECTION_STRING }} XATA_API_KEY: ${{ secrets.XATA_API_KEY }} XATA_BRANCH: ${{ secrets.XATA_BRANCH }} LIBSQL_REMOTE_URL: ${{ secrets.LIBSQL_REMOTE_URL }} LIBSQL_REMOTE_TOKEN: ${{ secrets.LIBSQL_REMOTE_TOKEN }} ================================================ FILE: .github/workflows/unpublish-release-feature-branch.yaml ================================================ name: Unpublish release (feature branch) on: delete jobs: unpublish-release: if: github.event.ref_type == 'branch' && github.event.ref != 'refs/heads/main' && github.event.ref != 'refs/heads/beta' strategy: matrix: package: - drizzle-orm - drizzle-kit - drizzle-zod - drizzle-typebox - drizzle-valibot - drizzle-arktype - eslint-plugin-drizzle runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 - uses: actions/setup-node@v4 with: node-version: '22' registry-url: 'https://registry.npmjs.org' - name: Unpublish run: | tag="${{ github.event.ref }}" tag="${tag#refs/heads/}" echo "Unpublishing ${{ matrix.package }}@$tag" npm dist-tag rm ${{ matrix.package }} $tag echo "npm: \`- ${{ matrix.package }}@$tag\`" >> $GITHUB_STEP_SUMMARY working-directory: ${{ matrix.package }} env: NODE_AUTH_TOKEN: ${{ secrets.NPM_ACCESS_TOKEN }} ================================================ FILE: .gitignore ================================================ node_modules .vscode dist dist.new *.tsbuildinfo *.tgz /*.sql .cache .turbo .rollup.cache dist-dts rollup.config-*.mjs *.log .DS_Store drizzle-seed/src/dev ================================================ FILE: .markdownlint.yaml ================================================ no-inline-html: false first-line-h1: false line-length: false MD010: spaces_per_tab: 2 ================================================ FILE: .npmrc ================================================ # prefer-workspace-packages = true ================================================ FILE: .nvmrc ================================================ 22 ================================================ FILE: CODE_OF_CONDUCT.md ================================================ # Contributor Covenant Code of Conduct ## Our Pledge We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. ## Our Standards Examples of behavior that contributes to a positive environment for our community include: * Demonstrating empathy and kindness toward other people * Being respectful of differing opinions, viewpoints, and experiences * Giving and gracefully accepting constructive feedback * Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience * Focusing on what is best not just for us as individuals, but for the overall community Examples of unacceptable behavior include: * The use of sexualized language or imagery, and sexual attention or advances of any kind * Trolling, insulting or derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or email address, without their explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Enforcement Responsibilities Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. ## Scope This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at help@drizzle.team. All complaints will be reviewed and investigated promptly and fairly. All community leaders are obligated to respect the privacy and security of the reporter of any incident. ## Enforcement Guidelines Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: ### 1. Correction **Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. **Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. ### 2. Warning **Community Impact**: A violation through a single incident or series of actions. **Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. ### 3. Temporary Ban **Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. **Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. ### 4. Permanent Ban **Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. **Consequence**: A permanent ban from any sort of public interaction within the community. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity). [homepage]: https://www.contributor-covenant.org For answers to common questions about this code of conduct, see the FAQ at https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations. ================================================ FILE: CONTRIBUTING.md ================================================ # Contributing Welcome! We're glad you're interested in Drizzle ORM and want to help us make it better. Drizzle ORM is owned by [Drizzle Team](https://drizzle.team) and maintained by community members, mainly by our core contributors ([@AndriiSherman](https://github.com/AndriiSherman), [@AlexBlokh](https://github.com/AlexBlokh), [@dankochetov](https://github.com/dankochetov)). Everything that is going to be merged should be approved by all core contributors members. --- There are many ways you can contribute to the Drizzle ORM project: - [Submitting bug reports](#bug-report) - [Submitting feature request](#feature-request) - [Providing feedback](#feedback) - [Contribution guidelines](#contribution-guidelines) ## Submitting bug report To report a bug or issue, please use our [issue form](https://github.com/drizzle-team/drizzle-orm/issues/new/choose) and choose Bug Report. ## Submitting feature request To request a feature, please use our [issue form](https://github.com/drizzle-team/drizzle-orm/issues/new/choose) and choose Feature Request. ## Providing feedback There are several ways you can provide feedback: - You can join our [Discord server](https://discord.gg/yfjTbVXMW4) and provide feedback there. - You can add new ticket in [Discussions](https://github.com/drizzle-team/drizzle-orm/discussions). - Mention our [Twitter account](https://twitter.com/DrizzleOrm). ## Contribution guidelines - [Pre-contribution setup](#pre-contribution) - [Installing Node](#installing-node) - [Installing pnpm](#installing-pnpm) - [Installing Docker](#installing-docker) - [Cloning the repository](#cloning-the-repository) - [Repository structure](#repository-structure) - [Building the project](#building-the-project) - [Commit message guidelines](#commit-message-guidelines) - [Contributing to `drizzle-orm`](#contributing-orm) - [Project structure](#project-structure-orm) - [Running tests](#running-tests-orm) - [PR guidelines](#pr-guidelines-orm) - [Contributing to `drizzle-kit`](#contributing-kit) - [Project structure](#project-structure-kit) - [Running tests](#running-tests-kit) - [PR guidelines](#pr-guidelines-kit) ## Pre-contribution setup ### Installing Node ```bash # https://github.com/nvm-sh/nvm#install--update-script curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.1/install.sh | bash nvm install 18.13.0 nvm use 18.13.0 ``` ### Installing pnpm ```bash # https://pnpm.io/installation npm install -g pnpm ``` ### Installing Docker ```bash # https://docs.docker.com/get-docker/ # Use Docker's guide to install Docker for your OS. ``` ### Cloning the repository ```bash git clone https://github.com/drizzle-team/drizzle-orm.git cd drizzle-orm ``` ### Repository structure - 📂 `drizzle-orm/` orm core package with all main logic for each dialect - 📂 `drizzle-kit/` kit core package with all main logic and tests for each dialect - 📂 `drizzle-typebox/` all the code related to drizzle+typebox extension - 📂 `drizzle-valibot/` all the code related to drizzle+valibot extension - 📂 `drizzle-zod/` all the code related to drizzle+zod extension - 📂 `eslint-plugin-drizzle/` all the code related to drizzle eslint plugin - 📂 `changelogs/` all changelogs for drizzle-orm, drizzle-kit, drizzle-typebox, drizzle-zod, drizzle-valibot modules - 📂 `examples/` package with Drizzle ORM usage examples - 📂 `integration-tests/` package with all type of tests for each supported database ### Building the project Run the following script from the root folder to build the whole monorepo. Running it from a specific package folder will only build that package. ```bash pnpm install && pnpm build ``` ## Commit message guidelines We have specific rules on how commit messages should be structured. It's important to make sure your commit messages are clear, concise, and informative to make it easier for others to understand the changes you are making. All commit messages should follow the pattern below: ``` ``` Example: ``` Add groupBy error message In specific case, groupBy was responding with unreadable error ... ``` > [!WARNING] > All commits should be signed before submitting a PR. Please check the documentation on [how to sign commits](https://docs.github.com/en/authentication/managing-commit-signature-verification/about-commit-signature-verification). ## Contributing to `drizzle-orm` ### Project structure - 📂 `pg-core/`, `mysql-core/`, `sqlite-core/` core packages for each dialect with all the main logic for relation and query builder - 📂 `sql/` package containing all expressions and SQL template implementation - All other folders are for each specific driver that Drizzle ORM supports. ### Running tests All tests for Drizzle ORM are integration tests that simulate real databases with different queries and responses from each database. Each file in `integration-tests` has a list of different scenarios for different dialects and drivers. Each file creates a Docker container with the needed database and runs the test cases there. After every test is run, the Docker container will be deleted. If you have added additional logic to a core package, make sure that all tests completed without any failures. > [!NOTE] > If you have added data types or a feature for query building, you need to create additional test cases using the new API to ensure it works properly. If you are in the root of the repository, run all integration tests with the following script: ```bash cd integration-tests && pnpm test ``` ### PR guidelines 1. PR titles should follow the pattern below: ``` []: ``` Example: ``` [Pg] Add PostGIS extension support ``` 2. PRs should contain a detailed description of everything that was changed. 3. Commit messages should follow the [message style guidelines](#commit-message-guidelines). 4. PRs should implement: - Tests for features that were added. - Tests for bugs that were fixed. > [!NOTE] > To understand how tests should be created and run, please check the [Running tests](#running-tests-orm) section. ## Contributing to `drizzle-kit` ### Project structure - 📂 `cli/` - 📄 `schema.ts` all the commands defined using brocli - 📂 `commands/` all the business logic for drizzle-kit commands - 📂 `extensions/` all the extension helpers for databases - 📂 `serializer/` all the necessary logic to read from the Drizzle ORM schema and convert it to a common JSON format, as well as the logic to introspect all tables, types, and other database elements and convert them to a common JSON format - 📄 `introspect-pg.ts`, `introspect-mysql.ts`, `introspect-sqlite.ts` these files are responsible for mapping JSON snapshots to TypeScript files during introspect commands - 📄 `snapshotsDiffer.ts` this file handles the mapping from JSON snapshot format to JSON statement objects. - 📄 `jsonStatements.ts` this file defines JSON statement types, interfaces, and helper functions. - 📄 `sqlgenerator.ts` this file converts JSON statements to SQL strings. ### Running tests All tests for Drizzle Kit are integration tests that simulate real databases with different queries and responses from each database. Each file in `drizzle-kit/tests` has a list of different scenarios for different commands. Each file creates a Docker container with the needed database and runs the test cases there. After every test is run, the Docker container will be deleted. We test MySQL, PostgreSQL (using PGlite), and SQLite. If you are in the root of the repository, run all Drizzle Kit tests with the following script: ```bash cd drizzle-kit && pnpm test ``` ### PR guidelines 1. PR titles should follow the pattern below: ``` [-kit]: ``` Example: ``` [Pg-kit] Add PostGIS extension support ``` 2. PRs should contain a detailed description of everything that was changed. 3. Commit messages should follow the [message style guidelines](#commit-message-guidelines). 4. PRs should implement: - Tests for features that were added. - Tests for bugs that were fixed. > [!NOTE] > To understand how tests should be created and run, please check the [Running tests](#running-tests-kit) section. ================================================ FILE: LICENSE ================================================ Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ================================================ FILE: README.md ================================================

Headless ORM for NodeJS, TypeScript and JavaScript 🚀

WebsiteDocumentationTwitterDiscord


### What's Drizzle? Drizzle is a modern TypeScript ORM developers [wanna use in their next project](https://stateofdb.com/tools/drizzle). It is [lightweight](https://bundlephobia.com/package/drizzle-orm) at only ~7.4kb minified+gzipped, and it's tree shakeable with exactly 0 dependencies. **Drizzle supports every PostgreSQL, MySQL and SQLite database**, including serverless ones like [Turso](https://orm.drizzle.team/docs/get-started-sqlite#turso), [Neon](https://orm.drizzle.team/docs/get-started-postgresql#neon), [Xata](https://orm.drizzle.team/docs/connect-xata), [PlanetScale](https://orm.drizzle.team/docs/get-started-mysql#planetscale), [Cloudflare D1](https://orm.drizzle.team/docs/get-started-sqlite#cloudflare-d1), [FlyIO LiteFS](https://fly.io/docs/litefs/), [Vercel Postgres](https://orm.drizzle.team/docs/get-started-postgresql#vercel-postgres), [Supabase](https://orm.drizzle.team/docs/get-started-postgresql#supabase) and [AWS Data API](https://orm.drizzle.team/docs/get-started-postgresql#aws-data-api). No bells and whistles, no Rust binaries, no serverless adapters, everything just works out of the box. **Drizzle is serverless-ready by design**. It works in every major JavaScript runtime like NodeJS, Bun, Deno, Cloudflare Workers, Supabase functions, any Edge runtime, and even in browsers. With Drizzle you can be [**fast out of the box**](https://orm.drizzle.team/benchmarks) and save time and costs while never introducing any data proxies into your infrastructure. While you can use Drizzle as a JavaScript library, it shines with TypeScript. It lets you [**declare SQL schemas**](https://orm.drizzle.team/docs/sql-schema-declaration) and build both [**relational**](https://orm.drizzle.team/docs/rqb) and [**SQL-like queries**](https://orm.drizzle.team/docs/select), while keeping the balance between type-safety and extensibility for toolmakers to build on top. ### Ecosystem While Drizzle ORM remains a thin typed layer on top of SQL, we made a set of tools for people to have best possible developer experience. Drizzle comes with a powerful [**Drizzle Kit**](https://orm.drizzle.team/kit-docs/overview) CLI companion for you to have hassle-free migrations. It can generate SQL migration files for you or apply schema changes directly to the database. We also have [**Drizzle Studio**](https://orm.drizzle.team/drizzle-studio/overview) for you to effortlessly browse and manipulate data in your database of choice. ### Documentation Check out the full documentation on [the website](https://orm.drizzle.team/docs/overview). ### Our sponsors ❤️

================================================ FILE: SECURITY.md ================================================ # Security Policy ## Reporting a Vulnerability If you have a security issue to report, please contact us at [security@drizzle.team](mailto:security@drizzle.team). ================================================ FILE: changelogs/README.md ================================================ # Release flow - Push feature branch - GitHub workflow publishes new feature tag to NPM - Bump package versions manually - (Optional) Create and merge PR to beta - (Optional) GitHub workflow publishes new beta version to NPM - Create PR to main - TODO: GitHub workflow checks if changelog is present for every package version - Resolve all conflicts, bump versions if necessary - Merge PR - GitHub workflow publishes new latest version to NPM and removes feature tag from NPM ================================================ FILE: changelogs/drizzle-arktype/0.1.2.md ================================================ `drizzle-arktype` is a plugin for [Drizzle ORM](https://github.com/drizzle-team/drizzle-orm) that allows you to generate [arktype](https://arktype.io/) schemas from Drizzle ORM schemas. **Features** - Create a select schema for tables, views and enums. - Create insert and update schemas for tables. - Supports all dialects: PostgreSQL, MySQL and SQLite. # Usage ```ts import { pgEnum, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core'; import { createInsertSchema, createSelectSchema } from 'drizzle-arktype'; import { type } from 'arktype'; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), role: text('role', { enum: ['admin', 'user'] }).notNull(), createdAt: timestamp('created_at').notNull().defaultNow(), }); // Schema for inserting a user - can be used to validate API requests const insertUserSchema = createInsertSchema(users); // Schema for updating a user - can be used to validate API requests const updateUserSchema = createUpdateSchema(users); // Schema for selecting a user - can be used to validate API responses const selectUserSchema = createSelectSchema(users); // Overriding the fields const insertUserSchema = createInsertSchema(users, { role: type('string'), }); // Refining the fields - useful if you want to change the fields before they become nullable/optional in the final schema const insertUserSchema = createInsertSchema(users, { id: (schema) => schema.atLeast(1), role: type('string'), }); // Usage const isUserValid = parse(insertUserSchema, { name: 'John Doe', email: 'johndoe@test.com', role: 'admin', }); ``` thanks @L-Mario564 ================================================ FILE: changelogs/drizzle-arktype/0.1.3.md ================================================ - TS language server performance improvements - Fixed [Buffer is not defined using drizzle-arktype client side with vite](https://github.com/drizzle-team/drizzle-orm/issues/4383) - Fixed [[BUG]: drizzle-arktype Buffer is undefined](https://github.com/drizzle-team/drizzle-orm/issues/4371) ================================================ FILE: changelogs/drizzle-kit/0.23.2.md ================================================ - Fixed a bug in PostgreSQL with push and introspect where the `schemaFilter` object was passed. It was detecting enums even in schemas that were not defined in the schemaFilter. - Fixed the `drizzle-kit up` command to work as expected, starting from the sequences release. ================================================ FILE: changelogs/drizzle-kit/0.24.0.md ================================================ ## Breaking changes (for SQLite users) #### Fixed [Composite primary key order is not consistent](https://github.com/drizzle-team/drizzle-kit-mirror/issues/342) by removing `sort` in SQLite and to be consistant with the same logic in PostgreSQL and MySQL The issue that may arise for SQLite users with any driver using composite primary keys is that the order in the database may differ from the Drizzle schema. - If you are using `push`, you **MAY** be prompted to update your table with a new order of columns in the composite primary key. You will need to either change it manually in the database or push the changes, but this may lead to data loss, etc. - If you are using `generate`, you **MAY** also be prompted to update your table with a new order of columns in the composite primary key. You can either keep that migration or skip it by emptying the SQL migration file. If nothing works for you and you are blocked, please reach out to me @AndriiSherman. I will try to help you! ## Bug fixes - [[BUG] When using double type columns, import is not inserted](https://github.com/drizzle-team/drizzle-kit-mirror/issues/403) - thanks @Karibash - [[BUG] A number value is specified as the default for a column of type char](https://github.com/drizzle-team/drizzle-kit-mirror/issues/404) - thanks @Karibash - [[BUG]: Array default in migrations are wrong](https://github.com/drizzle-team/drizzle-orm/issues/2621) - thanks @L-Mario564 - [[FEATURE]: Simpler default array fields](https://github.com/drizzle-team/drizzle-orm/issues/2709) - thanks @L-Mario564 - [[BUG]: drizzle-kit generate succeeds but generates invalid SQL for default([]) - Postgres](https://github.com/drizzle-team/drizzle-orm/issues/2432) - thanks @L-Mario564 - [[BUG]: Incorrect type for array column default value](https://github.com/drizzle-team/drizzle-orm/issues/2334) - thanks @L-Mario564 - [[BUG]: error: column is of type integer[] but default expression is of type integer](https://github.com/drizzle-team/drizzle-orm/issues/2224) - thanks @L-Mario564 - [[BUG]: Default value in array generating wrong migration file](https://github.com/drizzle-team/drizzle-orm/issues/1003) - thanks @L-Mario564 - [[BUG]: enum as array, not possible?](https://github.com/drizzle-team/drizzle-orm/issues/1564) - thanks @L-Mario564 ================================================ FILE: changelogs/drizzle-kit/0.24.1.md ================================================ ## Bug fixes > Big thanks to @L-Mario564 for his [PR](https://github.com/drizzle-team/drizzle-orm/pull/2804). It conflicted in most cases with a PR that was merged, but we incorporated some of his logic. Merging it would have caused more problems and taken more time to resolve, so we just took a few things from his PR, like removing "::" mappings in introspect and some array type default handlers ### What was fixed 1. The Drizzle Kit CLI was not working properly for the `introspect` command. 2. Added the ability to use column names with special characters for all dialects. 3. Included PostgreSQL sequences in the introspection process. 4. Reworked array type introspection and added all test cases. 5. Fixed all (we hope) default issues in PostgreSQL, where `::` was included in the introspected output. 6. `preserve` casing option was broken ### Tickets that were closed - [[BUG]: invalid schema generation with drizzle-kit introspect:pg](https://github.com/drizzle-team/drizzle-orm/issues/1210) - [[BUG][mysql introspection]: TS error when introspect column including colon](https://github.com/drizzle-team/drizzle-orm/issues/1928) - [[BUG]: Unhandled defaults when introspecting postgres db](https://github.com/drizzle-team/drizzle-orm/issues/1625) - [[BUG]: PostgreSQL Enum Naming and Schema Typing Issue](https://github.com/drizzle-team/drizzle-orm/issues/2315) - [[BUG]: drizzle-kit instrospect command generates syntax error on varchar column types](https://github.com/drizzle-team/drizzle-orm/issues/2714) - [[BUG]: Introspecting varchar[] type produces syntactically invalid schema.ts](https://github.com/drizzle-team/drizzle-orm/issues/1633) - [[BUG]: introspect:pg column not using generated enum name](https://github.com/drizzle-team/drizzle-orm/issues/1648) - [[BUG]: drizzle-kit introspect casing "preserve" config not working](https://github.com/drizzle-team/drizzle-orm/issues/2773) - [[BUG]: drizzle-kit introspect fails on required param that is defined](https://github.com/drizzle-team/drizzle-orm/issues/2719) - [[BUG]: Error when running npx drizzle-kit introspect: "Expected object, received string"](https://github.com/drizzle-team/drizzle-orm/issues/2657) - [[BUG]: Missing index names when running introspect command [MYSQL]](https://github.com/drizzle-team/drizzle-orm/issues/2525) - [[BUG]: drizzle-kit introspect TypeError: Cannot read properties of undefined (reading 'toLowerCase')](https://github.com/drizzle-team/drizzle-orm/issues/2338) - [[BUG]: Wrong column name when using PgEnum.array()](https://github.com/drizzle-team/drizzle-orm/issues/2100) - [[BUG]: Incorrect Schema Generated when introspecting extisting pg database](https://github.com/drizzle-team/drizzle-orm/issues/1985) - [[⚠️🐞BUG]: index() missing argument after introspection, causes tsc error that fails the build](https://github.com/drizzle-team/drizzle-orm/issues/1870) - [[BUG]: drizzle-kit introspect small errors](https://github.com/drizzle-team/drizzle-orm/issues/1738) - [[BUG]: Missing bigint import in drizzle-kit introspect](https://github.com/drizzle-team/drizzle-orm/issues/1020) ================================================ FILE: changelogs/drizzle-kit/0.24.2.md ================================================ ## New Features ### 🎉 Support for `pglite` driver You can now use pglite with all drizzle-kit commands, including Drizzle Studio! ```ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: "postgresql", driver: "pglite", schema: "./schema.ts", dbCredentials: { url: "local-pg.db", }, verbose: true, strict: true, }); ``` ## Bug fixes - mysql-kit: fix GENERATED ALWAYS AS ... NOT NULL - [#2824](https://github.com/drizzle-team/drizzle-orm/pull/2824) ================================================ FILE: changelogs/drizzle-kit/0.25.0.md ================================================ ## Breaking changes and migrate guide for Turso users If you are using Turso and libsql, you will need to upgrade your `drizzle.config` and `@libsql/client` package. 1. This version of drizzle-orm will only work with `@libsql/client@0.10.0` or higher if you are using the `migrate` function. For other use cases, you can continue using previous versions(But the suggestion is to upgrade) To install the latest version, use the command: ```bash npm i @libsql/client@latest ``` 2. Previously, we had a common `drizzle.config` for SQLite and Turso users, which allowed a shared strategy for both dialects. Starting with this release, we are introducing the turso dialect in drizzle-kit. We will evolve and improve Turso as a separate dialect with its own migration strategies. **Before** ```ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: "sqlite", schema: "./schema.ts", out: "./drizzle", dbCredentials: { url: "database.db", }, breakpoints: true, verbose: true, strict: true, }); ``` **After** ```ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: "turso", schema: "./schema.ts", out: "./drizzle", dbCredentials: { url: "database.db", }, breakpoints: true, verbose: true, strict: true, }); ``` If you are using only SQLite, you can use `dialect: "sqlite"` ## LibSQL/Turso and Sqlite migration updates ### SQLite "generate" and "push" statements updates Starting from this release, we will no longer generate comments like this: ```sql '/*\n SQLite does not support "Changing existing column type" out of the box, we do not generate automatic migration for that, so it has to be done manually' + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + '\n https://www.sqlite.org/lang_altertable.html' + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + '\n*/' ``` We will generate a set of statements, and you can decide if it's appropriate to create data-moving statements instead. Here is an example of the SQL file you'll receive now: ```sql PRAGMA foreign_keys=OFF; --> statement-breakpoint CREATE TABLE `__new_worker` ( `id` integer PRIMARY KEY NOT NULL, `name` text NOT NULL, `salary` text NOT NULL, `job_id` integer, FOREIGN KEY (`job_id`) REFERENCES `job`(`id`) ON UPDATE no action ON DELETE no action ); --> statement-breakpoint INSERT INTO `__new_worker`("id", "name", "salary", "job_id") SELECT "id", "name", "salary", "job_id" FROM `worker`; --> statement-breakpoint DROP TABLE `worker`; --> statement-breakpoint ALTER TABLE `__new_worker` RENAME TO `worker`; --> statement-breakpoint PRAGMA foreign_keys=ON; ``` ### LibSQL/Turso "generate" and "push" statements updates Since LibSQL supports more ALTER statements than SQLite, we can generate more statements without recreating your schema and moving all the data, which can be potentially dangerous for production environments. LibSQL and Turso will now have a separate dialect in the Drizzle config file, meaning that we will evolve Turso and LibSQL independently from SQLite and will aim to support as many features as Turso/LibSQL offer. With the updated LibSQL migration strategy, you will have the ability to: - **Change Data Type**: Set a new data type for existing columns. - **Set and Drop Default Values**: Add or remove default values for existing columns. - **Set and Drop NOT NULL**: Add or remove the NOT NULL constraint on existing columns. - **Add References to Existing Columns**: Add foreign key references to existing columns You can find more information in the [LibSQL documentation](https://github.com/tursodatabase/libsql/blob/main/libsql-sqlite3/doc/libsql_extensions.md#altering-columns) ### LIMITATIONS - Dropping or altering an index will cause table recreation. This is because LibSQL/Turso does not support dropping this type of index. ```sql CREATE TABLE `users` ( `id` integer NOT NULL, `name` integer, `age` integer PRIMARY KEY NOT NULL FOREIGN KEY (`name`) REFERENCES `users1`("id") ON UPDATE no action ON DELETE no action ); ``` - If the table has indexes, altering columns will cause table recreation. - Drizzle-Kit will drop the indexes, modify the columns, and then recreate the indexes. - Adding or dropping composite foreign keys is not supported and will cause table recreation ### NOTES - You can create a reference on any column type, but if you want to insert values, the referenced column must have a unique index or primary key. ```sql CREATE TABLE parent(a PRIMARY KEY, b UNIQUE, c, d, e, f); CREATE UNIQUE INDEX i1 ON parent(c, d); CREATE INDEX i2 ON parent(e); CREATE UNIQUE INDEX i3 ON parent(f COLLATE nocase); CREATE TABLE child1(f, g REFERENCES parent(a)); -- Ok CREATE TABLE child2(h, i REFERENCES parent(b)); -- Ok CREATE TABLE child3(j, k, FOREIGN KEY(j, k) REFERENCES parent(c, d)); -- Ok CREATE TABLE child4(l, m REFERENCES parent(e)); -- Error! CREATE TABLE child5(n, o REFERENCES parent(f)); -- Error! CREATE TABLE child6(p, q, FOREIGN KEY(p, q) REFERENCES parent(b, c)); -- Error! CREATE TABLE child7(r REFERENCES parent(c)); -- Error! ``` > **NOTE**: The foreign key for the table child5 is an error because, although the parent key column has a unique index, the index uses a different collating sequence. See more: https://www.sqlite.org/foreignkeys.html ## New `casing` param in `drizzle-orm` and `drizzle-kit` There are more improvements you can make to your schema definition. The most common way to name your variables in a database and in TypeScript code is usually `snake_case` in the database and `camelCase` in the code. For this case, in Drizzle, you can now define a naming strategy in your database to help Drizzle map column keys automatically. Let's take a table from the previous example and make it work with the new casing API in Drizzle Table can now become: ```ts import { pgTable } from "drizzle-orm/pg-core"; export const ingredients = pgTable("ingredients", (t) => ({ id: t.uuid().defaultRandom().primaryKey(), name: t.text().notNull(), description: t.text(), inStock: t.boolean().default(true), })); ``` As you can see, `inStock` doesn't have a database name alias, but by defining the casing configuration at the connection level, all queries will automatically map it to `snake_case` ```ts const db = await drizzle('node-postgres', { connection: '', casing: 'snake_case' }) ``` For `drizzle-kit` migrations generation you should also specify `casing` param in drizzle config, so you can be sure you casing strategy will be applied to drizzle-kit as well ```ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: "postgresql", schema: "./schema.ts", dbCredentials: { url: "postgresql://postgres:password@localhost:5432/db", }, casing: "snake_case", }); ``` ================================================ FILE: changelogs/drizzle-kit/0.26.0.md ================================================ # New Features ## Checks support in `drizzle-kit` You can use drizzle-kit to manage your `check` constraint defined in drizzle-orm schema definition For example current drizzle table: ```ts import { sql } from "drizzle-orm"; import { check, pgTable } from "drizzle-orm/pg-core"; export const users = pgTable( "users", (c) => ({ id: c.uuid().defaultRandom().primaryKey(), username: c.text().notNull(), age: c.integer(), }), (table) => ({ checkConstraint: check("age_check", sql`${table.age} > 21`), }) ); ``` will be generated into ```sql CREATE TABLE IF NOT EXISTS "users" ( "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, "username" text NOT NULL, "age" integer, CONSTRAINT "age_check" CHECK ("users"."age" > 21) ); ``` The same is supported in all dialects ### Limitations - `generate` will work as expected for all check constraint changes. - `push` will detect only check renames and will recreate the constraint. All other changes to SQL won't be detected and will be ignored. So, if you want to change the constraint's SQL definition using only `push`, you would need to manually comment out the constraint, `push`, then put it back with the new SQL definition and `push` one more time. ## Views support in `drizzle-kit` You can use drizzle-kit to manage your `views` defined in drizzle-orm schema definition. It will work with all existing dialects and view options ### PostgreSQL For example current drizzle table: ```ts import { sql } from "drizzle-orm"; import { check, pgMaterializedView, pgTable, pgView, } from "drizzle-orm/pg-core"; export const users = pgTable( "users", (c) => ({ id: c.uuid().defaultRandom().primaryKey(), username: c.text().notNull(), age: c.integer(), }), (table) => ({ checkConstraint: check("age_check", sql`${table.age} > 21`), }) ); export const simpleView = pgView("simple_users_view").as((qb) => qb.select().from(users) ); export const materializedView = pgMaterializedView( "materialized_users_view" ).as((qb) => qb.select().from(users)); ``` will be generated into ```sql CREATE TABLE IF NOT EXISTS "users" ( "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, "username" text NOT NULL, "age" integer, CONSTRAINT "age_check" CHECK ("users"."age" > 21) ); CREATE VIEW "public"."simple_users_view" AS (select "id", "username", "age" from "users"); CREATE MATERIALIZED VIEW "public"."materialized_users_view" AS (select "id", "username", "age" from "users"); ``` Views supported in all dialects, but materialized views are supported only in PostgreSQL #### Limitations - `generate` will work as expected for all view changes - `push` limitations: 1. If you want to change the view's SQL definition using only `push`, you would need to manually comment out the view, `push`, then put it back with the new SQL definition and `push` one more time. ## Updates for PostgreSQL enums behavior We've updated enum behavior in Drizzle with PostgreSQL: - Add value after or before in enum: With this change, Drizzle will now respect the order of values in the enum and allow adding new values after or before a specific one. - Support for dropping a value from an enum: In this case, Drizzle will attempt to alter all columns using the enum to text, then drop the existing enum and create a new one with the updated set of values. After that, all columns previously using the enum will be altered back to the new enum. > If the deleted enum value was used by a column, this process will result in a database error. - Support for dropping an enum - Support for moving enums between schemas - Support for renaming enums ================================================ FILE: changelogs/drizzle-kit/0.26.1.md ================================================ - Fix `data is malformed` for views ================================================ FILE: changelogs/drizzle-kit/0.26.2.md ================================================ - Updated internal versions for the drizzle-kit and drizzle-orm packages. Changes were introduced in the last minor release, and you are required to upgrade both packages to ensure they work as expected ================================================ FILE: changelogs/drizzle-kit/0.27.0.md ================================================ > This version of `drizzle-jit` requires `drizzle-orm@0.36.0` to enable all new features # New Features ## Row-Level Security (RLS) With Drizzle, you can enable Row-Level Security (RLS) for any Postgres table, create policies with various options, and define and manage the roles those policies apply to. Drizzle supports a raw representation of Postgres policies and roles that can be used in any way you want. This works with popular Postgres database providers such as `Neon` and `Supabase`. In Drizzle, we have specific predefined RLS roles and functions for RLS with both database providers, but you can also define your own logic. ### Enable RLS If you just want to enable RLS on a table without adding policies, you can use `.enableRLS()` As mentioned in the PostgreSQL documentation: > If no policy exists for the table, a default-deny policy is used, meaning that no rows are visible or can be modified. Operations that apply to the whole table, such as TRUNCATE and REFERENCES, are not subject to row security. ```ts import { integer, pgTable } from 'drizzle-orm/pg-core'; export const users = pgTable('users', { id: integer(), }).enableRLS(); ``` > If you add a policy to a table, RLS will be enabled automatically. So, there’s no need to explicitly enable RLS when adding policies to a table. ### Roles Currently, Drizzle supports defining roles with a few different options, as shown below. Support for more options will be added in a future release. ```ts import { pgRole } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin', { createRole: true, createDb: true, inherit: true }); ``` If a role already exists in your database, and you don’t want drizzle-kit to ‘see’ it or include it in migrations, you can mark the role as existing. ```ts import { pgRole } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin').existing(); ``` ### Policies To fully leverage RLS, you can define policies within a Drizzle table. > In PostgreSQL, policies should be linked to an existing table. Since policies are always associated with a specific table, we decided that policy definitions should be defined as a parameter of `pgTable` **Example of pgPolicy with all available properties** ```ts import { sql } from 'drizzle-orm'; import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin'); export const users = pgTable('users', { id: integer(), }, (t) => [ pgPolicy('policy', { as: 'permissive', to: admin, for: 'delete', using: sql``, withCheck: sql``, }), ]); ``` **Link Policy to an existing table** There are situations where you need to link a policy to an existing table in your database. The most common use case is with database providers like `Neon` or `Supabase`, where you need to add a policy to their existing tables. In this case, you can use the `.link()` API ```ts import { sql } from "drizzle-orm"; import { pgPolicy } from "drizzle-orm/pg-core"; import { authenticatedRole, realtimeMessages } from "drizzle-orm/supabase"; export const policy = pgPolicy("authenticated role insert policy", { for: "insert", to: authenticatedRole, using: sql``, }).link(realtimeMessages); ``` ### Migrations If you are using drizzle-kit to manage your schema and roles, there may be situations where you want to refer to roles that are not defined in your Drizzle schema. In such cases, you may want drizzle-kit to skip managing these roles without having to define each role in your drizzle schema and marking it with `.existing()`. In these cases, you can use `entities.roles` in `drizzle.config.ts`. For a complete reference, refer to the the [`drizzle.config.ts`](https://orm.drizzle.team/docs/drizzle-config-file) documentation. By default, `drizzle-kit` does not manage roles for you, so you will need to enable this feature in `drizzle.config.ts`. ```ts {12-14} // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: 'postgresql', schema: "./drizzle/schema.ts", dbCredentials: { url: process.env.DATABASE_URL! }, verbose: true, strict: true, entities: { roles: true } }); ``` In case you need additional configuration options, let's take a look at a few more examples. **You have an `admin` role and want to exclude it from the list of manageable roles** ```ts // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ ... entities: { roles: { exclude: ['admin'] } } }); ``` **You have an `admin` role and want to include it in the list of manageable roles** ```ts // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ ... entities: { roles: { include: ['admin'] } } }); ``` **If you are using `Neon` and want to exclude Neon-defined roles, you can use the provider option** ```ts // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ ... entities: { roles: { provider: 'neon' } } }); ``` **If you are using `Supabase` and want to exclude Supabase-defined roles, you can use the provider option** ```ts // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ ... entities: { roles: { provider: 'supabase' } } }); ``` > You may encounter situations where Drizzle is slightly outdated compared to new roles specified by your database provider. In such cases, you can use the `provider` option and `exclude` additional roles: ```ts // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ ... entities: { roles: { provider: 'supabase', exclude: ['new_supabase_role'] } } }); ``` ### RLS on views With Drizzle, you can also specify RLS policies on views. For this, you need to use `security_invoker` in the view's WITH options. Here is a small example: ```ts {5} ... export const roomsUsersProfiles = pgView("rooms_users_profiles") .with({ securityInvoker: true, }) .as((qb) => qb .select({ ...getTableColumns(roomsUsers), email: profiles.email, }) .from(roomsUsers) .innerJoin(profiles, eq(roomsUsers.userId, profiles.id)) ); ``` ### Using with Neon The Neon Team helped us implement their vision of a wrapper on top of our raw policies API. We defined a specific `/neon` import with the `crudPolicy` function that includes predefined functions and Neon's default roles. Here's an example of how to use the `crudPolicy` function: ```ts import { crudPolicy } from 'drizzle-orm/neon'; import { integer, pgRole, pgTable } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin'); export const users = pgTable('users', { id: integer(), }, (t) => [ crudPolicy({ role: admin, read: true, modify: false }), ]); ``` This policy is equivalent to: ```ts import { sql } from 'drizzle-orm'; import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin'); export const users = pgTable('users', { id: integer(), }, (t) => [ pgPolicy(`crud-${admin.name}-policy-insert`, { for: 'insert', to: admin, withCheck: sql`false`, }), pgPolicy(`crud-${admin.name}-policy-update`, { for: 'update', to: admin, using: sql`false`, withCheck: sql`false`, }), pgPolicy(`crud-${admin.name}-policy-delete`, { for: 'delete', to: admin, using: sql`false`, }), pgPolicy(`crud-${admin.name}-policy-select`, { for: 'select', to: admin, using: sql`true`, }), ]); ``` `Neon` exposes predefined `authenticated` and `anaonymous` roles and related functions. If you are using `Neon` for RLS, you can use these roles, which are marked as existing, and the related functions in your RLS queries. ```ts // drizzle-orm/neon export const authenticatedRole = pgRole('authenticated').existing(); export const anonymousRole = pgRole('anonymous').existing(); export const authUid = (userIdColumn: AnyPgColumn) => sql`(select auth.user_id() = ${userIdColumn})`; ``` For example, you can use the `Neon` predefined roles and functions like this: ```ts import { sql } from 'drizzle-orm'; import { authenticatedRole } from 'drizzle-orm/neon'; import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin'); export const users = pgTable('users', { id: integer(), }, (t) => [ pgPolicy(`policy-insert`, { for: 'insert', to: authenticatedRole, withCheck: sql`false`, }), ]); ``` ### Using with Supabase We also have a `/supabase` import with a set of predefined roles marked as existing, which you can use in your schema. This import will be extended in a future release with more functions and helpers to make using RLS and `Supabase` simpler. ```ts // drizzle-orm/supabase export const anonRole = pgRole('anon').existing(); export const authenticatedRole = pgRole('authenticated').existing(); export const serviceRole = pgRole('service_role').existing(); export const postgresRole = pgRole('postgres_role').existing(); export const supabaseAuthAdminRole = pgRole('supabase_auth_admin').existing(); ``` For example, you can use the `Supabase` predefined roles like this: ```ts import { sql } from 'drizzle-orm'; import { serviceRole } from 'drizzle-orm/supabase'; import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin'); export const users = pgTable('users', { id: integer(), }, (t) => [ pgPolicy(`policy-insert`, { for: 'insert', to: serviceRole, withCheck: sql`false`, }), ]); ``` The `/supabase` import also includes predefined tables and functions that you can use in your application ```ts // drizzle-orm/supabase const auth = pgSchema('auth'); export const authUsers = auth.table('users', { id: uuid().primaryKey().notNull(), }); const realtime = pgSchema('realtime'); export const realtimeMessages = realtime.table( 'messages', { id: bigserial({ mode: 'bigint' }).primaryKey(), topic: text().notNull(), extension: text({ enum: ['presence', 'broadcast', 'postgres_changes'], }).notNull(), }, ); export const authUid = sql`(select auth.uid())`; export const realtimeTopic = sql`realtime.topic()`; ``` This allows you to use it in your code, and Drizzle Kit will treat them as existing databases, using them only as information to connect to other entities ```ts import { foreignKey, pgPolicy, pgTable, text, uuid } from "drizzle-orm/pg-core"; import { sql } from "drizzle-orm/sql"; import { authenticatedRole, authUsers } from "drizzle-orm/supabase"; export const profiles = pgTable( "profiles", { id: uuid().primaryKey().notNull(), email: text().notNull(), }, (table) => [ foreignKey({ columns: [table.id], // reference to the auth table from Supabase foreignColumns: [authUsers.id], name: "profiles_id_fk", }).onDelete("cascade"), pgPolicy("authenticated can view all profiles", { for: "select", // using predefined role from Supabase to: authenticatedRole, using: sql`true`, }), ] ); ``` Let's check an example of adding a policy to a table that exists in `Supabase` ```ts import { sql } from "drizzle-orm"; import { pgPolicy } from "drizzle-orm/pg-core"; import { authenticatedRole, realtimeMessages } from "drizzle-orm/supabase"; export const policy = pgPolicy("authenticated role insert policy", { for: "insert", to: authenticatedRole, using: sql``, }).link(realtimeMessages); ``` # Bug fixes - [[BUG]: Studio + mysql default mode, wrong format related timezone](https://github.com/drizzle-team/drizzle-orm/issues/2747) - [[BUG]: Drizzle Studio CORS error](https://github.com/drizzle-team/drizzle-orm/issues/1857) - [[BUG]: TIMESTAMPS showing up incorrectly on drizzle studio](https://github.com/drizzle-team/drizzle-orm/issues/2549) ================================================ FILE: changelogs/drizzle-kit/0.27.1.md ================================================ - Fix: [[BUG]: When using RLS policies and Views, the view is the last clause generated](https://github.com/drizzle-team/drizzle-orm/issues/3378) ================================================ FILE: changelogs/drizzle-kit/0.27.2.md ================================================ - Fix [[BUG]: Undefined properties when using drizzle-kit push](https://github.com/drizzle-team/drizzle-orm/issues/3391) - Fix TypeError: Cannot read properties of undefined (reading 'isRLSEnabled') - Fix push bugs, when pushing a schema with linked policy to a table from `drizzle-orm/supabase` ================================================ FILE: changelogs/drizzle-kit/0.28.0.md ================================================ # Improvements - Added an OHM static imports checker to identify unexpected imports within a chain of imports in the drizzle-kit repo. For example, it checks if drizzle-orm is imported before drizzle-kit and verifies if the drizzle-orm import is available in your project. - [Adding more columns to Supabase auth.users table schema](https://github.com/drizzle-team/drizzle-orm/issues/3327) - thanks @nicholasdly # Bug Fixes - [[BUG]: [drizzle-kit]: Fix breakpoints option cannot be disabled](https://github.com/drizzle-team/drizzle-orm/issues/2828) - thanks @klotztech - [[BUG]: drizzle-kit introspect: SMALLINT import missing and incorrect DECIMAL UNSIGNED handling](https://github.com/drizzle-team/drizzle-orm/issues/2950) - thanks @L-Mario564 - [Unsigned tinyints preventing migrations](https://github.com/drizzle-team/drizzle-orm/issues/1571) - thanks @L-Mario564 - [[BUG]: Can't parse float(8,2) from database (precision and scale and/or unsigned breaks float types)](https://github.com/drizzle-team/drizzle-orm/issues/3285) - thanks @L-Mario564 - [[BUG]: PgEnum generated migration doesn't escape single quotes](https://github.com/drizzle-team/drizzle-orm/issues/1272) - thanks @L-Mario564 - [[BUG]: single quote not escaped correctly in migration file](https://github.com/drizzle-team/drizzle-orm/issues/2184) - thanks @L-Mario564 - [[BUG]: Migrations does not escape single quotes](https://github.com/drizzle-team/drizzle-orm/issues/1765) - thanks @L-Mario564 - [[BUG]: Issue with quoted default string values](https://github.com/drizzle-team/drizzle-orm/issues/2122) - thanks @L-Mario564 - [[BUG]: SQl commands in wrong roder](https://github.com/drizzle-team/drizzle-orm/issues/2390) - thanks @L-Mario564 - [[BUG]: Time with precision in drizzle-orm/pg-core adds double-quotes around type](https://github.com/drizzle-team/drizzle-orm/issues/1804) - thanks @L-Mario564 - [[BUG]: Postgres push fails due to lack of quotes](https://github.com/drizzle-team/drizzle-orm/issues/2396) - thanks @L-Mario564 - [[BUG]: TypeError: Cannot read properties of undefined (reading 'compositePrimaryKeys')](https://github.com/drizzle-team/drizzle-orm/issues/2344) - thanks @L-Mario564 - [[BUG]: drizzle-kit introspect generates CURRENT_TIMESTAMP without sql operator on date column](https://github.com/drizzle-team/drizzle-orm/issues/2899) - thanks @L-Mario564 - [[BUG]: Drizzle-kit introspect doesn't pull correct defautl statement](https://github.com/drizzle-team/drizzle-orm/issues/2905) - thanks @L-Mario564 - [[BUG]: Problem on MacBook - This statement does not return data. Use run() instead](https://github.com/drizzle-team/drizzle-orm/issues/2623) - thanks @L-Mario564 - [[BUG]: Enum column names that are used as arrays are not quoted](https://github.com/drizzle-team/drizzle-orm/issues/2598) - thanks @L-Mario564 - [[BUG]: drizzle-kit generate ignores index operators](https://github.com/drizzle-team/drizzle-orm/issues/2935) - thanks @L-Mario564 - [dialect param config error message is wrong](https://github.com/drizzle-team/drizzle-orm/issues/3427) - thanks @L-Mario564 - [[BUG]: Error setting default enum field values](https://github.com/drizzle-team/drizzle-orm/issues/2299) - thanks @L-Mario564 - [[BUG]: drizzle-kit does not respect the order of columns configured in primaryKey()](https://github.com/drizzle-team/drizzle-orm/issues/2326) - thanks @L-Mario564 - [[BUG]: Cannot drop Unique Constraint MySQL](https://github.com/drizzle-team/drizzle-orm/issues/998) - thanks @L-Mario564 ================================================ FILE: changelogs/drizzle-kit/0.28.1.md ================================================ # Bug fixes - Fixed typos in repository: thanks @armandsalle, @masto, @wackbyte, @Asher-JH, @MaxLeiter - [fix: wrong dialect set in mysql/sqlite introspect](https://github.com/drizzle-team/drizzle-orm/pull/2865) ================================================ FILE: changelogs/drizzle-kit/0.29.0.md ================================================ # New Dialects ### 🎉 `SingleStore` dialect is now available in Drizzle Thanks to the SingleStore team for creating a PR with all the necessary changes to support the MySQL-compatible part of SingleStore. You can already start using it with Drizzle. The SingleStore team will also help us iterate through updates and make more SingleStore-specific features available in Drizzle ```ts import 'dotenv/config'; import { defineConfig } from 'drizzle-kit'; export default defineConfig({ dialect: 'singlestore', out: './drizzle', schema: './src/db/schema.ts', dbCredentials: { url: process.env.DATABASE_URL!, }, }); ``` You can check out our [Getting started guides](https://orm.drizzle.team/docs/get-started/singlestore-new) to try SingleStore! # New Drivers ### 🎉 `SQLite Durable Objects` driver is now available in Drizzle You can now query SQLite Durable Objects in Drizzle! For the full example, please check our [Get Started](https://orm.drizzle.team/docs/get-started/do-new) Section ```ts import 'dotenv/config'; import { defineConfig } from 'drizzle-kit'; export default defineConfig({ out: './drizzle', schema: './src/db/schema.ts', dialect: 'sqlite', driver: 'durable-sqlite', }); ``` ================================================ FILE: changelogs/drizzle-kit/0.29.1.md ================================================ - Fix SingleStore generate migrations command ================================================ FILE: changelogs/drizzle-kit/0.30.0.md ================================================ Starting from this update, the PostgreSQL dialect will align with the behavior of all other dialects. It will no longer include `IF NOT EXISTS`, `$DO`, or similar statements, which could cause incorrect DDL statements to not fail when an object already exists in the database and should actually fail. This change marks our first step toward several major upgrades we are preparing: - An updated and improved migration workflow featuring commutative migrations, a revised folder structure, and enhanced collaboration capabilities for migrations. - Better support for Xata migrations. - Compatibility with CockroachDB (achieving full compatibility will only require removing serial fields from the migration folder). ================================================ FILE: changelogs/drizzle-kit/0.30.1.md ================================================ # New Features ### `drizzle-kit export` To make drizzle-kit integration with other migration tools, like Atlas much easier, we've prepared a new command called `export`. It will translate your drizzle schema in SQL representation(DDL) statements and outputs to the console ```ts // schema.ts import { pgTable, serial, text } from 'drizzle-orm/pg-core' export const users = pgTable('users', { id: serial('id').primaryKey(), email: text('email').notNull(), name: text('name') }); ``` Running ```bash npx drizzle-kit export ``` will output this string to console ```bash CREATE TABLE "users" ( "id" serial PRIMARY KEY NOT NULL, "email" text NOT NULL, "name" text ); ``` By default, the only option for now is `--sql`, so the output format will be SQL DDL statements. In the future, we will support additional output formats to accommodate more migration tools ```bash npx drizzle-kit export --sql ``` ================================================ FILE: changelogs/drizzle-kit/0.30.2.md ================================================ - Fix certificates generation utility for Drizzle Studio; [[BUG]: [drizzle-kit]: drizzle-kit dependency on drizzle-studio perms error](https://github.com/drizzle-team/drizzle-orm/issues/3729) ================================================ FILE: changelogs/drizzle-kit/0.30.3.md ================================================ # SingleStore `push` and `generate` improvements As SingleStore did not support certain DDL statements before this release, you might encounter an error indicating that some schema changes cannot be applied due to a database issue. Starting from this version, drizzle-kit will detect such cases and initiate table recreation with data transfer between the tables # Bug fixes - [[BUG] If the index name is the same as the generated name, it will be empty and a type error will occur](https://github.com/drizzle-team/drizzle-orm/issues/3420) ================================================ FILE: changelogs/drizzle-kit/0.30.4.md ================================================ - Fix bug that generates incorrect syntax when introspect in mysql - Fix a bug that caused incorrect syntax output when introspect in unsigned columns ================================================ FILE: changelogs/drizzle-kit/0.30.5.md ================================================ # New Features ## Added `Gel` dialect support and `gel-js` client support Drizzle is getting a new `Gel` dialect with its own types and Gel-specific logic. In this first iteration, almost all query-building features have been copied from the `PostgreSQL` dialect since Gel is fully PostgreSQL-compatible. The only change in this iteration is the data types. The Gel dialect has a different set of available data types, and all mappings for these types have been designed to avoid any extra conversions on Drizzle's side. This means you will insert and select exactly the same data as supported by the Gel protocol. Drizzle + Gel integration will work only through `drizzle-kit pull`. Drizzle won't support `generate`, `migrate`, or `push` features in this case. Instead, drizzle-kit is used solely to pull the Drizzle schema from the Gel database, which can then be used in your `drizzle-orm` queries. The Gel + Drizzle workflow: 1. Use the `gel` CLI to manage your schema. 2. Use the `gel` CLI to generate and apply migrations to the database. 3. Use drizzle-kit to pull the Gel database schema into a Drizzle schema. 4. Use drizzle-orm with gel-js to query the Gel database. On the drizzle-kit side you can now use `dialect: "gel"` ```ts // drizzle.config.ts import { defineConfig } from 'drizzle-kit'; export default defineConfig({ dialect: 'gel', }); ``` For a complete Get Started tutorial you can use our new guides: - [Get Started with Drizzle and Gel in a new project](https://orm.drizzle.team/docs/get-started/gel-new) - [Get Started with Drizzle and Gel in a existing project](https://orm.drizzle.team/docs/get-started/gel-existing) ================================================ FILE: changelogs/drizzle-kit/0.30.6.md ================================================ ### Bug fixes - [[BUG]: d1 push locally is not working](https://github.com/drizzle-team/drizzle-orm/issues/4099) - thanks @mabels and @RomanNabukhotnyi - [[BUG] Cloudflare D1: drizzle-kit push is not working (error 7500 SQLITE_AUTH)](https://github.com/drizzle-team/drizzle-orm/issues/3728) - thanks @mabels and @RomanNabukhotnyi ================================================ FILE: changelogs/drizzle-kit/0.31.0.md ================================================ ## Features and improvements ### Enum DDL improvements For situations where you drop an `enum` value or reorder values in an `enum`, there is no native way to do this in PostgreSQL. To handle these cases, `drizzle-kit` used to: - Change the column data types from the enum to text - Drop the old enum - Add the new enum - Change the column data types back to the new enum However, there were a few scenarios that weren’t covered: `PostgreSQL` wasn’t updating default expressions for columns when their data types changed Therefore, for cases where you either change a column’s data type from an `enum` to some other type, drop an `enum` value, or reorder `enum` values, we now do the following: - Change the column data types from the enum to text - Set the default using the ::text expression - Drop the old enum - Add the new enum - Change the column data types back to the new enum - Set the default using the :: expression ### `esbuild` version upgrade For `drizzle-kit` we upgraded the version to latest (`0.25.2`), thanks @paulmarsicloud ## Bug fixes - [[BUG]: Error on Malformed Array Literal](https://github.com/drizzle-team/drizzle-orm/issues/2715) - thanks @Kratious - [[BUG]: Postgres drizzle-kit: Error while pulling indexes from a table with json/jsonb deep field index](https://github.com/drizzle-team/drizzle-orm/issues/2744) - thanks @Kratious - [goog-vulnz flags CVE-2024-24790 in esbuild 0.19.7](https://github.com/drizzle-team/drizzle-orm/issues/4045) ================================================ FILE: changelogs/drizzle-kit/0.31.1.md ================================================ ### Fixed `drizzle-kit pull` bugs when using Gel extensions. Because Gel extensions create schema names containing `::` (for example, `ext::auth`), Drizzle previously handled these names incorrectly. Starting with this release, you can use Gel extensions without any problems. Here’s what you should do: 1. Enable extensions schemas in `drizzle.config.ts` ```ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: 'gel', schemaFilter: ['ext::auth', 'public'] }); ``` 2. Run `drizzle-kit pull` 3. Done! ================================================ FILE: changelogs/drizzle-kit/0.31.10.md ================================================ - Updated to `hanji@0.0.8` - native bun `stringWidth`, `stripANSI` support, errors for non-TTY environments - We've migrated away from `esbuild-register` to `tsx` loader, it will now allow to use `drizzle-kit` seamlessly with both `ESM` and `CJS` modules - We've also added native `Bun` and `Deno` launch support, which will not trigger `tsx` loader and utilise native `bun` and `deno` imports capabilities and faster startup times ================================================ FILE: changelogs/drizzle-kit/0.31.2.md ================================================ ### Bug fixes - Fixed relations extraction to not interfere with Drizzle Studio. ================================================ FILE: changelogs/drizzle-kit/0.31.3.md ================================================ - Internal changes to Studio context. Added `databaseName` and `packageName` properties for Studio ================================================ FILE: changelogs/drizzle-kit/0.31.4.md ================================================ - Fixed `halfvec`, `bit` and `sparsevec` type generation bug in drizzle-kit ================================================ FILE: changelogs/drizzle-kit/0.31.5.md ================================================ - Add casing support to studio configuration and related functions ================================================ FILE: changelogs/drizzle-kit/0.31.6.md ================================================ ### Bug fixes - [[BUG]: Importing drizzle-kit/api fails in ESM modules](https://github.com/drizzle-team/drizzle-orm/issues/2853) ================================================ FILE: changelogs/drizzle-kit/0.31.7.md ================================================ ### Bug fixes - [[BUG]: Drizzle Kit push to Postgres 18 produces unecessary DROP SQL when the schema was NOT changed](https://github.com/drizzle-team/drizzle-orm/issues/4944) ================================================ FILE: changelogs/drizzle-kit/0.31.8.md ================================================ ### Bug fixes - Fixed `algorythm` => `algorithm` typo. - Fixed external dependencies in build configuration. ================================================ FILE: changelogs/drizzle-kit/0.31.9.md ================================================ - drizzle-kit api improvements for D1 connections ================================================ FILE: changelogs/drizzle-orm/0.12.0-beta.23.md ================================================ # drizzle-orm 0.12.0-beta.23 - Added new row mapping mechanism as `mapResultRowV2`, `mapResultRow` will be replaced by it in the future. ================================================ FILE: changelogs/drizzle-orm/0.12.0-beta.24.md ================================================ # drizzle-orm 0.12.0-beta.24 - Made `.execute()` method public in query builders. - Added `name()` function for escaping entity names inside queries. - (internal) Removed old row mapper implementation. ================================================ FILE: changelogs/drizzle-orm/0.13.0.md ================================================ # drizzle-orm 0.13.0 - Release 🎉 ================================================ FILE: changelogs/drizzle-orm/0.13.1.md ================================================ # drizzle-orm 0.13.1 - Fix mysql peer dependency range ================================================ FILE: changelogs/drizzle-orm/0.14.1.md ================================================ # drizzle-orm 0.14.1 - Bumped everything to 0.14.1. ================================================ FILE: changelogs/drizzle-orm/0.14.2.md ================================================ # drizzle-orm 0.14.2 - Bumped everything to 0.14.2 ================================================ FILE: changelogs/drizzle-orm/0.15.0.md ================================================ # drizzle-orm 0.15.0 - Minor upgrade for all modules, due to adding version for api - Add internal version for ORM api and npm version ================================================ FILE: changelogs/drizzle-orm/0.15.1.md ================================================ # drizzle-orm 0.15.1 - Add schema symbol to table - Append schema before table name in SQLWrapper if it exists ================================================ FILE: changelogs/drizzle-orm/0.15.2.md ================================================ # drizzle-orm 0.15.2 Internal release ================================================ FILE: changelogs/drizzle-orm/0.15.3.md ================================================ # drizzle-orm 0.15.3 Internal release ================================================ FILE: changelogs/drizzle-orm/0.16.0.md ================================================ # drizzle-orm 0.16.0 - Bump all packages to 0.16.0 ================================================ FILE: changelogs/drizzle-orm/0.16.1.md ================================================ # drizzle-orm 0.16.0 - Fix peer dependency error for >=0.16 drizzle packages ================================================ FILE: changelogs/drizzle-orm/0.17.0.md ================================================ ## ❗ All ORM packages are now merged into `drizzle-orm` Starting from release `0.17.0` and onwards, all dialect-specific packages are merged into `drizzle-orm`. Legacy ORM packages will be archived. ### Import paths changes #### PostgreSQL - `import { ... } from 'drizzle-orm-pg'` -> `import { ... } from 'drizzle-orm/pg-core'` - `import { ... } from 'drizzle-orm-pg/node'` -> `import { ... } from 'drizzle-orm/node-postgres'` - `import { ... } from 'drizzle-orm-pg/neondb'` -> `import { ... } from 'drizzle-orm/neon'` - `import { ... } from 'drizzle-orm-pg/postgres.js'` -> `import { ... } from 'drizzle-orm/postgres.js'` #### MySQL - `import { ... } from 'drizzle-orm-mysql'` -> `import { ... } from 'drizzle-orm/mysql-core'` - `import { ... } from 'drizzle-orm-mysql/mysql2'` -> `import { ... } from 'drizzle-orm/mysql2'` #### SQLite - `import { ... } from 'drizzle-orm-sqlite'` -> `import { ... } from 'drizzle-orm/sqlite-core'` - `import { ... } from 'drizzle-orm-sqlite/better-sqlite3'` -> `import { ... } from 'drizzle-orm/better-sqlite3'` - `import { ... } from 'drizzle-orm-sqlite/d1'` -> `import { ... } from 'drizzle-orm/d1'` - `import { ... } from 'drizzle-orm-sqlite/bun'` -> `import { ... } from 'drizzle-orm/bun-sqlite'` - `import { ... } from 'drizzle-orm-sqlite/sql.js'` -> `import { ... } from 'drizzle-orm/sql.js'` ================================================ FILE: changelogs/drizzle-orm/0.17.1.md ================================================ - Added feature showcase section to README ================================================ FILE: changelogs/drizzle-orm/0.17.2.md ================================================ - Fixed package.json require path in 'drizzle-orm/version' ================================================ FILE: changelogs/drizzle-orm/0.17.3.md ================================================ We have released [AWS Data API support](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/data-api.html) for PostgreSQL --- Connection example ```typescript import { drizzle, migrate } from 'drizzle-orm/aws-data-api/pg'; const rdsClient = new RDSDataClient({}); const db = drizzle(rdsClient, { database: '', secretArn: '', resourceArn: '', }); await migrate(db, { migrationsFolder: '' }); ``` > **Note**: > All drizzle pg data types are working well with data api, except of `interval`. This type is not yet mapped in proper way ================================================ FILE: changelogs/drizzle-orm/0.17.4.md ================================================ We have released [SQLite Proxy Driver](https://github.com/drizzle-team/drizzle-orm/tree/main/examples/sqlite-proxy) --- Perfect way to setup custom logic for database calls instead of predefined drivers Should work well with serverless apps 🚀 ```typescript // Custom Proxy HTTP driver const db = drizzle(async (sql, params, method) => { try { const rows = await axios.post('http://localhost:3000/query', { sql, params, method }); return { rows: rows.data }; } catch (e: any) { console.error('Error from sqlite proxy server: ', e.response.data) return { rows: [] }; } }); ``` > For more example you can check [full documentation](https://github.com/drizzle-team/drizzle-orm/tree/main/examples/sqlite-proxy) ================================================ FILE: changelogs/drizzle-orm/0.17.5.md ================================================ We have released [Planetscale Serverless](https://github.com/planetscale/database-js) driver support --- Usage example: ```typescript import { drizzle } from 'drizzle-orm/planetscale-serverless'; import { connect } from '@planetscale/database'; // create the connection const connection = connect({ host: process.env['DATABASE_HOST'], username: process.env['DATABASE_USERNAME'], password: process.env['DATABASE_PASSWORD'], }); const db = drizzle(connection); ``` ================================================ FILE: changelogs/drizzle-orm/0.17.6.md ================================================ Fix circular dependency for query building on all pg and mysql drivers Moved all aws data api typings specific logic to dialect from sql to prevent circular dependency issues ================================================ FILE: changelogs/drizzle-orm/0.17.7.md ================================================ - Fix [#158](https://github.com/drizzle-team/drizzle-orm/issues/158) issue. Method `.returning()` was working incorrectly with `.get()` method in sqlite dialect - Fix SQLite Proxy driver mapping bug - Add test cases for SQLite Proxy driver - Add additional example for SQLite Proxy Server setup to handle `.get()` as well ================================================ FILE: changelogs/drizzle-orm/0.18.0.md ================================================ - Improved join result types for partial selects (refer to the [docs](/docs/joins.md) page for more information) - Renamed import paths for Postgres.js and SQL.js drivers to avoid bundling errors: - `drizzle-orm/postgres.js` -> `drizzle-orm/postgres-js` - `drizzle-orm/sql.js` -> `drizzle-orm/sql-js` ================================================ FILE: changelogs/drizzle-orm/0.19.0.md ================================================ - Implemented selecting and joining a subquery. Example usage: ```ts const sq = db .select({ categoryId: courseCategoriesTable.id, category: courseCategoriesTable.name, total: sql`count(${courseCategoriesTable.id})`.as(), }) .from(courseCategoriesTable) .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) .subquery('sq'); ``` After that, just use the subquery instead of a table as usual. - ❗ Replaced `db.select(table).fields({ ... })` syntax with `db.select({ ... }).from(table)` to look more like its SQL counterpart. ================================================ FILE: changelogs/drizzle-orm/0.19.1.md ================================================ ## Changelog --- - Add `char` data type support for postgresql by @AlexandrLi in [#177](https://github.com/drizzle-team/drizzle-orm/pull/177) - Adding new section with `New Contributors` for release notes. Took this template from [bun](https://github.com/oven-sh/bun) release notes pattern ## New Contributors --- - @AlexandrLi made their first contribution in [#177](https://github.com/drizzle-team/drizzle-orm/pull/177) ================================================ FILE: changelogs/drizzle-orm/0.20.0.md ================================================ - 🎉 **Implemented support for WITH clause ([docs](/drizzle-orm/src/pg-core/README.md#with-clause)). Example usage:** ```ts const sq = db .select() .from(users) .prepareWithSubquery('sq'); const result = await db .with(sq) .select({ id: sq.id, name: sq.name, total: sql`count(${sq.id})::int`(), }) .from(sq) .groupBy(sq.id, sq.name); ``` - 🐛 Fixed various bugs with selecting/joining of subqueries. - ❗ Renamed `.subquery('alias')` to `.as('alias')`. - ❗ ``sql`query`.as()`` is now ``sql`query`()``. Old syntax is still supported, but is deprecated and will be removed in one of the next releases. ================================================ FILE: changelogs/drizzle-orm/0.20.1.md ================================================ - 🎉 Added `{ logger: true }` shorthand to `drizzle()` to enable query logging. See [logging docs](/drizzle-orm/src/pg-core/README.md#logging) for detailed logging configuration. ================================================ FILE: changelogs/drizzle-orm/0.20.2.md ================================================ - 🎉 Added PostgreSQL network data types: - `inet` - `cidr` - `macaddr` - `macaddr8` ================================================ FILE: changelogs/drizzle-orm/0.20.3.md ================================================ - 🎉 Added support for locking clauses in SELECT (`SELECT ... FOR UPDATE`): PostgreSQL ```ts await db .select() .from(users) .for('update') .for('no key update', { of: users }) .for('no key update', { of: users, skipLocked: true }) .for('share', { of: users, noWait: true }); ``` MySQL ```ts await db.select().from(users).for('update'); await db.select().from(users).for('share', { skipLocked: true }); await db.select().from(users).for('update', { noWait: true }); ``` - 🎉🐛 Custom column types now support returning `SQL` from `toDriver()` method in addition to the `driverData` type from generic. ================================================ FILE: changelogs/drizzle-orm/0.21.0.md ================================================ ## Drizzle ORM 0.21.0 was released 🎉 - Added support for new migration folder structure and breakpoints feature, described in drizzle-kit release section - Fix `onUpdateNow()` expression generation for default migration statement
### Support for PostgreSQL array types --- ```ts export const salEmp = pgTable('sal_emp', { name: text('name').notNull(), payByQuarter: integer('pay_by_quarter').array(), schedule: text('schedule').array().array(), }); export const tictactoe = pgTable('tictactoe', { squares: integer('squares').array(3).array(3), }); ``` drizzle kit will generate ```sql CREATE TABLE sal_emp ( name text, pay_by_quarter integer[], schedule text[][] ); CREATE TABLE tictactoe ( squares integer[3][3] ); ```
### Added composite primary key support to PostgreSQL and MySQL --- PostgreSQL ```ts import { primaryKey } from 'drizzle-orm/pg-core'; export const cpkTable = pgTable('table', { column1: integer('column1').default(10).notNull(), column2: integer('column2'), column3: integer('column3'), }, (table) => ({ cpk: primaryKey(table.column1, table.column2), })); ``` MySQL ```ts import { primaryKey } from 'drizzle-orm/mysql-core'; export const cpkTable = mysqlTable('table', { simple: int('simple'), columnNotNull: int('column_not_null').notNull(), columnDefault: int('column_default').default(100), }, (table) => ({ cpk: primaryKey(table.simple, table.columnDefault), })); ``` --- ## Drizzle Kit 0.17.0 was released 🎉 ## Breaking changes ### Folder structure was migrated to newer version Before running any new migrations `drizzle-kit` will ask you to upgrade in a first place Migration file structure < 0.17.0 ```plaintext 📦 └ 📂 migrations └ 📂 20221207174503 ├ 📜 migration.sql ├ 📜 snapshot.json └ 📂 20230101104503 ├ 📜 migration.sql ├ 📜 snapshot.json ``` Migration file structure >= 0.17.0 ```plaintext 📦 └ 📂 migrations └ 📂 meta ├ 📜 _journal.json ├ 📜 0000_snapshot.json ├ 📜 0001_snapshot.json └ 📜 0000_icy_stranger.sql └ 📜 0001_strange_avengers.sql ``` ## Upgrading to 0.17.0 --- ![](/changelogs/media/up_mysql.gif) To easily migrate from previous folder structure to new you need to run `up` command in drizzle kit. It's a great helper to upgrade your migrations to new format on each drizzle kit major update ```bash drizzle-kit up: # dialects: `pg`, `mysql`, `sqlite` # example for pg drizzle-kit up:pg ```
## New Features ### New `drizzle-kit` command called `drop`
In a case you think some of migrations were generated in a wrong way or you have made migration simultaneously with other developers you can easily rollback it by running simple command > **Warning**: > Make sure you are dropping migrations that were not applied to your database ```bash drizzle-kit drop ``` This command will show you a list of all migrations you have and you'll need just to choose migration you want to drop. After that `drizzle-kit` will do all the hard work on deleting migration files ![](/changelogs/media/drop.gif)
### New `drizzle-kit` option `--breakpoints` for `generate` and `introspect` commands If particular driver doesn't support running multiple quries in 1 execution you can use `--breakpoints`. `drizzle-kit` will generate current sql ```sql CREATE TABLE `users` ( `id` int PRIMARY KEY NOT NULL, `full_name` text NOT NULL, ); --> statement-breakpoint CREATE TABLE `table` ( `id` int PRIMARY KEY NOT NULL, `phone` int, ); ``` Using it `drizzle-orm` will split all sql files by statements and execute them separately
### Add `drizzle-kit introspect` for MySQL dialect You can introspect your mysql database using `introspect:mysql` command ```bash drizzle-kit introspect:mysql --out ./migrations --connectionString mysql://user:password@127.0.0.1:3306/database ``` ![](/changelogs/media/introspect_mysql.gif)
### Support for glob patterns for schema path Usage example in `cli` ```bash drizzle-kit generate:pg --out ./migrations --schema ./core/**/*.ts ./database/schema.ts ``` Usage example in `drizzle.config` ```text { "out: "./migrations", "schema": ["./core/**/*.ts", "./database/schema.ts"] } ``` ## Bug Fixes and improvements ### Postgres dialect --- **GitHub issue fixes** - [pg] char is undefined during introspection [#9](https://github.com/drizzle-team/drizzle-kit-mirror/issues/9) - when unknown type is detected, would be nice to emit a TODO comment instead of undefined [#8](https://github.com/drizzle-team/drizzle-kit-mirror/issues/8) - "post_id" integer DEFAULT currval('posts_id_seq'::regclass) generates invalid TS [#7](https://github.com/drizzle-team/drizzle-kit-mirror/issues/7) - "ip" INET NOT NULL is not supported [#6](https://github.com/drizzle-team/drizzle-kit-mirror/issues/6) - "id" UUID NOT NULL DEFAULT uuid_generate_v4() type is not supported [#5](https://github.com/drizzle-team/drizzle-kit-mirror/issues/5) - array fields end up as "undefined" in the schema [#4](https://github.com/drizzle-team/drizzle-kit-mirror/issues/4) - timestamp is not in the import statement in schema.ts [#3](https://github.com/drizzle-team/drizzle-kit-mirror/issues/3) - generated enums are not camel cased [#2](https://github.com/drizzle-team/drizzle-kit-mirror/issues/2) **Introspect improvements** - Add support for composite PK's generation; - Add support for `cidr`, `inet`, `macaddr`, `macaddr8`, `smallserial` - Add interval fields generation in schema, such as `minute to second`, `day to hour`, etc. - Add default values for `numerics` - Add default values for `enums` ### MySQL dialect --- **Migration generation improvements** - Add `autoincrement` create, delete and update handling - Add `on update current_timestamp` handling for timestamps - Add data type changing, using `modify` - Add `not null` changing, using `modify` - Add `default` drop and create statements - Fix `defaults` generation bugs, such as escaping, date strings, expressions, etc **Introspect improvements** - Add `autoincrement` to all supported types - Add `fsp` for time based data types - Add precision and scale for `double` - Make time `{ mode: "string" }` by default - Add defaults to `json`, `decimal` and `binary` datatypes - Add `enum` data type generation ================================================ FILE: changelogs/drizzle-orm/0.21.1.md ================================================ - 🎉 Added support for `HAVING` clause - 🎉 Added support for referencing selected fields in `.where()`, `.having()`, `.groupBy()` and `.orderBy()` using an optional callback: ```ts await db .select({ id: citiesTable.id, name: sql`upper(${citiesTable.name})`.as('upper_name'), usersCount: sql`count(${users2Table.id})::int`.as('users_count'), }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(citiesTable.id) .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name); ``` ================================================ FILE: changelogs/drizzle-orm/0.22.0.md ================================================ - 🎉 Introduced a standalone query builder that can be used without a DB connection: ```ts import { queryBuilder as qb } from 'drizzle-orm/pg-core'; const query = qb.select().from(users).where(eq(users.name, 'Dan')); const { sql, params } = query.toSQL(); ``` - 🎉 Improved `WITH ... SELECT` subquery creation syntax to more resemble SQL: **Before**: ```ts const regionalSales = db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region) .prepareWithSubquery('regional_sales'); await db.with(regionalSales).select(...).from(...); ``` **After**: ```ts const regionalSales = db .$with('regional_sales') .as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); await db.with(regionalSales).select(...).from(...); ``` ================================================ FILE: changelogs/drizzle-orm/0.23.0.md ================================================ - 🎉 Added Knex and Kysely adapters! They allow you to manage the schemas and migrations with Drizzle and query the data with your favorite query builder. See documentation for more details: - [Knex adapter](https://github.com/drizzle-team/drizzle-knex) - [Kysely adapter](https://github.com/drizzle-team/drizzle-kysely) - 🎉 Added "type maps" to all entities. You can access them via the special `_` property. For example: ```ts const users = mysqlTable('users', { id: int('id').primaryKey(), name: text('name').notNull(), }); type UserFields = typeof users['_']['columns']; type InsertUser = typeof users['_']['model']['insert']; ``` Full documentation on the type maps is coming soon. - 🎉 Added `.$type()` method to all column builders to allow overriding the data type. It also replaces the optional generics on columns. ```ts // Before const test = mysqlTable('test', { jsonField: json('json_field'), }); // After const test = mysqlTable('test', { jsonField: json('json_field').$type(), }); ``` - ❗ Changed syntax for text-based enum columns: ```ts // Before const test = mysqlTable('test', { role: text<'admin' | 'user'>('role'), }); // After const test = mysqlTable('test', { role: text('role', { enum: ['admin', 'user'] }), }); ``` - 🎉 Allowed passing an array of values into `.insert().values()` directly without spreading: ```ts const users = mysqlTable('users', { id: int('id').primaryKey(), name: text('name').notNull(), }); await users.insert().values([ { name: 'John' }, { name: 'Jane' }, ]); ``` The spread syntax is now deprecated and will be removed in one of the next releases. - 🎉 Added "table creators" to allow for table name customization: ```ts import { mysqlTableCreator } from 'drizzle-orm/mysql-core'; const mysqlTable = mysqlTableCreator((name) => `myprefix_${name}`); const users = mysqlTable('users', { id: int('id').primaryKey(), name: text('name').notNull(), }); // Users table is a normal table, but its name is `myprefix_users` in runtime ``` - 🎉 Implemented support for selecting/joining raw SQL expressions: ```ts // select current_date + s.a as dates from generate_series(0,14,7) as s(a); const result = await db .select({ dates: sql`current_date + s.a`, }) .from(sql`generate_series(0,14,7) as s(a)`); ``` - 🐛 Fixed a lot of bugs from user feedback on GitHub and Discord (thank you! ❤). Fixes #293 #301 #276 #269 #253 #311 #312 ================================================ FILE: changelogs/drizzle-orm/0.23.1.md ================================================ - 🐛 Re-export `InferModel` from `drizzle-orm` ================================================ FILE: changelogs/drizzle-orm/0.23.10.md ================================================ - 🐛 Add missing config argument to transactions API - 🐛 Fix Postgres and MySQL schema declaration (#427) ================================================ FILE: changelogs/drizzle-orm/0.23.11.md ================================================ - 🐛 Fix migrator function for PostgreSQL > Would suggest to upgrade to this version anyone who is using postgres dialect. `0.23.9` and `0.23.10` are broken for postgresql migrations ================================================ FILE: changelogs/drizzle-orm/0.23.12.md ================================================ - 🐛 Fixed multi-level join results (e.g. joining a subquery with a nested join) ================================================ FILE: changelogs/drizzle-orm/0.23.13.md ================================================ - 🎉 All enum and text enum columns now have a properly typed `enumValues` property ================================================ FILE: changelogs/drizzle-orm/0.23.2.md ================================================ - 🐛 Rolled back some breaking changes for drizzle-kit ================================================ FILE: changelogs/drizzle-orm/0.23.3.md ================================================ - 🎉 Added [libSQL](https://libsql.org/) support ================================================ FILE: changelogs/drizzle-orm/0.23.4.md ================================================ - 🐛 Fixed broken types in Kysely and Knex adapters ================================================ FILE: changelogs/drizzle-orm/0.23.5.md ================================================ - 🐛 Various minor bugfixes ================================================ FILE: changelogs/drizzle-orm/0.23.6.md ================================================ - 🐛 Fixed referencing the selected aliased field in the same query - 🐛 Fixed decimal column data type in MySQL - 🐛 Fixed mode autocompletion for integer column in SQLite - 🐛 Fixed extra parentheses in the generated SQL for the `IN` operator (#382) - 🐛 Fixed regression in `pgEnum.enumValues` type (#358) - 🎉 Allowed readonly arrays to be passed to `pgEnum` ================================================ FILE: changelogs/drizzle-orm/0.23.7.md ================================================ - 🎉 Added `INSERT IGNORE` support for MySQL (#305) ================================================ FILE: changelogs/drizzle-orm/0.23.8.md ================================================ - 🎉 Fixed dates timezone differences for timestamps in Postgres and MySQL (contributed by @AppelBoomHD via #288) ================================================ FILE: changelogs/drizzle-orm/0.23.9.md ================================================ # Transactions support 🎉 You can now use transactions with all the supported databases and drivers. `node-postgres` example: ```ts await db.transaction(async (tx) => { await tx.insert(users).values(newUser); await tx.update(users).set({ name: 'Mr. Dan' }).where(eq(users.name, 'Dan')); await tx.delete(users).where(eq(users.name, 'Dan')); }); ``` For more information, see transactions docs: - [PostgreSQL](/drizzle-orm/src/pg-core/README.md#transactions) - [MySQL](/drizzle-orm/src/mysql-core/README.md#transactions) - [SQLite](/drizzle-orm/src/sqlite-core/README.md#transactions) ================================================ FILE: changelogs/drizzle-orm/0.24.0.md ================================================ - 🎉 Added iterator support to `mysql2` (sponsored by @rizen ❤) - ❗ `.prepare()` in MySQL no longer requires a name argument ================================================ FILE: changelogs/drizzle-orm/0.24.1.md ================================================ ### Bugs 🐛 Fix onConflict targets in [#475](https://github.com/drizzle-team/drizzle-orm/pull/475) - thanks @wkunert ❤️ ### Documentation > Thanks to @tmcw we have started our way to get JSDoc documentation 📄 JSDoc for conditions in [#467](https://github.com/drizzle-team/drizzle-orm/pull/467) - thanks @tmcw ❤️ ================================================ FILE: changelogs/drizzle-orm/0.24.2.md ================================================ - 🐛 Pool connections opened for transactions are now closed after the transaction is committed or rolled back ================================================ FILE: changelogs/drizzle-orm/0.24.3.md ================================================ - 🐛 Fixed query generation when selecting from alias ================================================ FILE: changelogs/drizzle-orm/0.24.4.md ================================================ - 🐛 Added verbose error when .values() is called without values (#441) - 🐛 Fixed nested PG arrays mapping (#460) - ❗ Removed spread syntax in .values() (#269) - 🐛 Fixed passing undefined as field value to insert/update (#375) ================================================ FILE: changelogs/drizzle-orm/0.24.5.md ================================================ - Add possibility to have placeholders in `.limit()` and `.offset()` ```ts const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .limit(placeholder('limit')) .offset(placeholder('offset')) .prepare('stmt'); const result = await stmt.execute({ limit: 1, offset: 1 }); ``` ================================================ FILE: changelogs/drizzle-orm/0.25.0.md ================================================ # ESM support - 🎉 Added ESM support! You can now use `drizzle-orm` in both ESM and CJS environments. - 🎉 Added code minification and source maps. - ❗ Removed several nested import paths. Most notably, everything from `drizzle-orm/sql` and `drizzle-orm/expressions` should now be imported from `drizzle-orm` instead. ================================================ FILE: changelogs/drizzle-orm/0.25.1.md ================================================ - 🐛 Fix package.json `exports` field ================================================ FILE: changelogs/drizzle-orm/0.25.2.md ================================================ - 🎉 Documentation improvements (#495, #507) - 🎉 Added `"sideEffects": false` to package.json (#515) - 🐛 Fixed AWS Data API driver migrations (#510) ================================================ FILE: changelogs/drizzle-orm/0.25.3.md ================================================ - 🐛 Fix `pg` imports in ESM mode (#505) - 🐛 Add "types" and "default" fields to "exports" entries in package.json (#511) ================================================ FILE: changelogs/drizzle-orm/0.25.4.md ================================================ - 🎉 Added support for [Vercel Postgres](https://vercel.com/docs/storage/vercel-postgres/quickstart) ```typescript import { drizzle } from 'drizzle-orm/vercel-postgres'; import { sql } from "@vercel/postgres"; const db = drizzle(sql); db.select(...) ``` ================================================ FILE: changelogs/drizzle-orm/0.26.0.md ================================================ # Drizzle ORM 0.26.0 is here 🎉 ## README docs are fully tranferred to web The documentation has been completely reworked and updated with additional examples and explanations. You can find it here: https://orm.drizzle.team. Furthermore, the entire documentation has been made open source, allowing you to edit and add any information you deem important for the community. Visit https://github.com/drizzle-team/drizzle-orm-docs to access the open-sourced documentation. Additionally, you can create specific documentation issues in this repository ## New Features Introducing our first helper built on top of Drizzle Core API syntax: **the Relational Queries!** 🎉 With Drizzle RQ you can do: 1. Any amount of relations that will be mapped for you 2. Including or excluding! specific columns. You can also combine these options 3. Harness the flexibility of the `where` statements, allowing you to define custom conditions beyond the predefined ones available in the Drizzle Core API. 4. Expand the functionality by incorporating additional extras columns using SQL templates. For more examples, refer to the documentation. Most importantly, regardless of the size of your query, Drizzle will always generate a **SINGLE optimized query**. This efficiency extends to the usage of **Prepared Statements**, which are fully supported within the Relational Query Builder. For more info: [Prepared Statements in Relational Query Builder](https://orm.drizzle.team/rqb#prepared-statements) **Example of setting one-to-many relations** > As you can observe, `relations` are a distinct concept that coexists alongside the main Drizzle schema. You have the flexibility to opt-in or opt-out of them at any time without affecting the `drizzle-kit` migrations or the logic for Core API's types and runtime. ```ts import { integer, serial, text, pgTable } from 'drizzle-orm/pg-core'; import { relations } from 'drizzle-orm'; export const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); export const usersConfig = relations(users, ({ many }) => ({ posts: many(posts), })); export const posts = pgTable('posts', { id: serial('id').primaryKey(), content: text('content').notNull(), authorId: integer('author_id').notNull(), }); export const postsConfig = relations(posts, ({ one }) => ({ author: one(users, { fields: [posts.authorId], references: [users.id] }), })); ``` **Example of querying you database** Step 1: Provide all tables and relations to `drizzle` function > `drizzle` import depends on the database driver you're using ```ts import * as schema from './schema'; import { drizzle } from 'drizzle-orm/...'; const db = drizzle(client, { schema }); await db.query.users.findMany(...); ``` If you have schema in multiple files ```ts import * as schema1 from './schema1'; import * as schema2 from './schema2'; import { drizzle } from 'drizzle-orm/...'; const db = drizzle(client, { schema: { ...schema1, ...schema2 } }); await db.query.users.findMany(...); ``` Step 2: Query your database with Relational Query Builder **Select all users** ```ts const users = await db.query.users.findMany(); ``` **Select first users** > `.findFirst()` will add limit 1 to the query ```ts const user = await db.query.users.findFirst(); ``` **Select all users** Get all posts with just `id`, `content` and include `comments` ```ts const posts = await db.query.posts.findMany({ columns: { id: true, content: true, }, with: { comments: true, } }); ``` **Select all posts excluding `content` column** ```ts const posts = await db.query.posts.findMany({ columns: { content: false, }, }); ``` For more examples you can check [full docs](https://orm.drizzle.team/rqb) for Relational Queries ## Bug fixes - 🐛 Fixed partial joins with prefixed tables (#542) ## Drizzle Kit updates ### New ways to define drizzle config file You can now specify the configuration not only in the `.json` format but also in `.ts` and `.js` formats.
**TypeScript example** ```ts import { Config } from "drizzle-kit"; export default { schema: "", connectionString: process.env.DB_URL, out: "", breakpoints: true } satisfies Config; ``` **JavaScript example** ```js /** @type { import("drizzle-kit").Config } */ export default { schema: "", connectionString: "", out: "", breakpoints: true }; ``` ## New commands 🎉 ### `drizzle-kit push:mysql` You can now push your MySQL schema directly to the database without the need to create and manage migration files. This feature proves to be particularly useful for rapid local development and when working with PlanetScale databases. By pushing the MySQL schema directly to the database, you can streamline the development process and avoid the overhead of managing migration files. This allows for more efficient iteration and quick deployment of schema changes during local development. ### How to setup your codebase for drizzle-kit push feature? 1. For this feature, you need to create a `drizzle.config.[ts|js|json]` file. We recommend using `.ts` or `.js` files as they allow you to easily provide the database connection information as secret variables You'll need to specify `schema` and `connectionString`(or `db`, `port`, `host`, `password`, etc.) to make `drizzle-kit push:mysql` work `drizzle.config.ts` example ```ts copy import { Config } from "src"; export default { schema: "./schema.ts", connectionString: process.env.DB_URL, } satisfies Config; ``` 2. Run `drizzle-kit push:mysql` 3. If Drizzle detects any potential `data-loss` issues during a migration, it will prompt you to approve whether the data should be truncated or not in order to ensure a successful migration 4. Approve or reject the action that Drizzle needs to perform in order to push your schema changes to the database. 5. Done ✅ ================================================ FILE: changelogs/drizzle-orm/0.26.1.md ================================================ - 🐛 Fixed including multiple relations on the same level in RQB (#599) - 🐛 Updated migrators for relational queries support (#601) - 🐛 Fixed invoking .findMany() without arguments ================================================ FILE: changelogs/drizzle-orm/0.26.2.md ================================================ - 🐛 Fixed upsert targeting composite keys for SQLite (#521) - 🐛 AWS Data API+Postgres: fixed adding of typings when merging queries (#517) - 🐛 Fixed "on conflict" with "where" clause for Postgres (#651) - 🐛 Various GitHub docs community fixes and improvements ♥ (#547, #548, #587, #606, #609, #625) - **Experimental**: added OpenTelemetry support for Postgres ================================================ FILE: changelogs/drizzle-orm/0.26.3.md ================================================ - Disabled OTEL integration due to the top-level await issues ================================================ FILE: changelogs/drizzle-orm/0.26.4.md ================================================ - 🐛 Fixed AWS Data API mapping in relational queries (#677, #681) - 🐛 Allowed using named self-relations (#678) - 🐛 Fixed querying relations with composite FKs (#683) ================================================ FILE: changelogs/drizzle-orm/0.26.5.md ================================================ - 🎉 Added bigint mode to SQLite (#558) ================================================ FILE: changelogs/drizzle-orm/0.27.0.md ================================================ ## Correct behavior when installed in a monorepo (multiple Drizzle instances) Replacing all `instanceof` statements with a custom `is()` function allowed us to handle multiple Drizzle packages interacting properly. **It also fixes one of our biggest Discord tickets: `maximum call stack exceeded` 🎉** You should now use `is()` instead of `instanceof` to check if specific objects are instances of specific Drizzle types. It might be useful if you are building something on top of the Drizzle API. ```ts import { is, Column } from 'drizzle-orm' if (is(value, Column)) { // value's type is narrowed to Column } ``` ## `distinct` clause support ```ts await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ); ``` Also, `distinct on` clause is available for PostgreSQL: ```ts await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( usersDistinctTable.id, ); await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( usersDistinctTable, ).orderBy(usersDistinctTable.name); ``` ## `bigint` and `boolean` support for SQLite Contributed by @MrRahulRamkumar (#558), @raducristianpopa (#411) and @meech-ward (#725) ```ts const users = sqliteTable('users', { bigintCol: blob('bigint', { mode: 'bigint' }).notNull(), boolCol: integer('bool', { mode: 'boolean' }).notNull(), }); ``` ## DX improvements - Added verbose type error when relational queries are used on a database type without a schema generic - Fix `where` callback in RQB for tables without relations ## Various docs improvements - Fix joins docs typo (#522) by @arjunyel - Add Supabase guide to readme (#690) by @saltcod - Make the column type in sqlite clearer (#717) by @shairez ================================================ FILE: changelogs/drizzle-orm/0.27.1.md ================================================ - 🎉 Added support for [Neon HTTP driver](https://neon.tech/docs/serverless/serverless-driver) ```typescript import { neon, neonConfig } from '@neondatabase/serverless'; import { drizzle } from 'drizzle-orm/neon-http'; neonConfig.fetchConnectionCache = true; const sql = neon(process.env.DRIZZLE_DATABASE_URL!); const db = drizzle(sql); db.select(...) ``` ================================================ FILE: changelogs/drizzle-orm/0.27.2.md ================================================ ## 🎉 Added support for `UNIQUE` constraints in PostgreSQL, MySQL, SQLite For PostgreSQL, unique constraints can be defined at the column level for single-column constraints, and in the third parameter for multi-column constraints. In both cases, it will be possible to define a custom name for the constraint. Additionally, PostgreSQL will receive the `NULLS NOT DISTINCT` option to restrict having more than one NULL value in a table. [Reference](https://www.postgresql.org/docs/current/ddl-constraints.html#DDL-CONSTRAINTS-UNIQUE-CONSTRAINTS) Examples that just shows a different `unique` usage. Please don't search a real usage for those tables ```ts // single column const table = pgTable('table', { id: serial('id').primaryKey(), name: text('name').notNull().unique(), state: char('state', { length: 2 }).unique('custom'), field: char('field', { length: 2 }).unique('custom_field', { nulls: 'not distinct' }), }); // multiple columns const table = pgTable('table', { id: serial('id').primaryKey(), name: text('name').notNull(), state: char('state', { length: 2 }), }, (t) => ({ first: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), second: unique('custom_name1').on(t.name, t.state), })); ``` For MySQL, everything will be the same except for the `NULLS NOT DISTINCT` option. It appears that MySQL does not support it Examples that just shows a different `unique` usage. Please don't search a real usage for those tables ```ts // single column const table = mysqlTable('table', { id: serial('id').primaryKey(), name: text('name').notNull().unique(), state: text('state').unique('custom'), field: text('field').unique('custom_field'), }); // multiple columns const table = mysqlTable('cities1', { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ first: unique().on(t.name, t.state), second: unique('custom_name1').on(t.name, t.state), })); ``` In SQLite unique constraints are the same as unique indexes. As long as you can specify a name for the unique index in SQLite - we will treat all unique constraints as unique indexes in internal implementation ```ts // single column const table = sqliteTable('table', { id: int('id').primaryKey(), name: text('name').notNull().unique(), state: text('state').unique('custom'), field: text('field').unique(), }); // multiple columns const table = sqliteTable('table', { id: int('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ first: unique().on(t.name, t.state), second: unique('custom').on(t.name, t.state), })); ``` ================================================ FILE: changelogs/drizzle-orm/0.28.0.md ================================================ ## Breaking changes ### Removed support for filtering by nested relations Current example won't work in `0.28.0`: ```ts const usersWithPosts = await db.query.users.findMany({ where: (table, { sql }) => (sql`json_array_length(${table.posts}) > 0`), with: { posts: true, }, }); ``` The `table` object in the `where` callback won't have fields from `with` and `extras`. We removed them to be able to build more efficient relational queries, which improved row reads and performance. If you have used those fields in the `where` callback before, there are several workarounds: 1. Applying those filters manually on the code level after the rows are fetched; 2. Using the core API. ### Added Relational Queries `mode` config for `mysql2` driver Drizzle relational queries always generate exactly one SQL statement to run on the database and it has certain caveats. To have best in class support for every database out there we've introduced modes. Drizzle relational queries use lateral joins of subqueries under the hood and for now PlanetScale does not support them. When using `mysql2` driver with regular MySQL database - you should specify mode: "default". When using `mysql2` driver with PlanetScale - you need to specify mode: "planetscale". ```ts import { drizzle } from 'drizzle-orm/mysql2'; import mysql from 'mysql2/promise'; import * as schema from './schema'; const connection = await mysql.createConnection({ uri: process.env.PLANETSCALE_DATABASE_URL, }); const db = drizzle(connection, { schema, mode: 'planetscale' }); ``` ## Improved IntelliSense performance for large schemas We've run the diagnostics on a database schema with 85 tables, 666 columns, 26 enums, 172 indexes and 133 foreign keys. We've optimized internal types which resulted in **430%** speed up in IntelliSense. ## Improved Relational Queries Permormance and Read Usage In this release we've fully changed a way query is generated for Relational Queri API. As a summary we've made current set of changes in query generation startegy: 1. Lateral Joins: In the new version we're utilizing lateral joins, denoted by the "LEFT JOIN LATERAL" clauses, to retrieve specific data from related tables efficiently For MySQL in PlanetScale and SQLite, we've used simple subquery selects, which improved a query plan and overall performance 2. Selective Data Retrieval: In the new version we're retrieving only the necessary data from tables. This targeted data retrieval reduces the amount of unnecessary information fetched, resulting in a smaller dataset to process and faster execution. 3. Reduced Aggregations: In the new version we've reduced the number of aggregation functions (e.g., COUNT, json_agg). By using json_build_array directly within the lateral joins, drizzle is aggregating the data in a more streamlined manner, leading to improved query performance. 4. Simplified Grouping: In the new version the GROUP BY clause is removed, as the lateral joins and subqueries already handle data aggregation more efficiently. For this drizzle query ```ts const items = await db.query.comments.findMany({ limit, orderBy: comments.id, with: { user: { columns: { name: true }, }, post: { columns: { title: true }, with: { user: { columns: { name: true }, }, }, }, }, }); ``` Query that Drizzle generates now ```sql select "comments"."id", "comments"."user_id", "comments"."post_id", "comments"."content", "comments_user"."data" as "user", "comments_post"."data" as "post" from "comments" left join lateral (select json_build_array("comments_user"."name") as "data" from (select * from "users" "comments_user" where "comments_user"."id" = "comments"."user_id" limit 1) "comments_user") "comments_user" on true left join lateral (select json_build_array("comments_post"."title", "comments_post_user"."data") as "data" from (select * from "posts" "comments_post" where "comments_post"."id" = "comments"."post_id" limit 1) "comments_post" left join lateral (select json_build_array("comments_post_user"."name") as "data" from (select * from "users" "comments_post_user" where "comments_post_user"."id" = "comments_post"."user_id" limit 1) "comments_post_user") "comments_post_user" on true) "comments_post" on true order by "comments"."id" limit 1 ``` Query generated before: ```sql SELECT "id", "user_id", "post_id", "content", "user"::JSON, "post"::JSON FROM (SELECT "comments".*, CASE WHEN count("comments_post"."id") = 0 THEN '[]' ELSE json_agg(json_build_array("comments_post"."title", "comments_post"."user"::JSON))::text END AS "post" FROM (SELECT "comments".*, CASE WHEN count("comments_user"."id") = 0 THEN '[]' ELSE json_agg(json_build_array("comments_user"."name"))::text END AS "user" FROM "comments" LEFT JOIN (SELECT "comments_user".* FROM "users" "comments_user") "comments_user" ON "comments"."user_id" = "comments_user"."id" GROUP BY "comments"."id", "comments"."user_id", "comments"."post_id", "comments"."content") "comments" LEFT JOIN (SELECT "comments_post".* FROM (SELECT "comments_post".*, CASE WHEN count("comments_post_user"."id") = 0 THEN '[]' ELSE json_agg(json_build_array("comments_post_user"."name")) END AS "user" FROM "posts" "comments_post" LEFT JOIN (SELECT "comments_post_user".* FROM "users" "comments_post_user") "comments_post_user" ON "comments_post"."user_id" = "comments_post_user"."id" GROUP BY "comments_post"."id") "comments_post") "comments_post" ON "comments"."post_id" = "comments_post"."id" GROUP BY "comments"."id", "comments"."user_id", "comments"."post_id", "comments"."content", "comments"."user") "comments" LIMIT 1 ``` ## Possibility to insert rows with default values for all columns You can now provide an empty object or an array of empty objects, and Drizzle will insert all defaults into the database. ```ts // Insert 1 row with all defaults await db.insert(usersTable).values({}); // Insert 2 rows with all defaults await db.insert(usersTable).values([{}, {}]); ``` ================================================ FILE: changelogs/drizzle-orm/0.28.1.md ================================================ - 🐛 Fixed Postgres array-related issues introduced by 0.28.0 (#983, #992) ================================================ FILE: changelogs/drizzle-orm/0.28.2.md ================================================ ## The community contributions release 🎉 ### Internal Features and Changes 1. Added a set of tests for d1. Thanks to @AdiRishi! 2. Fixed issues in internal documentation. Thanks to @balazsorban44 and @pyk! ### Bug Fixes 1. Resolved the issue of truncating timestamp milliseconds for MySQL. Thanks to @steviec! 2. Corrected the type of the get() method for sqlite-based dialects. Issue #565 has been closed. Thanks to @stefanmaric! 3. Rectified the sqlite-proxy bug that caused the query to execute twice. Thanks to @mosch! ### New packages 🎉 Added a support for [Typebox](https://github.com/sinclairzx81/typebox) in [drizzle-typebox](https://orm.drizzle.team/docs/typebox) package. Thanks to @Bulbang! Please check documentation page for more usage examples: https://orm.drizzle.team/docs/typebox ================================================ FILE: changelogs/drizzle-orm/0.28.3.md ================================================ - 🎉 Added SQLite simplified query API - 🎉 Added `.$defaultFn()` / `.$default()` methods to column builders You can specify any logic and any implementation for a function like `cuid()` for runtime defaults. Drizzle won't limit you in the number of implementations you can add. > Note: This value does not affect the `drizzle-kit` behavior, it is only used at runtime in `drizzle-orm` ```ts import { varchar, mysqlTable } from "drizzle-orm/mysql-core"; import { createId } from '@paralleldrive/cuid2'; const table = mysqlTable('table', { id: varchar('id', { length: 128 }).$defaultFn(() => createId()), }); ``` - 🎉 Added `table.$inferSelect` / `table._.inferSelect` and `table.$inferInsert` / `table._.inferInsert` for more convenient table model type inference - 🛠 Deprecated `InferModel` type in favor of more explicit `InferSelectModel` and `InferInsertModel` ```ts import { InferSelectModel, InferInsertModel } from 'drizzle-orm' const usersTable = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: jsonb('jsonb').$type(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); type SelectUser = typeof usersTable.$inferSelect; type InsertUser = typeof usersTable.$inferInsert; type SelectUser2 = InferSelectModel; type InsertUser2 = InferInsertModel; ``` - 🛠 Disabled `.d.ts` files bundling - 🐛 Fixed sqlite-proxy and SQL.js response from `.get()` when the result is empty ================================================ FILE: changelogs/drizzle-orm/0.28.4.md ================================================ - 🐛 Fixed imports in ESM-based projects (#1088) - 🐛 Fixed type error on Postgres table definitions (#1089) ================================================ FILE: changelogs/drizzle-orm/0.28.5.md ================================================ - 🐛 Fixed incorrect OpenTelemetry type import that caused a runtime error ================================================ FILE: changelogs/drizzle-orm/0.28.6.md ================================================ ## Changes > **Note**: > MySQL `datetime` with `mode: 'date'` will now store dates in UTC strings and retrieve data in UTC as well to align with MySQL behavior for `datetime`. If you need a different behavior and want to handle `datetime` mapping in a different way, please use `mode: 'string'` or [Custom Types](https://orm.drizzle.team/docs/custom-types) implementation Check [Fix Datetime mapping for MySQL](https://github.com/drizzle-team/drizzle-orm/pull/1082) for implementation details ## New Features ### 🎉 `LibSQL` batch api support Reference: https://docs.turso.tech/reference/client-access/javascript-typescript-sdk#execute-a-batch-of-statements Batch API usage example: ```ts const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id, }), db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)), db.query.usersTable.findMany({}), db.select().from(usersTable).where(eq(usersTable.id, 1)), db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from( usersTable, ), ]); ``` Type for `batchResponse` in this example would be: ```ts type BatchResponse = [ { id: number; }[], ResultSet, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; invitedBy: number | null; }[], ]; ``` All possible builders that can be used inside `db.batch`: ```ts `db.all()`, `db.get()`, `db.values()`, `db.run()`, `db.query..findMany()`, `db.query.
.findFirst()`, `db.select()...`, `db.update()...`, `db.delete()...`, `db.insert()...`, ``` More usage examples here: [integration-tests/tests/libsql-batch.test.ts](https://github.com/drizzle-team/drizzle-orm/pull/1161/files#diff-17253895532e520545027dd48dcdbac2d69a5a49d594974e6d55d7502f89b838R248) and in [docs](https://orm.drizzle.team/docs/batch-api) ### 🎉 Add json mode for text in SQLite Example ```ts const test = sqliteTable('test', { dataTyped: text('data_typed', { mode: 'json' }).$type<{ a: 1 }>().notNull(), }); ``` ### 🎉 Add `.toSQL()` to Relational Query API calls Example ```ts const query = db.query.usersTable.findFirst().toSQL(); ``` ### 🎉 Added new PostgreSQL operators for Arrays - thanks @L-Mario564 List of operators and usage examples `arrayContains`, `arrayContained`, `arrayOverlaps` ```ts const contains = await db.select({ id: posts.id }).from(posts) .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); const contained = await db.select({ id: posts.id }).from(posts) .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); const overlaps = await db.select({ id: posts.id }).from(posts) .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); const withSubQuery = await db.select({ id: posts.id }).from(posts) .where(arrayContains( posts.tags, db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), )); ``` ### 🎉 Add more SQL operators for where filter function in Relational Queries - thanks @cayter! **Before** ```ts import { inArray } from "drizzle-orm/pg-core"; await db.users.findFirst({ where: (table, _) => inArray(table.id, [ ... ]) }) ``` **After** ```ts await db.users.findFirst({ where: (table, { inArray }) => inArray(table.id, [ ... ]) }) ``` ## Bug Fixes - 🐛 [Correct where in on conflict in sqlite](https://github.com/drizzle-team/drizzle-orm/pull/1076) - Thanks @hanssonduck! - 🐛 [Fix libsql/client type import](https://github.com/drizzle-team/drizzle-orm/pull/1122) - Thanks @luisfvieirasilva! - 🐛 [Fix: raw sql query not being mapped properly on RDS](https://github.com/drizzle-team/drizzle-orm/pull/1071) - Thanks @boian-ivanov - 🐛 [Fix Datetime mapping for MySQL](https://github.com/drizzle-team/drizzle-orm/pull/1082) - thanks @Angelelz - 🐛 [Fix smallserial generating as serial](https://github.com/drizzle-team/drizzle-orm/pull/1127) - thanks @L-Mario564 ================================================ FILE: changelogs/drizzle-orm/0.29.0.md ================================================ > Drizzle ORM version `0.29.0` will require a minimum Drizzle Kit version of `0.20.0`, and vice versa. Therefore, when upgrading to a newer version of Drizzle ORM, you will also need to upgrade Drizzle Kit. This may result in some breaking changes throughout the versions, especially if you need to upgrade Drizzle Kit and your Drizzle ORM version is older than `<0.28.0` ## New Features ### 🎉 MySQL `unsigned` option for bigint You can now specify `bigint unsigned` type ```ts const table = mysqlTable('table', { id: bigint('id', { mode: 'number', unsigned: true }), }); ``` Read more in [docs](https://orm.drizzle.team/docs/column-types/mysql#bigint) ### 🎉 Improved query builder types Starting from `0.29.0` by default, as all the query builders in Drizzle try to conform to SQL as much as possible, you can only invoke most of the methods once. For example, in a SELECT statement there might only be one WHERE clause, so you can only invoke .where() once: ```ts const query = db .select() .from(users) .where(eq(users.id, 1)) .where(eq(users.name, 'John')); // ❌ Type error - where() can only be invoked once ``` This behavior is useful for conventional query building, i.e. when you create the whole query at once. However, it becomes a problem when you want to build a query dynamically, i.e. if you have a shared function that takes a query builder and enhances it. To solve this problem, Drizzle provides a special 'dynamic' mode for query builders, which removes the restriction of invoking methods only once. To enable it, you need to call .$dynamic() on a query builder. Let's see how it works by implementing a simple withPagination function that adds LIMIT and OFFSET clauses to a query based on the provided page number and an optional page size: ```ts function withPagination( qb: T, page: number, pageSize: number = 10, ) { return qb.limit(pageSize).offset(page * pageSize); } const query = db.select().from(users).where(eq(users.id, 1)); withPagination(query, 1); // ❌ Type error - the query builder is not in dynamic mode const dynamicQuery = query.$dynamic(); withPagination(dynamicQuery, 1); // ✅ OK ``` Note that the withPagination function is generic, which allows you to modify the result type of the query builder inside it, for example by adding a join: ```ts function withFriends(qb: T) { return qb.leftJoin(friends, eq(friends.userId, users.id)); } let query = db.select().from(users).where(eq(users.id, 1)).$dynamic(); query = withFriends(query); ``` Read more in [docs](https://orm.drizzle.team/docs/dynamic-query-building) ### 🎉 Possibility to specify name for primary keys and foreign keys There is an issue when constraint names exceed the 64-character limit of the database. This causes the database engine to truncate the name, potentially leading to issues. Starting from `0.29.0`, you have the option to specify custom names for both `primaryKey()` and `foreignKey()`. We have also deprecated the old `primaryKey()` syntax, which can still be used but will be removed in future releases ```ts const table = pgTable('table', { id: integer('id'), name: text('name'), }, (table) => ({ cpk: primaryKey({ name: 'composite_key', columns: [table.id, table.name] }), cfk: foreignKey({ name: 'fkName', columns: [table.id], foreignColumns: [table.name], }), })); ``` Read more in [docs](https://orm.drizzle.team/docs/indexes-constraints#composite-primary-key) ### 🎉 Read Replicas Support You can now use the Drizzle `withReplica` function to specify different database connections for read replicas and the main instance for write operations. By default, `withReplicas` will use a random read replica for read operations and the main instance for all other data modification operations. You can also specify custom logic for choosing which read replica connection to use. You have the freedom to make any weighted, custom decision for that. Here are some usage examples: ```ts const primaryDb = drizzle(client); const read1 = drizzle(client); const read2 = drizzle(client); const db = withReplicas(primaryDb, [read1, read2]); // read from primary db.$primary.select().from(usersTable); // read from either read1 connection or read2 connection db.select().from(usersTable) // use primary database for delete operation db.delete(usersTable).where(eq(usersTable.id, 1)) ``` Implementation example of custom logic for selecting read replicas, where the first replica has a 70% chance of being chosen, and the second replica has a 30% chance of being chosen. Note that you can implement any type of random selection for read replicas ```ts const db = withReplicas(primaryDb, [read1, read2], (replicas) => { const weight = [0.7, 0.3]; let cumulativeProbability = 0; const rand = Math.random(); for (const [i, replica] of replicas.entries()) { cumulativeProbability += weight[i]!; if (rand < cumulativeProbability) return replica; } return replicas[0]! }); ``` `withReplicas` function is available for all dialects in Drizzle ORM Read more in [docs](https://orm.drizzle.team/docs/read-replicas) ### 🎉 Set operators support (UNION, UNION ALL, INTERSECT, INTERSECT ALL, EXCEPT, EXCEPT ALL) Huge thanks to @Angelelz for the significant contribution he made, from API discussions to proper type checks and runtime logic, along with an extensive set of tests. This greatly assisted us in delivering this feature in this release Usage examples: All set operators can be used in a two ways: `import approach` or `builder approach` ##### Import approach ```ts import { union } from 'drizzle-orm/pg-core' const allUsersQuery = db.select().from(users); const allCustomersQuery = db.select().from(customers); const result = await union(allUsersQuery, allCustomersQuery) ``` ##### Builder approach ```ts const result = await db.select().from(users).union(db.select().from(customers)); ``` Read more in [docs](https://orm.drizzle.team/docs/set-operations) ### 🎉 New MySQL Proxy Driver A new driver has been released, allowing you to create your own implementation for an HTTP driver using a MySQL database. You can find usage examples in the `./examples/mysql-proxy` folder You need to implement two endpoints on your server that will be used for queries and migrations(Migrate endpoint is optional and only if you want to use drizzle migrations). Both the server and driver implementation are up to you, so you are not restricted in any way. You can add custom mappings, logging, and much more You can find both server and driver implementation examples in the `./examples/mysql-proxy` folder ```ts // Driver import axios from 'axios'; import { eq } from 'drizzle-orm/expressions'; import { drizzle } from 'drizzle-orm/mysql-proxy'; import { migrate } from 'drizzle-orm/mysql-proxy/migrator'; import { cities, users } from './schema'; async function main() { const db = drizzle(async (sql, params, method) => { try { const rows = await axios.post(`${process.env.REMOTE_DRIVER}/query`, { sql, params, method, }); return { rows: rows.data }; } catch (e: any) { console.error('Error from pg proxy server:', e.response.data); return { rows: [] }; } }); await migrate(db, async (queries) => { try { await axios.post(`${process.env.REMOTE_DRIVER}/migrate`, { queries }); } catch (e) { console.log(e); throw new Error('Proxy server cannot run migrations'); } }, { migrationsFolder: 'drizzle' }); await db.insert(cities).values({ id: 1, name: 'name' }); await db.insert(users).values({ id: 1, name: 'name', email: 'email', cityId: 1, }); const usersToCityResponse = await db.select().from(users).leftJoin( cities, eq(users.cityId, cities.id), ); } ``` ### 🎉 New PostgreSQL Proxy Driver Same as MySQL you can now implement your own http driver for PostgreSQL database. You can find usage examples in the `./examples/pg-proxy` folder You need to implement two endpoints on your server that will be used for queries and migrations (Migrate endpoint is optional and only if you want to use drizzle migrations). Both the server and driver implementation are up to you, so you are not restricted in any way. You can add custom mappings, logging, and much more You can find both server and driver implementation examples in the `./examples/pg-proxy` folder ```ts import axios from 'axios'; import { eq } from 'drizzle-orm/expressions'; import { drizzle } from 'drizzle-orm/pg-proxy'; import { migrate } from 'drizzle-orm/pg-proxy/migrator'; import { cities, users } from './schema'; async function main() { const db = drizzle(async (sql, params, method) => { try { const rows = await axios.post(`${process.env.REMOTE_DRIVER}/query`, { sql, params, method }); return { rows: rows.data }; } catch (e: any) { console.error('Error from pg proxy server:', e.response.data); return { rows: [] }; } }); await migrate(db, async (queries) => { try { await axios.post(`${process.env.REMOTE_DRIVER}/query`, { queries }); } catch (e) { console.log(e); throw new Error('Proxy server cannot run migrations'); } }, { migrationsFolder: 'drizzle' }); const insertedCity = await db.insert(cities).values({ id: 1, name: 'name' }).returning(); const insertedUser = await db.insert(users).values({ id: 1, name: 'name', email: 'email', cityId: 1 }); const usersToCityResponse = await db.select().from(users).leftJoin(cities, eq(users.cityId, cities.id)); } ``` ### 🎉 `D1` Batch API support Reference: https://developers.cloudflare.com/d1/platform/client-api/#dbbatch Batch API usage example: ```ts const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id, }), db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)), db.query.usersTable.findMany({}), db.select().from(usersTable).where(eq(usersTable.id, 1)), db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from( usersTable, ), ]); ``` Type for `batchResponse` in this example would be: ```ts type BatchResponse = [ { id: number; }[], D1Result, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; invitedBy: number | null; }[], ]; ``` All possible builders that can be used inside `db.batch`: ```ts `db.all()`, `db.get()`, `db.values()`, `db.run()`, `db.query.
.findMany()`, `db.query.
.findFirst()`, `db.select()...`, `db.update()...`, `db.delete()...`, `db.insert()...`, ``` More usage examples here: [integration-tests/tests/d1-batch.test.ts](https://github.com/drizzle-team/drizzle-orm/blob/beta/integration-tests/tests/d1-batch.test.ts) and in [docs](https://orm.drizzle.team/docs/batch-api) --- ## Drizzle Kit 0.20.0 1. New way to define drizzle.config using `defineConfig` function 2. Possibility to access Cloudflare D1 with Drizzle Studio using wrangler.toml file 3. Drizzle Studio is migrating to https://local.drizzle.studio/ 4. `bigint unsigned` support 5. `primaryKeys` and `foreignKeys` now can have custom names 6. Environment variables are now automatically fetched 7. Some bug fixes and improvements You can read more about drizzle-kit updates [here](https://github.com/drizzle-team/drizzle-kit-mirror/releases/tag/v0.20.0) ================================================ FILE: changelogs/drizzle-orm/0.29.1.md ================================================ # Fixes - Forward args correctly when using withReplica feature #1536. Thanks @Angelelz - Fix selectDistinctOn not working with multiple columns #1466. Thanks @L-Mario564 # New Features/Helpers ## 🎉 Detailed JSDoc for all query builders in all dialects - thanks @realmikesolo You can now access more information, hints, documentation links, etc. while developing and using JSDoc right in your IDE. Previously, we had them only for filter expressions, but now you can see them for all parts of the Drizzle query builder ## 🎉 New helpers for aggregate functions in SQL - thanks @L-Mario564 > Remember, aggregation functions are often used with the GROUP BY clause of the SELECT statement. So if you are selecting using aggregating functions and other columns in one query, be sure to use the `.groupBy` clause Here is a list of functions and equivalent using `sql` template **count** ```ts await db.select({ value: count() }).from(users); await db.select({ value: count(users.id) }).from(users); // It's equivalent to writing await db.select({ value: sql`count('*'))`.mapWith(Number) }).from(users); await db.select({ value: sql`count(${users.id})`.mapWith(Number) }).from(users); ``` **countDistinct** ```ts await db.select({ value: countDistinct(users.id) }).from(users); // It's equivalent to writing await db.select({ value: sql`count(${users.id})`.mapWith(Number) }).from(users); ``` **avg** ```ts await db.select({ value: avg(users.id) }).from(users); // It's equivalent to writing await db.select({ value: sql`avg(${users.id})`.mapWith(String) }).from(users); ``` **avgDistinct** ```ts await db.select({ value: avgDistinct(users.id) }).from(users); // It's equivalent to writing await db.select({ value: sql`avg(distinct ${users.id})`.mapWith(String) }).from(users); ``` **sum** ```ts await db.select({ value: sum(users.id) }).from(users); // It's equivalent to writing await db.select({ value: sql`sum(${users.id})`.mapWith(String) }).from(users); ``` **sumDistinct** ```ts await db.select({ value: sumDistinct(users.id) }).from(users); // It's equivalent to writing await db.select({ value: sql`sum(distinct ${users.id})`.mapWith(String) }).from(users); ``` **max** ```ts await db.select({ value: max(users.id) }).from(users); // It's equivalent to writing await db.select({ value: sql`max(${expression})`.mapWith(users.id) }).from(users); ``` **min** ```ts await db.select({ value: min(users.id) }).from(users); // It's equivalent to writing await db.select({ value: sql`min(${users.id})`.mapWith(users.id) }).from(users); ``` # New Packages ## 🎉 ESLint Drizzle Plugin For cases where it's impossible to perform type checks for specific scenarios, or where it's possible but error messages would be challenging to understand, we've decided to create an ESLint package with recommended rules. This package aims to assist developers in handling crucial scenarios during development > Big thanks to @Angelelz for initiating the development of this package and transferring it to the Drizzle Team's npm ## Install ```sh [ npm | yarn | pnpm | bun ] install eslint eslint-plugin-drizzle ``` You can install those packages for typescript support in your IDE ```sh [ npm | yarn | pnpm | bun ] install @typescript-eslint/eslint-plugin @typescript-eslint/parser ``` ## Usage Create a `.eslintrc.yml` file, add `drizzle` to the `plugins`, and specify the rules you want to use. You can find a list of all existing rules below ```yml root: true parser: '@typescript-eslint/parser' parserOptions: project: './tsconfig.json' plugins: - drizzle rules: 'drizzle/enforce-delete-with-where': "error" 'drizzle/enforce-update-with-where': "error" ``` ### All config This plugin exports an [`all` config](src/configs/all.js) that makes use of all rules (except for deprecated ones). ```yml root: true extends: - "plugin:drizzle/all" parser: '@typescript-eslint/parser' parserOptions: project: './tsconfig.json' plugins: - drizzle ``` At the moment, `all` is equivalent to `recommended` ```yml root: true extends: - "plugin:drizzle/recommended" parser: '@typescript-eslint/parser' parserOptions: project: './tsconfig.json' plugins: - drizzle ``` ## Rules **enforce-delete-with-where**: Enforce using `delete` with the`.where()` clause in the `.delete()` statement. Most of the time, you don't need to delete all rows in the table and require some kind of `WHERE` statements. **Error Message**: ``` Without `.where(...)` you will delete all the rows in a table. If you didn't want to do it, please use `db.delete(...).where(...)` instead. Otherwise you can ignore this rule here ``` Optionally, you can define a `drizzleObjectName` in the plugin options that accept a `string` or `string[]`. This is useful when you have objects or classes with a delete method that's not from Drizzle. Such a `delete` method will trigger the ESLint rule. To avoid that, you can define the name of the Drizzle object that you use in your codebase (like db) so that the rule would only trigger if the delete method comes from this object: Example, config 1: ```json "rules": { "drizzle/enforce-delete-with-where": ["error"] } ``` ```ts class MyClass { public delete() { return {} } } const myClassObj = new MyClass(); // ---> Will be triggered by ESLint Rule myClassObj.delete() const db = drizzle(...) // ---> Will be triggered by ESLint Rule db.delete() ``` Example, config 2: ```json "rules": { "drizzle/enforce-delete-with-where": ["error", { "drizzleObjectName": ["db"] }], } ``` ```ts class MyClass { public delete() { return {} } } const myClassObj = new MyClass(); // ---> Will NOT be triggered by ESLint Rule myClassObj.delete() const db = drizzle(...) // ---> Will be triggered by ESLint Rule db.delete() ``` **enforce-update-with-where**: Enforce using `update` with the`.where()` clause in the `.update()` statement. Most of the time, you don't need to update all rows in the table and require some kind of `WHERE` statements. **Error Message**: ``` Without `.where(...)` you will update all the rows in a table. If you didn't want to do it, please use `db.update(...).set(...).where(...)` instead. Otherwise you can ignore this rule here ``` Optionally, you can define a `drizzleObjectName` in the plugin options that accept a `string` or `string[]`. This is useful when you have objects or classes with a delete method that's not from Drizzle. Such as `update` method will trigger the ESLint rule. To avoid that, you can define the name of the Drizzle object that you use in your codebase (like db) so that the rule would only trigger if the delete method comes from this object: Example, config 1: ```json "rules": { "drizzle/enforce-update-with-where": ["error"] } ``` ```ts class MyClass { public update() { return {} } } const myClassObj = new MyClass(); // ---> Will be triggered by ESLint Rule myClassObj.update() const db = drizzle(...) // ---> Will be triggered by ESLint Rule db.update() ``` Example, config 2: ```json "rules": { "drizzle/enforce-update-with-where": ["error", { "drizzleObjectName": ["db"] }], } ``` ```ts class MyClass { public update() { return {} } } const myClassObj = new MyClass(); // ---> Will NOT be triggered by ESLint Rule myClassObj.update() const db = drizzle(...) // ---> Will be triggered by ESLint Rule db.update() ``` ================================================ FILE: changelogs/drizzle-orm/0.29.2.md ================================================ ## Fixes - Added improvements to the planescale relational tests #1579 - thanks @Angelelz - [Pg] FIX: correct string escaping for empty PgArrays #1640 - thanks @Angelelz - Fix wrong syntax for exists fn in sqlite #1647 - thanks @Angelelz - Properly handle dates in AWS Data API - Fix Hermes mixins constructor issue ## ESLint Drizzle Plugin, v0.2.3 ``` npm i eslint-plugin-drizzle@0.2.3 ``` 🎉 **[ESLint] Add support for functions and improve error messages #1586 - thanks @ngregrichardson** - Allowed Drizzle object to be or to be retrieved from a function, e.g. - Added better context to the suggestion in the error message. ## New Drivers ### 🎉 Expo SQLite Driver is available For starting with Expo SQLite Driver, you need to install `expo-sqlite` and `drizzle-orm` packages. ```bash npm install drizzle-orm expo-sqlite@next ``` Then, you can use it like this: ```ts import { drizzle } from "drizzle-orm/expo-sqlite"; import { openDatabaseSync } from "expo-sqlite/next"; const expoDb = openDatabaseSync("db.db"); const db = drizzle(expoDb); await db.select().from(...)... // or db.select().from(...).then(...); // or db.select().from(...).all(); ``` If you want to use Drizzle Migrations, you need to update babel and metro configuration files. 1. Install `babel-plugin-inline-import` package. ```bash npm install babel-plugin-inline-import ``` 2. Update `babel.config.js` and `metro.config.js` files. babel.config.js ```diff module.exports = function(api) { api.cache(true); return { presets: ['babel-preset-expo'], + plugins: [["inline-import", { "extensions": [".sql"] }]] }; }; ``` metro.config.js ```diff const { getDefaultConfig } = require('expo/metro-config'); /** @type {import('expo/metro-config').MetroConfig} */ const config = getDefaultConfig(__dirname); +config.resolver.sourceExts.push('sql'); module.exports = config; ``` 3. Create `drizzle.config.ts` file in your project root folder. ```ts import type { Config } from 'drizzle-kit'; export default { schema: './db/schema.ts', out: './drizzle', driver: 'expo', } satisfies Config; ``` After creating schema file and drizzle.config.ts file, you can generate migrations like this: ```bash npx drizzle-kit generate:sqlite ``` Then you need to import `migrations.js` file in your `App.tsx` file from `./drizzle` folder and use hook `useMigrations` or `migrate` function. ```tsx import { drizzle } from "drizzle-orm/expo-sqlite"; import { openDatabaseSync } from "expo-sqlite/next"; import { useMigrations } from 'drizzle-orm/expo-sqlite/migrator'; import migrations from './drizzle/migrations'; const expoDb = openDatabaseSync("db.db"); const db = drizzle(expoDb); export default function App() { const { success, error } = useMigrations(db, migrations); if (error) { return ( Migration error: {error.message} ); } if (!success) { return ( Migration is in progress... ); } return ...your application component; } ``` ================================================ FILE: changelogs/drizzle-orm/0.29.3.md ================================================ - fix: make expo peer dependencies optional #1714 ================================================ FILE: changelogs/drizzle-orm/0.29.4.md ================================================ ## New Features ### 🎉 **Neon HTTP Batch** For more info you can check [Neon docs](https://neon.tech/docs/serverless/serverless-driver#issue-multiple-queries-with-the-transaction-function) **Example** ```ts const batchResponse: BatchType = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id, }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), db.query.usersTable.findFirst({}), ]); ``` ```ts type BatchType = [ { id: number; }[], NeonHttpQueryResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; } | undefined, ]; ``` ## Improvements Thanks to the `database-js` and `PlanetScale` teams, we have updated the default behavior and instances of `database-js`. As suggested by the `database-js` core team, you should use the `Client` instance instead of `connect()`: ```typescript import { Client } from '@planetscale/database'; import { drizzle } from 'drizzle-orm/planetscale-serverless'; // create the connection const client = new Client({ host: process.env['DATABASE_HOST'], username: process.env['DATABASE_USERNAME'], password: process.env['DATABASE_PASSWORD'], }); const db = drizzle(client); ``` > Warning: In this version, there are no breaking changes, but starting from version `0.30.0`, you will encounter an error if you attempt to use anything other than a `Client` instance. > > We suggest starting to change connections to PlanetScale now to prevent any runtime errors in the future. Previously our docs stated to use `connect()` and only this function was can be passed to drizzle. In this realase we are adding support for `new Client()` and deprecating `connect()`, by suggesting from `database-js` team. In this release you will see a `warning` when trying to pass `connect()` function result: **Warning text** ```mdx Warning: You need to pass an instance of Client: import { Client } from "@planetscale/database"; const client = new Client({ host: process.env["DATABASE_HOST"], username: process.env["DATABASE_USERNAME"], password: process.env["DATABASE_PASSWORD"], }); const db = drizzle(client); Starting from version 0.30.0, you will encounter an error if you attempt to use anything other than a Client instance. Please make the necessary changes now to prevent any runtime errors in the future ``` ================================================ FILE: changelogs/drizzle-orm/0.29.5.md ================================================ ## New Features ### 🎉 WITH UPDATE, WITH DELETE, WITH INSERT - thanks @L-Mario564 You can now use `WITH` statements with [INSERT](https://orm.drizzle.team/docs/insert#with-insert-clause), [UPDATE](https://orm.drizzle.team/docs/update#with-update-clause) and [DELETE](https://orm.drizzle.team/docs/delete#with-delete-clause) statements Usage examples ```ts const averageAmount = db.$with('average_amount').as( db.select({ value: sql`avg(${orders.amount})`.as('value') }).from(orders), ); const result = await db .with(averageAmount) .delete(orders) .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) .returning({ id: orders.id, }); ``` Generated SQL: ```sql with "average_amount" as (select avg("amount") as "value" from "orders") delete from "orders" where "orders"."amount" > (select * from "average_amount") returning "id" ``` For more examples for all statements, check docs: - [with insert docs](https://orm.drizzle.team/docs/insert#with-insert-clause) - [with update docs](https://orm.drizzle.team/docs/update#with-update-clause) - [with delete docs](https://orm.drizzle.team/docs/delete#with-delete-clause) ### 🎉 Possibility to specify custom schema and custom name for migrations table - thanks @g3r4n - **Custom table for migrations** By default, all information about executed migrations will be stored in the database inside the `__drizzle_migrations` table, and for PostgreSQL, inside the `drizzle` schema. However, you can configure where to store those records. To add a custom table name for migrations stored inside your database, you should use the `migrationsTable` option Usage example ```ts await migrate(db, { migrationsFolder: './drizzle', migrationsTable: 'my_migrations', }); ``` - **Custom schema for migrations** > Works only with PostgreSQL databases To add a custom schema name for migrations stored inside your database, you should use the `migrationsSchema` option Usage example ```ts await migrate(db, { migrationsFolder: './drizzle', migrationsSchema: 'custom', }); ``` ### 🎉 SQLite Proxy batch and Relational Queries support - You can now use `.query.findFirst` and `.query.findMany` syntax with sqlite proxy driver - SQLite Proxy supports batch requests, the same as it's done for all other drivers. Check full [docs](https://orm.drizzle.team/docs/batch-api) You will need to specify a specific callback for batch queries and handle requests to proxy server: ```ts import { drizzle } from 'drizzle-orm/sqlite-proxy'; type ResponseType = { rows: any[][] | any[] }[]; const db = drizzle( async (sql, params, method) => { // single query logic }, // new batch callback async ( queries: { sql: string; params: any[]; method: 'all' | 'run' | 'get' | 'values'; }[], ) => { try { const result: ResponseType = await axios.post( 'http://localhost:3000/batch', { queries }, ); return result; } catch (e: any) { console.error('Error from sqlite proxy server:', e); throw e; } }, ); ``` And then you can use `db.batch([])` method, that will proxy all queries > Response from the batch should be an array of raw values (an array within an array), in the same order as they were sent to the proxy server ================================================ FILE: changelogs/drizzle-orm/0.30.0.md ================================================ ## Breaking Changes The Postgres timestamp mapping has been changed to align all drivers with the same behavior. ❗ We've modified the `postgres.js` driver instance to always return strings for dates, and then Drizzle will provide you with either strings of mapped dates, depending on the selected `mode`. The only issue you may encounter is that once you provide the `postgres.js`` driver instance inside Drizzle, the behavior of this object will change for dates, which will always be strings. We've made this change as a minor release, just as a warning, that: - If you were using timestamps and were waiting for a specific response, the behavior will now be changed. When mapping to the driver, we will always use `.toISOString` for both timestamps with timezone and without timezone. - If you were using the `postgres.js` driver outside of Drizzle, all `postgres.js` clients passed to Drizzle will have mutated behavior for dates. All dates will be strings in the response. Parsers that were changed for `postgres.js`. ```ts const transparentParser = (val: any) => val; // Override postgres.js default date parsers: https://github.com/porsager/postgres/discussions/761 for (const type of ['1184', '1082', '1083', '1114']) { client.options.parsers[type as any] = transparentParser; client.options.serializers[type as any] = transparentParser; } ``` Ideally, as is the case with almost all other drivers, we should have the possibility to mutate mappings on a per-query basis, which means that the driver client won't be mutated. We will be reaching out to the creator of the `postgres.js` library to inquire about the possibility of specifying per-query mapping interceptors and making this flow even better for all users. If we've overlooked this capability and it is already available with `postgres.js``, please ping us in our Discord! A few more references for timestamps without and with timezones can be found in our [docs](http://orm.drizzle.team/docs/column-types/pg#timestamp) ## Bug fixed in this release - [BUG]: timestamp with mode string is returned as Date object instead of string - #806 - [BUG]: Dates are always dates #971 - [BUG]: Inconsistencies when working with timestamps and corresponding datetime objects in javascript. #1176 - [BUG]: timestamp columns showing string type, however actually returning a Date object. #1185 - [BUG]: Wrong data type for postgres date colum #1407 - [BUG]: invalid timestamp conversion when using PostgreSQL with TimeZone set to UTC #1587 - [BUG]: Postgres insert into timestamp with time zone removes milliseconds #1061 - [BUG]: update timestamp field (using AWS Data API) #1164 - [BUG]: Invalid date from relational queries #895 ================================================ FILE: changelogs/drizzle-orm/0.30.1.md ================================================ ## New Features ### 🎉 OP-SQLite driver Support Usage Example ```ts import { open } from '@op-engineering/op-sqlite'; import { drizzle } from 'drizzle-orm/op-sqlite'; const opsqlite = open({ name: 'myDB', }); const db = drizzle(opsqlite); await db.select().from(users); ``` For more usage and setup details, please check our [op-sqlite docs](http://orm.drizzle.team/docs/get-started-sqlite#op-sqlite) ### Bug fixes - Migration hook fixed for Expo driver ================================================ FILE: changelogs/drizzle-orm/0.30.10.md ================================================ ## New Features ### 🎉 `.if()` function added to all WHERE expressions #### Select all users after cursors if a cursor value was provided ```ts function getUsersAfter(cursor?: number) { return db.select().from(users).where( gt(users.id, cursor).if(cursor) ); } ``` ## Bug Fixes - Fixed internal mappings for sessions `.all`, `.values`, `.execute` functions in AWS DataAPI ================================================ FILE: changelogs/drizzle-orm/0.30.2.md ================================================ ## Improvements LibSQL migrations have been updated to utilize batch execution instead of transactions. As stated in the [documentation](https://docs.turso.tech/sdk/ts/reference#batch-transactions), LibSQL now supports batch operations > A batch consists of multiple SQL statements executed sequentially within an implicit transaction. The backend handles the transaction: success commits all changes, while any failure results in a full rollback with no modifications. ## Bug fixed - [Sqlite] Fix findFirst query for bun:sqlite #1885 - thanks @shaileshaanand ================================================ FILE: changelogs/drizzle-orm/0.30.3.md ================================================ - 🎉 Added raw query support (`db.execute(...)`) to batch API in Neon HTTP driver - 🐛 Fixed `@neondatabase/serverless` HTTP driver types issue (#1945, neondatabase/serverless#66) - 🐛 Fixed sqlite-proxy driver `.run()` result ================================================ FILE: changelogs/drizzle-orm/0.30.4.md ================================================ ## New Features ### 🎉 xata-http driver support According their **[official website](https://xata.io)**, Xata is a Postgres data platform with a focus on reliability, scalability, and developer experience. The Xata Postgres service is currently in beta, please see the [Xata docs](https://xata.io/docs/postgres) on how to enable it in your account. Drizzle ORM natively supports both the `xata` driver with `drizzle-orm/xata` package and the **[`postgres`](#postgresjs)** or **[`pg`](#node-postgres)** drivers for accessing a Xata Postgres database. The following example use the Xata generated client, which you obtain by running the [xata init](https://xata.io/docs/getting-started/installation) CLI command. ```bash pnpm add drizzle-orm @xata.io/client ``` ```ts import { drizzle } from 'drizzle-orm/xata-http'; import { getXataClient } from './xata'; // Generated client const xata = getXataClient(); const db = drizzle(xata); const result = await db.select().from(...); ``` You can also connect to Xata using `pg` or `postgres.js` drivers ================================================ FILE: changelogs/drizzle-orm/0.30.5.md ================================================ ## New Features ### 🎉 `$onUpdate` functionality for PostgreSQL, MySQL and SQLite Adds a dynamic update value to the column. The function will be called when the row is updated, and the returned value will be used as the column value if none is provided. If no `default` (or `$defaultFn`) value is provided, the function will be called when the row is inserted as well, and the returned value will be used as the column value. > Note: This value does not affect the `drizzle-kit` behavior, it is only used at runtime in `drizzle-orm`. ```ts const usersOnUpdate = pgTable('users_on_update', { id: serial('id').primaryKey(), name: text('name').notNull(), updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), updatedAt: timestamp('updated_at', { mode: 'date', precision: 3 }).$onUpdate(() => new Date()), alwaysNull: text('always_null').$type().$onUpdate(() => null), }); ``` ## Fixes - [BUG]: insertions on columns with the smallserial datatype are not optional - #1848 Thanks @Angelelz and @gabrielDonnantuoni! ================================================ FILE: changelogs/drizzle-orm/0.30.6.md ================================================ ## New Features ### 🎉 PGlite driver Support PGlite is a WASM Postgres build packaged into a TypeScript client library that enables you to run Postgres in the browser, Node.js and Bun, with no need to install any other dependencies. It is only 2.6mb gzipped. It can be used as an ephemeral in-memory database, or with persistence either to the file system (Node/Bun) or indexedDB (Browser). Unlike previous "Postgres in the browser" projects, PGlite does not use a Linux virtual machine - it is simply Postgres in WASM. Usage Example ```ts import { PGlite } from '@electric-sql/pglite'; import { drizzle } from 'drizzle-orm/pglite'; // In-memory Postgres const client = new PGlite(); const db = drizzle(client); await db.select().from(users); ``` --- There are currently 2 limitations, that should be fixed on Pglite side: - [Attempting to refresh a materialised view throws error](https://github.com/electric-sql/pglite/issues/63) - [Attempting to SET TIME ZONE throws error](https://github.com/electric-sql/pglite/issues/62) ================================================ FILE: changelogs/drizzle-orm/0.30.7-preview.md ================================================ - 🎉 Added custom schema support to enums in Postgres: ```ts import { pgSchema } from 'drizzle-orm/pg-core'; const mySchema = pgSchema('mySchema'); const colors = mySchema.enum('colors', ['red', 'green', 'blue']); ``` - 🐛 Split `where` clause in Postgres `.onConflictDoUpdate` method into `setWhere` and `targetWhere` clauses, to support both `where` cases in `on conflict ...` clause (#1628, #1302) - 🐛 Fix query generation for `where` clause in Postgres `.onConflictDoNothing` method, as it was placed in a wrong spot (#1628) ================================================ FILE: changelogs/drizzle-orm/0.30.7.md ================================================ ## Bug fixes - Add mappings for `@vercel/postgres` package - Fix interval mapping for `neon` drivers - #1542 ================================================ FILE: changelogs/drizzle-orm/0.30.8.md ================================================ - 🎉 Added custom schema support to enums in Postgres (fixes #669 via #2048): ```ts import { pgSchema } from 'drizzle-orm/pg-core'; const mySchema = pgSchema('mySchema'); const colors = mySchema.enum('colors', ['red', 'green', 'blue']); ``` - 🎉 Changed D1 `migrate()` function to use batch API (#2137) - 🐛 Split `where` clause in Postgres `.onConflictDoUpdate` method into `setWhere` and `targetWhere` clauses, to support both `where` cases in `on conflict ...` clause (fixes #1628, #1302 via #2056) - 🐛 Fixed query generation for `where` clause in Postgres `.onConflictDoNothing` method, as it was placed in a wrong spot (fixes #1628 via #2056) - 🐛 Fixed multiple issues with AWS Data API driver (fixes #1931, #1932, #1934, #1936 via #2119) - 🐛 Fix inserting and updating array values in AWS Data API (fixes #1912 via #1911) Thanks @hugo082 and @livingforjesus! ================================================ FILE: changelogs/drizzle-orm/0.30.9.md ================================================ - 🐛 Fixed migrator in AWS Data API - Added `setWhere` and `targetWhere` fields to `.onConflictDoUpdate()` config in SQLite instead of single `where` field - 🛠️ Added schema information to Drizzle instances via `db._.fullSchema` ================================================ FILE: changelogs/drizzle-orm/0.31.0-beta.md ================================================ ## Breaking changes ### PostgreSQL indexes API was changed The previous Drizzle+PostgreSQL indexes API was incorrect and was not aligned with the PostgreSQL documentation. The good thing is that it was not used in queries, and drizzle-kit didn't support all properties for indexes. This means we can now change the API to the correct one and provide full support for it in drizzle-kit Previous API - No way to define SQL expressions inside `.on`. - `.using` and `.on` in our case are the same thing, so the API is incorrect here. - `.asc()`, `.desc()`, `.nullsFirst()`, and `.nullsLast()` should be specified for each column or expression on indexes, but not on an index itself. ```ts // Index declaration reference index('name') .on(table.column1, table.column2, ...) or .onOnly(table.column1, table.column2, ...) .concurrently() .using(sql``) // sql expression .asc() or .desc() .nullsFirst() or .nullsLast() .where(sql``) // sql expression ``` Current API ```ts // First example, with `.on()` index('name') .on(table.column1.asc(), table.column2.nullsFirst(), ...) or .onOnly(table.column1.desc().nullsLast(), table.column2, ...) .concurrently() .where(sql``) .with({ fillfactor: '70' }) // Second Example, with `.using()` index('name') .using('btree', table.column1.asc(), sql`lower(${table.column2})`, table.column1.op('text_ops')) .where(sql``) // sql expression .with({ fillfactor: '70' }) ``` ## New Features ### 🎉 "pg_vector" extension support > There is no specific code to create an extension inside the Drizzle schema. We assume that if you are using vector types, indexes, and queries, you have a PostgreSQL database with the `pg_vector` extension installed. You can now specify indexes for `pg_vector` and utilize `pg_vector` functions for querying, ordering, etc. Let's take a few examples of `pg_vector` indexes from the `pg_vector` docs and translate them to Drizzle #### L2 distance, Inner product and Cosine distance ```ts // CREATE INDEX ON items USING hnsw (embedding vector_l2_ops); // CREATE INDEX ON items USING hnsw (embedding vector_ip_ops); // CREATE INDEX ON items USING hnsw (embedding vector_cosine_ops); const table = pgTable('items', { embedding: vector('embedding', { dimensions: 3 }) }, (table) => ({ l2: index('l2_index').using('hnsw', table.embedding.op('vector_l2_ops')) ip: index('ip_index').using('hnsw', table.embedding.op('vector_ip_ops')) cosine: index('cosine_index').using('hnsw', table.embedding.op('vector_cosine_ops')) })) ``` #### L1 distance, Hamming distance and Jaccard distance - added in pg_vector 0.7.0 version ```ts // CREATE INDEX ON items USING hnsw (embedding vector_l1_ops); // CREATE INDEX ON items USING hnsw (embedding bit_hamming_ops); // CREATE INDEX ON items USING hnsw (embedding bit_jaccard_ops); const table = pgTable('table', { embedding: vector('embedding', { dimensions: 3 }) }, (table) => ({ l1: index('l1_index').using('hnsw', table.embedding.op('vector_l1_ops')) hamming: index('hamming_index').using('hnsw', table.embedding.op('bit_hamming_ops')) bit: index('bit_jaccard_index').using('hnsw', table.embedding.op('bit_jaccard_ops')) })) ``` For queries, you can use predefined functions for vectors or create custom ones using the SQL template operator. You can also use the following helpers: ```ts import { l2Distance, l1Distance, innerProduct, cosineDistance, hammingDistance, jaccardDistance } from 'drizzle-orm' l2Distance(table.column, [3, 1, 2]) // table.column <-> '[3, 1, 2]' l1Distance(table.column, [3, 1, 2]) // table.column <+> '[3, 1, 2]' innerProduct(table.column, [3, 1, 2]) // table.column <#> '[3, 1, 2]' cosineDistance(table.column, [3, 1, 2]) // table.column <=> '[3, 1, 2]' hammingDistance(table.column, '101') // table.column <~> '101' jaccardDistance(table.column, '101') // table.column <%> '101' ``` If `pg_vector` has some other functions to use, you can replicate implimentation from existing one we have. Here is how it can be done ```ts export function l2Distance( column: SQLWrapper | AnyColumn, value: number[] | string[] | TypedQueryBuilder | string, ): SQL { if (is(value, TypedQueryBuilder) || typeof value === 'string') { return sql`${column} <-> ${value}`; } return sql`${column} <-> ${JSON.stringify(value)}`; } ``` Name it as you wish and change the operator. This example allows for a numbers array, strings array, string, or even a select query. Feel free to create any other type you want or even contribute and submit a PR #### Examples Let's take a few examples of `pg_vector` queries from the `pg_vector` docs and translate them to Drizzle ```ts import { l2Distance } from 'drizzle-orm'; // SELECT * FROM items ORDER BY embedding <-> '[3,1,2]' LIMIT 5; db.select().from(items).orderBy(l2Distance(items.embedding, [3,1,2])) // SELECT embedding <-> '[3,1,2]' AS distance FROM items; db.select({ distance: l2Distance(items.embedding, [3,1,2]) }) // SELECT * FROM items ORDER BY embedding <-> (SELECT embedding FROM items WHERE id = 1) LIMIT 5; const subquery = db.select({ embedding: items.embedding }).from(items).where(eq(items.id, 1)); db.select().from(items).orderBy(l2Distance(items.embedding, subquery)).limit(5) // SELECT (embedding <#> '[3,1,2]') * -1 AS inner_product FROM items; db.select({ innerProduct: sql`(${maxInnerProduct(items.embedding, [3,1,2])}) * -1` }).from(items) // and more! ``` - 🛠️ Fixed RQB behavior for tables with same names in different schemas ================================================ FILE: changelogs/drizzle-orm/0.31.0.md ================================================ ## Breaking changes > Note: `drizzle-orm@0.31.0` can be used with `drizzle-kit@0.22.0` or higher. The same applies to Drizzle Kit. If you run a Drizzle Kit command, it will check and prompt you for an upgrade (if needed). You can check for Drizzle Kit updates. [below](#drizzle-kit-updates-drizzle-kit0220) ### PostgreSQL indexes API was changed The previous Drizzle+PostgreSQL indexes API was incorrect and was not aligned with the PostgreSQL documentation. The good thing is that it was not used in queries, and drizzle-kit didn't support all properties for indexes. This means we can now change the API to the correct one and provide full support for it in drizzle-kit Previous API - No way to define SQL expressions inside `.on`. - `.using` and `.on` in our case are the same thing, so the API is incorrect here. - `.asc()`, `.desc()`, `.nullsFirst()`, and `.nullsLast()` should be specified for each column or expression on indexes, but not on an index itself. ```ts // Index declaration reference index('name') .on(table.column1, table.column2, ...) or .onOnly(table.column1, table.column2, ...) .concurrently() .using(sql``) // sql expression .asc() or .desc() .nullsFirst() or .nullsLast() .where(sql``) // sql expression ``` Current API ```ts // First example, with `.on()` index('name') .on(table.column1.asc(), table.column2.nullsFirst(), ...) or .onOnly(table.column1.desc().nullsLast(), table.column2, ...) .concurrently() .where(sql``) .with({ fillfactor: '70' }) // Second Example, with `.using()` index('name') .using('btree', table.column1.asc(), sql`lower(${table.column2})`, table.column1.op('text_ops')) .where(sql``) // sql expression .with({ fillfactor: '70' }) ``` ## New Features ### 🎉 "pg_vector" extension support > There is no specific code to create an extension inside the Drizzle schema. We assume that if you are using vector types, indexes, and queries, you have a PostgreSQL database with the `pg_vector` extension installed. You can now specify indexes for `pg_vector` and utilize `pg_vector` functions for querying, ordering, etc. Let's take a few examples of `pg_vector` indexes from the `pg_vector` docs and translate them to Drizzle #### L2 distance, Inner product and Cosine distance ```ts // CREATE INDEX ON items USING hnsw (embedding vector_l2_ops); // CREATE INDEX ON items USING hnsw (embedding vector_ip_ops); // CREATE INDEX ON items USING hnsw (embedding vector_cosine_ops); const table = pgTable('items', { embedding: vector('embedding', { dimensions: 3 }) }, (table) => ({ l2: index('l2_index').using('hnsw', table.embedding.op('vector_l2_ops')) ip: index('ip_index').using('hnsw', table.embedding.op('vector_ip_ops')) cosine: index('cosine_index').using('hnsw', table.embedding.op('vector_cosine_ops')) })) ``` #### L1 distance, Hamming distance and Jaccard distance - added in pg_vector 0.7.0 version ```ts // CREATE INDEX ON items USING hnsw (embedding vector_l1_ops); // CREATE INDEX ON items USING hnsw (embedding bit_hamming_ops); // CREATE INDEX ON items USING hnsw (embedding bit_jaccard_ops); const table = pgTable('table', { embedding: vector('embedding', { dimensions: 3 }) }, (table) => ({ l1: index('l1_index').using('hnsw', table.embedding.op('vector_l1_ops')) hamming: index('hamming_index').using('hnsw', table.embedding.op('bit_hamming_ops')) bit: index('bit_jaccard_index').using('hnsw', table.embedding.op('bit_jaccard_ops')) })) ``` For queries, you can use predefined functions for vectors or create custom ones using the SQL template operator. You can also use the following helpers: ```ts import { l2Distance, l1Distance, innerProduct, cosineDistance, hammingDistance, jaccardDistance } from 'drizzle-orm' l2Distance(table.column, [3, 1, 2]) // table.column <-> '[3, 1, 2]' l1Distance(table.column, [3, 1, 2]) // table.column <+> '[3, 1, 2]' innerProduct(table.column, [3, 1, 2]) // table.column <#> '[3, 1, 2]' cosineDistance(table.column, [3, 1, 2]) // table.column <=> '[3, 1, 2]' hammingDistance(table.column, '101') // table.column <~> '101' jaccardDistance(table.column, '101') // table.column <%> '101' ``` If `pg_vector` has some other functions to use, you can replicate implimentation from existing one we have. Here is how it can be done ```ts export function l2Distance( column: SQLWrapper | AnyColumn, value: number[] | string[] | TypedQueryBuilder | string, ): SQL { if (is(value, TypedQueryBuilder) || typeof value === 'string') { return sql`${column} <-> ${value}`; } return sql`${column} <-> ${JSON.stringify(value)}`; } ``` Name it as you wish and change the operator. This example allows for a numbers array, strings array, string, or even a select query. Feel free to create any other type you want or even contribute and submit a PR #### Examples Let's take a few examples of `pg_vector` queries from the `pg_vector` docs and translate them to Drizzle ```ts import { l2Distance } from 'drizzle-orm'; // SELECT * FROM items ORDER BY embedding <-> '[3,1,2]' LIMIT 5; db.select().from(items).orderBy(l2Distance(items.embedding, [3,1,2])) // SELECT embedding <-> '[3,1,2]' AS distance FROM items; db.select({ distance: l2Distance(items.embedding, [3,1,2]) }) // SELECT * FROM items ORDER BY embedding <-> (SELECT embedding FROM items WHERE id = 1) LIMIT 5; const subquery = db.select({ embedding: items.embedding }).from(items).where(eq(items.id, 1)); db.select().from(items).orderBy(l2Distance(items.embedding, subquery)).limit(5) // SELECT (embedding <#> '[3,1,2]') * -1 AS inner_product FROM items; db.select({ innerProduct: sql`(${maxInnerProduct(items.embedding, [3,1,2])}) * -1` }).from(items) // and more! ``` ## 🎉 New PostgreSQL types: `point`, `line` You can now use `point` and `line` from [PostgreSQL Geometric Types](https://www.postgresql.org/docs/current/datatype-geometric.html) Type `point` has 2 modes for mappings from the database: `tuple` and `xy`. - `tuple` will be accepted for insert and mapped on select to a tuple. So, the database Point(1,2) will be typed as [1,2] with drizzle. - `xy` will be accepted for insert and mapped on select to an object with x, y coordinates. So, the database Point(1,2) will be typed as `{ x: 1, y: 2 }` with drizzle ```ts const items = pgTable('items', { point: point('point'), pointObj: point('point_xy', { mode: 'xy' }), }); ``` Type `line` has 2 modes for mappings from the database: `tuple` and `abc`. - `tuple` will be accepted for insert and mapped on select to a tuple. So, the database Line{1,2,3} will be typed as [1,2,3] with drizzle. - `abc` will be accepted for insert and mapped on select to an object with a, b, and c constants from the equation `Ax + By + C = 0`. So, the database Line{1,2,3} will be typed as `{ a: 1, b: 2, c: 3 }` with drizzle. ```ts const items = pgTable('items', { line: line('line'), lineObj: point('line_abc', { mode: 'abc' }), }); ``` ## 🎉 Basic "postgis" extension support > There is no specific code to create an extension inside the Drizzle schema. We assume that if you are using postgis types, indexes, and queries, you have a PostgreSQL database with the `postgis` extension installed. `geometry` type from postgis extension: ```ts const items = pgTable('items', { geo: geometry('geo', { type: 'point' }), geoObj: geometry('geo_obj', { type: 'point', mode: 'xy' }), geoSrid: geometry('geo_options', { type: 'point', mode: 'xy', srid: 4000 }), }); ``` **mode** Type `geometry` has 2 modes for mappings from the database: `tuple` and `xy`. - `tuple` will be accepted for insert and mapped on select to a tuple. So, the database geometry will be typed as [1,2] with drizzle. - `xy` will be accepted for insert and mapped on select to an object with x, y coordinates. So, the database geometry will be typed as `{ x: 1, y: 2 }` with drizzle **type** The current release has a predefined type: `point`, which is the `geometry(Point)` type in the PostgreSQL PostGIS extension. You can specify any string there if you want to use some other type # Drizzle Kit updates: `drizzle-kit@0.22.0` > Release notes here are partially duplicated from [drizzle-kit@0.22.0]() ## New Features ### 🎉 Support for new types Drizzle Kit can now handle: - `point` and `line` from PostgreSQL - `vector` from the PostgreSQL `pg_vector` extension - `geometry` from the PostgreSQL `PostGIS` extension ### 🎉 New param in drizzle.config - `extensionsFilters` The PostGIS extension creates a few internal tables in the `public` schema. This means that if you have a database with the PostGIS extension and use `push` or `introspect`, all those tables will be included in `diff` operations. In this case, you would need to specify `tablesFilter`, find all tables created by the extension, and list them in this parameter. We have addressed this issue so that you won't need to take all these steps. Simply specify `extensionsFilters` with the name of the extension used, and Drizzle will skip all the necessary tables. Currently, we only support the `postgis` option, but we plan to add more extensions if they create tables in the `public` schema. The `postgis` option will skip the `geography_columns`, `geometry_columns`, and `spatial_ref_sys` tables ```ts import { defineConfig } from 'drizzle-kit' export default defaultConfig({ dialect: "postgresql", extensionsFilters: ["postgis"], }) ``` ## Improvements ### Update zod schemas for database credentials and write tests to all the positive/negative cases - support full set of SSL params in kit config, provide types from node:tls connection ```ts import { defineConfig } from 'drizzle-kit' export default defaultConfig({ dialect: "postgresql", dbCredentials: { ssl: true, //"require" | "allow" | "prefer" | "verify-full" | options from node:tls } }) ``` ```ts import { defineConfig } from 'drizzle-kit' export default defaultConfig({ dialect: "mysql", dbCredentials: { ssl: "", // string | SslOptions (ssl options from mysql2 package) } }) ``` ### Normilized SQLite urls for `libsql` and `better-sqlite3` drivers Those drivers have different file path patterns, and Drizzle Kit will accept both and create a proper file path format for each ### Updated MySQL and SQLite index-as-expression behavior In this release MySQL and SQLite will properly map expressions into SQL query. Expressions won't be escaped in string but columns will be ```ts export const users = sqliteTable( 'users', { id: integer('id').primaryKey(), email: text('email').notNull(), }, (table) => ({ emailUniqueIndex: uniqueIndex('emailUniqueIndex').on(sql`lower(${table.email})`), }), ); ``` ```sql -- before CREATE UNIQUE INDEX `emailUniqueIndex` ON `users` (`lower("users"."email")`); -- now CREATE UNIQUE INDEX `emailUniqueIndex` ON `users` (lower("email")); ``` ## Bug Fixes - [BUG]: multiple constraints not added (only the first one is generated) - [#2341](https://github.com/drizzle-team/drizzle-orm/issues/2341) - Drizzle Studio: Error: Connection terminated unexpectedly - [#435](https://github.com/drizzle-team/drizzle-kit-mirror/issues/435) - Unable to run sqlite migrations local - [#432](https://github.com/drizzle-team/drizzle-kit-mirror/issues/432) - error: unknown option '--config' - [#423](https://github.com/drizzle-team/drizzle-kit-mirror/issues/423) ## How `push` and `generate` works for indexes ### Limitations #### You should specify a name for your index manually if you have an index on at least one expression Example ```ts index().on(table.id, table.email) // will work well and name will be autogeneretaed index('my_name').on(table.id, table.email) // will work well // but index().on(sql`lower(${table.email})`) // error index('my_name').on(sql`lower(${table.email})`) // will work well ``` #### Push won't generate statements if these fields(list below) were changed in an existing index: - expressions inside `.on()` and `.using()` - `.where()` statements - operator classes `.op()` on columns If you are using `push` workflows and want to change these fields in the index, you would need to: - Comment out the index - Push - Uncomment the index and change those fields - Push again For the `generate` command, `drizzle-kit` will be triggered by any changes in the index for any property in the new drizzle indexes API, so there are no limitations here. ================================================ FILE: changelogs/drizzle-orm/0.31.1.md ================================================ # New Features ## Live Queries 🎉 As of `v0.31.1` Drizzle ORM now has native support for Expo SQLite Live Queries! We've implemented a native `useLiveQuery` React Hook which observes necessary database changes and automatically re-runs database queries. It works with both SQL-like and Drizzle Queries: ```tsx import { useLiveQuery, drizzle } from 'drizzle-orm/expo-sqlite'; import { openDatabaseSync } from 'expo-sqlite/next'; import { users } from './schema'; import { Text } from 'react-native'; const expo = openDatabaseSync('db.db'); const db = drizzle(expo); const App = () => { // Re-renders automatically when data changes const { data } = useLiveQuery(db.select().from(users)); // const { data, error, updatedAt } = useLiveQuery(db.query.users.findFirst()); // const { data, error, updatedAt } = useLiveQuery(db.query.users.findMany()); return {JSON.stringify(data)}; }; export default App; ``` We've intentionally not changed the API of ORM itself to stay with conventional React Hook API, so we have `useLiveQuery(databaseQuery)` as opposed to `db.select().from(users).useLive()` or `db.query.users.useFindMany()` We've also decided to provide `data`, `error` and `updatedAt` fields as a result of hook for concise explicit error handling following practices of `React Query` and `Electric SQL` ================================================ FILE: changelogs/drizzle-orm/0.31.2.md ================================================ - 🎉 Added support for TiDB Cloud Serverless driver: ```ts import { connect } from '@tidbcloud/serverless'; import { drizzle } from 'drizzle-orm/tidb-serverless'; const client = connect({ url: '...' }); const db = drizzle(client); await db.select().from(...); ``` ================================================ FILE: changelogs/drizzle-orm/0.31.3.md ================================================ ### Bug fixed - 🛠️ Fixed RQB behavior for tables with same names in different schemas - 🛠️ Fixed [BUG]: Mismatched type hints when using RDS Data API - #2097 ### New Prisma-Drizzle extension ```ts import { PrismaClient } from '@prisma/client'; import { drizzle } from 'drizzle-orm/prisma/pg'; import { User } from './drizzle'; const prisma = new PrismaClient().$extends(drizzle()); const users = await prisma.$drizzle.select().from(User); ``` For more info, check docs: https://orm.drizzle.team/docs/prisma ================================================ FILE: changelogs/drizzle-orm/0.31.4.md ================================================ - Mark prisma clients package as optional - thanks @Cherry ================================================ FILE: changelogs/drizzle-orm/0.32.0-beta.md ================================================ # Preview release for `drizzle-orm@0.32.0` and `drizzle-kit@0.23.0` > It's not mandatory to upgrade both packages, but if you want to use the new features in both queries and migrations, you will need to upgrade both packages ## New Features ### 🎉 PostgreSQL Sequences You can now specify sequences in Postgres within any schema you need and define all the available properties ##### **Example** ```ts import { pgSchema, pgSequence } from "drizzle-orm/pg-core"; // No params specified export const customSequence = pgSequence("name"); // Sequence with params export const customSequence = pgSequence("name", { startWith: 100, maxValue: 10000, minValue: 100, cycle: true, cache: 10, increment: 2 }); // Sequence in custom schema export const customSchema = pgSchema('custom_schema'); export const customSequence = customSchema.sequence("name"); ``` ### 🎉 PostgreSQL Identity Columns [Source](https://wiki.postgresql.org/wiki/Don%27t_Do_This#Don.27t_use_serial): As mentioned, the `serial` type in Postgres is outdated and should be deprecated. Ideally, you should not use it. `Identity columns` are the recommended way to specify sequences in your schema, which is why we are introducing the `identity columns` feature ##### **Example** ```ts import { pgTable, integer, text } from 'drizzle-orm/pg-core' export const ingredients = pgTable("ingredients", { id: integer("id").primaryKey().generatedAlwaysAsIdentity({ startWith: 1000 }), name: text("name").notNull(), description: text("description"), }); ``` You can specify all properties available for sequences in the `.generatedAlwaysAsIdentity()` function. Additionally, you can specify custom names for these sequences PostgreSQL docs [reference](https://www.postgresql.org/docs/current/sql-createtable.html#SQL-CREATETABLE-PARMS-GENERATED-IDENTITY). ### 🎉 PostgreSQL Generated Columns You can now specify generated columns on any column supported by PostgreSQL to use with generated columns ##### **Example** with generated column for `tsvector` > Note: we will add `tsVector` column type before latest release ```ts import { SQL, sql } from "drizzle-orm"; import { customType, index, integer, pgTable, text } from "drizzle-orm/pg-core"; const tsVector = customType<{ data: string }>({ dataType() { return "tsvector"; }, }); export const test = pgTable( "test", { id: integer("id").primaryKey().generatedAlwaysAsIdentity(), content: text("content"), contentSearch: tsVector("content_search", { dimensions: 3, }).generatedAlwaysAs( (): SQL => sql`to_tsvector('english', ${test.content})` ), }, (t) => ({ idx: index("idx_content_search").using("gin", t.contentSearch), }) ); ``` In case you don't need to reference any columns from your table, you can use just `sql` template or a `string` ```ts export const users = pgTable("users", { id: integer("id"), name: text("name"), generatedName: text("gen_name").generatedAlwaysAs(sql`hello world!`), generatedName1: text("gen_name1").generatedAlwaysAs("hello world!"), }), ``` ### 🎉 MySQL Generated Columns You can now specify generated columns on any column supported by MySQL to use with generated columns You can specify both `stored` and `virtual` options, for more info you can check [MySQL docs](https://dev.mysql.com/doc/refman/8.4/en/create-table-generated-columns.html) Also MySQL has a few limitation for such columns usage, which is described [here](https://dev.mysql.com/doc/refman/8.4/en/alter-table-generated-columns.html) Drizzle Kit will also have limitations for `push` command: 1. You can't change the generated constraint expression and type using `push`. Drizzle-kit will ignore this change. To make it work, you would need to `drop the column`, `push`, and then `add a column with a new expression`. This was done due to the complex mapping from the database side, where the schema expression will be modified on the database side and, on introspection, we will get a different string. We can't be sure if you changed this expression or if it was changed and formatted by the database. As long as these are generated columns and `push` is mostly used for prototyping on a local database, it should be fast to `drop` and `create` generated columns. Since these columns are `generated`, all the data will be restored 2. `generate` should have no limitations ##### **Example** ```ts export const users = mysqlTable("users", { id: int("id"), id2: int("id2"), name: text("name"), generatedName: text("gen_name").generatedAlwaysAs( (): SQL => sql`${schema2.users.name} || 'hello'`, { mode: "stored" } ), generatedName1: text("gen_name1").generatedAlwaysAs( (): SQL => sql`${schema2.users.name} || 'hello'`, { mode: "virtual" } ), }), ``` In case you don't need to reference any columns from your table, you can use just `sql` template or a `string` in `.generatedAlwaysAs()` ### 🎉 SQLite Generated Columns You can now specify generated columns on any column supported by SQLite to use with generated columns You can specify both `stored` and `virtual` options, for more info you can check [SQLite docs](https://www.sqlite.org/gencol.html) Also SQLite has a few limitation for such columns usage, which is described [here](https://www.sqlite.org/gencol.html) Drizzle Kit will also have limitations for `push` and `generate` command: 1. You can't change the generated constraint expression with the stored type in an existing table. You would need to delete this table and create it again. This is due to SQLite limitations for such actions. We will handle this case in future releases (it will involve the creation of a new table with data migration). 2. You can't add a `stored` generated expression to an existing column for the same reason as above. However, you can add a `virtual` expression to an existing column. 3. You can't change a `stored` generated expression in an existing column for the same reason as above. However, you can change a `virtual` expression. 4. You can't change the generated constraint type from `virtual` to `stored` for the same reason as above. However, you can change from `stored` to `virtual`. ## New Drizzle Kit features ### 🎉 Migrations support for all the new orm features PostgreSQL sequences, identity columns and generated columns for all dialects ### 🎉 New flag `--force` for `drizzle-kit push` You can auto-accept all data-loss statements using the push command. It's only available in CLI parameters. Make sure you always use it if you are fine with running data-loss statements on your database ### 🎉 New `migrations` flag `prefix` You can now customize migration file prefixes to make the format suitable for your migration tools: - `index` is the default type and will result in `0001_name.sql` file names; - `supabase` and `timestamp` are equal and will result in `20240627123900_name.sql` file names; - `unix` will result in unix seconds prefixes `1719481298_name.sql` file names; - `none` will omit the prefix completely; ##### **Example**: Supabase migrations format ```ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: "postgresql", migrations: { prefix: 'supabase' } }); ``` ================================================ FILE: changelogs/drizzle-orm/0.32.0.md ================================================ # Release notes for `drizzle-orm@0.32.0` and `drizzle-kit@0.23.0` > It's not mandatory to upgrade both packages, but if you want to use the new features in both queries and migrations, you will need to upgrade both packages ## New Features ### 🎉 MySQL `$returningId()` function MySQL itself doesn't have native support for `RETURNING` after using `INSERT`. There is only one way to do it for `primary keys` with `autoincrement` (or `serial`) types, where you can access `insertId` and `affectedRows` fields. We've prepared an automatic way for you to handle such cases with Drizzle and automatically receive all inserted IDs as separate objects ```ts import { boolean, int, text, mysqlTable } from 'drizzle-orm/mysql-core'; const usersTable = mysqlTable('users', { id: int('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), }); const result = await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]).$returningId(); // ^? { id: number }[] ``` Also with Drizzle, you can specify a `primary key` with `$default` function that will generate custom primary keys at runtime. We will also return those generated keys for you in the `$returningId()` call ```ts import { varchar, text, mysqlTable } from 'drizzle-orm/mysql-core'; import { createId } from '@paralleldrive/cuid2'; const usersTableDefFn = mysqlTable('users_default_fn', { customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(createId), name: text('name').notNull(), }); const result = await db.insert(usersTableDefFn).values([{ name: 'John' }, { name: 'John1' }]).$returningId(); // ^? { customId: string }[] ``` > If there is no primary keys -> type will be `{}[]` for such queries ### 🎉 PostgreSQL Sequences You can now specify sequences in Postgres within any schema you need and define all the available properties ##### **Example** ```ts import { pgSchema, pgSequence } from "drizzle-orm/pg-core"; // No params specified export const customSequence = pgSequence("name"); // Sequence with params export const customSequence = pgSequence("name", { startWith: 100, maxValue: 10000, minValue: 100, cycle: true, cache: 10, increment: 2 }); // Sequence in custom schema export const customSchema = pgSchema('custom_schema'); export const customSequence = customSchema.sequence("name"); ``` ### 🎉 PostgreSQL Identity Columns [Source](https://wiki.postgresql.org/wiki/Don%27t_Do_This#Don.27t_use_serial): As mentioned, the `serial` type in Postgres is outdated and should be deprecated. Ideally, you should not use it. `Identity columns` are the recommended way to specify sequences in your schema, which is why we are introducing the `identity columns` feature ##### **Example** ```ts import { pgTable, integer, text } from 'drizzle-orm/pg-core' export const ingredients = pgTable("ingredients", { id: integer("id").primaryKey().generatedAlwaysAsIdentity({ startWith: 1000 }), name: text("name").notNull(), description: text("description"), }); ``` You can specify all properties available for sequences in the `.generatedAlwaysAsIdentity()` function. Additionally, you can specify custom names for these sequences PostgreSQL docs [reference](https://www.postgresql.org/docs/current/sql-createtable.html#SQL-CREATETABLE-PARMS-GENERATED-IDENTITY). ### 🎉 PostgreSQL Generated Columns You can now specify generated columns on any column supported by PostgreSQL to use with generated columns ##### **Example** with generated column for `tsvector` > Note: we will add `tsVector` column type before latest release ```ts import { SQL, sql } from "drizzle-orm"; import { customType, index, integer, pgTable, text } from "drizzle-orm/pg-core"; const tsVector = customType<{ data: string }>({ dataType() { return "tsvector"; }, }); export const test = pgTable( "test", { id: integer("id").primaryKey().generatedAlwaysAsIdentity(), content: text("content"), contentSearch: tsVector("content_search", { dimensions: 3, }).generatedAlwaysAs( (): SQL => sql`to_tsvector('english', ${test.content})` ), }, (t) => ({ idx: index("idx_content_search").using("gin", t.contentSearch), }) ); ``` In case you don't need to reference any columns from your table, you can use just `sql` template or a `string` ```ts export const users = pgTable("users", { id: integer("id"), name: text("name"), generatedName: text("gen_name").generatedAlwaysAs(sql`hello world!`), generatedName1: text("gen_name1").generatedAlwaysAs("hello world!"), }), ``` ### 🎉 MySQL Generated Columns You can now specify generated columns on any column supported by MySQL to use with generated columns You can specify both `stored` and `virtual` options, for more info you can check [MySQL docs](https://dev.mysql.com/doc/refman/8.4/en/create-table-generated-columns.html) Also MySQL has a few limitation for such columns usage, which is described [here](https://dev.mysql.com/doc/refman/8.4/en/alter-table-generated-columns.html) Drizzle Kit will also have limitations for `push` command: 1. You can't change the generated constraint expression and type using `push`. Drizzle-kit will ignore this change. To make it work, you would need to `drop the column`, `push`, and then `add a column with a new expression`. This was done due to the complex mapping from the database side, where the schema expression will be modified on the database side and, on introspection, we will get a different string. We can't be sure if you changed this expression or if it was changed and formatted by the database. As long as these are generated columns and `push` is mostly used for prototyping on a local database, it should be fast to `drop` and `create` generated columns. Since these columns are `generated`, all the data will be restored 2. `generate` should have no limitations ##### **Example** ```ts export const users = mysqlTable("users", { id: int("id"), id2: int("id2"), name: text("name"), generatedName: text("gen_name").generatedAlwaysAs( (): SQL => sql`${schema2.users.name} || 'hello'`, { mode: "stored" } ), generatedName1: text("gen_name1").generatedAlwaysAs( (): SQL => sql`${schema2.users.name} || 'hello'`, { mode: "virtual" } ), }), ``` In case you don't need to reference any columns from your table, you can use just `sql` template or a `string` in `.generatedAlwaysAs()` ### 🎉 SQLite Generated Columns You can now specify generated columns on any column supported by SQLite to use with generated columns You can specify both `stored` and `virtual` options, for more info you can check [SQLite docs](https://www.sqlite.org/gencol.html) Also SQLite has a few limitation for such columns usage, which is described [here](https://www.sqlite.org/gencol.html) Drizzle Kit will also have limitations for `push` and `generate` command: 1. You can't change the generated constraint expression with the stored type in an existing table. You would need to delete this table and create it again. This is due to SQLite limitations for such actions. We will handle this case in future releases (it will involve the creation of a new table with data migration). 2. You can't add a `stored` generated expression to an existing column for the same reason as above. However, you can add a `virtual` expression to an existing column. 3. You can't change a `stored` generated expression in an existing column for the same reason as above. However, you can change a `virtual` expression. 4. You can't change the generated constraint type from `virtual` to `stored` for the same reason as above. However, you can change from `stored` to `virtual`. ## New Drizzle Kit features ### 🎉 Migrations support for all the new orm features PostgreSQL sequences, identity columns and generated columns for all dialects ### 🎉 New flag `--force` for `drizzle-kit push` You can auto-accept all data-loss statements using the push command. It's only available in CLI parameters. Make sure you always use it if you are fine with running data-loss statements on your database ### 🎉 New `migrations` flag `prefix` You can now customize migration file prefixes to make the format suitable for your migration tools: - `index` is the default type and will result in `0001_name.sql` file names; - `supabase` and `timestamp` are equal and will result in `20240627123900_name.sql` file names; - `unix` will result in unix seconds prefixes `1719481298_name.sql` file names; - `none` will omit the prefix completely; ##### **Example**: Supabase migrations format ```ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: "postgresql", migrations: { prefix: 'supabase' } }); ``` ================================================ FILE: changelogs/drizzle-orm/0.32.1.md ================================================ - Fix typings for indexes and allow creating indexes on 3+ columns mixing columns and expressions - thanks @lbguilherme! - Added support for "limit 0" in all dialects - closes [#2011](https://github.com/drizzle-team/drizzle-orm/issues/2011) - thanks @sillvva! - Make inArray and notInArray accept empty list, closes [#1295](https://github.com/drizzle-team/drizzle-orm/issues/1295) - thanks @RemiPeruto! - fix typo in lt typedoc - thanks @dalechyn! - fix wrong example in README.md - thanks @7flash! ================================================ FILE: changelogs/drizzle-orm/0.32.2.md ================================================ - Fix AWS Data API type hints bugs in RQB - Fix set transactions in MySQL bug - thanks @roguesherlock - Add forwaring dependencies within useLiveQuery, fixes [#2651](https://github.com/drizzle-team/drizzle-orm/issues/2651) - thanks @anstapol - Export additional types from SQLite package, like `AnySQLiteUpdate` - thanks @veloii ================================================ FILE: changelogs/drizzle-orm/0.33.0.md ================================================ ## Breaking changes (for some of postgres.js users) #### Bugs fixed for this breaking change - [Open [BUG]: jsonb always inserted as a json string when using postgres-js](https://github.com/drizzle-team/drizzle-orm/issues/724) - [[BUG]: jsonb type on postgres implement incorrectly](https://github.com/drizzle-team/drizzle-orm/issues/1511) > As we are doing with other drivers, we've changed the behavior of PostgreSQL-JS to pass raw JSON values, the same as you see them in the database. So if you are using the PostgreSQL-JS driver and passing data to Drizzle elsewhere, please check the new behavior of the client after it is passed to Drizzle. > We will update it to ensure it does not override driver behaviors, but this will be done as a complex task for everything in Drizzle in other releases If you were using `postgres-js` with `jsonb` fields, you might have seen stringified objects in your database, while drizzle insert and select operations were working as expected. You need to convert those fields from strings to actual JSON objects. To do this, you can use the following query to update your database: **if you are using jsonb:** ```sql update table_name set jsonb_column = (jsonb_column #>> '{}')::jsonb; ``` **if you are using json:** ```sql update table_name set json_column = (json_column #>> '{}')::json; ``` We've tested it in several cases, and it worked well, but only if all stringified objects are arrays or objects. If you have primitives like strings, numbers, booleans, etc., you can use this query to update all the fields **if you are using jsonb:** ```sql UPDATE table_name SET jsonb_column = CASE -- Convert to JSONB if it is a valid JSON object or array WHEN jsonb_column #>> '{}' LIKE '{%' OR jsonb_column #>> '{}' LIKE '[%' THEN (jsonb_column #>> '{}')::jsonb ELSE jsonb_column END WHERE jsonb_column IS NOT NULL; ``` **if you are using json:** ```sql UPDATE table_name SET json_column = CASE -- Convert to JSON if it is a valid JSON object or array WHEN json_column #>> '{}' LIKE '{%' OR json_column #>> '{}' LIKE '[%' THEN (json_column #>> '{}')::json ELSE json_column END WHERE json_column IS NOT NULL; ``` If nothing works for you and you are blocked, please reach out to me @AndriiSherman. I will try to help you! ## Bug Fixes - [[BUG]: boolean mode not working with prepared statements (bettersqlite)](https://github.com/drizzle-team/drizzle-orm/issues/2568) - thanks @veloii - [[BUG]: isTable helper function is not working](https://github.com/drizzle-team/drizzle-orm/issues/2672) - thanks @hajek-raven - [[BUG]: Documentation is outdated on inArray and notInArray Methods](https://github.com/drizzle-team/drizzle-orm/issues/2690) - thanks @RemiPeruto ================================================ FILE: changelogs/drizzle-orm/0.34.0.md ================================================ ## Breaking changes and migrate guide for Turso users If you are using Turso and libsql, you will need to upgrade your `drizzle.config` and `@libsql/client` package. 1. This version of drizzle-orm will only work with `@libsql/client@0.10.0` or higher if you are using the `migrate` function. For other use cases, you can continue using previous versions(But the suggestion is to upgrade) To install the latest version, use the command: ```bash npm i @libsql/client@latest ``` 2. Previously, we had a common `drizzle.config` for SQLite and Turso users, which allowed a shared strategy for both dialects. Starting with this release, we are introducing the turso dialect in drizzle-kit. We will evolve and improve Turso as a separate dialect with its own migration strategies. **Before** ```ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: "sqlite", schema: "./schema.ts", out: "./drizzle", dbCredentials: { url: "database.db", }, breakpoints: true, verbose: true, strict: true, }); ``` **After** ```ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: "turso", schema: "./schema.ts", out: "./drizzle", dbCredentials: { url: "database.db", }, breakpoints: true, verbose: true, strict: true, }); ``` If you are using only SQLite, you can use `dialect: "sqlite"` ## LibSQL/Turso and Sqlite migration updates ### SQLite "generate" and "push" statements updates Starting from this release, we will no longer generate comments like this: ```sql '/*\n SQLite does not support "Changing existing column type" out of the box, we do not generate automatic migration for that, so it has to be done manually' + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + '\n https://www.sqlite.org/lang_altertable.html' + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + '\n*/' ``` We will generate a set of statements, and you can decide if it's appropriate to create data-moving statements instead. Here is an example of the SQL file you'll receive now: ```sql PRAGMA foreign_keys=OFF; --> statement-breakpoint CREATE TABLE `__new_worker` ( `id` integer PRIMARY KEY NOT NULL, `name` text NOT NULL, `salary` text NOT NULL, `job_id` integer, FOREIGN KEY (`job_id`) REFERENCES `job`(`id`) ON UPDATE no action ON DELETE no action ); --> statement-breakpoint INSERT INTO `__new_worker`("id", "name", "salary", "job_id") SELECT "id", "name", "salary", "job_id" FROM `worker`; --> statement-breakpoint DROP TABLE `worker`; --> statement-breakpoint ALTER TABLE `__new_worker` RENAME TO `worker`; --> statement-breakpoint PRAGMA foreign_keys=ON; ``` ### LibSQL/Turso "generate" and "push" statements updates Since LibSQL supports more ALTER statements than SQLite, we can generate more statements without recreating your schema and moving all the data, which can be potentially dangerous for production environments. LibSQL and Turso will now have a separate dialect in the Drizzle config file, meaning that we will evolve Turso and LibSQL independently from SQLite and will aim to support as many features as Turso/LibSQL offer. With the updated LibSQL migration strategy, you will have the ability to: - **Change Data Type**: Set a new data type for existing columns. - **Set and Drop Default Values**: Add or remove default values for existing columns. - **Set and Drop NOT NULL**: Add or remove the NOT NULL constraint on existing columns. - **Add References to Existing Columns**: Add foreign key references to existing columns You can find more information in the [LibSQL documentation](https://github.com/tursodatabase/libsql/blob/main/libsql-sqlite3/doc/libsql_extensions.md#altering-columns) ### LIMITATIONS - Dropping or altering an index will cause table recreation. This is because LibSQL/Turso does not support dropping this type of index. ```sql CREATE TABLE `users` ( `id` integer NOT NULL, `name` integer, `age` integer PRIMARY KEY NOT NULL FOREIGN KEY (`name`) REFERENCES `users1`("id") ON UPDATE no action ON DELETE no action ); ``` - If the table has indexes, altering columns will cause table recreation. - Drizzle-Kit will drop the indexes, modify the columns, and then recreate the indexes. - Adding or dropping composite foreign keys is not supported and will cause table recreation ### NOTES - You can create a reference on any column type, but if you want to insert values, the referenced column must have a unique index or primary key. ```sql CREATE TABLE parent(a PRIMARY KEY, b UNIQUE, c, d, e, f); CREATE UNIQUE INDEX i1 ON parent(c, d); CREATE INDEX i2 ON parent(e); CREATE UNIQUE INDEX i3 ON parent(f COLLATE nocase); CREATE TABLE child1(f, g REFERENCES parent(a)); -- Ok CREATE TABLE child2(h, i REFERENCES parent(b)); -- Ok CREATE TABLE child3(j, k, FOREIGN KEY(j, k) REFERENCES parent(c, d)); -- Ok CREATE TABLE child4(l, m REFERENCES parent(e)); -- Error! CREATE TABLE child5(n, o REFERENCES parent(f)); -- Error! CREATE TABLE child6(p, q, FOREIGN KEY(p, q) REFERENCES parent(b, c)); -- Error! CREATE TABLE child7(r REFERENCES parent(c)); -- Error! ``` > **NOTE**: The foreign key for the table child5 is an error because, although the parent key column has a unique index, the index uses a different collating sequence. See more: https://www.sqlite.org/foreignkeys.html ## A new and easy way to start using drizzle Current and the only way to do, is to define client yourself and pass it to drizzle ```ts const client = new Pool({ url: '' }); drizzle(client, { logger: true }); ``` But we want to introduce you to a new API, which is a simplified method in addition to the existing one. Most clients will have a few options to connect, starting with the easiest and most common one, and allowing you to control your client connection as needed. Let's use `node-postgres` as an example, but the same pattern can be applied to all other clients ```ts // Finally, one import for all available clients and dialects! import { drizzle } from 'drizzle-orm' // Choose a client and use a connection URL — nothing else is needed! const db1 = await drizzle("node-postgres", process.env.POSTGRES_URL); // If you need to pass a logger, schema, or other configurations, you can use an object and specify the client-specific URL in the connection const db2 = await drizzle("node-postgres", { connection: process.env.POSTGRES_URL, logger: true }); // And finally, if you need to use full client/driver-specific types in connections, you can use a URL or host/port/etc. as an object inferred from the underlying client connection types const db3 = await drizzle("node-postgres", { connection: { connectionString: process.env.POSTGRES_URL, }, }); const db4 = await drizzle("node-postgres", { connection: { user: process.env.DB_USER, password: process.env.DB_PASSWORD, host: process.env.DB_HOST, port: process.env.DB_PORT, database: process.env.DB_NAME, ssl: true, }, }); ``` A few clients will have a slightly different API due to their specific behavior. Let's take a look at them: For `aws-data-api-pg`, Drizzle will require `resourceArn`, `database`, and `secretArn`, along with any other AWS Data API client types for the connection, such as credentials, region, etc. ```ts drizzle("aws-data-api-pg", { connection: { resourceArn: "", database: "", secretArn: "", }, }); ``` For `d1`, the CloudFlare Worker types as described in the [documentation](https://developers.cloudflare.com/d1/get-started/) here will be required. ```ts drizzle("d1", { connection: env.DB // CloudFlare Worker Types }) ``` For `vercel-postgres`, nothing is needed since Vercel automatically retrieves the `POSTGRES_URL` from the `.env` file. You can check this [documentation](https://vercel.com/docs/storage/vercel-postgres/quickstart) for more info ```ts drizzle("vercel-postgres") ``` > Note that the first example with the client is still available and not deprecated. You can use it if you don't want to await the drizzle object. The new way of defining drizzle is designed to make it easier to import from one place and get autocomplete for all the available clients ## Optional names for columns and callback in drizzle table We believe that schema definition in Drizzle is extremely powerful and aims to be as close to SQL as possible while adding more helper functions for JS runtime values. However, there are a few areas that could be improved, which we addressed in this release. These include: - Unnecessary database column names when TypeScript keys are essentially just copies of them - A callback that provides all column types available for a specific table. Let's look at an example with PostgreSQL (this applies to all the dialects supported by Drizzle) **Previously** ```ts import { boolean, pgTable, text, uuid } from "drizzle-orm/pg-core"; export const ingredients = pgTable("ingredients", { id: uuid("id").defaultRandom().primaryKey(), name: text("name").notNull(), description: text("description"), inStock: boolean("in_stock").default(true), }); ``` The previous table definition will still be valid in the new release, but it can be replaced with this instead ```ts import { pgTable } from "drizzle-orm/pg-core"; export const ingredients = pgTable("ingredients", (t) => ({ id: t.uuid().defaultRandom().primaryKey(), name: t.text().notNull(), description: t.text(), inStock: t.boolean("in_stock").default(true), })); ``` ## New `casing` param in `drizzle-orm` and `drizzle-kit` There are more improvements you can make to your schema definition. The most common way to name your variables in a database and in TypeScript code is usually `snake_case` in the database and `camelCase` in the code. For this case, in Drizzle, you can now define a naming strategy in your database to help Drizzle map column keys automatically. Let's take a table from the previous example and make it work with the new casing API in Drizzle Table can now become: ```ts import { pgTable } from "drizzle-orm/pg-core"; export const ingredients = pgTable("ingredients", (t) => ({ id: t.uuid().defaultRandom().primaryKey(), name: t.text().notNull(), description: t.text(), inStock: t.boolean().default(true), })); ``` As you can see, `inStock` doesn't have a database name alias, but by defining the casing configuration at the connection level, all queries will automatically map it to `snake_case` ```ts const db = await drizzle('node-postgres', { connection: '', casing: 'snake_case' }) ``` For `drizzle-kit` migrations generation you should also specify `casing` param in drizzle config, so you can be sure you casing strategy will be applied to drizzle-kit as well ```ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: "postgresql", schema: "./schema.ts", dbCredentials: { url: "postgresql://postgres:password@localhost:5432/db", }, casing: "snake_case", }); ``` ## New "count" API Before this release to count entities in a table, you would need to do this: ```ts const res = await db.select({ count: sql`count(*)` }).from(users); const count = res[0].count; ``` The new API will look like this: ```ts // how many users are in the database const count: number = await db.$count(users); // how many users with the name "Dan" are in the database const count: number = await db.$count(users, eq(name, "Dan")); ``` This can also work as a subquery and within relational queries ```ts const users = await db.select({ ...users, postsCount: db.$count(posts, eq(posts.authorId, users.id)) }); const users = await db.query.users.findMany({ extras: { postsCount: db.$count(posts, eq(posts.authorId, users.id)) } }) ``` ## Ability to execute raw strings instead of using SQL templates for raw queries Previously, you would have needed to do this to execute a raw query with Drizzle ```ts import { sql } from 'drizzle-orm' db.execute(sql`select * from ${users}`); // or db.execute(sql.raw(`select * from ${users}`)); ``` You can now do this as well ```ts db.execute('select * from users') ``` ================================================ FILE: changelogs/drizzle-orm/0.34.1.md ================================================ - Fixed dynamic imports for CJS and MJS in the `/connect` module ================================================ FILE: changelogs/drizzle-orm/0.35.0.md ================================================ # Important change after 0.34.0 release ## Updated the init Drizzle database API The API from version 0.34.0 turned out to be unusable and needs to be changed. You can read more about our decisions in [this discussion](https://github.com/drizzle-team/drizzle-orm/discussions/3097) If you still want to use the new API introduced in 0.34.0, which can create driver clients for you under the hood, you can now do so ```ts import { drizzle } from "drizzle-orm/node-postgres"; const db = drizzle(process.env.DATABASE_URL); // or const db = drizzle({ connection: process.env.DATABASE_URL }); const db = drizzle({ connection: { user: "...", password: "...", host: "...", port: 4321, db: "...", }, }); // if you need to pass logger or schema const db = drizzle({ connection: process.env.DATABASE_URL, logger: true, schema: schema, }); ``` in order to not introduce breaking change - we will still leave support for deprecated API until V1 release. It will degrade autocomplete performance in connection params due to `DatabaseDriver` | `ConnectionParams` types collision, but that's a decent compromise against breaking changes ```ts import { drizzle } from "drizzle-orm/node-postgres"; import { Pool } from "pg"; const client = new Pool({ connectionString: process.env.DATABASE_URL }); const db = drizzle(client); // deprecated but available // new version const db = drizzle({ client: client, }); ``` # New Features ## New .orderBy() and .limit() functions in update and delete statements SQLite and MySQL You now have more options for the `update` and `delete` query builders in MySQL and SQLite **Example** ```ts await db.update(usersTable).set({ verified: true }).limit(2).orderBy(asc(usersTable.name)); await db.delete(usersTable).where(eq(usersTable.verified, false)).limit(1).orderBy(asc(usersTable.name)); ``` ## New `drizzle.mock()` function There were cases where you didn't need to provide a driver to the Drizzle object, and this served as a workaround ```ts const db = drizzle({} as any) ``` Now you can do this using a mock function ```ts const db = drizzle.mock() ``` There is no valid production use case for this, but we used it in situations where we needed to check types, etc., without making actual database calls or dealing with driver creation. If anyone was using it, please switch to using mocks now # Internal updates - Upgraded TS in codebase to the version 5.6.3 # Bug fixes - [[BUG]: New $count API error with @neondatabase/serverless](https://github.com/drizzle-team/drizzle-orm/issues/3081) ================================================ FILE: changelogs/drizzle-orm/0.35.1.md ================================================ - Updated internal versions for the drizzle-kit and drizzle-orm packages. Changes were introduced in the last minor release, and you are required to upgrade both packages to ensure they work as expected ================================================ FILE: changelogs/drizzle-orm/0.35.2.md ================================================ - Fix issues with importing in several environments after updating the Drizzle driver implementation We've added approximately 240 tests to check the ESM and CJS builds for all the drivers we have. You can check them [here](https://github.com/drizzle-team/drizzle-orm/tree/main/integration-tests/js-tests/driver-init) - Fixed [[BUG]: Type Error in PgTransaction Missing $client Property After Upgrading to drizzle-orm@0.35.1](https://github.com/drizzle-team/drizzle-orm/issues/3140) - Fixed [[BUG]: New critical Build error drizzle 0.35.0 deploying on Cloudflare ](https://github.com/drizzle-team/drizzle-orm/issues/3137) ================================================ FILE: changelogs/drizzle-orm/0.35.3.md ================================================ # New LibSQL driver modules Drizzle now has native support for all `@libsql/client` driver variations: 1. `@libsql/client` - defaults to node import, automatically changes to web if target or platform is set for bundler, e.g. `esbuild --platform=browser` ```ts import { drizzle } from 'drizzle-orm/libsql'; const db = drizzle({ connection: { url: process.env.DATABASE_URL, authToken: process.env.DATABASE_AUTH_TOKEN }}); ``` 2. `@libsql/client/node` node compatible module, supports :memory:, file, wss, http and turso connection protocols ```ts import { drizzle } from 'drizzle-orm/libsql/node'; const db = drizzle({ connection: { url: process.env.DATABASE_URL, authToken: process.env.DATABASE_AUTH_TOKEN }}); ``` 3. `@libsql/client/web` module for fullstack web frameworks like next, nuxt, astro, etc. ```ts import { drizzle } from 'drizzle-orm/libsql/web'; const db = drizzle({ connection: { url: process.env.DATABASE_URL, authToken: process.env.DATABASE_AUTH_TOKEN }}); ``` 4. `@libsql/client/http` module for http and https connection protocols ```ts import { drizzle } from 'drizzle-orm/libsql/http'; const db = drizzle({ connection: { url: process.env.DATABASE_URL, authToken: process.env.DATABASE_AUTH_TOKEN }}); ``` 5. `@libsql/client/ws` module for ws and wss connection protocols ```ts import { drizzle } from 'drizzle-orm/libsql/ws'; const db = drizzle({ connection: { url: process.env.DATABASE_URL, authToken: process.env.DATABASE_AUTH_TOKEN }}); ``` 6. `@libsql/client/sqlite3` module for :memory: and file connection protocols ```ts import { drizzle } from 'drizzle-orm/libsql/wasm'; const db = drizzle({ connection: { url: process.env.DATABASE_URL, authToken: process.env.DATABASE_AUTH_TOKEN }}); ``` 7. `@libsql/client-wasm` Separate experimental package for WASM ```ts import { drizzle } from 'drizzle-orm/libsql'; const db = drizzle({ connection: { url: process.env.DATABASE_URL, authToken: process.env.DATABASE_AUTH_TOKEN }}); ``` ================================================ FILE: changelogs/drizzle-orm/0.36.0.md ================================================ > This version of `drizzle-orm` requires `drizzle-kit@0.27.0` to enable all new features # New Features ## Row-Level Security (RLS) With Drizzle, you can enable Row-Level Security (RLS) for any Postgres table, create policies with various options, and define and manage the roles those policies apply to. Drizzle supports a raw representation of Postgres policies and roles that can be used in any way you want. This works with popular Postgres database providers such as `Neon` and `Supabase`. In Drizzle, we have specific predefined RLS roles and functions for RLS with both database providers, but you can also define your own logic. ### Enable RLS If you just want to enable RLS on a table without adding policies, you can use `.enableRLS()` As mentioned in the PostgreSQL documentation: > If no policy exists for the table, a default-deny policy is used, meaning that no rows are visible or can be modified. Operations that apply to the whole table, such as TRUNCATE and REFERENCES, are not subject to row security. ```ts import { integer, pgTable } from 'drizzle-orm/pg-core'; export const users = pgTable('users', { id: integer(), }).enableRLS(); ``` > If you add a policy to a table, RLS will be enabled automatically. So, there’s no need to explicitly enable RLS when adding policies to a table. ### Roles Currently, Drizzle supports defining roles with a few different options, as shown below. Support for more options will be added in a future release. ```ts import { pgRole } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin', { createRole: true, createDb: true, inherit: true }); ``` If a role already exists in your database, and you don’t want drizzle-kit to ‘see’ it or include it in migrations, you can mark the role as existing. ```ts import { pgRole } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin').existing(); ``` ### Policies To fully leverage RLS, you can define policies within a Drizzle table. > In PostgreSQL, policies should be linked to an existing table. Since policies are always associated with a specific table, we decided that policy definitions should be defined as a parameter of `pgTable` **Example of pgPolicy with all available properties** ```ts import { sql } from 'drizzle-orm'; import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin'); export const users = pgTable('users', { id: integer(), }, (t) => [ pgPolicy('policy', { as: 'permissive', to: admin, for: 'delete', using: sql``, withCheck: sql``, }), ]); ``` **Link Policy to an existing table** There are situations where you need to link a policy to an existing table in your database. The most common use case is with database providers like `Neon` or `Supabase`, where you need to add a policy to their existing tables. In this case, you can use the `.link()` API ```ts import { sql } from "drizzle-orm"; import { pgPolicy } from "drizzle-orm/pg-core"; import { authenticatedRole, realtimeMessages } from "drizzle-orm/supabase"; export const policy = pgPolicy("authenticated role insert policy", { for: "insert", to: authenticatedRole, using: sql``, }).link(realtimeMessages); ``` ### Migrations If you are using drizzle-kit to manage your schema and roles, there may be situations where you want to refer to roles that are not defined in your Drizzle schema. In such cases, you may want drizzle-kit to skip managing these roles without having to define each role in your drizzle schema and marking it with `.existing()`. In these cases, you can use `entities.roles` in `drizzle.config.ts`. For a complete reference, refer to the the [`drizzle.config.ts`](https://orm.drizzle.team/docs/drizzle-config-file) documentation. By default, `drizzle-kit` does not manage roles for you, so you will need to enable this feature in `drizzle.config.ts`. ```ts {12-14} // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ dialect: 'postgresql', schema: "./drizzle/schema.ts", dbCredentials: { url: process.env.DATABASE_URL! }, verbose: true, strict: true, entities: { roles: true } }); ``` In case you need additional configuration options, let's take a look at a few more examples. **You have an `admin` role and want to exclude it from the list of manageable roles** ```ts // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ ... entities: { roles: { exclude: ['admin'] } } }); ``` **You have an `admin` role and want to include it in the list of manageable roles** ```ts // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ ... entities: { roles: { include: ['admin'] } } }); ``` **If you are using `Neon` and want to exclude Neon-defined roles, you can use the provider option** ```ts // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ ... entities: { roles: { provider: 'neon' } } }); ``` **If you are using `Supabase` and want to exclude Supabase-defined roles, you can use the provider option** ```ts // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ ... entities: { roles: { provider: 'supabase' } } }); ``` > You may encounter situations where Drizzle is slightly outdated compared to new roles specified by your database provider. In such cases, you can use the `provider` option and `exclude` additional roles: ```ts // drizzle.config.ts import { defineConfig } from "drizzle-kit"; export default defineConfig({ ... entities: { roles: { provider: 'supabase', exclude: ['new_supabase_role'] } } }); ``` ### RLS on views With Drizzle, you can also specify RLS policies on views. For this, you need to use `security_invoker` in the view's WITH options. Here is a small example: ```ts {5} ... export const roomsUsersProfiles = pgView("rooms_users_profiles") .with({ securityInvoker: true, }) .as((qb) => qb .select({ ...getTableColumns(roomsUsers), email: profiles.email, }) .from(roomsUsers) .innerJoin(profiles, eq(roomsUsers.userId, profiles.id)) ); ``` ### Using with Neon The Neon Team helped us implement their vision of a wrapper on top of our raw policies API. We defined a specific `/neon` import with the `crudPolicy` function that includes predefined functions and Neon's default roles. Here's an example of how to use the `crudPolicy` function: ```ts import { crudPolicy } from 'drizzle-orm/neon'; import { integer, pgRole, pgTable } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin'); export const users = pgTable('users', { id: integer(), }, (t) => [ crudPolicy({ role: admin, read: true, modify: false }), ]); ``` This policy is equivalent to: ```ts import { sql } from 'drizzle-orm'; import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin'); export const users = pgTable('users', { id: integer(), }, (t) => [ pgPolicy(`crud-${admin.name}-policy-insert`, { for: 'insert', to: admin, withCheck: sql`false`, }), pgPolicy(`crud-${admin.name}-policy-update`, { for: 'update', to: admin, using: sql`false`, withCheck: sql`false`, }), pgPolicy(`crud-${admin.name}-policy-delete`, { for: 'delete', to: admin, using: sql`false`, }), pgPolicy(`crud-${admin.name}-policy-select`, { for: 'select', to: admin, using: sql`true`, }), ]); ``` `Neon` exposes predefined `authenticated` and `anaonymous` roles and related functions. If you are using `Neon` for RLS, you can use these roles, which are marked as existing, and the related functions in your RLS queries. ```ts // drizzle-orm/neon export const authenticatedRole = pgRole('authenticated').existing(); export const anonymousRole = pgRole('anonymous').existing(); export const authUid = (userIdColumn: AnyPgColumn) => sql`(select auth.user_id() = ${userIdColumn})`; ``` For example, you can use the `Neon` predefined roles and functions like this: ```ts import { sql } from 'drizzle-orm'; import { authenticatedRole } from 'drizzle-orm/neon'; import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin'); export const users = pgTable('users', { id: integer(), }, (t) => [ pgPolicy(`policy-insert`, { for: 'insert', to: authenticatedRole, withCheck: sql`false`, }), ]); ``` ### Using with Supabase We also have a `/supabase` import with a set of predefined roles marked as existing, which you can use in your schema. This import will be extended in a future release with more functions and helpers to make using RLS and `Supabase` simpler. ```ts // drizzle-orm/supabase export const anonRole = pgRole('anon').existing(); export const authenticatedRole = pgRole('authenticated').existing(); export const serviceRole = pgRole('service_role').existing(); export const postgresRole = pgRole('postgres_role').existing(); export const supabaseAuthAdminRole = pgRole('supabase_auth_admin').existing(); ``` For example, you can use the `Supabase` predefined roles like this: ```ts import { sql } from 'drizzle-orm'; import { serviceRole } from 'drizzle-orm/supabase'; import { integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core'; export const admin = pgRole('admin'); export const users = pgTable('users', { id: integer(), }, (t) => [ pgPolicy(`policy-insert`, { for: 'insert', to: serviceRole, withCheck: sql`false`, }), ]); ``` The `/supabase` import also includes predefined tables and functions that you can use in your application ```ts // drizzle-orm/supabase const auth = pgSchema('auth'); export const authUsers = auth.table('users', { id: uuid().primaryKey().notNull(), }); const realtime = pgSchema('realtime'); export const realtimeMessages = realtime.table( 'messages', { id: bigserial({ mode: 'bigint' }).primaryKey(), topic: text().notNull(), extension: text({ enum: ['presence', 'broadcast', 'postgres_changes'], }).notNull(), }, ); export const authUid = sql`(select auth.uid())`; export const realtimeTopic = sql`realtime.topic()`; ``` This allows you to use it in your code, and Drizzle Kit will treat them as existing databases, using them only as information to connect to other entities ```ts import { foreignKey, pgPolicy, pgTable, text, uuid } from "drizzle-orm/pg-core"; import { sql } from "drizzle-orm/sql"; import { authenticatedRole, authUsers } from "drizzle-orm/supabase"; export const profiles = pgTable( "profiles", { id: uuid().primaryKey().notNull(), email: text().notNull(), }, (table) => [ foreignKey({ columns: [table.id], // reference to the auth table from Supabase foreignColumns: [authUsers.id], name: "profiles_id_fk", }).onDelete("cascade"), pgPolicy("authenticated can view all profiles", { for: "select", // using predefined role from Supabase to: authenticatedRole, using: sql`true`, }), ] ); ``` Let's check an example of adding a policy to a table that exists in `Supabase` ```ts import { sql } from "drizzle-orm"; import { pgPolicy } from "drizzle-orm/pg-core"; import { authenticatedRole, realtimeMessages } from "drizzle-orm/supabase"; export const policy = pgPolicy("authenticated role insert policy", { for: "insert", to: authenticatedRole, using: sql``, }).link(realtimeMessages); ``` # Bug fixes - [[BUG]: postgres-js driver throws error when using new { client } constructor arguments ](https://github.com/drizzle-team/drizzle-orm/issues/3176) ================================================ FILE: changelogs/drizzle-orm/0.36.1.md ================================================ # Bug Fixes - [[BUG]: Using sql.placeholder with limit and/or offset for a prepared statement produces TS error](https://github.com/drizzle-team/drizzle-orm/issues/2146) - thanks @L-Mario564 - [[BUG] If a query I am trying to modify with a dynamic query (....$dynamic()) contains any placeholders, I'm getting an error that says No value for placeholder.... provided](https://github.com/drizzle-team/drizzle-orm/issues/2272) - thanks @L-Mario564 - [[BUG]: Error thrown when trying to insert an array of new rows using generatedAlwaysAsIdentity() for the id column](https://github.com/drizzle-team/drizzle-orm/issues/2849) - thanks @L-Mario564 - [[BUG]: Unable to Use BigInt Types with Bun and Drizzle](https://github.com/drizzle-team/drizzle-orm/issues/2603) - thanks @L-Mario564 ================================================ FILE: changelogs/drizzle-orm/0.36.2.md ================================================ # New Features - [Support more types in like, notLike, ilike and notIlike expressions](https://github.com/drizzle-team/drizzle-orm/pull/2805) # Bug and typo fixes - Fixed typos in repository: thanks @armandsalle, @masto, @wackbyte, @Asher-JH, @MaxLeiter - [Fixed .generated behavior with non-strict tsconfig](https://github.com/drizzle-team/drizzle-orm/pull/3542) - [Fix Drizzle ORM for expo-sqlite](https://github.com/drizzle-team/drizzle-orm/pull/3197) - [Fixed lack of schema name on columns in sql](https://github.com/drizzle-team/drizzle-orm/pull/3531) - [fix: Adjust neon http driver entity kind](https://github.com/drizzle-team/drizzle-orm/pull/3424) - [Export PgIntegerBuilderInitial type](https://github.com/drizzle-team/drizzle-orm/pull/2846) - [[MySQL] Correct $returningId() implementation to correctly store selected fields](https://github.com/drizzle-team/drizzle-orm/pull/2975) ================================================ FILE: changelogs/drizzle-orm/0.36.3.md ================================================ # New Features ## Support for `UPDATE ... FROM` in PostgreSQL and SQLite As the SQLite documentation mentions: > [!NOTE] > The UPDATE-FROM idea is an extension to SQL that allows an UPDATE statement to be driven by other tables in the database. The "target" table is the specific table that is being updated. With UPDATE-FROM you can join the target table against other tables in the database in order to help compute which rows need updating and what the new values should be on those rows Similarly, the PostgreSQL documentation states: > [!NOTE] > A table expression allowing columns from other tables to appear in the WHERE condition and update expressions Drizzle also supports this feature starting from this version For example, current query: ```ts await db .update(users) .set({ cityId: cities.id }) .from(cities) .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) ``` Will generate this sql ```sql update "users" set "city_id" = "cities"."id" from "cities" where ("cities"."name" = $1 and "users"."name" = $2) -- params: [ 'Seattle', 'John' ] ``` You can also alias tables that are joined (in PG, you can also alias the updating table too). ```ts const c = alias(cities, 'c'); await db .update(users) .set({ cityId: c.id }) .from(c); ``` Will generate this sql ```sql update "users" set "city_id" = "c"."id" from "cities" "c" ``` In PostgreSQL, you can also return columns from the joined tables. ```ts const updatedUsers = await db .update(users) .set({ cityId: cities.id }) .from(cities) .returning({ id: users.id, cityName: cities.name }); ``` Will generate this sql ```sql update "users" set "city_id" = "cities"."id" from "cities" returning "users"."id", "cities"."name" ``` ## Support for `INSERT INTO ... SELECT` in all dialects As the SQLite documentation mentions: > [!NOTE] > The second form of the INSERT statement contains a SELECT statement instead of a VALUES clause. A new entry is inserted into the table for each row of data returned by executing the SELECT statement. If a column-list is specified, the number of columns in the result of the SELECT must be the same as the number of items in the column-list. Otherwise, if no column-list is specified, the number of columns in the result of the SELECT must be the same as the number of columns in the table. Any SELECT statement, including compound SELECTs and SELECT statements with ORDER BY and/or LIMIT clauses, may be used in an INSERT statement of this form. > [!CAUTION] > To avoid a parsing ambiguity, the SELECT statement should always contain a WHERE clause, even if that clause is simply "WHERE true", if the upsert-clause is present. Without the WHERE clause, the parser does not know if the token "ON" is part of a join constraint on the SELECT, or the beginning of the upsert-clause. As the PostgreSQL documentation mentions: > [!NOTE] > A query (SELECT statement) that supplies the rows to be inserted And as the MySQL documentation mentions: > [!NOTE] > With INSERT ... SELECT, you can quickly insert many rows into a table from the result of a SELECT statement, which can select from one or many tables Drizzle supports the current syntax for all dialects, and all of them share the same syntax. Let's review some common scenarios and API usage. There are several ways to use select inside insert statements, allowing you to choose your preferred approach: - You can pass a query builder inside the select function. - You can use a query builder inside a callback. - You can pass an SQL template tag with any custom select query you want to use **Query Builder** ```ts const insertedEmployees = await db .insert(employees) .select( db.select({ name: users.name }).from(users).where(eq(users.role, 'employee')) ) .returning({ id: employees.id, name: employees.name }); ``` ```ts const qb = new QueryBuilder(); await db.insert(employees).select( qb.select({ name: users.name }).from(users).where(eq(users.role, 'employee')) ); ``` **Callback** ```ts await db.insert(employees).select( () => db.select({ name: users.name }).from(users).where(eq(users.role, 'employee')) ); ``` ```ts await db.insert(employees).select( (qb) => qb.select({ name: users.name }).from(users).where(eq(users.role, 'employee')) ); ``` **SQL template tag** ```ts await db.insert(employees).select( sql`select "users"."name" as "name" from "users" where "users"."role" = 'employee'` ); ``` ```ts await db.insert(employees).select( () => sql`select "users"."name" as "name" from "users" where "users"."role" = 'employee'` ); ``` ================================================ FILE: changelogs/drizzle-orm/0.36.4.md ================================================ # New Package: `drizzle-seed` > [!NOTE] > `drizzle-seed` can only be used with `drizzle-orm@0.36.4` or higher. Versions lower than this may work at runtime but could have type issues and identity column issues, as this patch was introduced in `drizzle-orm@0.36.4` ## Full Reference The full API reference and package overview can be found in our [official documentation](https://orm.drizzle.team/docs/seed-overview) ## Basic Usage In this example we will create 10 users with random names and ids ```ts {12} import { pgTable, integer, text } from "drizzle-orm/pg-core"; import { drizzle } from "drizzle-orm/node-postgres"; import { seed } from "drizzle-seed"; const users = pgTable("users", { id: integer().primaryKey(), name: text().notNull(), }); async function main() { const db = drizzle(process.env.DATABASE_URL!); await seed(db, { users }); } main(); ``` ## Options **`count`** By default, the `seed` function will create 10 entities. However, if you need more for your tests, you can specify this in the seed options object ```ts await seed(db, schema, { count: 1000 }); ``` **`seed`** If you need a seed to generate a different set of values for all subsequent runs, you can define a different number in the `seed` option. Any new number will generate a unique set of values ```ts await seed(db, schema, { seed: 12345 }); ``` The full API reference and package overview can be found in our [official documentation](https://orm.drizzle.team/docs/seed-overview) # Features ## Added `OVERRIDING SYSTEM VALUE` api to db.insert() If you want to force you own values for `GENERATED ALWAYS AS IDENTITY` columns, you can use `OVERRIDING SYSTEM VALUE` As PostgreSQL docs mentions > In an INSERT command, if ALWAYS is selected, a user-specified value is only accepted if the INSERT statement specifies OVERRIDING SYSTEM VALUE. If BY DEFAULT is selected, then the user-specified value takes precedence ```ts await db.insert(identityColumnsTable).overridingSystemValue().values([ { alwaysAsIdentity: 2 }, ]); ``` ## Added `.$withAuth()` API for Neon HTTP driver Using this API, Drizzle will send you an auth token to authorize your query. It can be used with any query available in Drizzle by simply adding `.$withAuth()` before it. This token will be used for a specific query Examples ```ts const token = 'HdncFj1Nm' await db.$withAuth(token).select().from(usersTable); await db.$withAuth(token).update(usersTable).set({ name: 'CHANGED' }).where(eq(usersTable.name, 'TARGET')) ``` # Bug Fixes - [[BUG]: TypeScript error Please install '@neondatabase/serverless' to allow Drizzle ORM to connect to the database](https://github.com/drizzle-team/drizzle-orm/issues/3521) ================================================ FILE: changelogs/drizzle-orm/0.37.0.md ================================================ # New Dialects ### 🎉 `SingleStore` dialect is now available in Drizzle Thanks to the SingleStore team for creating a PR with all the necessary changes to support the MySQL-compatible part of SingleStore. You can already start using it with Drizzle. The SingleStore team will also help us iterate through updates and make more SingleStore-specific features available in Drizzle ```ts import { int, singlestoreTable, varchar } from 'drizzle-orm/singlestore-core'; import { drizzle } from 'drizzle-orm/singlestore'; export const usersTable = singlestoreTable('users_table', { id: int().primaryKey(), name: varchar({ length: 255 }).notNull(), age: int().notNull(), email: varchar({ length: 255 }).notNull().unique(), }); ... const db = drizzle(process.env.DATABASE_URL!); db.select()... ``` You can check out our [Getting started guides](https://orm.drizzle.team/docs/get-started/singlestore-new) to try SingleStore! # New Drivers ### 🎉 `SQLite Durable Objects` driver is now available in Drizzle You can now query SQLite Durable Objects in Drizzle! For the full example, please check our [Get Started](https://orm.drizzle.team/docs/get-started/do-new) Section ```ts /// import { drizzle, DrizzleSqliteDODatabase } from 'drizzle-orm/durable-sqlite'; import { DurableObject } from 'cloudflare:workers' import { migrate } from 'drizzle-orm/durable-sqlite/migrator'; import migrations from '../drizzle/migrations'; import { usersTable } from './db/schema'; export class MyDurableObject1 extends DurableObject { storage: DurableObjectStorage; db: DrizzleSqliteDODatabase; constructor(ctx: DurableObjectState, env: Env) { super(ctx, env); this.storage = ctx.storage; this.db = drizzle(this.storage, { logger: false }); } async migrate() { migrate(this.db, migrations); } async insert(user: typeof usersTable.$inferInsert) { await this.db.insert(usersTable).values(user); } async select() { return this.db.select().from(usersTable); } } export default { /** * This is the standard fetch handler for a Cloudflare Worker * * @param request - The request submitted to the Worker from the client * @param env - The interface to reference bindings declared in wrangler.toml * @param ctx - The execution context of the Worker * @returns The response to be sent back to the client */ async fetch(request: Request, env: Env): Promise { const id: DurableObjectId = env.MY_DURABLE_OBJECT1.idFromName('durable-object'); const stub = env.MY_DURABLE_OBJECT1.get(id); await stub.migrate(); await stub.insert({ name: 'John', age: 30, email: 'john@example.com', }) console.log('New user created!') const users = await stub.select(); console.log('Getting all users from the database: ', users) return new Response(); } } ``` # Bug fixes - [[BUG]: $with is undefined on withReplicas](https://github.com/drizzle-team/drizzle-orm/issues/1834) - [[BUG]: Neon serverless driver accepts authToken as a promise, but the $withAuth does not](https://github.com/drizzle-team/drizzle-orm/issues/3597) ================================================ FILE: changelogs/drizzle-orm/0.38.0.md ================================================ # Types breaking changes A few internal types were changed and extra generic types for length of column types were added in this release. It won't affect anyone, unless you are using those internal types for some custom wrappers, logic, etc. Here is a list of all types that were changed, so if you are relying on those, please review them before upgrading - `MySqlCharBuilderInitial` - `MySqlVarCharBuilderInitial` - `PgCharBuilderInitial` - `PgArrayBuilder` - `PgArray` - `PgVarcharBuilderInitial` - `PgBinaryVectorBuilderInitial` - `PgBinaryVectorBuilder` - `PgBinaryVector` - `PgHalfVectorBuilderInitial` - `PgHalfVectorBuilder` - `PgHalfVector` - `PgVectorBuilderInitial` - `PgVectorBuilder` - `PgVector` - `SQLiteTextBuilderInitial` # New Features - Added new function `getViewSelectedFields` - Added `$inferSelect` function to views - Added `InferSelectViewModel` type for views - Added `isView` function # Validator packages updates - `drizzle-zod` has been completely rewritten. You can find detailed information about it [here](https://github.com/drizzle-team/drizzle-orm/blob/main/changelogs/drizzle-zod/0.6.0.md) - `drizzle-valibot` has been completely rewritten. You can find detailed information about it [here](https://github.com/drizzle-team/drizzle-orm/blob/main/changelogs/drizzle-valibot/0.3.0.md) - `drizzle-typebox` has been completely rewritten. You can find detailed information about it [here](https://github.com/drizzle-team/drizzle-orm/blob/main/changelogs/drizzle-typebox/0.2.0.md) Thanks to @L-Mario564 for making more updates than we expected to be shipped in this release. We'll copy his message from a PR regarding improvements made in this release: - Output for all packages are now unminified, makes exploring the compiled code easier when published to npm. - Smaller footprint. Previously, we imported the column types at runtime for each dialect, meaning that for example, if you're just using Postgres then you'd likely only have drizzle-orm and drizzle-orm/pg-core in the build output of your app; however, these packages imported all dialects which could lead to mysql-core and sqlite-core being bundled as well even if they're unused in your app. This is now fixed. - Slight performance gain. To determine the column data type we used the is function which performs a few checks to ensure the column data type matches. This was slow, as these checks would pile up every quickly when comparing all data types for many fields in a table/view. The easier and faster alternative is to simply go off of the column's columnType property. - Some changes had to be made at the type level in the ORM package for better compatibility with drizzle-valibot. And a set of new features - `createSelectSchema` function now also accepts views and enums. - New function: `createUpdateSchema`, for use in updating queries. - New function: `createSchemaFactory`, to provide more advanced options and to avoid bloating the parameters of the other schema functions # Bug fixes - [[FEATURE]: publish packages un-minified](https://github.com/drizzle-team/drizzle-orm/issues/2247) - [Don't allow unknown keys in drizzle-zod refinement](https://github.com/drizzle-team/drizzle-orm/issues/573) - [[BUG]:drizzle-zod not working with pgSchema](https://github.com/drizzle-team/drizzle-orm/issues/1458) - [Add createUpdateSchema to drizzle-zod](https://github.com/drizzle-team/drizzle-orm/issues/503) - [[BUG]:drizzle-zod produces wrong type](https://github.com/drizzle-team/drizzle-orm/issues/1110) - [[BUG]:Drizzle-zod:Boolean and Serial types from Schema are defined as enum when using CreateInsertSchema and CreateSelectSchema](https://github.com/drizzle-team/drizzle-orm/issues/1327) - [[BUG]: Drizzle typebox enum array wrong schema and type](https://github.com/drizzle-team/drizzle-orm/issues/1345) - [[BUG]:drizzle-zod not working with pgSchema](https://github.com/drizzle-team/drizzle-orm/issues/1458) - [[BUG]: drizzle-zod not parsing arrays correctly](https://github.com/drizzle-team/drizzle-orm/issues/1609) - [[BUG]: Drizzle typebox not supporting array](https://github.com/drizzle-team/drizzle-orm/issues/1810) - [[FEATURE]: Export factory functions from drizzle-zod to allow usage with extended Zod classes](https://github.com/drizzle-team/drizzle-orm/issues/2245) - [[FEATURE]: Add support for new pipe syntax for drizzle-valibot](https://github.com/drizzle-team/drizzle-orm/issues/2358) - [[BUG]: drizzle-zod's createInsertSchema() can't handle column of type vector](https://github.com/drizzle-team/drizzle-orm/issues/2424) - [[BUG]: drizzle-typebox fails to map geometry column to type-box schema](https://github.com/drizzle-team/drizzle-orm/issues/2516) - [[BUG]: drizzle-valibot does not provide types for returned schemas](https://github.com/drizzle-team/drizzle-orm/issues/2521) - [[BUG]: Drizzle-typebox types SQLite real field to string](https://github.com/drizzle-team/drizzle-orm/issues/2524) - [[BUG]: drizzle-zod: documented usage generates type error with exactOptionalPropertyTypes](https://github.com/drizzle-team/drizzle-orm/issues/2550) - [[BUG]: drizzle-zod does not respect/count db type range](https://github.com/drizzle-team/drizzle-orm/issues/2737) - [[BUG]: drizzle-zod not overriding optional](https://github.com/drizzle-team/drizzle-orm/issues/2755) - [[BUG]:drizzle-zod doesn't accept custom id value](https://github.com/drizzle-team/drizzle-orm/issues/2957) - [[FEATURE]: Support for Database Views in Drizzle Zod](https://github.com/drizzle-team/drizzle-orm/issues/3398) - [[BUG]: drizzle-valibot return type any](https://github.com/drizzle-team/drizzle-orm/issues/3621) - [[BUG]: drizzle-zod Type generation results in undefined types](https://github.com/drizzle-team/drizzle-orm/issues/3645) - [[BUG]: GeneratedAlwaysAs](https://github.com/drizzle-team/drizzle-orm/issues/3511) - [[FEATURE]: $inferSelect on a view](https://github.com/drizzle-team/drizzle-orm/issues/2610) - [[BUG]:Can't infer props from view in schema](https://github.com/drizzle-team/drizzle-orm/issues/3392) ================================================ FILE: changelogs/drizzle-orm/0.38.1.md ================================================ - Closed [[FEATURE]: Add more flexible typing for usage with exactOptionalPropertyTypes](https://github.com/drizzle-team/drizzle-orm/issues/2742) ================================================ FILE: changelogs/drizzle-orm/0.38.2.md ================================================ # New features ## `USE INDEX`, `FORCE INDEX` and `IGNORE INDEX` for MySQL In MySQL, the statements USE INDEX, FORCE INDEX, and IGNORE INDEX are hints used in SQL queries to influence how the query optimizer selects indexes. These hints provide fine-grained control over index usage, helping optimize performance when the default behavior of the optimizer is not ideal. ### Use Index The `USE INDEX` hint suggests to the optimizer which indexes to consider when processing the query. The optimizer is not forced to use these indexes but will prioritize them if they are suitable. ```ts export const users = mysqlTable('users', { id: int('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }, () => [usersTableNameIndex]); const usersTableNameIndex = index('users_name_index').on(users.name); await db.select() .from(users, { useIndex: usersTableNameIndex }) .where(eq(users.name, 'David')); ``` ### Ignore Index The `IGNORE INDEX` hint tells the optimizer to avoid using specific indexes for the query. MySQL will consider all other indexes (if any) or perform a full table scan if necessary. ```ts export const users = mysqlTable('users', { id: int('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }, () => [usersTableNameIndex]); const usersTableNameIndex = index('users_name_index').on(users.name); await db.select() .from(users, { ignoreIndex: usersTableNameIndex }) .where(eq(users.name, 'David')); ``` ### Force Index The `FORCE INDEX` hint forces the optimizer to use the specified index(es) for the query. If the specified index cannot be used, MySQL will not fall back to other indexes; it might resort to a full table scan instead. ```ts copy export const users = mysqlTable('users', { id: int('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }, () => [usersTableNameIndex]); const usersTableNameIndex = index('users_name_index').on(users.name); await db.select() .from(users, { forceIndex: usersTableNameIndex }) .where(eq(users.name, 'David')); ``` You can also combine those hints and use multiple indexes in a query if you need ================================================ FILE: changelogs/drizzle-orm/0.38.3.md ================================================ - Fix incorrect deprecation detection for table declarations ================================================ FILE: changelogs/drizzle-orm/0.38.4.md ================================================ - New SingleStore type `vector` - thanks @mitchwadair - Fix wrong DROP INDEX statement generation, [#3866](https://github.com/drizzle-team/drizzle-orm/pull/3866) - thanks @WaciX - Typo fixes - thanks @stephan281094 ================================================ FILE: changelogs/drizzle-orm/0.39.0.md ================================================ # New features ## Bun SQL driver support You can now use the new Bun SQL driver released in Bun v1.2.0 with Drizzle ```ts import { drizzle } from 'drizzle-orm/bun-sql'; const db = drizzle(process.env.PG_DB_URL!); const result = await db.select().from(...); ``` or you can use Bun SQL instance ```ts import { drizzle } from 'drizzle-orm/bun-sqlite'; import { SQL } from 'bun'; const client = new SQL(process.env.PG_DB_URL!); const db = drizzle({ client }); const result = await db.select().from(...); ``` Current Limitations: - `json` and `jsonb` inserts and selects currently perform an additional `JSON.stringify` on the Bun SQL side. Once this is removed, they should work properly. You can always use custom types and redefine the mappers to and from the database. - `datetime`, `date`, and `timestamp` will not work properly when using `mode: string` in Drizzle. This is due to Bun's API limitations, which prevent custom parsers for queries. As a result, Drizzle cannot control the response sent from Bun SQL to Drizzle. Once this feature is added to Bun SQL, it should work as expected. - `array` types currently have issues in Bun SQL. > You can check more in [Bun docs](https://bun.sh/docs/api/sql) > > You can check more getting started examples in [Drizzle docs](https://orm.drizzle.team/docs/get-started/bun-sql-new) ## WITH now supports INSERT, UPDATE, DELETE and raw sql template **`with` and `insert`** ```ts const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const sq = db.$with('sq').as( db.insert(users).values({ name: 'John' }).returning(), ); const result = await db.with(sq).select().from(sq); ``` **`with` and `update`** ```ts const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const sq = db.$with('sq').as( db.update(users).set({ age: 25 }).where(eq(users.name, 'John')).returning(), ); const result = await db.with(sq).select().from(sq); ``` **`with` and `delete`** ```ts const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const sq = db.$with('sq').as( db.delete(users).where(eq(users.name, 'John')).returning(), ); const result = await db.with(sq).select().from(sq); ``` **`with` and `sql`** ```ts const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const sq = db.$with('sq', { userId: users.id, data: { name: users.name, }, }).as(sql`select * from ${users} where ${users.name} = 'John'`); const result = await db.with(sq).select().from(sq); ``` ## New tables in `/neon` import In this release you can use `neon_identity` schema and `users_sync` table inside this schema by just importing it from `/neon` ```ts // "drizzle-orm/neon" const neonIdentitySchema = pgSchema('neon_identity'); /** * Table schema of the `users_sync` table used by Neon Identity. * This table automatically synchronizes and stores user data from external authentication providers. * * @schema neon_identity * @table users_sync */ export const usersSync = neonIdentitySchema.table('users_sync', { rawJson: jsonb('raw_json').notNull(), id: text().primaryKey().notNull(), name: text(), email: text(), createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }), deletedAt: timestamp('deleted_at', { withTimezone: true, mode: 'string' }), }); ``` # Utils and small improvements ## `getViewName` util function ```ts import { getViewName } from 'drizzle-orm/sql' export const user = pgTable("user", { id: serial(), name: text(), email: text(), }); export const userView = pgView("user_view").as((qb) => qb.select().from(user)); const viewName = getViewName(userView) ``` # Bug fixed and GitHub issue closed - [[FEATURE]: allow INSERT in CTEs (WITH clauses)](https://github.com/drizzle-team/drizzle-orm/issues/2078) - [[FEATURE]: Support Raw SQL in CTE Query Builder](https://github.com/drizzle-team/drizzle-orm/issues/2168) - [[FEATURE]: include pre-defined database objects related to Neon Identity in drizzle-orm](https://github.com/drizzle-team/drizzle-orm/issues/3959) - [[BUG]: $count is undefined on withReplicas](https://github.com/drizzle-team/drizzle-orm/issues/3951) - [[FEATURE]: get[Materialized]ViewName, ie getTableName but for (materialized) views.](https://github.com/drizzle-team/drizzle-orm/issues/3946) - [[BUG]: $count API error with vercel-postgres](https://github.com/drizzle-team/drizzle-orm/issues/3710) - [[BUG]: Cannot use schema.coerce on refining drizzle-zod types](https://github.com/drizzle-team/drizzle-orm/issues/3842) - [[FEATURE]: Type Coercion in drizzle-zod](https://github.com/drizzle-team/drizzle-orm/issues/776) - [[BUG]: The inferred type of X cannot be named without a reference to ../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732) - [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869) ================================================ FILE: changelogs/drizzle-orm/0.39.1.md ================================================ - Fixed SQLite onConflict clauses being overwritten instead of stacked - [#2276](https://github.com/drizzle-team/drizzle-orm/issues/2276) - Added view support to `aliasedTable()` - Fixed sql builder prefixing aliased views and tables with their schema ================================================ FILE: changelogs/drizzle-orm/0.39.2.md ================================================ - To be compatible with latest Neon Auth feature we renamed the pre-defined schema internally, from `neon_identity` to `neon_auth` - thanks @pffigueiredo ================================================ FILE: changelogs/drizzle-orm/0.39.3.md ================================================ - Remove `react` from peerDependencies ================================================ FILE: changelogs/drizzle-orm/0.40.0.md ================================================ # New Features ## Added `Gel` dialect support and `gel-js` client support Drizzle is getting a new `Gel` dialect with its own types and Gel-specific logic. In this first iteration, almost all query-building features have been copied from the `PostgreSQL` dialect since Gel is fully PostgreSQL-compatible. The only change in this iteration is the data types. The Gel dialect has a different set of available data types, and all mappings for these types have been designed to avoid any extra conversions on Drizzle's side. This means you will insert and select exactly the same data as supported by the Gel protocol. Drizzle + Gel integration will work only through `drizzle-kit pull`. Drizzle won't support `generate`, `migrate`, or `push` features in this case. Instead, drizzle-kit is used solely to pull the Drizzle schema from the Gel database, which can then be used in your `drizzle-orm` queries. The Gel + Drizzle workflow: 1. Use the `gel` CLI to manage your schema. 2. Use the `gel` CLI to generate and apply migrations to the database. 3. Use drizzle-kit to pull the Gel database schema into a Drizzle schema. 4. Use drizzle-orm with gel-js to query the Gel database. Here is a small example of how to connect to Gel using Drizzle: ```typescript copy // Make sure to install the 'gel' package import { drizzle } from "drizzle-orm/gel"; import { createClient } from "gel"; const gelClient = createClient(); const db = drizzle({ client: gelClient }); const result = await db.execute('select 1'); ``` On the drizzle-kit side you can now use `dialect: "gel"` ```ts // drizzle.config.ts import { defineConfig } from 'drizzle-kit'; export default defineConfig({ dialect: 'gel', }); ``` For a complete Get Started tutorial you can use our new guides: - [Get Started with Drizzle and Gel in a new project](https://orm.drizzle.team/docs/get-started/gel-new) - [Get Started with Drizzle and Gel in a existing project](https://orm.drizzle.team/docs/get-started/gel-existing) ================================================ FILE: changelogs/drizzle-orm/0.40.1.md ================================================ #### Updates to `neon-http` for `@neondatabase/serverless@1.0.0` - thanks @jawj Starting from this version, drizzle-orm will be compatible with both `@neondatabase/serverless` <1.0 and >1.0 ================================================ FILE: changelogs/drizzle-orm/0.41.0.md ================================================ - `bigint`, `number` modes for `SQLite`, `MySQL`, `PostgreSQL`, `SingleStore` `decimal` & `numeric` column types - Changed behavior of `sql-js` query preparation to query prebuild instead of db-side prepare due to need to manually free prepared queries, removed `.free()` method - Fixed `MySQL`, `SingleStore` `varchar` allowing not specifying `length` in config - Fixed `MySQL`, `SingleStore` `binary`, `varbinary` data\\type mismatches - Fixed `numeric`\\`decimal` data\\type mismatches: [#1290](https://github.com/drizzle-team/drizzle-orm/issues/1290), [#1453](https://github.com/drizzle-team/drizzle-orm/issues/1453) - Fixed `drizzle-studio` + `AWS Data Api` connection issue: [#3224](https://github.com/drizzle-team/drizzle-orm/issues/3224) - Fixed `isConfig` utility function checking types of wrong fields - Enabled `supportBigNumbers` in auto-created `mysql2` driver instances - Fixed custom schema tables querying in RQBv1: [#4060](https://github.com/drizzle-team/drizzle-orm/issues/4060) - Removed in-driver mapping for postgres types `1231` (`numeric[]`), `1115` (`timestamp[]`), `1185` (`timestamp_with_timezone[]`), `1187` (`interval[]`), `1182` (`date[]`), preventing precision loss and data\\type mismatches - Fixed `SQLite` `buffer`-mode `blob` sometimes returning `number[]` ================================================ FILE: changelogs/drizzle-orm/0.42.0.md ================================================ ## Features ### Duplicate imports removal When importing from `drizzle-orm` using custom loaders, you may encounter issues such as: `SyntaxError: The requested module 'drizzle-orm' does not provide an export named 'eq'` This issue arose because there were duplicated exports in `drizzle-orm`. To address this, we added a set of tests that checks every file in `drizzle-orm` to ensure all exports are valid. These tests will fail if any new duplicated exports appear. In this release, we’ve removed all duplicated exports, so you should no longer encounter this issue. ### `pgEnum` and `mysqlEnum` now can accept both strings and TS enums If you provide a TypeScript enum, all your types will be inferred as that enum - so you can insert and retrieve enum values directly. If you provide a string union, it will work as before. ```ts enum Test { a = 'a', b = 'b', c = 'c', } const tableWithTsEnums = mysqlTable('enums_test_case', { id: serial().primaryKey(), enum1: mysqlEnum(Test).notNull(), enum2: mysqlEnum(Test).default(Test.a), }); await db.insert(tableWithTsEnums).values([ { id: 1, enum1: Test.a, enum2: Test.b, enum3: Test.c }, { id: 2, enum1: Test.a, enum3: Test.c }, { id: 3, enum1: Test.a }, ]); const res = await db.select().from(tableWithTsEnums); expect(res).toEqual([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, ]); ``` ## Improvements - Make `inArray` accept `ReadonlyArray` as a value - thanks @Zamiell - Pass row type parameter to `@planetscale/database`'s execute - thanks @ayrton - New `InferEnum` type - thanks @totigm ## Issues closed - [Add first-class support for TS native enums](https://github.com/drizzle-team/drizzle-orm/issues/332) - [[FEATURE]: support const enums](https://github.com/drizzle-team/drizzle-orm/issues/2798) - [[BUG]: SyntaxError: The requested module 'drizzle-orm' does not provide an export named 'lte'](https://github.com/drizzle-team/drizzle-orm/issues/4079) ================================================ FILE: changelogs/drizzle-orm/0.43.0.md ================================================ ## Features - Added `cross join` \([#1414](https://github.com/drizzle-team/drizzle-orm/issues/1414)\) - Added lateral `left`, `inner`, `cross` joins to `PostgreSQL`, `MySQL`, `Gel`, `SingleStore` - Added drizzle connection attributes to `SingleStore`'s driver instances ## Fixes - Removed unsupported by dialect `full join` from `MySQL` select api - Forced `Gel` columns to always have explicit schema & table prefixes due to potential errors caused by lack of such prefix in subquery's selection when there's already a column bearing same name in context - Added missing export for `PgTextBuilderInitial` type - Removed outdated `IfNotImported` type check from `SingleStore` driver initializer - Fixed incorrect type inferrence for insert and update models with non-strict `tsconfig`s \([#2654](https://github.com/drizzle-team/drizzle-orm/issues/2654)\) - Fixed invalid spelling of `nowait` flag \([#3554](https://github.com/drizzle-team/drizzle-orm/issues/3554)\) - [Add join lateral support](https://github.com/drizzle-team/drizzle-orm/issues/420) - [Remove .fullJoin() from MySQL API](https://github.com/drizzle-team/drizzle-orm/issues/1125) ================================================ FILE: changelogs/drizzle-orm/0.43.1.md ================================================ ## Fixes - [Fixed incorrect types of schema enums in PostgreSQL](https://github.com/drizzle-team/drizzle-orm/issues/4421) ================================================ FILE: changelogs/drizzle-orm/0.44.0.md ================================================ ## Error handling Starting from this version, we’ve introduced a new `DrizzleQueryError` that wraps all errors from database drivers and provides a set of useful information: 1. A proper stack trace to identify which exact `Drizzle` query failed 2. The generated SQL string and its parameters 3. The original stack trace from the driver that caused the DrizzleQueryError ## Drizzle `cache` module Drizzle sends every query straight to your database by default. There are no hidden actions, no automatic caching or invalidation - you’ll always see exactly what runs. If you want caching, you must opt in. By default, Drizzle uses a explicit caching strategy (i.e. `global: false`), so nothing is ever cached unless you ask. This prevents surprises or hidden performance traps in your application. Alternatively, you can flip on all caching (global: true) so that every select will look in cache first. Out first native integration was built together with Upstash team and let you natively use `upstash` as a cache for your drizzle queries ```ts import { upstashCache } from "drizzle-orm/cache/upstash"; import { drizzle } from "drizzle-orm/..."; const db = drizzle(process.env.DB_URL!, { cache: upstashCache({ // 👇 Redis credentials (optional — can also be pulled from env vars) url: '', token: '', // 👇 Enable caching for all queries by default (optional) global: true, // 👇 Default cache behavior (optional) config: { ex: 60 } }) }); ``` You can also implement your own cache, as Drizzle exposes all the necessary APIs, such as get, put, mutate, etc. You can find full implementation details on the [website](https://orm.drizzle.team/docs/cache#custom-cache) ```ts import Keyv from "keyv"; export class TestGlobalCache extends Cache { private globalTtl: number = 1000; // This object will be used to store which query keys were used // for a specific table, so we can later use it for invalidation. private usedTablesPerKey: Record = {}; constructor(private kv: Keyv = new Keyv()) { super(); } // For the strategy, we have two options: // - 'explicit': The cache is used only when .$withCache() is added to a query. // - 'all': All queries are cached globally. // The default behavior is 'explicit'. override strategy(): "explicit" | "all" { return "all"; } // This function accepts query and parameters that cached into key param, // allowing you to retrieve response values for this query from the cache. override async get(key: string): Promise { ... } // This function accepts several options to define how cached data will be stored: // - 'key': A hashed query and parameters. // - 'response': An array of values returned by Drizzle from the database. // - 'tables': An array of tables involved in the select queries. This information is needed for cache invalidation. // // For example, if a query uses the "users" and "posts" tables, you can store this information. Later, when the app executes // any mutation statements on these tables, you can remove the corresponding key from the cache. // If you're okay with eventual consistency for your queries, you can skip this option. override async put( key: string, response: any, tables: string[], config?: CacheConfig, ): Promise { ... } // This function is called when insert, update, or delete statements are executed. // You can either skip this step or invalidate queries that used the affected tables. // // The function receives an object with two keys: // - 'tags': Used for queries labeled with a specific tag, allowing you to invalidate by that tag. // - 'tables': The actual tables affected by the insert, update, or delete statements, // helping you track which tables have changed since the last cache update. override async onMutate(params: { tags: string | string[]; tables: string | string[] | Table | Table[]; }): Promise { ... } } ``` For more usage example you can check our [docs](https://orm.drizzle.team/docs/cache#cache-usage-examples) ================================================ FILE: changelogs/drizzle-orm/0.44.1.md ================================================ - [[BUG]: Drizzle can no longer run on Durable Objects](https://github.com/drizzle-team/drizzle-orm/issues/4586) ================================================ FILE: changelogs/drizzle-orm/0.44.2.md ================================================ - [BUG]: Fixed type issues with joins with certain variations of `tsconfig`: [#4535](https://github.com/drizzle-team/drizzle-orm/issues/4535), [#4457](https://github.com/drizzle-team/drizzle-orm/issues/4457) ================================================ FILE: changelogs/drizzle-orm/0.44.3.md ================================================ - Fixed types of `$client` for clients created by drizzle function ```ts await db.$client.[...] ``` - Added the `updated_at` column to the `neon_auth.users_sync` table definition. ================================================ FILE: changelogs/drizzle-orm/0.44.4.md ================================================ - Fix wrong DrizzleQueryError export. thanks @nathankleyn ================================================ FILE: changelogs/drizzle-orm/0.44.5.md ================================================ - Fixed invalid usage of `.one()` in `durable-sqlite` session - Fixed spread operator related crash in sqlite `blob` columns - Better browser support for sqlite `blob` columns - Improved sqlite `blob` mapping ================================================ FILE: changelogs/drizzle-orm/0.44.6.md ================================================ - feat: add $replicas reference #4874 ================================================ FILE: changelogs/drizzle-orm/0.44.7.md ================================================ - fix durable sqlite transaction return value #3746 - thanks @joaocstro ================================================ FILE: changelogs/drizzle-orm/0.45.0.md ================================================ - Fixed pg-native Pool detection in node-postgres transactions - Allowed subqueries in select fields - Updated typo algorythm => algorithm - Fixed `$onUpdate` not handling `SQL` values (fixes [#2388](https://github.com/drizzle-team/drizzle-orm/issues/2388), tests implemented by [L-Mario564](https://github.com/L-Mario564) in [#2911](https://github.com/drizzle-team/drizzle-orm/pull/2911)) - Fixed `pg` mappers not handling `Date` instances in `bun-sql:postgresql` driver responses for `date`, `timestamp` types (fixes [#4493](https://github.com/drizzle-team/drizzle-orm/issues/4493)) ================================================ FILE: changelogs/drizzle-orm/0.45.1.md ================================================ - Fixed pg-native Pool detection in node-postgres transactions breaking in environments with forbidden `require()` ([#5107](https://github.com/drizzle-team/drizzle-orm/issues/5107)) ================================================ FILE: changelogs/drizzle-orm-mysql/0.14.1.md ================================================ # drizzle-orm-mysql 0.14.1 - Release support for mysql. Currently mysql module is up-to-date with `pg` and `sqlite` ================================================ FILE: changelogs/drizzle-orm-mysql/0.14.2.md ================================================ # drizzle-orm-mysql 0.14.2 - Bumped everything to 0.14.2 ================================================ FILE: changelogs/drizzle-orm-mysql/0.14.3.md ================================================ # drizzle-orm-mysql 0.14.3 - Fill author field in package.json ================================================ FILE: changelogs/drizzle-orm-mysql/0.15.0.md ================================================ # drizzle-orm-mysql 0.15.0 - Bumped everything to 0.15.0 ================================================ FILE: changelogs/drizzle-orm-mysql/0.15.1.md ================================================ # drizzle-orm-mysql 0.15.1 Add support for schemas -> [MySQL schemas](https://dev.mysql.com/doc/refman/8.0/en/create-database.html) > **Warning** > If you will have tables with same names in different schemas then drizzle will respond with `never[]` error in result types and error from database > > In this case you may use [alias syntax](https://github.com/drizzle-team/drizzle-orm/tree/main/drizzle-orm-mysql#join-aliases-and-self-joins) --- Usage example ```typescript // Table in default schema const publicUsersTable = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: json('jsonb'), createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), }); // Table in custom schema const mySchema = mysqlSchema('mySchema'); const mySchemaUsersTable = mySchema('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: json('jsonb'), createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), }); ``` --- ## Breaking changes - `foreignKey()` function api changes. Previously you need to pass callback function with table columns for FK. Right now no need for callback, just object with data for FK #### Before ```typescript export const usersTable = mysqlTable('userstest', { id: serial('id').primaryKey(), homeCity: text('name').notNull(), createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), }, (users) => ({ // foreignKey has a callback as param usersCityFK: foreignKey(() => { columns: [users.homeCity], foreignColumns: [cities.id] }), })); ``` #### Now ```typescript export const usersTable = mysqlTable('userstest', { id: serial('id').primaryKey(), homeCity: text('name').notNull(), createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), }, (users) => ({ // foreignKey has a callback as param usersCityFK: foreignKey({ columns: [users.homeCity], foreignColumns: [cities.id] }), })); ``` --- - Change enum initializing strategy for mysql You should use ``` typescript mysqlEnum('popularity', ['unknown', 'known', 'popular']).notNull().default('known') ``` instead of ``` typescript export const popularityEnum = mysqlEnum('popularity', ['unknown', 'known', 'popular']); popularityEnum('column_name'); ``` Usage example in table schema ``` typescript const tableWithEnums = mysqlTable('enums_test_case', { id: serial('id').primaryKey(), enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), }); ``` ================================================ FILE: changelogs/drizzle-orm-mysql/0.15.2.md ================================================ # drizzle-orm-mysql 0.15.2 Internal release ================================================ FILE: changelogs/drizzle-orm-mysql/0.15.3.md ================================================ # drizzle-orm-mysql 0.15.3 Internal release ================================================ FILE: changelogs/drizzle-orm-mysql/0.16.0.md ================================================ # drizzle-orm-mysql 0.16.0 - Bump all packages to 0.16.0 ================================================ FILE: changelogs/drizzle-orm-mysql/0.16.1.md ================================================ # drizzle-orm-mysql 0.16.1 - Add possibility to define database custom data types Example usage: ```typescript const customText = customType<{ data: string }>({ dataType() { return 'text'; }, }); const usersTable = mysqlTable('users', { name: customText('name').notNull(), }); ``` For more examples please check [docs](https://github.com/drizzle-team/drizzle-orm/blob/main/docs/custom-types.lite.md) ================================================ FILE: changelogs/drizzle-orm-mysql/0.16.2.md ================================================ # drizzle-orm-mysql 0.16.2 - Fix peer dependency error for >=0.16 drizzle packages ================================================ FILE: changelogs/drizzle-orm-pg/0.12.0-beta.40.md ================================================ # drizzle-orm-pg 0.12.0-beta.40 - Added prepared statements and placeholders support. - Refactored `.select().fields()` to allow fields from joined tables and nested objects structure, removed partial selects from joins. - Allowed passing query builders to `db.execute`. - Optimized INSERT query generation for single values by skipping columns without values. - Exposed `table` property from index config. - Removed testing utils. ================================================ FILE: changelogs/drizzle-orm-pg/0.13.0.md ================================================ # drizzle-orm-pg 0.13.0 - Release 🎉 ================================================ FILE: changelogs/drizzle-orm-pg/0.13.1.md ================================================ # drizzle-orm-pg 0.13.1 - Implemented node-pg prepared statements usage via adding `name` argument to `.prepare()` method. ================================================ FILE: changelogs/drizzle-orm-pg/0.13.2.md ================================================ # drizzle-orm-pg 0.13.2 - Fix prepared statements usage. ================================================ FILE: changelogs/drizzle-orm-pg/0.13.3.md ================================================ # drizzle-orm-pg 0.13.3 - Implemented NeonDB serverless driver support. - (internal) Added `session.all()` and `session.values()` methods. ================================================ FILE: changelogs/drizzle-orm-pg/0.13.4.md ================================================ # drizzle-orm-pg 0.13.4 - Fixed types for IndexBuilder. ================================================ FILE: changelogs/drizzle-orm-pg/0.14.0.md ================================================ # drizzle-orm-pg 0.14.0 - Separated migrations functionality to a separate import: ```typescript import { migrate } from 'drizzle-orm-pg/node/migrate'; ``` - Replaced `await new PgConnector(client).connect()` with `drizzle(client)`. - `import { PgConnector } from 'drizzle-orm-pg` -> `import { drizzle } from 'drizzle-orm-pg/node`. ================================================ FILE: changelogs/drizzle-orm-pg/0.14.1.md ================================================ # drizzle-orm-pg 0.14.1 - Bumped everything to 0.14.1. ================================================ FILE: changelogs/drizzle-orm-pg/0.14.2.md ================================================ # drizzle-orm-pg 0.14.2 - Bumped everything to 0.14.2 ================================================ FILE: changelogs/drizzle-orm-pg/0.14.3.md ================================================ # drizzle-orm-pg 0.14.3 - Fixed `.onConflict` statement query builder. In previous versions target column was mapped together with table name - Added documentation examples for `onConflict` - Added documentation examples for returning statements for insert/update/delete - Add more tests for `onConflict` query builder ================================================ FILE: changelogs/drizzle-orm-pg/0.14.4.md ================================================ # drizzle-orm-pg 0.14.4 - Fill author field in package.json ================================================ FILE: changelogs/drizzle-orm-pg/0.15.0.md ================================================ # drizzle-orm-pg 0.15.0 - Set `notNull` to `true` in runtime, when `.primaryKey()` function was used in `ColumnBuilder` - Set `no action` for `OnDelete` and `OnUpdate` in runtime by default - Add internal version for ORM api - Index name now becomes optional. You can write either `index('usersNameIdx')` or `index()`. In last case, drizzle will generate index name automatically based on table and column index was created on ## Breaking changes `foreignKey()` function api changes. Previosuly you need to pass callback function with table columns for FK. Right now no need for callback, just object with data for FK #### Before ```typescript export const usersTable = pgTable( 'users_table', { id: serial('id').primaryKey(), uuid: uuid('uuid').defaultRandom().notNull(), homeCity: integer('home_city').notNull() }, (users) => ({ // foreignKey had a callback as param usersCityFK: foreignKey(() => ({ columns: [users.homeCity], foreignColumns: [cities.id] })), }), ); ``` #### Now ```typescript export const usersTable = pgTable( 'users_table', { id: serial('id').primaryKey(), uuid: uuid('uuid').defaultRandom().notNull(), homeCity: integer('home_city').notNull() }, (users) => ({ // foreignKey doesn't have a callback as param usersCityFK: foreignKey({ columns: [users.homeCity], foreignColumns: [cities.id] }), }), ); ``` ================================================ FILE: changelogs/drizzle-orm-pg/0.15.1.md ================================================ # drizzle-orm-pg 0.15.1 Add support for schemas -> [PostgreSQL schemas](https://www.postgresql.org/docs/current/ddl-schemas.html) --- Drizzle won't append any schema before table definition by default. So if your tables are in `public` schema drizzle generate -> `select * from "users"` But if you will specify any custom schema you want, then drizzle will generate -> `select * from "custom_schema"."users"` > **Warning** > If you will have tables with same names in different schemas then drizzle will respond with `never[]` error in result types and error from database > > In this case you may use [alias syntax](https://github.com/drizzle-team/drizzle-orm/tree/main/drizzle-orm-pg#join-aliases-and-self-joins) --- Usage example ```typescript // Table in default schema const publicUsersTable = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: jsonb('jsonb'), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); // Table in custom schema const mySchema = pgSchema('mySchema'); const usersTable = mySchema('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: jsonb('jsonb'), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); ``` ================================================ FILE: changelogs/drizzle-orm-pg/0.15.2.md ================================================ # drizzle-orm-pg 0.15.2 Internal release ================================================ FILE: changelogs/drizzle-orm-pg/0.15.3.md ================================================ # drizzle-orm-pg 0.15.3 Internal release ================================================ FILE: changelogs/drizzle-orm-pg/0.16.0.md ================================================ # drizzle-orm-pg 0.16.0 - Implemented [postgres.js](https://github.com/porsager/postgres) driver support ([docs](/drizzle-orm-pg/src/postgres-js/README.md)) ================================================ FILE: changelogs/drizzle-orm-pg/0.16.1.md ================================================ # drizzle-orm-pg 0.16.1 - Fix documentation links ================================================ FILE: changelogs/drizzle-orm-pg/0.16.2.md ================================================ - Add possibility to define database custom data types Example usage: ```typescript const customText = customType<{ data: string }>({ dataType() { return 'text'; }, }); const usersTable = pgTable('users', { name: customText('name').notNull(), }); ``` For more examples please check [docs](https://github.com/drizzle-team/drizzle-orm/blob/main/docs/custom-types.lite.md) ================================================ FILE: changelogs/drizzle-orm-pg/0.16.3.md ================================================ # drizzle-orm-pg 0.16.3 - Fix peer dependency error for >=0.16 drizzle packages ================================================ FILE: changelogs/drizzle-orm-sqlite/0.12.0-beta.17.md ================================================ # drizzle-orm-sqlite 0.12.0-beta.17 - Refactored `.select().fields()` to allow fields from joined tables and nested objects structure, removed partial selects from joins. - Replaced `.execute()` in query builders and prepared statements with `.run()`, `.all()`, `.get()`, `.values()`. ================================================ FILE: changelogs/drizzle-orm-sqlite/0.12.0-beta.18.md ================================================ # drizzle-orm-sqlite 0.12.0-beta.18 - Updated `better-sqlite3` and `@types/better-sqlite3` peer dependency from `<8` to `<9`. ================================================ FILE: changelogs/drizzle-orm-sqlite/0.12.0-beta.19.md ================================================ # drizzle-orm-sqlite 0.12.0-beta.19 - Fix bug with running migrations. `Error: SqliteError: near "SCHEMA": syntax error` was fixed ================================================ FILE: changelogs/drizzle-orm-sqlite/0.12.0-beta.20.md ================================================ # drizzle-orm-sqlite 0.12.0-beta.20 - Fix bug with running migrations for async driver. `Error: SqliteError: near "SCHEMA": syntax error` was fixed - Fix `Statement does not return any data - use run()` error, when no fields were provided to prepared statement ================================================ FILE: changelogs/drizzle-orm-sqlite/0.12.0-beta.21.md ================================================ # drizzle-orm-sqlite 0.12.0-beta.21 - Fixed `db.all` logic for all drivers. - Allowed passing query builders to raw query execution methods. - Optimized INSERT query generation for single values by skipping columns without values. - Exposed `table` property from index config. ================================================ FILE: changelogs/drizzle-orm-sqlite/0.13.0.md ================================================ # drizzle-orm-sqlite 0.13.0 - Release 🎉 ================================================ FILE: changelogs/drizzle-orm-sqlite/0.14.1.md ================================================ # drizzle-orm-sqlite 0.14.1 - Separated migrations functionality to a separate import: ```typescript import { migrate } from 'drizzle-orm-sqlite/better-sqlite3/migrate'; ``` - Replaced `await new SQLiteConnector(client).connect()` with `drizzle(client)`. - `import { SQLiteConnector } from 'drizzle-orm-sqlite` -> `import { drizzle } from 'drizzle-orm-pg/better-sqlite3`. ================================================ FILE: changelogs/drizzle-orm-sqlite/0.14.2.md ================================================ # drizzle-orm-sqlite 0.14.2 - Bumped everything to 0.14.2 ================================================ FILE: changelogs/drizzle-orm-sqlite/0.14.3.md ================================================ # drizzle-orm-sqlite 0.14.3 - `RangeError: The supplied SQL string contains more than one statement` error on migrations was fixed Created `.exec()` method for session, that will run query without prepared statments - Fix `defaultNow()` method query generation by adding missin `"()"`. Previously default value was generated as ```sql cast((julianday('now') - 2440587.5)*86400000 as integer) ``` Currently default value looks like ```sql (cast((julianday('now') - 2440587.5)*86400000 as integer)) ``` - Create test cases for both issues ================================================ FILE: changelogs/drizzle-orm-sqlite/0.14.4.md ================================================ # drizzle-orm-sqlite 0.14.4 - Fix adding autoincrement to `drizzle-kit` migrations ================================================ FILE: changelogs/drizzle-orm-sqlite/0.14.5.md ================================================ # drizzle-orm-sqlite 0.14.5 - Remove upper bound restriction from `@cloudflare/workers-types` peer dependency - Fill author field in package.json ================================================ FILE: changelogs/drizzle-orm-sqlite/0.15.0.md ================================================ # drizzle-orm-sqlite 0.15.0 - Add composite PK's on table schema definition #### Usage example ```typescript const pkExample = sqliteTable('pk_example', { id: integer('id'), name: text('name').notNull(), email: text('email').notNull(), }, (table) => ({ compositePk: primaryKey(table.id, table.name) })); ``` ================================================ FILE: changelogs/drizzle-orm-sqlite/0.15.2.md ================================================ # drizzle-orm-sqlite 0.15.2 Internal release ================================================ FILE: changelogs/drizzle-orm-sqlite/0.15.3.md ================================================ # drizzle-orm-sqlite 0.15.3 Internal release ================================================ FILE: changelogs/drizzle-orm-sqlite/0.15.4.md ================================================ # drizzle-orm-sqlite 0.15.4 - Implemented [sql.js](https://github.com/sql-js/sql.js/) driver support (allows you to use SQLite in the browser) ================================================ FILE: changelogs/drizzle-orm-sqlite/0.16.0.md ================================================ # drizzle-orm-sqlite 0.16.0 - Bump all packages to 0.16.0 ================================================ FILE: changelogs/drizzle-orm-sqlite/0.16.1.md ================================================ # drizzle-orm-sqlite 0.16.1 - Fix peer dependency error for >=0.16 drizzle packages ================================================ FILE: changelogs/drizzle-seed/0.1.1.md ================================================ # Initial Release > [!NOTE] > `drizzle-seed` can only be used with `drizzle-orm@0.36.4` or higher. Versions lower than this may work at runtime but could have type issues and identity column issues, as this patch was introduced in `drizzle-orm@0.36.4` ## Full Reference The full API reference and package overview can be found in our [official documentation](https://orm.drizzle.team/docs/seed-overview) ## Basic Usage In this example we will create 10 users with random names and ids ```ts {12} import { pgTable, integer, text } from "drizzle-orm/pg-core"; import { drizzle } from "drizzle-orm/node-postgres"; import { seed } from "drizzle-seed"; const users = pgTable("users", { id: integer().primaryKey(), name: text().notNull(), }); async function main() { const db = drizzle(process.env.DATABASE_URL!); await seed(db, { users }); } main(); ``` ## Options **`count`** By default, the `seed` function will create 10 entities. However, if you need more for your tests, you can specify this in the seed options object ```ts await seed(db, schema, { count: 1000 }); ``` **`seed`** If you need a seed to generate a different set of values for all subsequent runs, you can define a different number in the `seed` option. Any new number will generate a unique set of values ```ts await seed(db, schema, { seed: 12345 }); ``` The full API reference and package overview can be found in our [official documentation](https://orm.drizzle.team/docs/seed-overview) ================================================ FILE: changelogs/drizzle-seed/0.1.2.md ================================================ - Fixed: [[BUG]: drizzle-seed reset fails without @electric-sql/pglite installed](https://github.com/drizzle-team/drizzle-orm/issues/3603) - Fixed: [[BUG]: TypeScript type error in drizzle-seed with schema passed to drizzle in IDE](https://github.com/drizzle-team/drizzle-orm/issues/3599) ================================================ FILE: changelogs/drizzle-seed/0.1.3.md ================================================ ## Bug fixes - https://github.com/drizzle-team/drizzle-orm/issues/3644 - seeding a table with columns that have .default(sql``) will result in an error ## Features - added support for postgres uuid columns Example ```ts import { pgTable, uuid } from "drizzle-orm/pg-core"; import { drizzle } from "drizzle-orm/node-postgres"; import { seed } from "drizzle-seed"; const users = pgTable("users", { uuid: uuid("uuid"), }); async function main() { const db = drizzle(process.env.DATABASE_URL!); // You can let it seed automatically // await seed(db, { users }); // Alternatively, you can manually specify the generator in refine. await seed(db, { users }, { count: 1000 }).refine((funcs) => ({ users: { columns: { uuid: funcs.uuid(), }, }, })); } main(); ``` ## - added support for postgres array columns Example ```ts import { pgTable, integer, text, varchar } from "drizzle-orm/pg-core"; import { drizzle } from "drizzle-orm/node-postgres"; import { seed } from "drizzle-seed"; const users = pgTable("users", { id: integer().primaryKey(), name: text().notNull(), phone_numbers: varchar({ length: 256 }).array(), }); ``` You can specify the `arraySize` parameter in generator options, like `funcs.phoneNumber({ arraySize: 3 })`, to generate 1D arrays. ```ts async function main() { const db = drizzle(process.env.DATABASE_URL!); await seed(db, { users }, { count: 1000 }).refine((funcs) => ({ users: { columns: { phone_numbers: funcs.phoneNumber({ arraySize: 3 }), }, }, })); } main(); ``` Alternatively, you can let it seed automatically, and it will handle arrays of any dimension. ```ts async function main() { const db = drizzle(process.env.DATABASE_URL!); await seed(db, { users }); } main(); ``` ## - added support for cyclic tables You can now seed tables with cyclic relations. ```ts import type { AnyPgColumn } from "drizzle-orm/pg-core"; import { foreignKey, integer, pgTable, serial, varchar, } from "drizzle-orm/pg-core"; export const modelTable = pgTable( "model", { id: serial().primaryKey(), name: varchar().notNull(), defaultImageId: integer(), }, (t) => [ foreignKey({ columns: [t.defaultImageId], foreignColumns: [modelImageTable.id], }), ] ); export const modelImageTable = pgTable("model_image", { id: serial().primaryKey(), url: varchar().notNull(), caption: varchar(), modelId: integer() .notNull() .references((): AnyPgColumn => modelTable.id), }); async function main() { const db = drizzle(process.env.DATABASE_URL!); await seed(db, { modelTable, modelImageTable }); } main(); ``` ================================================ FILE: changelogs/drizzle-seed/0.2.1.md ================================================ ## API updates We are introducing a new parameter, `version`, to the `seed` function options. This parameter, which controls generator versioning, has been added to make it easier to update deterministic generators in the future. Since values should remain consistent after each regeneration, it is crucial to provide a well-designed API for gradual updates ```ts await seed(db, schema, { version: '2' }); ``` #### Example: > This is not an actual API change; it is just an example of how we will proceed with `drizzle-seed` versioning For example, `lastName` generator was changed, and new version, `V2`, of this generator became available. Later, `firstName` generator was changed, making `V3` version of this generator available. | | `V1` | `V2` | `V3(latest)` | | :--------------: | :--------------: | :-------------: | :--------------: | | **LastNameGen** | `LastNameGenV1` | `LastNameGenV2` | | | **FirstNameGen** | `FirstNameGenV1` | | `FirstNameGenV3` | ##### Use the `firstName` generator of version 3 and the `lastName` generator of version 2 ```ts await seed(db, schema); ``` If you are not ready to use latest generator version right away, you can specify max version to use ##### Use the `firstName` generator of version 1 and the `lastName` generator of version 2 ```ts await seed(db, schema, { version: '2' }); ``` ##### Use the `firstName` generator of version 1 and the `lastName` generator of version 1. ```ts await seed(db, schema, { version: '1' }); ``` Each update with breaking changes for generators will be documented on our docs and in release notes, explaining which version you should use, if you are not ready to upgrade the way generators works ## Breaking changes ### `interval` unique generator was changed and upgraded to v2 ```ts await seed(db, { table }).refine((f) => ({ table: { columns: { // this function usage will output different values with the same `seed` number from previous version column1: f.interval({ isUnique: true }), } } })) ``` **Reason for upgrade** An older version of the generator could produce intervals like `1 minute 60 seconds` and `2 minutes 0 seconds`, treating them as distinct intervals. However, when the `1 minute 60 seconds` interval is inserted into a PostgreSQL database, it is automatically converted to `2 minutes 0 seconds`. As a result, attempting to insert the `2 minutes 0 seconds` interval into a unique column afterwards will cause an error **Usage** ```ts await seed(db, schema); // or explicit await seed(db, schema, { version: '2' }); ``` **Switch to the old version** ```ts await seed(db, schema, { version: '1' }); ``` ### `string` generators were changed and upgraded to v2 ```ts await seed(db, { table }).refine((f) => ({ table: { columns: { // this function will output different values with the same `seed` number from previous version column1: f.string(), } } })) ``` **Reason to upgrade** Ability to generate a unique string based on the length of the text column (e.g., `varchar(20)`) #### PostgreSQL changes Default generators for `text`, `varchar`, `char` will output different values with the same `seed` number from previous version. ```ts // schema.ts import * as p from 'drizzle-orm/pg-core' export const table = p.pgTable('table', { column1: p.text(), column2: p.varchar(), column3: p.char() }); // index.ts ... // this will be affected with new changes await seed(db, { table }); ``` **Switch to the old version** ```ts await seed(db, schema, { version: '' }); ``` #### MySQL changes Default generators for `text`, `char`, `varchar`, `binary`, `varbinary` will output different values with the same `seed` number. ```ts // schema.ts import * as p from 'drizzle-orm/mysql-core' export const table = p.mysqlTable('table', { column1: p.text(), column2: p.char(), column3: p.varchar({ length: 256 }), column4: p.binary(), column5: p.varbinary({ length: 256 }), }); // index.ts ... // this will be affected with new changes await seed(db, {table}) ``` **Switch to the old version** ```ts await seed(db, schema, { version: '1' }); ``` #### SQLite changes Default generators for `text`, `numeric`, `blob`, `blobbuffer` will output different values with the same `seed` number. ```ts // schema.ts import * as p from 'drizzle-orm/sqlite-core' export const table = p.sqliteTable('table', { column1: p.text(), column2: p.numeric(), column3: p.blob({ mode:'buffer' }), column4: p.blob(), }); // index.ts ... // this will be affected with new changes await seed(db, { table }) ``` ## Bug fixes - Seeding a table with a foreign key referencing another table, without including the second table in the schema, will cause the seeding process to get stuck - [[BUG]: seeding postgresql char column doesn't respect length option](https://github.com/drizzle-team/drizzle-orm/issues/3774) ================================================ FILE: changelogs/drizzle-seed/0.3.0.md ================================================ # New features ## Drizzle Relations support The `seed` function can now accept Drizzle Relations objects and treat them as foreign key constraints ```ts // schema.ts import { integer, serial, text, pgTable } from 'drizzle-orm/pg-core'; import { relations } from 'drizzle-orm'; export const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); export const usersRelations = relations(users, ({ many }) => ({ posts: many(posts), })); export const posts = pgTable('posts', { id: serial('id').primaryKey(), content: text('content').notNull(), authorId: integer('author_id').notNull(), }); export const postsRelations = relations(posts, ({ one }) => ({ author: one(users, { fields: [posts.authorId], references: [users.id] }), })); ``` ```ts // index.ts import { seed } from "drizzle-seed"; import * as schema from './schema.ts' async function main() { const db = drizzle(process.env.DATABASE_URL!); await seed(db, schema); } main(); ``` ================================================ FILE: changelogs/drizzle-seed/0.3.1.md ================================================ ## Bug fixes - Combining a reference in a table schema (foreign key constraint) with a one-to-many relation for the same two tables defined in the constraint causes the seeder to duplicate these relations and enter an infinite loop. Example: ```ts // schema.ts import { integer, pgTable, text } from "drizzle-orm/pg-core"; import { relations } from "drizzle-orm/relations"; export const users = pgTable("users", { id: integer().primaryKey(), name: text(), email: text(), }); export const posts = pgTable("posts", { id: integer().primaryKey(), content: text(), userId: integer().references(() => users.id), }); export const postsRelation = relations(posts, ({ one }) => ({ user: one(users, { fields: [posts.userId], references: [users.id], }), })); ``` Now, seeding with the schema above will trigger a warning. ``` You are providing a one-to-many relation between the 'users' and 'posts' tables, while the 'posts' table object already has foreign key constraint in the schema referencing 'users' table. In this case, the foreign key constraint will be used. ``` ================================================ FILE: changelogs/drizzle-typebox/0.1.0.md ================================================ # drizzle-typebox 0.1.0 - Initial release ================================================ FILE: changelogs/drizzle-typebox/0.1.1.md ================================================ - 🐛 Fixed imports in ESM projects ================================================ FILE: changelogs/drizzle-typebox/0.2.0.md ================================================ This version fully updates `drizzle-typebox` integration and makes sure it's compatible with newer typebox versions # Breaking Changes > You must also have Drizzle ORM v0.38.0 or greater and Typebox v0.34.8 or greater installed. - When refining a field, if a schema is provided instead of a callback function, it will ignore the field's nullability and optional status. - Some data types have more specific schemas for improved validation # Improvements Thanks to @L-Mario564 for making more updates than we expected to be shipped in this release. We'll copy his message from a PR regarding improvements made in this release: - Output for all packages are now unminified, makes exploring the compiled code easier when published to npm. - Smaller footprint. Previously, we imported the column types at runtime for each dialect, meaning that for example, if you're just using Postgres then you'd likely only have drizzle-orm and drizzle-orm/pg-core in the build output of your app; however, these packages imported all dialects which could lead to mysql-core and sqlite-core being bundled as well even if they're unused in your app. This is now fixed. - Slight performance gain. To determine the column data type we used the is function which performs a few checks to ensure the column data type matches. This was slow, as these checks would pile up every quickly when comparing all data types for many fields in a table/view. The easier and faster alternative is to simply go off of the column's columnType property. # New features - `createSelectSchema` function now also accepts views and enums. ```ts import { pgEnum } from 'drizzle-orm/pg-core'; import { createSelectSchema } from 'drizzle-typebox'; import { Value } from '@sinclair/typebox/value'; const roles = pgEnum('roles', ['admin', 'basic']); const rolesSchema = createSelectSchema(roles); const parsed: 'admin' | 'basic' = Value.Parse(rolesSchema, ...); const usersView = pgView('users_view').as((qb) => qb.select().from(users).where(gt(users.age, 18))); const usersViewSchema = createSelectSchema(usersView); const parsed: { id: number; name: string; age: number } = Value.Parse(usersViewSchema, ...); ``` - New function: `createUpdateSchema`, for use in updating queries. ```ts copy import { pgTable, text, integer } from 'drizzle-orm/pg-core'; import { createUpdateSchema } from 'drizzle-typebox'; import { Value } from '@sinclair/typebox/value'; const users = pgTable('users', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer().notNull() }); const userUpdateSchema = createUpdateSchema(users); const user = { id: 5, name: 'John' }; const parsed: { name?: string | undefined, age?: number | undefined } = Value.Parse(userUpdateSchema, user); // Error: `id` is a generated column, it can't be updated const user = { age: 35 }; const parsed: { name?: string | undefined, age?: number | undefined } = Value.Parse(userUpdateSchema, user); // Will parse successfully await db.update(users).set(parsed).where(eq(users.name, 'Jane')); ``` - New function: `createSchemaFactory`, to provide more advanced options and to avoid bloating the parameters of the other schema functions ```ts copy import { pgTable, text, integer } from 'drizzle-orm/pg-core'; import { createSchemaFactory } from 'drizzle-typebox'; import { t } from 'elysia'; // Extended Typebox instance const users = pgTable('users', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer().notNull() }); const { createInsertSchema } = createSchemaFactory({ typeboxInstance: t }); const userInsertSchema = createInsertSchema(users, { // We can now use the extended instance name: (schema) => t.Number({ ...schema }, { error: '`name` must be a string' }) }); ``` - Full support for PG arrays ```ts pg.dataType().array(...); // Schema Type.Array(baseDataTypeSchema, { minItems: size, maxItems: size }); ``` ================================================ FILE: changelogs/drizzle-typebox/0.2.1.md ================================================ # Added support for SingleStore dialect ```ts import { singlestoreTable, text, int } from 'drizzle-orm/singlestore-core'; import { createSelectSchema } from 'drizzle-typebox'; import { Value } from '@sinclair/typebox/value'; const users = singlestoreTable('users', { id: int().primaryKey(), name: text().notNull(), age: int().notNull() }); const userSelectSchema = createSelectSchema(users); const rows = await db.select({ id: users.id, name: users.name }).from(users).limit(1); const parsed: { id: number; name: string; age: number } = Value.Parse(userSelectSchema, rows[0]); // Error: `age` is not returned in the above query const rows = await db.select().from(users).limit(1); const parsed: { id: number; name: string; age: number } = Value.Parse(userSelectSchema, rows[0]); // Will parse successfully ``` # Bug fixes - [[BUG]: drizzle-typebox infers integer() as TString](https://github.com/drizzle-team/drizzle-orm/issues/3756) ================================================ FILE: changelogs/drizzle-typebox/0.3.0.md ================================================ # Bug fixed and GitHub issue closed - [[BUG]: The inferred type of X cannot be named without a reference to ../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732) - [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869) ================================================ FILE: changelogs/drizzle-typebox/0.3.1.md ================================================ - Exports all types, including internal ones to avoid type issues. - Properly handle infinitely recursive types in custom JSON column types. thanks @L-Mario564 ================================================ FILE: changelogs/drizzle-typebox/0.3.2.md ================================================ - Functions `getColumns`, `handleColumns` and `handleEnum` were exported from `drizzle-typebox` ================================================ FILE: changelogs/drizzle-typebox/0.3.3.md ================================================ - TS language server performance improvements ================================================ FILE: changelogs/drizzle-valibot/0.1.0.md ================================================ # drizzle-valibot 0.1.0 - Initial release ================================================ FILE: changelogs/drizzle-valibot/0.1.1.md ================================================ - 🐛 Fixed imports in ESM projects ================================================ FILE: changelogs/drizzle-valibot/0.2.0.md ================================================ Use updated types introduced in valibot `0.20.0`: - `enumType` -> `picklist` - `nullType` -> `null_` Minimum supported valibot version is now `0.20.0`. ================================================ FILE: changelogs/drizzle-valibot/0.3.0.md ================================================ This version fully updates `drizzle-valibot` integration and makes sure it's compatible with newer valibot versions # Breaking Changes > You must also have Drizzle ORM v0.38.0 or greater and Valibot v1.0.0-beta.7 or greater installed. - When refining a field, if a schema is provided instead of a callback function, it will ignore the field's nullability and optional status. - Some data types have more specific schemas for improved validation # Improvements Thanks to @L-Mario564 for making more updates than we expected to be shipped in this release. We'll copy his message from a PR regarding improvements made in this release: - Output for all packages are now unminified, makes exploring the compiled code easier when published to npm. - Smaller footprint. Previously, we imported the column types at runtime for each dialect, meaning that for example, if you're just using Postgres then you'd likely only have drizzle-orm and drizzle-orm/pg-core in the build output of your app; however, these packages imported all dialects which could lead to mysql-core and sqlite-core being bundled as well even if they're unused in your app. This is now fixed. - Slight performance gain. To determine the column data type we used the is function which performs a few checks to ensure the column data type matches. This was slow, as these checks would pile up every quickly when comparing all data types for many fields in a table/view. The easier and faster alternative is to simply go off of the column's columnType property. - Some changes had to be made at the type level in the ORM package for better compatibility with drizzle-valibot. # New features - `createSelectSchema` function now also accepts views and enums. ```ts copy import { pgEnum } from 'drizzle-orm/pg-core'; import { createSelectSchema } from 'drizzle-valibot'; import { parse } from 'valibot'; const roles = pgEnum('roles', ['admin', 'basic']); const rolesSchema = createSelectSchema(roles); const parsed: 'admin' | 'basic' = parse(rolesSchema, ...); const usersView = pgView('users_view').as((qb) => qb.select().from(users).where(gt(users.age, 18))); const usersViewSchema = createSelectSchema(usersView); const parsed: { id: number; name: string; age: number } = parse(usersViewSchema, ...); ``` - New function: `createUpdateSchema`, for use in updating queries. ```ts copy import { pgTable, text, integer } from 'drizzle-orm/pg-core'; import { createUpdateSchema } from 'drizzle-valibot'; import { parse } from 'valibot'; const users = pgTable('users', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer().notNull() }); const userUpdateSchema = createUpdateSchema(users); const user = { id: 5, name: 'John' }; const parsed: { name?: string | undefined, age?: number | undefined } = parse(userUpdateSchema, user); // Error: `id` is a generated column, it can't be updated const user = { age: 35 }; const parsed: { name?: string | undefined, age?: number | undefined } = parse(userUpdateSchema, user); // Will parse successfully await db.update(users).set(parsed).where(eq(users.name, 'Jane')); ``` - Full support for PG arrays ```ts pg.dataType().array(...); // Schema z.array(baseDataTypeSchema).length(size); ``` ================================================ FILE: changelogs/drizzle-valibot/0.3.1.md ================================================ # Added support for SingleStore dialect ```ts import { singlestoreTable, text, int } from 'drizzle-orm/singlestore-core'; import { createSelectSchema } from 'drizzle-valibot'; import { parse } from 'valibot'; const users = singlestoreTable('users', { id: int().primaryKey(), name: text().notNull(), age: int().notNull() }); const userSelectSchema = createSelectSchema(users); const rows = await db.select({ id: users.id, name: users.name }).from(users).limit(1); const parsed: { id: number; name: string; age: number } = parse(userSelectSchema, rows[0]); // Error: `age` is not returned in the above query const rows = await db.select().from(users).limit(1); const parsed: { id: number; name: string; age: number } = parse(userSelectSchema, rows[0]); // Will parse successfully ``` # Bug fixes - [[BUG]: drizzle-valibot throws Type instantiation is excessively deep and possibly infinite. for refinements](https://github.com/drizzle-team/drizzle-orm/issues/3751) ================================================ FILE: changelogs/drizzle-valibot/0.4.0.md ================================================ # Bug fixed and GitHub issue closed - [[BUG]: The inferred type of X cannot be named without a reference to ../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732) - [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869) ================================================ FILE: changelogs/drizzle-valibot/0.4.1.md ================================================ - Exports all types, including internal ones to avoid type issues. - Properly handle infinitely recursive types in custom JSON column types. thanks @L-Mario564 ================================================ FILE: changelogs/drizzle-valibot/0.4.2.md ================================================ - TS language server performance improvements ================================================ FILE: changelogs/drizzle-zod/0.1.0.md ================================================ # drizzle-zod 0.1.0 - Initial release - Added insert schema generation for Postgres ================================================ FILE: changelogs/drizzle-zod/0.1.1.md ================================================ # drizzle-zod 0.1.1 Internal release ================================================ FILE: changelogs/drizzle-zod/0.1.2.md ================================================ # drizzle-zod 0.1.2 - Fix peer dependency error for >=0.16 drizzle packages ================================================ FILE: changelogs/drizzle-zod/0.1.3.md ================================================ # drizzle-zod 0.1.3 - Fix import for 0.17 drizzle-orm ================================================ FILE: changelogs/drizzle-zod/0.1.4.md ================================================ - 🐛 Updated logic for drizzle-orm 0.23.2 ================================================ FILE: changelogs/drizzle-zod/0.2.0.md ================================================ - 🎉 Added select schema support - 🎉 Added SQLite support - ❗ Changed imports from `drizzle-zod/pg` to `drizzle-zod` for all dialects ================================================ FILE: changelogs/drizzle-zod/0.2.1.md ================================================ - 🐛 Fix insert schemas generation ================================================ FILE: changelogs/drizzle-zod/0.3.0.md ================================================ - 🎉 Added MySQL support ================================================ FILE: changelogs/drizzle-zod/0.3.1.md ================================================ - Fix drizzle-zod default refine type in [479](https://github.com/drizzle-team/drizzle-orm/pull/479) - thanks @hugo-clemente ❤️ ================================================ FILE: changelogs/drizzle-zod/0.3.2.md ================================================ - 🐛 Fixed a bug in schema types inference ================================================ FILE: changelogs/drizzle-zod/0.4.0.md ================================================ # ESM support - 🎉 Added ESM support! You can now use `drizzle-zod` in both ESM and CJS environments. - 🎉 Added code minification and source maps. ================================================ FILE: changelogs/drizzle-zod/0.4.1.md ================================================ - 🐛 Add "exports" field to package.json ================================================ FILE: changelogs/drizzle-zod/0.4.2.md ================================================ - 🐛 Fixed autoincrement columns not being optional in drizzle-zod (#652) - 🐛 Added length check for text fields in drizzle-zod (#658) ================================================ FILE: changelogs/drizzle-zod/0.4.3.md ================================================ - 🎉 Added PgDateString to drizzle-zod (#665) ================================================ FILE: changelogs/drizzle-zod/0.4.4.md ================================================ - Fixed drizzle-zod not enforcing string lengths (#691) by @TiltedToast ================================================ FILE: changelogs/drizzle-zod/0.5.0.md ================================================ - Added compatibility with Drizzle 0.28.0 ================================================ FILE: changelogs/drizzle-zod/0.5.1.md ================================================ - 🐛 Fixed imports in ESM projects ================================================ FILE: changelogs/drizzle-zod/0.6.0.md ================================================ This version fully updates `drizzle-zod` integration and makes sure it's compatible with newer zod versions # Breaking Changes > You must also have Drizzle ORM v0.38.0 or greater and Zod v3.0.0 or greater installed. - When refining a field, if a schema is provided instead of a callback function, it will ignore the field's nullability and optional status. - Some data types have more specific schemas for improved validation # Improvements Thanks to @L-Mario564 for making more updates than we expected to be shipped in this release. We'll copy his message from a PR regarding improvements made in this release: - Output for all packages are now unminified, makes exploring the compiled code easier when published to npm. - Smaller footprint. Previously, we imported the column types at runtime for each dialect, meaning that for example, if you're just using Postgres then you'd likely only have drizzle-orm and drizzle-orm/pg-core in the build output of your app; however, these packages imported all dialects which could lead to mysql-core and sqlite-core being bundled as well even if they're unused in your app. This is now fixed. - Slight performance gain. To determine the column data type we used the is function which performs a few checks to ensure the column data type matches. This was slow, as these checks would pile up every quickly when comparing all data types for many fields in a table/view. The easier and faster alternative is to simply go off of the column's columnType property. # New features - `createSelectSchema` function now also accepts views and enums. ```ts copy import { pgEnum } from 'drizzle-orm/pg-core'; import { createSelectSchema } from 'drizzle-zod'; const roles = pgEnum('roles', ['admin', 'basic']); const rolesSchema = createSelectSchema(roles); const parsed: 'admin' | 'basic' = rolesSchema.parse(...); const usersView = pgView('users_view').as((qb) => qb.select().from(users).where(gt(users.age, 18))); const usersViewSchema = createSelectSchema(usersView); const parsed: { id: number; name: string; age: number } = usersViewSchema.parse(...); ``` - New function: `createUpdateSchema`, for use in updating queries. ```ts copy import { pgTable, text, integer } from 'drizzle-orm/pg-core'; import { createUpdateSchema } from 'drizzle-zod'; const users = pgTable('users', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer().notNull() }); const userUpdateSchema = createUpdateSchema(users); const user = { id: 5, name: 'John' }; const parsed: { name?: string | undefined, age?: number | undefined } = userUpdateSchema.parse(user); // Error: `id` is a generated column, it can't be updated const user = { age: 35 }; const parsed: { name?: string | undefined, age?: number | undefined } = userUpdateSchema.parse(user); // Will parse successfully await db.update(users).set(parsed).where(eq(users.name, 'Jane')); ``` - New function: `createSchemaFactory`, to provide more advanced options and to avoid bloating the parameters of the other schema functions ```ts copy import { pgTable, text, integer } from 'drizzle-orm/pg-core'; import { createSchemaFactory } from 'drizzle-zod'; import { z } from '@hono/zod-openapi'; // Extended Zod instance const users = pgTable('users', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer().notNull() }); const { createInsertSchema } = createSchemaFactory({ zodInstance: z }); const userInsertSchema = createInsertSchema(users, { // We can now use the extended instance name: (schema) => schema.openapi({ example: 'John' }) }); ``` - Full support for PG arrays ```ts pg.dataType().array(...); // Schema z.array(baseDataTypeSchema).length(size); ``` ================================================ FILE: changelogs/drizzle-zod/0.6.1.md ================================================ # New Features ## Added support for SingleStore dialect ```ts import { singlestoreTable, text, int } from 'drizzle-orm/singlestore-core'; import { createSelectSchema } from 'drizzle-zod'; const users = singlestoreTable('users', { id: int().primaryKey(), name: text().notNull(), age: int().notNull() }); const userSelectSchema = createSelectSchema(users); const rows = await db.select({ id: users.id, name: users.name }).from(users).limit(1); const parsed: { id: number; name: string; age: number } = userSelectSchema.parse(rows[0]); // Error: `age` is not returned in the above query const rows = await db.select().from(users).limit(1); const parsed: { id: number; name: string; age: number } = userSelectSchema.parse(rows[0]); // Will parse successfully ``` # Bug fixes - [[BUG]: refining schema using createSelectSchema is not working with drizzle-kit 0.6.0](https://github.com/drizzle-team/drizzle-orm/issues/3735) - [[BUG]: drizzle-zod inferring types incorrectly](https://github.com/drizzle-team/drizzle-orm/issues/3734) ================================================ FILE: changelogs/drizzle-zod/0.7.0.md ================================================ # Improvements ## Added type coercion support **Use case: Type coercion** ```ts copy import { pgTable, timestamp } from 'drizzle-orm/pg-core'; import { createSchemaFactory } from 'drizzle-zod'; import { z } from 'zod'; const users = pgTable('users', { ..., createdAt: timestamp().notNull() }); const { createInsertSchema } = createSchemaFactory({ // This configuration will only coerce dates. Set `coerce` to `true` to coerce all data types or specify others coerce: { date: true } }); const userInsertSchema = createInsertSchema(users); // The above is the same as this: const userInsertSchema = z.object({ ..., createdAt: z.coerce.date() }); ``` # Bug fixed and GitHub issue closed - [[BUG]: Cannot use schema.coerce on refining drizzle-zod types](https://github.com/drizzle-team/drizzle-orm/issues/3842) - [[FEATURE]: Type Coercion in drizzle-zod](https://github.com/drizzle-team/drizzle-orm/issues/776) - [[BUG]: The inferred type of X cannot be named without a reference to ../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732) - [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869) ================================================ FILE: changelogs/drizzle-zod/0.7.1.md ================================================ ### Bug fixes - [[BUG]: createInsertSchema from drizzle-zod@0.6.1 does not infer types correctly but returns unknown for every value](https://github.com/drizzle-team/drizzle-orm/issues/3907) - [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869) thanks @L-Mario564 ================================================ FILE: changelogs/drizzle-zod/0.8.0.md ================================================ - Support for Zod v4: Starting with this release, `drizzle-zod` now requires Zod v3.25 or later ================================================ FILE: changelogs/drizzle-zod/0.8.1.md ================================================ - Support for Zod v4: Starting with this release, `drizzle-zod` now requires Zod v3.25.1 or later This version was released to resolve several compatibility issues with the `ZodObject` type, which were fixed in `drizzle-orm@0.8.1`, so version `0.8.0` can be skipped ================================================ FILE: changelogs/drizzle-zod/0.8.2.md ================================================ - [[BUG]: drizzle-zod: incorrect inferred types for columns .generatedAlwaysAsIdentity()](https://github.com/drizzle-team/drizzle-orm/issues/4553) ================================================ FILE: changelogs/drizzle-zod/0.8.3.md ================================================ - Update peerDeps for zod ================================================ FILE: changelogs/eslint-plugin-drizzle/0.2.0.md ================================================ # eslint-plugin-drizzle 0.1.0 - Initial release - 2 rules available ================================================ FILE: changelogs/eslint-plugin-drizzle/0.2.1.md ================================================ # eslint-plugin-drizzle 0.2.1 - Update README.md - Change error text message ================================================ FILE: changelogs/eslint-plugin-drizzle/0.2.2.md ================================================ # eslint-plugin-drizzle 0.2.2 - fix: Correct detection of `drizzleObjectName` when it's a nested object ================================================ FILE: changelogs/eslint-plugin-drizzle/0.2.3.md ================================================ # eslint-plugin-drizzle 0.2.3 - Added better context to the suggestion in the error message - fix: Correct detection of `drizzleObjectName` when it's retrieved from or is a function - chore: Refactored duplicate code in `utils/options.ts` into `isDrizzleObjName` function ================================================ FILE: docs/custom-types.lite.md ================================================ # Common way of defining custom types > [!NOTE] > For more advanced documentation about defining custom data types in PostgreSQL and MySQL, please check [`custom-types.md`](custom-types.md). ## Examples Best way to see, how customType definition is working - is to check how existing data types in postgres and mysql could be defined using `customType` function from Drizzle ORM ### Postgres Data Types using `node-postgres` driver --- #### **Serial** ```typescript const customSerial = customType<{ data: number; notNull: true; default: true }>( { dataType() { return 'serial'; }, }, ); ``` #### **Text** ```typescript const customText = customType<{ data: string }>({ dataType() { return 'text'; }, }); ``` #### **Boolean** ```typescript const customBoolean = customType<{ data: boolean }>({ dataType() { return 'boolean'; }, }); ``` #### **Jsonb** ```typescript const customJsonb = (name: string) => customType<{ data: TData; driverData: string }>({ dataType() { return 'jsonb'; }, toDriver(value: TData): string { return JSON.stringify(value); }, })(name); ``` #### **Timestamp** ```typescript const customTimestamp = customType< { data: Date; driverData: string; config: { withTimezone: boolean; precision?: number }; } >({ dataType(config) { const precision = typeof config.precision !== 'undefined' ? ` (${config.precision})` : ''; return `timestamp${precision}${ config.withTimezone ? ' with time zone' : '' }`; }, fromDriver(value: string): Date { return new Date(value); }, }); ``` #### Usage for all types will be same as defined functions in Drizzle ORM ```typescript const usersTable = pgTable('users', { id: customSerial('id').primaryKey(), name: customText('name').notNull(), verified: customBoolean('verified').notNull().default(false), jsonb: customJsonb('jsonb'), createdAt: customTimestamp('created_at', { withTimezone: true }).notNull() .default(sql`now()`), }); ``` ### MySql Data Types using `mysql2` driver --- #### **Serial** ```typescript const customSerial = customType<{ data: number; notNull: true; default: true }>( { dataType() { return 'serial'; }, }, ); ``` #### **Text** ```typescript const customText = customType<{ data: string }>({ dataType() { return 'text'; }, }); ``` #### **Boolean** ```typescript const customBoolean = customType<{ data: boolean }>({ dataType() { return 'boolean'; }, fromDriver(value) { if (typeof value === 'boolean') { return value; } return value === 1; }, }); ``` #### **Json** ```typescript const customJson = (name: string) => customType<{ data: TData; driverData: string }>({ dataType() { return 'json'; }, toDriver(value: TData): string { return JSON.stringify(value); }, })(name); ``` #### **Timestamp** ```typescript const customTimestamp = customType< { data: Date; driverData: string; config: { fsp: number } } >({ dataType(config) { const precision = typeof config.fsp !== 'undefined' ? ` (${config.fsp})` : ''; return `timestamp${precision}`; }, fromDriver(value: string): Date { return new Date(value); }, }); ``` #### Usage for all types will be same as defined functions in Drizzle ORM ```typescript const usersTable = mysqlTable('userstest', { id: customSerial('id').primaryKey(), name: customText('name').notNull(), verified: customBoolean('verified').notNull().default(false), jsonb: customJson('jsonb'), createdAt: customTimestamp('created_at', { fsp: 2 }).notNull().default( sql`now()`, ), }); ``` You can check ts-doc for types and param definition ````typescript export type CustomTypeValues = { /** * Required type for custom column, that will infer proper type model * * Examples: * * If you want your column to be `string` type after selecting/or on inserting - use `data: string`. Like `text`, `varchar` * * If you want your column to be `number` type after selecting/or on inserting - use `data: number`. Like `integer` */ data: unknown; /** * Type helper, that represents what type database driver is accepting for specific database data type */ driverData?: unknown; /** * What config type should be used for {@link CustomTypeParams} `dataType` generation */ config?: unknown; /** * Whether the config argument should be required or not * @default false */ configRequired?: boolean; /** * If your custom data type should be notNull by default you can use `notNull: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ notNull?: boolean; /** * If your custom data type has default you can use `default: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ default?: boolean; }; export interface CustomTypeParams> { /** * Database data type string representation, that is used for migrations * @example * ``` * `jsonb`, `text` * ``` * * If database data type needs additional params you can use them from `config` param * @example * ``` * `varchar(256)`, `numeric(2,3)` * ``` * * To make `config` be of specific type please use config generic in {@link CustomTypeValues} * * @example * Usage example * ``` * dataType() { * return 'boolean'; * }, * ``` * Or * ``` * dataType(config) { * return typeof config.length !== 'undefined' ? `varchar(${config.length})` : `varchar`; * } * ``` */ dataType: (config: T['config'] | (Equal extends true ? never : undefined)) => string; /** * Optional mapping function, between user input and driver * @example * For example, when using jsonb we need to map JS/TS object to string before writing to database * ``` * toDriver(value: TData): string { * return JSON.stringify(value); * } * ``` */ toDriver?: (value: T['data']) => T['driverData'] | SQL; /** * Optional mapping function, that is responsible for data mapping from database to JS/TS code * @example * For example, when using timestamp we need to map string Date representation to JS Date * ``` * fromDriver(value: string): Date { * return new Date(value); * }, * ``` */ fromDriver?: (value: T['driverData']) => T['data']; } ```` ================================================ FILE: docs/custom-types.md ================================================ # How to define custom types Drizzle ORM has a big set of predefined column types for different SQL databases. But still there are additional types that are not supported by Drizzle ORM (yet). That could be native pg types or extension types Here are some instructions on how to create and use your own types with Drizzle ORM --- ## Abstract view on column builder pattern in Drizzle ORM Each type creation should use 2 classes: - `ColumnBuilder` - class, that is responsible for generating whole set of needed fields for column creation - `Column` - class, that is representing Columns itself, that is used in query generation, migration mapping, etc. Each module has it's own class, representing `ColumnBuilder` or `Column`: - For `pg` -> `PgColumnBuilder` and `PgColumn` - For `mysql` -> `MySqlColumnBuilder` and `MySqlColumn` - For `sqlite` -> `SQLiteColumnBuilder` and `SQLiteColumn` ### Builder class explanation - (postgresql text data type example) - Builder class is responsible for storing TS return type for specific database datatype and override build function to return ready to use column in table - `TData` - extends return type for column. Current example will infer string type for current datatype used in schema definition ```typescript export class PgTextBuilder extends PgColumnBuilder< ColumnBuilderConfig<{ data: TData; driverParam: string }> > { build( table: AnyPgTable<{ name: TTableName }>, ): PgText { return new PgText(table, this.config); } } ``` > [!WARNING] > `$pgColumnBuilderBrand` should be changed and be equal to class name for new data type builder ### Column class explanation - (postgresql text data type example) --- Column class has set of types/functions, that could be overridden to get needed behavior for custom type - `TData` - extends return type for column. Current example will infer string type for current datatype used in schema definition - `getSQLType()` - function, that shows datatype name in database and will be used in migration generation - `mapFromDriverValue()` - interceptor between database and select query execution. If you want to modify/map/change value for specific data type, it could be done here #### Usage example for jsonb type ```typescript override mapToDriverValue(value: TData): string { return JSON.stringify(value); } ``` - `mapToDriverValue` - interceptor between user input for insert/update queries and database query. If you want to modify/map/change value for specific data type, it could be done here #### Usage example for int type ```typescript override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return parseInt(value); } return value; } ``` #### Column class example ```typescript export class PgText extends PgColumn> { constructor(table: AnyPgTable<{ name: TTableName }>, builder: PgTextBuilder['config']) { super(table, builder); } getSQLType(): string { return 'text'; } override mapFromDriverValue(value: string): TData { return value as TData } override mapToDriverValue(value: TData): string { return value } } ``` > [!WARNING] > `$pgColumnBrand` should be changed and be equal to class name for new data type ### Full text data type for PostgreSQL example For more postgres data type examples you could check [here](/drizzle-orm/src/pg-core/columns) ```typescript import { ColumnConfig, ColumnBuilderConfig } from 'drizzle-orm'; import { AnyPgTable } from 'drizzle-orm/pg-core'; import { PgColumn, PgColumnBuilder } from './common'; export class PgTextBuilder extends PgColumnBuilder< ColumnBuilderConfig<{ data: TData; driverParam: string }> > { build( table: AnyPgTable<{ name: TTableName }>, ): PgText { return new PgText(table, this.config); } } export class PgText extends PgColumn< ColumnConfig<{ tableName: TTableName; data: TData; driverParam: string }> > { constructor( table: AnyPgTable<{ name: TTableName }>, builder: PgTextBuilder['config'], ) { super(table, builder); } getSQLType(): string { return 'text'; } } export function text( name: string, ): PgTextBuilder { return new PgTextBuilder(name); } ``` ## Custom data type example > [!NOTE] > We will check example on pg module, but current pattern applies to all dialects, that are currently supported by Drizzle ORM ### Setting up CITEXT datatype > [!NOTE] > This type is available only with extensions and used for example, just to show how you could setup any data type you want. Extension support will come soon ### CITEXT data type example ```typescript export class PgCITextBuilder extends PgColumnBuilder< PgColumnBuilderHKT, ColumnBuilderConfig<{ data: TData; driverParam: string }> > { protected $pgColumnBuilderBrand: string = 'PgCITextBuilder'; build(table: AnyPgTable<{ name: TTableName }>): PgCIText { return new PgCIText(table, this.config); } } export class PgCIText extends PgColumn> { constructor(table: AnyPgTable<{ name: TTableName }>, builder: PgCITextBuilder['config']) { super(table, builder); } getSQLType(): string { return 'citext'; } } export function citext(name: string): PgCITextBuilder { return new PgCITextBuilder(name); } ``` #### Usage example ```typescript const table = pgTable('table', { id: integer('id').primaryKey(), ciname: citext('ciname') }) ``` ## Contributing by adding new custom types in Drizzle ORM You could add your created custom data types to Drizzle ORM, so everyone can use it. Each data type should be placed in separate file in `columns` folder and PR open with tag `new-data-type:pg` | `new-data-type:sqlite` | `new-data-type:mysql` For more Contribution information - please check [CONTRIBUTING.md](../CONTRIBUTING.md) ================================================ FILE: docs/joins.md ================================================ # Drizzle ORM - Joins As with other parts of Drizzle ORM, the joins syntax is a balance between the SQL-likeness and type safety. Here's an example of how a common "one-to-many" relationship can be modelled. ```typescript const users = pgTable('users', { id: serial('id').primaryKey(), firstName: text('first_name').notNull(), lastName: text('last_name'), cityId: int('city_id').references(() => cities.id), }); const cities = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), }); ``` Now, let's select all cities with all users that live in that city. This is how you'd write it in raw SQL: ```sql select cities.id as city_id, cities.name as city_name, users.id as user_id, users.first_name, users.last_name from cities left join users on users.city_id = cities.id ``` And here's how to do the same with Drizzle ORM: ```typescript const rows = await db .select({ cityId: cities.id, cityName: cities.name, userId: users.id, firstName: users.firstName, lastName: users.lastName, }) .from(cities) .leftJoin(users, eq(users.cityId, cities.id)); ``` `rows` will have the following type: ```typescript { cityId: number; cityName: string; userId: number | null; firstName: string | null; lastName: string | null; }[] ``` As you can see, all the joined columns have been nullified. This might do the trick if you're using joins to form a single row of results, but in our case we have two separate entities in our row - a city and a user. It might not be very convenient to check every field for nullability separately (or, even worse, just add an `!` after every field to "make compiler happy"). It would be much more useful if you could somehow run a single check to verify that the user was joined and all of its fields are available. **To achieve that, you can group the fields of a certain table in a nested object inside of `.select()`:** ```typescript const rows = await db .select({ cityId: cities.id, cityName: cities.name, user: { id: users.id, firstName: users.firstName, lastName: users.lastName, }, }) .from(cities) .leftJoin(users, eq(users.cityId, cities.id)); ``` In that case, the ORM will use dark TypeScript magic (as if it wasn't already) and figure out that you have a nested object where all the fields belong to the same table. So, the `rows` type will now look like this: ```typescript { cityId: number; cityName: string; user: { id: number; firstName: string; lastName: string | null; } | null; } ``` This is much more convenient! Now, you can just do a single check for `row.user !== null`, and all the user fields will become available. --- Note that you can group any fields in a nested object however you like, but the single check optimization will only be applied to a certain nested object if all its fields belong to the same table. So, for example, you can group the city fields, too: ```typescript .select({ city: { id: cities.id, name: cities.name, }, user: { id: users.id, firstName: users.firstName, lastName: users.lastName, }, }) ``` And the result type will look like this: ```typescript { city: { id: number; name: string; }; user: { id: number; firstName: string; lastName: string | null; } | null; } ``` --- If you just need all the fields from all the tables you're selecting and joining, you can simply omit the argument of the `.select()` method altogether: ```typescript const rows = await db.select().from(cities).leftJoin(users, eq(users.cityId, cities.id)); ``` > [!NOTE] > In this case, the Drizzle table/column names will be used as the keys in the result object. ```typescript { cities: { id: number; name: string; }; users: { id: number; firstName: string; lastName: string | null; cityId: number | null; } | null; }[] ``` --- There are cases where you'd want to select all the fields from one table, but pick fields from others. In that case, instead of listing all the table fields, you can just pass a table: ```typescript .select({ cities, // shorthand for "cities: cities", the key can be anything user: { firstName: users.firstName, }, }) ``` ```typescript { cities: { id: number; name: string; }; user: { firstName: string; } | null; } ``` --- But what happens if you group columns from multiple tables in the same nested object? Nothing, really - they will still be all individually nullable, just grouped under the same object (as you might expect!): ```typescript .select({ id: cities.id, cityAndUser: { cityName: cities.name, userId: users.id, firstName: users.firstName, lastName: users.lastName, } }) ``` ```typescript { id: number; cityAndUser: { cityName: string; userId: number | null; firstName: string | null; lastName: string | null; }; } ``` ## Aggregating results OK, so you have obtained all the cities and the users for every city. But what you **really** wanted is a **list** of users for every city, and what you currently have is an array of `city-user?` combinations. So, how do you transform it? That's the neat part - you can do that however you'd like! No hand-holding here. For example, one of the ways to do that would be `Array.reduce()`: ```typescript import { InferModel } from 'drizzle-orm'; type User = InferModel; type City = InferModel; const rows = await db .select({ city: cities, user: users, }) .from(cities) .leftJoin(users, eq(users.cityId, cities.id)); const result = rows.reduce>( (acc, row) => { const city = row.city; const user = row.user; if (!acc[city.id]) { acc[city.id] = { city, users: [] }; } if (user) { acc[city.id].users.push(user); } return acc; }, {}, ); ``` ================================================ FILE: docs/table-introspect-api.md ================================================ # Table introspect API ## Get table information ```ts import { pgTable, getTableConfig } from 'drizzle-orm/pg-core'; const table = pgTable(...); const { columns, indexes, foreignKeys, checks, primaryKeys, name, schema, } = getTableConfig(table); ``` ## Get table columns map ```ts import { pgTable, getTableColumns } from 'drizzle-orm/pg-core'; const table = pgTable('table', { id: integer('id').primaryKey(), name: text('name'), }); const columns/*: { id: ..., name: ... } */ = getTableColumns(table); ``` ================================================ FILE: dprint.json ================================================ { "typescript": { "useTabs": true, "quoteStyle": "preferSingle", "quoteProps": "asNeeded", "arrowFunction.useParentheses": "force", "jsx.quoteStyle": "preferSingle" }, "json": { "useTabs": true }, "markdown": {}, "includes": ["**/*.{ts,tsx,js,jsx,cjs,mjs,json}"], "excludes": [ "**/node_modules", "dist", "dist-dts", "dist.new", "**/drizzle/**/meta", "**/drizzle2/**/meta", "**/*snapshot.json", "**/_journal.json", "**/tsup.config*.mjs", "**/.sst", "integration-tests/tests/prisma/*/client", "integration-tests/tests/prisma/*/drizzle" ], "plugins": [ "https://plugins.dprint.dev/typescript-0.91.1.wasm", "https://plugins.dprint.dev/json-0.19.3.wasm", "https://plugins.dprint.dev/markdown-0.17.1.wasm" ] } ================================================ FILE: drizzle-arktype/README.md ================================================ `drizzle-arktype` is a plugin for [Drizzle ORM](https://github.com/drizzle-team/drizzle-orm) that allows you to generate [arktype](https://arktype.io/) schemas from Drizzle ORM schemas. **Features** - Create a select schema for tables, views and enums. - Create insert and update schemas for tables. - Supports all dialects: PostgreSQL, MySQL and SQLite. # Usage ```ts import { pgEnum, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core'; import { createInsertSchema, createSelectSchema } from 'drizzle-arktype'; import { type } from 'arktype'; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), role: text('role', { enum: ['admin', 'user'] }).notNull(), createdAt: timestamp('created_at').notNull().defaultNow(), }); // Schema for inserting a user - can be used to validate API requests const insertUserSchema = createInsertSchema(users); // Schema for updating a user - can be used to validate API requests const updateUserSchema = createUpdateSchema(users); // Schema for selecting a user - can be used to validate API responses const selectUserSchema = createSelectSchema(users); // Overriding the fields const insertUserSchema = createInsertSchema(users, { role: type('string'), }); // Refining the fields - useful if you want to change the fields before they become nullable/optional in the final schema const insertUserSchema = createInsertSchema(users, { id: (schema) => schema.atLeast(1), role: type('string'), }); // Usage const isUserValid = parse(insertUserSchema, { name: 'John Doe', email: 'johndoe@test.com', role: 'admin', }); ``` ================================================ FILE: drizzle-arktype/benchmarks/types.ts ================================================ import { bench, setup } from '@ark/attest'; import { type } from 'arktype'; import { boolean, integer, pgTable, text } from 'drizzle-orm/pg-core'; import { createSelectSchema } from '~/index.ts'; const users = pgTable('users', { id: integer().primaryKey(), firstName: text().notNull(), middleName: text(), lastName: text().notNull(), age: integer().notNull(), admin: boolean().notNull().default(false), }); const teardown = setup(); bench('select schema', () => { return createSelectSchema(users); }).types([13129, 'instantiations']); bench('select schema with refinements', () => { return createSelectSchema(users, { firstName: (t) => t.atMostLength(100), middleName: (t) => t.atMostLength(100), lastName: (t) => t.atMostLength(100), age: type.number.atLeast(1), }); }).types([21631, 'instantiations']); teardown(); ================================================ FILE: drizzle-arktype/package.json ================================================ { "name": "drizzle-arktype", "version": "0.1.3", "description": "Generate arktype schemas from Drizzle ORM schemas", "type": "module", "scripts": { "build": "tsx scripts/build.ts", "b": "pnpm build", "test:types": "cd tests && tsc", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", "publish": "npm publish package.tgz", "test": "vitest run", "bench:types": "tsx ./benchmarks/types.ts" }, "exports": { ".": { "import": { "types": "./index.d.mts", "default": "./index.mjs" }, "require": { "types": "./index.d.cjs", "default": "./index.cjs" }, "types": "./index.d.ts", "default": "./index.mjs" } }, "main": "./index.cjs", "module": "./index.mjs", "types": "./index.d.ts", "publishConfig": { "provenance": true }, "repository": { "type": "git", "url": "git+https://github.com/drizzle-team/drizzle-orm.git" }, "keywords": [ "arktype", "validate", "validation", "schema", "drizzle", "orm", "pg", "mysql", "postgresql", "postgres", "sqlite", "database", "sql", "typescript", "ts" ], "author": "Drizzle Team", "license": "Apache-2.0", "peerDependencies": { "arktype": ">=2.0.0", "drizzle-orm": ">=0.36.0" }, "devDependencies": { "@ark/attest": "^0.45.8", "@rollup/plugin-typescript": "^11.1.0", "@types/node": "^18.15.10", "arktype": "^2.1.10", "cpy": "^10.1.0", "drizzle-orm": "link:../drizzle-orm/dist", "json-rules-engine": "7.3.1", "rimraf": "^5.0.0", "rollup": "^3.29.5", "tsx": "^4.19.3", "vite-tsconfig-paths": "^4.3.2", "vitest": "^3.1.3", "zx": "^7.2.2" } } ================================================ FILE: drizzle-arktype/rollup.config.ts ================================================ import typescript from '@rollup/plugin-typescript'; import { defineConfig } from 'rollup'; export default defineConfig([ { input: 'src/index.ts', output: [ { format: 'esm', dir: 'dist', entryFileNames: '[name].mjs', chunkFileNames: '[name]-[hash].mjs', sourcemap: true, }, { format: 'cjs', dir: 'dist', entryFileNames: '[name].cjs', chunkFileNames: '[name]-[hash].cjs', sourcemap: true, }, ], external: [ /^drizzle-orm\/?/, 'arktype', ], plugins: [ typescript({ tsconfig: 'tsconfig.build.json', }), ], }, ]); ================================================ FILE: drizzle-arktype/scripts/build.ts ================================================ #!/usr/bin/env -S pnpm tsx import 'zx/globals'; import cpy from 'cpy'; await fs.remove('dist'); await $`rollup --config rollup.config.ts --configPlugin typescript`; await $`resolve-tspaths`; await fs.copy('README.md', 'dist/README.md'); await cpy('dist/**/*.d.ts', 'dist', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.mts'), }); await cpy('dist/**/*.d.ts', 'dist', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.cts'), }); await fs.copy('package.json', 'dist/package.json'); await $`scripts/fix-imports.ts`; ================================================ FILE: drizzle-arktype/scripts/fix-imports.ts ================================================ #!/usr/bin/env -S pnpm tsx import 'zx/globals'; import path from 'node:path'; import { parse, print, visit } from 'recast'; import parser from 'recast/parsers/typescript'; function resolvePathAlias(importPath: string, file: string) { if (importPath.startsWith('~/')) { const relativePath = path.relative(path.dirname(file), path.resolve('dist.new', importPath.slice(2))); importPath = relativePath.startsWith('.') ? relativePath : './' + relativePath; } return importPath; } function fixImportPath(importPath: string, file: string, ext: string) { importPath = resolvePathAlias(importPath, file); if (!/\..*\.(js|ts)$/.test(importPath)) { return importPath; } return importPath.replace(/\.(js|ts)$/, ext); } const cjsFiles = await glob('dist/**/*.{cjs,d.cts}'); await Promise.all(cjsFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); } this.traverse(path); }, visitCallExpression(path) { if (path.value.callee.type === 'Identifier' && path.value.callee.name === 'require') { path.value.arguments[0].value = fixImportPath(path.value.arguments[0].value, file, '.cjs'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = resolvePathAlias(path.value.argument.value, file); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.cjs'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); let esmFiles = await glob('dist/**/*.{js,d.ts}'); await Promise.all(esmFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = fixImportPath(path.value.argument.value, file, '.js'); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.js'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); esmFiles = await glob('dist/**/*.{mjs,d.mts}'); await Promise.all(esmFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = fixImportPath(path.value.argument.value, file, '.mjs'); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.mjs'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); ================================================ FILE: drizzle-arktype/src/column.ts ================================================ import { type Type, type } from 'arktype'; import type { Column, ColumnBaseConfig } from 'drizzle-orm'; import type { MySqlBigInt53, MySqlChar, MySqlDouble, MySqlFloat, MySqlInt, MySqlMediumInt, MySqlReal, MySqlSerial, MySqlSmallInt, MySqlText, MySqlTinyInt, MySqlVarChar, MySqlYear, } from 'drizzle-orm/mysql-core'; import type { PgArray, PgBigInt53, PgBigSerial53, PgBinaryVector, PgChar, PgDoublePrecision, PgGeometry, PgGeometryObject, PgHalfVector, PgInteger, PgLineABC, PgLineTuple, PgPointObject, PgPointTuple, PgReal, PgSerial, PgSmallInt, PgSmallSerial, PgUUID, PgVarchar, PgVector, } from 'drizzle-orm/pg-core'; import type { SingleStoreBigInt53, SingleStoreChar, SingleStoreDouble, SingleStoreFloat, SingleStoreInt, SingleStoreMediumInt, SingleStoreReal, SingleStoreSerial, SingleStoreSmallInt, SingleStoreText, SingleStoreTinyInt, SingleStoreVarChar, SingleStoreYear, } from 'drizzle-orm/singlestore-core'; import type { SQLiteInteger, SQLiteReal, SQLiteText } from 'drizzle-orm/sqlite-core'; import { CONSTANTS } from './constants.ts'; import { isColumnType, isWithEnum } from './utils.ts'; export const literalSchema = type.string.or(type.number).or(type.boolean).or(type.null); export const jsonSchema = literalSchema.or(type.unknown.as().array()).or(type.object.as>()); export const bufferSchema = type.unknown.narrow((value) => value instanceof Buffer).as().describe( // eslint-disable-line no-instanceof/no-instanceof 'a Buffer instance', ); export function columnToSchema(column: Column): Type { let schema!: Type; if (isWithEnum(column)) { schema = column.enumValues.length ? type.enumerated(...column.enumValues) : type.string; } if (!schema) { // Handle specific types if (isColumnType | PgPointTuple>(column, ['PgGeometry', 'PgPointTuple'])) { schema = type([type.number, type.number]); } else if ( isColumnType | PgGeometryObject>(column, ['PgGeometryObject', 'PgPointObject']) ) { schema = type({ x: type.number, y: type.number, }); } else if (isColumnType | PgVector>(column, ['PgHalfVector', 'PgVector'])) { schema = column.dimensions ? type.number.array().exactlyLength(column.dimensions) : type.number.array(); } else if (isColumnType>(column, ['PgLine'])) { schema = type([type.number, type.number, type.number]); } else if (isColumnType>(column, ['PgLineABC'])) { schema = type({ a: type.number, b: type.number, c: type.number, }); } // Handle other types else if (isColumnType>(column, ['PgArray'])) { const arraySchema = columnToSchema(column.baseColumn).array(); schema = column.size ? arraySchema.exactlyLength(column.size) : arraySchema; } else if (column.dataType === 'array') { schema = type.unknown.array(); } else if (column.dataType === 'number') { schema = numberColumnToSchema(column); } else if (column.dataType === 'bigint') { schema = bigintColumnToSchema(column); } else if (column.dataType === 'boolean') { schema = type.boolean; } else if (column.dataType === 'date') { schema = type.Date; } else if (column.dataType === 'string') { schema = stringColumnToSchema(column); } else if (column.dataType === 'json') { schema = jsonSchema; } else if (column.dataType === 'custom') { schema = type.unknown; } else if (column.dataType === 'buffer') { schema = bufferSchema; } } if (!schema) { schema = type.unknown; } return schema; } function numberColumnToSchema(column: Column): Type { let unsigned = column.getSQLType().includes('unsigned'); let min!: number; let max!: number; let integer = false; if (isColumnType | SingleStoreTinyInt>(column, ['MySqlTinyInt', 'SingleStoreTinyInt'])) { min = unsigned ? 0 : CONSTANTS.INT8_MIN; max = unsigned ? CONSTANTS.INT8_UNSIGNED_MAX : CONSTANTS.INT8_MAX; integer = true; } else if ( isColumnType | PgSmallSerial | MySqlSmallInt | SingleStoreSmallInt>(column, [ 'PgSmallInt', 'PgSmallSerial', 'MySqlSmallInt', 'SingleStoreSmallInt', ]) ) { min = unsigned ? 0 : CONSTANTS.INT16_MIN; max = unsigned ? CONSTANTS.INT16_UNSIGNED_MAX : CONSTANTS.INT16_MAX; integer = true; } else if ( isColumnType< PgReal | MySqlFloat | MySqlMediumInt | SingleStoreFloat | SingleStoreMediumInt >(column, [ 'PgReal', 'MySqlFloat', 'MySqlMediumInt', 'SingleStoreFloat', 'SingleStoreMediumInt', ]) ) { min = unsigned ? 0 : CONSTANTS.INT24_MIN; max = unsigned ? CONSTANTS.INT24_UNSIGNED_MAX : CONSTANTS.INT24_MAX; integer = isColumnType(column, ['MySqlMediumInt', 'SingleStoreMediumInt']); } else if ( isColumnType | PgSerial | MySqlInt | SingleStoreInt>(column, [ 'PgInteger', 'PgSerial', 'MySqlInt', 'SingleStoreInt', ]) ) { min = unsigned ? 0 : CONSTANTS.INT32_MIN; max = unsigned ? CONSTANTS.INT32_UNSIGNED_MAX : CONSTANTS.INT32_MAX; integer = true; } else if ( isColumnType< | PgDoublePrecision | MySqlReal | MySqlDouble | SingleStoreReal | SingleStoreDouble | SQLiteReal >(column, [ 'PgDoublePrecision', 'MySqlReal', 'MySqlDouble', 'SingleStoreReal', 'SingleStoreDouble', 'SQLiteReal', ]) ) { min = unsigned ? 0 : CONSTANTS.INT48_MIN; max = unsigned ? CONSTANTS.INT48_UNSIGNED_MAX : CONSTANTS.INT48_MAX; } else if ( isColumnType< | PgBigInt53 | PgBigSerial53 | MySqlBigInt53 | MySqlSerial | SingleStoreBigInt53 | SingleStoreSerial | SQLiteInteger >( column, [ 'PgBigInt53', 'PgBigSerial53', 'MySqlBigInt53', 'MySqlSerial', 'SingleStoreBigInt53', 'SingleStoreSerial', 'SQLiteInteger', ], ) ) { unsigned = unsigned || isColumnType(column, ['MySqlSerial', 'SingleStoreSerial']); min = unsigned ? 0 : Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; integer = true; } else if (isColumnType | SingleStoreYear>(column, ['MySqlYear', 'SingleStoreYear'])) { min = 1901; max = 2155; integer = true; } else { min = Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; } return (integer ? type.keywords.number.integer : type.number).atLeast(min).atMost(max); } /** @internal */ export const unsignedBigintNarrow = (v: bigint, ctx: { mustBe: (expected: string) => false }) => v < 0n ? ctx.mustBe('greater than') : v > CONSTANTS.INT64_UNSIGNED_MAX ? ctx.mustBe('less than') : true; /** @internal */ export const bigintNarrow = (v: bigint, ctx: { mustBe: (expected: string) => false }) => v < CONSTANTS.INT64_MIN ? ctx.mustBe('greater than') : v > CONSTANTS.INT64_MAX ? ctx.mustBe('less than') : true; function bigintColumnToSchema(column: Column): Type { const unsigned = column.getSQLType().includes('unsigned'); return type.bigint.narrow(unsigned ? unsignedBigintNarrow : bigintNarrow); } function stringColumnToSchema(column: Column): Type { if (isColumnType>>(column, ['PgUUID'])) { return type(/^[\da-f]{8}(?:-[\da-f]{4}){3}-[\da-f]{12}$/iu).describe('a RFC-4122-compliant UUID'); } if ( isColumnType< PgBinaryVector< ColumnBaseConfig<'string', 'PgBinaryVector'> & { dimensions: number; } > >(column, ['PgBinaryVector']) ) { return type(`/^[01]{${column.dimensions}}$/`) .describe(`a string containing ones or zeros while being ${column.dimensions} characters long`); } let max: number | undefined; let fixed = false; if (isColumnType | SQLiteText>(column, ['PgVarchar', 'SQLiteText'])) { max = column.length; } else if ( isColumnType | SingleStoreVarChar>(column, ['MySqlVarChar', 'SingleStoreVarChar']) ) { max = column.length ?? CONSTANTS.INT16_UNSIGNED_MAX; } else if (isColumnType | SingleStoreText>(column, ['MySqlText', 'SingleStoreText'])) { if (column.textType === 'longtext') { max = CONSTANTS.INT32_UNSIGNED_MAX; } else if (column.textType === 'mediumtext') { max = CONSTANTS.INT24_UNSIGNED_MAX; } else if (column.textType === 'text') { max = CONSTANTS.INT16_UNSIGNED_MAX; } else { max = CONSTANTS.INT8_UNSIGNED_MAX; } } if ( isColumnType | MySqlChar | SingleStoreChar>(column, [ 'PgChar', 'MySqlChar', 'SingleStoreChar', ]) ) { max = column.length; fixed = true; } return max && fixed ? type.string.exactlyLength(max) : max ? type.string.atMostLength(max) : type.string; } ================================================ FILE: drizzle-arktype/src/column.types.ts ================================================ import { Type, type } from 'arktype'; import type { Column } from 'drizzle-orm'; import type { Json } from './utils.ts'; export type ArktypeNullable = Type | null>; export type ArktypeOptional = [Type>, '?']; export type GetArktypeType< TColumn extends Column, > = TColumn['_']['columnType'] extends 'PgJson' | 'PgJsonb' | 'MySqlJson' | 'SingleStoreJson' | 'SQLiteTextJson' | 'SQLiteBlobJson' ? unknown extends TColumn['_']['data'] ? Type : Type : Type; type HandleSelectColumn< TSchema, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? TSchema : ArktypeNullable; type HandleInsertColumn< TSchema, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? TColumn['_']['hasDefault'] extends true ? ArktypeOptional : TSchema : ArktypeOptional>; type HandleUpdateColumn< TSchema, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? ArktypeOptional : ArktypeOptional>; export type HandleColumn< TType extends 'select' | 'insert' | 'update', TColumn extends Column, > = TType extends 'select' ? HandleSelectColumn, TColumn> : TType extends 'insert' ? HandleInsertColumn, TColumn> : TType extends 'update' ? HandleUpdateColumn, TColumn> : GetArktypeType; ================================================ FILE: drizzle-arktype/src/constants.ts ================================================ export const CONSTANTS = { INT8_MIN: -128, INT8_MAX: 127, INT8_UNSIGNED_MAX: 255, INT16_MIN: -32768, INT16_MAX: 32767, INT16_UNSIGNED_MAX: 65535, INT24_MIN: -8388608, INT24_MAX: 8388607, INT24_UNSIGNED_MAX: 16777215, INT32_MIN: -2147483648, INT32_MAX: 2147483647, INT32_UNSIGNED_MAX: 4294967295, INT48_MIN: -140737488355328, INT48_MAX: 140737488355327, INT48_UNSIGNED_MAX: 281474976710655, INT64_MIN: -9223372036854775808n, INT64_MAX: 9223372036854775807n, INT64_UNSIGNED_MAX: 18446744073709551615n, }; ================================================ FILE: drizzle-arktype/src/index.ts ================================================ export { bufferSchema, jsonSchema, literalSchema } from './column.ts'; export * from './column.types.ts'; export * from './schema.ts'; export * from './schema.types.internal.ts'; export * from './schema.types.ts'; export * from './utils.ts'; ================================================ FILE: drizzle-arktype/src/schema.ts ================================================ import { Type, type } from 'arktype'; import { Column, getTableColumns, getViewSelectedFields, is, isTable, isView, SQL } from 'drizzle-orm'; import type { Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import { columnToSchema } from './column.ts'; import type { Conditions } from './schema.types.internal.ts'; import type { CreateInsertSchema, CreateSelectSchema, CreateUpdateSchema } from './schema.types.ts'; import { isPgEnum } from './utils.ts'; function getColumns(tableLike: Table | View) { return isTable(tableLike) ? getTableColumns(tableLike) : getViewSelectedFields(tableLike); } function handleColumns( columns: Record, refinements: Record, conditions: Conditions, ): Type { const columnSchemas: Record = {}; for (const [key, selected] of Object.entries(columns)) { if (!is(selected, Column) && !is(selected, SQL) && !is(selected, SQL.Aliased) && typeof selected === 'object') { const columns = isTable(selected) || isView(selected) ? getColumns(selected) : selected; columnSchemas[key] = handleColumns(columns, refinements[key] ?? {}, conditions); continue; } const refinement = refinements[key]; if ( refinement !== undefined && (typeof refinement !== 'function' || (typeof refinement === 'function' && refinement.expression !== undefined)) ) { columnSchemas[key] = refinement; continue; } const column = is(selected, Column) ? selected : undefined; const schema = column ? columnToSchema(column) : type.unknown; const refined = typeof refinement === 'function' ? refinement(schema) : schema; if (conditions.never(column)) { continue; } else { columnSchemas[key] = refined; } if (column) { if (conditions.nullable(column)) { columnSchemas[key] = columnSchemas[key]!.or(type.null); } if (conditions.optional(column)) { columnSchemas[key] = columnSchemas[key]!.optional() as any; } } } return type(columnSchemas); } export const createSelectSchema = (( entity: Table | View | PgEnum<[string, ...string[]]>, refine?: Record, ) => { if (isPgEnum(entity)) { return type.enumerated(...entity.enumValues); } const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, { never: () => false, optional: () => false, nullable: (column) => !column.notNull, }) as any; }) as CreateSelectSchema; export const createInsertSchema = (( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, { never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', optional: (column) => !column.notNull || (column.notNull && column.hasDefault), nullable: (column) => !column.notNull, }) as any; }) as CreateInsertSchema; export const createUpdateSchema = (( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, { never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', optional: () => true, nullable: (column) => !column.notNull, }) as any; }) as CreateUpdateSchema; ================================================ FILE: drizzle-arktype/src/schema.types.internal.ts ================================================ import type { Type, type } from 'arktype'; import type { Column, DrizzleTypeError, SelectedFieldsFlat, Simplify, Table, View } from 'drizzle-orm'; import type { ArktypeNullable, ArktypeOptional, GetArktypeType, HandleColumn } from './column.types.ts'; import type { ColumnIsGeneratedAlwaysAs, GetSelection } from './utils.ts'; export interface Conditions { never: (column?: Column) => boolean; optional: (column: Column) => boolean; nullable: (column: Column) => boolean; } type GenericSchema = type.cast | [type.cast, '?']; type BuildRefineField = T extends GenericSchema ? ((schema: T) => GenericSchema) | GenericSchema : never; export type BuildRefine< TColumns extends Record, > = { [K in keyof TColumns as TColumns[K] extends Column | SelectedFieldsFlat | Table | View ? K : never]?: TColumns[K] extends Column ? BuildRefineField> : BuildRefine>; }; type HandleRefinement< TType extends 'select' | 'insert' | 'update', TRefinement, TColumn extends Column, > = TRefinement extends (schema: any) => GenericSchema ? ( TColumn['_']['notNull'] extends true ? ReturnType : ArktypeNullable> ) extends infer TSchema ? TType extends 'update' ? ArktypeOptional : TSchema : Type : TRefinement; type IsRefinementDefined< TRefinements extends Record | undefined, TKey extends string | symbol | number, > = TRefinements extends object ? TRefinements[TKey] extends GenericSchema | ((schema: any) => any) ? true : false : false; export type BuildSchema< TType extends 'select' | 'insert' | 'update', TColumns extends Record, TRefinements extends Record | undefined, > = type.instantiate< Simplify< { readonly [K in keyof TColumns as ColumnIsGeneratedAlwaysAs extends true ? never : K]: TColumns[K] extends infer TColumn extends Column ? IsRefinementDefined extends true ? HandleRefinement : HandleColumn : TColumns[K] extends infer TNested extends SelectedFieldsFlat | Table | View ? BuildSchema< TType, GetSelection, TRefinements extends object ? TRefinements[K & keyof TRefinements] : undefined > : any; } > >; export type NoUnknownKeys< TRefinement extends Record, TCompare extends Record, > = { [K in keyof TRefinement]: K extends keyof TCompare ? TRefinement[K] extends Record ? NoUnknownKeys : TRefinement[K] : DrizzleTypeError<`Found unknown key in refinement: "${K & string}"`>; }; ================================================ FILE: drizzle-arktype/src/schema.types.ts ================================================ import type { Type } from 'arktype'; import type { Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type { BuildRefine, BuildSchema, NoUnknownKeys } from './schema.types.internal.ts'; export interface CreateSelectSchema { (table: TTable): BuildSchema<'select', TTable['_']['columns'], undefined>; < TTable extends Table, TRefine extends BuildRefine, >( table: TTable, refine?: NoUnknownKeys, ): BuildSchema<'select', TTable['_']['columns'], TRefine>; (view: TView): BuildSchema<'select', TView['_']['selectedFields'], undefined>; < TView extends View, TRefine extends BuildRefine, >( view: TView, refine: NoUnknownKeys, ): BuildSchema<'select', TView['_']['selectedFields'], TRefine>; >(enum_: TEnum): Type; } export interface CreateInsertSchema { (table: TTable): BuildSchema<'insert', TTable['_']['columns'], undefined>; < TTable extends Table, TRefine extends BuildRefine>, >( table: TTable, refine?: NoUnknownKeys, ): BuildSchema<'insert', TTable['_']['columns'], TRefine>; } export interface CreateUpdateSchema { (table: TTable): BuildSchema<'update', TTable['_']['columns'], undefined>; < TTable extends Table, TRefine extends BuildRefine>, >( table: TTable, refine?: TRefine, ): BuildSchema<'update', TTable['_']['columns'], TRefine>; } ================================================ FILE: drizzle-arktype/src/utils.ts ================================================ import type { type } from 'arktype'; import type { Column, SelectedFieldsFlat, Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type { literalSchema } from './column.ts'; export function isColumnType(column: Column, columnTypes: string[]): column is T { return columnTypes.includes(column.columnType); } export function isWithEnum(column: Column): column is typeof column & { enumValues: [string, ...string[]] } { return 'enumValues' in column && Array.isArray(column.enumValues) && column.enumValues.length > 0; } export const isPgEnum: (entity: any) => entity is PgEnum<[string, ...string[]]> = isWithEnum as any; type Literal = type.infer; export type Json = Literal | Record | any[]; export type ColumnIsGeneratedAlwaysAs = TColumn extends Column ? TColumn['_']['identity'] extends 'always' ? true : TColumn['_']['generated'] extends { type: 'byDefault' } | undefined ? false : true : false; export type GetSelection | Table | View> = T extends Table ? T['_']['columns'] : T extends View ? T['_']['selectedFields'] : T; ================================================ FILE: drizzle-arktype/tests/mysql.test.ts ================================================ import { Type, type } from 'arktype'; import { type Equal, sql } from 'drizzle-orm'; import { customType, int, json, mysqlSchema, mysqlTable, mysqlView, serial, text } from 'drizzle-orm/mysql-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { bigintNarrow, jsonSchema, unsignedBigintNarrow } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX); const serialNumberModeSchema = type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER); const textSchema = type.string.atMostLength(CONSTANTS.INT16_UNSIGNED_MAX); test('table - select', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = type({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = mysqlSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = type({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = type({ id: serialNumberModeSchema.optional(), name: textSchema, age: intSchema.or(type.null).optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = type({ id: serialNumberModeSchema.optional(), name: textSchema.optional(), age: intSchema.or(type.null).optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view qb - select', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = type({ id: serialNumberModeSchema, age: type('unknown.any') }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view columns - select', (t) => { const view = mysqlView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = type({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view with nested fields - select', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = mysqlView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = type({ id: serialNumberModeSchema, nested: type({ name: textSchema, age: type('unknown.any') }), table: type({ id: serialNumberModeSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - select', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = type({ c1: intSchema.or(type.null), c2: intSchema, c3: intSchema.or(type.null), c4: intSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema, c3: intSchema.or(type.null).optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - update', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema.optional(), c3: intSchema.or(type.null).optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: intSchema.or(type.null), c2: intSchema.atMost(1000), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = type.string.atLeastLength(1).atMostLength(100); const result = createSelectSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), c4: customTextSchema, }); const expected = type({ c1: intSchema.or(type.null), c2: intSchema.atMost(1000), c3: type.string.pipe(Number), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema.atMost(1000), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema.atMost(1000).optional(), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine view - select', (t) => { const table = mysqlTable('test', { c1: int(), c2: int(), c3: int(), c4: int(), c5: int(), c6: int(), }); const view = mysqlView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), nested: { c5: (schema) => schema.atMost(1000), c6: type.string.pipe(Number), }, table: { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }, }); const expected = type({ c1: intSchema.or(type.null), c2: intSchema.atMost(1000).or(type.null), c3: type.string.pipe(Number), nested: type({ c4: intSchema.or(type.null), c5: intSchema.atMost(1000).or(type.null), c6: type.string.pipe(Number), }), table: type({ c1: intSchema.or(type.null), c2: intSchema.atMost(1000).or(type.null), c3: type.string.pipe(Number), c4: intSchema.or(type.null), c5: intSchema.or(type.null), c6: intSchema.or(type.null), }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('all data types', (t) => { const table = mysqlTable('test', ({ bigint, binary, boolean, char, date, datetime, decimal, double, float, int, json, mediumint, mysqlEnum, real, serial, smallint, text, time, timestamp, tinyint, varchar, varbinary, year, longtext, mediumtext, tinytext, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigint3: bigint({ unsigned: true, mode: 'number' }).notNull(), bigint4: bigint({ unsigned: true, mode: 'bigint' }).notNull(), binary: binary({ length: 10 }).notNull(), boolean: boolean().notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), decimal1: decimal().notNull(), decimal2: decimal({ unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), float2: float({ unsigned: true }).notNull(), int1: int().notNull(), int2: int({ unsigned: true }).notNull(), json: json().notNull(), mediumint1: mediumint().notNull(), mediumint2: mediumint({ unsigned: true }).notNull(), enum: mysqlEnum('enum', ['a', 'b', 'c']).notNull(), real: real().notNull(), serial: serial().notNull(), smallint1: smallint().notNull(), smallint2: smallint({ unsigned: true }).notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), tinyint1: tinyint().notNull(), tinyint2: tinyint({ unsigned: true }).notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), varbinary: varbinary({ length: 10 }).notNull(), year: year().notNull(), longtext1: longtext().notNull(), longtext2: longtext({ enum: ['a', 'b', 'c'] }).notNull(), mediumtext1: mediumtext().notNull(), mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), })); const result = createSelectSchema(table); const expected = type({ bigint1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), bigint2: type.bigint.narrow(bigintNarrow), bigint3: type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER), bigint4: type.bigint.narrow(unsignedBigintNarrow), binary: type.string, boolean: type.boolean, char1: type.string.exactlyLength(10), char2: type.enumerated('a', 'b', 'c'), date1: type.Date, date2: type.string, datetime1: type.Date, datetime2: type.string, decimal1: type.string, decimal2: type.string, double1: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), double2: type.number.atLeast(0).atMost(CONSTANTS.INT48_UNSIGNED_MAX), float1: type.number.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX), float2: type.number.atLeast(0).atMost(CONSTANTS.INT24_UNSIGNED_MAX), int1: type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX), int2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT32_UNSIGNED_MAX), json: jsonSchema, mediumint1: type.keywords.number.integer.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX), mediumint2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT24_UNSIGNED_MAX), enum: type.enumerated('a', 'b', 'c'), real: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), serial: type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER), smallint1: type.keywords.number.integer.atLeast(CONSTANTS.INT16_MIN).atMost(CONSTANTS.INT16_MAX), smallint2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT16_UNSIGNED_MAX), text1: type.string.atMostLength(CONSTANTS.INT16_UNSIGNED_MAX), text2: type.enumerated('a', 'b', 'c'), time: type.string, timestamp1: type.Date, timestamp2: type.string, tinyint1: type.keywords.number.integer.atLeast(CONSTANTS.INT8_MIN).atMost(CONSTANTS.INT8_MAX), tinyint2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT8_UNSIGNED_MAX), varchar1: type.string.atMostLength(10), varchar2: type.enumerated('a', 'b', 'c'), varbinary: type.string, year: type.keywords.number.integer.atLeast(1901).atMost(2155), longtext1: type.string.atMostLength(CONSTANTS.INT32_UNSIGNED_MAX), longtext2: type.enumerated('a', 'b', 'c'), mediumtext1: type.string.atMostLength(CONSTANTS.INT24_UNSIGNED_MAX), mediumtext2: type.enumerated('a', 'b', 'c'), tinytext1: type.string.atMostLength(CONSTANTS.INT8_UNSIGNED_MAX), tinytext2: type.enumerated('a', 'b', 'c'), }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: Type = type('unknown.any') as any; const table = mysqlTable('test', { json: json().$type(), }); const result = createSelectSchema(table); const expected = type({ json: TopLevelCondition.or(type.null), }); Expect, type.infer>>(); } /* Disallow unknown keys in table refinement - select */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: type.string }); } /* Disallow unknown keys in table refinement - insert */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: type.string }); } /* Disallow unknown keys in table refinement - update */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: type.string }); } /* Disallow unknown keys in view qb - select */ { const table = mysqlTable('test', { id: int() }); const view = mysqlView('test').as((qb) => qb.select().from(table)); const nestedSelect = mysqlView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: type.string }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: type.string } }); } /* Disallow unknown keys in view columns - select */ { const view = mysqlView('test', { id: int() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: type.string }); } ================================================ FILE: drizzle-arktype/tests/pg.test.ts ================================================ import { Type, type } from 'arktype'; import { type Equal, sql } from 'drizzle-orm'; import { customType, integer, json, jsonb, pgEnum, pgMaterializedView, pgSchema, pgTable, pgView, serial, text, } from 'drizzle-orm/pg-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { bigintNarrow, jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; const integerSchema = type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX); const textSchema = type.string; test('table - select', (t) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = type({ id: integerSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = pgSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = type({ id: integerSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = pgTable('test', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer(), }); const result = createInsertSchema(table); const expected = type({ name: textSchema, age: integerSchema.or(type.null).optional() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = pgTable('test', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer(), }); const result = createUpdateSchema(table); const expected = type({ name: textSchema.optional(), age: integerSchema.or(type.null).optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view qb - select', (t) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = type({ id: integerSchema, age: type('unknown.any') }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view columns - select', (t) => { const view = pgView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = type({ id: integerSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('materialized view qb - select', (t) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = type({ id: integerSchema, age: type('unknown.any') }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('materialized view columns - select', (t) => { const view = pgView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = type({ id: integerSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view with nested fields - select', (t) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = type({ id: integerSchema, nested: { name: textSchema, age: type('unknown.any') }, table: { id: integerSchema, name: textSchema }, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('enum - select', (t) => { const enum_ = pgEnum('test', ['a', 'b', 'c']); const result = createSelectSchema(enum_); const expected = type.enumerated('a', 'b', 'c'); expectEnumValues(t, expected).from(result); Expect>(); }); test('nullability - select', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), }); const result = createSelectSchema(table); const expected = type({ c1: integerSchema.or(type.null), c2: integerSchema, c3: integerSchema.or(type.null), c4: integerSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), c5: integer().generatedAlwaysAs(1), c6: integer().generatedAlwaysAsIdentity(), c7: integer().generatedByDefaultAsIdentity(), }); const result = createInsertSchema(table); const expected = type({ c1: integerSchema.or(type.null).optional(), c2: integerSchema, c3: integerSchema.or(type.null).optional(), c4: integerSchema.optional(), c7: integerSchema.optional(), }); expectSchemaShape(t, expected).from(result); }); test('nullability - update', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), c5: integer().generatedAlwaysAs(1), c6: integer().generatedAlwaysAsIdentity(), c7: integer().generatedByDefaultAsIdentity(), }); const result = createUpdateSchema(table); const expected = type({ c1: integerSchema.or(type.null).optional(), c2: integerSchema.optional(), c3: integerSchema.or(type.null).optional(), c4: integerSchema.optional(), c7: integerSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: integerSchema.or(type.null), c2: integerSchema.atMost(1000), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: customText(), }); const customTextSchema = type.string.atLeastLength(1).atMostLength(100); const result = createSelectSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), c4: customTextSchema, }); const expected = type({ c1: integerSchema.or(type.null), c2: integerSchema.atMost(1000), c3: type.string.pipe(Number), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: integer().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: integerSchema.or(type.null).optional(), c2: integerSchema.atMost(1000), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: integer().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: integerSchema.or(type.null).optional(), c2: integerSchema.atMost(1000).optional(), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine view - select', (t) => { const table = pgTable('test', { c1: integer(), c2: integer(), c3: integer(), c4: integer(), c5: integer(), c6: integer(), }); const view = pgView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), nested: { c5: (schema) => schema.atMost(1000), c6: type.string.pipe(Number), }, table: { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }, }); const expected = type({ c1: integerSchema.or(type.null), c2: integerSchema.atMost(1000).or(type.null), c3: type.string.pipe(Number), nested: type({ c4: integerSchema.or(type.null), c5: integerSchema.atMost(1000).or(type.null), c6: type.string.pipe(Number), }), table: type({ c1: integerSchema.or(type.null), c2: integerSchema.atMost(1000).or(type.null), c3: type.string.pipe(Number), c4: integerSchema.or(type.null), c5: integerSchema.or(type.null), c6: integerSchema.or(type.null), }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('all data types', (t) => { const table = pgTable('test', ({ bigint, bigserial, bit, boolean, date, char, cidr, doublePrecision, geometry, halfvec, inet, integer, interval, json, jsonb, line, macaddr, macaddr8, numeric, point, real, serial, smallint, smallserial, text, sparsevec, time, timestamp, uuid, varchar, vector, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigserial1: bigserial({ mode: 'number' }).notNull(), bigserial2: bigserial({ mode: 'bigint' }).notNull(), bit: bit({ dimensions: 5 }).notNull(), boolean: boolean().notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), cidr: cidr().notNull(), doublePrecision: doublePrecision().notNull(), geometry1: geometry({ type: 'point', mode: 'tuple' }).notNull(), geometry2: geometry({ type: 'point', mode: 'xy' }).notNull(), halfvec: halfvec({ dimensions: 3 }).notNull(), inet: inet().notNull(), integer: integer().notNull(), interval: interval().notNull(), json: json().notNull(), jsonb: jsonb().notNull(), line1: line({ mode: 'abc' }).notNull(), line2: line({ mode: 'tuple' }).notNull(), macaddr: macaddr().notNull(), macaddr8: macaddr8().notNull(), numeric: numeric().notNull(), point1: point({ mode: 'xy' }).notNull(), point2: point({ mode: 'tuple' }).notNull(), real: real().notNull(), serial: serial().notNull(), smallint: smallint().notNull(), smallserial: smallserial().notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), sparsevec: sparsevec({ dimensions: 3 }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), uuid: uuid().notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), vector: vector({ dimensions: 3 }).notNull(), array1: integer().array().notNull(), array2: integer().array().array(2).notNull(), array3: varchar({ length: 10 }).array().array(2).notNull(), })); const result = createSelectSchema(table); const expected = type({ bigint1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), bigint2: type.bigint.narrow(bigintNarrow), bigserial1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), bigserial2: type.bigint.narrow(bigintNarrow), bit: type(/^[01]{5}$/).describe('a string containing ones or zeros while being 5 characters long'), boolean: type.boolean, date1: type.Date, date2: type.string, char1: type.string.exactlyLength(10), char2: type.enumerated('a', 'b', 'c'), cidr: type.string, doublePrecision: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), geometry1: type([type.number, type.number]), geometry2: type({ x: type.number, y: type.number }), halfvec: type.number.array().exactlyLength(3), inet: type.string, integer: type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX), interval: type.string, json: jsonSchema, jsonb: jsonSchema, line1: type({ a: type.number, b: type.number, c: type.number }), line2: type([type.number, type.number, type.number]), macaddr: type.string, macaddr8: type.string, numeric: type.string, point1: type({ x: type.number, y: type.number }), point2: type([type.number, type.number]), real: type.number.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX), serial: type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX), smallint: type.keywords.number.integer.atLeast(CONSTANTS.INT16_MIN).atMost(CONSTANTS.INT16_MAX), smallserial: type.keywords.number.integer.atLeast(CONSTANTS.INT16_MIN).atMost(CONSTANTS.INT16_MAX), text1: type.string, text2: type.enumerated('a', 'b', 'c'), sparsevec: type.string, time: type.string, timestamp1: type.Date, timestamp2: type.string, uuid: type(/^[\da-f]{8}(?:-[\da-f]{4}){3}-[\da-f]{12}$/iu).describe('a RFC-4122-compliant UUID'), varchar1: type.string.atMostLength(10), varchar2: type.enumerated('a', 'b', 'c'), vector: type.number.array().exactlyLength(3), array1: integerSchema.array(), array2: integerSchema.array().array().exactlyLength(2), array3: type.string.atMostLength(10).array().array().exactlyLength(2), }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: Type = type('unknown.any') as any; const table = pgTable('test', { json: json().$type().notNull(), jsonb: jsonb().$type(), }); const result = createSelectSchema(table); const expected = type({ json: TopLevelCondition, jsonb: TopLevelCondition.or(type.null), }); Expect, type.infer>>(); } /* Disallow unknown keys in table refinement - select */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createSelectSchema(table, { unknown: type.string }); } /* Disallow unknown keys in table refinement - insert */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createInsertSchema(table, { unknown: type.string }); } /* Disallow unknown keys in table refinement - update */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createUpdateSchema(table, { unknown: type.string }); } /* Disallow unknown keys in view qb - select */ { const table = pgTable('test', { id: integer() }); const view = pgView('test').as((qb) => qb.select().from(table)); const mView = pgMaterializedView('test').as((qb) => qb.select().from(table)); const nestedSelect = pgView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: type.string }); // @ts-expect-error createSelectSchema(mView, { unknown: type.string }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: type.string } }); } /* Disallow unknown keys in view columns - select */ { const view = pgView('test', { id: integer() }).as(sql``); const mView = pgView('test', { id: integer() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: type.string }); // @ts-expect-error createSelectSchema(mView, { unknown: type.string }); } ================================================ FILE: drizzle-arktype/tests/singlestore.test.ts ================================================ import { Type, type } from 'arktype'; import { type Equal } from 'drizzle-orm'; import { customType, int, json, serial, singlestoreSchema, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { bigintNarrow, jsonSchema, unsignedBigintNarrow } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX); const serialNumberModeSchema = type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER); const textSchema = type.string.atMostLength(CONSTANTS.INT16_UNSIGNED_MAX); test('table - select', (t) => { const table = singlestoreTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = type({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = singlestoreSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = type({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = singlestoreTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = type({ id: serialNumberModeSchema.optional(), name: textSchema, age: intSchema.or(type.null).optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = singlestoreTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = type({ id: serialNumberModeSchema.optional(), name: textSchema.optional(), age: intSchema.or(type.null).optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); // TODO: SingleStore doesn't support views yet. Add these tests when they're added // test('view qb - select', (t) => { // const table = singlestoreTable('test', { // id: serial().primaryKey(), // name: text().notNull(), // }); // const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); // const result = createSelectSchema(view); // const expected = v.object({ id: serialNumberModeSchema, age: v.any() }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); // test('view columns - select', (t) => { // const view = mysqlView('test', { // id: serial().primaryKey(), // name: text().notNull(), // }).as(sql``); // const result = createSelectSchema(view); // const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); // test('view with nested fields - select', (t) => { // const table = singlestoreTable('test', { // id: serial().primaryKey(), // name: text().notNull(), // }); // const view = mysqlView('test').as((qb) => // qb.select({ // id: table.id, // nested: { // name: table.name, // age: sql``.as('age'), // }, // table, // }).from(table) // ); // const result = createSelectSchema(view); // const expected = v.object({ // id: serialNumberModeSchema, // nested: v.object({ name: textSchema, age: v.any() }), // table: v.object({ id: serialNumberModeSchema, name: textSchema }), // }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); test('nullability - select', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = type({ c1: intSchema.or(type.null), c2: intSchema, c3: intSchema.or(type.null), c4: intSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema, c3: intSchema.or(type.null).optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - update', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema.optional(), c3: intSchema.or(type.null).optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: intSchema.or(type.null), c2: intSchema.atMost(1000), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = type.string.atLeastLength(1).atMostLength(100); const result = createSelectSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), c4: customTextSchema, }); const expected = type({ c1: intSchema.or(type.null), c2: intSchema.atMost(1000), c3: type.string.pipe(Number), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema.atMost(1000), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema.atMost(1000).optional(), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); // test('refine view - select', (t) => { // const table = singlestoreTable('test', { // c1: int(), // c2: int(), // c3: int(), // c4: int(), // c5: int(), // c6: int(), // }); // const view = mysqlView('test').as((qb) => // qb.select({ // c1: table.c1, // c2: table.c2, // c3: table.c3, // nested: { // c4: table.c4, // c5: table.c5, // c6: table.c6, // }, // table, // }).from(table) // ); // const result = createSelectSchema(view, { // c2: (schema) => v.pipe(schema, v.maxValue(1000)), // c3: v.pipe(type.string, v.transform(Number)), // nested: { // c5: (schema) => v.pipe(schema, v.maxValue(1000)), // c6: v.pipe(type.string, v.transform(Number)), // }, // table: { // c2: (schema) => v.pipe(schema, v.maxValue(1000)), // c3: v.pipe(type.string, v.transform(Number)), // }, // }); // const expected = v.object({ // c1: v.nullable(intSchema), // c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), // c3: v.pipe(type.string, v.transform(Number)), // nested: v.object({ // c4: v.nullable(intSchema), // c5: v.nullable(v.pipe(intSchema, v.maxValue(1000))), // c6: v.pipe(type.string, v.transform(Number)), // }), // table: v.object({ // c1: v.nullable(intSchema), // c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), // c3: v.pipe(type.string, v.transform(Number)), // c4: v.nullable(intSchema), // c5: v.nullable(intSchema), // c6: v.nullable(intSchema), // }), // }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); test('all data types', (t) => { const table = singlestoreTable('test', ({ bigint, binary, boolean, char, date, datetime, decimal, double, float, int, json, mediumint, singlestoreEnum, real, serial, smallint, text, time, timestamp, tinyint, varchar, varbinary, year, longtext, mediumtext, tinytext, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigint3: bigint({ unsigned: true, mode: 'number' }).notNull(), bigint4: bigint({ unsigned: true, mode: 'bigint' }).notNull(), binary: binary({ length: 10 }).notNull(), boolean: boolean().notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), decimal1: decimal().notNull(), decimal2: decimal({ unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), float2: float({ unsigned: true }).notNull(), int1: int().notNull(), int2: int({ unsigned: true }).notNull(), json: json().notNull(), mediumint1: mediumint().notNull(), mediumint2: mediumint({ unsigned: true }).notNull(), enum: singlestoreEnum('enum', ['a', 'b', 'c']).notNull(), real: real().notNull(), serial: serial().notNull(), smallint1: smallint().notNull(), smallint2: smallint({ unsigned: true }).notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), tinyint1: tinyint().notNull(), tinyint2: tinyint({ unsigned: true }).notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), varbinary: varbinary({ length: 10 }).notNull(), year: year().notNull(), longtext1: longtext().notNull(), longtext2: longtext({ enum: ['a', 'b', 'c'] }).notNull(), mediumtext1: mediumtext().notNull(), mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), })); const result = createSelectSchema(table); const expected = type({ bigint1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), bigint2: type.bigint.narrow(bigintNarrow), bigint3: type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER), bigint4: type.bigint.narrow(unsignedBigintNarrow), binary: type.string, boolean: type.boolean, char1: type.string.exactlyLength(10), char2: type.enumerated('a', 'b', 'c'), date1: type.Date, date2: type.string, datetime1: type.Date, datetime2: type.string, decimal1: type.string, decimal2: type.string, double1: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), double2: type.number.atLeast(0).atMost(CONSTANTS.INT48_UNSIGNED_MAX), float1: type.number.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX), float2: type.number.atLeast(0).atMost(CONSTANTS.INT24_UNSIGNED_MAX), int1: type.keywords.number.integer.atLeast(CONSTANTS.INT32_MIN).atMost(CONSTANTS.INT32_MAX), int2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT32_UNSIGNED_MAX), json: jsonSchema, mediumint1: type.keywords.number.integer.atLeast(CONSTANTS.INT24_MIN).atMost(CONSTANTS.INT24_MAX), mediumint2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT24_UNSIGNED_MAX), enum: type.enumerated('a', 'b', 'c'), real: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), serial: type.keywords.number.integer.atLeast(0).atMost(Number.MAX_SAFE_INTEGER), smallint1: type.keywords.number.integer.atLeast(CONSTANTS.INT16_MIN).atMost(CONSTANTS.INT16_MAX), smallint2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT16_UNSIGNED_MAX), text1: type.string.atMostLength(CONSTANTS.INT16_UNSIGNED_MAX), text2: type.enumerated('a', 'b', 'c'), time: type.string, timestamp1: type.Date, timestamp2: type.string, tinyint1: type.keywords.number.integer.atLeast(CONSTANTS.INT8_MIN).atMost(CONSTANTS.INT8_MAX), tinyint2: type.keywords.number.integer.atLeast(0).atMost(CONSTANTS.INT8_UNSIGNED_MAX), varchar1: type.string.atMostLength(10), varchar2: type.enumerated('a', 'b', 'c'), varbinary: type.string, year: type.keywords.number.integer.atLeast(1901).atMost(2155), longtext1: type.string.atMostLength(CONSTANTS.INT32_UNSIGNED_MAX), longtext2: type.enumerated('a', 'b', 'c'), mediumtext1: type.string.atMostLength(CONSTANTS.INT24_UNSIGNED_MAX), mediumtext2: type.enumerated('a', 'b', 'c'), tinytext1: type.string.atMostLength(CONSTANTS.INT8_UNSIGNED_MAX), tinytext2: type.enumerated('a', 'b', 'c'), }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: Type = type('unknown.any') as any; const table = singlestoreTable('test', { json: json().$type(), }); const result = createSelectSchema(table); const expected = type({ json: TopLevelCondition.or(type.null), }); Expect, type.infer>>(); } /* Disallow unknown keys in table refinement - select */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: type.string }); } /* Disallow unknown keys in table refinement - insert */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: type.string }); } /* Disallow unknown keys in table refinement - update */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: type.string }); } // /* Disallow unknown keys in view qb - select */ { // const table = singlestoreTable('test', { id: int() }); // const view = mysqlView('test').as((qb) => qb.select().from(table)); // const nestedSelect = mysqlView('test').as((qb) => qb.select({ table }).from(table)); // // @ts-expect-error // createSelectSchema(view, { unknown: type.string }); // // @ts-expect-error // createSelectSchema(nestedSelect, { table: { unknown: type.string } }); // } // /* Disallow unknown keys in view columns - select */ { // const view = mysqlView('test', { id: int() }).as(sql``); // // @ts-expect-error // createSelectSchema(view, { unknown: type.string }); // } ================================================ FILE: drizzle-arktype/tests/sqlite.test.ts ================================================ import { Type, type } from 'arktype'; import { type Equal, sql } from 'drizzle-orm'; import { blob, customType, int, sqliteTable, sqliteView, text } from 'drizzle-orm/sqlite-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { bigintNarrow, bufferSchema, jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER); const textSchema = type.string; test('table - select', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }); const result = createSelectSchema(table); const expected = type({ id: intSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = type({ id: intSchema.optional(), name: textSchema, age: intSchema.or(type.null).optional() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = type({ id: intSchema.optional(), name: textSchema.optional(), age: intSchema.or(type.null).optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view qb - select', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }); const view = sqliteView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = type({ id: intSchema, age: type('unknown.any') }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view columns - select', (t) => { const view = sqliteView('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = type({ id: intSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view with nested fields - select', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }); const view = sqliteView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = type({ id: intSchema, nested: type({ name: textSchema, age: type('unknown.any') }), table: type({ id: intSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - select', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = type({ c1: intSchema.or(type.null), c2: intSchema, c3: intSchema.or(type.null), c4: intSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema, c3: intSchema.or(type.null).optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - update', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema.optional(), c3: intSchema.or(type.null).optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: intSchema.or(type.null), c2: intSchema.atMost(1000), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = type.string.atLeastLength(1).atMostLength(100); const result = createSelectSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), c4: customTextSchema, }); const expected = type({ c1: intSchema.or(type.null), c2: intSchema.atMost(1000), c3: type.string.pipe(Number), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema.atMost(1000), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }); const expected = type({ c1: intSchema.or(type.null).optional(), c2: intSchema.atMost(1000).optional(), c3: type.string.pipe(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine view - select', (t) => { const table = sqliteTable('test', { c1: int(), c2: int(), c3: int(), c4: int(), c5: int(), c6: int(), }); const view = sqliteView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), nested: { c5: (schema) => schema.atMost(1000), c6: type.string.pipe(Number), }, table: { c2: (schema) => schema.atMost(1000), c3: type.string.pipe(Number), }, }); const expected = type({ c1: intSchema.or(type.null), c2: intSchema.atMost(1000).or(type.null), c3: type.string.pipe(Number), nested: type({ c4: intSchema.or(type.null), c5: intSchema.atMost(1000).or(type.null), c6: type.string.pipe(Number), }), table: type({ c1: intSchema.or(type.null), c2: intSchema.atMost(1000).or(type.null), c3: type.string.pipe(Number), c4: intSchema.or(type.null), c5: intSchema.or(type.null), c6: intSchema.or(type.null), }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('all data types', (t) => { const table = sqliteTable('test', ({ blob, integer, numeric, real, text, }) => ({ blob1: blob({ mode: 'buffer' }).notNull(), blob2: blob({ mode: 'bigint' }).notNull(), blob3: blob({ mode: 'json' }).notNull(), integer1: integer({ mode: 'number' }).notNull(), integer2: integer({ mode: 'boolean' }).notNull(), integer3: integer({ mode: 'timestamp' }).notNull(), integer4: integer({ mode: 'timestamp_ms' }).notNull(), numeric: numeric().notNull(), real: real().notNull(), text1: text({ mode: 'text' }).notNull(), text2: text({ mode: 'text', length: 10 }).notNull(), text3: text({ mode: 'text', enum: ['a', 'b', 'c'] }).notNull(), text4: text({ mode: 'json' }).notNull(), })); const result = createSelectSchema(table); const expected = type({ blob1: bufferSchema, blob2: type.bigint.narrow(bigintNarrow), blob3: jsonSchema, integer1: type.keywords.number.integer.atLeast(Number.MIN_SAFE_INTEGER).atMost(Number.MAX_SAFE_INTEGER), integer2: type.boolean, integer3: type.Date, integer4: type.Date, numeric: type.string, real: type.number.atLeast(CONSTANTS.INT48_MIN).atMost(CONSTANTS.INT48_MAX), text1: type.string, text2: type.string.atMostLength(10), text3: type.enumerated('a', 'b', 'c'), text4: jsonSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: Type = type('unknown.any') as any; const table = sqliteTable('test', { json1: text({ mode: 'json' }).$type().notNull(), json2: blob({ mode: 'json' }).$type(), }); const result = createSelectSchema(table); const expected = type({ json1: TopLevelCondition, json2: TopLevelCondition.or(type.null), }); Expect, type.infer>>(); } /* Disallow unknown keys in table refinement - select */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: type.string }); } /* Disallow unknown keys in table refinement - insert */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: type.string }); } /* Disallow unknown keys in table refinement - update */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: type.string }); } /* Disallow unknown keys in view qb - select */ { const table = sqliteTable('test', { id: int() }); const view = sqliteView('test').as((qb) => qb.select().from(table)); const nestedSelect = sqliteView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: type.string }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: type.string } }); } /* Disallow unknown keys in view columns - select */ { const view = sqliteView('test', { id: int() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: type.string }); } ================================================ FILE: drizzle-arktype/tests/tsconfig.json ================================================ { "extends": "../tsconfig.json", "compilerOptions": { "module": "esnext", "target": "esnext", "noEmit": true, "rootDir": "..", "outDir": "./.cache" }, "include": [".", "../src"] } ================================================ FILE: drizzle-arktype/tests/utils.ts ================================================ import { Type } from 'arktype'; import { expect, type TaskContext } from 'vitest'; export function expectSchemaShape>(t: TaskContext, expected: T) { return { from(actual: T) { expect(actual.json).toStrictEqual(expected.json); expect(actual.expression).toStrictEqual(expected.expression); }, }; } export const expectEnumValues = expectSchemaShape; export function Expect<_ extends true>() {} ================================================ FILE: drizzle-arktype/tsconfig.build.json ================================================ { "extends": "./tsconfig.json", "compilerOptions": { "rootDir": "src" }, "include": ["src"] } ================================================ FILE: drizzle-arktype/tsconfig.json ================================================ { "extends": "../tsconfig.json", "compilerOptions": { "outDir": "dist", "baseUrl": ".", "declaration": true, "noEmit": true, "paths": { "~/*": ["src/*"] } }, "include": ["src", "*.ts", "benchmarks"] } ================================================ FILE: drizzle-arktype/vitest.config.ts ================================================ import tsconfigPaths from 'vite-tsconfig-paths'; import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { include: [ 'tests/**/*.test.ts', ], exclude: [ 'tests/bun/**/*', ], typecheck: { tsconfig: 'tsconfig.json', }, testTimeout: 100000, hookTimeout: 100000, isolate: false, poolOptions: { threads: { singleThread: true, }, }, }, plugins: [tsconfigPaths()], }); ================================================ FILE: drizzle-kit/.gitignore ================================================ /* **/.DS_Store !src !imports-checker !tests !vitest.config.ts !README.md !CONTRIBUTING.md !schema.ts !.eslint !.gitignore !package.json !tsconfig.json !tsconfig.cli-types.json !tsconfig.build.json !pnpm-lock.yaml !.github !build.ts !build.dev.ts tests/test.ts !patches ================================================ FILE: drizzle-kit/README.md ================================================ ## Drizzle Kit Drizzle Kit is a CLI migrator tool for Drizzle ORM. It is probably the one and only tool that lets you completely automatically generate SQL migrations and covers ~95% of the common cases like deletions and renames by prompting user input. - is a mirror repository for issues. ## Documentation Check the full documentation on [the website](https://orm.drizzle.team/kit-docs/overview). ### How it works Drizzle Kit traverses a schema module and generates a snapshot to compare with the previous version, if there is one. Based on the difference, it will generate all needed SQL migrations. If there are any cases that can't be resolved automatically, such as renames, it will prompt the user for input. For example, for this schema module: ```typescript // src/db/schema.ts import { integer, pgTable, serial, text, varchar } from "drizzle-orm/pg-core"; const users = pgTable("users", { id: serial("id").primaryKey(), fullName: varchar("full_name", { length: 256 }), }, (table) => ({ nameIdx: index("name_idx", table.fullName), }) ); export const authOtp = pgTable("auth_otp", { id: serial("id").primaryKey(), phone: varchar("phone", { length: 256 }), userId: integer("user_id").references(() => users.id), }); ``` It will generate: ```SQL CREATE TABLE IF NOT EXISTS auth_otp ( "id" SERIAL PRIMARY KEY, "phone" character varying(256), "user_id" INT ); CREATE TABLE IF NOT EXISTS users ( "id" SERIAL PRIMARY KEY, "full_name" character varying(256) ); DO $$ BEGIN ALTER TABLE auth_otp ADD CONSTRAINT auth_otp_user_id_fkey FOREIGN KEY ("user_id") REFERENCES users(id); EXCEPTION WHEN duplicate_object THEN null; END $$; CREATE INDEX IF NOT EXISTS users_full_name_index ON users (full_name); ``` ### Installation & configuration ```shell npm install -D drizzle-kit ``` Running with CLI options: ```jsonc // package.json { "scripts": { "generate": "drizzle-kit generate --out migrations-folder --schema src/db/schema.ts" } } ``` ```shell npm run generate ``` ================================================ FILE: drizzle-kit/build.dev.ts ================================================ import * as esbuild from 'esbuild'; import { cpSync } from 'node:fs'; const driversPackages = [ // postgres drivers 'pg', 'postgres', '@vercel/postgres', '@neondatabase/serverless', // mysql drivers 'mysql2', '@planetscale/database', // sqlite drivers '@libsql/client', 'better-sqlite3', ]; esbuild.buildSync({ entryPoints: ['./src/utils.ts'], bundle: true, outfile: 'dist/utils.js', format: 'cjs', target: 'node16', platform: 'node', external: ['drizzle-orm', 'esbuild', ...driversPackages], banner: { js: `#!/usr/bin/env -S node --loader @esbuild-kit/esm-loader --no-warnings`, }, }); esbuild.buildSync({ entryPoints: ['./src/cli/index.ts'], bundle: true, outfile: 'dist/index.cjs', format: 'cjs', target: 'node16', platform: 'node', external: [ 'commander', 'json-diff', 'glob', 'esbuild', 'drizzle-orm', ...driversPackages, ], banner: { js: `#!/usr/bin/env -S node --loader ./dist/loader.mjs --no-warnings`, }, }); cpSync('./src/loader.mjs', 'dist/loader.mjs'); ================================================ FILE: drizzle-kit/build.ts ================================================ /// import * as esbuild from 'esbuild'; import { readFileSync, writeFileSync } from 'node:fs'; import * as tsup from 'tsup'; import pkg from './package.json'; const driversPackages = [ // postgres drivers 'pg', 'postgres', '@vercel/postgres', '@neondatabase/serverless', '@electric-sql/pglite', // mysql drivers 'mysql2', '@planetscale/database', // sqlite drivers '@libsql/client', 'better-sqlite3', 'bun:sqlite', ]; esbuild.buildSync({ entryPoints: ['./src/utils.ts'], bundle: true, outfile: 'dist/utils.js', format: 'cjs', target: 'node16', platform: 'node', external: [ 'commander', 'json-diff', 'glob', 'esbuild', 'drizzle-orm', ...driversPackages, ], banner: { js: `#!/usr/bin/env node`, }, }); esbuild.buildSync({ entryPoints: ['./src/utils.ts'], bundle: true, outfile: 'dist/utils.mjs', format: 'esm', target: 'node16', platform: 'node', external: [ 'commander', 'json-diff', 'glob', 'esbuild', 'drizzle-orm', ...driversPackages, ], banner: { js: `#!/usr/bin/env node`, }, }); esbuild.buildSync({ entryPoints: ['./src/cli/index.ts'], bundle: true, outfile: 'dist/bin.cjs', format: 'cjs', target: 'node16', platform: 'node', define: { 'process.env.DRIZZLE_KIT_VERSION': `"${pkg.version}"`, }, external: [ 'esbuild', 'drizzle-orm', ...driversPackages, ], banner: { js: `#!/usr/bin/env node`, }, }); const main = async () => { await tsup.build({ entryPoints: ['./src/index.ts'], outDir: './dist', external: [ 'esbuild', 'drizzle-orm', ...driversPackages, ], splitting: false, dts: true, format: ['cjs', 'esm'], outExtension: (ctx) => { if (ctx.format === 'cjs') { return { dts: '.d.ts', js: '.js', }; } return { dts: '.d.mts', js: '.mjs', }; }, }); await tsup.build({ entryPoints: ['./src/api.ts'], outDir: './dist', external: [ 'esbuild', 'drizzle-orm', ...driversPackages, ], splitting: false, dts: true, format: ['cjs', 'esm'], banner: (ctx) => { /** * fix dynamic require in ESM ("glob" -> "fs.realpath" requires 'fs' module) * @link https://github.com/drizzle-team/drizzle-orm/issues/2853 */ if (ctx.format === 'esm') { return { js: "import { createRequire } from 'module'; const require = createRequire(import.meta.url);", }; } return undefined; }, outExtension: (ctx) => { if (ctx.format === 'cjs') { return { dts: '.d.ts', js: '.js', }; } return { dts: '.d.mts', js: '.mjs', }; }, }); const apiCjs = readFileSync('./dist/api.js', 'utf8').replace(/await import\(/g, 'require('); writeFileSync('./dist/api.js', apiCjs); }; main().catch((e) => { console.error(e); process.exit(1); }); ================================================ FILE: drizzle-kit/imports-checker/analyze.ts ================================================ import { readFileSync } from 'fs'; import type { Node } from 'ohm-js'; import JSImports from './grammar/grammar.ohm-bundle'; export type CollectionItem = { type: 'data' | 'types'; source: string; }; function recursiveRun(...args: Node[]): boolean { for (const arg of args) { if ( arg.ctorName === 'Rest' || arg.ctorName === 'comment' || arg.ctorName === 'stringLiteral' ) { continue; } if ( arg.ctorName === 'ImportExpr_From' || arg.ctorName === 'ImportExpr_NoFrom' ) { arg['analyze'](); continue; } if (arg.isTerminal()) continue; for (const c of arg.children) { if (!recursiveRun(c)) return false; } } return true; } function init(collection: CollectionItem[]) { const semantics = JSImports.createSemantics(); semantics.addOperation('analyze', { JSImports(arg0, arg1) { recursiveRun(arg0, arg1); }, ImportExpr_From(kImport, importInner, kFrom, importSource) { const ruleName = importInner.children[0]!.ctorName; const importType = ruleName === 'ImportInner_Type' || ruleName === 'ImportInner_Types' ? 'types' : 'data'; collection.push({ source: importSource.children[1]!.sourceString!, type: importType, }); }, ImportExpr_NoFrom(kImport, importSource) { collection.push({ source: importSource.children[1]!.sourceString!, type: 'data', }); }, }); return semantics; } export function analyze(path: string) { const file = readFileSync(path).toString(); const match = JSImports.match(file, 'JSImports'); if (match.failed()) throw new Error(`Failed to parse file: ${path}`); const collection: CollectionItem[] = []; init(collection)(match)['analyze'](); return collection; } ================================================ FILE: drizzle-kit/imports-checker/checker.ts ================================================ import fs from 'fs'; import m from 'micromatch'; import { dirname, join as joinPath, relative, resolve as resolvePath } from 'path'; import { analyze } from './analyze'; type External = { file: string; import: string; type: 'data' | 'types'; }; export type Issue = { file: string; imports: IssueImport[]; accessChains: ChainLink[][]; }; export type IssueImport = { name: string; type: 'data' | 'types'; }; export type ChainLink = { file: string; import: string; }; type ListMode = 'whitelist' | 'blacklist'; class ImportAnalyzer { private localImportRegex = /^(\.?\.?\/|\.\.?$)/; private importedFileFormatRegex = /^.*\.(ts|tsx|mts|cts|js|jsx|mjs|cjs|json)$/i; private visited: Set = new Set(); private externals: External[] = []; private accessChains: Record = {}; constructor( private basePath: string, private entry: string, private listMode: ListMode, private readonly wantedList: string[], private localPaths: string[], private logger?: boolean, private ignoreTypes?: boolean, ) {} private isDirectory = (path: string) => { try { return fs.lstatSync(path).isDirectory(); } catch (e) { return false; } }; private isFile = (path: string) => { try { return fs.lstatSync(path).isFile(); } catch (e) { return false; } }; private localizePath = (path: string) => relative(resolvePath(this.basePath), resolvePath(path)); private isCustomLocal = (importTarget: string) => !!this.localPaths.find( (l) => importTarget === l || importTarget.startsWith(l.endsWith('/') ? l : `${l}/`), ); private isLocal = (importTarget: string) => this.localImportRegex.test(importTarget) || this.isCustomLocal(importTarget); private isTsFormat = (path: string) => this.importedFileFormatRegex.test(path); private resolveCustomLocalPath = ( absoluteBase: string, base: string, target: string, ): string => { return joinPath(absoluteBase, target); }; private resolveTargetFile = (path: string): string => { if (this.isFile(path)) return path; const formats = [ '.ts', '.mts', '.cts', '.tsx', '.js', '.mjs', '.cjs', '.jsx', ]; for (const format of formats) { const indexPath = joinPath(path, `/index${format}`); if (this.isFile(indexPath)) return indexPath; const formatFilePath = `${path}${format}`; if (this.isFile(formatFilePath)) return formatFilePath; } return path; }; private resolveTargetPath = ( absoluteBase: string, base: string, target: string, ): string => { if (this.isCustomLocal(target)) { return this.resolveTargetFile( this.resolveCustomLocalPath(absoluteBase, base, target), ); } const dir = this.isDirectory(base) ? base : dirname(base); const joined = joinPath(dir, target); return this.resolveTargetFile(joined); }; private _analyzeImports = ( target: string = this.entry, basePath: string = this.basePath, accessChain: ChainLink[] = [], ) => { if (this.visited.has(target)) return; const locals: string[] = []; try { if (this.logger) console.log(`${this.localizePath(target)}`); const imports = analyze(target); for (const { source: i, type } of imports) { if (this.ignoreTypes && type === 'types') continue; if (this.isLocal(i)) { locals.push(i); continue; } this.externals.push({ file: this.localizePath(target), import: i, type: type, }); } } catch (e) { throw e; } finally { this.visited.add(target); } for (const local of locals) { const transformedTarget = this.resolveTargetPath(basePath, target, local); const localChain = [ ...accessChain, { file: this.localizePath(target), import: local, }, ]; const localized = this.localizePath(transformedTarget); if (this.accessChains[localized]) { this.accessChains[localized].push(localChain); } else this.accessChains[localized] = [localChain]; if (this.isTsFormat(transformedTarget)) { this._analyzeImports(transformedTarget, basePath, localChain); } else { throw new Error(`unrecognized: ${localized}`); } } }; public analyzeImports = () => { const entryLocalized = this.localizePath(this.entry); if (!this.accessChains[entryLocalized]) { this.accessChains[entryLocalized] = [[]]; } this._analyzeImports(); const rawIssues = this.listMode === 'whitelist' ? this.externals.filter((e) => !m([e.import], this.wantedList).length) : this.externals.filter((e) => m([e.import], this.wantedList).length); const issueMap: Record = {}; for (const { file, import: i, type } of rawIssues) { if (issueMap[file]) { issueMap[file].imports.push({ name: i, type, }); continue; } issueMap[file] = { file, imports: [ { name: i, type, }, ], accessChains: this.accessChains[file]!, }; } return { issues: Object.entries(issueMap).map(([file, data]) => { for (const chain of data.accessChains) { chain.push({ file, import: '', }); } return data; }), accessChains: this.accessChains, }; }; } export type CustomLocalPathResolver = ( basePath: string, path: string, target: string, ) => string; export type AnalyzeImportsConfig = & { basePath: string; entry: string; logger?: boolean; ignoreTypes?: boolean; localPaths?: string[]; } & ( | { blackList: string[]; } | { whiteList: string[]; } ); type AnyAnalyzeImportsConfig = { basePath: string; entry: string; blackList?: string[]; whiteList?: string[]; logger?: boolean; ignoreTypes?: boolean; localPaths?: string[]; }; export function analyzeImports(cfg: AnalyzeImportsConfig) { const { basePath, blackList, whiteList, entry, localPaths: localImports, ignoreTypes, logger, } = cfg as AnyAnalyzeImportsConfig; const mode = whiteList ? 'whitelist' : 'blacklist'; const wantedList = blackList ?? whiteList!; const analyzer = new ImportAnalyzer( joinPath(basePath), joinPath(entry), mode, wantedList, localImports ?? [], logger, ignoreTypes, ); return analyzer.analyzeImports(); } ================================================ FILE: drizzle-kit/imports-checker/grammar/grammar.ohm ================================================ JSImports { JSImports = (Expr ";"?)* Expr = | comment | stringLiteral | ImportExpr | Rest ImportExpr = | "import" ImportInner "from" importSource -- From | "import" importSource -- NoFrom Rest = (~(ImportExpr | comment | stringLiteral) any)+ ImportInner = | ("type" "{" NonemptyListOf ","? "}") -- Type | ("{" NonemptyListOf ","? "}") -- Types | ("{" NonemptyListOf ","? "}") -- Extended | (identifier ("," "type"? "{" NonemptyListOf ","? "}")?) -- Mixed | ("*" ("as" identifier)?) -- All | (identifier ("as" identifier)?) -- Default ImportExtendedSelection = TypeImport | Import ImportExtendedSelectionTypes = TypeImport ImportExtendedSelectionTypeless = Import Import = identifier ("as" identifier)? TypeImport = "type" Import ("as" identifier)? identifier = letter alnum* quote = "\"" | "'" | "`" notQuote = ~quote any importSource = | "\"" notQuote+ "\"" | "'" notQuote+ "'" | "`" notQuote+ "`" lineTerminator = "\n" | "\r" | "\u2028" | "\u2029" lineTerminatorSequence = "\n" | "\r" ~"\n" | "\u2028" | "\u2029" | "\r\n" comment = multiLineComment | singleLineComment multiLineComment = "/*" (~"*/" any)* "*/" singleLineComment = "//" (~lineTerminator any)* stringLiteral = | "\"" doubleStringCharacter* "\"" | "'" singleStringCharacter* "'" | "`" templateStringCharacter* "`" doubleStringCharacter = | ~("\"" | "\\" | lineTerminator) any -- NonEscaped | "\\" escapeSequence -- Escaped | lineContinuation -- LineContinuation singleStringCharacter = | ~("'" | "\\" | lineTerminator) any -- NonEscaped | "\\" escapeSequence -- Escaped | lineContinuation -- LineContinuation templateStringCharacter = | ~ ("`" | "\\") any -- NonEscaped | "\\" escapeSequence -- Escaped lineContinuation = "\\" lineTerminatorSequence escapeSequence = unicodeEscapeSequence | hexEscapeSequence | octalEscapeSequence | characterEscapeSequence characterEscapeSequence = singleEscapeCharacter | nonEscapeCharacter singleEscapeCharacter = "'" | "\"" | "\\" | "b" | "f" | "n" | "r" | "t" | "v" nonEscapeCharacter = ~(escapeCharacter | lineTerminator) any escapeCharacter = singleEscapeCharacter | decimalDigit | "x" | "u" octalEscapeSequence = | zeroToThree octalDigit octalDigit -- Whole | fourToSeven octalDigit -- EightTimesfourToSeven | zeroToThree octalDigit ~decimalDigit -- EightTimesZeroToThree | octalDigit ~decimalDigit -- Octal hexEscapeSequence = "x" hexDigit hexDigit unicodeEscapeSequence = "u" hexDigit hexDigit hexDigit hexDigit zeroToThree = "0".."3" fourToSeven = "4".."7" decimalDigit = "0".."9" nonZeroDigit = "1".."9" octalDigit = "0".."7" regularExpressionLiteral = "/" regularExpressionBody "/" regularExpressionFlags regularExpressionBody = regularExpressionFirstChar regularExpressionChar* regularExpressionFirstChar = | ~("*" | "\\" | "/" | "[") regularExpressionNonTerminator | regularExpressionBackslashSequence | regularExpressionClass regularExpressionChar = ~("\\" | "/" | "[") regularExpressionNonTerminator | regularExpressionBackslashSequence | regularExpressionClass regularExpressionBackslashSequence = "\\" regularExpressionNonTerminator regularExpressionNonTerminator = ~(lineTerminator) any regularExpressionClass = "[" regularExpressionClassChar* "]" regularExpressionClassChar = | ~("]" | "\\") regularExpressionNonTerminator | regularExpressionBackslashSequence regularExpressionFlags = identifierPart* multiLineCommentNoNL = "/*" (~("*/" | lineTerminator) any)* "*/" identifierStart = | letter | "$" | "_" | "\\" unicodeEscapeSequence -- escaped identifierPart = | identifierStart | unicodeCombiningMark | unicodeDigit | unicodeConnectorPunctuation | "\u200C" | "\u200D" letter += unicodeCategoryNl unicodeCategoryNl = "\u2160".."\u2182" | "\u3007" | "\u3021".."\u3029" unicodeDigit (a digit) = "\u0030".."\u0039" | "\u0660".."\u0669" | "\u06F0".."\u06F9" | "\u0966".."\u096F" | "\u09E6".."\u09EF" | "\u0A66".."\u0A6F" | "\u0AE6".."\u0AEF" | "\u0B66".."\u0B6F" | "\u0BE7".."\u0BEF" | "\u0C66".."\u0C6F" | "\u0CE6".."\u0CEF" | "\u0D66".."\u0D6F" | "\u0E50".."\u0E59" | "\u0ED0".."\u0ED9" | "\u0F20".."\u0F29" | "\uFF10".."\uFF19" unicodeCombiningMark (a Unicode combining mark) = "\u0300".."\u0345" | "\u0360".."\u0361" | "\u0483".."\u0486" | "\u0591".."\u05A1" | "\u05A3".."\u05B9" | "\u05BB".."\u05BD" | "\u05BF".."\u05BF" | "\u05C1".."\u05C2" | "\u05C4".."\u05C4" | "\u064B".."\u0652" | "\u0670".."\u0670" | "\u06D6".."\u06DC" | "\u06DF".."\u06E4" | "\u06E7".."\u06E8" | "\u06EA".."\u06ED" | "\u0901".."\u0902" | "\u093C".."\u093C" | "\u0941".."\u0948" | "\u094D".."\u094D" | "\u0951".."\u0954" | "\u0962".."\u0963" | "\u0981".."\u0981" | "\u09BC".."\u09BC" | "\u09C1".."\u09C4" | "\u09CD".."\u09CD" | "\u09E2".."\u09E3" | "\u0A02".."\u0A02" | "\u0A3C".."\u0A3C" | "\u0A41".."\u0A42" | "\u0A47".."\u0A48" | "\u0A4B".."\u0A4D" | "\u0A70".."\u0A71" | "\u0A81".."\u0A82" | "\u0ABC".."\u0ABC" | "\u0AC1".."\u0AC5" | "\u0AC7".."\u0AC8" | "\u0ACD".."\u0ACD" | "\u0B01".."\u0B01" | "\u0B3C".."\u0B3C" | "\u0B3F".."\u0B3F" | "\u0B41".."\u0B43" | "\u0B4D".."\u0B4D" | "\u0B56".."\u0B56" | "\u0B82".."\u0B82" | "\u0BC0".."\u0BC0" | "\u0BCD".."\u0BCD" | "\u0C3E".."\u0C40" | "\u0C46".."\u0C48" | "\u0C4A".."\u0C4D" | "\u0C55".."\u0C56" | "\u0CBF".."\u0CBF" | "\u0CC6".."\u0CC6" | "\u0CCC".."\u0CCD" | "\u0D41".."\u0D43" | "\u0D4D".."\u0D4D" | "\u0E31".."\u0E31" | "\u0E34".."\u0E3A" | "\u0E47".."\u0E4E" | "\u0EB1".."\u0EB1" | "\u0EB4".."\u0EB9" | "\u0EBB".."\u0EBC" | "\u0EC8".."\u0ECD" | "\u0F18".."\u0F19" | "\u0F35".."\u0F35" | "\u0F37".."\u0F37" | "\u0F39".."\u0F39" | "\u0F71".."\u0F7E" | "\u0F80".."\u0F84" | "\u0F86".."\u0F87" | "\u0F90".."\u0F95" | "\u0F97".."\u0F97" | "\u0F99".."\u0FAD" | "\u0FB1".."\u0FB7" | "\u0FB9".."\u0FB9" | "\u20D0".."\u20DC" | "\u20E1".."\u20E1" | "\u302A".."\u302F" | "\u3099".."\u309A" | "\uFB1E".."\uFB1E" | "\uFE20".."\uFE23" unicodeConnectorPunctuation = "\u005F" | "\u203F".."\u2040" | "\u30FB" | "\uFE33".."\uFE34" | "\uFE4D".."\uFE4F" | "\uFF3F" | "\uFF65" unicodeSpaceSeparator = "\u2000".."\u200B" | "\u3000" } ================================================ FILE: drizzle-kit/imports-checker/grammar/grammar.ohm-bundle.d.ts ================================================ // AUTOGENERATED FILE // This file was generated from grammar.ohm by `ohm generateBundles`. import { BaseActionDict, Grammar, IterationNode, Node, NonterminalNode, Semantics, TerminalNode } from 'ohm-js'; export interface JSImportsActionDict extends BaseActionDict { JSImports?: (this: NonterminalNode, arg0: IterationNode, arg1: IterationNode) => T; Expr?: (this: NonterminalNode, arg0: NonterminalNode) => T; ImportExpr_From?: ( this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode, arg2: TerminalNode, arg3: NonterminalNode, ) => T; ImportExpr_NoFrom?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode) => T; ImportExpr?: (this: NonterminalNode, arg0: NonterminalNode) => T; Rest?: (this: NonterminalNode, arg0: IterationNode) => T; ImportInner_Type?: ( this: NonterminalNode, arg0: TerminalNode, arg1: TerminalNode, arg2: NonterminalNode, arg3: IterationNode, arg4: TerminalNode, ) => T; ImportInner_Types?: ( this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode, arg2: IterationNode, arg3: TerminalNode, ) => T; ImportInner_Extended?: ( this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode, arg2: IterationNode, arg3: TerminalNode, ) => T; ImportInner_Mixed?: ( this: NonterminalNode, arg0: NonterminalNode, arg1: IterationNode, arg2: IterationNode, arg3: IterationNode, arg4: IterationNode, arg5: IterationNode, arg6: IterationNode, ) => T; ImportInner_All?: (this: NonterminalNode, arg0: TerminalNode, arg1: IterationNode, arg2: IterationNode) => T; ImportInner_Default?: (this: NonterminalNode, arg0: NonterminalNode, arg1: IterationNode, arg2: IterationNode) => T; ImportInner?: (this: NonterminalNode, arg0: NonterminalNode) => T; ImportExtendedSelection?: (this: NonterminalNode, arg0: NonterminalNode) => T; ImportExtendedSelectionTypes?: (this: NonterminalNode, arg0: NonterminalNode) => T; ImportExtendedSelectionTypeless?: (this: NonterminalNode, arg0: NonterminalNode) => T; Import?: (this: NonterminalNode, arg0: NonterminalNode, arg1: IterationNode, arg2: IterationNode) => T; TypeImport?: ( this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode, arg2: IterationNode, arg3: IterationNode, ) => T; identifier?: (this: NonterminalNode, arg0: NonterminalNode, arg1: IterationNode) => T; quote?: (this: NonterminalNode, arg0: TerminalNode) => T; notQuote?: (this: NonterminalNode, arg0: NonterminalNode) => T; importSource?: (this: NonterminalNode, arg0: TerminalNode, arg1: IterationNode, arg2: TerminalNode) => T; lineTerminator?: (this: NonterminalNode, arg0: TerminalNode) => T; lineTerminatorSequence?: (this: NonterminalNode, arg0: TerminalNode) => T; comment?: (this: NonterminalNode, arg0: NonterminalNode) => T; multiLineComment?: (this: NonterminalNode, arg0: TerminalNode, arg1: IterationNode, arg2: TerminalNode) => T; singleLineComment?: (this: NonterminalNode, arg0: TerminalNode, arg1: IterationNode) => T; stringLiteral?: (this: NonterminalNode, arg0: TerminalNode, arg1: IterationNode, arg2: TerminalNode) => T; doubleStringCharacter_NonEscaped?: (this: NonterminalNode, arg0: NonterminalNode) => T; doubleStringCharacter_Escaped?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode) => T; doubleStringCharacter_LineContinuation?: (this: NonterminalNode, arg0: NonterminalNode) => T; doubleStringCharacter?: (this: NonterminalNode, arg0: NonterminalNode) => T; singleStringCharacter_NonEscaped?: (this: NonterminalNode, arg0: NonterminalNode) => T; singleStringCharacter_Escaped?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode) => T; singleStringCharacter_LineContinuation?: (this: NonterminalNode, arg0: NonterminalNode) => T; singleStringCharacter?: (this: NonterminalNode, arg0: NonterminalNode) => T; templateStringCharacter_NonEscaped?: (this: NonterminalNode, arg0: NonterminalNode) => T; templateStringCharacter_Escaped?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode) => T; templateStringCharacter?: (this: NonterminalNode, arg0: NonterminalNode) => T; lineContinuation?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode) => T; escapeSequence?: (this: NonterminalNode, arg0: NonterminalNode) => T; characterEscapeSequence?: (this: NonterminalNode, arg0: NonterminalNode) => T; singleEscapeCharacter?: (this: NonterminalNode, arg0: TerminalNode) => T; nonEscapeCharacter?: (this: NonterminalNode, arg0: NonterminalNode) => T; escapeCharacter?: (this: NonterminalNode, arg0: NonterminalNode | TerminalNode) => T; octalEscapeSequence_Whole?: ( this: NonterminalNode, arg0: NonterminalNode, arg1: NonterminalNode, arg2: NonterminalNode, ) => T; octalEscapeSequence_EightTimesfourToSeven?: ( this: NonterminalNode, arg0: NonterminalNode, arg1: NonterminalNode, ) => T; octalEscapeSequence_EightTimesZeroToThree?: ( this: NonterminalNode, arg0: NonterminalNode, arg1: NonterminalNode, ) => T; octalEscapeSequence_Octal?: (this: NonterminalNode, arg0: NonterminalNode) => T; octalEscapeSequence?: (this: NonterminalNode, arg0: NonterminalNode) => T; hexEscapeSequence?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode, arg2: NonterminalNode) => T; unicodeEscapeSequence?: ( this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode, arg2: NonterminalNode, arg3: NonterminalNode, arg4: NonterminalNode, ) => T; zeroToThree?: (this: NonterminalNode, arg0: TerminalNode) => T; fourToSeven?: (this: NonterminalNode, arg0: TerminalNode) => T; decimalDigit?: (this: NonterminalNode, arg0: TerminalNode) => T; nonZeroDigit?: (this: NonterminalNode, arg0: TerminalNode) => T; octalDigit?: (this: NonterminalNode, arg0: TerminalNode) => T; regularExpressionLiteral?: ( this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode, arg2: TerminalNode, arg3: NonterminalNode, ) => T; regularExpressionBody?: (this: NonterminalNode, arg0: NonterminalNode, arg1: IterationNode) => T; regularExpressionFirstChar?: (this: NonterminalNode, arg0: NonterminalNode) => T; regularExpressionChar?: (this: NonterminalNode, arg0: NonterminalNode) => T; regularExpressionBackslashSequence?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode) => T; regularExpressionNonTerminator?: (this: NonterminalNode, arg0: NonterminalNode) => T; regularExpressionClass?: (this: NonterminalNode, arg0: TerminalNode, arg1: IterationNode, arg2: TerminalNode) => T; regularExpressionClassChar?: (this: NonterminalNode, arg0: NonterminalNode) => T; regularExpressionFlags?: (this: NonterminalNode, arg0: IterationNode) => T; multiLineCommentNoNL?: (this: NonterminalNode, arg0: TerminalNode, arg1: IterationNode, arg2: TerminalNode) => T; identifierStart_escaped?: (this: NonterminalNode, arg0: TerminalNode, arg1: NonterminalNode) => T; identifierStart?: (this: NonterminalNode, arg0: NonterminalNode | TerminalNode) => T; identifierPart?: (this: NonterminalNode, arg0: NonterminalNode | TerminalNode) => T; letter?: (this: NonterminalNode, arg0: NonterminalNode) => T; unicodeCategoryNl?: (this: NonterminalNode, arg0: TerminalNode) => T; unicodeDigit?: (this: NonterminalNode, arg0: TerminalNode) => T; unicodeCombiningMark?: (this: NonterminalNode, arg0: TerminalNode) => T; unicodeConnectorPunctuation?: (this: NonterminalNode, arg0: TerminalNode) => T; unicodeSpaceSeparator?: (this: NonterminalNode, arg0: TerminalNode) => T; } export interface JSImportsSemantics extends Semantics { addOperation(name: string, actionDict: JSImportsActionDict): this; extendOperation(name: string, actionDict: JSImportsActionDict): this; addAttribute(name: string, actionDict: JSImportsActionDict): this; extendAttribute(name: string, actionDict: JSImportsActionDict): this; } export interface JSImportsGrammar extends Grammar { createSemantics(): JSImportsSemantics; extendSemantics(superSemantics: JSImportsSemantics): JSImportsSemantics; } declare const grammar: JSImportsGrammar; export default grammar; ================================================ FILE: drizzle-kit/imports-checker/grammar/grammar.ohm-bundle.js ================================================ import { makeRecipe } from 'ohm-js'; const result = makeRecipe([ 'grammar', { source: 'JSImports {\n JSImports = (Expr ";"?)*\n\n Expr = \n | comment\n | stringLiteral\n | ImportExpr\n | Rest\n\n ImportExpr =\n | "import" ImportInner "from" importSource -- From\n | "import" importSource -- NoFrom\n\n Rest = (~(ImportExpr | comment | stringLiteral) any)+\n\n ImportInner = \n | ("type" "{" NonemptyListOf ","? "}") -- Type\n | ("{" NonemptyListOf ","? "}") -- Types\n | ("{" NonemptyListOf ","? "}") -- Extended\n | (identifier ("," "type"? "{" NonemptyListOf ","? "}")?) -- Mixed\n | ("*" ("as" identifier)?) -- All\n | (identifier ("as" identifier)?) -- Default\n \n\n ImportExtendedSelection = TypeImport | Import\n ImportExtendedSelectionTypes = TypeImport\n ImportExtendedSelectionTypeless = Import\n\n Import = identifier ("as" identifier)?\n TypeImport = "type" Import ("as" identifier)?\n\n identifier = letter alnum*\n quote = "\\"" | "\'" | "`"\n notQuote = ~quote any\n importSource =\n | "\\"" notQuote+ "\\""\n | "\'" notQuote+ "\'"\n | "`" notQuote+ "`"\n\n lineTerminator = "\\n" | "\\r" | "\\u2028" | "\\u2029"\n lineTerminatorSequence = "\\n" | "\\r" ~"\\n" | "\\u2028" | "\\u2029" | "\\r\\n"\n \n comment = multiLineComment | singleLineComment\n\n multiLineComment = "/*" (~"*/" any)* "*/"\n singleLineComment = "//" (~lineTerminator any)*\n\n stringLiteral =\n | "\\"" doubleStringCharacter* "\\""\n | "\'" singleStringCharacter* "\'"\n | "`" templateStringCharacter* "`"\n doubleStringCharacter =\n | ~("\\"" | "\\\\" | lineTerminator) any -- NonEscaped\n | "\\\\" escapeSequence -- Escaped\n | lineContinuation -- LineContinuation\n singleStringCharacter =\n | ~("\'" | "\\\\" | lineTerminator) any -- NonEscaped\n | "\\\\" escapeSequence -- Escaped\n | lineContinuation -- LineContinuation\n templateStringCharacter = \n | ~ ("`" | "\\\\") any -- NonEscaped\n | "\\\\" escapeSequence -- Escaped\n lineContinuation = "\\\\" lineTerminatorSequence\n escapeSequence = unicodeEscapeSequence | hexEscapeSequence | octalEscapeSequence | characterEscapeSequence\n characterEscapeSequence = singleEscapeCharacter | nonEscapeCharacter\n singleEscapeCharacter = "\'" | "\\"" | "\\\\" | "b" | "f" | "n" | "r" | "t" | "v"\n nonEscapeCharacter = ~(escapeCharacter | lineTerminator) any\n escapeCharacter = singleEscapeCharacter | decimalDigit | "x" | "u"\n octalEscapeSequence =\n | zeroToThree octalDigit octalDigit -- Whole\n | fourToSeven octalDigit -- EightTimesfourToSeven\n | zeroToThree octalDigit ~decimalDigit -- EightTimesZeroToThree\n | octalDigit ~decimalDigit -- Octal\n hexEscapeSequence = "x" hexDigit hexDigit\n unicodeEscapeSequence = "u" hexDigit hexDigit hexDigit hexDigit\n\n zeroToThree = "0".."3"\n fourToSeven = "4".."7"\n decimalDigit = "0".."9"\n nonZeroDigit = "1".."9"\n octalDigit = "0".."7"\n\n regularExpressionLiteral = "/" regularExpressionBody "/" regularExpressionFlags\n regularExpressionBody = regularExpressionFirstChar regularExpressionChar*\n regularExpressionFirstChar =\n | ~("*" | "\\\\" | "/" | "[") regularExpressionNonTerminator\n | regularExpressionBackslashSequence\n | regularExpressionClass\n regularExpressionChar = ~("\\\\" | "/" | "[") regularExpressionNonTerminator\n | regularExpressionBackslashSequence\n | regularExpressionClass\n regularExpressionBackslashSequence = "\\\\" regularExpressionNonTerminator\n regularExpressionNonTerminator = ~(lineTerminator) any\n regularExpressionClass = "[" regularExpressionClassChar* "]"\n regularExpressionClassChar =\n | ~("]" | "\\\\") regularExpressionNonTerminator\n | regularExpressionBackslashSequence\n regularExpressionFlags = identifierPart*\n\n multiLineCommentNoNL = "/*" (~("*/" | lineTerminator) any)* "*/"\n\n identifierStart =\n | letter | "$" | "_"\n | "\\\\" unicodeEscapeSequence -- escaped\n identifierPart =\n | identifierStart | unicodeCombiningMark\n | unicodeDigit | unicodeConnectorPunctuation\n | "\\u200C" | "\\u200D"\n letter += unicodeCategoryNl\n unicodeCategoryNl\n = "\\u2160".."\\u2182" | "\\u3007" | "\\u3021".."\\u3029"\n unicodeDigit (a digit)\n = "\\u0030".."\\u0039" | "\\u0660".."\\u0669" | "\\u06F0".."\\u06F9" | "\\u0966".."\\u096F" | "\\u09E6".."\\u09EF" | "\\u0A66".."\\u0A6F" | "\\u0AE6".."\\u0AEF" | "\\u0B66".."\\u0B6F" | "\\u0BE7".."\\u0BEF" | "\\u0C66".."\\u0C6F" | "\\u0CE6".."\\u0CEF" | "\\u0D66".."\\u0D6F" | "\\u0E50".."\\u0E59" | "\\u0ED0".."\\u0ED9" | "\\u0F20".."\\u0F29" | "\\uFF10".."\\uFF19"\n\n unicodeCombiningMark (a Unicode combining mark)\n = "\\u0300".."\\u0345" | "\\u0360".."\\u0361" | "\\u0483".."\\u0486" | "\\u0591".."\\u05A1" | "\\u05A3".."\\u05B9" | "\\u05BB".."\\u05BD" | "\\u05BF".."\\u05BF" | "\\u05C1".."\\u05C2" | "\\u05C4".."\\u05C4" | "\\u064B".."\\u0652" | "\\u0670".."\\u0670" | "\\u06D6".."\\u06DC" | "\\u06DF".."\\u06E4" | "\\u06E7".."\\u06E8" | "\\u06EA".."\\u06ED" | "\\u0901".."\\u0902" | "\\u093C".."\\u093C" | "\\u0941".."\\u0948" | "\\u094D".."\\u094D" | "\\u0951".."\\u0954" | "\\u0962".."\\u0963" | "\\u0981".."\\u0981" | "\\u09BC".."\\u09BC" | "\\u09C1".."\\u09C4" | "\\u09CD".."\\u09CD" | "\\u09E2".."\\u09E3" | "\\u0A02".."\\u0A02" | "\\u0A3C".."\\u0A3C" | "\\u0A41".."\\u0A42" | "\\u0A47".."\\u0A48" | "\\u0A4B".."\\u0A4D" | "\\u0A70".."\\u0A71" | "\\u0A81".."\\u0A82" | "\\u0ABC".."\\u0ABC" | "\\u0AC1".."\\u0AC5" | "\\u0AC7".."\\u0AC8" | "\\u0ACD".."\\u0ACD" | "\\u0B01".."\\u0B01" | "\\u0B3C".."\\u0B3C" | "\\u0B3F".."\\u0B3F" | "\\u0B41".."\\u0B43" | "\\u0B4D".."\\u0B4D" | "\\u0B56".."\\u0B56" | "\\u0B82".."\\u0B82" | "\\u0BC0".."\\u0BC0" | "\\u0BCD".."\\u0BCD" | "\\u0C3E".."\\u0C40" | "\\u0C46".."\\u0C48" | "\\u0C4A".."\\u0C4D" | "\\u0C55".."\\u0C56" | "\\u0CBF".."\\u0CBF" | "\\u0CC6".."\\u0CC6" | "\\u0CCC".."\\u0CCD" | "\\u0D41".."\\u0D43" | "\\u0D4D".."\\u0D4D" | "\\u0E31".."\\u0E31" | "\\u0E34".."\\u0E3A" | "\\u0E47".."\\u0E4E" | "\\u0EB1".."\\u0EB1" | "\\u0EB4".."\\u0EB9" | "\\u0EBB".."\\u0EBC" | "\\u0EC8".."\\u0ECD" | "\\u0F18".."\\u0F19" | "\\u0F35".."\\u0F35" | "\\u0F37".."\\u0F37" | "\\u0F39".."\\u0F39" | "\\u0F71".."\\u0F7E" | "\\u0F80".."\\u0F84" | "\\u0F86".."\\u0F87" | "\\u0F90".."\\u0F95" | "\\u0F97".."\\u0F97" | "\\u0F99".."\\u0FAD" | "\\u0FB1".."\\u0FB7" | "\\u0FB9".."\\u0FB9" | "\\u20D0".."\\u20DC" | "\\u20E1".."\\u20E1" | "\\u302A".."\\u302F" | "\\u3099".."\\u309A" | "\\uFB1E".."\\uFB1E" | "\\uFE20".."\\uFE23"\n\n unicodeConnectorPunctuation = "\\u005F" | "\\u203F".."\\u2040" | "\\u30FB" | "\\uFE33".."\\uFE34" | "\\uFE4D".."\\uFE4F" | "\\uFF3F" | "\\uFF65"\n unicodeSpaceSeparator = "\\u2000".."\\u200B" | "\\u3000"\n\n}', }, 'JSImports', null, 'JSImports', { JSImports: ['define', { sourceInterval: [16, 40] }, null, [], ['star', { sourceInterval: [28, 40] }, [ 'seq', { sourceInterval: [29, 38] }, ['app', { sourceInterval: [29, 33] }, 'Expr', []], ['opt', { sourceInterval: [34, 38] }, ['terminal', { sourceInterval: [34, 37] }, ';']], ]]], Expr: ['define', { sourceInterval: [46, 115] }, null, [], [ 'alt', { sourceInterval: [58, 115] }, ['app', { sourceInterval: [60, 67] }, 'comment', []], ['app', { sourceInterval: [74, 87] }, 'stringLiteral', []], ['app', { sourceInterval: [94, 104] }, 'ImportExpr', []], ['app', { sourceInterval: [111, 115] }, 'Rest', []], ]], ImportExpr_From: ['define', { sourceInterval: [140, 188] }, null, [], [ 'seq', { sourceInterval: [140, 180] }, ['terminal', { sourceInterval: [140, 148] }, 'import'], ['app', { sourceInterval: [149, 160] }, 'ImportInner', []], ['terminal', { sourceInterval: [161, 167] }, 'from'], ['app', { sourceInterval: [168, 180] }, 'importSource', []], ]], ImportExpr_NoFrom: ['define', { sourceInterval: [195, 226] }, null, [], ['seq', { sourceInterval: [195, 216] }, [ 'terminal', { sourceInterval: [195, 203] }, 'import', ], ['app', { sourceInterval: [204, 216] }, 'importSource', []]]], ImportExpr: ['define', { sourceInterval: [121, 226] }, null, [], ['alt', { sourceInterval: [138, 226] }, [ 'app', { sourceInterval: [140, 180] }, 'ImportExpr_From', [], ], ['app', { sourceInterval: [195, 216] }, 'ImportExpr_NoFrom', []]]], Rest: ['define', { sourceInterval: [232, 285] }, null, [], ['plus', { sourceInterval: [239, 285] }, ['seq', { sourceInterval: [240, 283], }, ['not', { sourceInterval: [240, 279] }, [ 'alt', { sourceInterval: [242, 278] }, ['app', { sourceInterval: [242, 252] }, 'ImportExpr', []], ['app', { sourceInterval: [255, 262] }, 'comment', []], ['app', { sourceInterval: [265, 278] }, 'stringLiteral', []], ]], ['app', { sourceInterval: [280, 283] }, 'any', []]]]], ImportInner_Type: ['define', { sourceInterval: [312, 405] }, null, [], [ 'seq', { sourceInterval: [312, 386] }, ['terminal', { sourceInterval: [313, 319] }, 'type'], ['terminal', { sourceInterval: [320, 323] }, '{'], ['app', { sourceInterval: [324, 376] }, 'NonemptyListOf', [[ 'app', { sourceInterval: [339, 370] }, 'ImportExtendedSelectionTypeless', [], ], ['terminal', { sourceInterval: [372, 375] }, ',']]], ['opt', { sourceInterval: [377, 381] }, ['terminal', { sourceInterval: [377, 380] }, ',']], ['terminal', { sourceInterval: [382, 385] }, '}'], ]], ImportInner_Types: ['define', { sourceInterval: [412, 506] }, null, [], ['seq', { sourceInterval: [412, 476] }, [ 'terminal', { sourceInterval: [413, 416] }, '{', ], ['app', { sourceInterval: [417, 466] }, 'NonemptyListOf', [[ 'app', { sourceInterval: [432, 460] }, 'ImportExtendedSelectionTypes', [], ], ['terminal', { sourceInterval: [462, 465] }, ',']]], ['opt', { sourceInterval: [467, 471] }, ['terminal', { sourceInterval: [467, 470], }, ',']], ['terminal', { sourceInterval: [472, 475] }, '}']]], ImportInner_Extended: ['define', { sourceInterval: [513, 610] }, null, [], ['seq', { sourceInterval: [513, 572] }, [ 'terminal', { sourceInterval: [514, 517] }, '{', ], ['app', { sourceInterval: [518, 562] }, 'NonemptyListOf', [[ 'app', { sourceInterval: [533, 556] }, 'ImportExtendedSelection', [], ], ['terminal', { sourceInterval: [558, 561] }, ',']]], ['opt', { sourceInterval: [563, 567] }, ['terminal', { sourceInterval: [563, 566], }, ',']], ['terminal', { sourceInterval: [568, 571] }, '}']]], ImportInner_Mixed: ['define', { sourceInterval: [617, 711] }, null, [], ['seq', { sourceInterval: [617, 702] }, [ 'app', { sourceInterval: [618, 628] }, 'identifier', [], ], ['opt', { sourceInterval: [629, 701] }, [ 'seq', { sourceInterval: [630, 699] }, ['terminal', { sourceInterval: [630, 633] }, ','], ['opt', { sourceInterval: [634, 641] }, ['terminal', { sourceInterval: [634, 640] }, 'type']], ['terminal', { sourceInterval: [642, 645] }, '{'], ['app', { sourceInterval: [646, 690] }, 'NonemptyListOf', [[ 'app', { sourceInterval: [661, 684] }, 'ImportExtendedSelection', [], ], ['terminal', { sourceInterval: [686, 689] }, ',']]], ['opt', { sourceInterval: [691, 695] }, ['terminal', { sourceInterval: [691, 694] }, ',']], ['terminal', { sourceInterval: [696, 699] }, '}'], ]]]], ImportInner_All: ['define', { sourceInterval: [718, 810] }, null, [], ['seq', { sourceInterval: [718, 742] }, [ 'terminal', { sourceInterval: [719, 722] }, '*', ], ['opt', { sourceInterval: [723, 741] }, ['seq', { sourceInterval: [724, 739] }, ['terminal', { sourceInterval: [724, 728], }, 'as'], ['app', { sourceInterval: [729, 739] }, 'identifier', []]]]]], ImportInner_Default: ['define', { sourceInterval: [817, 913] }, null, [], ['seq', { sourceInterval: [817, 848] }, [ 'app', { sourceInterval: [818, 828] }, 'identifier', [], ], ['opt', { sourceInterval: [829, 847] }, ['seq', { sourceInterval: [830, 845] }, ['terminal', { sourceInterval: [830, 834], }, 'as'], ['app', { sourceInterval: [835, 845] }, 'identifier', []]]]]], ImportInner: ['define', { sourceInterval: [291, 913] }, null, [], [ 'alt', { sourceInterval: [310, 913] }, ['app', { sourceInterval: [312, 386] }, 'ImportInner_Type', []], ['app', { sourceInterval: [412, 476] }, 'ImportInner_Types', []], ['app', { sourceInterval: [513, 572] }, 'ImportInner_Extended', []], ['app', { sourceInterval: [617, 702] }, 'ImportInner_Mixed', []], ['app', { sourceInterval: [718, 742] }, 'ImportInner_All', []], ['app', { sourceInterval: [817, 848] }, 'ImportInner_Default', []], ]], ImportExtendedSelection: ['define', { sourceInterval: [924, 969] }, null, [], [ 'alt', { sourceInterval: [950, 969] }, ['app', { sourceInterval: [950, 960] }, 'TypeImport', []], ['app', { sourceInterval: [963, 969] }, 'Import', []], ]], ImportExtendedSelectionTypes: ['define', { sourceInterval: [974, 1015] }, null, [], [ 'app', { sourceInterval: [1005, 1015] }, 'TypeImport', [], ]], ImportExtendedSelectionTypeless: ['define', { sourceInterval: [1020, 1060] }, null, [], [ 'app', { sourceInterval: [1054, 1060] }, 'Import', [], ]], Import: ['define', { sourceInterval: [1066, 1104] }, null, [], ['seq', { sourceInterval: [1075, 1104] }, [ 'app', { sourceInterval: [1075, 1085] }, 'identifier', [], ], ['opt', { sourceInterval: [1086, 1104] }, ['seq', { sourceInterval: [1087, 1102] }, ['terminal', { sourceInterval: [1087, 1091], }, 'as'], ['app', { sourceInterval: [1092, 1102] }, 'identifier', []]]]]], TypeImport: ['define', { sourceInterval: [1109, 1154] }, null, [], [ 'seq', { sourceInterval: [1122, 1154] }, ['terminal', { sourceInterval: [1122, 1128] }, 'type'], ['app', { sourceInterval: [1129, 1135] }, 'Import', []], ['opt', { sourceInterval: [1136, 1154] }, ['seq', { sourceInterval: [1137, 1152] }, ['terminal', { sourceInterval: [1137, 1141], }, 'as'], ['app', { sourceInterval: [1142, 1152] }, 'identifier', []]]], ]], identifier: ['define', { sourceInterval: [1160, 1186] }, null, [], ['seq', { sourceInterval: [1173, 1186] }, [ 'app', { sourceInterval: [1173, 1179] }, 'letter', [], ], ['star', { sourceInterval: [1180, 1186] }, ['app', { sourceInterval: [1180, 1185] }, 'alnum', []]]]], quote: ['define', { sourceInterval: [1191, 1215] }, null, [], [ 'alt', { sourceInterval: [1199, 1215] }, ['terminal', { sourceInterval: [1199, 1203] }, '"'], ['terminal', { sourceInterval: [1206, 1209] }, "'"], ['terminal', { sourceInterval: [1212, 1215] }, '`'], ]], notQuote: ['define', { sourceInterval: [1220, 1241] }, null, [], ['seq', { sourceInterval: [1231, 1241] }, ['not', { sourceInterval: [1231, 1237], }, ['app', { sourceInterval: [1232, 1237] }, 'quote', []]], ['app', { sourceInterval: [1238, 1241] }, 'any', []]]], importSource: ['define', { sourceInterval: [1246, 1334] }, null, [], [ 'alt', { sourceInterval: [1265, 1334] }, ['seq', { sourceInterval: [1267, 1286] }, ['terminal', { sourceInterval: [1267, 1271] }, '"'], ['plus', { sourceInterval: [1272, 1281], }, ['app', { sourceInterval: [1272, 1280] }, 'notQuote', []]], [ 'terminal', { sourceInterval: [1282, 1286] }, '"', ]], ['seq', { sourceInterval: [1293, 1310] }, ['terminal', { sourceInterval: [1293, 1296] }, "'"], ['plus', { sourceInterval: [1297, 1306], }, ['app', { sourceInterval: [1297, 1305] }, 'notQuote', []]], [ 'terminal', { sourceInterval: [1307, 1310] }, "'", ]], ['seq', { sourceInterval: [1317, 1334] }, ['terminal', { sourceInterval: [1317, 1320] }, '`'], ['plus', { sourceInterval: [1321, 1330], }, ['app', { sourceInterval: [1321, 1329] }, 'notQuote', []]], [ 'terminal', { sourceInterval: [1331, 1334] }, '`', ]], ]], lineTerminator: ['define', { sourceInterval: [1340, 1390] }, null, [], [ 'alt', { sourceInterval: [1357, 1390] }, ['terminal', { sourceInterval: [1357, 1361] }, '\n'], ['terminal', { sourceInterval: [1364, 1368] }, '\r'], ['terminal', { sourceInterval: [1371, 1379] }, '\u2028'], ['terminal', { sourceInterval: [1382, 1390] }, '\u2029'], ]], lineTerminatorSequence: ['define', { sourceInterval: [1395, 1468] }, null, [], [ 'alt', { sourceInterval: [1420, 1468] }, ['terminal', { sourceInterval: [1420, 1424] }, '\n'], ['seq', { sourceInterval: [1427, 1437] }, ['terminal', { sourceInterval: [1427, 1431] }, '\r'], ['not', { sourceInterval: [1432, 1437], }, ['terminal', { sourceInterval: [1433, 1437] }, '\n']]], ['terminal', { sourceInterval: [1440, 1448] }, '\u2028'], ['terminal', { sourceInterval: [1451, 1459] }, '\u2029'], ['terminal', { sourceInterval: [1462, 1468] }, '\r\n'], ]], comment: ['define', { sourceInterval: [1478, 1524] }, null, [], ['alt', { sourceInterval: [1488, 1524] }, [ 'app', { sourceInterval: [1488, 1504] }, 'multiLineComment', [], ], ['app', { sourceInterval: [1507, 1524] }, 'singleLineComment', []]]], multiLineComment: ['define', { sourceInterval: [1530, 1571] }, null, [], ['seq', { sourceInterval: [1549, 1571] }, [ 'terminal', { sourceInterval: [1549, 1553] }, '/*', ], ['star', { sourceInterval: [1554, 1566] }, ['seq', { sourceInterval: [1555, 1564] }, ['not', { sourceInterval: [1555, 1560], }, ['terminal', { sourceInterval: [1556, 1560] }, '*/']], ['app', { sourceInterval: [1561, 1564] }, 'any', []]]], [ 'terminal', { sourceInterval: [1567, 1571] }, '*/', ]]], singleLineComment: ['define', { sourceInterval: [1576, 1623] }, null, [], [ 'seq', { sourceInterval: [1596, 1623] }, ['terminal', { sourceInterval: [1596, 1600] }, '//'], ['star', { sourceInterval: [1601, 1623] }, ['seq', { sourceInterval: [1602, 1621] }, ['not', { sourceInterval: [1602, 1617], }, ['app', { sourceInterval: [1603, 1617] }, 'lineTerminator', []]], [ 'app', { sourceInterval: [1618, 1621] }, 'any', [], ]]], ]], stringLiteral: ['define', { sourceInterval: [1629, 1759] }, null, [], ['alt', { sourceInterval: [1649, 1759] }, [ 'seq', { sourceInterval: [1651, 1683] }, ['terminal', { sourceInterval: [1651, 1655] }, '"'], ['star', { sourceInterval: [1656, 1678] }, [ 'app', { sourceInterval: [1656, 1677] }, 'doubleStringCharacter', [], ]], ['terminal', { sourceInterval: [1679, 1683] }, '"'], ], ['seq', { sourceInterval: [1690, 1720] }, ['terminal', { sourceInterval: [1690, 1693] }, "'"], ['star', { sourceInterval: [1694, 1716], }, ['app', { sourceInterval: [1694, 1715] }, 'singleStringCharacter', []]], ['terminal', { sourceInterval: [1717, 1720], }, "'"]], ['seq', { sourceInterval: [1727, 1759] }, ['terminal', { sourceInterval: [1727, 1730] }, '`'], ['star', { sourceInterval: [1731, 1755], }, ['app', { sourceInterval: [1731, 1754] }, 'templateStringCharacter', []]], ['terminal', { sourceInterval: [1756, 1759], }, '`']]]], doubleStringCharacter_NonEscaped: ['define', { sourceInterval: [1794, 1845] }, null, [], ['seq', { sourceInterval: [1794, 1829], }, ['not', { sourceInterval: [1794, 1825] }, [ 'alt', { sourceInterval: [1796, 1824] }, ['terminal', { sourceInterval: [1796, 1800] }, '"'], ['terminal', { sourceInterval: [1803, 1807] }, '\\'], ['app', { sourceInterval: [1810, 1824] }, 'lineTerminator', []], ]], ['app', { sourceInterval: [1826, 1829] }, 'any', []]]], doubleStringCharacter_Escaped: ['define', { sourceInterval: [1852, 1900] }, null, [], [ 'seq', { sourceInterval: [1852, 1871] }, ['terminal', { sourceInterval: [1852, 1856] }, '\\'], ['app', { sourceInterval: [1857, 1871] }, 'escapeSequence', []], ]], doubleStringCharacter_LineContinuation: ['define', { sourceInterval: [1907, 1964] }, null, [], [ 'app', { sourceInterval: [1907, 1923] }, 'lineContinuation', [], ]], doubleStringCharacter: ['define', { sourceInterval: [1764, 1964] }, null, [], [ 'alt', { sourceInterval: [1792, 1964] }, ['app', { sourceInterval: [1794, 1829] }, 'doubleStringCharacter_NonEscaped', []], ['app', { sourceInterval: [1852, 1871] }, 'doubleStringCharacter_Escaped', []], ['app', { sourceInterval: [1907, 1923] }, 'doubleStringCharacter_LineContinuation', []], ]], singleStringCharacter_NonEscaped: ['define', { sourceInterval: [1999, 2050] }, null, [], ['seq', { sourceInterval: [1999, 2033], }, ['not', { sourceInterval: [1999, 2029] }, [ 'alt', { sourceInterval: [2001, 2028] }, ['terminal', { sourceInterval: [2001, 2004] }, "'"], ['terminal', { sourceInterval: [2007, 2011] }, '\\'], ['app', { sourceInterval: [2014, 2028] }, 'lineTerminator', []], ]], ['app', { sourceInterval: [2030, 2033] }, 'any', []]]], singleStringCharacter_Escaped: ['define', { sourceInterval: [2057, 2105] }, null, [], [ 'seq', { sourceInterval: [2057, 2076] }, ['terminal', { sourceInterval: [2057, 2061] }, '\\'], ['app', { sourceInterval: [2062, 2076] }, 'escapeSequence', []], ]], singleStringCharacter_LineContinuation: ['define', { sourceInterval: [2112, 2169] }, null, [], [ 'app', { sourceInterval: [2112, 2128] }, 'lineContinuation', [], ]], singleStringCharacter: ['define', { sourceInterval: [1969, 2169] }, null, [], [ 'alt', { sourceInterval: [1997, 2169] }, ['app', { sourceInterval: [1999, 2033] }, 'singleStringCharacter_NonEscaped', []], ['app', { sourceInterval: [2057, 2076] }, 'singleStringCharacter_Escaped', []], ['app', { sourceInterval: [2112, 2128] }, 'singleStringCharacter_LineContinuation', []], ]], templateStringCharacter_NonEscaped: ['define', { sourceInterval: [2207, 2258] }, null, [], ['seq', { sourceInterval: [2207, 2225], }, ['not', { sourceInterval: [2207, 2221] }, ['alt', { sourceInterval: [2210, 2220] }, ['terminal', { sourceInterval: [2210, 2213], }, '`'], ['terminal', { sourceInterval: [2216, 2220] }, '\\']]], [ 'app', { sourceInterval: [2222, 2225] }, 'any', [], ]]], templateStringCharacter_Escaped: ['define', { sourceInterval: [2265, 2318] }, null, [], [ 'seq', { sourceInterval: [2265, 2284] }, ['terminal', { sourceInterval: [2265, 2269] }, '\\'], ['app', { sourceInterval: [2270, 2284] }, 'escapeSequence', []], ]], templateStringCharacter: ['define', { sourceInterval: [2174, 2318] }, null, [], [ 'alt', { sourceInterval: [2205, 2318] }, ['app', { sourceInterval: [2207, 2225] }, 'templateStringCharacter_NonEscaped', []], ['app', { sourceInterval: [2265, 2284] }, 'templateStringCharacter_Escaped', []], ]], lineContinuation: ['define', { sourceInterval: [2323, 2369] }, null, [], ['seq', { sourceInterval: [2342, 2369] }, [ 'terminal', { sourceInterval: [2342, 2346] }, '\\', ], ['app', { sourceInterval: [2347, 2369] }, 'lineTerminatorSequence', []]]], escapeSequence: ['define', { sourceInterval: [2374, 2480] }, null, [], [ 'alt', { sourceInterval: [2391, 2480] }, ['app', { sourceInterval: [2391, 2412] }, 'unicodeEscapeSequence', []], ['app', { sourceInterval: [2415, 2432] }, 'hexEscapeSequence', []], ['app', { sourceInterval: [2435, 2454] }, 'octalEscapeSequence', []], ['app', { sourceInterval: [2457, 2480] }, 'characterEscapeSequence', []], ]], characterEscapeSequence: ['define', { sourceInterval: [2485, 2553] }, null, [], [ 'alt', { sourceInterval: [2511, 2553] }, ['app', { sourceInterval: [2511, 2532] }, 'singleEscapeCharacter', []], ['app', { sourceInterval: [2535, 2553] }, 'nonEscapeCharacter', []], ]], singleEscapeCharacter: ['define', { sourceInterval: [2558, 2635] }, null, [], [ 'alt', { sourceInterval: [2582, 2635] }, ['terminal', { sourceInterval: [2582, 2585] }, "'"], ['terminal', { sourceInterval: [2588, 2592] }, '"'], ['terminal', { sourceInterval: [2595, 2599] }, '\\'], ['terminal', { sourceInterval: [2602, 2605] }, 'b'], ['terminal', { sourceInterval: [2608, 2611] }, 'f'], ['terminal', { sourceInterval: [2614, 2617] }, 'n'], ['terminal', { sourceInterval: [2620, 2623] }, 'r'], ['terminal', { sourceInterval: [2626, 2629] }, 't'], ['terminal', { sourceInterval: [2632, 2635] }, 'v'], ]], nonEscapeCharacter: ['define', { sourceInterval: [2640, 2700] }, null, [], [ 'seq', { sourceInterval: [2661, 2700] }, ['not', { sourceInterval: [2661, 2696] }, ['alt', { sourceInterval: [2663, 2695] }, [ 'app', { sourceInterval: [2663, 2678] }, 'escapeCharacter', [], ], ['app', { sourceInterval: [2681, 2695] }, 'lineTerminator', []]]], ['app', { sourceInterval: [2697, 2700] }, 'any', []], ]], escapeCharacter: ['define', { sourceInterval: [2705, 2771] }, null, [], [ 'alt', { sourceInterval: [2723, 2771] }, ['app', { sourceInterval: [2723, 2744] }, 'singleEscapeCharacter', []], ['app', { sourceInterval: [2747, 2759] }, 'decimalDigit', []], ['terminal', { sourceInterval: [2762, 2765] }, 'x'], ['terminal', { sourceInterval: [2768, 2771] }, 'u'], ]], octalEscapeSequence_Whole: ['define', { sourceInterval: [2804, 2850] }, null, [], [ 'seq', { sourceInterval: [2804, 2837] }, ['app', { sourceInterval: [2804, 2815] }, 'zeroToThree', []], ['app', { sourceInterval: [2816, 2826] }, 'octalDigit', []], ['app', { sourceInterval: [2827, 2837] }, 'octalDigit', []], ]], octalEscapeSequence_EightTimesfourToSeven: ['define', { sourceInterval: [2857, 2919] }, null, [], [ 'seq', { sourceInterval: [2857, 2879] }, ['app', { sourceInterval: [2857, 2868] }, 'fourToSeven', []], ['app', { sourceInterval: [2869, 2879] }, 'octalDigit', []], ]], octalEscapeSequence_EightTimesZeroToThree: ['define', { sourceInterval: [2926, 2988] }, null, [], [ 'seq', { sourceInterval: [2926, 2962] }, ['app', { sourceInterval: [2926, 2937] }, 'zeroToThree', []], ['app', { sourceInterval: [2938, 2948] }, 'octalDigit', []], ['not', { sourceInterval: [2949, 2962] }, ['app', { sourceInterval: [2950, 2962] }, 'decimalDigit', []]], ]], octalEscapeSequence_Octal: ['define', { sourceInterval: [2995, 3041] }, null, [], [ 'seq', { sourceInterval: [2995, 3019] }, ['app', { sourceInterval: [2995, 3005] }, 'octalDigit', []], ['not', { sourceInterval: [3006, 3019] }, ['app', { sourceInterval: [3007, 3019] }, 'decimalDigit', []]], ]], octalEscapeSequence: ['define', { sourceInterval: [2776, 3041] }, null, [], [ 'alt', { sourceInterval: [2802, 3041] }, ['app', { sourceInterval: [2804, 2837] }, 'octalEscapeSequence_Whole', []], ['app', { sourceInterval: [2857, 2879] }, 'octalEscapeSequence_EightTimesfourToSeven', []], ['app', { sourceInterval: [2926, 2962] }, 'octalEscapeSequence_EightTimesZeroToThree', []], ['app', { sourceInterval: [2995, 3019] }, 'octalEscapeSequence_Octal', []], ]], hexEscapeSequence: ['define', { sourceInterval: [3046, 3087] }, null, [], [ 'seq', { sourceInterval: [3066, 3087] }, ['terminal', { sourceInterval: [3066, 3069] }, 'x'], ['app', { sourceInterval: [3070, 3078] }, 'hexDigit', []], ['app', { sourceInterval: [3079, 3087] }, 'hexDigit', []], ]], unicodeEscapeSequence: ['define', { sourceInterval: [3092, 3155] }, null, [], [ 'seq', { sourceInterval: [3116, 3155] }, ['terminal', { sourceInterval: [3116, 3119] }, 'u'], ['app', { sourceInterval: [3120, 3128] }, 'hexDigit', []], ['app', { sourceInterval: [3129, 3137] }, 'hexDigit', []], ['app', { sourceInterval: [3138, 3146] }, 'hexDigit', []], ['app', { sourceInterval: [3147, 3155] }, 'hexDigit', []], ]], zeroToThree: ['define', { sourceInterval: [3161, 3183] }, null, [], [ 'range', { sourceInterval: [3175, 3183] }, '0', '3', ]], fourToSeven: ['define', { sourceInterval: [3188, 3210] }, null, [], [ 'range', { sourceInterval: [3202, 3210] }, '4', '7', ]], decimalDigit: ['define', { sourceInterval: [3215, 3238] }, null, [], [ 'range', { sourceInterval: [3230, 3238] }, '0', '9', ]], nonZeroDigit: ['define', { sourceInterval: [3243, 3266] }, null, [], [ 'range', { sourceInterval: [3258, 3266] }, '1', '9', ]], octalDigit: ['define', { sourceInterval: [3271, 3292] }, null, [], [ 'range', { sourceInterval: [3284, 3292] }, '0', '7', ]], regularExpressionLiteral: ['define', { sourceInterval: [3298, 3377] }, null, [], [ 'seq', { sourceInterval: [3325, 3377] }, ['terminal', { sourceInterval: [3325, 3328] }, '/'], ['app', { sourceInterval: [3329, 3350] }, 'regularExpressionBody', []], ['terminal', { sourceInterval: [3351, 3354] }, '/'], ['app', { sourceInterval: [3355, 3377] }, 'regularExpressionFlags', []], ]], regularExpressionBody: ['define', { sourceInterval: [3382, 3455] }, null, [], [ 'seq', { sourceInterval: [3406, 3455] }, ['app', { sourceInterval: [3406, 3432] }, 'regularExpressionFirstChar', []], ['star', { sourceInterval: [3433, 3455] }, [ 'app', { sourceInterval: [3433, 3454] }, 'regularExpressionChar', [], ]], ]], regularExpressionFirstChar: ['define', { sourceInterval: [3460, 3621] }, null, [], ['alt', { sourceInterval: [3493, 3621], }, ['seq', { sourceInterval: [3495, 3551] }, ['not', { sourceInterval: [3495, 3520] }, [ 'alt', { sourceInterval: [3497, 3519] }, ['terminal', { sourceInterval: [3497, 3500] }, '*'], ['terminal', { sourceInterval: [3503, 3507] }, '\\'], ['terminal', { sourceInterval: [3510, 3513] }, '/'], ['terminal', { sourceInterval: [3516, 3519] }, '['], ]], ['app', { sourceInterval: [3521, 3551] }, 'regularExpressionNonTerminator', []]], [ 'app', { sourceInterval: [3558, 3592] }, 'regularExpressionBackslashSequence', [], ], ['app', { sourceInterval: [3599, 3621] }, 'regularExpressionClass', []]]], regularExpressionChar: ['define', { sourceInterval: [3626, 3770] }, null, [], ['alt', { sourceInterval: [3650, 3770], }, ['seq', { sourceInterval: [3650, 3700] }, ['not', { sourceInterval: [3650, 3669] }, [ 'alt', { sourceInterval: [3652, 3668] }, ['terminal', { sourceInterval: [3652, 3656] }, '\\'], ['terminal', { sourceInterval: [3659, 3662] }, '/'], ['terminal', { sourceInterval: [3665, 3668] }, '['], ]], ['app', { sourceInterval: [3670, 3700] }, 'regularExpressionNonTerminator', []]], [ 'app', { sourceInterval: [3707, 3741] }, 'regularExpressionBackslashSequence', [], ], ['app', { sourceInterval: [3748, 3770] }, 'regularExpressionClass', []]]], regularExpressionBackslashSequence: ['define', { sourceInterval: [3775, 3847] }, null, [], [ 'seq', { sourceInterval: [3812, 3847] }, ['terminal', { sourceInterval: [3812, 3816] }, '\\'], ['app', { sourceInterval: [3817, 3847] }, 'regularExpressionNonTerminator', []], ]], regularExpressionNonTerminator: ['define', { sourceInterval: [3852, 3906] }, null, [], [ 'seq', { sourceInterval: [3885, 3906] }, ['not', { sourceInterval: [3885, 3902] }, ['app', { sourceInterval: [3887, 3901] }, 'lineTerminator', []]], ['app', { sourceInterval: [3903, 3906] }, 'any', []], ]], regularExpressionClass: ['define', { sourceInterval: [3911, 3971] }, null, [], [ 'seq', { sourceInterval: [3936, 3971] }, ['terminal', { sourceInterval: [3936, 3939] }, '['], ['star', { sourceInterval: [3940, 3967] }, [ 'app', { sourceInterval: [3940, 3966] }, 'regularExpressionClassChar', [], ]], ['terminal', { sourceInterval: [3968, 3971] }, ']'], ]], regularExpressionClassChar: ['define', { sourceInterval: [3976, 4096] }, null, [], ['alt', { sourceInterval: [4009, 4096], }, ['seq', { sourceInterval: [4011, 4055] }, ['not', { sourceInterval: [4011, 4024] }, [ 'alt', { sourceInterval: [4013, 4023] }, ['terminal', { sourceInterval: [4013, 4016] }, ']'], ['terminal', { sourceInterval: [4019, 4023] }, '\\'], ]], ['app', { sourceInterval: [4025, 4055] }, 'regularExpressionNonTerminator', []]], [ 'app', { sourceInterval: [4062, 4096] }, 'regularExpressionBackslashSequence', [], ]]], regularExpressionFlags: ['define', { sourceInterval: [4101, 4141] }, null, [], ['star', { sourceInterval: [4126, 4141], }, ['app', { sourceInterval: [4126, 4140] }, 'identifierPart', []]]], multiLineCommentNoNL: ['define', { sourceInterval: [4147, 4211] }, null, [], [ 'seq', { sourceInterval: [4170, 4211] }, ['terminal', { sourceInterval: [4170, 4174] }, '/*'], ['star', { sourceInterval: [4175, 4206] }, ['seq', { sourceInterval: [4176, 4204] }, ['not', { sourceInterval: [4176, 4200], }, ['alt', { sourceInterval: [4178, 4199] }, ['terminal', { sourceInterval: [4178, 4182] }, '*/'], [ 'app', { sourceInterval: [4185, 4199] }, 'lineTerminator', [], ]]], ['app', { sourceInterval: [4201, 4204] }, 'any', []]]], ['terminal', { sourceInterval: [4207, 4211] }, '*/'], ]], identifierStart_escaped: ['define', { sourceInterval: [4266, 4303] }, null, [], [ 'seq', { sourceInterval: [4266, 4292] }, ['terminal', { sourceInterval: [4266, 4270] }, '\\'], ['app', { sourceInterval: [4271, 4292] }, 'unicodeEscapeSequence', []], ]], identifierStart: ['define', { sourceInterval: [4217, 4303] }, null, [], [ 'alt', { sourceInterval: [4239, 4303] }, ['app', { sourceInterval: [4241, 4247] }, 'letter', []], ['terminal', { sourceInterval: [4250, 4253] }, '$'], ['terminal', { sourceInterval: [4256, 4259] }, '_'], ['app', { sourceInterval: [4266, 4292] }, 'identifierStart_escaped', []], ]], identifierPart: ['define', { sourceInterval: [4308, 4444] }, null, [], [ 'alt', { sourceInterval: [4329, 4444] }, ['app', { sourceInterval: [4331, 4346] }, 'identifierStart', []], ['app', { sourceInterval: [4349, 4369] }, 'unicodeCombiningMark', []], ['app', { sourceInterval: [4376, 4388] }, 'unicodeDigit', []], ['app', { sourceInterval: [4391, 4418] }, 'unicodeConnectorPunctuation', []], ['terminal', { sourceInterval: [4425, 4433] }, '‌'], ['terminal', { sourceInterval: [4436, 4444] }, '‍'], ]], letter: ['extend', { sourceInterval: [4449, 4476] }, null, [], [ 'app', { sourceInterval: [4459, 4476] }, 'unicodeCategoryNl', [], ]], unicodeCategoryNl: ['define', { sourceInterval: [4481, 4555] }, null, [], [ 'alt', { sourceInterval: [4505, 4555] }, ['range', { sourceInterval: [4505, 4523] }, 'Ⅰ', 'ↂ'], ['terminal', { sourceInterval: [4526, 4534] }, '〇'], ['range', { sourceInterval: [4537, 4555] }, '〡', '〩'], ]], unicodeDigit: ['define', { sourceInterval: [4560, 4922] }, 'a digit', [], [ 'alt', { sourceInterval: [4589, 4922] }, ['range', { sourceInterval: [4589, 4607] }, '0', '9'], ['range', { sourceInterval: [4610, 4628] }, '٠', '٩'], ['range', { sourceInterval: [4631, 4649] }, '۰', '۹'], ['range', { sourceInterval: [4652, 4670] }, '०', '९'], ['range', { sourceInterval: [4673, 4691] }, '০', '৯'], ['range', { sourceInterval: [4694, 4712] }, '੦', '੯'], ['range', { sourceInterval: [4715, 4733] }, '૦', '૯'], ['range', { sourceInterval: [4736, 4754] }, '୦', '୯'], ['range', { sourceInterval: [4757, 4775] }, '௧', '௯'], ['range', { sourceInterval: [4778, 4796] }, '౦', '౯'], ['range', { sourceInterval: [4799, 4817] }, '೦', '೯'], ['range', { sourceInterval: [4820, 4838] }, '൦', '൯'], ['range', { sourceInterval: [4841, 4859] }, '๐', '๙'], ['range', { sourceInterval: [4862, 4880] }, '໐', '໙'], ['range', { sourceInterval: [4883, 4901] }, '༠', '༩'], ['range', { sourceInterval: [4904, 4922] }, '0', '9'], ]], unicodeCombiningMark: ['define', { sourceInterval: [4928, 6659] }, 'a Unicode combining mark', [], [ 'alt', { sourceInterval: [4982, 6659] }, ['range', { sourceInterval: [4982, 5000] }, '̀', 'ͅ'], ['range', { sourceInterval: [5003, 5021] }, '͠', '͡'], ['range', { sourceInterval: [5024, 5042] }, '҃', '҆'], ['range', { sourceInterval: [5045, 5063] }, '֑', '֡'], ['range', { sourceInterval: [5066, 5084] }, '֣', 'ֹ'], ['range', { sourceInterval: [5087, 5105] }, 'ֻ', 'ֽ'], ['range', { sourceInterval: [5108, 5126] }, 'ֿ', 'ֿ'], ['range', { sourceInterval: [5129, 5147] }, 'ׁ', 'ׂ'], ['range', { sourceInterval: [5150, 5168] }, 'ׄ', 'ׄ'], ['range', { sourceInterval: [5171, 5189] }, 'ً', 'ْ'], ['range', { sourceInterval: [5192, 5210] }, 'ٰ', 'ٰ'], ['range', { sourceInterval: [5213, 5231] }, 'ۖ', 'ۜ'], ['range', { sourceInterval: [5234, 5252] }, '۟', 'ۤ'], ['range', { sourceInterval: [5255, 5273] }, 'ۧ', 'ۨ'], ['range', { sourceInterval: [5276, 5294] }, '۪', 'ۭ'], ['range', { sourceInterval: [5297, 5315] }, 'ँ', 'ं'], ['range', { sourceInterval: [5318, 5336] }, '़', '़'], ['range', { sourceInterval: [5339, 5357] }, 'ु', 'ै'], ['range', { sourceInterval: [5360, 5378] }, '्', '्'], ['range', { sourceInterval: [5381, 5399] }, '॑', '॔'], ['range', { sourceInterval: [5402, 5420] }, 'ॢ', 'ॣ'], ['range', { sourceInterval: [5423, 5441] }, 'ঁ', 'ঁ'], ['range', { sourceInterval: [5444, 5462] }, '়', '়'], ['range', { sourceInterval: [5465, 5483] }, 'ু', 'ৄ'], ['range', { sourceInterval: [5486, 5504] }, '্', '্'], ['range', { sourceInterval: [5507, 5525] }, 'ৢ', 'ৣ'], ['range', { sourceInterval: [5528, 5546] }, 'ਂ', 'ਂ'], ['range', { sourceInterval: [5549, 5567] }, '਼', '਼'], ['range', { sourceInterval: [5570, 5588] }, 'ੁ', 'ੂ'], ['range', { sourceInterval: [5591, 5609] }, 'ੇ', 'ੈ'], ['range', { sourceInterval: [5612, 5630] }, 'ੋ', '੍'], ['range', { sourceInterval: [5633, 5651] }, 'ੰ', 'ੱ'], ['range', { sourceInterval: [5654, 5672] }, 'ઁ', 'ં'], ['range', { sourceInterval: [5675, 5693] }, '઼', '઼'], ['range', { sourceInterval: [5696, 5714] }, 'ુ', 'ૅ'], ['range', { sourceInterval: [5717, 5735] }, 'ે', 'ૈ'], ['range', { sourceInterval: [5738, 5756] }, '્', '્'], ['range', { sourceInterval: [5759, 5777] }, 'ଁ', 'ଁ'], ['range', { sourceInterval: [5780, 5798] }, '଼', '଼'], ['range', { sourceInterval: [5801, 5819] }, 'ି', 'ି'], ['range', { sourceInterval: [5822, 5840] }, 'ୁ', 'ୃ'], ['range', { sourceInterval: [5843, 5861] }, '୍', '୍'], ['range', { sourceInterval: [5864, 5882] }, 'ୖ', 'ୖ'], ['range', { sourceInterval: [5885, 5903] }, 'ஂ', 'ஂ'], ['range', { sourceInterval: [5906, 5924] }, 'ீ', 'ீ'], ['range', { sourceInterval: [5927, 5945] }, '்', '்'], ['range', { sourceInterval: [5948, 5966] }, 'ా', 'ీ'], ['range', { sourceInterval: [5969, 5987] }, 'ె', 'ై'], ['range', { sourceInterval: [5990, 6008] }, 'ొ', '్'], ['range', { sourceInterval: [6011, 6029] }, 'ౕ', 'ౖ'], ['range', { sourceInterval: [6032, 6050] }, 'ಿ', 'ಿ'], ['range', { sourceInterval: [6053, 6071] }, 'ೆ', 'ೆ'], ['range', { sourceInterval: [6074, 6092] }, 'ೌ', '್'], ['range', { sourceInterval: [6095, 6113] }, 'ു', 'ൃ'], ['range', { sourceInterval: [6116, 6134] }, '്', '്'], ['range', { sourceInterval: [6137, 6155] }, 'ั', 'ั'], ['range', { sourceInterval: [6158, 6176] }, 'ิ', 'ฺ'], ['range', { sourceInterval: [6179, 6197] }, '็', '๎'], ['range', { sourceInterval: [6200, 6218] }, 'ັ', 'ັ'], ['range', { sourceInterval: [6221, 6239] }, 'ິ', 'ູ'], ['range', { sourceInterval: [6242, 6260] }, 'ົ', 'ຼ'], ['range', { sourceInterval: [6263, 6281] }, '່', 'ໍ'], ['range', { sourceInterval: [6284, 6302] }, '༘', '༙'], ['range', { sourceInterval: [6305, 6323] }, '༵', '༵'], ['range', { sourceInterval: [6326, 6344] }, '༷', '༷'], ['range', { sourceInterval: [6347, 6365] }, '༹', '༹'], ['range', { sourceInterval: [6368, 6386] }, 'ཱ', 'ཾ'], ['range', { sourceInterval: [6389, 6407] }, 'ྀ', '྄'], ['range', { sourceInterval: [6410, 6428] }, '྆', '྇'], ['range', { sourceInterval: [6431, 6449] }, 'ྐ', 'ྕ'], ['range', { sourceInterval: [6452, 6470] }, 'ྗ', 'ྗ'], ['range', { sourceInterval: [6473, 6491] }, 'ྙ', 'ྭ'], ['range', { sourceInterval: [6494, 6512] }, 'ྱ', 'ྷ'], ['range', { sourceInterval: [6515, 6533] }, 'ྐྵ', 'ྐྵ'], ['range', { sourceInterval: [6536, 6554] }, '⃐', '⃜'], ['range', { sourceInterval: [6557, 6575] }, '⃡', '⃡'], ['range', { sourceInterval: [6578, 6596] }, '〪', '〯'], ['range', { sourceInterval: [6599, 6617] }, '゙', '゚'], ['range', { sourceInterval: [6620, 6638] }, 'ﬞ', 'ﬞ'], ['range', { sourceInterval: [6641, 6659] }, '︠', '︣'], ]], unicodeConnectorPunctuation: ['define', { sourceInterval: [6665, 6799] }, null, [], [ 'alt', { sourceInterval: [6695, 6799] }, ['terminal', { sourceInterval: [6695, 6703] }, '_'], ['range', { sourceInterval: [6706, 6724] }, '‿', '⁀'], ['terminal', { sourceInterval: [6727, 6735] }, '・'], ['range', { sourceInterval: [6738, 6756] }, '︳', '︴'], ['range', { sourceInterval: [6759, 6777] }, '﹍', '﹏'], ['terminal', { sourceInterval: [6780, 6788] }, '_'], ['terminal', { sourceInterval: [6791, 6799] }, '・'], ]], unicodeSpaceSeparator: ['define', { sourceInterval: [6804, 6857] }, null, [], [ 'alt', { sourceInterval: [6828, 6857] }, ['range', { sourceInterval: [6828, 6846] }, ' ', '​'], ['terminal', { sourceInterval: [6849, 6857] }, ' '], ]], }, ]); export default result; ================================================ FILE: drizzle-kit/imports-checker/index.ts ================================================ import chalk from 'chalk'; import { analyzeImports, ChainLink } from './checker'; const issues = analyzeImports({ basePath: './drizzle-kit', localPaths: ['src'], whiteList: [ '@drizzle-team/brocli', 'json-diff', 'path', 'fs', 'fs/*', 'url', 'zod', 'node:*', 'hono', 'glob', 'hono/*', 'hono/**/*', '@hono/*', 'crypto', 'hanji', ], entry: './drizzle-kit/src/cli/index.ts', logger: true, ignoreTypes: true, }).issues; const chainToString = (chains: ChainLink[]) => { if (chains.length === 0) throw new Error(); let out = chains[0]!.file + '\n'; let indentation = 0; for (let chain of chains) { out += ' '.repeat(indentation) + '└' + chain.import + ` ${chalk.gray(chain.file)}\n`; indentation += 1; } return out; }; console.log(); for (const issue of issues) { console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); } ================================================ FILE: drizzle-kit/package.json ================================================ { "name": "drizzle-kit", "version": "0.31.10", "homepage": "https://orm.drizzle.team", "keywords": [ "drizzle", "orm", "pg", "mysql", "singlestore", "postgresql", "postgres", "sqlite", "database", "sql", "typescript", "ts", "drizzle-kit", "migrations", "schema" ], "publishConfig": { "provenance": true }, "repository": { "type": "git", "url": "git+https://github.com/drizzle-team/drizzle-orm.git" }, "author": "Drizzle Team", "license": "MIT", "bin": { "drizzle-kit": "./bin.cjs" }, "scripts": { "api": "tsx ./dev/api.ts", "migrate:old": "drizzle-kit generate:mysql", "cli": "tsx ./src/cli/index.ts", "test": "pnpm tsc && TEST_CONFIG_PATH_PREFIX=./tests/cli/ vitest", "build": "rm -rf ./dist && tsx build.ts && cp package.json dist/ && attw --pack dist", "build:dev": "rm -rf ./dist && tsx build.dev.ts && tsc -p tsconfig.cli-types.json && chmod +x ./dist/index.cjs", "pack": "cp package.json README.md dist/ && (cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", "tsc": "tsc -p tsconfig.build.json --noEmit", "publish": "npm publish package.tgz" }, "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.4", "tsx": "^4.21.0" }, "devDependencies": { "@arethetypeswrong/cli": "^0.15.3", "@aws-sdk/client-rds-data": "^3.556.0", "@cloudflare/workers-types": "^4.20230518.0", "@electric-sql/pglite": "^0.2.12", "@hono/node-server": "^1.9.0", "@hono/zod-validator": "^0.2.1", "@libsql/client": "^0.10.0", "@neondatabase/serverless": "^0.9.1", "@originjs/vite-plugin-commonjs": "^1.0.3", "@planetscale/database": "^1.16.0", "@types/better-sqlite3": "^7.6.13", "@types/dockerode": "^3.3.28", "@types/glob": "^8.1.0", "@types/json-diff": "^1.0.3", "@types/micromatch": "^4.0.9", "@types/minimatch": "^5.1.2", "@types/node": "^18.11.15", "@types/pg": "^8.10.7", "@types/pluralize": "^0.0.33", "@types/semver": "^7.5.5", "@types/uuid": "^9.0.8", "@types/ws": "^8.5.10", "@typescript-eslint/eslint-plugin": "^7.2.0", "@typescript-eslint/parser": "^7.2.0", "@vercel/postgres": "^0.8.0", "ava": "^5.1.0", "better-sqlite3": "^11.9.1", "bun-types": "^0.6.6", "camelcase": "^7.0.1", "chalk": "^5.2.0", "commander": "^12.1.0", "dockerode": "^4.0.6", "dotenv": "^16.0.3", "drizzle-kit": "0.25.0-b1faa33", "drizzle-orm": "workspace:./drizzle-orm/dist", "env-paths": "^3.0.0", "esbuild-node-externals": "^1.9.0", "eslint": "^8.57.0", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.1.3", "gel": "^2.0.0", "get-port": "^6.1.2", "glob": "^8.1.0", "hanji": "^0.0.8", "hono": "^4.7.9", "json-diff": "1.0.6", "micromatch": "^4.0.8", "minimatch": "^7.4.3", "mysql2": "3.14.1", "node-fetch": "^3.3.2", "ohm-js": "^17.1.0", "pg": "^8.11.5", "pluralize": "^8.0.0", "postgres": "^3.4.4", "prettier": "^3.5.3", "semver": "^7.7.2", "superjson": "^2.2.1", "tsup": "^8.3.5", "typescript": "^5.6.3", "uuid": "^9.0.1", "vite-tsconfig-paths": "^4.3.2", "vitest": "^3.1.3", "ws": "^8.18.2", "zod": "^3.20.2", "zx": "^8.3.2" }, "exports": { ".": { "import": { "types": "./index.d.mts", "default": "./index.mjs" }, "require": { "types": "./index.d.ts", "default": "./index.js" }, "types": "./index.d.mts", "default": "./index.mjs" }, "./api": { "import": { "types": "./api.d.mts", "default": "./api.mjs" }, "require": { "types": "./api.d.ts", "default": "./api.js" }, "types": "./api.d.mts", "default": "./api.mjs" } } } ================================================ FILE: drizzle-kit/patches/difflib@0.2.4.patch ================================================ diff --git a/lib/difflib.js b/lib/difflib.js index 80d250e7e18bdc972df3621ee5c05ffff0e3659f..94916f33dbae0d3eea6f74e2c619c4c6f52cc125 100644 --- a/lib/difflib.js +++ b/lib/difflib.js @@ -17,7 +17,7 @@ Function restore(delta, which): Function unifiedDiff(a, b): For two lists of strings, return a delta in unified diff format. - +. Class SequenceMatcher: A flexible class for comparing pairs of sequences of any type. @@ -75,7 +75,7 @@ Class Differ: SequenceMatcher = (function() { - SequenceMatcher.name = 'SequenceMatcher'; + // SequenceMatcher.name = 'SequenceMatcher'; /* SequenceMatcher is a flexible class for comparing pairs of sequences of @@ -737,7 +737,7 @@ Class Differ: Differ = (function() { - Differ.name = 'Differ'; + // Differ.name = 'Differ'; /* Differ is a class for comparing sequences of lines of text, and ================================================ FILE: drizzle-kit/src/@types/utils.ts ================================================ declare global { interface String { trimChar(char: string): string; squashSpaces(): string; capitalise(): string; camelCase(): string; snake_case(): string; concatIf(it: string, condition: boolean): string; } interface Array { random(): T; } } import camelcase from 'camelcase'; String.prototype.trimChar = function(char: string) { let start = 0; let end = this.length; while (start < end && this[start] === char) ++start; while (end > start && this[end - 1] === char) --end; // this.toString() due to ava deep equal issue with String { "value" } return start > 0 || end < this.length ? this.substring(start, end) : this.toString(); }; String.prototype.squashSpaces = function() { return this.replace(/ +/g, ' ').trim(); }; String.prototype.camelCase = function() { return camelcase(String(this)); }; String.prototype.capitalise = function() { return this && this.length > 0 ? `${this[0].toUpperCase()}${this.slice(1)}` : String(this); }; String.prototype.concatIf = function(it: string, condition: boolean) { return condition ? `${this}${it}` : String(this); }; String.prototype.snake_case = function() { return this && this.length > 0 ? `${this.replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`)}` : String(this); }; Array.prototype.random = function() { return this[~~(Math.random() * this.length)]; }; export {}; ================================================ FILE: drizzle-kit/src/api.ts ================================================ /// import type { PGlite } from '@electric-sql/pglite'; import { randomUUID } from 'crypto'; import { is } from 'drizzle-orm'; import { LibSQLDatabase } from 'drizzle-orm/libsql'; import { AnyMySqlTable, getTableConfig as mysqlTableConfig, MySqlTable } from 'drizzle-orm/mysql-core'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { AnyPgTable, getTableConfig as pgTableConfig, PgDatabase, PgTable } from 'drizzle-orm/pg-core'; import { Relations } from 'drizzle-orm/relations'; import { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; import { AnySingleStoreTable, getTableConfig as singlestoreTableConfig, SingleStoreTable, } from 'drizzle-orm/singlestore-core'; import { AnySQLiteTable, SQLiteTable } from 'drizzle-orm/sqlite-core'; import { columnsResolver, enumsResolver, indPolicyResolver, mySqlViewsResolver, policyResolver, roleResolver, schemasResolver, sequencesResolver, sqliteViewsResolver, tablesResolver, viewsResolver, } from './cli/commands/migrate'; import { pgPushIntrospect } from './cli/commands/pgIntrospect'; import { pgSuggestions } from './cli/commands/pgPushUtils'; import { updateUpToV6 as upPgV6, updateUpToV7 as upPgV7 } from './cli/commands/pgUp'; import { sqlitePushIntrospect } from './cli/commands/sqliteIntrospect'; import { logSuggestionsAndReturn } from './cli/commands/sqlitePushUtils'; import type { CasingType } from './cli/validations/common'; import type { MysqlCredentials } from './cli/validations/mysql'; import type { PostgresCredentials } from './cli/validations/postgres'; import type { SingleStoreCredentials } from './cli/validations/singlestore'; import type { SqliteCredentials } from './cli/validations/sqlite'; import { getTablesFilterByExtensions } from './extensions/getTablesFilterByExtensions'; import { originUUID } from './global'; import type { Config } from './index'; import { MySqlSchema as MySQLSchemaKit, mysqlSchema, squashMysqlScheme } from './serializer/mysqlSchema'; import { generateMySqlSnapshot } from './serializer/mysqlSerializer'; import { prepareFromExports } from './serializer/pgImports'; import { PgSchema as PgSchemaKit, pgSchema, squashPgScheme } from './serializer/pgSchema'; import { generatePgSnapshot } from './serializer/pgSerializer'; import { SingleStoreSchema as SingleStoreSchemaKit, singlestoreSchema, squashSingleStoreScheme, } from './serializer/singlestoreSchema'; import { generateSingleStoreSnapshot } from './serializer/singlestoreSerializer'; import { SQLiteSchema as SQLiteSchemaKit, sqliteSchema, squashSqliteScheme } from './serializer/sqliteSchema'; import { generateSqliteSnapshot } from './serializer/sqliteSerializer'; import type { Setup } from './serializer/studio'; import type { DB, SQLiteDB } from './utils'; import { certs } from './utils/certs'; export type DrizzleSnapshotJSON = PgSchemaKit; export type DrizzleSQLiteSnapshotJSON = SQLiteSchemaKit; export type DrizzleMySQLSnapshotJSON = MySQLSchemaKit; export type DrizzleSingleStoreSnapshotJSON = SingleStoreSchemaKit; export const generateDrizzleJson = ( imports: Record, prevId?: string, schemaFilters?: string[], casing?: CasingType, ): PgSchemaKit => { const prepared = prepareFromExports(imports); const id = randomUUID(); const snapshot = generatePgSnapshot( prepared.tables, prepared.enums, prepared.schemas, prepared.sequences, prepared.roles, prepared.policies, prepared.views, prepared.matViews, casing, schemaFilters, ); return { ...snapshot, id, prevId: prevId ?? originUUID, }; }; export const generateMigration = async ( prev: DrizzleSnapshotJSON, cur: DrizzleSnapshotJSON, ) => { const { applyPgSnapshotsDiff } = await import('./snapshotsDiffer'); const validatedPrev = pgSchema.parse(prev); const validatedCur = pgSchema.parse(cur); const squashedPrev = squashPgScheme(validatedPrev); const squashedCur = squashPgScheme(validatedCur); const { sqlStatements, _meta } = await applyPgSnapshotsDiff( squashedPrev, squashedCur, schemasResolver, enumsResolver, sequencesResolver, policyResolver, indPolicyResolver, roleResolver, tablesResolver, columnsResolver, viewsResolver, validatedPrev, validatedCur, ); return sqlStatements; }; export const pushSchema = async ( imports: Record, drizzleInstance: PgDatabase, schemaFilters?: string[], tablesFilter?: string[], extensionsFilters?: Config['extensionsFilters'], ) => { const { applyPgSnapshotsDiff } = await import('./snapshotsDiffer'); const { sql } = await import('drizzle-orm'); const filters = (tablesFilter ?? []).concat( getTablesFilterByExtensions({ extensionsFilters, dialect: 'postgresql' }), ); const db: DB = { query: async (query: string, params?: any[]) => { const res = await drizzleInstance.execute(sql.raw(query)); return res.rows; }, }; const cur = generateDrizzleJson(imports); const { schema: prev } = await pgPushIntrospect( db, filters, schemaFilters ?? ['public'], undefined, ); const validatedPrev = pgSchema.parse(prev); const validatedCur = pgSchema.parse(cur); const squashedPrev = squashPgScheme(validatedPrev, 'push'); const squashedCur = squashPgScheme(validatedCur, 'push'); const { statements } = await applyPgSnapshotsDiff( squashedPrev, squashedCur, schemasResolver, enumsResolver, sequencesResolver, policyResolver, indPolicyResolver, roleResolver, tablesResolver, columnsResolver, viewsResolver, validatedPrev, validatedCur, 'push', ); const { shouldAskForApprove, statementsToExecute, infoToPrint } = await pgSuggestions(db, statements); return { hasDataLoss: shouldAskForApprove, warnings: infoToPrint, statementsToExecute, apply: async () => { for (const dStmnt of statementsToExecute) { await db.query(dStmnt); } }, }; }; export const startStudioPostgresServer = async ( imports: Record, credentials: PostgresCredentials | { driver: 'pglite'; client: PGlite; }, options?: { host?: string; port?: number; casing?: CasingType; }, ) => { const { drizzleForPostgres } = await import('./serializer/studio'); const pgSchema: Record> = {}; const relations: Record = {}; Object.entries(imports).forEach(([k, t]) => { if (is(t, PgTable)) { const schema = pgTableConfig(t).schema || 'public'; pgSchema[schema] = pgSchema[schema] || {}; pgSchema[schema][k] = t; } if (is(t, Relations)) { relations[k] = t; } }); const setup = await drizzleForPostgres(credentials, pgSchema, relations, [], options?.casing); await startServerFromSetup(setup, options); }; // SQLite export const generateSQLiteDrizzleJson = async ( imports: Record, prevId?: string, casing?: CasingType, ): Promise => { const { prepareFromExports } = await import('./serializer/sqliteImports'); const prepared = prepareFromExports(imports); const id = randomUUID(); const snapshot = generateSqliteSnapshot(prepared.tables, prepared.views, casing); return { ...snapshot, id, prevId: prevId ?? originUUID, }; }; export const generateSQLiteMigration = async ( prev: DrizzleSQLiteSnapshotJSON, cur: DrizzleSQLiteSnapshotJSON, ) => { const { applySqliteSnapshotsDiff } = await import('./snapshotsDiffer'); const validatedPrev = sqliteSchema.parse(prev); const validatedCur = sqliteSchema.parse(cur); const squashedPrev = squashSqliteScheme(validatedPrev); const squashedCur = squashSqliteScheme(validatedCur); const { sqlStatements } = await applySqliteSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, sqliteViewsResolver, validatedPrev, validatedCur, ); return sqlStatements; }; export const pushSQLiteSchema = async ( imports: Record, drizzleInstance: LibSQLDatabase, ) => { const { applySqliteSnapshotsDiff } = await import('./snapshotsDiffer'); const { sql } = await import('drizzle-orm'); const db: SQLiteDB = { query: async (query: string, params?: any[]) => { const res = drizzleInstance.all(sql.raw(query)); return res; }, run: async (query: string) => { return Promise.resolve(drizzleInstance.run(sql.raw(query))).then( () => {}, ); }, }; const cur = await generateSQLiteDrizzleJson(imports); const { schema: prev } = await sqlitePushIntrospect(db, []); const validatedPrev = sqliteSchema.parse(prev); const validatedCur = sqliteSchema.parse(cur); const squashedPrev = squashSqliteScheme(validatedPrev, 'push'); const squashedCur = squashSqliteScheme(validatedCur, 'push'); const { statements, _meta } = await applySqliteSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, sqliteViewsResolver, validatedPrev, validatedCur, 'push', ); const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( db, statements, squashedPrev, squashedCur, _meta!, ); return { hasDataLoss: shouldAskForApprove, warnings: infoToPrint, statementsToExecute, apply: async () => { for (const dStmnt of statementsToExecute) { await db.query(dStmnt); } }, }; }; export const startStudioSQLiteServer = async ( imports: Record, credentials: SqliteCredentials | { driver: 'd1'; binding: D1Database; }, options?: { host?: string; port?: number; casing?: CasingType; }, ) => { const { drizzleForSQLite } = await import('./serializer/studio'); const sqliteSchema: Record> = {}; const relations: Record = {}; Object.entries(imports).forEach(([k, t]) => { if (is(t, SQLiteTable)) { const schema = 'public'; // sqlite does not have schemas sqliteSchema[schema] = sqliteSchema[schema] || {}; sqliteSchema[schema][k] = t; } if (is(t, Relations)) { relations[k] = t; } }); const setup = await drizzleForSQLite(credentials, sqliteSchema, relations, [], options?.casing); await startServerFromSetup(setup, options); }; // MySQL export const generateMySQLDrizzleJson = async ( imports: Record, prevId?: string, casing?: CasingType, ): Promise => { const { prepareFromExports } = await import('./serializer/mysqlImports'); const prepared = prepareFromExports(imports); const id = randomUUID(); const snapshot = generateMySqlSnapshot(prepared.tables, prepared.views, casing); return { ...snapshot, id, prevId: prevId ?? originUUID, }; }; export const generateMySQLMigration = async ( prev: DrizzleMySQLSnapshotJSON, cur: DrizzleMySQLSnapshotJSON, ) => { const { applyMysqlSnapshotsDiff } = await import('./snapshotsDiffer'); const validatedPrev = mysqlSchema.parse(prev); const validatedCur = mysqlSchema.parse(cur); const squashedPrev = squashMysqlScheme(validatedPrev); const squashedCur = squashMysqlScheme(validatedCur); const { sqlStatements } = await applyMysqlSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, mySqlViewsResolver, validatedPrev, validatedCur, ); return sqlStatements; }; export const pushMySQLSchema = async ( imports: Record, drizzleInstance: MySql2Database, databaseName: string, ) => { const { applyMysqlSnapshotsDiff } = await import('./snapshotsDiffer'); const { logSuggestionsAndReturn } = await import( './cli/commands/mysqlPushUtils' ); const { mysqlPushIntrospect } = await import( './cli/commands/mysqlIntrospect' ); const { sql } = await import('drizzle-orm'); const db: DB = { query: async (query: string, params?: any[]) => { const res = await drizzleInstance.execute(sql.raw(query)); return res[0] as unknown as any[]; }, }; const cur = await generateMySQLDrizzleJson(imports); const { schema: prev } = await mysqlPushIntrospect(db, databaseName, []); const validatedPrev = mysqlSchema.parse(prev); const validatedCur = mysqlSchema.parse(cur); const squashedPrev = squashMysqlScheme(validatedPrev); const squashedCur = squashMysqlScheme(validatedCur); const { statements } = await applyMysqlSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, mySqlViewsResolver, validatedPrev, validatedCur, 'push', ); const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( db, statements, validatedCur, ); return { hasDataLoss: shouldAskForApprove, warnings: infoToPrint, statementsToExecute, apply: async () => { for (const dStmnt of statementsToExecute) { await db.query(dStmnt); } }, }; }; export const startStudioMySQLServer = async ( imports: Record, credentials: MysqlCredentials, options?: { host?: string; port?: number; casing?: CasingType; }, ) => { const { drizzleForMySQL } = await import('./serializer/studio'); const mysqlSchema: Record> = {}; const relations: Record = {}; Object.entries(imports).forEach(([k, t]) => { if (is(t, MySqlTable)) { const schema = mysqlTableConfig(t).schema || 'public'; mysqlSchema[schema] = mysqlSchema[schema] || {}; mysqlSchema[schema][k] = t; } if (is(t, Relations)) { relations[k] = t; } }); const setup = await drizzleForMySQL(credentials, mysqlSchema, relations, [], options?.casing); await startServerFromSetup(setup, options); }; // SingleStore export const generateSingleStoreDrizzleJson = async ( imports: Record, prevId?: string, casing?: CasingType, ): Promise => { const { prepareFromExports } = await import('./serializer/singlestoreImports'); const prepared = prepareFromExports(imports); const id = randomUUID(); const snapshot = generateSingleStoreSnapshot(prepared.tables, /* prepared.views, */ casing); return { ...snapshot, id, prevId: prevId ?? originUUID, }; }; export const generateSingleStoreMigration = async ( prev: DrizzleSingleStoreSnapshotJSON, cur: DrizzleSingleStoreSnapshotJSON, ) => { const { applySingleStoreSnapshotsDiff } = await import('./snapshotsDiffer'); const validatedPrev = singlestoreSchema.parse(prev); const validatedCur = singlestoreSchema.parse(cur); const squashedPrev = squashSingleStoreScheme(validatedPrev); const squashedCur = squashSingleStoreScheme(validatedCur); const { sqlStatements } = await applySingleStoreSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, /* singleStoreViewsResolver, */ validatedPrev, validatedCur, 'push', ); return sqlStatements; }; export const pushSingleStoreSchema = async ( imports: Record, drizzleInstance: SingleStoreDriverDatabase, databaseName: string, ) => { const { applySingleStoreSnapshotsDiff } = await import('./snapshotsDiffer'); const { logSuggestionsAndReturn } = await import( './cli/commands/singlestorePushUtils' ); const { singlestorePushIntrospect } = await import( './cli/commands/singlestoreIntrospect' ); const { sql } = await import('drizzle-orm'); const db: DB = { query: async (query: string) => { const res = await drizzleInstance.execute(sql.raw(query)); return res[0] as unknown as any[]; }, }; const cur = await generateSingleStoreDrizzleJson(imports); const { schema: prev } = await singlestorePushIntrospect(db, databaseName, []); const validatedPrev = singlestoreSchema.parse(prev); const validatedCur = singlestoreSchema.parse(cur); const squashedPrev = squashSingleStoreScheme(validatedPrev); const squashedCur = squashSingleStoreScheme(validatedCur); const { statements } = await applySingleStoreSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, /* singleStoreViewsResolver, */ validatedPrev, validatedCur, 'push', ); const { shouldAskForApprove, statementsToExecute, infoToPrint } = await logSuggestionsAndReturn( db, statements, validatedCur, validatedPrev, ); return { hasDataLoss: shouldAskForApprove, warnings: infoToPrint, statementsToExecute, apply: async () => { for (const dStmnt of statementsToExecute) { await db.query(dStmnt); } }, }; }; export const startStudioSingleStoreServer = async ( imports: Record, credentials: SingleStoreCredentials, options?: { host?: string; port?: number; casing?: CasingType; }, ) => { const { drizzleForSingleStore } = await import('./serializer/studio'); const singleStoreSchema: Record> = {}; const relations: Record = {}; Object.entries(imports).forEach(([k, t]) => { if (is(t, SingleStoreTable)) { const schema = singlestoreTableConfig(t).schema || 'public'; singleStoreSchema[schema] = singleStoreSchema[schema] || {}; singleStoreSchema[schema][k] = t; } if (is(t, Relations)) { relations[k] = t; } }); const setup = await drizzleForSingleStore(credentials, singleStoreSchema, relations, [], options?.casing); await startServerFromSetup(setup, options); }; const startServerFromSetup = async (setup: Setup, options?: { host?: string; port?: number; }) => { const { prepareServer } = await import('./serializer/studio'); const server = await prepareServer(setup); const host = options?.host || '127.0.0.1'; const port = options?.port || 4983; const { key, cert } = (await certs()) || {}; server.start({ host, port, key, cert, cb: (err) => { if (err) { console.error(err); } else { console.log(`Studio is running at ${key ? 'https' : 'http'}://${host}:${port}`); } }, }); }; export const upPgSnapshot = (snapshot: Record) => { if (snapshot.version === '5') { return upPgV7(upPgV6(snapshot)); } if (snapshot.version === '6') { return upPgV7(snapshot); } return snapshot; }; ================================================ FILE: drizzle-kit/src/cli/commands/_es5.ts ================================================ const _ = ''; export default _; ================================================ FILE: drizzle-kit/src/cli/commands/check.ts ================================================ import { Dialect } from '../../schemaValidator'; import { prepareOutFolder, validateWithReport } from '../../utils'; export const checkHandler = (out: string, dialect: Dialect) => { const { snapshots } = prepareOutFolder(out, dialect); const report = validateWithReport(snapshots, dialect); if (report.nonLatest.length > 0) { console.log( report.nonLatest .map((it) => { return `${it} is not of the latest version, please run "drizzle-kit up"`; }) .join('\n'), ); process.exit(1); } if (report.malformed.length) { const message = report.malformed .map((it) => { return `${it} data is malformed`; }) .join('\n'); console.log(message); } const collisionEntries = Object.entries(report.idsMap).filter( (it) => it[1].snapshots.length > 1, ); const message = collisionEntries .map((it) => { const data = it[1]; return `[${ data.snapshots.join( ', ', ) }] are pointing to a parent snapshot: ${data.parent}/snapshot.json which is a collision.`; }) .join('\n'); if (message) { console.log(message); } const abort = report.malformed.length!! || collisionEntries.length > 0; if (abort) { process.exit(1); } }; ================================================ FILE: drizzle-kit/src/cli/commands/drop.ts ================================================ import chalk from 'chalk'; import { readFileSync, rmSync, writeFileSync } from 'fs'; import fs from 'fs'; import { render } from 'hanji'; import { join } from 'path'; import { Journal } from '../../utils'; import { DropMigrationView } from '../views'; import { embeddedMigrations } from './migrate'; export const dropMigration = async ({ out, bundle, }: { out: string; bundle: boolean; }) => { const metaFilePath = join(out, 'meta', '_journal.json'); const journal = JSON.parse(readFileSync(metaFilePath, 'utf-8')) as Journal; if (journal.entries.length === 0) { console.log( `[${chalk.blue('i')}] no migration entries found in ${metaFilePath}`, ); return; } const result = await render(new DropMigrationView(journal.entries)); if (result.status === 'aborted') return; delete journal.entries[journal.entries.indexOf(result.data!)]; const resultJournal: Journal = { ...journal, entries: journal.entries.filter(Boolean), }; const sqlFilePath = join(out, `${result.data.tag}.sql`); const snapshotFilePath = join( out, 'meta', `${result.data.tag.split('_')[0]}_snapshot.json`, ); rmSync(sqlFilePath); rmSync(snapshotFilePath); writeFileSync(metaFilePath, JSON.stringify(resultJournal, null, 2)); if (bundle) { fs.writeFileSync( join(out, `migrations.js`), embeddedMigrations(resultJournal), ); } console.log( `[${chalk.green('✓')}] ${ chalk.bold( result.data.tag, ) } migration successfully dropped`, ); }; ================================================ FILE: drizzle-kit/src/cli/commands/introspect.ts ================================================ import chalk from 'chalk'; import { writeFileSync } from 'fs'; import { render, renderWithTask } from 'hanji'; import { Minimatch } from 'minimatch'; import { join } from 'path'; import { plural, singular } from 'pluralize'; import { GelSchema } from 'src/serializer/gelSchema'; import { drySingleStore, SingleStoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; import { assertUnreachable, originUUID } from '../../global'; import { schemaToTypeScript as gelSchemaToTypeScript } from '../../introspect-gel'; import { schemaToTypeScript as mysqlSchemaToTypeScript } from '../../introspect-mysql'; import { paramNameFor, schemaToTypeScript as postgresSchemaToTypeScript } from '../../introspect-pg'; import { schemaToTypeScript as singlestoreSchemaToTypeScript } from '../../introspect-singlestore'; import { schemaToTypeScript as sqliteSchemaToTypeScript } from '../../introspect-sqlite'; import { fromDatabase as fromGelDatabase } from '../../serializer/gelSerializer'; import { dryMySql, MySqlSchema, squashMysqlScheme } from '../../serializer/mysqlSchema'; import { fromDatabase as fromMysqlDatabase } from '../../serializer/mysqlSerializer'; import { dryPg, type PgSchema, squashPgScheme } from '../../serializer/pgSchema'; import { fromDatabase as fromPostgresDatabase } from '../../serializer/pgSerializer'; import { fromDatabase as fromSingleStoreDatabase } from '../../serializer/singlestoreSerializer'; import { drySQLite, type SQLiteSchema, squashSqliteScheme } from '../../serializer/sqliteSchema'; import { fromDatabase as fromSqliteDatabase } from '../../serializer/sqliteSerializer'; import { applyLibSQLSnapshotsDiff, applyMysqlSnapshotsDiff, applyPgSnapshotsDiff, applySingleStoreSnapshotsDiff, applySqliteSnapshotsDiff, } from '../../snapshotsDiffer'; import { prepareOutFolder } from '../../utils'; import { Entities } from '../validations/cli'; import type { Casing, Prefix } from '../validations/common'; import { GelCredentials } from '../validations/gel'; import { LibSQLCredentials } from '../validations/libsql'; import type { MysqlCredentials } from '../validations/mysql'; import type { PostgresCredentials } from '../validations/postgres'; import { SingleStoreCredentials } from '../validations/singlestore'; import type { SqliteCredentials } from '../validations/sqlite'; import { IntrospectProgress } from '../views'; import { columnsResolver, enumsResolver, indPolicyResolver, mySqlViewsResolver, policyResolver, roleResolver, schemasResolver, sequencesResolver, sqliteViewsResolver, tablesResolver, viewsResolver, writeResult, } from './migrate'; export const introspectPostgres = async ( casing: Casing, out: string, breakpoints: boolean, credentials: PostgresCredentials, tablesFilter: string[], schemasFilter: string[], prefix: Prefix, entities: Entities, ) => { const { preparePostgresDB } = await import('../connections'); const db = await preparePostgresDB(credentials); const matchers = tablesFilter.map((it) => { return new Minimatch(it); }); const filter = (tableName: string) => { if (matchers.length === 0) return true; let flags: boolean[] = []; for (let matcher of matchers) { if (matcher.negate) { if (!matcher.match(tableName)) { flags.push(false); } } if (matcher.match(tableName)) { flags.push(true); } } if (flags.length > 0) { return flags.every(Boolean); } return false; }; const progress = new IntrospectProgress(true); const res = await renderWithTask( progress, fromPostgresDatabase( db, filter, schemasFilter, entities, (stage, count, status) => { progress.update(stage, count, status); }, ), ); const schema = { id: originUUID, prevId: '', ...res } as PgSchema; const ts = postgresSchemaToTypeScript(schema, casing); const relationsTs = relationsToTypeScript(schema, casing); const { internal, ...schemaWithoutInternals } = schema; const schemaFile = join(out, 'schema.ts'); writeFileSync(schemaFile, ts.file); const relationsFile = join(out, 'relations.ts'); writeFileSync(relationsFile, relationsTs.file); console.log(); const { snapshots, journal } = prepareOutFolder(out, 'postgresql'); if (snapshots.length === 0) { const { sqlStatements, _meta } = await applyPgSnapshotsDiff( squashPgScheme(dryPg), squashPgScheme(schema), schemasResolver, enumsResolver, sequencesResolver, policyResolver, indPolicyResolver, roleResolver, tablesResolver, columnsResolver, viewsResolver, dryPg, schema, ); writeResult({ cur: schema, sqlStatements, journal, _meta, outFolder: out, breakpoints, type: 'introspect', prefixMode: prefix, }); } else { render( `[${ chalk.blue( 'i', ) }] No SQL generated, you already have migrations in project`, ); } render( `[${ chalk.green( '✓', ) }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, ); render( `[${ chalk.green( '✓', ) }] Your relations file is ready ➜ ${ chalk.bold.underline.blue( relationsFile, ) } 🚀`, ); process.exit(0); }; export const introspectGel = async ( casing: Casing, out: string, breakpoints: boolean, credentials: GelCredentials | undefined, tablesFilter: string[], schemasFilter: string[], prefix: Prefix, entities: Entities, ) => { const { prepareGelDB } = await import('../connections'); const db = await prepareGelDB(credentials); const matchers = tablesFilter.map((it) => { return new Minimatch(it); }); const filter = (tableName: string) => { if (matchers.length === 0) return true; let flags: boolean[] = []; for (let matcher of matchers) { if (matcher.negate) { if (!matcher.match(tableName)) { flags.push(false); } } if (matcher.match(tableName)) { flags.push(true); } } if (flags.length > 0) { return flags.every(Boolean); } return false; }; const progress = new IntrospectProgress(true); const res = await renderWithTask( progress, fromGelDatabase( db, filter, schemasFilter, entities, (stage, count, status) => { progress.update(stage, count, status); }, ), ); const schema = { id: originUUID, prevId: '', ...res } as GelSchema; const ts = gelSchemaToTypeScript(schema, casing); const relationsTs = relationsToTypeScript(schema, casing); const { internal, ...schemaWithoutInternals } = schema; const schemaFile = join(out, 'schema.ts'); writeFileSync(schemaFile, ts.file); const relationsFile = join(out, 'relations.ts'); writeFileSync(relationsFile, relationsTs.file); console.log(); // const { snapshots, journal } = prepareOutFolder(out, 'gel'); // if (snapshots.length === 0) { // const { sqlStatements, _meta } = await applyGelSnapshotsDiff( // squashGelScheme(dryGel), // squashGelScheme(schema), // schemasResolver, // enumsResolver, // sequencesResolver, // policyResolver, // indPolicyResolver, // roleResolver, // tablesResolver, // columnsResolver, // viewsResolver, // dryPg, // schema, // ); // writeResult({ // cur: schema, // sqlStatements, // journal, // _meta, // outFolder: out, // breakpoints, // type: 'introspect', // prefixMode: prefix, // }); // } else { // render( // `[${ // chalk.blue( // 'i', // ) // }] No SQL generated, you already have migrations in project`, // ); // } render( `[${ chalk.green( '✓', ) }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, ); render( `[${ chalk.green( '✓', ) }] Your relations file is ready ➜ ${ chalk.bold.underline.blue( relationsFile, ) } 🚀`, ); process.exit(0); }; export const introspectMysql = async ( casing: Casing, out: string, breakpoints: boolean, credentials: MysqlCredentials, tablesFilter: string[], prefix: Prefix, ) => { const { connectToMySQL } = await import('../connections'); const { db, database } = await connectToMySQL(credentials); const matchers = tablesFilter.map((it) => { return new Minimatch(it); }); const filter = (tableName: string) => { if (matchers.length === 0) return true; let flags: boolean[] = []; for (let matcher of matchers) { if (matcher.negate) { if (!matcher.match(tableName)) { flags.push(false); } } if (matcher.match(tableName)) { flags.push(true); } } if (flags.length > 0) { return flags.every(Boolean); } return false; }; const progress = new IntrospectProgress(); const res = await renderWithTask( progress, fromMysqlDatabase(db, database, filter, (stage, count, status) => { progress.update(stage, count, status); }), ); const schema = { id: originUUID, prevId: '', ...res } as MySqlSchema; const ts = mysqlSchemaToTypeScript(schema, casing); const relationsTs = relationsToTypeScript(schema, casing); const { internal, ...schemaWithoutInternals } = schema; const schemaFile = join(out, 'schema.ts'); writeFileSync(schemaFile, ts.file); const relationsFile = join(out, 'relations.ts'); writeFileSync(relationsFile, relationsTs.file); console.log(); const { snapshots, journal } = prepareOutFolder(out, 'mysql'); if (snapshots.length === 0) { const { sqlStatements, _meta } = await applyMysqlSnapshotsDiff( squashMysqlScheme(dryMySql), squashMysqlScheme(schema), tablesResolver, columnsResolver, mySqlViewsResolver, dryMySql, schema, ); writeResult({ cur: schema, sqlStatements, journal, _meta, outFolder: out, breakpoints, type: 'introspect', prefixMode: prefix, }); } else { render( `[${ chalk.blue( 'i', ) }] No SQL generated, you already have migrations in project`, ); } render( `[${ chalk.green( '✓', ) }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, ); render( `[${ chalk.green( '✓', ) }] Your relations file is ready ➜ ${ chalk.bold.underline.blue( relationsFile, ) } 🚀`, ); process.exit(0); }; export const introspectSingleStore = async ( casing: Casing, out: string, breakpoints: boolean, credentials: SingleStoreCredentials, tablesFilter: string[], prefix: Prefix, ) => { const { connectToSingleStore } = await import('../connections'); const { db, database } = await connectToSingleStore(credentials); const matchers = tablesFilter.map((it) => { return new Minimatch(it); }); const filter = (tableName: string) => { if (matchers.length === 0) return true; let flags: boolean[] = []; for (let matcher of matchers) { if (matcher.negate) { if (!matcher.match(tableName)) { flags.push(false); } } if (matcher.match(tableName)) { flags.push(true); } } if (flags.length > 0) { return flags.every(Boolean); } return false; }; const progress = new IntrospectProgress(); const res = await renderWithTask( progress, fromSingleStoreDatabase(db, database, filter, (stage, count, status) => { progress.update(stage, count, status); }), ); const schema = { id: originUUID, prevId: '', ...res } as SingleStoreSchema; const ts = singlestoreSchemaToTypeScript(schema, casing); const { internal, ...schemaWithoutInternals } = schema; const schemaFile = join(out, 'schema.ts'); writeFileSync(schemaFile, ts.file); console.log(); const { snapshots, journal } = prepareOutFolder(out, 'postgresql'); if (snapshots.length === 0) { const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff( squashSingleStoreScheme(drySingleStore), squashSingleStoreScheme(schema), tablesResolver, columnsResolver, /* singleStoreViewsResolver, */ drySingleStore, schema, ); writeResult({ cur: schema, sqlStatements, journal, _meta, outFolder: out, breakpoints, type: 'introspect', prefixMode: prefix, }); } else { render( `[${ chalk.blue( 'i', ) }] No SQL generated, you already have migrations in project`, ); } render( `[${ chalk.green( '✓', ) }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, ); process.exit(0); }; export const introspectSqlite = async ( casing: Casing, out: string, breakpoints: boolean, credentials: SqliteCredentials, tablesFilter: string[], prefix: Prefix, ) => { const { connectToSQLite } = await import('../connections'); const db = await connectToSQLite(credentials); const matchers = tablesFilter.map((it) => { return new Minimatch(it); }); const filter = (tableName: string) => { if (matchers.length === 0) return true; let flags: boolean[] = []; for (let matcher of matchers) { if (matcher.negate) { if (!matcher.match(tableName)) { flags.push(false); } } if (matcher.match(tableName)) { flags.push(true); } } if (flags.length > 0) { return flags.every(Boolean); } return false; }; const progress = new IntrospectProgress(); const res = await renderWithTask( progress, fromSqliteDatabase(db, filter, (stage, count, status) => { progress.update(stage, count, status); }), ); const schema = { id: originUUID, prevId: '', ...res } as SQLiteSchema; const ts = sqliteSchemaToTypeScript(schema, casing); const relationsTs = relationsToTypeScript(schema, casing); // check orm and orm-pg api version const schemaFile = join(out, 'schema.ts'); writeFileSync(schemaFile, ts.file); const relationsFile = join(out, 'relations.ts'); writeFileSync(relationsFile, relationsTs.file); console.log(); const { snapshots, journal } = prepareOutFolder(out, 'sqlite'); if (snapshots.length === 0) { const { sqlStatements, _meta } = await applySqliteSnapshotsDiff( squashSqliteScheme(drySQLite), squashSqliteScheme(schema), tablesResolver, columnsResolver, sqliteViewsResolver, drySQLite, schema, ); writeResult({ cur: schema, sqlStatements, journal, _meta, outFolder: out, breakpoints, type: 'introspect', prefixMode: prefix, }); } else { render( `[${ chalk.blue( 'i', ) }] No SQL generated, you already have migrations in project`, ); } render( `[${ chalk.green( '✓', ) }] You schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, ); render( `[${ chalk.green( '✓', ) }] You relations file is ready ➜ ${ chalk.bold.underline.blue( relationsFile, ) } 🚀`, ); process.exit(0); }; export const introspectLibSQL = async ( casing: Casing, out: string, breakpoints: boolean, credentials: LibSQLCredentials, tablesFilter: string[], prefix: Prefix, ) => { const { connectToLibSQL } = await import('../connections'); const db = await connectToLibSQL(credentials); const matchers = tablesFilter.map((it) => { return new Minimatch(it); }); const filter = (tableName: string) => { if (matchers.length === 0) return true; let flags: boolean[] = []; for (let matcher of matchers) { if (matcher.negate) { if (!matcher.match(tableName)) { flags.push(false); } } if (matcher.match(tableName)) { flags.push(true); } } if (flags.length > 0) { return flags.every(Boolean); } return false; }; const progress = new IntrospectProgress(); const res = await renderWithTask( progress, fromSqliteDatabase(db, filter, (stage, count, status) => { progress.update(stage, count, status); }), ); const schema = { id: originUUID, prevId: '', ...res } as SQLiteSchema; const ts = sqliteSchemaToTypeScript(schema, casing); const relationsTs = relationsToTypeScript(schema, casing); // check orm and orm-pg api version const schemaFile = join(out, 'schema.ts'); writeFileSync(schemaFile, ts.file); const relationsFile = join(out, 'relations.ts'); writeFileSync(relationsFile, relationsTs.file); console.log(); const { snapshots, journal } = prepareOutFolder(out, 'sqlite'); if (snapshots.length === 0) { const { sqlStatements, _meta } = await applyLibSQLSnapshotsDiff( squashSqliteScheme(drySQLite), squashSqliteScheme(schema), tablesResolver, columnsResolver, sqliteViewsResolver, drySQLite, schema, ); writeResult({ cur: schema, sqlStatements, journal, _meta, outFolder: out, breakpoints, type: 'introspect', prefixMode: prefix, }); } else { render( `[${ chalk.blue( 'i', ) }] No SQL generated, you already have migrations in project`, ); } render( `[${ chalk.green( '✓', ) }] Your schema file is ready ➜ ${chalk.bold.underline.blue(schemaFile)} 🚀`, ); render( `[${ chalk.green( '✓', ) }] Your relations file is ready ➜ ${ chalk.bold.underline.blue( relationsFile, ) } 🚀`, ); process.exit(0); }; const withCasing = (value: string, casing: Casing) => { if (casing === 'preserve') { return value; } if (casing === 'camel') { return value.camelCase(); } assertUnreachable(casing); }; export const relationsToTypeScript = ( schema: { tables: Record< string, { schema?: string; foreignKeys: Record< string, { name: string; tableFrom: string; columnsFrom: string[]; tableTo: string; schemaTo?: string; columnsTo: string[]; onUpdate?: string | undefined; onDelete?: string | undefined; } >; } >; }, casing: Casing, ) => { const imports: string[] = []; const tableRelations: Record< string, { name: string; type: 'one' | 'many'; tableFrom: string; schemaFrom?: string; columnFrom: string; tableTo: string; schemaTo?: string; columnTo: string; relationName?: string; }[] > = {}; Object.values(schema.tables).forEach((table) => { Object.values(table.foreignKeys).forEach((fk) => { const tableNameFrom = paramNameFor(fk.tableFrom, table.schema); const tableNameTo = paramNameFor(fk.tableTo, fk.schemaTo); const tableFrom = withCasing(tableNameFrom.replace(/:+/g, ''), casing); const tableTo = withCasing(tableNameTo.replace(/:+/g, ''), casing); const columnFrom = withCasing(fk.columnsFrom[0], casing); const columnTo = withCasing(fk.columnsTo[0], casing); imports.push(tableTo, tableFrom); // const keyFrom = `${schemaFrom}.${tableFrom}`; const keyFrom = tableFrom; if (!tableRelations[keyFrom]) { tableRelations[keyFrom] = []; } tableRelations[keyFrom].push({ name: singular(tableTo), type: 'one', tableFrom, columnFrom, tableTo, columnTo, }); // const keyTo = `${schemaTo}.${tableTo}`; const keyTo = tableTo; if (!tableRelations[keyTo]) { tableRelations[keyTo] = []; } tableRelations[keyTo].push({ name: plural(tableFrom), type: 'many', tableFrom: tableTo, columnFrom: columnTo, tableTo: tableFrom, columnTo: columnFrom, }); }); }); const uniqueImports = [...new Set(imports)]; const importsTs = `import { relations } from "drizzle-orm/relations";\nimport { ${ uniqueImports.join( ', ', ) } } from "./schema";\n\n`; const relationStatements = Object.entries(tableRelations).map( ([table, relations]) => { const hasOne = relations.some((it) => it.type === 'one'); const hasMany = relations.some((it) => it.type === 'many'); // * change relation names if they are duplicated or if there are multiple relations between two tables const preparedRelations = relations.map( (relation, relationIndex, originArray) => { let name = relation.name; let relationName; const hasMultipleRelations = originArray.some( (it, originIndex) => relationIndex !== originIndex && it.tableTo === relation.tableTo, ); if (hasMultipleRelations) { relationName = relation.type === 'one' ? `${relation.tableFrom}_${relation.columnFrom}_${relation.tableTo}_${relation.columnTo}` : `${relation.tableTo}_${relation.columnTo}_${relation.tableFrom}_${relation.columnFrom}`; } const hasDuplicatedRelation = originArray.some( (it, originIndex) => relationIndex !== originIndex && it.name === relation.name, ); if (hasDuplicatedRelation) { name = `${relation.name}_${relation.type === 'one' ? relation.columnFrom : relation.columnTo}`; } return { ...relation, name, relationName, }; }, ); const fields = preparedRelations.map((relation) => { if (relation.type === 'one') { return `\t${relation.name}: one(${relation.tableTo}, {\n\t\tfields: [${relation.tableFrom}.${relation.columnFrom}],\n\t\treferences: [${relation.tableTo}.${relation.columnTo}]${ relation.relationName ? `,\n\t\trelationName: "${relation.relationName}"` : '' }\n\t}),`; } else { return `\t${relation.name}: many(${relation.tableTo}${ relation.relationName ? `, {\n\t\trelationName: "${relation.relationName}"\n\t}` : '' }),`; } }); return `export const ${table}Relations = relations(${table}, ({${hasOne ? 'one' : ''}${ hasOne && hasMany ? ', ' : '' }${hasMany ? 'many' : ''}}) => ({\n${fields.join('\n')}\n}));`; }, ); return { file: importsTs + relationStatements.join('\n\n'), }; }; ================================================ FILE: drizzle-kit/src/cli/commands/libSqlPushUtils.ts ================================================ import chalk from 'chalk'; import { JsonStatement } from 'src/jsonStatements'; import { findAddedAndRemoved, SQLiteDB } from 'src/utils'; import { SQLiteSchemaInternal, SQLiteSchemaSquashed, SQLiteSquasher } from '../../serializer/sqliteSchema'; import { CreateSqliteIndexConvertor, fromJson, LibSQLModifyColumn, SQLiteCreateTableConvertor, SQLiteDropTableConvertor, SqliteRenameTableConvertor, } from '../../sqlgenerator'; export const getOldTableName = ( tableName: string, meta: SQLiteSchemaInternal['_meta'], ) => { for (const key of Object.keys(meta.tables)) { const value = meta.tables[key]; if (`"${tableName}"` === value) { return key.substring(1, key.length - 1); } } return tableName; }; export const _moveDataStatements = ( tableName: string, json: SQLiteSchemaSquashed, dataLoss: boolean = false, ) => { const statements: string[] = []; const newTableName = `__new_${tableName}`; // create table statement from a new json2 with proper name const tableColumns = Object.values(json.tables[tableName].columns); const referenceData = Object.values(json.tables[tableName].foreignKeys); const compositePKs = Object.values( json.tables[tableName].compositePrimaryKeys, ).map((it) => SQLiteSquasher.unsquashPK(it)); const checkConstraints = Object.values(json.tables[tableName].checkConstraints); const fks = referenceData.map((it) => SQLiteSquasher.unsquashPushFK(it)); const mappedCheckConstraints: string[] = checkConstraints.map((it) => it.replaceAll(`"${tableName}".`, `"${newTableName}".`) .replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) .replaceAll(`${tableName}.`, `${newTableName}.`) .replaceAll(`'${tableName}'.`, `\`${newTableName}\`.`) ); // create new table statements.push( new SQLiteCreateTableConvertor().convert({ type: 'sqlite_create_table', tableName: newTableName, columns: tableColumns, referenceData: fks, compositePKs, checkConstraints: mappedCheckConstraints, }), ); // move data if (!dataLoss) { const columns = Object.keys(json.tables[tableName].columns).map( (c) => `"${c}"`, ); statements.push( `INSERT INTO \`${newTableName}\`(${ columns.join( ', ', ) }) SELECT ${columns.join(', ')} FROM \`${tableName}\`;`, ); } statements.push( new SQLiteDropTableConvertor().convert({ type: 'drop_table', tableName: tableName, schema: '', }), ); // rename table statements.push( new SqliteRenameTableConvertor().convert({ fromSchema: '', tableNameFrom: newTableName, tableNameTo: tableName, toSchema: '', type: 'rename_table', }), ); for (const idx of Object.values(json.tables[tableName].indexes)) { statements.push( new CreateSqliteIndexConvertor().convert({ type: 'create_index', tableName: tableName, schema: '', data: idx, }), ); } return statements; }; export const libSqlLogSuggestionsAndReturn = async ( connection: SQLiteDB, statements: JsonStatement[], json1: SQLiteSchemaSquashed, json2: SQLiteSchemaSquashed, meta: SQLiteSchemaInternal['_meta'], ) => { let shouldAskForApprove = false; const statementsToExecute: string[] = []; const infoToPrint: string[] = []; const tablesToRemove: string[] = []; const columnsToRemove: string[] = []; const tablesToTruncate: string[] = []; for (const statement of statements) { if (statement.type === 'drop_table') { const res = await connection.query<{ count: string }>( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( statement.tableName, ) } table with ${count} items`, ); tablesToRemove.push(statement.tableName); shouldAskForApprove = true; } const fromJsonStatement = fromJson([statement], 'turso', 'push', json2); statementsToExecute.push( ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), ); } else if (statement.type === 'alter_table_drop_column') { const tableName = statement.tableName; const res = await connection.query<{ count: string }>( `select count(*) as count from \`${tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( statement.columnName, ) } column in ${tableName} table with ${count} items`, ); columnsToRemove.push(`${tableName}_${statement.columnName}`); shouldAskForApprove = true; } const fromJsonStatement = fromJson([statement], 'turso', 'push', json2); statementsToExecute.push( ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), ); } else if ( statement.type === 'sqlite_alter_table_add_column' && statement.column.notNull && !statement.column.default ) { const newTableName = statement.tableName; const res = await connection.query<{ count: string }>( `select count(*) as count from \`${newTableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to add not-null ${ chalk.underline( statement.column.name, ) } column without default value, which contains ${count} items`, ); tablesToTruncate.push(newTableName); statementsToExecute.push(`delete from ${newTableName};`); shouldAskForApprove = true; } const fromJsonStatement = fromJson([statement], 'turso', 'push', json2); statementsToExecute.push( ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), ); } else if (statement.type === 'alter_table_alter_column_set_notnull') { const tableName = statement.tableName; if ( statement.type === 'alter_table_alter_column_set_notnull' && typeof statement.columnDefault === 'undefined' ) { const res = await connection.query<{ count: string }>( `select count(*) as count from \`${tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to add not-null constraint to ${ chalk.underline( statement.columnName, ) } column without default value, which contains ${count} items`, ); tablesToTruncate.push(tableName); statementsToExecute.push(`delete from \`${tableName}\``); shouldAskForApprove = true; } } const modifyStatements = new LibSQLModifyColumn().convert(statement, json2); statementsToExecute.push( ...(Array.isArray(modifyStatements) ? modifyStatements : [modifyStatements]), ); } else if (statement.type === 'recreate_table') { const tableName = statement.tableName; let dataLoss = false; const oldTableName = getOldTableName(tableName, meta); const prevColumnNames = Object.keys(json1.tables[oldTableName].columns); const currentColumnNames = Object.keys(json2.tables[tableName].columns); const { removedColumns, addedColumns } = findAddedAndRemoved( prevColumnNames, currentColumnNames, ); if (removedColumns.length) { for (const removedColumn of removedColumns) { const res = await connection.query<{ count: string }>( `select count(\`${tableName}\`.\`${removedColumn}\`) as count from \`${tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( removedColumn, ) } column in ${tableName} table with ${count} items`, ); columnsToRemove.push(removedColumn); shouldAskForApprove = true; } } } if (addedColumns.length) { for (const addedColumn of addedColumns) { const [res] = await connection.query<{ count: string }>( `select count(*) as count from \`${tableName}\``, ); const columnConf = json2.tables[tableName].columns[addedColumn]; const count = Number(res.count); if (count > 0 && columnConf.notNull && !columnConf.default) { dataLoss = true; infoToPrint.push( `· You're about to add not-null ${ chalk.underline( addedColumn, ) } column without default value to table, which contains ${count} items`, ); shouldAskForApprove = true; tablesToTruncate.push(tableName); statementsToExecute.push(`DELETE FROM \`${tableName}\`;`); } } } // check if some tables referencing current for pragma const tablesReferencingCurrent: string[] = []; for (const table of Object.values(json2.tables)) { const tablesRefs = Object.values(json2.tables[table.name].foreignKeys) .filter((t) => SQLiteSquasher.unsquashPushFK(t).tableTo === tableName) .map((it) => SQLiteSquasher.unsquashPushFK(it).tableFrom); tablesReferencingCurrent.push(...tablesRefs); } if (!tablesReferencingCurrent.length) { statementsToExecute.push(..._moveDataStatements(tableName, json2, dataLoss)); continue; } // recreate table statementsToExecute.push( ..._moveDataStatements(tableName, json2, dataLoss), ); } else if ( statement.type === 'alter_table_alter_column_set_generated' || statement.type === 'alter_table_alter_column_drop_generated' ) { const tableName = statement.tableName; const res = await connection.query<{ count: string }>( `select count("${statement.columnName}") as count from \`${tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( statement.columnName, ) } column in ${tableName} table with ${count} items`, ); columnsToRemove.push(`${tableName}_${statement.columnName}`); shouldAskForApprove = true; } const fromJsonStatement = fromJson([statement], 'turso', 'push', json2); statementsToExecute.push( ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), ); } else { const fromJsonStatement = fromJson([statement], 'turso', 'push', json2); statementsToExecute.push( ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), ); } } return { statementsToExecute: [...new Set(statementsToExecute)], shouldAskForApprove, infoToPrint, columnsToRemove: [...new Set(columnsToRemove)], tablesToTruncate: [...new Set(tablesToTruncate)], tablesToRemove: [...new Set(tablesToRemove)], }; }; ================================================ FILE: drizzle-kit/src/cli/commands/migrate.ts ================================================ import fs from 'fs'; import { prepareMySqlDbPushSnapshot, prepareMySqlMigrationSnapshot, preparePgDbPushSnapshot, preparePgMigrationSnapshot, prepareSingleStoreDbPushSnapshot, prepareSingleStoreMigrationSnapshot, prepareSQLiteDbPushSnapshot, prepareSqliteMigrationSnapshot, } from '../../migrationPreparator'; import chalk from 'chalk'; import { render } from 'hanji'; import path, { join } from 'path'; import { SingleStoreSchema, singlestoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; import { TypeOf } from 'zod'; import type { CommonSchema } from '../../schemaValidator'; import { MySqlSchema, mysqlSchema, squashMysqlScheme, ViewSquashed } from '../../serializer/mysqlSchema'; import { PgSchema, pgSchema, Policy, Role, squashPgScheme, View } from '../../serializer/pgSchema'; import { SQLiteSchema, sqliteSchema, squashSqliteScheme, View as SQLiteView } from '../../serializer/sqliteSchema'; import { applyLibSQLSnapshotsDiff, applyMysqlSnapshotsDiff, applyPgSnapshotsDiff, applySingleStoreSnapshotsDiff, applySqliteSnapshotsDiff, Column, ColumnsResolverInput, ColumnsResolverOutput, Enum, PolicyResolverInput, PolicyResolverOutput, ResolverInput, ResolverOutput, ResolverOutputWithMoved, RolesResolverInput, RolesResolverOutput, Sequence, Table, TablePolicyResolverInput, TablePolicyResolverOutput, } from '../../snapshotsDiffer'; import { assertV1OutFolder, Journal, prepareMigrationFolder } from '../../utils'; import { prepareMigrationMetadata } from '../../utils/words'; import { CasingType, Driver, Prefix } from '../validations/common'; import { withStyle } from '../validations/outputs'; import { isRenamePromptItem, RenamePropmtItem, ResolveColumnSelect, ResolveSchemasSelect, ResolveSelect, ResolveSelectNamed, schema, } from '../views'; import { ExportConfig, GenerateConfig } from './utils'; export type Named = { name: string; }; export type NamedWithSchema = { name: string; schema: string; }; export const schemasResolver = async ( input: ResolverInput
, ): Promise> => { try { const { created, deleted, renamed } = await promptSchemasConflict( input.created, input.deleted, ); return { created: created, deleted: deleted, renamed: renamed }; } catch (e) { console.error(e); throw e; } }; export const tablesResolver = async ( input: ResolverInput
, ): Promise> => { try { const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( input.created, input.deleted, 'table', ); return { created: created, deleted: deleted, moved: moved, renamed: renamed, }; } catch (e) { console.error(e); throw e; } }; export const viewsResolver = async ( input: ResolverInput, ): Promise> => { try { const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( input.created, input.deleted, 'view', ); return { created: created, deleted: deleted, moved: moved, renamed: renamed, }; } catch (e) { console.error(e); throw e; } }; export const mySqlViewsResolver = async ( input: ResolverInput, ): Promise> => { try { const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( input.created, input.deleted, 'view', ); return { created: created, deleted: deleted, moved: moved, renamed: renamed, }; } catch (e) { console.error(e); throw e; } }; /* export const singleStoreViewsResolver = async ( input: ResolverInput, ): Promise> => { try { const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( input.created, input.deleted, 'view', ); return { created: created, deleted: deleted, moved: moved, renamed: renamed, }; } catch (e) { console.error(e); throw e; } }; */ export const sqliteViewsResolver = async ( input: ResolverInput, ): Promise> => { try { const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( input.created, input.deleted, 'view', ); return { created: created, deleted: deleted, moved: moved, renamed: renamed, }; } catch (e) { console.error(e); throw e; } }; export const sequencesResolver = async ( input: ResolverInput, ): Promise> => { try { const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( input.created, input.deleted, 'sequence', ); return { created: created, deleted: deleted, moved: moved, renamed: renamed, }; } catch (e) { console.error(e); throw e; } }; export const roleResolver = async ( input: RolesResolverInput, ): Promise> => { const result = await promptNamedConflict( input.created, input.deleted, 'role', ); return { created: result.created, deleted: result.deleted, renamed: result.renamed, }; }; export const policyResolver = async ( input: TablePolicyResolverInput, ): Promise> => { const result = await promptColumnsConflicts( input.tableName, input.created, input.deleted, ); return { tableName: input.tableName, schema: input.schema, created: result.created, deleted: result.deleted, renamed: result.renamed, }; }; export const indPolicyResolver = async ( input: PolicyResolverInput, ): Promise> => { const result = await promptNamedConflict( input.created, input.deleted, 'policy', ); return { created: result.created, deleted: result.deleted, renamed: result.renamed, }; }; export const enumsResolver = async ( input: ResolverInput, ): Promise> => { try { const { created, deleted, moved, renamed } = await promptNamedWithSchemasConflict( input.created, input.deleted, 'enum', ); return { created: created, deleted: deleted, moved: moved, renamed: renamed, }; } catch (e) { console.error(e); throw e; } }; export const columnsResolver = async ( input: ColumnsResolverInput, ): Promise> => { const result = await promptColumnsConflicts( input.tableName, input.created, input.deleted, ); return { tableName: input.tableName, schema: input.schema, created: result.created, deleted: result.deleted, renamed: result.renamed, }; }; export const prepareAndMigratePg = async (config: GenerateConfig) => { const outFolder = config.out; const schemaPath = config.schema; const casing = config.casing; try { assertV1OutFolder(outFolder); const { snapshots, journal } = prepareMigrationFolder( outFolder, 'postgresql', ); const { prev, cur, custom } = await preparePgMigrationSnapshot( snapshots, schemaPath, casing, ); const validatedPrev = pgSchema.parse(prev); const validatedCur = pgSchema.parse(cur); if (config.custom) { writeResult({ cur: custom, sqlStatements: [], journal, outFolder, name: config.name, breakpoints: config.breakpoints, type: 'custom', prefixMode: config.prefix, }); return; } const squashedPrev = squashPgScheme(validatedPrev); const squashedCur = squashPgScheme(validatedCur); const { sqlStatements, _meta } = await applyPgSnapshotsDiff( squashedPrev, squashedCur, schemasResolver, enumsResolver, sequencesResolver, policyResolver, indPolicyResolver, roleResolver, tablesResolver, columnsResolver, viewsResolver, validatedPrev, validatedCur, ); writeResult({ cur, sqlStatements, journal, outFolder, name: config.name, breakpoints: config.breakpoints, prefixMode: config.prefix, }); } catch (e) { console.error(e); } }; export const prepareAndExportPg = async (config: ExportConfig) => { const schemaPath = config.schema; try { const { prev, cur } = await preparePgMigrationSnapshot( [], // no snapshots before schemaPath, undefined, ); const validatedPrev = pgSchema.parse(prev); const validatedCur = pgSchema.parse(cur); const squashedPrev = squashPgScheme(validatedPrev); const squashedCur = squashPgScheme(validatedCur); const { sqlStatements } = await applyPgSnapshotsDiff( squashedPrev, squashedCur, schemasResolver, enumsResolver, sequencesResolver, policyResolver, indPolicyResolver, roleResolver, tablesResolver, columnsResolver, viewsResolver, validatedPrev, validatedCur, ); console.log(sqlStatements.join('\n')); } catch (e) { console.error(e); } }; export const preparePgPush = async ( cur: PgSchema, prev: PgSchema, ) => { const validatedPrev = pgSchema.parse(prev); const validatedCur = pgSchema.parse(cur); const squashedPrev = squashPgScheme(validatedPrev, 'push'); const squashedCur = squashPgScheme(validatedCur, 'push'); const { sqlStatements, statements, _meta } = await applyPgSnapshotsDiff( squashedPrev, squashedCur, schemasResolver, enumsResolver, sequencesResolver, policyResolver, indPolicyResolver, roleResolver, tablesResolver, columnsResolver, viewsResolver, validatedPrev, validatedCur, 'push', ); return { sqlStatements, statements, squashedPrev, squashedCur }; }; // Not needed for now function mysqlSchemaSuggestions( curSchema: TypeOf, prevSchema: TypeOf, ) { const suggestions: string[] = []; const usedSuggestions: string[] = []; const suggestionTypes = { serial: withStyle.errorWarning( `We deprecated the use of 'serial' for MySQL starting from version 0.20.0. In MySQL, 'serial' is simply an alias for 'bigint unsigned not null auto_increment unique,' which creates all constraints and indexes for you. This may make the process less explicit for both users and drizzle-kit push commands`, ), }; for (const table of Object.values(curSchema.tables)) { for (const column of Object.values(table.columns)) { if (column.type === 'serial') { if (!usedSuggestions.includes('serial')) { suggestions.push(suggestionTypes['serial']); } const uniqueForSerial = Object.values( prevSchema.tables[table.name].uniqueConstraints, ).find((it) => it.columns[0] === column.name); suggestions.push( `\n` + withStyle.suggestion( `We are suggesting to change ${ chalk.blue( column.name, ) } column in ${ chalk.blueBright( table.name, ) } table from serial to bigint unsigned\n\n${ chalk.blueBright( `bigint("${column.name}", { mode: "number", unsigned: true }).notNull().autoincrement().unique(${ uniqueForSerial?.name ? `"${uniqueForSerial?.name}"` : '' })`, ) }`, ), ); } } } return suggestions; } // Intersect with prepareAnMigrate export const prepareMySQLPush = async ( schemaPath: string | string[], snapshot: MySqlSchema, casing: CasingType | undefined, ) => { try { const { prev, cur } = await prepareMySqlDbPushSnapshot( snapshot, schemaPath, casing, ); const validatedPrev = mysqlSchema.parse(prev); const validatedCur = mysqlSchema.parse(cur); const squashedPrev = squashMysqlScheme(validatedPrev); const squashedCur = squashMysqlScheme(validatedCur); const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, mySqlViewsResolver, validatedPrev, validatedCur, 'push', ); return { sqlStatements, statements, validatedCur, validatedPrev }; } catch (e) { console.error(e); process.exit(1); } }; export const prepareAndMigrateMysql = async (config: GenerateConfig) => { const outFolder = config.out; const schemaPath = config.schema; const casing = config.casing; try { // TODO: remove assertV1OutFolder(outFolder); const { snapshots, journal } = prepareMigrationFolder(outFolder, 'mysql'); const { prev, cur, custom } = await prepareMySqlMigrationSnapshot( snapshots, schemaPath, casing, ); const validatedPrev = mysqlSchema.parse(prev); const validatedCur = mysqlSchema.parse(cur); if (config.custom) { writeResult({ cur: custom, sqlStatements: [], journal, outFolder, name: config.name, breakpoints: config.breakpoints, type: 'custom', prefixMode: config.prefix, }); return; } const squashedPrev = squashMysqlScheme(validatedPrev); const squashedCur = squashMysqlScheme(validatedCur); const { sqlStatements, statements, _meta } = await applyMysqlSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, mySqlViewsResolver, validatedPrev, validatedCur, ); writeResult({ cur, sqlStatements, journal, _meta, outFolder, name: config.name, breakpoints: config.breakpoints, prefixMode: config.prefix, }); } catch (e) { console.error(e); } }; // Not needed for now function singleStoreSchemaSuggestions( curSchema: TypeOf, prevSchema: TypeOf, ) { const suggestions: string[] = []; const usedSuggestions: string[] = []; const suggestionTypes = { // TODO: Check if SingleStore has serial type serial: withStyle.errorWarning( `We deprecated the use of 'serial' for SingleStore starting from version 0.20.0. In SingleStore, 'serial' is simply an alias for 'bigint unsigned not null auto_increment unique,' which creates all constraints and indexes for you. This may make the process less explicit for both users and drizzle-kit push commands`, ), }; for (const table of Object.values(curSchema.tables)) { for (const column of Object.values(table.columns)) { if (column.type === 'serial') { if (!usedSuggestions.includes('serial')) { suggestions.push(suggestionTypes['serial']); } const uniqueForSerial = Object.values( prevSchema.tables[table.name].uniqueConstraints, ).find((it) => it.columns[0] === column.name); suggestions.push( `\n` + withStyle.suggestion( `We are suggesting to change ${ chalk.blue( column.name, ) } column in ${ chalk.blueBright( table.name, ) } table from serial to bigint unsigned\n\n${ chalk.blueBright( `bigint("${column.name}", { mode: "number", unsigned: true }).notNull().autoincrement().unique(${ uniqueForSerial?.name ? `"${uniqueForSerial?.name}"` : '' })`, ) }`, ), ); } } } return suggestions; } // Intersect with prepareAnMigrate export const prepareSingleStorePush = async ( schemaPath: string | string[], snapshot: SingleStoreSchema, casing: CasingType | undefined, ) => { try { const { prev, cur } = await prepareSingleStoreDbPushSnapshot( snapshot, schemaPath, casing, ); const validatedPrev = singlestoreSchema.parse(prev); const validatedCur = singlestoreSchema.parse(cur); const squashedPrev = squashSingleStoreScheme(validatedPrev); const squashedCur = squashSingleStoreScheme(validatedCur); const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, /* singleStoreViewsResolver, */ validatedPrev, validatedCur, 'push', ); return { sqlStatements, statements, validatedCur, validatedPrev }; } catch (e) { console.error(e); process.exit(1); } }; export const prepareAndMigrateSingleStore = async (config: GenerateConfig) => { const outFolder = config.out; const schemaPath = config.schema; const casing = config.casing; try { // TODO: remove assertV1OutFolder(outFolder); const { snapshots, journal } = prepareMigrationFolder(outFolder, 'singlestore'); const { prev, cur, custom } = await prepareSingleStoreMigrationSnapshot( snapshots, schemaPath, casing, ); const validatedPrev = singlestoreSchema.parse(prev); const validatedCur = singlestoreSchema.parse(cur); if (config.custom) { writeResult({ cur: custom, sqlStatements: [], journal, outFolder, name: config.name, breakpoints: config.breakpoints, type: 'custom', prefixMode: config.prefix, }); return; } const squashedPrev = squashSingleStoreScheme(validatedPrev); const squashedCur = squashSingleStoreScheme(validatedCur); const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, /* singleStoreViewsResolver, */ validatedPrev, validatedCur, ); writeResult({ cur, sqlStatements, journal, _meta, outFolder, name: config.name, breakpoints: config.breakpoints, prefixMode: config.prefix, }); } catch (e) { console.error(e); } }; export const prepareAndExportSinglestore = async (config: ExportConfig) => { const schemaPath = config.schema; try { const { prev, cur } = await prepareSingleStoreMigrationSnapshot( [], schemaPath, undefined, ); const validatedPrev = singlestoreSchema.parse(prev); const validatedCur = singlestoreSchema.parse(cur); const squashedPrev = squashSingleStoreScheme(validatedPrev); const squashedCur = squashSingleStoreScheme(validatedCur); const { sqlStatements, _meta } = await applySingleStoreSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, /* singleStoreViewsResolver, */ validatedPrev, validatedCur, ); console.log(sqlStatements.join('\n')); } catch (e) { console.error(e); } }; export const prepareAndExportMysql = async (config: ExportConfig) => { const schemaPath = config.schema; try { const { prev, cur, custom } = await prepareMySqlMigrationSnapshot( [], schemaPath, undefined, ); const validatedPrev = mysqlSchema.parse(prev); const validatedCur = mysqlSchema.parse(cur); const squashedPrev = squashMysqlScheme(validatedPrev); const squashedCur = squashMysqlScheme(validatedCur); const { sqlStatements, statements, _meta } = await applyMysqlSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, mySqlViewsResolver, validatedPrev, validatedCur, ); console.log(sqlStatements.join('\n')); } catch (e) { console.error(e); } }; export const prepareAndMigrateSqlite = async (config: GenerateConfig) => { const outFolder = config.out; const schemaPath = config.schema; const casing = config.casing; try { assertV1OutFolder(outFolder); const { snapshots, journal } = prepareMigrationFolder(outFolder, 'sqlite'); const { prev, cur, custom } = await prepareSqliteMigrationSnapshot( snapshots, schemaPath, casing, ); const validatedPrev = sqliteSchema.parse(prev); const validatedCur = sqliteSchema.parse(cur); if (config.custom) { writeResult({ cur: custom, sqlStatements: [], journal, outFolder, name: config.name, breakpoints: config.breakpoints, bundle: config.bundle, type: 'custom', prefixMode: config.prefix, }); return; } const squashedPrev = squashSqliteScheme(validatedPrev); const squashedCur = squashSqliteScheme(validatedCur); const { sqlStatements, _meta } = await applySqliteSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, sqliteViewsResolver, validatedPrev, validatedCur, ); writeResult({ cur, sqlStatements, journal, _meta, outFolder, name: config.name, breakpoints: config.breakpoints, bundle: config.bundle, prefixMode: config.prefix, driver: config.driver, }); } catch (e) { console.error(e); } }; export const prepareAndExportSqlite = async (config: ExportConfig) => { const schemaPath = config.schema; try { const { prev, cur } = await prepareSqliteMigrationSnapshot( [], schemaPath, undefined, ); const validatedPrev = sqliteSchema.parse(prev); const validatedCur = sqliteSchema.parse(cur); const squashedPrev = squashSqliteScheme(validatedPrev); const squashedCur = squashSqliteScheme(validatedCur); const { sqlStatements, _meta } = await applySqliteSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, sqliteViewsResolver, validatedPrev, validatedCur, ); console.log(sqlStatements.join('\n')); } catch (e) { console.error(e); } }; export const prepareAndMigrateLibSQL = async (config: GenerateConfig) => { const outFolder = config.out; const schemaPath = config.schema; const casing = config.casing; try { assertV1OutFolder(outFolder); const { snapshots, journal } = prepareMigrationFolder(outFolder, 'sqlite'); const { prev, cur, custom } = await prepareSqliteMigrationSnapshot( snapshots, schemaPath, casing, ); const validatedPrev = sqliteSchema.parse(prev); const validatedCur = sqliteSchema.parse(cur); if (config.custom) { writeResult({ cur: custom, sqlStatements: [], journal, outFolder, name: config.name, breakpoints: config.breakpoints, bundle: config.bundle, type: 'custom', prefixMode: config.prefix, }); return; } const squashedPrev = squashSqliteScheme(validatedPrev); const squashedCur = squashSqliteScheme(validatedCur); const { sqlStatements, _meta } = await applyLibSQLSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, sqliteViewsResolver, validatedPrev, validatedCur, ); writeResult({ cur, sqlStatements, journal, _meta, outFolder, name: config.name, breakpoints: config.breakpoints, bundle: config.bundle, prefixMode: config.prefix, }); } catch (e) { console.error(e); } }; export const prepareAndExportLibSQL = async (config: ExportConfig) => { const schemaPath = config.schema; try { const { prev, cur, custom } = await prepareSqliteMigrationSnapshot( [], schemaPath, undefined, ); const validatedPrev = sqliteSchema.parse(prev); const validatedCur = sqliteSchema.parse(cur); const squashedPrev = squashSqliteScheme(validatedPrev); const squashedCur = squashSqliteScheme(validatedCur); const { sqlStatements, _meta } = await applyLibSQLSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, sqliteViewsResolver, validatedPrev, validatedCur, ); console.log(sqlStatements.join('\n')); } catch (e) { console.error(e); } }; export const prepareSQLitePush = async ( schemaPath: string | string[], snapshot: SQLiteSchema, casing: CasingType | undefined, ) => { const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing); const validatedPrev = sqliteSchema.parse(prev); const validatedCur = sqliteSchema.parse(cur); const squashedPrev = squashSqliteScheme(validatedPrev, 'push'); const squashedCur = squashSqliteScheme(validatedCur, 'push'); const { sqlStatements, statements, _meta } = await applySqliteSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, sqliteViewsResolver, validatedPrev, validatedCur, 'push', ); return { sqlStatements, statements, squashedPrev, squashedCur, meta: _meta, }; }; export const prepareLibSQLPush = async ( schemaPath: string | string[], snapshot: SQLiteSchema, casing: CasingType | undefined, ) => { const { prev, cur } = await prepareSQLiteDbPushSnapshot(snapshot, schemaPath, casing); const validatedPrev = sqliteSchema.parse(prev); const validatedCur = sqliteSchema.parse(cur); const squashedPrev = squashSqliteScheme(validatedPrev, 'push'); const squashedCur = squashSqliteScheme(validatedCur, 'push'); const { sqlStatements, statements, _meta } = await applyLibSQLSnapshotsDiff( squashedPrev, squashedCur, tablesResolver, columnsResolver, sqliteViewsResolver, validatedPrev, validatedCur, 'push', ); return { sqlStatements, statements, squashedPrev, squashedCur, meta: _meta, }; }; const freeeeeeze = (obj: any) => { Object.freeze(obj); for (let key in obj) { if (obj.hasOwnProperty(key) && typeof obj[key] === 'object') { freeeeeeze(obj[key]); } } }; export const promptColumnsConflicts = async ( tableName: string, newColumns: T[], missingColumns: T[], ) => { if (newColumns.length === 0 || missingColumns.length === 0) { return { created: newColumns, renamed: [], deleted: missingColumns }; } const result: { created: T[]; renamed: { from: T; to: T }[]; deleted: T[] } = { created: [], renamed: [], deleted: [], }; let index = 0; let leftMissing = [...missingColumns]; do { const created = newColumns[index]; const renames: RenamePropmtItem[] = leftMissing.map((it) => { return { from: it, to: created }; }); const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; const { status, data } = await render( new ResolveColumnSelect(tableName, created, promptData), ); if (status === 'aborted') { console.error('ERROR'); process.exit(1); } if (isRenamePromptItem(data)) { console.log( `${chalk.yellow('~')} ${data.from.name} › ${data.to.name} ${ chalk.gray( 'column will be renamed', ) }`, ); result.renamed.push(data); // this will make [item1, undefined, item2] delete leftMissing[leftMissing.indexOf(data.from)]; // this will make [item1, item2] leftMissing = leftMissing.filter(Boolean); } else { console.log( `${chalk.green('+')} ${data.name} ${ chalk.gray( 'column will be created', ) }`, ); result.created.push(created); } index += 1; } while (index < newColumns.length); console.log( chalk.gray(`--- all columns conflicts in ${tableName} table resolved ---\n`), ); result.deleted.push(...leftMissing); return result; }; export const promptNamedConflict = async ( newItems: T[], missingItems: T[], entity: 'role' | 'policy', ): Promise<{ created: T[]; renamed: { from: T; to: T }[]; deleted: T[]; }> => { if (missingItems.length === 0 || newItems.length === 0) { return { created: newItems, renamed: [], deleted: missingItems, }; } const result: { created: T[]; renamed: { from: T; to: T }[]; deleted: T[]; } = { created: [], renamed: [], deleted: [] }; let index = 0; let leftMissing = [...missingItems]; do { const created = newItems[index]; const renames: RenamePropmtItem[] = leftMissing.map((it) => { return { from: it, to: created }; }); const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; const { status, data } = await render( new ResolveSelectNamed(created, promptData, entity), ); if (status === 'aborted') { console.error('ERROR'); process.exit(1); } if (isRenamePromptItem(data)) { console.log( `${chalk.yellow('~')} ${data.from.name} › ${data.to.name} ${ chalk.gray( `${entity} will be renamed/moved`, ) }`, ); if (data.from.name !== data.to.name) { result.renamed.push(data); } delete leftMissing[leftMissing.indexOf(data.from)]; leftMissing = leftMissing.filter(Boolean); } else { console.log( `${chalk.green('+')} ${data.name} ${ chalk.gray( `${entity} will be created`, ) }`, ); result.created.push(created); } index += 1; } while (index < newItems.length); console.log(chalk.gray(`--- all ${entity} conflicts resolved ---\n`)); result.deleted.push(...leftMissing); return result; }; export const promptNamedWithSchemasConflict = async ( newItems: T[], missingItems: T[], entity: 'table' | 'enum' | 'sequence' | 'view', ): Promise<{ created: T[]; renamed: { from: T; to: T }[]; moved: { name: string; schemaFrom: string; schemaTo: string }[]; deleted: T[]; }> => { if (missingItems.length === 0 || newItems.length === 0) { return { created: newItems, renamed: [], moved: [], deleted: missingItems, }; } const result: { created: T[]; renamed: { from: T; to: T }[]; moved: { name: string; schemaFrom: string; schemaTo: string }[]; deleted: T[]; } = { created: [], renamed: [], moved: [], deleted: [] }; let index = 0; let leftMissing = [...missingItems]; do { const created = newItems[index]; const renames: RenamePropmtItem[] = leftMissing.map((it) => { return { from: it, to: created }; }); const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; const { status, data } = await render( new ResolveSelect(created, promptData, entity), ); if (status === 'aborted') { console.error('ERROR'); process.exit(1); } if (isRenamePromptItem(data)) { const schemaFromPrefix = !data.from.schema || data.from.schema === 'public' ? '' : `${data.from.schema}.`; const schemaToPrefix = !data.to.schema || data.to.schema === 'public' ? '' : `${data.to.schema}.`; console.log( `${chalk.yellow('~')} ${schemaFromPrefix}${data.from.name} › ${schemaToPrefix}${data.to.name} ${ chalk.gray( `${entity} will be renamed/moved`, ) }`, ); if (data.from.name !== data.to.name) { result.renamed.push(data); } if (data.from.schema !== data.to.schema) { result.moved.push({ name: data.from.name, schemaFrom: data.from.schema || 'public', schemaTo: data.to.schema || 'public', }); } delete leftMissing[leftMissing.indexOf(data.from)]; leftMissing = leftMissing.filter(Boolean); } else { console.log( `${chalk.green('+')} ${data.name} ${ chalk.gray( `${entity} will be created`, ) }`, ); result.created.push(created); } index += 1; } while (index < newItems.length); console.log(chalk.gray(`--- all ${entity} conflicts resolved ---\n`)); result.deleted.push(...leftMissing); return result; }; export const promptSchemasConflict = async ( newSchemas: T[], missingSchemas: T[], ): Promise<{ created: T[]; renamed: { from: T; to: T }[]; deleted: T[] }> => { if (missingSchemas.length === 0 || newSchemas.length === 0) { return { created: newSchemas, renamed: [], deleted: missingSchemas }; } const result: { created: T[]; renamed: { from: T; to: T }[]; deleted: T[] } = { created: [], renamed: [], deleted: [], }; let index = 0; let leftMissing = [...missingSchemas]; do { const created = newSchemas[index]; const renames: RenamePropmtItem[] = leftMissing.map((it) => { return { from: it, to: created }; }); const promptData: (RenamePropmtItem | T)[] = [created, ...renames]; const { status, data } = await render( new ResolveSchemasSelect(created, promptData), ); if (status === 'aborted') { console.error('ERROR'); process.exit(1); } if (isRenamePromptItem(data)) { console.log( `${chalk.yellow('~')} ${data.from.name} › ${data.to.name} ${ chalk.gray( 'schema will be renamed', ) }`, ); result.renamed.push(data); delete leftMissing[leftMissing.indexOf(data.from)]; leftMissing = leftMissing.filter(Boolean); } else { console.log( `${chalk.green('+')} ${data.name} ${ chalk.gray( 'schema will be created', ) }`, ); result.created.push(created); } index += 1; } while (index < newSchemas.length); console.log(chalk.gray('--- all schemas conflicts resolved ---\n')); result.deleted.push(...leftMissing); return result; }; export const BREAKPOINT = '--> statement-breakpoint\n'; export const writeResult = ({ cur, sqlStatements, journal, _meta = { columns: {}, schemas: {}, tables: {}, }, outFolder, breakpoints, name, bundle = false, type = 'none', prefixMode, driver, }: { cur: CommonSchema; sqlStatements: string[]; journal: Journal; _meta?: any; outFolder: string; breakpoints: boolean; prefixMode: Prefix; name?: string; bundle?: boolean; type?: 'introspect' | 'custom' | 'none'; driver?: Driver; }) => { if (type === 'none') { console.log(schema(cur)); if (sqlStatements.length === 0) { console.log('No schema changes, nothing to migrate 😴'); return; } } // append entry to _migrations.json // append entry to _journal.json->entries // dialect in _journal.json // append sql file to out folder // append snapshot file to meta folder const lastEntryInJournal = journal.entries[journal.entries.length - 1]; const idx = typeof lastEntryInJournal === 'undefined' ? 0 : lastEntryInJournal.idx + 1; const { prefix, tag } = prepareMigrationMetadata(idx, prefixMode, name); const toSave = JSON.parse(JSON.stringify(cur)); toSave['_meta'] = _meta; // todo: save results to a new migration folder const metaFolderPath = join(outFolder, 'meta'); const metaJournal = join(metaFolderPath, '_journal.json'); fs.writeFileSync( join(metaFolderPath, `${prefix}_snapshot.json`), JSON.stringify(toSave, null, 2), ); const sqlDelimiter = breakpoints ? BREAKPOINT : '\n'; let sql = sqlStatements.join(sqlDelimiter); if (type === 'introspect') { sql = `-- Current sql file was generated after introspecting the database\n-- If you want to run this migration please uncomment this code before executing migrations\n/*\n${sql}\n*/`; } if (type === 'custom') { console.log('Prepared empty file for your custom SQL migration!'); sql = '-- Custom SQL migration file, put your code below! --'; } journal.entries.push({ idx, version: cur.version, when: +new Date(), tag, breakpoints: breakpoints, }); fs.writeFileSync(metaJournal, JSON.stringify(journal, null, 2)); fs.writeFileSync(`${outFolder}/${tag}.sql`, sql); // js file with .sql imports for React Native / Expo and Durable Sqlite Objects if (bundle) { const js = embeddedMigrations(journal, driver); fs.writeFileSync(`${outFolder}/migrations.js`, js); } render( `[${ chalk.green( '✓', ) }] Your SQL migration file ➜ ${ chalk.bold.underline.blue( path.join(`${outFolder}/${tag}.sql`), ) } 🚀`, ); }; export const embeddedMigrations = (journal: Journal, driver?: Driver) => { let content = driver === 'expo' ? '// This file is required for Expo/React Native SQLite migrations - https://orm.drizzle.team/quick-sqlite/expo\n\n' : ''; content += "import journal from './meta/_journal.json';\n"; journal.entries.forEach((entry) => { content += `import m${entry.idx.toString().padStart(4, '0')} from './${entry.tag}.sql';\n`; }); content += ` export default { journal, migrations: { ${ journal.entries .map((it) => `m${it.idx.toString().padStart(4, '0')}`) .join(',\n') } } } `; return content; }; export const prepareSnapshotFolderName = () => { const now = new Date(); return `${now.getFullYear()}${two(now.getUTCMonth() + 1)}${ two( now.getUTCDate(), ) }${two(now.getUTCHours())}${two(now.getUTCMinutes())}${ two( now.getUTCSeconds(), ) }`; }; const two = (input: number): string => { return input.toString().padStart(2, '0'); }; ================================================ FILE: drizzle-kit/src/cli/commands/mysqlIntrospect.ts ================================================ import { renderWithTask } from 'hanji'; import { Minimatch } from 'minimatch'; import { originUUID } from '../../global'; import type { MySqlSchema } from '../../serializer/mysqlSchema'; import { fromDatabase } from '../../serializer/mysqlSerializer'; import type { DB } from '../../utils'; import { ProgressView } from '../views'; export const mysqlPushIntrospect = async ( db: DB, databaseName: string, filters: string[], ) => { const matchers = filters.map((it) => { return new Minimatch(it); }); const filter = (tableName: string) => { if (matchers.length === 0) return true; let flags: boolean[] = []; for (let matcher of matchers) { if (matcher.negate) { if (!matcher.match(tableName)) { flags.push(false); } } if (matcher.match(tableName)) { flags.push(true); } } if (flags.length > 0) { return flags.every(Boolean); } return false; }; const progress = new ProgressView( 'Pulling schema from database...', 'Pulling schema from database...', ); const res = await renderWithTask( progress, fromDatabase(db, databaseName, filter), ); const schema = { id: originUUID, prevId: '', ...res } as MySqlSchema; const { internal, ...schemaWithoutInternals } = schema; return { schema: schemaWithoutInternals }; }; ================================================ FILE: drizzle-kit/src/cli/commands/mysqlPushUtils.ts ================================================ import chalk from 'chalk'; import { render } from 'hanji'; import { TypeOf } from 'zod'; import { JsonAlterColumnTypeStatement, JsonStatement } from '../../jsonStatements'; import { mysqlSchema, MySqlSquasher } from '../../serializer/mysqlSchema'; import type { DB } from '../../utils'; import { Select } from '../selector-ui'; import { withStyle } from '../validations/outputs'; export const filterStatements = ( statements: JsonStatement[], currentSchema: TypeOf, prevSchema: TypeOf, ) => { return statements.filter((statement) => { if (statement.type === 'alter_table_alter_column_set_type') { // Don't need to handle it on migrations step and introspection // but for both it should be skipped if ( statement.oldDataType.startsWith('tinyint') && statement.newDataType.startsWith('boolean') ) { return false; } if ( statement.oldDataType.startsWith('bigint unsigned') && statement.newDataType.startsWith('serial') ) { return false; } if ( statement.oldDataType.startsWith('serial') && statement.newDataType.startsWith('bigint unsigned') ) { return false; } } else if (statement.type === 'alter_table_alter_column_set_default') { if ( statement.newDefaultValue === false && statement.oldDefaultValue === 0 && statement.newDataType === 'boolean' ) { return false; } if ( statement.newDefaultValue === true && statement.oldDefaultValue === 1 && statement.newDataType === 'boolean' ) { return false; } } else if (statement.type === 'delete_unique_constraint') { const unsquashed = MySqlSquasher.unsquashUnique(statement.data); // only if constraint was removed from a serial column, than treat it as removed // const serialStatement = statements.find( // (it) => it.type === "alter_table_alter_column_set_type" // ) as JsonAlterColumnTypeStatement; // if ( // serialStatement?.oldDataType.startsWith("bigint unsigned") && // serialStatement?.newDataType.startsWith("serial") && // serialStatement.columnName === // MySqlSquasher.unsquashUnique(statement.data).columns[0] // ) { // return false; // } // Check if uniqueindex was only on this column, that is serial // if now serial and was not serial and was unique index if ( unsquashed.columns.length === 1 && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] .type === 'serial' && prevSchema.tables[statement.tableName].columns[unsquashed.columns[0]] .type === 'serial' && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] .name === unsquashed.columns[0] ) { return false; } } else if (statement.type === 'alter_table_alter_column_drop_notnull') { // only if constraint was removed from a serial column, than treat it as removed const serialStatement = statements.find( (it) => it.type === 'alter_table_alter_column_set_type', ) as JsonAlterColumnTypeStatement; if ( serialStatement?.oldDataType.startsWith('bigint unsigned') && serialStatement?.newDataType.startsWith('serial') && serialStatement.columnName === statement.columnName && serialStatement.tableName === statement.tableName ) { return false; } if (statement.newDataType === 'serial' && !statement.columnNotNull) { return false; } if (statement.columnAutoIncrement) { return false; } } return true; }); }; export const logSuggestionsAndReturn = async ( db: DB, statements: JsonStatement[], json2: TypeOf, ) => { let shouldAskForApprove = false; const statementsToExecute: string[] = []; const infoToPrint: string[] = []; const tablesToRemove: string[] = []; const columnsToRemove: string[] = []; const schemasToRemove: string[] = []; const tablesToTruncate: string[] = []; for (const statement of statements) { if (statement.type === 'drop_table') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( statement.tableName, ) } table with ${count} items`, ); tablesToRemove.push(statement.tableName); shouldAskForApprove = true; } } else if (statement.type === 'alter_table_drop_column') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( statement.columnName, ) } column in ${statement.tableName} table with ${count} items`, ); columnsToRemove.push(`${statement.tableName}_${statement.columnName}`); shouldAskForApprove = true; } } else if (statement.type === 'drop_schema') { const res = await db.query( `select count(*) as count from information_schema.tables where table_schema = \`${statement.name}\`;`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( statement.name, ) } schema with ${count} tables`, ); schemasToRemove.push(statement.name); shouldAskForApprove = true; } } else if (statement.type === 'alter_table_alter_column_set_type') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to change ${ chalk.underline( statement.columnName, ) } column type from ${ chalk.underline( statement.oldDataType, ) } to ${chalk.underline(statement.newDataType)} with ${count} items`, ); statementsToExecute.push(`truncate table ${statement.tableName};`); tablesToTruncate.push(statement.tableName); shouldAskForApprove = true; } } else if (statement.type === 'alter_table_alter_column_drop_default') { if (statement.columnNotNull) { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to remove default value from ${ chalk.underline( statement.columnName, ) } not-null column with ${count} items`, ); tablesToTruncate.push(statement.tableName); statementsToExecute.push(`truncate table ${statement.tableName};`); shouldAskForApprove = true; } } // shouldAskForApprove = true; } else if (statement.type === 'alter_table_alter_column_set_notnull') { if (typeof statement.columnDefault === 'undefined') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to set not-null constraint to ${ chalk.underline( statement.columnName, ) } column without default, which contains ${count} items`, ); tablesToTruncate.push(statement.tableName); statementsToExecute.push(`truncate table ${statement.tableName};`); shouldAskForApprove = true; } } } else if (statement.type === 'alter_table_alter_column_drop_pk') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); // if drop pk and json2 has autoincrement in table -> exit process with error if ( Object.values(json2.tables[statement.tableName].columns).filter( (column) => column.autoincrement, ).length > 0 ) { console.log( `${ withStyle.errorWarning( `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, ) }`, ); process.exit(1); } const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to change ${ chalk.underline( statement.tableName, ) } primary key. This statements may fail and you table may left without primary key`, ); tablesToTruncate.push(statement.tableName); shouldAskForApprove = true; } } else if (statement.type === 'delete_composite_pk') { // if drop pk and json2 has autoincrement in table -> exit process with error if ( Object.values(json2.tables[statement.tableName].columns).filter( (column) => column.autoincrement, ).length > 0 ) { console.log( `${ withStyle.errorWarning( `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, ) }`, ); process.exit(1); } } else if (statement.type === 'alter_table_add_column') { if ( statement.column.notNull && typeof statement.column.default === 'undefined' ) { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to add not-null ${ chalk.underline( statement.column.name, ) } column without default value, which contains ${count} items`, ); tablesToTruncate.push(statement.tableName); statementsToExecute.push(`truncate table ${statement.tableName};`); shouldAskForApprove = true; } } } else if (statement.type === 'create_unique_constraint') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { const unsquashedUnique = MySqlSquasher.unsquashUnique(statement.data); console.log( `· You're about to add ${ chalk.underline( unsquashedUnique.name, ) } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database. Do you want to truncate ${ chalk.underline( statement.tableName, ) } table?\n`, ); const { status, data } = await render( new Select([ 'No, add the constraint without truncating the table', `Yes, truncate the table`, ]), ); if (data?.index === 1) { tablesToTruncate.push(statement.tableName); statementsToExecute.push(`truncate table ${statement.tableName};`); shouldAskForApprove = true; } } } } return { statementsToExecute, shouldAskForApprove, infoToPrint, columnsToRemove: [...new Set(columnsToRemove)], schemasToRemove: [...new Set(schemasToRemove)], tablesToTruncate: [...new Set(tablesToTruncate)], tablesToRemove: [...new Set(tablesToRemove)], }; }; ================================================ FILE: drizzle-kit/src/cli/commands/mysqlUp.ts ================================================ import chalk from 'chalk'; import fs, { writeFileSync } from 'fs'; import path from 'path'; import { Column, MySqlSchema, MySqlSchemaV4, MySqlSchemaV5, mysqlSchemaV5, Table } from '../../serializer/mysqlSchema'; import { prepareOutFolder, validateWithReport } from '../../utils'; export const upMysqlHandler = (out: string) => {}; export const upMySqlHandlerV4toV5 = (obj: MySqlSchemaV4): MySqlSchemaV5 => { const mappedTables: Record = {}; for (const [key, table] of Object.entries(obj.tables)) { const mappedColumns: Record = {}; for (const [ckey, column] of Object.entries(table.columns)) { let newDefault: any = column.default; let newType: string = column.type; let newAutoIncrement: boolean | undefined = column.autoincrement; if (column.type.toLowerCase().startsWith('datetime')) { if (typeof column.default !== 'undefined') { if (column.default.startsWith("'") && column.default.endsWith("'")) { newDefault = `'${ column.default .substring(1, column.default.length - 1) .replace('T', ' ') .slice(0, 23) }'`; } else { newDefault = column.default.replace('T', ' ').slice(0, 23); } } newType = column.type.toLowerCase().replace('datetime (', 'datetime('); } else if (column.type.toLowerCase() === 'date') { if (typeof column.default !== 'undefined') { if (column.default.startsWith("'") && column.default.endsWith("'")) { newDefault = `'${ column.default .substring(1, column.default.length - 1) .split('T')[0] }'`; } else { newDefault = column.default.split('T')[0]; } } newType = column.type.toLowerCase().replace('date (', 'date('); } else if (column.type.toLowerCase().startsWith('timestamp')) { if (typeof column.default !== 'undefined') { if (column.default.startsWith("'") && column.default.endsWith("'")) { newDefault = `'${ column.default .substring(1, column.default.length - 1) .replace('T', ' ') .slice(0, 23) }'`; } else { newDefault = column.default.replace('T', ' ').slice(0, 23); } } newType = column.type .toLowerCase() .replace('timestamp (', 'timestamp('); } else if (column.type.toLowerCase().startsWith('time')) { newType = column.type.toLowerCase().replace('time (', 'time('); } else if (column.type.toLowerCase().startsWith('decimal')) { newType = column.type.toLowerCase().replace(', ', ','); } else if (column.type.toLowerCase().startsWith('enum')) { newType = column.type.toLowerCase(); } else if (column.type.toLowerCase().startsWith('serial')) { newAutoIncrement = true; } mappedColumns[ckey] = { ...column, default: newDefault, type: newType, autoincrement: newAutoIncrement, }; } mappedTables[key] = { ...table, columns: mappedColumns, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraint: {}, }; } return { version: '5', dialect: obj.dialect, id: obj.id, prevId: obj.prevId, tables: mappedTables, schemas: obj.schemas, _meta: { schemas: {} as Record, tables: {} as Record, columns: {} as Record, }, }; }; ================================================ FILE: drizzle-kit/src/cli/commands/pgIntrospect.ts ================================================ import { renderWithTask } from 'hanji'; import { Minimatch } from 'minimatch'; import { originUUID } from '../../global'; import type { PgSchema, PgSchemaInternal } from '../../serializer/pgSchema'; import { fromDatabase } from '../../serializer/pgSerializer'; import type { DB } from '../../utils'; import type { Entities } from '../validations/cli'; import { ProgressView } from '../views'; export const pgPushIntrospect = async ( db: DB, filters: string[], schemaFilters: string[], entities: Entities, tsSchema?: PgSchemaInternal, ) => { const matchers = filters.map((it) => { return new Minimatch(it); }); const filter = (tableName: string) => { if (matchers.length === 0) return true; let flags: boolean[] = []; for (let matcher of matchers) { if (matcher.negate) { if (!matcher.match(tableName)) { flags.push(false); } } if (matcher.match(tableName)) { flags.push(true); } } if (flags.length > 0) { return flags.every(Boolean); } return false; }; const progress = new ProgressView( 'Pulling schema from database...', 'Pulling schema from database...', ); const res = await renderWithTask( progress, fromDatabase(db, filter, schemaFilters, entities, undefined, tsSchema), ); const schema = { id: originUUID, prevId: '', ...res } as PgSchema; const { internal, ...schemaWithoutInternals } = schema; return { schema: schemaWithoutInternals }; }; ================================================ FILE: drizzle-kit/src/cli/commands/pgPushUtils.ts ================================================ import chalk from 'chalk'; import { render } from 'hanji'; import type { JsonStatement } from '../../jsonStatements'; import { PgSquasher } from '../../serializer/pgSchema'; import { fromJson } from '../../sqlgenerator'; import type { DB } from '../../utils'; import { Select } from '../selector-ui'; // export const filterStatements = (statements: JsonStatement[]) => { // return statements.filter((statement) => { // if (statement.type === "alter_table_alter_column_set_type") { // // Don't need to handle it on migrations step and introspection // // but for both it should be skipped // if ( // statement.oldDataType.startsWith("tinyint") && // statement.newDataType.startsWith("boolean") // ) { // return false; // } // } else if (statement.type === "alter_table_alter_column_set_default") { // if ( // statement.newDefaultValue === false && // statement.oldDefaultValue === 0 && // statement.newDataType === "boolean" // ) { // return false; // } // if ( // statement.newDefaultValue === true && // statement.oldDefaultValue === 1 && // statement.newDataType === "boolean" // ) { // return false; // } // } // return true; // }); // }; function concatSchemaAndTableName(schema: string | undefined, table: string) { return schema ? `"${schema}"."${table}"` : `"${table}"`; } function tableNameWithSchemaFrom( schema: string | undefined, tableName: string, renamedSchemas: Record, renamedTables: Record, ) { const newSchemaName = schema ? (renamedSchemas[schema] ? renamedSchemas[schema] : schema) : undefined; const newTableName = renamedTables[concatSchemaAndTableName(newSchemaName, tableName)] ? renamedTables[concatSchemaAndTableName(newSchemaName, tableName)] : tableName; return concatSchemaAndTableName(newSchemaName, newTableName); } export const pgSuggestions = async (db: DB, statements: JsonStatement[]) => { let shouldAskForApprove = false; const statementsToExecute: string[] = []; const infoToPrint: string[] = []; const tablesToRemove: string[] = []; const columnsToRemove: string[] = []; const schemasToRemove: string[] = []; const tablesToTruncate: string[] = []; const matViewsToRemove: string[] = []; let renamedSchemas: Record = {}; let renamedTables: Record = {}; for (const statement of statements) { if (statement.type === 'rename_schema') { renamedSchemas[statement.to] = statement.from; } else if (statement.type === 'rename_table') { renamedTables[concatSchemaAndTableName(statement.toSchema, statement.tableNameTo)] = statement.tableNameFrom; } else if (statement.type === 'drop_table') { const res = await db.query( `select count(*) as count from ${ tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push(`· You're about to delete ${chalk.underline(statement.tableName)} table with ${count} items`); // statementsToExecute.push( // `truncate table ${tableNameWithSchemaFrom(statement)} cascade;` // ); tablesToRemove.push(statement.tableName); shouldAskForApprove = true; } } else if (statement.type === 'drop_view' && statement.materialized) { const res = await db.query(`select count(*) as count from "${statement.schema ?? 'public'}"."${statement.name}"`); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete "${chalk.underline(statement.name)}" materialized view with ${count} items`, ); matViewsToRemove.push(statement.name); shouldAskForApprove = true; } } else if (statement.type === 'alter_table_drop_column') { const res = await db.query( `select count(*) as count from ${ tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline(statement.columnName) } column in ${statement.tableName} table with ${count} items`, ); columnsToRemove.push(`${statement.tableName}_${statement.columnName}`); shouldAskForApprove = true; } } else if (statement.type === 'drop_schema') { const res = await db.query( `select count(*) as count from information_schema.tables where table_schema = '${statement.name}';`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push(`· You're about to delete ${chalk.underline(statement.name)} schema with ${count} tables`); schemasToRemove.push(statement.name); shouldAskForApprove = true; } } else if (statement.type === 'alter_table_alter_column_set_type') { const res = await db.query( `select count(*) as count from ${ tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to change ${chalk.underline(statement.columnName)} column type from ${ chalk.underline(statement.oldDataType) } to ${ chalk.underline( statement.newDataType, ) } with ${count} items`, ); statementsToExecute.push( `truncate table ${ tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) } cascade;`, ); tablesToTruncate.push(statement.tableName); shouldAskForApprove = true; } } else if (statement.type === 'alter_table_alter_column_drop_pk') { const res = await db.query( `select count(*) as count from ${ tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to change ${ chalk.underline(statement.tableName) } primary key. This statements may fail and you table may left without primary key`, ); tablesToTruncate.push(statement.tableName); shouldAskForApprove = true; } const tableNameWithSchema = tableNameWithSchemaFrom( statement.schema, statement.tableName, renamedSchemas, renamedTables, ); const pkNameResponse = await db.query( `SELECT constraint_name FROM information_schema.table_constraints WHERE table_schema = '${ typeof statement.schema === 'undefined' || statement.schema === '' ? 'public' : statement.schema }' AND table_name = '${statement.tableName}' AND constraint_type = 'PRIMARY KEY';`, ); statementsToExecute.push( `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${pkNameResponse[0].constraint_name}"`, ); // we will generate statement for drop pk here and not after all if-else statements continue; } else if (statement.type === 'alter_table_add_column') { if (statement.column.notNull && typeof statement.column.default === 'undefined') { const res = await db.query( `select count(*) as count from ${ tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to add not-null ${ chalk.underline(statement.column.name) } column without default value, which contains ${count} items`, ); tablesToTruncate.push(statement.tableName); statementsToExecute.push( `truncate table ${ tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) } cascade;`, ); shouldAskForApprove = true; } } } else if (statement.type === 'create_unique_constraint') { const res = await db.query( `select count(*) as count from ${ tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) }`, ); const count = Number(res[0].count); if (count > 0) { const unsquashedUnique = PgSquasher.unsquashUnique(statement.data); console.log( `· You're about to add ${ chalk.underline( unsquashedUnique.name, ) } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database. Do you want to truncate ${ chalk.underline( statement.tableName, ) } table?\n`, ); const { status, data } = await render( new Select(['No, add the constraint without truncating the table', `Yes, truncate the table`]), ); if (data?.index === 1) { tablesToTruncate.push(statement.tableName); statementsToExecute.push( `truncate table ${ tableNameWithSchemaFrom(statement.schema, statement.tableName, renamedSchemas, renamedTables) } cascade;`, ); shouldAskForApprove = true; } } } const stmnt = fromJson([statement], 'postgresql', 'push'); if (typeof stmnt !== 'undefined') { statementsToExecute.push(...stmnt); } } return { statementsToExecute: [...new Set(statementsToExecute)], shouldAskForApprove, infoToPrint, matViewsToRemove: [...new Set(matViewsToRemove)], columnsToRemove: [...new Set(columnsToRemove)], schemasToRemove: [...new Set(schemasToRemove)], tablesToTruncate: [...new Set(tablesToTruncate)], tablesToRemove: [...new Set(tablesToRemove)], }; }; ================================================ FILE: drizzle-kit/src/cli/commands/pgUp.ts ================================================ import chalk from 'chalk'; import { writeFileSync } from 'fs'; import { Column, Index, PgSchema, PgSchemaV4, PgSchemaV5, pgSchemaV5, PgSchemaV6, pgSchemaV6, Table, TableV5, } from '../../serializer/pgSchema'; import { prepareOutFolder, validateWithReport } from '../../utils'; export const upPgHandler = (out: string) => { const { snapshots } = prepareOutFolder(out, 'postgresql'); const report = validateWithReport(snapshots, 'postgresql'); report.nonLatest .map((it) => ({ path: it, raw: report.rawMap[it]!! as Record, })) .forEach((it) => { const path = it.path; let resultV6 = it.raw; if (it.raw.version === '5') { resultV6 = updateUpToV6(it.raw); } const result = updateUpToV7(resultV6); console.log(`[${chalk.green('✓')}] ${path}`); writeFileSync(path, JSON.stringify(result, null, 2)); }); console.log("Everything's fine 🐶🔥"); }; export const updateUpToV6 = (json: Record): PgSchemaV6 => { const schema = pgSchemaV5.parse(json); const tables = Object.fromEntries( Object.entries(schema.tables).map((it) => { const table = it[1]; const schema = table.schema || 'public'; return [`${schema}.${table.name}`, table]; }), ); const enums = Object.fromEntries( Object.entries(schema.enums).map((it) => { const en = it[1]; return [ `public.${en.name}`, { name: en.name, schema: 'public', values: Object.values(en.values), }, ]; }), ); return { ...schema, version: '6', dialect: 'postgresql', tables: tables, enums, }; }; // Changed index format stored in snapshot for PostgreSQL in 0.22.0 export const updateUpToV7 = (json: Record): PgSchema => { const schema = pgSchemaV6.parse(json); const tables = Object.fromEntries( Object.entries(schema.tables).map((it) => { const table = it[1]; const mappedIndexes = Object.fromEntries( Object.entries(table.indexes).map((idx) => { const { columns, ...rest } = idx[1]; const mappedColumns = columns.map((it) => { return { expression: it, isExpression: false, asc: true, nulls: 'last', opClass: undefined, }; }); return [idx[0], { columns: mappedColumns, with: {}, ...rest }]; }), ); return [it[0], { ...table, indexes: mappedIndexes, policies: {}, isRLSEnabled: false, checkConstraints: {} }]; }), ); return { ...schema, version: '7', dialect: 'postgresql', sequences: {}, tables: tables, policies: {}, views: {}, roles: {}, }; }; // major migration with of folder structure, etc... export const upPgHandlerV4toV5 = (obj: PgSchemaV4): PgSchemaV5 => { const mappedTables: Record = {}; for (const [key, table] of Object.entries(obj.tables)) { const mappedColumns: Record = {}; for (const [ckey, column] of Object.entries(table.columns)) { let newDefault: any = column.default; let newType: string = column.type; if (column.type.toLowerCase() === 'date') { if (typeof column.default !== 'undefined') { if (column.default.startsWith("'") && column.default.endsWith("'")) { newDefault = `'${ column.default .substring(1, column.default.length - 1) .split('T')[0] }'`; } else { newDefault = column.default.split('T')[0]; } } } else if (column.type.toLowerCase().startsWith('timestamp')) { if (typeof column.default !== 'undefined') { if (column.default.startsWith("'") && column.default.endsWith("'")) { newDefault = `'${ column.default .substring(1, column.default.length - 1) .replace('T', ' ') .slice(0, 23) }'`; } else { newDefault = column.default.replace('T', ' ').slice(0, 23); } } newType = column.type .toLowerCase() .replace('timestamp (', 'timestamp('); } else if (column.type.toLowerCase().startsWith('time')) { newType = column.type.toLowerCase().replace('time (', 'time('); } else if (column.type.toLowerCase().startsWith('interval')) { newType = column.type.toLowerCase().replace(' (', '('); } mappedColumns[ckey] = { ...column, default: newDefault, type: newType }; } mappedTables[key] = { ...table, columns: mappedColumns, compositePrimaryKeys: {}, uniqueConstraints: {}, }; } return { version: '5', dialect: obj.dialect, id: obj.id, prevId: obj.prevId, tables: mappedTables, enums: obj.enums, schemas: obj.schemas, _meta: { schemas: {} as Record, tables: {} as Record, columns: {} as Record, }, }; }; ================================================ FILE: drizzle-kit/src/cli/commands/push.ts ================================================ import chalk from 'chalk'; import { randomUUID } from 'crypto'; import { render } from 'hanji'; import { serializePg } from 'src/serializer'; import { fromJson } from '../../sqlgenerator'; import { Select } from '../selector-ui'; import { Entities } from '../validations/cli'; import { CasingType } from '../validations/common'; import { LibSQLCredentials } from '../validations/libsql'; import type { MysqlCredentials } from '../validations/mysql'; import { withStyle } from '../validations/outputs'; import type { PostgresCredentials } from '../validations/postgres'; import { SingleStoreCredentials } from '../validations/singlestore'; import type { SqliteCredentials } from '../validations/sqlite'; import { libSqlLogSuggestionsAndReturn } from './libSqlPushUtils'; import { filterStatements as mySqlFilterStatements, logSuggestionsAndReturn as mySqlLogSuggestionsAndReturn, } from './mysqlPushUtils'; import { pgSuggestions } from './pgPushUtils'; import { filterStatements as singleStoreFilterStatements, logSuggestionsAndReturn as singleStoreLogSuggestionsAndReturn, } from './singlestorePushUtils'; import { logSuggestionsAndReturn as sqliteSuggestions } from './sqlitePushUtils'; export const mysqlPush = async ( schemaPath: string | string[], credentials: MysqlCredentials, tablesFilter: string[], strict: boolean, verbose: boolean, force: boolean, casing: CasingType | undefined, ) => { const { connectToMySQL } = await import('../connections'); const { mysqlPushIntrospect } = await import('./mysqlIntrospect'); const { db, database } = await connectToMySQL(credentials); const { schema } = await mysqlPushIntrospect(db, database, tablesFilter); const { prepareMySQLPush } = await import('./migrate'); const statements = await prepareMySQLPush(schemaPath, schema, casing); const filteredStatements = mySqlFilterStatements( statements.statements ?? [], statements.validatedCur, statements.validatedPrev, ); try { if (filteredStatements.length === 0) { render(`[${chalk.blue('i')}] No changes detected`); } else { const { shouldAskForApprove, statementsToExecute, columnsToRemove, tablesToRemove, tablesToTruncate, infoToPrint, } = await mySqlLogSuggestionsAndReturn( db, filteredStatements, statements.validatedCur, ); const filteredSqlStatements = fromJson(filteredStatements, 'mysql'); const uniqueSqlStatementsToExecute: string[] = []; statementsToExecute.forEach((ss) => { if (!uniqueSqlStatementsToExecute.includes(ss)) { uniqueSqlStatementsToExecute.push(ss); } }); const uniqueFilteredSqlStatements: string[] = []; filteredSqlStatements.forEach((ss) => { if (!uniqueFilteredSqlStatements.includes(ss)) { uniqueFilteredSqlStatements.push(ss); } }); if (verbose) { console.log(); console.log( withStyle.warning('You are about to execute current statements:'), ); console.log(); console.log( [...uniqueSqlStatementsToExecute, ...uniqueFilteredSqlStatements] .map((s) => chalk.blue(s)) .join('\n'), ); console.log(); } if (!force && strict) { if (!shouldAskForApprove) { const { status, data } = await render( new Select(['No, abort', `Yes, I want to execute all statements`]), ); if (data?.index === 0) { render(`[${chalk.red('x')}] All changes were aborted`); process.exit(0); } } } if (!force && shouldAskForApprove) { console.log(withStyle.warning('Found data-loss statements:')); console.log(infoToPrint.join('\n')); console.log(); console.log( chalk.red.bold( 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', ), ); console.log(chalk.white('Do you still want to push changes?')); const { status, data } = await render( new Select([ 'No, abort', `Yes, I want to${ tablesToRemove.length > 0 ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` : ' ' }${ columnsToRemove.length > 0 ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` : ' ' }${ tablesToTruncate.length > 0 ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` : '' }` .replace(/(^,)|(,$)/g, '') .replace(/ +(?= )/g, ''), ]), ); if (data?.index === 0) { render(`[${chalk.red('x')}] All changes were aborted`); process.exit(0); } } for (const dStmnt of uniqueSqlStatementsToExecute) { await db.query(dStmnt); } for (const statement of uniqueFilteredSqlStatements) { await db.query(statement); } if (filteredStatements.length > 0) { render(`[${chalk.green('✓')}] Changes applied`); } else { render(`[${chalk.blue('i')}] No changes detected`); } } } catch (e) { console.log(e); } }; export const singlestorePush = async ( schemaPath: string | string[], credentials: SingleStoreCredentials, tablesFilter: string[], strict: boolean, verbose: boolean, force: boolean, casing: CasingType | undefined, ) => { const { connectToSingleStore } = await import('../connections'); const { singlestorePushIntrospect } = await import('./singlestoreIntrospect'); const { db, database } = await connectToSingleStore(credentials); const { schema } = await singlestorePushIntrospect( db, database, tablesFilter, ); const { prepareSingleStorePush } = await import('./migrate'); const statements = await prepareSingleStorePush(schemaPath, schema, casing); const filteredStatements = singleStoreFilterStatements( statements.statements ?? [], statements.validatedCur, statements.validatedPrev, ); try { if (filteredStatements.length === 0) { render(`[${chalk.blue('i')}] No changes detected`); } else { const { shouldAskForApprove, statementsToExecute, columnsToRemove, tablesToRemove, tablesToTruncate, infoToPrint, schemasToRemove, } = await singleStoreLogSuggestionsAndReturn( db, filteredStatements, statements.validatedCur, statements.validatedPrev, ); if (verbose) { console.log(); console.log( withStyle.warning('You are about to execute current statements:'), ); console.log(); console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); console.log(); } if (!force && strict) { if (!shouldAskForApprove) { const { status, data } = await render( new Select(['No, abort', `Yes, I want to execute all statements`]), ); if (data?.index === 0) { render(`[${chalk.red('x')}] All changes were aborted`); process.exit(0); } } } if (!force && shouldAskForApprove) { console.log(withStyle.warning('Found data-loss statements:')); console.log(infoToPrint.join('\n')); console.log(); console.log( chalk.red.bold( 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', ), ); console.log(chalk.white('Do you still want to push changes?')); const { status, data } = await render( new Select([ 'No, abort', `Yes, I want to${ tablesToRemove.length > 0 ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` : ' ' }${ columnsToRemove.length > 0 ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` : ' ' }${ tablesToTruncate.length > 0 ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` : '' }` .replace(/(^,)|(,$)/g, '') .replace(/ +(?= )/g, ''), ]), ); if (data?.index === 0) { render(`[${chalk.red('x')}] All changes were aborted`); process.exit(0); } } for (const dStmnt of statementsToExecute) { await db.query(dStmnt); } if (filteredStatements.length > 0) { render(`[${chalk.green('✓')}] Changes applied`); } else { render(`[${chalk.blue('i')}] No changes detected`); } } } catch (e) { console.log(e); } }; export const pgPush = async ( schemaPath: string | string[], verbose: boolean, strict: boolean, credentials: PostgresCredentials, tablesFilter: string[], schemasFilter: string[], entities: Entities, force: boolean, casing: CasingType | undefined, ) => { const { preparePostgresDB } = await import('../connections'); const { pgPushIntrospect } = await import('./pgIntrospect'); const db = await preparePostgresDB(credentials); const serialized = await serializePg(schemaPath, casing, schemasFilter); const { schema } = await pgPushIntrospect(db, tablesFilter, schemasFilter, entities, serialized); const { preparePgPush } = await import('./migrate'); const statements = await preparePgPush( { id: randomUUID(), prevId: schema.id, ...serialized }, schema, ); try { if (statements.sqlStatements.length === 0) { render(`[${chalk.blue('i')}] No changes detected`); } else { // const filteredStatements = filterStatements(statements.statements); const { shouldAskForApprove, statementsToExecute, columnsToRemove, tablesToRemove, matViewsToRemove, tablesToTruncate, infoToPrint, schemasToRemove, } = await pgSuggestions(db, statements.statements); if (verbose) { console.log(); // console.log(chalk.gray('Verbose logs:')); console.log( withStyle.warning('You are about to execute current statements:'), ); console.log(); console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); console.log(); } if (!force && strict) { if (!shouldAskForApprove) { const { status, data } = await render( new Select(['No, abort', `Yes, I want to execute all statements`]), ); if (data?.index === 0) { render(`[${chalk.red('x')}] All changes were aborted`); process.exit(0); } } } if (!force && shouldAskForApprove) { console.log(withStyle.warning('Found data-loss statements:')); console.log(infoToPrint.join('\n')); console.log(); console.log( chalk.red.bold( 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', ), ); console.log(chalk.white('Do you still want to push changes?')); const { status, data } = await render( new Select([ 'No, abort', `Yes, I want to${ tablesToRemove.length > 0 ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` : ' ' }${ columnsToRemove.length > 0 ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` : ' ' }${ tablesToTruncate.length > 0 ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` : '' }${ matViewsToRemove.length > 0 ? ` remove ${matViewsToRemove.length} ${ matViewsToRemove.length > 1 ? 'materialized views' : 'materialize view' },` : ' ' }` .replace(/(^,)|(,$)/g, '') .replace(/ +(?= )/g, ''), ]), ); if (data?.index === 0) { render(`[${chalk.red('x')}] All changes were aborted`); process.exit(0); } } for (const dStmnt of statementsToExecute) { await db.query(dStmnt); } if (statements.statements.length > 0) { render(`[${chalk.green('✓')}] Changes applied`); } else { render(`[${chalk.blue('i')}] No changes detected`); } } } catch (e) { console.error(e); } }; export const sqlitePush = async ( schemaPath: string | string[], verbose: boolean, strict: boolean, credentials: SqliteCredentials, tablesFilter: string[], force: boolean, casing: CasingType | undefined, ) => { const { connectToSQLite } = await import('../connections'); const { sqlitePushIntrospect } = await import('./sqliteIntrospect'); const db = await connectToSQLite(credentials); const { schema } = await sqlitePushIntrospect(db, tablesFilter); const { prepareSQLitePush } = await import('./migrate'); const statements = await prepareSQLitePush(schemaPath, schema, casing); if (statements.sqlStatements.length === 0) { render(`\n[${chalk.blue('i')}] No changes detected`); } else { const { shouldAskForApprove, statementsToExecute, columnsToRemove, tablesToRemove, tablesToTruncate, infoToPrint, schemasToRemove, } = await sqliteSuggestions( db, statements.statements, statements.squashedPrev, statements.squashedCur, statements.meta!, ); if (verbose && statementsToExecute.length > 0) { console.log(); console.log( withStyle.warning('You are about to execute current statements:'), ); console.log(); console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); console.log(); } if (!force && strict) { if (!shouldAskForApprove) { const { status, data } = await render( new Select(['No, abort', `Yes, I want to execute all statements`]), ); if (data?.index === 0) { render(`[${chalk.red('x')}] All changes were aborted`); process.exit(0); } } } if (!force && shouldAskForApprove) { console.log(withStyle.warning('Found data-loss statements:')); console.log(infoToPrint.join('\n')); console.log(); console.log( chalk.red.bold( 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', ), ); console.log(chalk.white('Do you still want to push changes?')); const { status, data } = await render( new Select([ 'No, abort', `Yes, I want to${ tablesToRemove.length > 0 ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` : ' ' }${ columnsToRemove.length > 0 ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` : ' ' }${ tablesToTruncate.length > 0 ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` : '' }` .trimEnd() .replace(/(^,)|(,$)/g, '') .replace(/ +(?= )/g, ''), ]), ); if (data?.index === 0) { render(`[${chalk.red('x')}] All changes were aborted`); process.exit(0); } } if (statementsToExecute.length === 0) { render(`\n[${chalk.blue('i')}] No changes detected`); } else { // D1-HTTP does not support transactions // there might a be a better way to fix this // in the db connection itself const isNotD1 = !('driver' in credentials && credentials.driver === 'd1-http'); isNotD1 ?? await db.run('begin'); try { for (const dStmnt of statementsToExecute) { await db.run(dStmnt); } isNotD1 ?? await db.run('commit'); } catch (e) { console.error(e); isNotD1 ?? await db.run('rollback'); process.exit(1); } render(`[${chalk.green('✓')}] Changes applied`); } } }; export const libSQLPush = async ( schemaPath: string | string[], verbose: boolean, strict: boolean, credentials: LibSQLCredentials, tablesFilter: string[], force: boolean, casing: CasingType | undefined, ) => { const { connectToLibSQL } = await import('../connections'); const { sqlitePushIntrospect } = await import('./sqliteIntrospect'); const db = await connectToLibSQL(credentials); const { schema } = await sqlitePushIntrospect(db, tablesFilter); const { prepareLibSQLPush } = await import('./migrate'); const statements = await prepareLibSQLPush(schemaPath, schema, casing); if (statements.sqlStatements.length === 0) { render(`\n[${chalk.blue('i')}] No changes detected`); } else { const { shouldAskForApprove, statementsToExecute, columnsToRemove, tablesToRemove, tablesToTruncate, infoToPrint, } = await libSqlLogSuggestionsAndReturn( db, statements.statements, statements.squashedPrev, statements.squashedCur, statements.meta!, ); if (verbose && statementsToExecute.length > 0) { console.log(); console.log( withStyle.warning('You are about to execute current statements:'), ); console.log(); console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); console.log(); } if (!force && strict) { if (!shouldAskForApprove) { const { status, data } = await render( new Select(['No, abort', `Yes, I want to execute all statements`]), ); if (data?.index === 0) { render(`[${chalk.red('x')}] All changes were aborted`); process.exit(0); } } } if (!force && shouldAskForApprove) { console.log(withStyle.warning('Found data-loss statements:')); console.log(infoToPrint.join('\n')); console.log(); console.log( chalk.red.bold( 'THIS ACTION WILL CAUSE DATA LOSS AND CANNOT BE REVERTED\n', ), ); console.log(chalk.white('Do you still want to push changes?')); const { status, data } = await render( new Select([ 'No, abort', `Yes, I want to${ tablesToRemove.length > 0 ? ` remove ${tablesToRemove.length} ${tablesToRemove.length > 1 ? 'tables' : 'table'},` : ' ' }${ columnsToRemove.length > 0 ? ` remove ${columnsToRemove.length} ${columnsToRemove.length > 1 ? 'columns' : 'column'},` : ' ' }${ tablesToTruncate.length > 0 ? ` truncate ${tablesToTruncate.length} ${tablesToTruncate.length > 1 ? 'tables' : 'table'}` : '' }` .trimEnd() .replace(/(^,)|(,$)/g, '') .replace(/ +(?= )/g, ''), ]), ); if (data?.index === 0) { render(`[${chalk.red('x')}] All changes were aborted`); process.exit(0); } } if (statementsToExecute.length === 0) { render(`\n[${chalk.blue('i')}] No changes detected`); } else { await db.batchWithPragma!(statementsToExecute); render(`[${chalk.green('✓')}] Changes applied`); } } }; ================================================ FILE: drizzle-kit/src/cli/commands/singlestoreIntrospect.ts ================================================ import { renderWithTask } from 'hanji'; import { Minimatch } from 'minimatch'; import { originUUID } from '../../global'; import type { SingleStoreSchema } from '../../serializer/singlestoreSchema'; import { fromDatabase } from '../../serializer/singlestoreSerializer'; import type { DB } from '../../utils'; import { ProgressView } from '../views'; export const singlestorePushIntrospect = async ( db: DB, databaseName: string, filters: string[], ) => { const matchers = filters.map((it) => { return new Minimatch(it); }); const filter = (tableName: string) => { if (matchers.length === 0) return true; let flags: boolean[] = []; for (let matcher of matchers) { if (matcher.negate) { if (!matcher.match(tableName)) { flags.push(false); } } if (matcher.match(tableName)) { flags.push(true); } } if (flags.length > 0) { return flags.every(Boolean); } return false; }; const progress = new ProgressView( 'Pulling schema from database...', 'Pulling schema from database...', ); const res = await renderWithTask( progress, fromDatabase(db, databaseName, filter), ); const schema = { id: originUUID, prevId: '', ...res } as SingleStoreSchema; const { internal, ...schemaWithoutInternals } = schema; return { schema: schemaWithoutInternals }; }; ================================================ FILE: drizzle-kit/src/cli/commands/singlestorePushUtils.ts ================================================ import chalk from 'chalk'; import { render } from 'hanji'; import { fromJson } from 'src/sqlgenerator'; import { TypeOf } from 'zod'; import { JsonAlterColumnTypeStatement, JsonStatement } from '../../jsonStatements'; import { Column, SingleStoreSchemaSquashed, SingleStoreSquasher } from '../../serializer/singlestoreSchema'; import { singlestoreSchema } from '../../serializer/singlestoreSchema'; import { type DB, findAddedAndRemoved } from '../../utils'; import { Select } from '../selector-ui'; import { withStyle } from '../validations/outputs'; export const filterStatements = ( statements: JsonStatement[], currentSchema: TypeOf, prevSchema: TypeOf, ) => { return statements.filter((statement) => { if (statement.type === 'alter_table_alter_column_set_type') { // Don't need to handle it on migrations step and introspection // but for both it should be skipped if ( statement.oldDataType.startsWith('tinyint') && statement.newDataType.startsWith('boolean') ) { return false; } if ( statement.oldDataType.startsWith('bigint unsigned') && statement.newDataType.startsWith('serial') ) { return false; } if ( statement.oldDataType.startsWith('serial') && statement.newDataType.startsWith('bigint unsigned') ) { return false; } } else if (statement.type === 'alter_table_alter_column_set_default') { if ( statement.newDefaultValue === false && statement.oldDefaultValue === 0 && statement.newDataType === 'boolean' ) { return false; } if ( statement.newDefaultValue === true && statement.oldDefaultValue === 1 && statement.newDataType === 'boolean' ) { return false; } } else if (statement.type === 'delete_unique_constraint') { const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); // only if constraint was removed from a serial column, than treat it as removed // const serialStatement = statements.find( // (it) => it.type === "alter_table_alter_column_set_type" // ) as JsonAlterColumnTypeStatement; // if ( // serialStatement?.oldDataType.startsWith("bigint unsigned") && // serialStatement?.newDataType.startsWith("serial") && // serialStatement.columnName === // SingleStoreSquasher.unsquashUnique(statement.data).columns[0] // ) { // return false; // } // Check if uniqueindex was only on this column, that is serial // if now serial and was not serial and was unique index if ( unsquashed.columns.length === 1 && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] .type === 'serial' && prevSchema.tables[statement.tableName].columns[unsquashed.columns[0]] .type === 'serial' && currentSchema.tables[statement.tableName].columns[unsquashed.columns[0]] .name === unsquashed.columns[0] ) { return false; } } else if (statement.type === 'alter_table_alter_column_drop_notnull') { // only if constraint was removed from a serial column, than treat it as removed const serialStatement = statements.find( (it) => it.type === 'alter_table_alter_column_set_type', ) as JsonAlterColumnTypeStatement; if ( serialStatement?.oldDataType.startsWith('bigint unsigned') && serialStatement?.newDataType.startsWith('serial') && serialStatement.columnName === statement.columnName && serialStatement.tableName === statement.tableName ) { return false; } if (statement.newDataType === 'serial' && !statement.columnNotNull) { return false; } if (statement.columnAutoIncrement) { return false; } } return true; }); }; export function findColumnTypeAlternations( columns1: Record, columns2: Record, ): string[] { const changes: string[] = []; for (const key in columns1) { if (columns1.hasOwnProperty(key) && columns2.hasOwnProperty(key)) { const col1 = columns1[key]; const col2 = columns2[key]; if (col1.type !== col2.type) { changes.push(col2.name); } } } return changes; } export const logSuggestionsAndReturn = async ( db: DB, statements: JsonStatement[], json2: TypeOf, json1: TypeOf, ) => { let shouldAskForApprove = false; const statementsToExecute: string[] = []; const infoToPrint: string[] = []; const tablesToRemove: string[] = []; const columnsToRemove: string[] = []; const schemasToRemove: string[] = []; const tablesToTruncate: string[] = []; for (const statement of statements) { if (statement.type === 'drop_table') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( statement.tableName, ) } table with ${count} items`, ); tablesToRemove.push(statement.tableName); shouldAskForApprove = true; } } else if (statement.type === 'alter_table_drop_column') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( statement.columnName, ) } column in ${statement.tableName} table with ${count} items`, ); columnsToRemove.push(`${statement.tableName}_${statement.columnName}`); shouldAskForApprove = true; } } else if (statement.type === 'drop_schema') { const res = await db.query( `select count(*) as count from information_schema.tables where table_schema = \`${statement.name}\`;`, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( statement.name, ) } schema with ${count} tables`, ); schemasToRemove.push(statement.name); shouldAskForApprove = true; } } else if (statement.type === 'alter_table_alter_column_set_type') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to change ${ chalk.underline( statement.columnName, ) } column type from ${ chalk.underline( statement.oldDataType, ) } to ${chalk.underline(statement.newDataType)} with ${count} items`, ); statementsToExecute.push(`truncate table ${statement.tableName};`); tablesToTruncate.push(statement.tableName); shouldAskForApprove = true; } } else if (statement.type === 'alter_table_alter_column_drop_default') { if (statement.columnNotNull) { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to remove default value from ${ chalk.underline( statement.columnName, ) } not-null column with ${count} items`, ); tablesToTruncate.push(statement.tableName); statementsToExecute.push(`truncate table ${statement.tableName};`); shouldAskForApprove = true; } } // shouldAskForApprove = true; } else if (statement.type === 'alter_table_alter_column_set_notnull') { if (typeof statement.columnDefault === 'undefined') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to set not-null constraint to ${ chalk.underline( statement.columnName, ) } column without default, which contains ${count} items`, ); tablesToTruncate.push(statement.tableName); statementsToExecute.push(`truncate table ${statement.tableName};`); shouldAskForApprove = true; } } } else if (statement.type === 'alter_table_alter_column_drop_pk') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); // if drop pk and json2 has autoincrement in table -> exit process with error if ( Object.values(json2.tables[statement.tableName].columns).filter( (column) => column.autoincrement, ).length > 0 ) { console.log( `${ withStyle.errorWarning( `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, ) }`, ); process.exit(1); } const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to change ${ chalk.underline( statement.tableName, ) } primary key. This statements may fail and you table may left without primary key`, ); tablesToTruncate.push(statement.tableName); shouldAskForApprove = true; } } else if (statement.type === 'delete_composite_pk') { // if drop pk and json2 has autoincrement in table -> exit process with error if ( Object.values(json2.tables[statement.tableName].columns).filter( (column) => column.autoincrement, ).length > 0 ) { console.log( `${ withStyle.errorWarning( `You have removed the primary key from a ${statement.tableName} table without removing the auto-increment property from this table. As the database error states: 'there can be only one auto column, and it must be defined as a key. Make sure to remove autoincrement from ${statement.tableName} table`, ) }`, ); process.exit(1); } } else if (statement.type === 'alter_table_add_column') { if ( statement.column.notNull && typeof statement.column.default === 'undefined' ) { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to add not-null ${ chalk.underline( statement.column.name, ) } column without default value, which contains ${count} items`, ); tablesToTruncate.push(statement.tableName); statementsToExecute.push(`truncate table ${statement.tableName};`); shouldAskForApprove = true; } } } else if (statement.type === 'create_unique_constraint') { const res = await db.query( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { const unsquashedUnique = SingleStoreSquasher.unsquashUnique(statement.data); console.log( `· You're about to add ${ chalk.underline( unsquashedUnique.name, ) } unique constraint to the table, which contains ${count} items. If this statement fails, you will receive an error from the database. Do you want to truncate ${ chalk.underline( statement.tableName, ) } table?\n`, ); const { status, data } = await render( new Select([ 'No, add the constraint without truncating the table', `Yes, truncate the table`, ]), ); if (data?.index === 1) { tablesToTruncate.push(statement.tableName); statementsToExecute.push(`truncate table ${statement.tableName};`); shouldAskForApprove = true; } } } else if (statement.type === 'singlestore_recreate_table') { const tableName = statement.tableName; const prevColumns = json1.tables[tableName].columns; const currentColumns = json2.tables[tableName].columns; const { removedColumns, addedColumns } = findAddedAndRemoved( Object.keys(prevColumns), Object.keys(currentColumns), ); if (removedColumns.length) { for (const removedColumn of removedColumns) { const res = await db.query<{ count: string }>( `select count(\`${tableName}\`.\`${removedColumn}\`) as count from \`${tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( removedColumn, ) } column in ${tableName} table with ${count} items`, ); columnsToRemove.push(removedColumn); shouldAskForApprove = true; } } } if (addedColumns.length) { for (const addedColumn of addedColumns) { const [res] = await db.query<{ count: string }>( `select count(*) as count from \`${tableName}\``, ); const columnConf = json2.tables[tableName].columns[addedColumn]; const count = Number(res.count); if (count > 0 && columnConf.notNull && !columnConf.default) { infoToPrint.push( `· You're about to add not-null ${ chalk.underline( addedColumn, ) } column without default value to table, which contains ${count} items`, ); shouldAskForApprove = true; tablesToTruncate.push(tableName); statementsToExecute.push(`TRUNCATE TABLE \`${tableName}\`;`); } } } const columnWithChangedType = findColumnTypeAlternations(prevColumns, currentColumns); for (const column of columnWithChangedType) { const [res] = await db.query<{ count: string }>( `select count(*) as count from \`${tableName}\` WHERE \`${tableName}\`.\`${column}\` IS NOT NULL;`, ); const count = Number(res.count); if (count > 0) { infoToPrint.push( `· You're about recreate ${chalk.underline(tableName)} table with data type changing for ${ chalk.underline( column, ) } column, which contains ${count} items`, ); shouldAskForApprove = true; tablesToTruncate.push(tableName); statementsToExecute.push(`TRUNCATE TABLE \`${tableName}\`;`); } } } const stmnt = fromJson([statement], 'singlestore', 'push'); if (typeof stmnt !== 'undefined') { statementsToExecute.push(...stmnt); } } return { statementsToExecute, shouldAskForApprove, infoToPrint, columnsToRemove: [...new Set(columnsToRemove)], schemasToRemove: [...new Set(schemasToRemove)], tablesToTruncate: [...new Set(tablesToTruncate)], tablesToRemove: [...new Set(tablesToRemove)], }; }; ================================================ FILE: drizzle-kit/src/cli/commands/singlestoreUp.ts ================================================ export const upSinglestoreHandler = (out: string) => {}; ================================================ FILE: drizzle-kit/src/cli/commands/sqliteIntrospect.ts ================================================ import { renderWithTask } from 'hanji'; import { Minimatch } from 'minimatch'; import { originUUID } from '../../global'; import { schemaToTypeScript } from '../../introspect-sqlite'; import type { SQLiteSchema } from '../../serializer/sqliteSchema'; import { fromDatabase } from '../../serializer/sqliteSerializer'; import type { SQLiteDB } from '../../utils'; import type { Casing } from '../validations/common'; import type { SqliteCredentials } from '../validations/sqlite'; import { IntrospectProgress, ProgressView } from '../views'; export const sqliteIntrospect = async ( credentials: SqliteCredentials, filters: string[], casing: Casing, ) => { const { connectToSQLite } = await import('../connections'); const db = await connectToSQLite(credentials); const matchers = filters.map((it) => { return new Minimatch(it); }); const filter = (tableName: string) => { if (matchers.length === 0) return true; let flags: boolean[] = []; for (let matcher of matchers) { if (matcher.negate) { if (!matcher.match(tableName)) { flags.push(false); } } if (matcher.match(tableName)) { flags.push(true); } } if (flags.length > 0) { return flags.every(Boolean); } return false; }; const progress = new IntrospectProgress(); const res = await renderWithTask( progress, fromDatabase(db, filter, (stage, count, status) => { progress.update(stage, count, status); }), ); const schema = { id: originUUID, prevId: '', ...res } as SQLiteSchema; const ts = schemaToTypeScript(schema, casing); return { schema, ts }; }; export const sqlitePushIntrospect = async (db: SQLiteDB, filters: string[]) => { const matchers = filters.map((it) => { return new Minimatch(it); }); const filter = (tableName: string) => { if (matchers.length === 0) return true; let flags: boolean[] = []; for (let matcher of matchers) { if (matcher.negate) { if (!matcher.match(tableName)) { flags.push(false); } } if (matcher.match(tableName)) { flags.push(true); } } if (flags.length > 0) { return flags.every(Boolean); } return false; }; const progress = new ProgressView( 'Pulling schema from database...', 'Pulling schema from database...', ); const res = await renderWithTask(progress, fromDatabase(db, filter)); const schema = { id: originUUID, prevId: '', ...res } as SQLiteSchema; return { schema }; }; ================================================ FILE: drizzle-kit/src/cli/commands/sqlitePushUtils.ts ================================================ import chalk from 'chalk'; import { SQLiteSchemaInternal, SQLiteSchemaSquashed, SQLiteSquasher } from '../../serializer/sqliteSchema'; import { CreateSqliteIndexConvertor, fromJson, SQLiteCreateTableConvertor, SQLiteDropTableConvertor, SqliteRenameTableConvertor, } from '../../sqlgenerator'; import type { JsonStatement } from '../../jsonStatements'; import { findAddedAndRemoved, type SQLiteDB } from '../../utils'; export const _moveDataStatements = ( tableName: string, json: SQLiteSchemaSquashed, dataLoss: boolean = false, ) => { const statements: string[] = []; const newTableName = `__new_${tableName}`; // create table statement from a new json2 with proper name const tableColumns = Object.values(json.tables[tableName].columns); const referenceData = Object.values(json.tables[tableName].foreignKeys); const compositePKs = Object.values( json.tables[tableName].compositePrimaryKeys, ).map((it) => SQLiteSquasher.unsquashPK(it)); const checkConstraints = Object.values(json.tables[tableName].checkConstraints); const mappedCheckConstraints: string[] = checkConstraints.map((it) => it.replaceAll(`"${tableName}".`, `"${newTableName}".`) .replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) .replaceAll(`${tableName}.`, `${newTableName}.`) .replaceAll(`'${tableName}'.`, `\`${newTableName}\`.`) ); const fks = referenceData.map((it) => SQLiteSquasher.unsquashPushFK(it)); // create new table statements.push( new SQLiteCreateTableConvertor().convert({ type: 'sqlite_create_table', tableName: newTableName, columns: tableColumns, referenceData: fks, compositePKs, checkConstraints: mappedCheckConstraints, }), ); // move data if (!dataLoss) { const columns = Object.keys(json.tables[tableName].columns).map( (c) => `"${c}"`, ); statements.push( `INSERT INTO \`${newTableName}\`(${ columns.join( ', ', ) }) SELECT ${columns.join(', ')} FROM \`${tableName}\`;`, ); } statements.push( new SQLiteDropTableConvertor().convert({ type: 'drop_table', tableName: tableName, schema: '', }), ); // rename table statements.push( new SqliteRenameTableConvertor().convert({ fromSchema: '', tableNameFrom: newTableName, tableNameTo: tableName, toSchema: '', type: 'rename_table', }), ); for (const idx of Object.values(json.tables[tableName].indexes)) { statements.push( new CreateSqliteIndexConvertor().convert({ type: 'create_index', tableName: tableName, schema: '', data: idx, }), ); } return statements; }; export const getOldTableName = ( tableName: string, meta: SQLiteSchemaInternal['_meta'], ) => { for (const key of Object.keys(meta.tables)) { const value = meta.tables[key]; if (`"${tableName}"` === value) { return key.substring(1, key.length - 1); } } return tableName; }; export const getNewTableName = ( tableName: string, meta: SQLiteSchemaInternal['_meta'], ) => { if (typeof meta.tables[`"${tableName}"`] !== 'undefined') { return meta.tables[`"${tableName}"`].substring( 1, meta.tables[`"${tableName}"`].length - 1, ); } return tableName; }; export const logSuggestionsAndReturn = async ( connection: SQLiteDB, statements: JsonStatement[], json1: SQLiteSchemaSquashed, json2: SQLiteSchemaSquashed, meta: SQLiteSchemaInternal['_meta'], ) => { let shouldAskForApprove = false; const statementsToExecute: string[] = []; const infoToPrint: string[] = []; const tablesToRemove: string[] = []; const columnsToRemove: string[] = []; const schemasToRemove: string[] = []; const tablesToTruncate: string[] = []; for (const statement of statements) { if (statement.type === 'drop_table') { const res = await connection.query<{ count: string }>( `select count(*) as count from \`${statement.tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( statement.tableName, ) } table with ${count} items`, ); tablesToRemove.push(statement.tableName); shouldAskForApprove = true; } const fromJsonStatement = fromJson([statement], 'sqlite', 'push'); statementsToExecute.push( ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), ); } else if (statement.type === 'alter_table_drop_column') { const tableName = statement.tableName; const columnName = statement.columnName; const res = await connection.query<{ count: string }>( `select count(\`${tableName}\`.\`${columnName}\`) as count from \`${tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( columnName, ) } column in ${tableName} table with ${count} items`, ); columnsToRemove.push(`${tableName}_${statement.columnName}`); shouldAskForApprove = true; } const fromJsonStatement = fromJson([statement], 'sqlite', 'push'); statementsToExecute.push( ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), ); } else if ( statement.type === 'sqlite_alter_table_add_column' && (statement.column.notNull && !statement.column.default) ) { const tableName = statement.tableName; const columnName = statement.column.name; const res = await connection.query<{ count: string }>( `select count(*) as count from \`${tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to add not-null ${ chalk.underline( columnName, ) } column without default value, which contains ${count} items`, ); tablesToTruncate.push(tableName); statementsToExecute.push(`delete from ${tableName};`); shouldAskForApprove = true; } const fromJsonStatement = fromJson([statement], 'sqlite', 'push'); statementsToExecute.push( ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), ); } else if (statement.type === 'recreate_table') { const tableName = statement.tableName; const oldTableName = getOldTableName(tableName, meta); let dataLoss = false; const prevColumnNames = Object.keys(json1.tables[oldTableName].columns); const currentColumnNames = Object.keys(json2.tables[tableName].columns); const { removedColumns, addedColumns } = findAddedAndRemoved( prevColumnNames, currentColumnNames, ); if (removedColumns.length) { for (const removedColumn of removedColumns) { const res = await connection.query<{ count: string }>( `select count(\`${tableName}\`.\`${removedColumn}\`) as count from \`${tableName}\``, ); const count = Number(res[0].count); if (count > 0) { infoToPrint.push( `· You're about to delete ${ chalk.underline( removedColumn, ) } column in ${tableName} table with ${count} items`, ); columnsToRemove.push(removedColumn); shouldAskForApprove = true; } } } if (addedColumns.length) { for (const addedColumn of addedColumns) { const [res] = await connection.query<{ count: string }>( `select count(*) as count from \`${tableName}\``, ); const columnConf = json2.tables[tableName].columns[addedColumn]; const count = Number(res.count); if (count > 0 && columnConf.notNull && !columnConf.default) { dataLoss = true; infoToPrint.push( `· You're about to add not-null ${ chalk.underline( addedColumn, ) } column without default value to table, which contains ${count} items`, ); shouldAskForApprove = true; tablesToTruncate.push(tableName); statementsToExecute.push(`DELETE FROM \`${tableName}\`;`); } } } // check if some tables referencing current for pragma const tablesReferencingCurrent: string[] = []; for (const table of Object.values(json2.tables)) { const tablesRefs = Object.values(json2.tables[table.name].foreignKeys) .filter((t) => SQLiteSquasher.unsquashPushFK(t).tableTo === tableName) .map((it) => SQLiteSquasher.unsquashPushFK(it).tableFrom); tablesReferencingCurrent.push(...tablesRefs); } if (!tablesReferencingCurrent.length) { statementsToExecute.push(..._moveDataStatements(tableName, json2, dataLoss)); continue; } const [{ foreign_keys: pragmaState }] = await connection.query<{ foreign_keys: number; }>(`PRAGMA foreign_keys;`); if (pragmaState) { statementsToExecute.push(`PRAGMA foreign_keys=OFF;`); } statementsToExecute.push(..._moveDataStatements(tableName, json2, dataLoss)); if (pragmaState) { statementsToExecute.push(`PRAGMA foreign_keys=ON;`); } } else { const fromJsonStatement = fromJson([statement], 'sqlite', 'push'); statementsToExecute.push( ...(Array.isArray(fromJsonStatement) ? fromJsonStatement : [fromJsonStatement]), ); } } return { statementsToExecute, shouldAskForApprove, infoToPrint, columnsToRemove: [...new Set(columnsToRemove)], schemasToRemove: [...new Set(schemasToRemove)], tablesToTruncate: [...new Set(tablesToTruncate)], tablesToRemove: [...new Set(tablesToRemove)], }; }; ================================================ FILE: drizzle-kit/src/cli/commands/sqliteUp.ts ================================================ import chalk from 'chalk'; import { writeFileSync } from 'fs'; import { mapEntries } from 'src/global'; import { SQLiteSchema, sqliteSchemaV5 } from 'src/serializer/sqliteSchema'; import { prepareOutFolder, validateWithReport } from 'src/utils'; export const upSqliteHandler = (out: string) => { const { snapshots } = prepareOutFolder(out, 'sqlite'); const report = validateWithReport(snapshots, 'sqlite'); report.nonLatest .map((it) => ({ path: it, raw: report.rawMap[it]!! as Record, })) .forEach((it) => { const path = it.path; const result = updateUpToV6(it.raw); console.log(`[${chalk.green('✓')}] ${path}`); writeFileSync(path, JSON.stringify(result, null, 2)); }); console.log("Everything's fine 🐶🔥"); }; const updateUpToV6 = (json: Record): SQLiteSchema => { const schema = sqliteSchemaV5.parse(json); const tables = mapEntries(schema.tables, (tableKey, table) => { const columns = mapEntries(table.columns, (key, value) => { if ( value.default && (typeof value.default === 'object' || Array.isArray(value.default)) ) { value.default = `'${JSON.stringify(value.default)}'`; } return [key, value]; }); table.columns = columns; return [tableKey, table]; }); return { ...schema, version: '6', dialect: 'sqlite', tables: tables, views: {}, }; }; ================================================ FILE: drizzle-kit/src/cli/commands/utils.ts ================================================ import chalk from 'chalk'; import { existsSync } from 'fs'; import { render } from 'hanji'; import { join, resolve } from 'path'; import { object, string } from 'zod'; import { getTablesFilterByExtensions } from '../../extensions/getTablesFilterByExtensions'; import { assertUnreachable } from '../../global'; import { type Dialect, dialect } from '../../schemaValidator'; import { prepareFilenames } from '../../serializer'; import type { Entities } from '../validations/cli'; import { pullParams, pushParams } from '../validations/cli'; import type { Casing, CasingType, CliConfig, Driver, Prefix } from '../validations/common'; import { configCommonSchema, configMigrations, wrapParam } from '../validations/common'; import type { GelCredentials } from '../validations/gel'; import { gelCredentials, printConfigConnectionIssues as printIssuesGel } from '../validations/gel'; import type { LibSQLCredentials } from '../validations/libsql'; import { libSQLCredentials, printConfigConnectionIssues as printIssuesLibSQL } from '../validations/libsql'; import type { MysqlCredentials } from '../validations/mysql'; import { mysqlCredentials, printConfigConnectionIssues as printIssuesMysql } from '../validations/mysql'; import { outputs } from '../validations/outputs'; import type { PostgresCredentials } from '../validations/postgres'; import { postgresCredentials, printConfigConnectionIssues as printIssuesPg } from '../validations/postgres'; import type { SingleStoreCredentials } from '../validations/singlestore'; import { printConfigConnectionIssues as printIssuesSingleStore, singlestoreCredentials, } from '../validations/singlestore'; import type { SqliteCredentials } from '../validations/sqlite'; import { printConfigConnectionIssues as printIssuesSqlite, sqliteCredentials } from '../validations/sqlite'; import { studioCliParams, studioConfig } from '../validations/studio'; import { error } from '../views'; // NextJs default config is target: es5, which esbuild-register can't consume const assertES5 = async () => { try { await import('./_es5'); } catch (e: any) { if ('errors' in e && Array.isArray(e.errors) && e.errors.length > 0) { const es5Error = (e.errors as any[]).filter((it) => it.text?.includes(`("es5") is not supported yet`)).length > 0; if (es5Error) { console.log( error( `Please change compilerOptions.target from 'es5' to 'es6' or above in your tsconfig.json`, ), ); process.exit(1); } } console.error(e); process.exit(1); } }; export class InMemoryMutex { private lockPromise: Promise | null = null; async withLock(fn: () => Promise): Promise { // Wait for any existing lock while (this.lockPromise) { await this.lockPromise; } let resolveLock: (() => void) | undefined; this.lockPromise = new Promise((resolve) => { resolveLock = resolve; }); try { return await fn(); } finally { this.lockPromise = null; resolveLock!(); // non-null assertion: TS now knows it's definitely assigned } } } const registerMutex = new InMemoryMutex(); let tsxRegistered = false; const ensureTsxRegistered = () => { if (tsxRegistered) return; const isBun = typeof (globalThis as any).Bun !== 'undefined'; const isDeno = typeof (globalThis as any).Deno !== 'undefined'; if (isBun || isDeno) { tsxRegistered = true; return; } const tsx = require('tsx/cjs/api'); tsx.register(); tsxRegistered = true; }; export const safeRegister = async (fn: () => Promise) => { return registerMutex.withLock(async () => { ensureTsxRegistered(); await assertES5(); return fn(); }); }; export const prepareCheckParams = async ( options: { config?: string; dialect?: Dialect; out?: string; }, from: 'cli' | 'config', ): Promise<{ out: string; dialect: Dialect }> => { const config = from === 'config' ? await drizzleConfigFromFile(options.config as string | undefined) : options; if (!config.out || !config.dialect) { let text = `Please provide required params for AWS Data API driver:\n`; console.log(error(text)); console.log(wrapParam('database', config.out)); console.log(wrapParam('secretArn', config.dialect)); process.exit(1); } return { out: config.out, dialect: config.dialect }; }; export const prepareDropParams = async ( options: { config?: string; out?: string; driver?: Driver; dialect?: Dialect; }, from: 'cli' | 'config', ): Promise<{ out: string; bundle: boolean }> => { const config = from === 'config' ? await drizzleConfigFromFile(options.config as string | undefined) : options; if (config.dialect === 'gel') { console.log( error( `You can't use 'drop' command with Gel dialect`, ), ); process.exit(1); } return { out: config.out || 'drizzle', bundle: config.driver === 'expo' }; }; export type GenerateConfig = { dialect: Dialect; schema: string | string[]; out: string; breakpoints: boolean; name?: string; prefix: Prefix; custom: boolean; bundle: boolean; casing?: CasingType; driver?: Driver; }; export type ExportConfig = { dialect: Dialect; schema: string | string[]; sql: boolean; }; export const prepareGenerateConfig = async ( options: { config?: string; schema?: string; out?: string; breakpoints?: boolean; custom?: boolean; name?: string; dialect?: Dialect; driver?: Driver; prefix?: Prefix; casing?: CasingType; }, from: 'config' | 'cli', ): Promise => { const config = from === 'config' ? await drizzleConfigFromFile(options.config) : options; const { schema, out, breakpoints, dialect, driver, casing } = config; if (!schema || !dialect) { console.log(error('Please provide required params:')); console.log(wrapParam('schema', schema)); console.log(wrapParam('dialect', dialect)); console.log(wrapParam('out', out, true)); process.exit(1); } const fileNames = prepareFilenames(schema); if (fileNames.length === 0) { render(`[${chalk.blue('i')}] No schema file in ${schema} was found`); process.exit(0); } const prefix = ('migrations' in config ? config.migrations?.prefix : options.prefix) || 'index'; return { dialect: dialect, name: options.name, custom: options.custom || false, prefix, breakpoints: breakpoints ?? true, schema: schema, out: out || 'drizzle', bundle: driver === 'expo' || driver === 'durable-sqlite', casing, driver, }; }; export const prepareExportConfig = async ( options: { config?: string; schema?: string; dialect?: Dialect; sql: boolean; }, from: 'config' | 'cli', ): Promise => { const config = from === 'config' ? await drizzleConfigFromFile(options.config, true) : options; const { schema, dialect, sql } = config; if (!schema || !dialect) { console.log(error('Please provide required params:')); console.log(wrapParam('schema', schema)); console.log(wrapParam('dialect', dialect)); process.exit(1); } const fileNames = prepareFilenames(schema); if (fileNames.length === 0) { render(`[${chalk.blue('i')}] No schema file in ${schema} was found`); process.exit(0); } return { dialect: dialect, schema: schema, sql: sql, }; }; export const flattenDatabaseCredentials = (config: any) => { if ('dbCredentials' in config) { const { dbCredentials, ...rest } = config; return { ...rest, ...dbCredentials, }; } return config; }; const flattenPull = (config: any) => { if ('dbCredentials' in config) { const { dbCredentials, introspect, ...rest } = config; return { ...rest, ...dbCredentials, casing: introspect?.casing, }; } return config; }; export const preparePushConfig = async ( options: Record, from: 'cli' | 'config', ): Promise< ( | { dialect: 'mysql'; credentials: MysqlCredentials; } | { dialect: 'postgresql'; credentials: PostgresCredentials; } | { dialect: 'sqlite'; credentials: SqliteCredentials; } | { dialect: 'turso'; credentials: LibSQLCredentials; } | { dialect: 'singlestore'; credentials: SingleStoreCredentials; } ) & { schemaPath: string | string[]; verbose: boolean; strict: boolean; force: boolean; tablesFilter: string[]; schemasFilter: string[]; casing?: CasingType; entities?: Entities; } > => { const raw = flattenDatabaseCredentials( from === 'config' ? await drizzleConfigFromFile(options.config as string | undefined) : options, ); raw.verbose ||= options.verbose; // if provided in cli to debug raw.strict ||= options.strict; // if provided in cli only const parsed = pushParams.safeParse(raw); if (parsed.error) { console.log(error('Please provide required params:')); console.log(wrapParam('dialect', raw.dialect)); console.log(wrapParam('schema', raw.schema)); process.exit(1); } const config = parsed.data; const schemaFiles = prepareFilenames(config.schema); if (schemaFiles.length === 0) { render(`[${chalk.blue('i')}] No schema file in ${config.schema} was found`); process.exit(0); } const tablesFilterConfig = config.tablesFilter; const tablesFilter = tablesFilterConfig ? typeof tablesFilterConfig === 'string' ? [tablesFilterConfig] : tablesFilterConfig : []; const schemasFilterConfig = config.schemaFilter; const schemasFilter = schemasFilterConfig ? typeof schemasFilterConfig === 'string' ? [schemasFilterConfig] : schemasFilterConfig : []; tablesFilter.push(...getTablesFilterByExtensions(config)); if (config.dialect === 'postgresql') { const parsed = postgresCredentials.safeParse(config); if (!parsed.success) { printIssuesPg(config); process.exit(1); } return { dialect: 'postgresql', schemaPath: config.schema, strict: config.strict ?? false, verbose: config.verbose ?? false, force: (options.force as boolean) ?? false, credentials: parsed.data, casing: config.casing, tablesFilter, schemasFilter, entities: config.entities, }; } if (config.dialect === 'mysql') { const parsed = mysqlCredentials.safeParse(config); if (!parsed.success) { printIssuesMysql(config); process.exit(1); } return { dialect: 'mysql', schemaPath: config.schema, strict: config.strict ?? false, verbose: config.verbose ?? false, force: (options.force as boolean) ?? false, credentials: parsed.data, casing: config.casing, tablesFilter, schemasFilter, }; } if (config.dialect === 'singlestore') { const parsed = singlestoreCredentials.safeParse(config); if (!parsed.success) { printIssuesSingleStore(config); process.exit(1); } return { dialect: 'singlestore', schemaPath: config.schema, strict: config.strict ?? false, verbose: config.verbose ?? false, force: (options.force as boolean) ?? false, credentials: parsed.data, tablesFilter, schemasFilter, }; } if (config.dialect === 'sqlite') { const parsed = sqliteCredentials.safeParse(config); if (!parsed.success) { printIssuesSqlite(config, 'push'); process.exit(1); } return { dialect: 'sqlite', schemaPath: config.schema, strict: config.strict ?? false, verbose: config.verbose ?? false, force: (options.force as boolean) ?? false, credentials: parsed.data, casing: config.casing, tablesFilter, schemasFilter, }; } if (config.dialect === 'turso') { const parsed = libSQLCredentials.safeParse(config); if (!parsed.success) { printIssuesSqlite(config, 'push'); process.exit(1); } return { dialect: 'turso', schemaPath: config.schema, strict: config.strict ?? false, verbose: config.verbose ?? false, force: (options.force as boolean) ?? false, credentials: parsed.data, casing: config.casing, tablesFilter, schemasFilter, }; } if (config.dialect === 'gel') { console.log( error( `You can't use 'push' command with Gel dialect`, ), ); process.exit(1); } assertUnreachable(config.dialect); }; export const preparePullConfig = async ( options: Record, from: 'cli' | 'config', ): Promise< ( | { dialect: 'mysql'; credentials: MysqlCredentials; } | { dialect: 'postgresql'; credentials: PostgresCredentials; } | { dialect: 'sqlite'; credentials: SqliteCredentials; } | { dialect: 'turso'; credentials: LibSQLCredentials; } | { dialect: 'singlestore'; credentials: SingleStoreCredentials; } | { dialect: 'gel'; credentials?: GelCredentials; } ) & { out: string; breakpoints: boolean; casing: Casing; tablesFilter: string[]; schemasFilter: string[]; prefix: Prefix; entities: Entities; } > => { const raw = flattenPull( from === 'config' ? await drizzleConfigFromFile(options.config as string | undefined) : options, ); const parsed = pullParams.safeParse(raw); if (parsed.error) { console.log(error('Please provide required params:')); console.log(wrapParam('dialect', raw.dialect)); process.exit(1); } const config = parsed.data; const dialect = config.dialect; const tablesFilterConfig = config.tablesFilter; const tablesFilter = tablesFilterConfig ? typeof tablesFilterConfig === 'string' ? [tablesFilterConfig] : tablesFilterConfig : []; if (config.extensionsFilters) { if ( config.extensionsFilters.includes('postgis') && dialect === 'postgresql' ) { tablesFilter.push( ...['!geography_columns', '!geometry_columns', '!spatial_ref_sys'], ); } } const schemasFilterConfig = config.schemaFilter; // TODO: consistent naming const schemasFilter = schemasFilterConfig ? typeof schemasFilterConfig === 'string' ? [schemasFilterConfig] : schemasFilterConfig : []; if (dialect === 'postgresql') { const parsed = postgresCredentials.safeParse(config); if (!parsed.success) { printIssuesPg(config); process.exit(1); } return { dialect: 'postgresql', out: config.out, breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, tablesFilter, schemasFilter, prefix: config.migrations?.prefix || 'index', entities: config.entities, }; } if (dialect === 'mysql') { const parsed = mysqlCredentials.safeParse(config); if (!parsed.success) { printIssuesMysql(config); process.exit(1); } return { dialect: 'mysql', out: config.out, breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, tablesFilter, schemasFilter, prefix: config.migrations?.prefix || 'index', entities: config.entities, }; } if (dialect === 'singlestore') { const parsed = singlestoreCredentials.safeParse(config); if (!parsed.success) { printIssuesSingleStore(config); process.exit(1); } return { dialect: 'singlestore', out: config.out, breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, tablesFilter, schemasFilter, prefix: config.migrations?.prefix || 'index', entities: config.entities, }; } if (dialect === 'sqlite') { const parsed = sqliteCredentials.safeParse(config); if (!parsed.success) { printIssuesSqlite(config, 'pull'); process.exit(1); } return { dialect: 'sqlite', out: config.out, breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, tablesFilter, schemasFilter, prefix: config.migrations?.prefix || 'index', entities: config.entities, }; } if (dialect === 'turso') { const parsed = libSQLCredentials.safeParse(config); if (!parsed.success) { printIssuesLibSQL(config, 'pull'); process.exit(1); } return { dialect, out: config.out, breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, tablesFilter, schemasFilter, prefix: config.migrations?.prefix || 'index', entities: config.entities, }; } if (dialect === 'gel') { const parsed = gelCredentials.safeParse(config); if (!parsed.success) { printIssuesGel(config); process.exit(1); } return { dialect, out: config.out, breakpoints: config.breakpoints, casing: config.casing, credentials: parsed.data, tablesFilter, schemasFilter, prefix: config.migrations?.prefix || 'index', entities: config.entities, }; } assertUnreachable(dialect); }; export const prepareStudioConfig = async (options: Record) => { const params = studioCliParams.parse(options); const config = await drizzleConfigFromFile(params.config); const result = studioConfig.safeParse(config); if (!result.success) { if (!('dialect' in config)) { console.log(outputs.studio.noDialect()); } process.exit(1); } if (!('dbCredentials' in config)) { console.log(outputs.studio.noCredentials()); process.exit(1); } const { host, port } = params; const { dialect, schema, casing } = result.data; const flattened = flattenDatabaseCredentials(config); if (dialect === 'postgresql') { const parsed = postgresCredentials.safeParse(flattened); if (!parsed.success) { printIssuesPg(flattened as Record); process.exit(1); } const credentials = parsed.data; return { dialect, schema, host, port, credentials, casing, }; } if (dialect === 'mysql') { const parsed = mysqlCredentials.safeParse(flattened); if (!parsed.success) { printIssuesMysql(flattened as Record); process.exit(1); } const credentials = parsed.data; return { dialect, schema, host, port, credentials, casing, }; } if (dialect === 'singlestore') { const parsed = singlestoreCredentials.safeParse(flattened); if (!parsed.success) { printIssuesSingleStore(flattened as Record); process.exit(1); } const credentials = parsed.data; return { dialect, schema, host, port, credentials, casing, }; } if (dialect === 'sqlite') { const parsed = sqliteCredentials.safeParse(flattened); if (!parsed.success) { printIssuesSqlite(flattened as Record, 'studio'); process.exit(1); } const credentials = parsed.data; return { dialect, schema, host, port, credentials, casing, }; } if (dialect === 'turso') { const parsed = libSQLCredentials.safeParse(flattened); if (!parsed.success) { printIssuesLibSQL(flattened as Record, 'studio'); process.exit(1); } const credentials = parsed.data; return { dialect, schema, host, port, credentials, casing, }; } if (dialect === 'gel') { console.log( error( `You can't use 'studio' command with Gel dialect`, ), ); process.exit(1); } assertUnreachable(dialect); }; export const migrateConfig = object({ dialect, out: string().optional().default('drizzle'), migrations: configMigrations, }); export const prepareMigrateConfig = async (configPath: string | undefined) => { const config = await drizzleConfigFromFile(configPath); const parsed = migrateConfig.safeParse(config); if (parsed.error) { console.log(error('Please provide required params:')); console.log(wrapParam('dialect', config.dialect)); process.exit(1); } const { dialect, out } = parsed.data; const { schema, table } = parsed.data.migrations || {}; const flattened = flattenDatabaseCredentials(config); if (dialect === 'postgresql') { const parsed = postgresCredentials.safeParse(flattened); if (!parsed.success) { printIssuesPg(flattened as Record); process.exit(1); } const credentials = parsed.data; return { dialect, out, credentials, schema, table, }; } if (dialect === 'mysql') { const parsed = mysqlCredentials.safeParse(flattened); if (!parsed.success) { printIssuesMysql(flattened as Record); process.exit(1); } const credentials = parsed.data; return { dialect, out, credentials, schema, table, }; } if (dialect === 'singlestore') { const parsed = singlestoreCredentials.safeParse(flattened); if (!parsed.success) { printIssuesSingleStore(flattened as Record); process.exit(1); } const credentials = parsed.data; return { dialect, out, credentials, schema, table, }; } if (dialect === 'sqlite') { const parsed = sqliteCredentials.safeParse(flattened); if (!parsed.success) { printIssuesSqlite(flattened as Record, 'migrate'); process.exit(1); } const credentials = parsed.data; return { dialect, out, credentials, schema, table, }; } if (dialect === 'turso') { const parsed = libSQLCredentials.safeParse(flattened); if (!parsed.success) { printIssuesLibSQL(flattened as Record, 'migrate'); process.exit(1); } const credentials = parsed.data; return { dialect, out, credentials, schema, table, }; } if (dialect === 'gel') { console.log( error( `You can't use 'migrate' command with Gel dialect`, ), ); process.exit(1); } assertUnreachable(dialect); }; export const drizzleConfigFromFile = async ( configPath?: string, isExport?: boolean, ): Promise => { const prefix = process.env.TEST_CONFIG_PATH_PREFIX || ''; const defaultTsConfigExists = existsSync(resolve(join(prefix, 'drizzle.config.ts'))); const defaultJsConfigExists = existsSync(resolve(join(prefix, 'drizzle.config.js'))); existsSync( join(resolve('drizzle.config.json')), ); const defaultConfigPath = defaultTsConfigExists ? 'drizzle.config.ts' : defaultJsConfigExists ? 'drizzle.config.js' : 'drizzle.config.json'; if (!configPath && !isExport) { console.log( chalk.gray( `No config path provided, using default '${defaultConfigPath}'`, ), ); } const path: string = resolve(join(prefix, configPath ?? defaultConfigPath)); if (!existsSync(path)) { console.log(`${path} file does not exist`); process.exit(1); } if (!isExport) console.log(chalk.grey(`Reading config file '${path}'`)); return safeRegister(async () => { const required = require(`${path}`); const content = required.default ?? required; // --- get response and then check by each dialect independently const res = configCommonSchema.safeParse(content); if (!res.success) { console.log(res.error); if (!('dialect' in content)) { console.log(error("Please specify 'dialect' param in config file")); } process.exit(1); } return res.data; }); }; ================================================ FILE: drizzle-kit/src/cli/connections.ts ================================================ /// import type { PGlite } from '@electric-sql/pglite'; import type { AwsDataApiPgQueryResult, AwsDataApiSessionOptions } from 'drizzle-orm/aws-data-api/pg'; import type { MigrationConfig } from 'drizzle-orm/migrator'; import type { PreparedQueryConfig } from 'drizzle-orm/pg-core'; import fetch from 'node-fetch'; import ws from 'ws'; import { assertUnreachable } from '../global'; import type { ProxyParams } from '../serializer/studio'; import { type DB, LibSQLDB, normalisePGliteUrl, normaliseSQLiteUrl, type Proxy, type SQLiteDB, type TransactionProxy, } from '../utils'; import { assertPackages, checkPackage } from './utils'; import { GelCredentials } from './validations/gel'; import { LibSQLCredentials } from './validations/libsql'; import type { MysqlCredentials } from './validations/mysql'; import { withStyle } from './validations/outputs'; import type { PostgresCredentials } from './validations/postgres'; import { SingleStoreCredentials } from './validations/singlestore'; import type { SqliteCredentials } from './validations/sqlite'; export const preparePostgresDB = async ( credentials: PostgresCredentials | { driver: 'pglite'; client: PGlite; }, ): Promise< DB & { packageName: | '@aws-sdk/client-rds-data' | 'pglite' | 'pg' | 'postgres' | '@vercel/postgres' | '@neondatabase/serverless'; proxy: Proxy; transactionProxy: TransactionProxy; migrate: (config: string | MigrationConfig) => Promise; } > => { if ('driver' in credentials) { const { driver } = credentials; if (driver === 'aws-data-api') { assertPackages('@aws-sdk/client-rds-data'); const { RDSDataClient, ExecuteStatementCommand, TypeHint } = await import( '@aws-sdk/client-rds-data' ); const { AwsDataApiSession, drizzle } = await import( 'drizzle-orm/aws-data-api/pg' ); const { migrate } = await import('drizzle-orm/aws-data-api/pg/migrator'); const { PgDialect } = await import('drizzle-orm/pg-core'); const config: AwsDataApiSessionOptions = { database: credentials.database, resourceArn: credentials.resourceArn, secretArn: credentials.secretArn, }; const rdsClient = new RDSDataClient(); const session = new AwsDataApiSession( rdsClient, new PgDialect(), undefined, config, undefined, ); const db = drizzle(rdsClient, config); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const query = async (sql: string, params: any[]) => { const prepared = session.prepareQuery( { sql, params: params ?? [] }, undefined, undefined, false, ); const result = await prepared.all(); return result as any[]; }; const proxy = async (params: ProxyParams) => { const prepared = session.prepareQuery< PreparedQueryConfig & { execute: AwsDataApiPgQueryResult; values: AwsDataApiPgQueryResult; } >( { sql: params.sql, params: params.params ?? [], typings: params.typings, }, undefined, undefined, params.mode === 'array', ); if (params.mode === 'array') { const result = await prepared.values(); return result.rows; } const result = await prepared.execute(); return result.rows; }; const transactionProxy: TransactionProxy = async (queries) => { throw new Error('Transaction not supported'); }; return { packageName: '@aws-sdk/client-rds-data', query, proxy, transactionProxy, migrate: migrateFn, }; } if (driver === 'pglite') { assertPackages('@electric-sql/pglite'); const { PGlite, types } = await import('@electric-sql/pglite'); const { drizzle } = await import('drizzle-orm/pglite'); const { migrate } = await import('drizzle-orm/pglite/migrator'); const pglite = 'client' in credentials ? credentials.client : new PGlite(normalisePGliteUrl(credentials.url)); await pglite.waitReady; const drzl = drizzle(pglite); const migrateFn = async (config: MigrationConfig) => { return migrate(drzl, config); }; const parsers = { [types.TIMESTAMP]: (value: any) => value, [types.TIMESTAMPTZ]: (value: any) => value, [types.INTERVAL]: (value: any) => value, [types.DATE]: (value: any) => value, }; const query = async (sql: string, params: any[] = []) => { const result = await pglite.query(sql, params, { parsers, }); return result.rows as T[]; }; const proxy = async (params: ProxyParams) => { const preparedParams = preparePGliteParams(params.params || []); const result = await pglite.query(params.sql, preparedParams, { rowMode: params.mode, parsers, }); return result.rows; }; const transactionProxy: TransactionProxy = async (queries) => { const results: any[] = []; try { await pglite.transaction(async (tx) => { for (const query of queries) { const result = await tx.query(query.sql, undefined, { parsers, }); results.push(result.rows); } }); } catch (error) { results.push(error as Error); } return results; }; return { packageName: 'pglite', query, proxy, transactionProxy, migrate: migrateFn }; } assertUnreachable(driver); } if (await checkPackage('pg')) { console.log(withStyle.info(`Using 'pg' driver for database querying`)); const { default: pg } = await import('pg'); const { drizzle } = await import('drizzle-orm/node-postgres'); const { migrate } = await import('drizzle-orm/node-postgres/migrator'); const ssl = 'ssl' in credentials ? credentials.ssl === 'prefer' || credentials.ssl === 'require' || credentials.ssl === 'allow' ? { rejectUnauthorized: false } : credentials.ssl === 'verify-full' ? {} : credentials.ssl : {}; // Override pg default date parsers const types: { getTypeParser: typeof pg.types.getTypeParser } = { // @ts-ignore getTypeParser: (typeId, format) => { if (typeId === pg.types.builtins.TIMESTAMPTZ) { return (val) => val; } if (typeId === pg.types.builtins.TIMESTAMP) { return (val) => val; } if (typeId === pg.types.builtins.DATE) { return (val) => val; } if (typeId === pg.types.builtins.INTERVAL) { return (val) => val; } // @ts-ignore return pg.types.getTypeParser(typeId, format); }, }; const client = 'url' in credentials ? new pg.Pool({ connectionString: credentials.url, max: 1 }) : new pg.Pool({ ...credentials, ssl, max: 1 }); const db = drizzle(client); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const query = async (sql: string, params?: any[]) => { const result = await client.query({ text: sql, values: params ?? [], types, }); return result.rows; }; const proxy: Proxy = async (params) => { const result = await client.query({ text: params.sql, values: params.params, ...(params.mode === 'array' && { rowMode: 'array' }), types, }); return result.rows; }; const transactionProxy: TransactionProxy = async (queries) => { const results: any[] = []; const tx = await client.connect(); try { await tx.query('BEGIN'); for (const query of queries) { const result = await tx.query({ text: query.sql, types, }); results.push(result.rows); } await tx.query('COMMIT'); } catch (error) { await tx.query('ROLLBACK'); results.push(error as Error); } finally { tx.release(); } return results; }; return { packageName: 'pg', query, proxy, transactionProxy, migrate: migrateFn }; } if (await checkPackage('postgres')) { console.log( withStyle.info(`Using 'postgres' driver for database querying`), ); const postgres = await import('postgres'); const { drizzle } = await import('drizzle-orm/postgres-js'); const { migrate } = await import('drizzle-orm/postgres-js/migrator'); const client = 'url' in credentials ? postgres.default(credentials.url, { max: 1 }) : postgres.default({ ...credentials, max: 1 }); const transparentParser = (val: any) => val; // Override postgres.js default date parsers: https://github.com/porsager/postgres/discussions/761 for (const type of ['1184', '1082', '1083', '1114']) { client.options.parsers[type as any] = transparentParser; client.options.serializers[type as any] = transparentParser; } client.options.serializers['114'] = transparentParser; client.options.serializers['3802'] = transparentParser; const db = drizzle(client); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const query = async (sql: string, params?: any[]) => { const result = await client.unsafe(sql, params ?? []); return result as any[]; }; const proxy: Proxy = async (params) => { if (params.mode === 'array') { return await client.unsafe(params.sql, params.params).values(); } return await client.unsafe(params.sql, params.params); }; const transactionProxy: TransactionProxy = async (queries) => { const results: any[] = []; try { await client.begin(async (sql) => { for (const query of queries) { const result = await sql.unsafe(query.sql); results.push(result); } }); } catch (error) { results.push(error as Error); } return results; }; return { packageName: 'postgres', query, proxy, transactionProxy, migrate: migrateFn }; } if (await checkPackage('@vercel/postgres')) { console.log( withStyle.info(`Using '@vercel/postgres' driver for database querying`), ); console.log( withStyle.fullWarning( "'@vercel/postgres' can only connect to remote Neon/Vercel Postgres/Supabase instances through a websocket", ), ); const { VercelPool, types: pgTypes } = await import('@vercel/postgres'); const { drizzle } = await import('drizzle-orm/vercel-postgres'); const { migrate } = await import('drizzle-orm/vercel-postgres/migrator'); const ssl = 'ssl' in credentials ? credentials.ssl === 'prefer' || credentials.ssl === 'require' || credentials.ssl === 'allow' ? { rejectUnauthorized: false } : credentials.ssl === 'verify-full' ? {} : credentials.ssl : {}; // Override @vercel/postgres default date parsers const types: { getTypeParser: typeof pgTypes.getTypeParser } = { // @ts-ignore getTypeParser: (typeId, format) => { if (typeId === pgTypes.builtins.TIMESTAMPTZ) { return (val: any) => val; } if (typeId === pgTypes.builtins.TIMESTAMP) { return (val: any) => val; } if (typeId === pgTypes.builtins.DATE) { return (val: any) => val; } if (typeId === pgTypes.builtins.INTERVAL) { return (val: any) => val; } // @ts-ignore return pgTypes.getTypeParser(typeId, format); }, }; const client = 'url' in credentials ? new VercelPool({ connectionString: credentials.url }) : new VercelPool({ ...credentials, ssl }); await client.connect(); const db = drizzle(client); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const query = async (sql: string, params?: any[]) => { const result = await client.query({ text: sql, values: params ?? [], types, }); return result.rows; }; const proxy: Proxy = async (params) => { const result = await client.query({ text: params.sql, values: params.params, ...(params.mode === 'array' && { rowMode: 'array' }), types, }); return result.rows; }; const transactionProxy: TransactionProxy = async (queries) => { const results: any[] = []; const tx = await client.connect(); try { await tx.query('BEGIN'); for (const query of queries) { const result = await tx.query({ text: query.sql, types, }); results.push(result.rows); } await tx.query('COMMIT'); } catch (error) { await tx.query('ROLLBACK'); results.push(error as Error); } finally { tx.release(); } return results; }; return { packageName: '@vercel/postgres', query, proxy, transactionProxy, migrate: migrateFn }; } if (await checkPackage('@neondatabase/serverless')) { console.log( withStyle.info( `Using '@neondatabase/serverless' driver for database querying`, ), ); console.log( withStyle.fullWarning( "'@neondatabase/serverless' can only connect to remote Neon/Vercel Postgres/Supabase instances through a websocket", ), ); const { Pool, neonConfig, types: pgTypes } = await import('@neondatabase/serverless'); const { drizzle } = await import('drizzle-orm/neon-serverless'); const { migrate } = await import('drizzle-orm/neon-serverless/migrator'); const ssl = 'ssl' in credentials ? credentials.ssl === 'prefer' || credentials.ssl === 'require' || credentials.ssl === 'allow' ? { rejectUnauthorized: false } : credentials.ssl === 'verify-full' ? {} : credentials.ssl : {}; // Override @neondatabase/serverless default date parsers const types: { getTypeParser: typeof pgTypes.getTypeParser } = { // @ts-ignore getTypeParser: (typeId, format) => { if (typeId === pgTypes.builtins.TIMESTAMPTZ) { return (val: any) => val; } if (typeId === pgTypes.builtins.TIMESTAMP) { return (val: any) => val; } if (typeId === pgTypes.builtins.DATE) { return (val: any) => val; } if (typeId === pgTypes.builtins.INTERVAL) { return (val: any) => val; } // @ts-ignore return pgTypes.getTypeParser(typeId, format); }, }; const client = 'url' in credentials ? new Pool({ connectionString: credentials.url, max: 1 }) : new Pool({ ...credentials, max: 1, ssl }); neonConfig.webSocketConstructor = ws; const db = drizzle(client); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const query = async (sql: string, params?: any[]) => { const result = await client.query({ text: sql, values: params ?? [], types, }); return result.rows; }; const proxy: Proxy = async (params: ProxyParams) => { const result = await client.query({ text: params.sql, values: params.params, ...(params.mode === 'array' && { rowMode: 'array' }), types, }); return result.rows; }; const transactionProxy: TransactionProxy = async (queries) => { const results: any[] = []; const tx = await client.connect(); try { await tx.query('BEGIN'); for (const query of queries) { const result = await tx.query({ text: query.sql, types, }); results.push(result.rows); } await tx.query('COMMIT'); } catch (error) { await tx.query('ROLLBACK'); results.push(error as Error); } finally { tx.release(); } return results; }; return { packageName: '@neondatabase/serverless', query, proxy, transactionProxy, migrate: migrateFn }; } console.error( "To connect to Postgres database - please install either of 'pg', 'postgres', '@neondatabase/serverless' or '@vercel/postgres' drivers", ); process.exit(1); }; export const prepareGelDB = async ( credentials?: GelCredentials, ): Promise< DB & { packageName: 'gel'; proxy: Proxy; transactionProxy: TransactionProxy; } > => { if (await checkPackage('gel')) { const gel = await import('gel'); let client: ReturnType; if (!credentials) { client = gel.createClient(); try { await client.querySQL(`select 1;`); } catch (error: any) { if (error instanceof gel.ClientConnectionError) { console.error( `It looks like you forgot to link the Gel project or provide the database credentials. To link your project, please refer https://docs.geldata.com/reference/cli/gel_instance/gel_instance_link, or add the dbCredentials to your configuration file.`, ); process.exit(1); } throw error; } } else if ('url' in credentials) { 'tlsSecurity' in credentials ? client = gel.createClient({ dsn: credentials.url, tlsSecurity: credentials.tlsSecurity, concurrency: 1 }) : client = gel.createClient({ dsn: credentials.url, concurrency: 1 }); } else { gel.createClient({ ...credentials, concurrency: 1 }); } const query = async (sql: string, params?: any[]) => { const result = params?.length ? await client.querySQL(sql, params) : await client.querySQL(sql); return result as any[]; }; const proxy: Proxy = async (params: ProxyParams) => { const { method, mode, params: sqlParams, sql, typings } = params; let result: any[]; switch (mode) { case 'array': result = sqlParams?.length ? await client.withSQLRowMode('array').querySQL(sql, sqlParams) : await client.withSQLRowMode('array').querySQL(sql); break; case 'object': result = sqlParams?.length ? await client.querySQL(sql, sqlParams) : await client.querySQL(sql); break; } return result; }; const transactionProxy: TransactionProxy = async (queries) => { const result: any[] = []; try { await client.transaction(async (tx) => { for (const query of queries) { const res = await tx.querySQL(query.sql); result.push(res); } }); } catch (error) { result.push(error as Error); } return result; }; return { packageName: 'gel', query, proxy, transactionProxy }; } console.error( "To connect to gel database - please install 'edgedb' driver", ); process.exit(1); }; const parseSingleStoreCredentials = (credentials: SingleStoreCredentials) => { if ('url' in credentials) { const url = credentials.url; const connectionUrl = new URL(url); const pathname = connectionUrl.pathname; const database = pathname.split('/')[pathname.split('/').length - 1]; if (!database) { console.error( 'You should specify a database name in connection string (singlestore://USER:PASSWORD@HOST:PORT/DATABASE)', ); process.exit(1); } return { database, url }; } else { return { database: credentials.database, credentials, }; } }; export const connectToSingleStore = async ( it: SingleStoreCredentials, ): Promise<{ db: DB; packageName: 'mysql2'; proxy: Proxy; transactionProxy: TransactionProxy; database: string; migrate: (config: MigrationConfig) => Promise; }> => { const result = parseSingleStoreCredentials(it); if (await checkPackage('mysql2')) { const { createConnection } = await import('mysql2/promise'); const { drizzle } = await import('drizzle-orm/singlestore'); const { migrate } = await import('drizzle-orm/singlestore/migrator'); const connection = result.url ? await createConnection(result.url) : await createConnection(result.credentials!); // needed for some reason! const db = drizzle(connection); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; await connection.connect(); const query: DB['query'] = async ( sql: string, params?: any[], ): Promise => { const res = await connection.execute(sql, params); return res[0] as any; }; const proxy: Proxy = async (params: ProxyParams) => { const result = await connection.query({ sql: params.sql, values: params.params, rowsAsArray: params.mode === 'array', }); return result[0] as any[]; }; const transactionProxy: TransactionProxy = async (queries) => { const results: any[] = []; try { await connection.beginTransaction(); for (const query of queries) { const res = await connection.query(query.sql); results.push(res[0]); } await connection.commit(); } catch (error) { await connection.rollback(); results.push(error as Error); } return results; }; return { db: { query }, packageName: 'mysql2', proxy, transactionProxy, database: result.database, migrate: migrateFn, }; } console.error( "To connect to SingleStore database - please install 'mysql2' driver", ); process.exit(1); }; const parseMysqlCredentials = (credentials: MysqlCredentials) => { if ('url' in credentials) { const url = credentials.url; const connectionUrl = new URL(url); const pathname = connectionUrl.pathname; const database = pathname.split('/')[pathname.split('/').length - 1]; if (!database) { console.error( 'You should specify a database name in connection string (mysql://USER:PASSWORD@HOST:PORT/DATABASE)', ); process.exit(1); } return { database, url }; } else { return { database: credentials.database, credentials, }; } }; export const connectToMySQL = async ( it: MysqlCredentials, ): Promise<{ db: DB; packageName: 'mysql2' | '@planetscale/database'; proxy: Proxy; transactionProxy: TransactionProxy; database: string; migrate: (config: MigrationConfig) => Promise; }> => { const result = parseMysqlCredentials(it); if (await checkPackage('mysql2')) { const { createConnection } = await import('mysql2/promise'); const { drizzle } = await import('drizzle-orm/mysql2'); const { migrate } = await import('drizzle-orm/mysql2/migrator'); const connection = result.url ? await createConnection(result.url) : await createConnection(result.credentials!); // needed for some reason! const db = drizzle(connection); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const typeCast = (field: any, next: any) => { if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { return field.string(); } return next(); }; await connection.connect(); const query: DB['query'] = async ( sql: string, params?: any[], ): Promise => { const res = await connection.execute({ sql, values: params, typeCast, }); return res[0] as any; }; const proxy: Proxy = async (params: ProxyParams) => { const result = await connection.query({ sql: params.sql, values: params.params, rowsAsArray: params.mode === 'array', typeCast, }); return result[0] as any[]; }; const transactionProxy: TransactionProxy = async (queries) => { const results: any[] = []; try { await connection.beginTransaction(); for (const query of queries) { const res = await connection.query(query.sql); results.push(res[0]); } await connection.commit(); } catch (error) { await connection.rollback(); results.push(error as Error); } return results; }; return { db: { query }, packageName: 'mysql2', proxy, transactionProxy, database: result.database, migrate: migrateFn, }; } if (await checkPackage('@planetscale/database')) { const { Client } = await import('@planetscale/database'); const { drizzle } = await import('drizzle-orm/planetscale-serverless'); const { migrate } = await import( 'drizzle-orm/planetscale-serverless/migrator' ); const connection = new Client(result); const db = drizzle(connection); const migrateFn = async (config: MigrationConfig) => { return migrate(db, config); }; const query = async (sql: string, params?: any[]): Promise => { const res = await connection.execute(sql, params); return res.rows as T[]; }; const proxy: Proxy = async (params: ProxyParams) => { const result = await connection.execute( params.sql, params.params, params.mode === 'array' ? { as: 'array' } : undefined, ); return result.rows; }; const transactionProxy: TransactionProxy = async (queries) => { const results: any[] = []; try { await connection.transaction(async (tx) => { for (const query of queries) { const res = await tx.execute(query.sql); results.push(res.rows); } }); } catch (error) { results.push(error as Error); } return results; }; return { db: { query }, packageName: '@planetscale/database', proxy, transactionProxy, database: result.database, migrate: migrateFn, }; } console.error( "To connect to MySQL database - please install either of 'mysql2' or '@planetscale/database' drivers", ); process.exit(1); }; const prepareSqliteParams = (params: any[], driver?: string) => { return params.map((param) => { if ( param && typeof param === 'object' && 'type' in param && 'value' in param && param.type === 'binary' ) { const value = typeof param.value === 'object' ? JSON.stringify(param.value) : (param.value as string); if (driver === 'd1-http' || driver === 'd1') { return value; } return Buffer.from(value); } return param; }); }; const preparePGliteParams = (params: any[]) => { return params.map((param) => { if ( param && typeof param === 'object' && 'type' in param && 'value' in param && param.type === 'binary' ) { const value = typeof param.value === 'object' ? JSON.stringify(param.value) : (param.value as string); return value; } return param; }); }; export type D1Credentials = { driver: 'd1'; binding: D1Database; }; export const connectToD1 = async ( d1: D1Database, ): Promise< & SQLiteDB & { packageName: 'd1'; migrate: (config: MigrationConfig) => Promise; proxy: Proxy; transactionProxy: TransactionProxy; } > => { const db: SQLiteDB = { query: async (sql: string, params?: any[]) => { const stmt = d1.prepare(sql); const boundStmt = params && params.length > 0 ? stmt.bind(...params) : stmt; const result = await boundStmt.all(); return (result.results ?? []) as T[]; }, run: async (query: string) => { const stmt = d1.prepare(query); await stmt.run(); }, }; const proxy: Proxy = async (params) => { const preparedParams = prepareSqliteParams(params.params || [], 'd1'); const stmt = d1.prepare(params.sql); const boundStmt = preparedParams.length > 0 ? stmt.bind(...preparedParams) : stmt; try { if (params.mode === 'array') { return await boundStmt.raw(); } const result = await boundStmt.all(); return result.results ?? []; } catch (error: any) { // D1 doesn't allow certain introspection queries (sqlite_master with pragma functions) // Return empty array for SQLITE_AUTH errors on these system queries if (error?.message?.includes('SQLITE_AUTH') || error?.message?.includes('not authorized')) { return []; } throw error; } }; const transactionProxy: TransactionProxy = async (queries) => { const results: any[] = []; try { // D1 doesn't support true transactions via binding, use batch instead const statements = queries.map((q) => d1.prepare(q.sql)); const batchResults = await d1.batch(statements); for (const result of batchResults) { results.push(result.results ?? []); } } catch (error) { results.push(error as Error); } return results; }; const { drizzle } = await import('drizzle-orm/d1'); const { migrate } = await import('drizzle-orm/d1/migrator'); const drzl = drizzle(d1); const migrateFn = async (config: MigrationConfig) => { return migrate(drzl, config); }; return { ...db, packageName: 'd1', proxy, transactionProxy, migrate: migrateFn }; }; export const connectToSQLite = async ( credentials: SqliteCredentials, ): Promise< & SQLiteDB & { packageName: 'd1-http' | '@libsql/client' | 'better-sqlite3'; migrate: (config: MigrationConfig) => Promise; proxy: Proxy; transactionProxy: TransactionProxy; } > => { if ('driver' in credentials) { const { driver } = credentials; if (driver === 'd1-http') { const { drizzle } = await import('drizzle-orm/sqlite-proxy'); const { migrate } = await import('drizzle-orm/sqlite-proxy/migrator'); type D1Response = | { success: true; result: { results: | any[] | { columns: string[]; rows: any[][]; }; }[]; } | { success: false; errors: { code: number; message: string }[]; }; const remoteCallback: Parameters[0] = async ( sql, params, method, ) => { const res = await fetch( `https://api.cloudflare.com/client/v4/accounts/${credentials.accountId}/d1/database/${credentials.databaseId}/${ method === 'values' ? 'raw' : 'query' }`, { method: 'POST', body: JSON.stringify({ sql, params }), headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${credentials.token}`, }, }, ); const data = (await res.json()) as D1Response; if (!data.success) { throw new Error( data.errors.map((it) => `${it.code}: ${it.message}`).join('\n'), ); } const result = data.result[0].results; const rows = Array.isArray(result) ? result : result.rows; return { rows, }; }; const remoteBatchCallback = async ( queries: { sql: string; }[], ) => { const sql = queries.map((q) => q.sql).join('; '); const res = await fetch( `https://api.cloudflare.com/client/v4/accounts/${credentials.accountId}/d1/database/${credentials.databaseId}/query`, { method: 'POST', body: JSON.stringify({ sql }), headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${credentials.token}`, }, }, ); const data = (await res.json()) as D1Response; if (!data.success) { throw new Error( data.errors.map((it) => `${it.code}: ${it.message}`).join('\n'), ); } const rows = data.result.map((result) => { const res = result.results; return Array.isArray(res) ? res : res.rows; }); return { rows, }; }; const drzl = drizzle(remoteCallback); const migrateFn = async (config: MigrationConfig) => { return migrate( drzl, async (queries) => { for (const query of queries) { await remoteCallback(query, [], 'run'); } }, config, ); }; const db: SQLiteDB = { query: async (sql: string, params?: any[]) => { const res = await remoteCallback(sql, params || [], 'all'); return res.rows as T[]; }, run: async (query: string) => { await remoteCallback(query, [], 'run'); }, }; const proxy: Proxy = async (params) => { const preparedParams = prepareSqliteParams(params.params || [], 'd1-http'); const result = await remoteCallback( params.sql, preparedParams, params.mode === 'array' ? 'values' : 'all', ); return result.rows; }; const transactionProxy: TransactionProxy = async (queries) => { const result = await remoteBatchCallback(queries); return result.rows; }; return { ...db, packageName: 'd1-http', proxy, transactionProxy, migrate: migrateFn }; } else { assertUnreachable(driver); } } if (await checkPackage('@libsql/client')) { const { createClient } = await import('@libsql/client'); const { drizzle } = await import('drizzle-orm/libsql'); const { migrate } = await import('drizzle-orm/libsql/migrator'); const client = createClient({ url: normaliseSQLiteUrl(credentials.url, 'libsql'), }); const drzl = drizzle(client); const migrateFn = async (config: MigrationConfig) => { return migrate(drzl, config); }; const db: SQLiteDB = { query: async (sql: string, params?: any[]) => { const res = await client.execute({ sql, args: params || [] }); return res.rows as T[]; }, run: async (query: string) => { await client.execute(query); }, }; type Transaction = Awaited>; const proxy = async (params: ProxyParams) => { const preparedParams = prepareSqliteParams(params.params || []); const result = await client.execute({ sql: params.sql, args: preparedParams, }); if (params.mode === 'array') { return result.rows.map((row) => Object.values(row)); } else { return result.rows; } }; const transactionProxy: TransactionProxy = async (queries) => { const results: (any[] | Error)[] = []; let transaction: Transaction | null = null; try { transaction = await client.transaction(); for (const query of queries) { const result = await transaction.execute(query.sql); results.push(result.rows); } await transaction.commit(); } catch (error) { results.push(error as Error); await transaction?.rollback(); } finally { transaction?.close(); } return results; }; return { ...db, packageName: '@libsql/client', proxy, transactionProxy, migrate: migrateFn }; } if (await checkPackage('better-sqlite3')) { const { default: Database } = await import('better-sqlite3'); const { drizzle } = await import('drizzle-orm/better-sqlite3'); const { migrate } = await import('drizzle-orm/better-sqlite3/migrator'); const sqlite = new Database( normaliseSQLiteUrl(credentials.url, 'better-sqlite'), ); const drzl = drizzle(sqlite); const migrateFn = async (config: MigrationConfig) => { return migrate(drzl, config); }; const db: SQLiteDB = { query: async (sql: string, params: any[] = []) => { return sqlite.prepare(sql).bind(params).all() as T[]; }, run: async (query: string) => { sqlite.prepare(query).run(); }, }; const proxy: Proxy = async (params) => { const preparedParams = prepareSqliteParams(params.params || []); if ( params.method === 'values' || params.method === 'get' || params.method === 'all' ) { return sqlite .prepare(params.sql) .raw(params.mode === 'array') .all(preparedParams); } sqlite.prepare(params.sql).run(preparedParams); return []; }; const transactionProxy: TransactionProxy = async (queries) => { const results: (any[] | Error)[] = []; const tx = sqlite.transaction((queries: Parameters[0]) => { for (const query of queries) { let result: any[] = []; if (query.method === 'values' || query.method === 'get' || query.method === 'all') { result = sqlite .prepare(query.sql) .all(); } else { sqlite.prepare(query.sql).run(); } results.push(result); } }); try { tx(queries); } catch (error) { results.push(error as Error); } return results; }; return { ...db, packageName: 'better-sqlite3', proxy, transactionProxy, migrate: migrateFn }; } console.log( "Please install either 'better-sqlite3' or '@libsql/client' for Drizzle Kit to connect to SQLite databases", ); process.exit(1); }; export const connectToLibSQL = async (credentials: LibSQLCredentials): Promise< & LibSQLDB & { packageName: '@libsql/client'; migrate: (config: MigrationConfig) => Promise; proxy: Proxy; transactionProxy: TransactionProxy; } > => { if (await checkPackage('@libsql/client')) { const { createClient } = await import('@libsql/client'); const { drizzle } = await import('drizzle-orm/libsql'); const { migrate } = await import('drizzle-orm/libsql/migrator'); const client = createClient({ url: normaliseSQLiteUrl(credentials.url, 'libsql'), authToken: credentials.authToken, }); const drzl = drizzle(client); const migrateFn = async (config: MigrationConfig) => { return migrate(drzl, config); }; const db: LibSQLDB = { query: async (sql: string, params?: any[]) => { const res = await client.execute({ sql, args: params || [] }); return res.rows as T[]; }, run: async (query: string) => { await client.execute(query); }, batchWithPragma: async (queries: string[]) => { await client.migrate(queries); }, }; type Transaction = Awaited>; const proxy = async (params: ProxyParams) => { const preparedParams = prepareSqliteParams(params.params || []); const result = await client.execute({ sql: params.sql, args: preparedParams, }); if (params.mode === 'array') { return result.rows.map((row) => Object.values(row)); } else { return result.rows; } }; const transactionProxy: TransactionProxy = async (queries) => { const results: (any[] | Error)[] = []; let transaction: Transaction | null = null; try { transaction = await client.transaction(); for (const query of queries) { const result = await transaction.execute(query.sql); results.push(result.rows); } await transaction.commit(); } catch (error) { results.push(error as Error); await transaction?.rollback(); } finally { transaction?.close(); } return results; }; return { ...db, packageName: '@libsql/client', proxy, transactionProxy, migrate: migrateFn }; } console.log( "Please install '@libsql/client' for Drizzle Kit to connect to LibSQL databases", ); process.exit(1); }; ================================================ FILE: drizzle-kit/src/cli/index.ts ================================================ import { command, run } from '@drizzle-team/brocli'; import chalk from 'chalk'; import { check, drop, exportRaw, generate, migrate, pull, push, studio, up } from './schema'; import { ormCoreVersions } from './utils'; const version = async () => { const { npmVersion } = await ormCoreVersions(); const ormVersion = npmVersion ? `drizzle-orm: v${npmVersion}` : ''; const envVersion = process.env.DRIZZLE_KIT_VERSION; const kitVersion = envVersion ? `v${envVersion}` : '--'; const versions = `drizzle-kit: ${kitVersion}\n${ormVersion}`; console.log(chalk.gray(versions), '\n'); }; const legacyCommand = (name: string, newName: string) => { return command({ name, hidden: true, handler: () => { console.log( `This command is deprecated, please use updated '${newName}' command (see https://orm.drizzle.team/kit-docs/upgrade-21#how-to-migrate-to-0210)`, ); }, }); }; const legacy = [ legacyCommand('generate:pg', 'generate'), legacyCommand('generate:mysql', 'generate'), legacyCommand('generate:sqlite', 'generate'), legacyCommand('push:pg', 'push'), legacyCommand('push:mysql', 'push'), legacyCommand('push:sqlite', 'push'), legacyCommand('introspect:pg', 'introspect'), legacyCommand('introspect:mysql', 'introspect'), legacyCommand('introspect:sqlite', 'introspect'), legacyCommand('up:pg', 'up'), legacyCommand('up:mysql', 'up'), legacyCommand('up:sqlite', 'up'), legacyCommand('check:pg', 'check'), legacyCommand('check:mysql', 'check'), legacyCommand('check:sqlite', 'check'), ]; run([generate, migrate, pull, push, studio, up, check, drop, exportRaw, ...legacy], { name: 'drizzle-kit', version: version, }); ================================================ FILE: drizzle-kit/src/cli/schema.ts ================================================ import { boolean, command, number, string } from '@drizzle-team/brocli'; import chalk from 'chalk'; import 'dotenv/config'; import { mkdirSync } from 'fs'; import { renderWithTask } from 'hanji'; import { dialects } from 'src/schemaValidator'; import '../@types/utils'; import { assertUnreachable } from '../global'; import type { Setup } from '../serializer/studio'; import { assertV1OutFolder } from '../utils'; import { certs } from '../utils/certs'; import { checkHandler } from './commands/check'; import { dropMigration } from './commands/drop'; import { upMysqlHandler } from './commands/mysqlUp'; import { upPgHandler } from './commands/pgUp'; import { upSinglestoreHandler } from './commands/singlestoreUp'; import { upSqliteHandler } from './commands/sqliteUp'; import { prepareCheckParams, prepareDropParams, prepareExportConfig, prepareGenerateConfig, prepareMigrateConfig, preparePullConfig, preparePushConfig, prepareStudioConfig, } from './commands/utils'; import { assertOrmCoreVersion, assertPackages, assertStudioNodeVersion, ormVersionGt } from './utils'; import { assertCollisions, drivers, prefixes } from './validations/common'; import { withStyle } from './validations/outputs'; import { error, grey, MigrateProgress } from './views'; const optionDialect = string('dialect') .enum(...dialects) .desc( `Database dialect: 'gel', 'postgresql', 'mysql', 'sqlite', 'turso' or 'singlestore'`, ); const optionOut = string().desc("Output folder, 'drizzle' by default"); const optionConfig = string().desc('Path to drizzle config file'); const optionBreakpoints = boolean().desc( `Prepare SQL statements with breakpoints`, ); const optionDriver = string() .enum(...drivers) .desc('Database driver'); const optionCasing = string().enum('camelCase', 'snake_case').desc('Casing for serialization'); export const generate = command({ name: 'generate', options: { config: optionConfig, dialect: optionDialect, driver: optionDriver, casing: optionCasing, schema: string().desc('Path to a schema file or folder'), out: optionOut, name: string().desc('Migration file name'), breakpoints: optionBreakpoints, custom: boolean() .desc('Prepare empty migration file for custom SQL') .default(false), prefix: string() .enum(...prefixes) .default('index'), }, transform: async (opts) => { const from = assertCollisions( 'generate', opts, ['prefix', 'name', 'custom'], ['driver', 'breakpoints', 'schema', 'out', 'dialect', 'casing'], ); return prepareGenerateConfig(opts, from); }, handler: async (opts) => { await assertOrmCoreVersion(); await assertPackages('drizzle-orm'); // const parsed = cliConfigGenerate.parse(opts); const { prepareAndMigratePg, prepareAndMigrateMysql, prepareAndMigrateSqlite, prepareAndMigrateLibSQL, prepareAndMigrateSingleStore, } = await import('./commands/migrate'); const dialect = opts.dialect; if (dialect === 'postgresql') { await prepareAndMigratePg(opts); } else if (dialect === 'mysql') { await prepareAndMigrateMysql(opts); } else if (dialect === 'sqlite') { await prepareAndMigrateSqlite(opts); } else if (dialect === 'turso') { await prepareAndMigrateLibSQL(opts); } else if (dialect === 'singlestore') { await prepareAndMigrateSingleStore(opts); } else if (dialect === 'gel') { console.log( error( `You can't use 'generate' command with Gel dialect`, ), ); process.exit(1); } else { assertUnreachable(dialect); } }, }); export const migrate = command({ name: 'migrate', options: { config: optionConfig, }, transform: async (opts) => { return await prepareMigrateConfig(opts.config); }, handler: async (opts) => { await assertOrmCoreVersion(); await assertPackages('drizzle-orm'); const { dialect, schema, table, out, credentials } = opts; try { if (dialect === 'postgresql') { if ('driver' in credentials) { const { driver } = credentials; if (driver === 'aws-data-api') { if (!(await ormVersionGt('0.30.10'))) { console.log( "To use 'aws-data-api' driver - please update drizzle-orm to the latest version", ); process.exit(1); } } else if (driver === 'pglite') { if (!(await ormVersionGt('0.30.6'))) { console.log( "To use 'pglite' driver - please update drizzle-orm to the latest version", ); process.exit(1); } } else { assertUnreachable(driver); } } const { preparePostgresDB } = await import('./connections'); const { migrate } = await preparePostgresDB(credentials); await renderWithTask( new MigrateProgress(), migrate({ migrationsFolder: out, migrationsTable: table, migrationsSchema: schema, }), ); } else if (dialect === 'mysql') { const { connectToMySQL } = await import('./connections'); const { migrate } = await connectToMySQL(credentials); await renderWithTask( new MigrateProgress(), migrate({ migrationsFolder: out, migrationsTable: table, migrationsSchema: schema, }), ); } else if (dialect === 'singlestore') { const { connectToSingleStore } = await import('./connections'); const { migrate } = await connectToSingleStore(credentials); await renderWithTask( new MigrateProgress(), migrate({ migrationsFolder: out, migrationsTable: table, migrationsSchema: schema, }), ); } else if (dialect === 'sqlite') { const { connectToSQLite } = await import('./connections'); const { migrate } = await connectToSQLite(credentials); await renderWithTask( new MigrateProgress(), migrate({ migrationsFolder: opts.out, migrationsTable: table, migrationsSchema: schema, }), ); } else if (dialect === 'turso') { const { connectToLibSQL } = await import('./connections'); const { migrate } = await connectToLibSQL(credentials); await renderWithTask( new MigrateProgress(), migrate({ migrationsFolder: opts.out, migrationsTable: table, migrationsSchema: schema, }), ); } else if (dialect === 'gel') { console.log( error( `You can't use 'migrate' command with Gel dialect`, ), ); process.exit(1); } else { assertUnreachable(dialect); } } catch (e) { console.error(e); process.exit(1); } process.exit(0); }, }); const optionsFilters = { tablesFilter: string().desc('Table name filters'), schemaFilters: string().desc('Schema name filters'), extensionsFilters: string().desc( '`Database extensions internal database filters', ), } as const; const optionsDatabaseCredentials = { url: string().desc('Database connection URL'), host: string().desc('Database host'), port: string().desc('Database port'), user: string().desc('Database user'), password: string().desc('Database password'), database: string().desc('Database name'), ssl: string().desc('ssl mode'), // Turso authToken: string('auth-token').desc('Database auth token [Turso]'), // gel tlsSecurity: string('tlsSecurity').desc('tls security mode'), // specific cases driver: optionDriver, } as const; export const push = command({ name: 'push', options: { config: optionConfig, dialect: optionDialect, casing: optionCasing, schema: string().desc('Path to a schema file or folder'), ...optionsFilters, ...optionsDatabaseCredentials, verbose: boolean() .desc('Print all statements for each push') .default(false), strict: boolean().desc('Always ask for confirmation').default(false), force: boolean() .desc( 'Auto-approve all data loss statements. Note: Data loss statements may truncate your tables and data', ) .default(false), }, transform: async (opts) => { const from = assertCollisions( 'push', opts, ['force', 'verbose', 'strict'], [ 'schema', 'dialect', 'driver', 'url', 'host', 'port', 'user', 'password', 'database', 'ssl', 'authToken', 'schemaFilters', 'extensionsFilters', 'tablesFilter', 'casing', 'tlsSecurity', ], ); return preparePushConfig(opts, from); }, handler: async (config) => { await assertPackages('drizzle-orm'); await assertOrmCoreVersion(); const { dialect, schemaPath, strict, verbose, credentials, tablesFilter, schemasFilter, force, casing, entities, } = config; try { if (dialect === 'mysql') { const { mysqlPush } = await import('./commands/push'); await mysqlPush( schemaPath, credentials, tablesFilter, strict, verbose, force, casing, ); } else if (dialect === 'postgresql') { if ('driver' in credentials) { const { driver } = credentials; if (driver === 'aws-data-api') { if (!(await ormVersionGt('0.30.10'))) { console.log( "To use 'aws-data-api' driver - please update drizzle-orm to the latest version", ); process.exit(1); } } else if (driver === 'pglite') { if (!(await ormVersionGt('0.30.6'))) { console.log( "To use 'pglite' driver - please update drizzle-orm to the latest version", ); process.exit(1); } } else { assertUnreachable(driver); } } const { pgPush } = await import('./commands/push'); await pgPush( schemaPath, verbose, strict, credentials, tablesFilter, schemasFilter, entities, force, casing, ); } else if (dialect === 'sqlite') { const { sqlitePush } = await import('./commands/push'); await sqlitePush( schemaPath, verbose, strict, credentials, tablesFilter, force, casing, ); } else if (dialect === 'turso') { const { libSQLPush } = await import('./commands/push'); await libSQLPush( schemaPath, verbose, strict, credentials, tablesFilter, force, casing, ); } else if (dialect === 'singlestore') { const { singlestorePush } = await import('./commands/push'); await singlestorePush( schemaPath, credentials, tablesFilter, strict, verbose, force, casing, ); } else if (dialect === 'gel') { console.log( error( `You can't use 'push' command with Gel dialect`, ), ); process.exit(1); } else { assertUnreachable(dialect); } } catch (e) { console.error(e); } process.exit(0); }, }); export const check = command({ name: 'check', options: { config: optionConfig, dialect: optionDialect, out: optionOut, }, transform: async (opts) => { const from = assertCollisions('check', opts, [], ['dialect', 'out']); return prepareCheckParams(opts, from); }, handler: async (config) => { await assertOrmCoreVersion(); const { out, dialect } = config; checkHandler(out, dialect); console.log("Everything's fine 🐶🔥"); }, }); export const up = command({ name: 'up', options: { config: optionConfig, dialect: optionDialect, out: optionOut, }, transform: async (opts) => { const from = assertCollisions('check', opts, [], ['dialect', 'out']); return prepareCheckParams(opts, from); }, handler: async (config) => { await assertOrmCoreVersion(); const { out, dialect } = config; await assertPackages('drizzle-orm'); if (dialect === 'postgresql') { upPgHandler(out); } if (dialect === 'mysql') { upMysqlHandler(out); } if (dialect === 'sqlite' || dialect === 'turso') { upSqliteHandler(out); } if (dialect === 'singlestore') { upSinglestoreHandler(out); } if (dialect === 'gel') { console.log( error( `You can't use 'up' command with Gel dialect`, ), ); process.exit(1); } }, }); export const pull = command({ name: 'introspect', aliases: ['pull'], options: { config: optionConfig, dialect: optionDialect, out: optionOut, breakpoints: optionBreakpoints, casing: string('introspect-casing').enum('camel', 'preserve'), ...optionsFilters, ...optionsDatabaseCredentials, }, transform: async (opts) => { const from = assertCollisions( 'introspect', opts, [], [ 'dialect', 'driver', 'out', 'url', 'host', 'port', 'user', 'password', 'database', 'ssl', 'authToken', 'casing', 'breakpoints', 'tablesFilter', 'schemaFilters', 'extensionsFilters', 'tlsSecurity', ], ); return preparePullConfig(opts, from); }, handler: async (config) => { await assertPackages('drizzle-orm'); await assertOrmCoreVersion(); const { dialect, credentials, out, casing, breakpoints, tablesFilter, schemasFilter, prefix, entities, } = config; mkdirSync(out, { recursive: true }); console.log( grey( `Pulling from [${ schemasFilter .map((it) => `'${it}'`) .join(', ') }] list of schemas`, ), ); console.log(); try { if (dialect === 'postgresql') { if ('driver' in credentials) { const { driver } = credentials; if (driver === 'aws-data-api') { if (!(await ormVersionGt('0.30.10'))) { console.log( "To use 'aws-data-api' driver - please update drizzle-orm to the latest version", ); process.exit(1); } } else if (driver === 'pglite') { if (!(await ormVersionGt('0.30.6'))) { console.log( "To use 'pglite' driver - please update drizzle-orm to the latest version", ); process.exit(1); } } else { assertUnreachable(driver); } } const { introspectPostgres } = await import('./commands/introspect'); await introspectPostgres( casing, out, breakpoints, credentials, tablesFilter, schemasFilter, prefix, entities, ); } else if (dialect === 'mysql') { const { introspectMysql } = await import('./commands/introspect'); await introspectMysql( casing, out, breakpoints, credentials, tablesFilter, prefix, ); } else if (dialect === 'sqlite') { const { introspectSqlite } = await import('./commands/introspect'); await introspectSqlite( casing, out, breakpoints, credentials, tablesFilter, prefix, ); } else if (dialect === 'turso') { const { introspectLibSQL } = await import('./commands/introspect'); await introspectLibSQL( casing, out, breakpoints, credentials, tablesFilter, prefix, ); } else if (dialect === 'singlestore') { const { introspectSingleStore } = await import('./commands/introspect'); await introspectSingleStore( casing, out, breakpoints, credentials, tablesFilter, prefix, ); } else if (dialect === 'gel') { const { introspectGel } = await import('./commands/introspect'); await introspectGel( casing, out, breakpoints, credentials, tablesFilter, schemasFilter, prefix, entities, ); } else { assertUnreachable(dialect); } } catch (e) { console.error(e); } process.exit(0); }, }); export const drop = command({ name: 'drop', options: { config: optionConfig, out: optionOut, driver: optionDriver, }, transform: async (opts) => { const from = assertCollisions('check', opts, [], ['driver', 'out']); return prepareDropParams(opts, from); }, handler: async (config) => { await assertOrmCoreVersion(); assertV1OutFolder(config.out); await dropMigration(config); }, }); export const studio = command({ name: 'studio', options: { config: optionConfig, port: number().desc('Custom port for drizzle studio [default=4983]'), host: string().desc('Custom host for drizzle studio [default=0.0.0.0]'), verbose: boolean() .default(false) .desc('Print all stataments that are executed by Studio'), }, handler: async (opts) => { await assertOrmCoreVersion(); await assertPackages('drizzle-orm'); assertStudioNodeVersion(); const { dialect, schema: schemaPath, port, host, credentials, casing, } = await prepareStudioConfig(opts); const { drizzleForPostgres, preparePgSchema, prepareMySqlSchema, drizzleForMySQL, prepareSQLiteSchema, drizzleForSQLite, prepareSingleStoreSchema, drizzleForSingleStore, drizzleForLibSQL, } = await import('../serializer/studio'); let setup: Setup; try { if (dialect === 'postgresql') { if ('driver' in credentials) { const { driver } = credentials; if (driver === 'aws-data-api') { if (!(await ormVersionGt('0.30.10'))) { console.log( "To use 'aws-data-api' driver - please update drizzle-orm to the latest version", ); process.exit(1); } } else if (driver === 'pglite') { if (!(await ormVersionGt('0.30.6'))) { console.log( "To use 'pglite' driver - please update drizzle-orm to the latest version", ); process.exit(1); } } else { assertUnreachable(driver); } } const { schema, relations, files } = schemaPath ? await preparePgSchema(schemaPath) : { schema: {}, relations: {}, files: [] }; setup = await drizzleForPostgres(credentials, schema, relations, files, casing); } else if (dialect === 'mysql') { const { schema, relations, files } = schemaPath ? await prepareMySqlSchema(schemaPath) : { schema: {}, relations: {}, files: [] }; setup = await drizzleForMySQL(credentials, schema, relations, files, casing); } else if (dialect === 'sqlite') { const { schema, relations, files } = schemaPath ? await prepareSQLiteSchema(schemaPath) : { schema: {}, relations: {}, files: [] }; setup = await drizzleForSQLite(credentials, schema, relations, files, casing); } else if (dialect === 'turso') { const { schema, relations, files } = schemaPath ? await prepareSQLiteSchema(schemaPath) : { schema: {}, relations: {}, files: [] }; setup = await drizzleForLibSQL(credentials, schema, relations, files, casing); } else if (dialect === 'singlestore') { const { schema, relations, files } = schemaPath ? await prepareSingleStoreSchema(schemaPath) : { schema: {}, relations: {}, files: [] }; setup = await drizzleForSingleStore( credentials, schema, relations, files, casing, ); } else if (dialect === 'gel') { console.log( error( `You can't use 'studio' command with Gel dialect`, ), ); process.exit(1); } else { assertUnreachable(dialect); } const { prepareServer } = await import('../serializer/studio'); const server = await prepareServer(setup); console.log(); console.log( withStyle.fullWarning( 'Drizzle Studio is currently in Beta. If you find anything that is not working as expected or should be improved, feel free to create an issue on GitHub: https://github.com/drizzle-team/drizzle-kit-mirror/issues/new or write to us on Discord: https://discord.gg/WcRKz2FFxN', ), ); const { key, cert } = (await certs()) || {}; server.start({ host, port, key, cert, cb: (err, _address) => { if (err) { console.error(err); } else { const queryParams: { port?: number; host?: string } = {}; if (port !== 4983) { queryParams.port = port; } if (host !== '127.0.0.1') { queryParams.host = host; } const queryString = Object.keys(queryParams) .map((key: keyof { port?: number; host?: string }) => { return `${key}=${queryParams[key]}`; }) .join('&'); console.log( `\nDrizzle Studio is up and running on ${ chalk.blue( `https://local.drizzle.studio${queryString ? `?${queryString}` : ''}`, ) }`, ); } }, }); } catch (e) { console.error(e); process.exit(0); } }, }); export const exportRaw = command({ name: 'export', desc: 'Generate diff between current state and empty state in specified formats: sql', options: { sql: boolean('sql').default(true).desc('Generate as sql'), config: optionConfig, dialect: optionDialect, schema: string().desc('Path to a schema file or folder'), }, transform: async (opts) => { const from = assertCollisions('export', opts, ['sql'], ['dialect', 'schema']); return prepareExportConfig(opts, from); }, handler: async (opts) => { await assertOrmCoreVersion(); await assertPackages('drizzle-orm'); const { prepareAndExportPg, prepareAndExportMysql, prepareAndExportSqlite, prepareAndExportLibSQL, prepareAndExportSinglestore, } = await import( './commands/migrate' ); const dialect = opts.dialect; if (dialect === 'postgresql') { await prepareAndExportPg(opts); } else if (dialect === 'mysql') { await prepareAndExportMysql(opts); } else if (dialect === 'sqlite') { await prepareAndExportSqlite(opts); } else if (dialect === 'turso') { await prepareAndExportLibSQL(opts); } else if (dialect === 'singlestore') { await prepareAndExportSinglestore(opts); } else if (dialect === 'gel') { console.log( error( `You can't use 'export' command with Gel dialect`, ), ); process.exit(1); } else { assertUnreachable(dialect); } }, }); ================================================ FILE: drizzle-kit/src/cli/selector-ui.ts ================================================ import chalk from 'chalk'; import { Prompt, SelectState } from 'hanji'; export class Select extends Prompt<{ index: number; value: string }> { private readonly data: SelectState<{ label: string; value: string }>; constructor(items: string[]) { super(); this.on('attach', (terminal) => terminal.toggleCursor('hide')); this.on('detach', (terminal) => terminal.toggleCursor('show')); this.data = new SelectState( items.map((it) => ({ label: it, value: `${it}-value` })), ); this.data.bind(this); } render(status: 'idle' | 'submitted' | 'aborted'): string { if (status === 'submitted' || status === 'aborted') return ''; let text = ``; this.data.items.forEach((it, idx) => { text += idx === this.data.selectedIdx ? `${chalk.green('❯ ' + it.label)}` : ` ${it.label}`; text += idx != this.data.items.length - 1 ? '\n' : ''; }); return text; } result() { return { index: this.data.selectedIdx, value: this.data.items[this.data.selectedIdx]!.value!, }; } } ================================================ FILE: drizzle-kit/src/cli/utils.ts ================================================ import semver from 'semver'; import { err, warning } from './views'; export const assertExists = (it?: any) => { if (!it) throw new Error(); }; export const ormVersionGt = async (version: string) => { const { npmVersion } = await import('drizzle-orm/version'); if (!semver.gte(npmVersion, version)) { return false; } return true; }; export const assertStudioNodeVersion = () => { if (semver.gte(process.version, '18.0.0')) return; err('Drizzle Studio requires NodeJS v18 or above'); process.exit(1); }; export const checkPackage = async (it: string) => { try { await import(it); return true; } catch (e) { return false; } }; export const assertPackages = async (...pkgs: string[]) => { try { for (let i = 0; i < pkgs.length; i++) { const it = pkgs[i]; await import(it); } } catch (e) { err( `please install required packages: ${ pkgs .map((it) => `'${it}'`) .join(' ') }`, ); process.exit(1); } }; // ex: either pg or postgres are needed export const assertEitherPackage = async ( ...pkgs: string[] ): Promise => { const availables = [] as string[]; for (let i = 0; i < pkgs.length; i++) { try { const it = pkgs[i]; await import(it); availables.push(it); } catch (e) {} } if (availables.length > 0) { return availables; } err( `Please install one of those packages are needed: ${ pkgs .map((it) => `'${it}'`) .join(' or ') }`, ); process.exit(1); }; const requiredApiVersion = 10; export const assertOrmCoreVersion = async () => { try { const { compatibilityVersion } = await import('drizzle-orm/version'); await import('drizzle-orm/relations'); if (compatibilityVersion && compatibilityVersion === requiredApiVersion) { return; } if (!compatibilityVersion || compatibilityVersion < requiredApiVersion) { console.log( 'This version of drizzle-kit requires newer version of drizzle-orm\nPlease update drizzle-orm package to the latest version 👍', ); } else { console.log( 'This version of drizzle-kit is outdated\nPlease update drizzle-kit package to the latest version 👍', ); } } catch (e) { console.log('Please install latest version of drizzle-orm'); } process.exit(1); }; export const ormCoreVersions = async () => { try { const { compatibilityVersion, npmVersion } = await import( 'drizzle-orm/version' ); return { compatibilityVersion, npmVersion }; } catch (e) { return {}; } }; ================================================ FILE: drizzle-kit/src/cli/validations/cli.ts ================================================ import { array, boolean, intersection, literal, object, string, TypeOf, union } from 'zod'; import { dialect } from '../../schemaValidator'; import { casing, casingType, prefix } from './common'; export const cliConfigGenerate = object({ dialect: dialect.optional(), schema: union([string(), string().array()]).optional(), out: string().optional().default('./drizzle'), config: string().optional(), name: string().optional(), prefix: prefix.optional(), breakpoints: boolean().optional().default(true), custom: boolean().optional().default(false), }).strict(); export type CliConfigGenerate = TypeOf; export const pushParams = object({ dialect: dialect, casing: casingType.optional(), schema: union([string(), string().array()]), tablesFilter: union([string(), string().array()]).optional(), schemaFilter: union([string(), string().array()]) .optional() .default(['public']), extensionsFilters: literal('postgis').array().optional(), verbose: boolean().optional(), strict: boolean().optional(), entities: object({ roles: boolean().or(object({ provider: string().optional(), include: string().array().optional(), exclude: string().array().optional(), })).optional().default(false), }).optional(), }).passthrough(); export type PushParams = TypeOf; export const pullParams = object({ config: string().optional(), dialect: dialect, out: string().optional().default('drizzle'), tablesFilter: union([string(), string().array()]).optional(), schemaFilter: union([string(), string().array()]) .optional() .default(['public']), extensionsFilters: literal('postgis').array().optional(), casing, breakpoints: boolean().optional().default(true), migrations: object({ prefix: prefix.optional().default('index'), }).optional(), entities: object({ roles: boolean().or(object({ provider: string().optional(), include: string().array().optional(), exclude: string().array().optional(), })).optional().default(false), }).optional(), }).passthrough(); export type Entities = TypeOf['entities']; export type PullParams = TypeOf; export const configCheck = object({ dialect: dialect.optional(), out: string().optional(), }); export const cliConfigCheck = intersection( object({ config: string().optional(), }), configCheck, ); export type CliCheckConfig = TypeOf; ================================================ FILE: drizzle-kit/src/cli/validations/common.ts ================================================ import chalk from 'chalk'; import { UnionToIntersection } from 'hono/utils/types'; import { any, boolean, enum as enum_, literal, object, string, TypeOf, union } from 'zod'; import { dialect } from '../../schemaValidator'; import { outputs } from './outputs'; export type Commands = | 'introspect' | 'generate' | 'check' | 'up' | 'drop' | 'push' | 'export'; type Expand = T extends infer O ? { [K in keyof O]: O[K] } : never; type IsUnion = [T] extends [UnionToIntersection] ? false : true; type LastTupleElement = TArr extends [ ...start: infer _, end: infer Last, ] ? Last : never; export type UniqueArrayOfUnion = Exclude< TUnion, TArray[number] > extends never ? [TUnion] : [...TArray, Exclude]; export const assertCollisions = < T extends Record, TKeys extends (keyof T)[], TRemainingKeys extends Exclude[], Exhaustive extends TRemainingKeys, UNIQ extends UniqueArrayOfUnion, >( command: Commands, options: T, whitelist: Exclude, remainingKeys: UniqueArrayOfUnion, ): IsUnion> extends false ? 'cli' | 'config' : TKeys => { const { config, ...rest } = options; let atLeastOneParam = false; for (const key of Object.keys(rest)) { if (whitelist.includes(key)) continue; atLeastOneParam = atLeastOneParam || rest[key] !== undefined; } if (!config && atLeastOneParam) { return 'cli' as any; } if (!atLeastOneParam) { return 'config' as any; } // if config and cli - return error - write a reason console.log(outputs.common.ambiguousParams(command)); process.exit(1); }; export const sqliteDriversLiterals = [ literal('d1-http'), literal('expo'), literal('durable-sqlite'), ] as const; export const postgresqlDriversLiterals = [ literal('aws-data-api'), literal('pglite'), ] as const; export const prefixes = [ 'index', 'timestamp', 'supabase', 'unix', 'none', ] as const; export const prefix = enum_(prefixes); export type Prefix = (typeof prefixes)[number]; { const _: Prefix = '' as TypeOf; } export const casingTypes = ['snake_case', 'camelCase'] as const; export const casingType = enum_(casingTypes); export type CasingType = (typeof casingTypes)[number]; export const sqliteDriver = union(sqliteDriversLiterals); export const postgresDriver = union(postgresqlDriversLiterals); export const driver = union([sqliteDriver, postgresDriver]); export const configMigrations = object({ table: string().optional(), schema: string().optional(), prefix: prefix.optional().default('index'), }).optional(); export const configCommonSchema = object({ dialect: dialect, schema: union([string(), string().array()]).optional(), out: string().optional(), breakpoints: boolean().optional().default(true), verbose: boolean().optional().default(false), driver: driver.optional(), tablesFilter: union([string(), string().array()]).optional(), schemaFilter: union([string(), string().array()]).default(['public']), migrations: configMigrations, dbCredentials: any().optional(), casing: casingType.optional(), sql: boolean().default(true), }).passthrough(); export const casing = union([literal('camel'), literal('preserve')]).default( 'camel', ); export const introspectParams = object({ schema: union([string(), string().array()]).optional(), out: string().optional().default('./drizzle'), breakpoints: boolean().default(true), tablesFilter: union([string(), string().array()]).optional(), schemaFilter: union([string(), string().array()]).default(['public']), introspect: object({ casing, }).default({ casing: 'camel' }), }); export type IntrospectParams = TypeOf; export type Casing = TypeOf; export const configIntrospectCliSchema = object({ schema: union([string(), string().array()]).optional(), out: string().optional().default('./drizzle'), breakpoints: boolean().default(true), tablesFilter: union([string(), string().array()]).optional(), schemaFilter: union([string(), string().array()]).default(['public']), introspectCasing: union([literal('camel'), literal('preserve')]).default( 'camel', ), }); export const configGenerateSchema = object({ schema: union([string(), string().array()]), out: string().optional().default('./drizzle'), breakpoints: boolean().default(true), }); export type GenerateSchema = TypeOf; export const configPushSchema = object({ dialect: dialect, schema: union([string(), string().array()]), tablesFilter: union([string(), string().array()]).optional(), schemaFilter: union([string(), string().array()]).default(['public']), verbose: boolean().default(false), strict: boolean().default(false), out: string().optional(), }); export type CliConfig = TypeOf; export const drivers = ['d1-http', 'expo', 'aws-data-api', 'pglite', 'durable-sqlite'] as const; export type Driver = (typeof drivers)[number]; const _: Driver = '' as TypeOf; export const wrapParam = ( name: string, param: any | undefined, optional: boolean = false, type?: 'url' | 'secret', ) => { const check = `[${chalk.green('✓')}]`; const cross = `[${chalk.red('x')}]`; if (typeof param === 'string') { if (param.length === 0) { return ` ${cross} ${name}: ''`; } if (type === 'secret') { return ` ${check} ${name}: '*****'`; } else if (type === 'url') { return ` ${check} ${name}: '${param.replace(/(?<=:\/\/[^:\n]*:)([^@]*)/, '****')}'`; } return ` ${check} ${name}: '${param}'`; } if (optional) { return chalk.gray(` ${name}?: `); } return ` ${cross} ${name}: ${chalk.gray('undefined')}`; }; ================================================ FILE: drizzle-kit/src/cli/validations/gel.ts ================================================ import { coerce, literal, object, string, TypeOf, undefined as undefinedType, union } from 'zod'; import { error } from '../views'; import { wrapParam } from './common'; export const gelCredentials = union([ object({ driver: undefinedType(), host: string().min(1), port: coerce.number().min(1).optional(), user: string().min(1).optional(), password: string().min(1).optional(), database: string().min(1), tlsSecurity: union([ literal('insecure'), literal('no_host_verification'), literal('strict'), literal('default'), ]).optional(), }).transform((o) => { delete o.driver; return o as Omit; }), object({ driver: undefinedType(), url: string().min(1), tlsSecurity: union([ literal('insecure'), literal('no_host_verification'), literal('strict'), literal('default'), ]).optional(), }).transform<{ url: string; tlsSecurity?: | 'insecure' | 'no_host_verification' | 'strict' | 'default'; }>((o) => { delete o.driver; return o; }), object({ driver: undefinedType(), }).transform((o) => { return undefined; }), ]); export type GelCredentials = TypeOf; export const printConfigConnectionIssues = ( options: Record, ) => { if ('url' in options) { let text = `Please provide required params for Gel driver:\n`; console.log(error(text)); console.log(wrapParam('url', options.url, false, 'url')); process.exit(1); } if ('host' in options || 'database' in options) { let text = `Please provide required params for Gel driver:\n`; console.log(error(text)); console.log(wrapParam('host', options.host)); console.log(wrapParam('port', options.port, true)); console.log(wrapParam('user', options.user, true)); console.log(wrapParam('password', options.password, true, 'secret')); console.log(wrapParam('database', options.database)); console.log(wrapParam('tlsSecurity', options.tlsSecurity, true)); process.exit(1); } console.log( error( `Either connection "url" or "host", "database" are required for Gel database connection`, ), ); process.exit(1); }; ================================================ FILE: drizzle-kit/src/cli/validations/libsql.ts ================================================ import { softAssertUnreachable } from 'src/global'; import { object, string, TypeOf } from 'zod'; import { error } from '../views'; import { wrapParam } from './common'; export const libSQLCredentials = object({ url: string().min(1), authToken: string().min(1).optional(), }); export type LibSQLCredentials = { url: string; authToken?: string; }; const _: LibSQLCredentials = {} as TypeOf; export const printConfigConnectionIssues = ( options: Record, command: 'generate' | 'migrate' | 'push' | 'pull' | 'studio', ) => { let text = `Please provide required params for 'turso' dialect:\n`; console.log(error(text)); console.log(wrapParam('url', options.url)); console.log(wrapParam('authToken', options.authToken, true, 'secret')); process.exit(1); }; ================================================ FILE: drizzle-kit/src/cli/validations/mysql.ts ================================================ import { boolean, coerce, object, string, TypeOf, union } from 'zod'; import { error } from '../views'; import { wrapParam } from './common'; import { outputs } from './outputs'; export const mysqlCredentials = union([ object({ host: string().min(1), port: coerce.number().min(1).optional(), user: string().min(1).optional(), password: string().min(1).optional(), database: string().min(1), ssl: union([ string(), object({ pfx: string().optional(), key: string().optional(), passphrase: string().optional(), cert: string().optional(), ca: union([string(), string().array()]).optional(), crl: union([string(), string().array()]).optional(), ciphers: string().optional(), rejectUnauthorized: boolean().optional(), }), ]).optional(), }), object({ url: string().min(1), }), ]); export type MysqlCredentials = TypeOf; export const printCliConnectionIssues = (options: any) => { const { uri, host, database } = options || {}; if (!uri && (!host || !database)) { console.log(outputs.mysql.connection.required()); } }; export const printConfigConnectionIssues = ( options: Record, ) => { if ('url' in options) { let text = `Please provide required params for MySQL driver:\n`; console.log(error(text)); console.log(wrapParam('url', options.url, false, 'url')); process.exit(1); } let text = `Please provide required params for MySQL driver:\n`; console.log(error(text)); console.log(wrapParam('host', options.host)); console.log(wrapParam('port', options.port, true)); console.log(wrapParam('user', options.user, true)); console.log(wrapParam('password', options.password, true, 'secret')); console.log(wrapParam('database', options.database)); console.log(wrapParam('ssl', options.ssl, true)); process.exit(1); }; ================================================ FILE: drizzle-kit/src/cli/validations/outputs.ts ================================================ import chalk from 'chalk'; import { sqliteDriversLiterals } from './common'; export const withStyle = { error: (str: string) => `${chalk.red(`${chalk.white.bgRed(' Invalid input ')} ${str}`)}`, warning: (str: string) => `${chalk.white.bgGray(' Warning ')} ${str}`, errorWarning: (str: string) => `${chalk.red(`${chalk.white.bgRed(' Warning ')} ${str}`)}`, fullWarning: (str: string) => `${chalk.black.bgYellow(' Warning ')} ${chalk.bold(str)}`, suggestion: (str: string) => `${chalk.white.bgGray(' Suggestion ')} ${str}`, info: (str: string) => `${chalk.grey(str)}`, }; export const outputs = { studio: { drivers: (param: string) => withStyle.error( `"${param}" is not a valid driver. Available drivers: "pg", "mysql2", "better-sqlite", "libsql", "turso". You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, ), noCredentials: () => withStyle.error( `Please specify a 'dbCredentials' param in config. It will help drizzle to know how to query you database. You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, ), noDriver: () => withStyle.error( `Please specify a 'driver' param in config. It will help drizzle to know how to query you database. You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, ), noDialect: () => withStyle.error( `Please specify 'dialect' param in config, either of 'postgresql', 'mysql', 'sqlite', turso or singlestore`, ), }, common: { ambiguousParams: (command: string) => withStyle.error( `You can't use both --config and other cli options for ${command} command`, ), schema: (command: string) => withStyle.error(`"--schema" is a required field for ${command} command`), }, postgres: { connection: { required: () => withStyle.error( `Either "url" or "host", "database" are required for database connection`, ), awsDataApi: () => withStyle.error( "You need to provide 'database', 'secretArn' and 'resourceArn' for Drizzle Kit to connect to AWS Data API", ), }, }, mysql: { connection: { driver: () => withStyle.error(`Only "mysql2" is available options for "--driver"`), required: () => withStyle.error( `Either "url" or "host", "database" are required for database connection`, ), }, }, sqlite: { connection: { driver: () => { const listOfDrivers = sqliteDriversLiterals .map((it) => `'${it.value}'`) .join(', '); return withStyle.error( `Either ${listOfDrivers} are available options for 'driver' param`, ); }, url: (driver: string) => withStyle.error( `"url" is a required option for driver "${driver}". You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, ), authToken: (driver: string) => withStyle.error( `"authToken" is a required option for driver "${driver}". You can read more about drizzle.config: https://orm.drizzle.team/kit-docs/config-reference`, ), }, introspect: {}, push: {}, }, singlestore: { connection: { driver: () => withStyle.error(`Only "mysql2" is available options for "--driver"`), required: () => withStyle.error( `Either "url" or "host", "database" are required for database connection`, ), }, }, }; ================================================ FILE: drizzle-kit/src/cli/validations/postgres.ts ================================================ import { boolean, coerce, literal, object, string, TypeOf, undefined, union } from 'zod'; import { error } from '../views'; import { wrapParam } from './common'; export const postgresCredentials = union([ object({ driver: undefined(), host: string().min(1), port: coerce.number().min(1).optional(), user: string().min(1).optional(), password: string().min(1).optional(), database: string().min(1), ssl: union([ literal('require'), literal('allow'), literal('prefer'), literal('verify-full'), boolean(), object({}).passthrough(), ]).optional(), }).transform((o) => { delete o.driver; return o as Omit; }), object({ driver: undefined(), url: string().min(1), }).transform<{ url: string }>((o) => { delete o.driver; return o; }), object({ driver: literal('aws-data-api'), database: string().min(1), secretArn: string().min(1), resourceArn: string().min(1), }), object({ driver: literal('pglite'), url: string().min(1), }), ]); export type PostgresCredentials = TypeOf; export const printConfigConnectionIssues = ( options: Record, ) => { if (options.driver === 'aws-data-api') { let text = `Please provide required params for AWS Data API driver:\n`; console.log(error(text)); console.log(wrapParam('database', options.database)); console.log(wrapParam('secretArn', options.secretArn, false, 'secret')); console.log(wrapParam('resourceArn', options.resourceArn, false, 'secret')); process.exit(1); } if ('url' in options) { let text = `Please provide required params for Postgres driver:\n`; console.log(error(text)); console.log(wrapParam('url', options.url, false, 'url')); process.exit(1); } if ('host' in options || 'database' in options) { let text = `Please provide required params for Postgres driver:\n`; console.log(error(text)); console.log(wrapParam('host', options.host)); console.log(wrapParam('port', options.port, true)); console.log(wrapParam('user', options.user, true)); console.log(wrapParam('password', options.password, true, 'secret')); console.log(wrapParam('database', options.database)); console.log(wrapParam('ssl', options.ssl, true)); process.exit(1); } console.log( error( `Either connection "url" or "host", "database" are required for PostgreSQL database connection`, ), ); process.exit(1); }; ================================================ FILE: drizzle-kit/src/cli/validations/singlestore.ts ================================================ import { boolean, coerce, object, string, TypeOf, union } from 'zod'; import { error } from '../views'; import { wrapParam } from './common'; import { outputs } from './outputs'; export const singlestoreCredentials = union([ object({ host: string().min(1), port: coerce.number().min(1).optional(), user: string().min(1).optional(), password: string().min(1).optional(), database: string().min(1), ssl: union([ string(), object({ pfx: string().optional(), key: string().optional(), passphrase: string().optional(), cert: string().optional(), ca: union([string(), string().array()]).optional(), crl: union([string(), string().array()]).optional(), ciphers: string().optional(), rejectUnauthorized: boolean().optional(), }), ]).optional(), }), object({ url: string().min(1), }), ]); export type SingleStoreCredentials = TypeOf; export const printCliConnectionIssues = (options: any) => { const { uri, host, database } = options || {}; if (!uri && (!host || !database)) { console.log(outputs.singlestore.connection.required()); } }; export const printConfigConnectionIssues = ( options: Record, ) => { if ('url' in options) { let text = `Please provide required params for SingleStore driver:\n`; console.log(error(text)); console.log(wrapParam('url', options.url, false, 'url')); process.exit(1); } let text = `Please provide required params for SingleStore driver:\n`; console.log(error(text)); console.log(wrapParam('host', options.host)); console.log(wrapParam('port', options.port, true)); console.log(wrapParam('user', options.user, true)); console.log(wrapParam('password', options.password, true, 'secret')); console.log(wrapParam('database', options.database)); console.log(wrapParam('ssl', options.ssl, true)); process.exit(1); }; ================================================ FILE: drizzle-kit/src/cli/validations/sqlite.ts ================================================ import { softAssertUnreachable } from 'src/global'; import { literal, object, string, TypeOf, undefined, union } from 'zod'; import { error } from '../views'; import { sqliteDriver, wrapParam } from './common'; export const sqliteCredentials = union([ object({ driver: literal('turso'), url: string().min(1), authToken: string().min(1).optional(), }), object({ driver: literal('d1-http'), accountId: string().min(1), databaseId: string().min(1), token: string().min(1), }), object({ driver: undefined(), url: string().min(1), }).transform<{ url: string }>((o) => { delete o.driver; return o; }), ]); export type SqliteCredentials = | { driver: 'd1-http'; accountId: string; databaseId: string; token: string; } | { url: string; }; const _: SqliteCredentials = {} as TypeOf; export const printConfigConnectionIssues = ( options: Record, command: 'generate' | 'migrate' | 'push' | 'pull' | 'studio', ) => { const parsedDriver = sqliteDriver.safeParse(options.driver); const driver = parsedDriver.success ? parsedDriver.data : ('' as never); if (driver === 'expo') { if (command === 'migrate') { console.log( error( `You can't use 'migrate' command with Expo SQLite, please follow migration instructions in our docs - https://orm.drizzle.team/docs/get-started-sqlite#expo-sqlite`, ), ); } else if (command === 'studio') { console.log( error( `You can't use 'studio' command with Expo SQLite, please use Expo Plugin https://www.npmjs.com/package/expo-drizzle-studio-plugin`, ), ); } else if (command === 'pull') { console.log(error("You can't use 'pull' command with Expo SQLite")); } else if (command === 'push') { console.log(error("You can't use 'push' command with Expo SQLite")); } else { console.log(error('Unexpected error with expo driver 🤔')); } process.exit(1); } else if (driver === 'd1-http') { let text = `Please provide required params for D1 HTTP driver:\n`; console.log(error(text)); console.log(wrapParam('accountId', options.accountId)); console.log(wrapParam('databaseId', options.databaseId)); console.log(wrapParam('token', options.token, false, 'secret')); process.exit(1); } else if (driver === 'durable-sqlite') { if (command === 'migrate') { console.log( error( `You can't use 'migrate' command with SQLite Durable Objects`, ), ); } else if (command === 'studio') { console.log( error( `You can't use 'studio' command with SQLite Durable Objects`, ), ); } else if (command === 'pull') { console.log(error("You can't use 'pull' command with SQLite Durable Objects")); } else if (command === 'push') { console.log(error("You can't use 'push' command with SQLite Durable Objects")); } else { console.log(error('Unexpected error with SQLite Durable Object driver 🤔')); } process.exit(1); } else { softAssertUnreachable(driver); } let text = `Please provide required params:\n`; console.log(error(text)); console.log(wrapParam('url', options.url)); process.exit(1); }; ================================================ FILE: drizzle-kit/src/cli/validations/studio.ts ================================================ import { coerce, intersection, object, string, TypeOf, union } from 'zod'; import { dialect } from '../../schemaValidator'; import { casingType } from './common'; import { mysqlCredentials } from './mysql'; import { postgresCredentials } from './postgres'; import { sqliteCredentials } from './sqlite'; export const credentials = intersection( postgresCredentials, mysqlCredentials, sqliteCredentials, ); export type Credentials = TypeOf; export const studioCliParams = object({ port: coerce.number().optional().default(4983), host: string().optional().default('127.0.0.1'), config: string().optional(), }); export const studioConfig = object({ dialect, schema: union([string(), string().array()]).optional(), casing: casingType.optional(), }); ================================================ FILE: drizzle-kit/src/cli/views.ts ================================================ import chalk from 'chalk'; import { Prompt, render, SelectState, TaskView } from 'hanji'; import type { CommonSchema } from '../schemaValidator'; import { objectValues } from '../utils'; import type { Named, NamedWithSchema } from './commands/migrate'; export const warning = (msg: string) => { render(`[${chalk.yellow('Warning')}] ${msg}`); }; export const err = (msg: string) => { render(`${chalk.bold.red('Error')} ${msg}`); }; export const info = (msg: string, greyMsg: string = ''): string => { return `${chalk.blue.bold('Info:')} ${msg} ${greyMsg ? chalk.grey(greyMsg) : ''}`.trim(); }; export const grey = (msg: string): string => { return chalk.grey(msg); }; export const error = (error: string, greyMsg: string = ''): string => { return `${chalk.bgRed.bold(' Error ')} ${error} ${greyMsg ? chalk.grey(greyMsg) : ''}`.trim(); }; export const schema = (schema: CommonSchema): string => { type TableEntry = (typeof schema)['tables'][keyof (typeof schema)['tables']]; const tables = Object.values(schema.tables) as unknown as TableEntry[]; let msg = chalk.bold(`${tables.length} tables\n`); msg += tables .map((t) => { const columnsCount = Object.values(t.columns).length; const indexesCount = Object.values(t.indexes).length; let foreignKeys: number = 0; // Singlestore doesn't have foreign keys if (schema.dialect !== 'singlestore') { // @ts-expect-error foreignKeys = Object.values(t.foreignKeys).length; } return `${chalk.bold.blue(t.name)} ${ chalk.gray( `${columnsCount} columns ${indexesCount} indexes ${foreignKeys} fks`, ) }`; }) .join('\n'); msg += '\n'; const enums = objectValues( 'enums' in schema ? 'values' in schema['enums'] ? schema['enums'] : {} : {}, ); if (enums.length > 0) { msg += '\n'; msg += chalk.bold(`${enums.length} enums\n`); msg += enums .map((it) => { return `${chalk.bold.blue(it.name)} ${ chalk.gray( `[${Object.values(it.values).join(', ')}]`, ) }`; }) .join('\n'); msg += '\n'; } return msg; }; export interface RenamePropmtItem { from: T; to: T; } export const isRenamePromptItem = ( item: RenamePropmtItem | T, ): item is RenamePropmtItem => { return 'from' in item && 'to' in item; }; export class ResolveColumnSelect extends Prompt< RenamePropmtItem | T > { private readonly data: SelectState | T>; constructor( private readonly tableName: string, private readonly base: Named, data: (RenamePropmtItem | T)[], ) { super(); this.on('attach', (terminal) => terminal.toggleCursor('hide')); this.data = new SelectState(data); this.data.bind(this); } render(status: 'idle' | 'submitted' | 'aborted'): string { if (status === 'submitted' || status === 'aborted') { return '\n'; } let text = `\nIs ${ chalk.bold.blue( this.base.name, ) } column in ${ chalk.bold.blue( this.tableName, ) } table created or renamed from another column?\n`; const isSelectedRenamed = isRenamePromptItem( this.data.items[this.data.selectedIdx], ); const selectedPrefix = isSelectedRenamed ? chalk.yellow('❯ ') : chalk.green('❯ '); const labelLength: number = this.data.items .filter((it) => isRenamePromptItem(it)) .map((it: RenamePropmtItem) => { return this.base.name.length + 3 + it['from'].name.length; }) .reduce((a, b) => { if (a > b) { return a; } return b; }, 0); this.data.items.forEach((it, idx) => { const isSelected = idx === this.data.selectedIdx; const isRenamed = isRenamePromptItem(it); const title = isRenamed ? `${it.from.name} › ${it.to.name}`.padEnd(labelLength, ' ') : it.name.padEnd(labelLength, ' '); const label = isRenamed ? `${chalk.yellow('~')} ${title} ${chalk.gray('rename column')}` : `${chalk.green('+')} ${title} ${chalk.gray('create column')}`; text += isSelected ? `${selectedPrefix}${label}` : ` ${label}`; text += idx !== this.data.items.length - 1 ? '\n' : ''; }); return text; } result(): RenamePropmtItem | T { return this.data.items[this.data.selectedIdx]!; } } export const tableKey = (it: NamedWithSchema) => { return it.schema === 'public' || !it.schema ? it.name : `${it.schema}.${it.name}`; }; export class ResolveSelectNamed extends Prompt< RenamePropmtItem | T > { private readonly state: SelectState | T>; constructor( private readonly base: T, data: (RenamePropmtItem | T)[], private readonly entityType: 'role' | 'policy', ) { super(); this.on('attach', (terminal) => terminal.toggleCursor('hide')); this.state = new SelectState(data); this.state.bind(this); } render(status: 'idle' | 'submitted' | 'aborted'): string { if (status === 'submitted' || status === 'aborted') { return ''; } const key = this.base.name; let text = `\nIs ${chalk.bold.blue(key)} ${this.entityType} created or renamed from another ${this.entityType}?\n`; const isSelectedRenamed = isRenamePromptItem( this.state.items[this.state.selectedIdx], ); const selectedPrefix = isSelectedRenamed ? chalk.yellow('❯ ') : chalk.green('❯ '); const labelLength: number = this.state.items .filter((it) => isRenamePromptItem(it)) .map((_) => { const it = _ as RenamePropmtItem; const keyFrom = it.from.name; return key.length + 3 + keyFrom.length; }) .reduce((a, b) => { if (a > b) { return a; } return b; }, 0); const entityType = this.entityType; this.state.items.forEach((it, idx) => { const isSelected = idx === this.state.selectedIdx; const isRenamed = isRenamePromptItem(it); const title = isRenamed ? `${it.from.name} › ${it.to.name}`.padEnd(labelLength, ' ') : it.name.padEnd(labelLength, ' '); const label = isRenamed ? `${chalk.yellow('~')} ${title} ${chalk.gray(`rename ${entityType}`)}` : `${chalk.green('+')} ${title} ${chalk.gray(`create ${entityType}`)}`; text += isSelected ? `${selectedPrefix}${label}` : ` ${label}`; text += idx !== this.state.items.length - 1 ? '\n' : ''; }); return text; } result(): RenamePropmtItem | T { return this.state.items[this.state.selectedIdx]!; } } export class ResolveSelect extends Prompt< RenamePropmtItem | T > { private readonly state: SelectState | T>; constructor( private readonly base: T, data: (RenamePropmtItem | T)[], private readonly entityType: 'table' | 'enum' | 'sequence' | 'view' | 'role', ) { super(); this.on('attach', (terminal) => terminal.toggleCursor('hide')); this.state = new SelectState(data); this.state.bind(this); this.base = base; } render(status: 'idle' | 'submitted' | 'aborted'): string { if (status === 'submitted' || status === 'aborted') { return ''; } const key = tableKey(this.base); let text = `\nIs ${chalk.bold.blue(key)} ${this.entityType} created or renamed from another ${this.entityType}?\n`; const isSelectedRenamed = isRenamePromptItem( this.state.items[this.state.selectedIdx], ); const selectedPrefix = isSelectedRenamed ? chalk.yellow('❯ ') : chalk.green('❯ '); const labelLength: number = this.state.items .filter((it) => isRenamePromptItem(it)) .map((_) => { const it = _ as RenamePropmtItem; const keyFrom = tableKey(it.from); return key.length + 3 + keyFrom.length; }) .reduce((a, b) => { if (a > b) { return a; } return b; }, 0); const entityType = this.entityType; this.state.items.forEach((it, idx) => { const isSelected = idx === this.state.selectedIdx; const isRenamed = isRenamePromptItem(it); const title = isRenamed ? `${tableKey(it.from)} › ${tableKey(it.to)}`.padEnd(labelLength, ' ') : tableKey(it).padEnd(labelLength, ' '); const label = isRenamed ? `${chalk.yellow('~')} ${title} ${chalk.gray(`rename ${entityType}`)}` : `${chalk.green('+')} ${title} ${chalk.gray(`create ${entityType}`)}`; text += isSelected ? `${selectedPrefix}${label}` : ` ${label}`; text += idx !== this.state.items.length - 1 ? '\n' : ''; }); return text; } result(): RenamePropmtItem | T { return this.state.items[this.state.selectedIdx]!; } } export class ResolveSchemasSelect extends Prompt< RenamePropmtItem | T > { private readonly state: SelectState | T>; constructor(private readonly base: Named, data: (RenamePropmtItem | T)[]) { super(); this.on('attach', (terminal) => terminal.toggleCursor('hide')); this.state = new SelectState(data); this.state.bind(this); this.base = base; } render(status: 'idle' | 'submitted' | 'aborted'): string { if (status === 'submitted' || status === 'aborted') { return ''; } let text = `\nIs ${ chalk.bold.blue( this.base.name, ) } schema created or renamed from another schema?\n`; const isSelectedRenamed = isRenamePromptItem( this.state.items[this.state.selectedIdx], ); const selectedPrefix = isSelectedRenamed ? chalk.yellow('❯ ') : chalk.green('❯ '); const labelLength: number = this.state.items .filter((it) => isRenamePromptItem(it)) .map((it: RenamePropmtItem) => { return this.base.name.length + 3 + it['from'].name.length; }) .reduce((a, b) => { if (a > b) { return a; } return b; }, 0); this.state.items.forEach((it, idx) => { const isSelected = idx === this.state.selectedIdx; const isRenamed = isRenamePromptItem(it); const title = isRenamed ? `${it.from.name} › ${it.to.name}`.padEnd(labelLength, ' ') : it.name.padEnd(labelLength, ' '); const label = isRenamed ? `${chalk.yellow('~')} ${title} ${chalk.gray('rename schema')}` : `${chalk.green('+')} ${title} ${chalk.gray('create schema')}`; text += isSelected ? `${selectedPrefix}${label}` : ` ${label}`; text += idx !== this.state.items.length - 1 ? '\n' : ''; }); return text; } result(): RenamePropmtItem | T { return this.state.items[this.state.selectedIdx]!; } } class Spinner { private offset: number = 0; private readonly iterator: () => void; constructor(private readonly frames: string[]) { this.iterator = () => { this.offset += 1; this.offset %= frames.length - 1; }; } public tick = () => { this.iterator(); }; public value = () => { return this.frames[this.offset]; }; } const _frames = function(values: string[]): () => string { let index = 0; const iterator = () => { const frame = values[index]; index += 1; index %= values.length; return frame!; }; return iterator; }; type ValueOf = T[keyof T]; export type IntrospectStatus = 'fetching' | 'done'; export type IntrospectStage = | 'tables' | 'columns' | 'enums' | 'indexes' | 'policies' | 'checks' | 'fks' | 'views'; type IntrospectState = { [key in IntrospectStage]: { count: number; name: string; status: IntrospectStatus; }; }; export class IntrospectProgress extends TaskView { private readonly spinner: Spinner = new Spinner('⣷⣯⣟⡿⢿⣻⣽⣾'.split('')); private timeout: NodeJS.Timeout | undefined; private state: IntrospectState = { tables: { count: 0, name: 'tables', status: 'fetching', }, columns: { count: 0, name: 'columns', status: 'fetching', }, enums: { count: 0, name: 'enums', status: 'fetching', }, indexes: { count: 0, name: 'indexes', status: 'fetching', }, fks: { count: 0, name: 'foreign keys', status: 'fetching', }, policies: { count: 0, name: 'policies', status: 'fetching', }, checks: { count: 0, name: 'check constraints', status: 'fetching', }, views: { count: 0, name: 'views', status: 'fetching', }, }; constructor(private readonly hasEnums: boolean = false) { super(); this.timeout = setInterval(() => { this.spinner.tick(); this.requestLayout(); }, 128); this.on('detach', () => clearInterval(this.timeout)); } public update( stage: IntrospectStage, count: number, status: IntrospectStatus, ) { this.state[stage].count = count; this.state[stage].status = status; this.requestLayout(); } private formatCount = (count: number) => { const width: number = Math.max.apply( null, Object.values(this.state).map((it) => it.count.toFixed(0).length), ); return count.toFixed(0).padEnd(width, ' '); }; private statusText = (spinner: string, stage: ValueOf) => { const { name, count } = stage; const isDone = stage.status === 'done'; const prefix = isDone ? `[${chalk.green('✓')}]` : `[${spinner}]`; const formattedCount = this.formatCount(count); const suffix = isDone ? `${formattedCount} ${name} fetched` : `${formattedCount} ${name} fetching`; return `${prefix} ${suffix}\n`; }; render(): string { let info = ''; const spin = this.spinner.value(); info += this.statusText(spin, this.state.tables); info += this.statusText(spin, this.state.columns); info += this.hasEnums ? this.statusText(spin, this.state.enums) : ''; info += this.statusText(spin, this.state.indexes); info += this.statusText(spin, this.state.fks); info += this.statusText(spin, this.state.policies); info += this.statusText(spin, this.state.checks); info += this.statusText(spin, this.state.views); return info; } } export class MigrateProgress extends TaskView { private readonly spinner: Spinner = new Spinner('⣷⣯⣟⡿⢿⣻⣽⣾'.split('')); private timeout: NodeJS.Timeout | undefined; constructor() { super(); this.timeout = setInterval(() => { this.spinner.tick(); this.requestLayout(); }, 128); this.on('detach', () => clearInterval(this.timeout)); } render(status: 'pending' | 'done' | 'rejected'): string { if (status === 'pending' || status === 'rejected') { const spin = this.spinner.value(); return `[${spin}] applying migrations...`; } return `[${chalk.green('✓')}] migrations applied successfully!`; } } export class ProgressView extends TaskView { private readonly spinner: Spinner = new Spinner('⣷⣯⣟⡿⢿⣻⣽⣾'.split('')); private timeout: NodeJS.Timeout | undefined; constructor( private readonly progressText: string, private readonly successText: string, ) { super(); this.timeout = setInterval(() => { this.spinner.tick(); this.requestLayout(); }, 128); this.on('detach', () => clearInterval(this.timeout)); } render(status: 'pending' | 'done' | 'rejected'): string { if (status === 'pending' || status === 'rejected') { const spin = this.spinner.value(); return `[${spin}] ${this.progressText}\n`; } return `[${chalk.green('✓')}] ${this.successText}\n`; } } export class DropMigrationView extends Prompt { private readonly data: SelectState; constructor(data: T[]) { super(); this.on('attach', (terminal) => terminal.toggleCursor('hide')); this.data = new SelectState(data); this.data.selectedIdx = data.length - 1; this.data.bind(this); } render(status: 'idle' | 'submitted' | 'aborted'): string { if (status === 'submitted' || status === 'aborted') { return '\n'; } let text = chalk.bold('Please select migration to drop:\n'); const selectedPrefix = chalk.yellow('❯ '); const data = trimmedRange(this.data.items, this.data.selectedIdx, 9); const labelLength: number = data.trimmed .map((it) => it.tag.length) .reduce((a, b) => { if (a > b) { return a; } return b; }, 0); text += data.startTrimmed ? ' ...\n' : ''; data.trimmed.forEach((it, idx) => { const isSelected = idx === this.data.selectedIdx - data.offset; let title = it.tag.padEnd(labelLength, ' '); title = isSelected ? chalk.yellow(title) : title; text += isSelected ? `${selectedPrefix}${title}` : ` ${title}`; text += idx !== this.data.items.length - 1 ? '\n' : ''; }); text += data.endTrimmed ? ' ...\n' : ''; return text; } result(): T { return this.data.items[this.data.selectedIdx]!; } } export const trimmedRange = ( arr: T[], index: number, limitLines: number, ): { trimmed: T[]; offset: number; startTrimmed: boolean; endTrimmed: boolean; } => { const limit = limitLines - 2; const sideLimit = Math.round(limit / 2); const endTrimmed = arr.length - sideLimit > index; const startTrimmed = index > sideLimit - 1; const paddingStart = Math.max(index + sideLimit - arr.length, 0); const paddingEnd = Math.min(index - sideLimit + 1, 0); const d1 = endTrimmed ? 1 : 0; const d2 = startTrimmed ? 0 : 1; const start = Math.max(0, index - sideLimit + d1 - paddingStart); const end = Math.min(arr.length, index + sideLimit + d2 - paddingEnd); return { trimmed: arr.slice(start, end), offset: start, startTrimmed, endTrimmed, }; }; ================================================ FILE: drizzle-kit/src/extensions/getTablesFilterByExtensions.ts ================================================ import type { Config } from '../index'; export const getTablesFilterByExtensions = ({ extensionsFilters, dialect, }: Pick): string[] => { if (extensionsFilters) { if ( extensionsFilters.includes('postgis') && dialect === 'postgresql' ) { return ['!geography_columns', '!geometry_columns', '!spatial_ref_sys']; } } return []; }; ================================================ FILE: drizzle-kit/src/extensions/vector.ts ================================================ export const vectorOps = [ 'vector_l2_ops', 'vector_ip_ops', 'vector_cosine_ops', 'vector_l1_ops', 'bit_hamming_ops', 'bit_jaccard_ops', 'halfvec_l2_ops', 'sparsevec_l2_ops', ]; ================================================ FILE: drizzle-kit/src/global.ts ================================================ export const originUUID = '00000000-0000-0000-0000-000000000000'; export const snapshotVersion = '7'; export function assertUnreachable(x: never | undefined): never { throw new Error("Didn't expect to get here"); } // don't fail in runtime, types only export function softAssertUnreachable(x: never) { return null as never; } export const mapValues = ( obj: Record, map: (input: IN) => OUT, ): Record => { const result = Object.keys(obj).reduce(function(result, key) { result[key] = map(obj[key]); return result; }, {} as Record); return result; }; export const mapKeys = ( obj: Record, map: (key: string, value: T) => string, ): Record => { const result = Object.fromEntries( Object.entries(obj).map(([key, val]) => { const newKey = map(key, val); return [newKey, val]; }), ); return result; }; export const mapEntries = ( obj: Record, map: (key: string, value: T) => [string, T], ): Record => { const result = Object.fromEntries( Object.entries(obj).map(([key, val]) => { const [newKey, newVal] = map(key, val); return [newKey, newVal]; }), ); return result; }; export const customMapEntries = ( obj: Record, map: (key: string, value: T) => [string, TReturn], ): Record => { const result = Object.fromEntries( Object.entries(obj).map(([key, val]) => { const [newKey, newVal] = map(key, val); return [newKey, newVal]; }), ); return result; }; ================================================ FILE: drizzle-kit/src/index.ts ================================================ import { ConnectionOptions } from 'tls'; import type { Driver, Prefix } from './cli/validations/common'; import type { Dialect } from './schemaValidator'; // import {SslOptions} from 'mysql2' type SslOptions = { pfx?: string; key?: string; passphrase?: string; cert?: string; ca?: string | string[]; crl?: string | string[]; ciphers?: string; rejectUnauthorized?: boolean; }; type Verify = U; /** * **You are currently using version 0.21.0+ of drizzle-kit. If you have just upgraded to this version, please make sure to read the changelog to understand what changes have been made and what * adjustments may be necessary for you. See https://orm.drizzle.team/kit-docs/upgrade-21#how-to-migrate-to-0210** * * **Config** usage: * * `dialect` - mandatory and is responsible for explicitly providing a databse dialect you are using for all the commands * *Possible values*: `postgresql`, `mysql`, `sqlite`, `singlestore * * See https://orm.drizzle.team/kit-docs/config-reference#dialect * * --- * `schema` - param lets you define where your schema file/files live. * You can have as many separate schema files as you want and define paths to them using glob or array of globs syntax. * * See https://orm.drizzle.team/kit-docs/config-reference#schema * * --- * `out` - allows you to define the folder for your migrations and a folder, where drizzle will introspect the schema and relations * * See https://orm.drizzle.team/kit-docs/config-reference#out * * --- * `driver` - optional param that is responsible for explicitly providing a driver to use when accessing a database * *Possible values*: `aws-data-api`, `d1-http`, `expo`, `turso`, `pglite` * If you don't use AWS Data API, D1, Turso or Expo - ypu don't need this driver. You can check a driver strategy choice here: https://orm.drizzle.team/kit-docs/upgrade-21 * * See https://orm.drizzle.team/kit-docs/config-reference#driver * * --- * * `dbCredentials` - an object to define your connection to the database. For more info please check the docs * * See https://orm.drizzle.team/kit-docs/config-reference#dbcredentials * * --- * * `migrations` - param let’s use specify custom table and schema(PostgreSQL only) for migrations. * By default, all information about executed migrations will be stored in the database inside * the `__drizzle_migrations` table, and for PostgreSQL, inside the drizzle schema. * However, you can configure where to store those records. * * See https://orm.drizzle.team/kit-docs/config-reference#migrations * * --- * * `breakpoints` - param lets you enable/disable SQL statement breakpoints in generated migrations. * It’s optional and true by default, it’s necessary to properly apply migrations on databases, * that do not support multiple DDL alternation statements in one transaction(MySQL, SQLite, SingleStore) and * Drizzle ORM has to apply them sequentially one by one. * * See https://orm.drizzle.team/kit-docs/config-reference#breakpoints * * --- * * `tablesFilters` - param lets you filter tables with glob syntax for db push command. * It’s useful when you have only one database avaialable for several separate projects with separate sql schemas. * * How to define multi-project tables with Drizzle ORM — see https://orm.drizzle.team/docs/goodies#multi-project-schema * * See https://orm.drizzle.team/kit-docs/config-reference#tablesfilters * * --- * * `schemaFilter` - parameter allows you to define which schema in PostgreSQL should be used for either introspect or push commands. * This parameter accepts a single schema as a string or an array of schemas as strings. * No glob pattern is supported here. By default, drizzle will use the public schema for both commands, * but you can add any schema you need. * * For example, having schemaFilter: ["my_schema"] will only look for tables in both the database and * drizzle schema that are a part of the my_schema schema. * * See https://orm.drizzle.team/kit-docs/config-reference#schemafilter * * --- * * `verbose` - command is used for drizzle-kit push commands and prints all statements that will be executed. * * > Note: This command will only print the statements that should be executed. * To approve them before applying, please refer to the `strict` command. * * See https://orm.drizzle.team/kit-docs/config-reference#verbose * * --- * * `strict` - command is used for drizzle-kit push commands and will always ask for your confirmation, * either to execute all statements needed to sync your schema with the database or not. * * See https://orm.drizzle.team/kit-docs/config-reference#strict */ export type Config = & { dialect: Dialect; out?: string; breakpoints?: boolean; tablesFilter?: string | string[]; extensionsFilters?: 'postgis'[]; schemaFilter?: string | string[]; schema?: string | string[]; verbose?: boolean; strict?: boolean; casing?: 'camelCase' | 'snake_case'; migrations?: { table?: string; schema?: string; prefix?: Prefix; }; introspect?: { casing: 'camel' | 'preserve'; }; entities?: { roles?: boolean | { provider?: 'supabase' | 'neon' | string & {}; exclude?: string[]; include?: string[] }; }; } & ( | { dialect: Verify; dbCredentials: { url: string; authToken?: string; }; } | { dialect: Verify; dbCredentials: { url: string; }; } | { dialect: Verify; dbCredentials: | ({ host: string; port?: number; user?: string; password?: string; database: string; ssl?: | boolean | 'require' | 'allow' | 'prefer' | 'verify-full' | ConnectionOptions; } & {}) | { url: string; }; } | { dialect: Verify; driver: Verify; dbCredentials: { database: string; secretArn: string; resourceArn: string; }; } | { dialect: Verify; driver: Verify; dbCredentials: { url: string; }; } | { dialect: Verify; dbCredentials: | { host: string; port?: number; user?: string; password?: string; database: string; ssl?: string | SslOptions; } | { url: string; }; } | { dialect: Verify; driver: Verify; dbCredentials: { accountId: string; databaseId: string; token: string; }; } | { dialect: Verify; driver: Verify; } | { dialect: Verify; driver: Verify; } | {} | { dialect: Verify; dbCredentials: | { host: string; port?: number; user?: string; password?: string; database: string; ssl?: string | SslOptions; } | { url: string; }; } | { dialect: Verify; dbCredentials?: & { tlsSecurity?: | 'insecure' | 'no_host_verification' | 'strict' | 'default'; } & ( | { url: string; } | ({ host: string; port?: number; user?: string; password?: string; database: string; }) ); } ); /** * **You are currently using version 0.21.0+ of drizzle-kit. If you have just upgraded to this version, please make sure to read the changelog to understand what changes have been made and what * adjustments may be necessary for you. See https://orm.drizzle.team/kit-docs/upgrade-21#how-to-migrate-to-0210** * * **Config** usage: * * `dialect` - mandatory and is responsible for explicitly providing a databse dialect you are using for all the commands * *Possible values*: `postgresql`, `mysql`, `sqlite`, `singlestore`, `gel` * * See https://orm.drizzle.team/kit-docs/config-reference#dialect * * --- * `schema` - param lets you define where your schema file/files live. * You can have as many separate schema files as you want and define paths to them using glob or array of globs syntax. * * See https://orm.drizzle.team/kit-docs/config-reference#schema * * --- * `out` - allows you to define the folder for your migrations and a folder, where drizzle will introspect the schema and relations * * See https://orm.drizzle.team/kit-docs/config-reference#out * * --- * `driver` - optional param that is responsible for explicitly providing a driver to use when accessing a database * *Possible values*: `aws-data-api`, `d1-http`, `expo`, `turso`, `pglite` * If you don't use AWS Data API, D1, Turso or Expo - ypu don't need this driver. You can check a driver strategy choice here: https://orm.drizzle.team/kit-docs/upgrade-21 * * See https://orm.drizzle.team/kit-docs/config-reference#driver * * --- * * `dbCredentials` - an object to define your connection to the database. For more info please check the docs * * See https://orm.drizzle.team/kit-docs/config-reference#dbcredentials * * --- * * `migrations` - param let’s use specify custom table and schema(PostgreSQL only) for migrations. * By default, all information about executed migrations will be stored in the database inside * the `__drizzle_migrations` table, and for PostgreSQL, inside the drizzle schema. * However, you can configure where to store those records. * * See https://orm.drizzle.team/kit-docs/config-reference#migrations * * --- * * `breakpoints` - param lets you enable/disable SQL statement breakpoints in generated migrations. * It’s optional and true by default, it’s necessary to properly apply migrations on databases, * that do not support multiple DDL alternation statements in one transaction(MySQL, SQLite, SingleStore) and * Drizzle ORM has to apply them sequentially one by one. * * See https://orm.drizzle.team/kit-docs/config-reference#breakpoints * * --- * * `tablesFilters` - param lets you filter tables with glob syntax for db push command. * It’s useful when you have only one database avaialable for several separate projects with separate sql schemas. * * How to define multi-project tables with Drizzle ORM — see https://orm.drizzle.team/docs/goodies#multi-project-schema * * See https://orm.drizzle.team/kit-docs/config-reference#tablesfilters * * --- * * `schemaFilter` - parameter allows you to define which schema in PostgreSQL should be used for either introspect or push commands. * This parameter accepts a single schema as a string or an array of schemas as strings. * No glob pattern is supported here. By default, drizzle will use the public schema for both commands, * but you can add any schema you need. * * For example, having schemaFilter: ["my_schema"] will only look for tables in both the database and * drizzle schema that are a part of the my_schema schema. * * See https://orm.drizzle.team/kit-docs/config-reference#schemafilter * * --- * * `verbose` - command is used for drizzle-kit push commands and prints all statements that will be executed. * * > Note: This command will only print the statements that should be executed. * To approve them before applying, please refer to the `strict` command. * * See https://orm.drizzle.team/kit-docs/config-reference#verbose * * --- * * `strict` - command is used for drizzle-kit push commands and will always ask for your confirmation, * either to execute all statements needed to sync your schema with the database or not. * * See https://orm.drizzle.team/kit-docs/config-reference#strict */ export function defineConfig(config: Config) { return config; } ================================================ FILE: drizzle-kit/src/introspect-gel.ts ================================================ import { getTableName, is } from 'drizzle-orm'; import { AnyGelTable } from 'drizzle-orm/gel-core'; import { createTableRelationsHelpers, extractTablesRelationalConfig, Many, One, Relation, Relations, } from 'drizzle-orm/relations'; import './@types/utils'; import { toCamelCase } from 'drizzle-orm/casing'; import { Casing } from './cli/validations/common'; import { assertUnreachable } from './global'; import { CheckConstraint, Column, ForeignKey, GelKitInternals, GelSchemaInternal, Index, Policy, PrimaryKey, UniqueConstraint, } from './serializer/gelSchema'; import { indexName } from './serializer/gelSerializer'; import { unescapeSingleQuotes } from './utils'; const gelImportsList = new Set([ 'gelTable', 'smallint', 'integer', 'bigint', 'bigintT', 'boolean', 'bytes', 'dateDuration', 'decimal', 'doublePrecision', 'duration', 'json', 'localDate', 'localTime', 'real', 'relDuration', 'text', 'timestamp', 'timestamptz', 'uuid', 'time', ]); const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { if (isExpression) { return `sql\`${defaultValue}\``; } return defaultValue; }; const relations = new Set(); const escapeColumnKey = (value: string) => { if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { return `"${value}"`; } return value; }; const withCasing = (value: string, casing: Casing) => { if (casing === 'preserve') { return escapeColumnKey(value); } if (casing === 'camel') { return escapeColumnKey(value.camelCase()); } assertUnreachable(casing); }; const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { if (casing === 'preserve') { return ''; } if (casing === 'camel') { return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; } assertUnreachable(casing); }; // export const relationsToTypeScriptForStudio = ( // schema: Record>>, // relations: Record>>>, // ) => { // const relationalSchema: Record = { // ...Object.fromEntries( // Object.entries(schema) // .map(([key, val]) => { // // have unique keys across schemas // const mappedTableEntries = Object.entries(val).map((tableEntry) => { // return [`__${key}__.${tableEntry[0]}`, tableEntry[1]]; // }); // return mappedTableEntries; // }) // .flat(), // ), // ...relations, // }; // const relationsConfig = extractTablesRelationalConfig(relationalSchema, createTableRelationsHelpers); // let result = ''; // function findColumnKey(table: AnyGelTable, columnName: string) { // for (const tableEntry of Object.entries(table)) { // const key = tableEntry[0]; // const value = tableEntry[1]; // if (value.name === columnName) { // return key; // } // } // } // Object.values(relationsConfig.tables).forEach((table) => { // const tableName = table.tsName.split('.')[1]; // const relations = table.relations; // let hasRelations = false; // let relationsObjAsStr = ''; // let hasOne = false; // let hasMany = false; // Object.values(relations).forEach((relation) => { // hasRelations = true; // if (is(relation, Many)) { // hasMany = true; // relationsObjAsStr += `\t\t${relation.fieldName}: many(${ // relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] // }${typeof relation.relationName !== 'undefined' ? `, { relationName: "${relation.relationName}"}` : ''}),`; // } // if (is(relation, One)) { // hasOne = true; // relationsObjAsStr += `\t\t${relation.fieldName}: one(${ // relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] // }, { fields: [${ // relation.config?.fields.map( // (c) => // `${relationsConfig.tableNamesMap[getTableName(relation.sourceTable)].split('.')[1]}.${ // findColumnKey(relation.sourceTable, c.name) // }`, // ) // }], references: [${ // relation.config?.references.map( // (c) => // `${relationsConfig.tableNamesMap[getTableName(relation.referencedTable)].split('.')[1]}.${ // findColumnKey(relation.referencedTable, c.name) // }`, // ) // }]${typeof relation.relationName !== 'undefined' ? `, relationName: "${relation.relationName}"` : ''}}),`; // } // }); // if (hasRelations) { // result += `export const ${tableName}Relation = relations(${tableName}, ({${hasOne ? 'one' : ''}${ // hasOne && hasMany ? ', ' : '' // }${hasMany ? 'many' : ''}}) => ({ // ${relationsObjAsStr} // }));\n`; // } // }); // return result; // }; function generateIdentityParams(identity: Column['identity']) { let paramsObj = `{ name: "${identity!.name}"`; if (identity?.startWith) { paramsObj += `, startWith: ${identity.startWith}`; } if (identity?.increment) { paramsObj += `, increment: ${identity.increment}`; } if (identity?.minValue) { paramsObj += `, minValue: ${identity.minValue}`; } if (identity?.maxValue) { paramsObj += `, maxValue: ${identity.maxValue}`; } if (identity?.cache) { paramsObj += `, cache: ${identity.cache}`; } if (identity?.cycle) { paramsObj += `, cycle: true`; } paramsObj += ' }'; if (identity?.type === 'always') { return `.generatedAlwaysAsIdentity(${paramsObj})`; } return `.generatedByDefaultAsIdentity(${paramsObj})`; } export const paramNameFor = (name: string, schema?: string) => { const schemaSuffix = schema && schema !== 'public' ? `In${schema.capitalise()}` : ''; return `${name}${schemaSuffix}`; }; export const schemaToTypeScript = (schema: GelSchemaInternal, casing: Casing) => { // collectFKs Object.values(schema.tables).forEach((table) => { Object.values(table.foreignKeys).forEach((fk) => { const relation = `${fk.tableFrom}-${fk.tableTo}`; relations.add(relation); }); }); const schemas = Object.fromEntries( Object.entries(schema.schemas).map((it) => { return [it[0], withCasing(it[1].replace('::', ''), casing)]; }), ); // const enumTypes = Object.values(schema.enums).reduce((acc, cur) => { // acc.add(`${cur.schema}.${cur.name}`); // return acc; // }, new Set()); const imports = Object.values(schema.tables).reduce( (res, it) => { const idxImports = Object.values(it.indexes).map((idx) => (idx.isUnique ? 'uniqueIndex' : 'index')); const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); if (Object.values(it.foreignKeys).some((it) => isCyclic(it) && !isSelf(it))) { res.gel.push('type AnyGelColumn'); } const pkImports = Object.values(it.compositePrimaryKeys).map((it) => 'primaryKey'); const uniqueImports = Object.values(it.uniqueConstraints).map((it) => 'unique'); const checkImports = Object.values(it.checkConstraints).map( (it) => 'check', ); const policiesImports = Object.values(it.policies).map( (it) => 'gelPolicy', ); if (it.schema && it.schema !== 'public' && it.schema !== '') { res.gel.push('gelSchema'); } res.gel.push(...idxImports); res.gel.push(...fkImpots); res.gel.push(...pkImports); res.gel.push(...uniqueImports); res.gel.push(...policiesImports); res.gel.push(...checkImports); const columnImports = Object.values(it.columns) .map((col) => { let patched: string = col.type?.replace('[]', '') ?? ''; patched = patched.startsWith('time without time zone') ? 'localTime' : patched; patched = patched === 'double precision' ? 'doublePrecision' : patched; patched = patched.startsWith('edgedbt.bigint_t') ? 'bigintT' : patched; patched = patched.startsWith('jsonb') ? 'json' : patched; patched = patched.startsWith('edgedbt.timestamptz_t') ? 'timestamptz' : patched; patched = patched.startsWith('edgedbt.timestamp_t') ? 'timestamp' : patched; patched = patched.startsWith('edgedbt.relative_duration_t') ? 'relDuration' : patched; patched = patched.startsWith('bytea') ? 'bytes' : patched; patched = patched.startsWith('numeric') ? 'decimal' : patched; patched = patched.startsWith('edgedbt.duration_t') ? 'duration' : patched; patched = patched.startsWith('edgedbt.date_t') ? 'localDate' : patched; patched = patched.startsWith('edgedbt.date_duration_t') ? 'dateDuration' : patched; return patched; }) .filter((type) => { return gelImportsList.has(type); }); res.gel.push(...columnImports); return res; }, { gel: [] as string[] }, ); // Object.values(schema.sequences).forEach((it) => { // if (it.schema && it.schema !== 'public' && it.schema !== '') { // imports.gel.push('gelSchema'); // } else if (it.schema === 'public') { // imports.gel.push('gelSequence'); // } // }); // Object.values(schema.enums).forEach((it) => { // if (it.schema && it.schema !== 'public' && it.schema !== '') { // imports.gel.push('gelSchema'); // } else if (it.schema === 'public') { // imports.gel.push('gelEnum'); // } // }); if (Object.keys(schema.roles).length > 0) { imports.gel.push('gelRole'); } // const enumStatements = Object.values(schema.enums) // .map((it) => { // const enumSchema = schemas[it.schema]; // // const func = schema || schema === "public" ? "gelTable" : schema; // const paramName = paramNameFor(it.name, enumSchema); // const func = enumSchema ? `${enumSchema}.enum` : 'gelEnum'; // const values = Object.values(it.values) // .map((it) => `'${unescapeSingleQuotes(it, false)}'`) // .join(', '); // return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", [${values}])\n`; // }) // .join('') // .concat('\n'); // const sequencesStatements = Object.values(schema.sequences) // .map((it) => { // const seqSchema = schemas[it.schema]; // const paramName = paramNameFor(it.name, seqSchema); // const func = seqSchema ? `${seqSchema}.sequence` : 'gelSequence'; // let params = ''; // if (it.startWith) { // params += `, startWith: "${it.startWith}"`; // } // if (it.increment) { // params += `, increment: "${it.increment}"`; // } // if (it.minValue) { // params += `, minValue: "${it.minValue}"`; // } // if (it.maxValue) { // params += `, maxValue: "${it.maxValue}"`; // } // if (it.cache) { // params += `, cache: "${it.cache}"`; // } // if (it.cycle) { // params += `, cycle: true`; // } else { // params += `, cycle: false`; // } // return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}"${ // params ? `, { ${params.trimChar(',')} }` : '' // })\n`; // }) // .join('') // .concat(''); const schemaStatements = Object.entries(schemas) .filter((it) => it[0] !== 'public') .map((it) => { return `export const ${it[1].replace('::', '').camelCase()} = gelSchema("${it[0]}");\n`; }) .join(''); const rolesNameToTsKey: Record = {}; const rolesStatements = Object.entries(schema.roles) .map((it) => { const fields = it[1]; rolesNameToTsKey[fields.name] = it[0]; return `export const ${withCasing(it[0], casing)} = gelRole("${fields.name}", ${ !fields.createDb && !fields.createRole && fields.inherit ? '' : `${ `, { ${fields.createDb ? `createDb: true,` : ''}${fields.createRole ? ` createRole: true,` : ''}${ !fields.inherit ? ` inherit: false ` : '' }`.trimChar(',') }}` } );\n`; }) .join(''); const tableStatements = Object.values(schema.tables).map((table) => { const tableSchema = schemas[table.schema]; const paramName = paramNameFor(table.name, tableSchema); const func = tableSchema ? `${tableSchema}.table` : 'gelTable'; let statement = `export const ${withCasing(paramName, casing)} = ${func}("${table.name}", {\n`; statement += createTableColumns( table.name, Object.values(table.columns), Object.values(table.foreignKeys), // enumTypes, new Set(), schemas, casing, schema.internal, ); statement += '}'; // more than 2 fields or self reference or cyclic // Andrii: I switched this one off until we will get custom names in .references() // const filteredFKs = Object.values(table.foreignKeys).filter((it) => { // return it.columnsFrom.length > 1 || isSelf(it); // }); if ( Object.keys(table.indexes).length > 0 || Object.values(table.foreignKeys).length > 0 || Object.values(table.policies).length > 0 || Object.keys(table.compositePrimaryKeys).length > 0 || Object.keys(table.uniqueConstraints).length > 0 || Object.keys(table.checkConstraints).length > 0 ) { statement += ', '; statement += '(table) => ['; statement += createTableIndexes(table.name, Object.values(table.indexes), casing); statement += createTableFKs(Object.values(table.foreignKeys), schemas, casing); statement += createTablePKs( Object.values(table.compositePrimaryKeys), casing, ); statement += createTableUniques( Object.values(table.uniqueConstraints), casing, ); statement += createTablePolicies( Object.values(table.policies), casing, rolesNameToTsKey, ); statement += createTableChecks( Object.values(table.checkConstraints), casing, ); statement += '\n]'; } statement += ');'; return statement; }); // const viewsStatements = Object.values(schema.views) // .map((it) => { // const viewSchema = schemas[it.schema]; // const paramName = paramNameFor(it.name, viewSchema); // const func = viewSchema // ? (it.materialized ? `${viewSchema}.materializedView` : `${viewSchema}.view`) // : it.materialized // ? 'gelMaterializedView' // : 'gelView'; // const withOption = it.with ?? ''; // const as = `sql\`${it.definition}\``; // const tablespace = it.tablespace ?? ''; // const columns = createTableColumns( // '', // Object.values(it.columns), // [], // enumTypes, // schemas, // casing, // schema.internal, // ); // let statement = `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", {${columns}})`; // statement += tablespace ? `.tablespace("${tablespace}")` : ''; // statement += withOption ? `.with(${JSON.stringify(withOption)})` : ''; // statement += `.as(${as});`; // return statement; // }) // .join('\n\n'); const uniqueGelImports = ['gelTable', ...new Set(imports.gel)]; const importsTs = `import { ${ uniqueGelImports.join( ', ', ) } } from "drizzle-orm/gel-core" import { sql } from "drizzle-orm"\n\n`; let decalrations = schemaStatements; decalrations += rolesStatements; // decalrations += enumStatements; // decalrations += sequencesStatements; decalrations += '\n'; decalrations += tableStatements.join('\n\n'); decalrations += '\n'; // decalrations += viewsStatements; const file = importsTs + decalrations; // for drizzle studio query runner const schemaEntry = ` { ${ Object.values(schema.tables) .map((it) => withCasing(it.name, casing)) .join(',\n') } } `; return { file, imports: importsTs, decalrations, schemaEntry }; }; const isCyclic = (fk: ForeignKey) => { const key = `${fk.tableFrom}-${fk.tableTo}`; const reverse = `${fk.tableTo}-${fk.tableFrom}`; return relations.has(key) && relations.has(reverse); }; const isSelf = (fk: ForeignKey) => { return fk.tableFrom === fk.tableTo; }; const buildArrayDefault = (defaultValue: string, typeName: string): string => { if ( typeof defaultValue === 'string' && !(defaultValue.startsWith('_nullif_array_nulls(ARRAY[') || defaultValue.startsWith('ARRAY[')) ) { return `sql\`${defaultValue}\``; } const regex = /ARRAY\[(.*)\]/; const match = defaultValue.match(regex); if (!match) { return `sql\`${defaultValue}\``; } defaultValue = match[1]; return `sql\`[${defaultValue}]\``; }; const mapDefault = ( tableName: string, type: string, name: string, enumTypes: Set, typeSchema: string, defaultValue?: any, internals?: GelKitInternals, ) => { const isExpression = internals?.tables[tableName]?.columns[name]?.isDefaultAnExpression ?? false; const isArray = internals?.tables[tableName]?.columns[name]?.isArray ?? false; const lowered = type.toLowerCase().replace('[]', ''); if (name === 'id') { return `.default(sql\`uuid_generate_v4()\`)`; } if (isArray) { return typeof defaultValue !== 'undefined' ? `.default(${buildArrayDefault(defaultValue, lowered)})` : ''; } if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(unescapeSingleQuotes(defaultValue, true), isExpression)})` : ''; } if (lowered.startsWith('integer')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue.replaceAll('(', '').replaceAll(')', ''), isExpression)})` : ''; } if (lowered.startsWith('smallint')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue.replaceAll('(', '').replaceAll(')', ''), isExpression)})` : ''; } if (lowered.startsWith('bigint')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue.replaceAll('(', '').replaceAll(')', ''), isExpression)})` : ''; } if (lowered.startsWith('edgedbt.bigint_t')) { return typeof defaultValue !== 'undefined' ? `.default(BigInt(${mapColumnDefault(defaultValue.replaceAll('(', '').replaceAll(')', ''), isExpression)}))` : ''; } if (lowered.startsWith('boolean')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; } if (lowered.startsWith('double precision')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; } if (lowered.startsWith('edgedbt.date_duration_t')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, true)})` : ''; } if (lowered.startsWith('real')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; } if (lowered.startsWith('uuid')) { const res = defaultValue === 'gen_random_uuid()' ? '.defaultRandom()' : defaultValue ? `.default(sql\`${defaultValue}\`)` : ''; return res; } if (lowered.startsWith('numeric')) { defaultValue = defaultValue ? (defaultValue.startsWith(`'`) && defaultValue.endsWith(`'`) ? defaultValue.substring(1, defaultValue.length - 1) : defaultValue) : undefined; return defaultValue ? `.default(sql\`${defaultValue}\`)` : ''; } if (lowered.startsWith('edgedbt.timestamptz_t')) { return defaultValue === 'now()' ? '.defaultNow()' : /^'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{2}(:\d{2})?)?'$/.test(defaultValue) // Matches 'YYYY-MM-DD HH:MI:SS', 'YYYY-MM-DD HH:MI:SS.FFFFFF', 'YYYY-MM-DD HH:MI:SS+TZ', 'YYYY-MM-DD HH:MI:SS.FFFFFF+TZ' and 'YYYY-MM-DD HH:MI:SS+HH:MI' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : defaultValue ? `.default(sql\`${defaultValue}\`)` : ''; } if (lowered.startsWith('time without time zone')) { return defaultValue === 'now()' ? '.defaultNow()' : /^'\d{2}:\d{2}(:\d{2})?(\.\d+)?'$/.test(defaultValue) // Matches 'HH:MI', 'HH:MI:SS' and 'HH:MI:SS.FFFFFF' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : defaultValue ? `.default(sql\`${defaultValue}\`)` : ''; } if (lowered.startsWith('edgedbt.duration_t')) { return defaultValue ? `.default(${mapColumnDefault(defaultValue, true)})` : ''; } if (lowered === 'edgedbt.date_t') { return defaultValue === 'now()' ? '.defaultNow()' : /^'\d{4}-\d{2}-\d{2}'$/.test(defaultValue) // Matches 'YYYY-MM-DD' ? `.default(${defaultValue})` : defaultValue ? `.default(sql\`${defaultValue}\`)` : ''; } if (lowered.startsWith('edgedbt.relative_duration_t')) { return defaultValue ? `.default(${mapColumnDefault(defaultValue, true)})` : ''; } if (lowered.startsWith('text')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(unescapeSingleQuotes(defaultValue, true), isExpression)})` : ''; } if (lowered.startsWith('json')) { const def = typeof defaultValue !== 'undefined' ? defaultValue : null; return defaultValue ? `.default(sql\`${def}\`)` : ''; } if (lowered.startsWith('bytea')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, true)})` : ''; } return ''; }; const column = ( tableName: string, type: string, name: string, enumTypes: Set, typeSchema: string, casing: Casing, defaultValue?: any, internals?: GelKitInternals, ) => { const isExpression = internals?.tables[tableName]?.columns[name]?.isDefaultAnExpression ?? false; const lowered = type.toLowerCase().replace('[]', ''); if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { let out = `${withCasing(name, casing)}: ${withCasing(paramNameFor(type.replace('[]', ''), typeSchema), casing)}(${ dbColumnName({ name, casing }) })`; return out; } if (lowered.startsWith('integer')) { let out = `${withCasing(name, casing)}: integer(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('smallint')) { let out = `${withCasing(name, casing)}: smallint(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('bigint')) { let out = `${withCasing(name, casing)}: bigint(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('edgedbt.bigint_t')) { let out = `${withCasing(name, casing)}: bigintT(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('boolean')) { let out = `${withCasing(name, casing)}: boolean(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('double precision')) { let out = `${withCasing(name, casing)}: doublePrecision(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('edgedbt.date_duration_t')) { let out = `${withCasing(name, casing)}: dateDuration(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('real')) { let out = `${withCasing(name, casing)}: real(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('uuid')) { let out = `${withCasing(name, casing)}: uuid(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('numeric')) { let out = `${withCasing(name, casing)}: decimal(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('edgedbt.timestamptz_t')) { let out = `${withCasing(name, casing)}: timestamptz(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('edgedbt.timestamp_t')) { let out = `${withCasing(name, casing)}: timestamp(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('edgedbt.date_t')) { let out = `${withCasing(name, casing)}: localDate(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('edgedbt.duration_t')) { let out = `${withCasing(name, casing)}: duration(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('edgedbt.relative_duration_t')) { let out = `${withCasing(name, casing)}: relDuration(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('text')) { let out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('jsonb')) { let out = `${withCasing(name, casing)}: json(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('time without time zone')) { let out = `${withCasing(name, casing)}: localTime(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('bytea')) { let out = `${withCasing(name, casing)}: bytes(${dbColumnName({ name, casing })})`; return out; } let unknown = `// TODO: failed to parse database type '${type}'\n`; unknown += `\t${withCasing(name, casing)}: unknown("${name}")`; return unknown; }; const dimensionsInArray = (size?: number): string => { let res = ''; if (typeof size === 'undefined') return res; for (let i = 0; i < size; i++) { res += '.array()'; } return res; }; const createTableColumns = ( tableName: string, columns: Column[], fks: ForeignKey[], enumTypes: Set, schemas: Record, casing: Casing, internals: GelKitInternals, ): string => { let statement = ''; // no self refs and no cyclic const oneColumnsFKs = Object.values(fks) .filter((it) => { return !isSelf(it); }) .filter((it) => it.columnsFrom.length === 1); const fkByColumnName = oneColumnsFKs.reduce((res, it) => { const arr = res[it.columnsFrom[0]] || []; arr.push(it); res[it.columnsFrom[0]] = arr; return res; }, {} as Record); columns.forEach((it) => { const columnStatement = column( tableName, it.type, it.name, enumTypes, it.typeSchema ?? 'public', casing, it.default, internals, ); statement += '\t'; statement += columnStatement; // Provide just this in column function if (internals?.tables[tableName]?.columns[it.name]?.isArray) { statement += dimensionsInArray(internals?.tables[tableName]?.columns[it.name]?.dimensions); } statement += mapDefault(tableName, it.type, it.name, enumTypes, it.typeSchema ?? 'public', it.default, internals); statement += it.primaryKey ? '.primaryKey()' : ''; statement += it.notNull && !it.identity ? '.notNull()' : ''; statement += it.identity ? generateIdentityParams(it.identity) : ''; statement += it.generated ? `.generatedAlwaysAs(sql\`${it.generated.as}\`)` : ''; // const fks = fkByColumnName[it.name]; // Andrii: I switched it off until we will get a custom naem setting in references // if (fks) { // const fksStatement = fks // .map((it) => { // const onDelete = it.onDelete && it.onDelete !== 'no action' ? it.onDelete : null; // const onUpdate = it.onUpdate && it.onUpdate !== 'no action' ? it.onUpdate : null; // const params = { onDelete, onUpdate }; // const typeSuffix = isCyclic(it) ? ': AnyGelColumn' : ''; // const paramsStr = objToStatement2(params); // const tableSchema = schemas[it.schemaTo || '']; // const paramName = paramNameFor(it.tableTo, tableSchema); // if (paramsStr) { // return `.references(()${typeSuffix} => ${ // withCasing( // paramName, // casing, // ) // }.${withCasing(it.columnsTo[0], casing)}, ${paramsStr} )`; // } // return `.references(()${typeSuffix} => ${ // withCasing( // paramName, // casing, // ) // }.${withCasing(it.columnsTo[0], casing)})`; // }) // .join(''); // statement += fksStatement; // } statement += ',\n'; }); return statement; }; const createTableIndexes = (tableName: string, idxs: Index[], casing: Casing): string => { let statement = ''; idxs.forEach((it) => { // we have issue when index is called as table called let idxKey = it.name.startsWith(tableName) && it.name !== tableName ? it.name.slice(tableName.length + 1) : it.name; idxKey = idxKey.endsWith('_index') ? idxKey.slice(0, -'_index'.length) + '_idx' : idxKey; idxKey = withCasing(idxKey, casing); const indexGeneratedName = indexName( tableName, it.columns.map((it) => it.expression), ); const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; statement += `\n\t`; statement += it.isUnique ? 'uniqueIndex(' : 'index('; statement += `${escapedIndexName})`; statement += `${it.concurrently ? `.concurrently()` : ''}`; statement += `.using("${it.method}", ${ it.columns .map((it) => { if (it.isExpression) { return `sql\`${it.expression}\``; } else { return `table.${withCasing(it.expression, casing)}${it.asc ? '.asc()' : '.desc()'}${ it.nulls === 'first' ? '.nullsFirst()' : '.nullsLast()' }${ it.opclass ? `.op("${it.opclass}")` : '' }`; } }) .join(', ') })`; statement += it.where ? `.where(sql\`${it.where}\`)` : ''; function reverseLogic(mappedWith: Record): string { let reversedString = '{'; for (const key in mappedWith) { if (mappedWith.hasOwnProperty(key)) { reversedString += `${key}: "${mappedWith[key]}",`; } } reversedString = reversedString.length > 1 ? reversedString.slice(0, reversedString.length - 1) : reversedString; return `${reversedString}}`; } statement += it.with && Object.keys(it.with).length > 0 ? `.with(${reverseLogic(it.with)})` : ''; statement += `,`; }); return statement; }; const createTablePKs = (pks: PrimaryKey[], casing: Casing): string => { let statement = ''; pks.forEach((it) => { statement += `\n\t`; statement += 'primaryKey({ columns: ['; statement += `${ it.columns .map((c) => { return `table.${withCasing(c, casing)}`; }) .join(', ') }]${it.name ? `, name: "${it.name}"` : ''}}`; statement += ')'; statement += `,`; }); return statement; }; // get a map of db role name to ts key // if to by key is in this map - no quotes, otherwise - quotes const createTablePolicies = ( policies: Policy[], casing: Casing, rolesNameToTsKey: Record = {}, ): string => { let statement = ''; policies.forEach((it) => { const idxKey = withCasing(it.name, casing); const mappedItTo = it.to?.map((v) => { return rolesNameToTsKey[v] ? withCasing(rolesNameToTsKey[v], casing) : `"${v}"`; }); statement += `\n\t`; statement += 'gelPolicy('; statement += `"${it.name}", { `; statement += `as: "${it.as?.toLowerCase()}", for: "${it.for?.toLowerCase()}", to: [${mappedItTo?.join(', ')}]${ it.using ? `, using: sql\`${it.using}\`` : '' }${it.withCheck ? `, withCheck: sql\`${it.withCheck}\` ` : ''}`; statement += ` }),`; }); return statement; }; const createTableUniques = ( unqs: UniqueConstraint[], casing: Casing, ): string => { let statement = ''; unqs.forEach((it) => { statement += `\n\t`; statement += 'unique('; statement += `"${it.name}")`; statement += `.on(${it.columns.map((it) => `table.${withCasing(it, casing)}`).join(', ')})`; statement += it.nullsNotDistinct ? `.nullsNotDistinct()` : ''; statement += `,`; }); return statement; }; const createTableChecks = ( checkConstraints: CheckConstraint[], casing: Casing, ) => { let statement = ''; checkConstraints.forEach((it) => { statement += `\n\t`; statement += 'check('; statement += `"${it.name}", `; statement += `sql\`${it.value}\`)`; statement += `,`; }); return statement; }; const createTableFKs = (fks: ForeignKey[], schemas: Record, casing: Casing): string => { let statement = ''; fks.forEach((it) => { const tableSchema = schemas[it.schemaTo || '']; const paramName = paramNameFor(it.tableTo, tableSchema); const isSelf = it.tableTo === it.tableFrom; const tableTo = isSelf ? 'table' : `${withCasing(paramName, casing)}`; statement += `\n\t`; statement += `foreignKey({\n`; statement += `\t\t\tcolumns: [${it.columnsFrom.map((i) => `table.${withCasing(i, casing)}`).join(', ')}],\n`; statement += `\t\t\tforeignColumns: [${ it.columnsTo.map((i) => `${tableTo}.${withCasing(i, casing)}`).join(', ') }],\n`; statement += `\t\t\tname: "${it.name}"\n`; statement += `\t\t})`; statement += it.onUpdate && it.onUpdate !== 'no action' ? `.onUpdate("${it.onUpdate}")` : ''; statement += it.onDelete && it.onDelete !== 'no action' ? `.onDelete("${it.onDelete}")` : ''; statement += `,`; }); return statement; }; ================================================ FILE: drizzle-kit/src/introspect-mysql.ts ================================================ /* eslint-disable @typescript-eslint/no-unsafe-argument */ import { toCamelCase } from 'drizzle-orm/casing'; import './@types/utils'; import type { Casing } from './cli/validations/common'; import { assertUnreachable } from './global'; import { CheckConstraint, Column, ForeignKey, Index, MySqlSchema, MySqlSchemaInternal, PrimaryKey, UniqueConstraint, } from './serializer/mysqlSchema'; import { unescapeSingleQuotes } from './utils'; const mysqlImportsList = new Set([ 'mysqlTable', 'mysqlEnum', 'bigint', 'binary', 'boolean', 'char', 'date', 'datetime', 'decimal', 'double', 'float', 'int', 'json', 'mediumint', 'real', 'serial', 'smallint', 'text', 'tinytext', 'mediumtext', 'longtext', 'time', 'timestamp', 'tinyint', 'varbinary', 'varchar', 'year', 'enum', ]); const objToStatement = (json: any) => { json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it) => `"${it}": "${json[it]}"`).join(', '); statement += ' }'; return statement; }; const objToStatement2 = (json: any) => { json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys statement += ' }'; return statement; }; const timeConfig = (json: any) => { json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); statement += ' }'; return statement; }; const binaryConfig = (json: any) => { json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); statement += ' }'; return statement; }; const importsPatch = { 'double precision': 'doublePrecision', 'timestamp without time zone': 'timestamp', } as Record; const relations = new Set(); const escapeColumnKey = (value: string) => { if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { return `"${value}"`; } return value; }; const prepareCasing = (casing?: Casing) => (value: string) => { if (casing === 'preserve') { return escapeColumnKey(value); } if (casing === 'camel') { return escapeColumnKey(value.camelCase()); } assertUnreachable(casing); }; const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { if (casing === 'preserve') { return ''; } if (casing === 'camel') { return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; } assertUnreachable(casing); }; export const schemaToTypeScript = ( schema: MySqlSchemaInternal, casing: Casing, ) => { const withCasing = prepareCasing(casing); // collectFKs Object.values(schema.tables).forEach((table) => { Object.values(table.foreignKeys).forEach((fk) => { const relation = `${fk.tableFrom}-${fk.tableTo}`; relations.add(relation); }); }); const imports = Object.values(schema.tables).reduce( (res, it) => { const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 'uniqueIndex' : 'index'); const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); const pkImports = Object.values(it.compositePrimaryKeys).map( (it) => 'primaryKey', ); const uniqueImports = Object.values(it.uniqueConstraints).map( (it) => 'unique', ); const checkImports = Object.values(it.checkConstraint).map( (it) => 'check', ); res.mysql.push(...idxImports); res.mysql.push(...fkImpots); res.mysql.push(...pkImports); res.mysql.push(...uniqueImports); res.mysql.push(...checkImports); const columnImports = Object.values(it.columns) .map((col) => { let patched = importsPatch[col.type] ?? col.type; patched = patched.startsWith('varchar(') ? 'varchar' : patched; patched = patched.startsWith('char(') ? 'char' : patched; patched = patched.startsWith('binary(') ? 'binary' : patched; patched = patched.startsWith('decimal(') ? 'decimal' : patched; patched = patched.startsWith('smallint(') ? 'smallint' : patched; patched = patched.startsWith('enum(') ? 'mysqlEnum' : patched; patched = patched.startsWith('datetime(') ? 'datetime' : patched; patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; patched = patched.startsWith('int(') ? 'int' : patched; patched = patched.startsWith('double(') ? 'double' : patched; patched = patched.startsWith('float(') ? 'float' : patched; patched = patched.startsWith('int unsigned') ? 'int' : patched; patched = patched.startsWith('tinyint unsigned') ? 'tinyint' : patched; patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched; patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched; patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched; return patched; }) .filter((type) => { return mysqlImportsList.has(type); }); res.mysql.push(...columnImports); return res; }, { mysql: [] as string[] }, ); Object.values(schema.views).forEach((it) => { imports.mysql.push('mysqlView'); const columnImports = Object.values(it.columns) .map((col) => { let patched = importsPatch[col.type] ?? col.type; patched = patched.startsWith('varchar(') ? 'varchar' : patched; patched = patched.startsWith('char(') ? 'char' : patched; patched = patched.startsWith('binary(') ? 'binary' : patched; patched = patched.startsWith('decimal(') ? 'decimal' : patched; patched = patched.startsWith('smallint(') ? 'smallint' : patched; patched = patched.startsWith('enum(') ? 'mysqlEnum' : patched; patched = patched.startsWith('datetime(') ? 'datetime' : patched; patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; patched = patched.startsWith('int(') ? 'int' : patched; patched = patched.startsWith('double(') ? 'double' : patched; patched = patched.startsWith('float(') ? 'float' : patched; patched = patched.startsWith('int unsigned') ? 'int' : patched; patched = patched.startsWith('tinyint unsigned') ? 'tinyint' : patched; patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched; patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched; patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched; return patched; }) .filter((type) => { return mysqlImportsList.has(type); }); imports.mysql.push(...columnImports); }); const tableStatements = Object.values(schema.tables).map((table) => { const func = 'mysqlTable'; let statement = ''; if (imports.mysql.includes(withCasing(table.name))) { statement = `// Table name is in conflict with ${ withCasing( table.name, ) } import.\n// Please change to any other name, that is not in imports list\n`; } statement += `export const ${withCasing(table.name)} = ${func}("${table.name}", {\n`; statement += createTableColumns( Object.values(table.columns), Object.values(table.foreignKeys), withCasing, casing, table.name, schema, ); statement += '}'; // more than 2 fields or self reference or cyclic const filteredFKs = Object.values(table.foreignKeys).filter((it) => { return it.columnsFrom.length > 1 || isSelf(it); }); if ( Object.keys(table.indexes).length > 0 || filteredFKs.length > 0 || Object.keys(table.compositePrimaryKeys).length > 0 || Object.keys(table.uniqueConstraints).length > 0 || Object.keys(table.checkConstraint).length > 0 ) { statement += ',\n'; statement += '(table) => ['; statement += createTableIndexes( table.name, Object.values(table.indexes), withCasing, ); statement += createTableFKs(Object.values(filteredFKs), withCasing); statement += createTablePKs( Object.values(table.compositePrimaryKeys), withCasing, ); statement += createTableUniques( Object.values(table.uniqueConstraints), withCasing, ); statement += createTableChecks( Object.values(table.checkConstraint), withCasing, ); statement += '\n]'; } statement += ');'; return statement; }); const viewsStatements = Object.values(schema.views).map((view) => { const { columns, name, algorithm, definition, sqlSecurity, withCheckOption } = view; const func = 'mysqlView'; let statement = ''; if (imports.mysql.includes(withCasing(name))) { statement = `// Table name is in conflict with ${ withCasing( view.name, ) } import.\n// Please change to any other name, that is not in imports list\n`; } statement += `export const ${withCasing(name)} = ${func}("${name}", {\n`; statement += createTableColumns( Object.values(columns), [], withCasing, casing, name, schema, ); statement += '})'; statement += algorithm ? `.algorithm("${algorithm}")` : ''; statement += sqlSecurity ? `.sqlSecurity("${sqlSecurity}")` : ''; statement += withCheckOption ? `.withCheckOption("${withCheckOption}")` : ''; statement += `.as(sql\`${definition?.replaceAll('`', '\\`')}\`);`; return statement; }); const uniqueMySqlImports = [ 'mysqlTable', 'mysqlSchema', 'AnyMySqlColumn', ...new Set(imports.mysql), ]; const importsTs = `import { ${ uniqueMySqlImports.join( ', ', ) } } from "drizzle-orm/mysql-core"\nimport { sql } from "drizzle-orm"\n\n`; let decalrations = ''; decalrations += tableStatements.join('\n\n'); decalrations += '\n'; decalrations += viewsStatements.join('\n\n'); const file = importsTs + decalrations; const schemaEntry = ` { ${ Object.values(schema.tables) .map((it) => withCasing(it.name)) .join(',') } } `; return { file, // backward compatible, print to file imports: importsTs, decalrations, schemaEntry, }; }; const isCyclic = (fk: ForeignKey) => { const key = `${fk.tableFrom}-${fk.tableTo}`; const reverse = `${fk.tableTo}-${fk.tableFrom}`; return relations.has(key) && relations.has(reverse); }; const isSelf = (fk: ForeignKey) => { return fk.tableFrom === fk.tableTo; }; const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { if (isExpression) { return `sql\`${defaultValue}\``; } return defaultValue; }; const mapColumnDefaultForJson = (defaultValue: any) => { if ( typeof defaultValue === 'string' && defaultValue.startsWith("('") && defaultValue.endsWith("')") ) { return defaultValue.substring(2, defaultValue.length - 2); } return defaultValue; }; const column = ( type: string, name: string, casing: (value: string) => string, rawCasing: Casing, defaultValue?: any, autoincrement?: boolean, onUpdate?: boolean, isExpression?: boolean, ) => { let lowered = type; if (!type.startsWith('enum(')) { lowered = type.toLowerCase(); } if (lowered === 'serial') { return `${casing(name)}: serial(${dbColumnName({ name, casing: rawCasing })})`; } if (lowered.startsWith('int')) { const isUnsigned = lowered.startsWith('int unsigned'); const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); let out = `${casing(name)}: int(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; out += autoincrement ? `.autoincrement()` : ''; out += typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('tinyint')) { const isUnsigned = lowered.startsWith('tinyint unsigned'); const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); // let out = `${name.camelCase()}: tinyint("${name}")`; let out: string = `${casing(name)}: tinyint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; out += autoincrement ? `.autoincrement()` : ''; out += typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('smallint')) { const isUnsigned = lowered.startsWith('smallint unsigned'); const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); let out = `${casing(name)}: smallint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; out += autoincrement ? `.autoincrement()` : ''; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('mediumint')) { const isUnsigned = lowered.startsWith('mediumint unsigned'); const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); let out = `${casing(name)}: mediumint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; out += autoincrement ? `.autoincrement()` : ''; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('bigint')) { const isUnsigned = lowered.startsWith('bigint unsigned'); let out = `${casing(name)}: bigint(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: "number"${ isUnsigned ? ', unsigned: true' : '' } })`; out += autoincrement ? `.autoincrement()` : ''; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered === 'boolean') { let out = `${casing(name)}: boolean(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('double')) { let params: | { precision?: string; scale?: string; unsigned?: boolean } | undefined; if (lowered.length > (lowered.includes('unsigned') ? 15 : 6)) { const [precision, scale] = lowered .slice(7, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) .split(','); params = { precision, scale }; } if (lowered.includes('unsigned')) { params = { ...(params ?? {}), unsigned: true }; } const timeConfigParams = params ? timeConfig(params) : undefined; let out = params ? `${casing(name)}: double(${ dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined }) }${timeConfig(params)})` : `${casing(name)}: double(${dbColumnName({ name, casing: rawCasing })})`; // let out = `${name.camelCase()}: double("${name}")`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('float')) { let params: | { precision?: string; scale?: string; unsigned?: boolean } | undefined; if (lowered.length > (lowered.includes('unsigned') ? 14 : 5)) { const [precision, scale] = lowered .slice(6, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) .split(','); params = { precision, scale }; } if (lowered.includes('unsigned')) { params = { ...(params ?? {}), unsigned: true }; } let out = `${casing(name)}: float(${dbColumnName({ name, casing: rawCasing })}${params ? timeConfig(params) : ''})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered === 'real') { let out = `${casing(name)}: real(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('timestamp')) { const keyLength = 'timestamp'.length + 1; let fsp = lowered.length > keyLength ? Number(lowered.substring(keyLength, lowered.length - 1)) : null; fsp = fsp ? fsp : null; const params = timeConfig({ fsp, mode: "'string'" }); let out = params ? `${casing(name)}: timestamp(${ dbColumnName({ name, casing: rawCasing, withMode: params !== undefined }) }${params})` : `${casing(name)}: timestamp(${dbColumnName({ name, casing: rawCasing })})`; // mysql has only CURRENT_TIMESTAMP, as I found from docs. But will leave now() for just a case defaultValue = defaultValue === 'now()' || defaultValue === '(CURRENT_TIMESTAMP)' ? '.defaultNow()' : defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; let onUpdateNow = onUpdate ? '.onUpdateNow()' : ''; out += onUpdateNow; return out; } if (lowered.startsWith('time')) { const keyLength = 'time'.length + 1; let fsp = lowered.length > keyLength ? Number(lowered.substring(keyLength, lowered.length - 1)) : null; fsp = fsp ? fsp : null; const params = timeConfig({ fsp }); let out = params ? `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})` : `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing })})`; defaultValue = defaultValue === 'now()' ? '.defaultNow()' : defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } if (lowered === 'date') { let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t${ casing( name, ) }: date(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string' })`; defaultValue = defaultValue === 'now()' ? '.defaultNow()' : defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } // in mysql text can't have default value. Will leave it in case smth ;) if (lowered === 'text') { let out = `${casing(name)}: text(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } // in mysql text can't have default value. Will leave it in case smth ;) if (lowered === 'tinytext') { let out = `${casing(name)}: tinytext(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } // in mysql text can't have default value. Will leave it in case smth ;) if (lowered === 'mediumtext') { let out = `${casing(name)}: mediumtext(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } // in mysql text can't have default value. Will leave it in case smth ;) if (lowered === 'longtext') { let out = `${casing(name)}: longtext(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered === 'year') { let out = `${casing(name)}: year(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } // in mysql json can't have default value. Will leave it in case smth ;) if (lowered === 'json') { let out = `${casing(name)}: json(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefaultForJson(defaultValue)})` : ''; return out; } if (lowered.startsWith('varchar')) { let out: string = `${ casing( name, ) }: varchar(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${ lowered.substring( 'varchar'.length + 1, lowered.length - 1, ) } })`; const mappedDefaultValue = mapColumnDefault(defaultValue, isExpression); out += defaultValue ? `.default(${isExpression ? mappedDefaultValue : unescapeSingleQuotes(mappedDefaultValue, true)})` : ''; return out; } if (lowered.startsWith('char')) { let out: string = `${ casing( name, ) }: char(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${ lowered.substring( 'char'.length + 1, lowered.length - 1, ) } })`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('datetime')) { let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t`; const fsp = lowered.startsWith('datetime(') ? lowered.substring('datetime'.length + 1, lowered.length - 1) : undefined; out = fsp ? `${ casing( name, ) }: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string', fsp: ${ lowered.substring( 'datetime'.length + 1, lowered.length - 1, ) } })` : `${casing(name)}: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string'})`; defaultValue = defaultValue === 'now()' ? '.defaultNow()' : defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } if (lowered.startsWith('decimal')) { let params: | { precision?: string; scale?: string; unsigned?: boolean } | undefined; if (lowered.length > (lowered.includes('unsigned') ? 16 : 7)) { const [precision, scale] = lowered .slice(8, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) .split(','); params = { precision, scale }; } if (lowered.includes('unsigned')) { params = { ...(params ?? {}), unsigned: true }; } const timeConfigParams = params ? timeConfig(params) : undefined; let out = params ? `${casing(name)}: decimal(${ dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined }) }${timeConfigParams})` : `${casing(name)}: decimal(${dbColumnName({ name, casing: rawCasing })})`; defaultValue = typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } if (lowered.startsWith('binary')) { const keyLength = 'binary'.length + 1; let length = lowered.length > keyLength ? Number(lowered.substring(keyLength, lowered.length - 1)) : null; length = length ? length : null; const params = binaryConfig({ length }); let out = params ? `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})` : `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing })})`; defaultValue = defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } if (lowered.startsWith('enum')) { const values = lowered .substring('enum'.length + 1, lowered.length - 1) .split(',') .map((v) => unescapeSingleQuotes(v, true)) .join(','); let out = `${casing(name)}: mysqlEnum(${dbColumnName({ name, casing: rawCasing, withMode: true })}[${values}])`; const mappedDefaultValue = mapColumnDefault(defaultValue, isExpression); out += defaultValue ? `.default(${isExpression ? mappedDefaultValue : unescapeSingleQuotes(mappedDefaultValue, true)})` : ''; return out; } if (lowered.startsWith('varbinary')) { const keyLength = 'varbinary'.length + 1; let length = lowered.length > keyLength ? Number(lowered.substring(keyLength, lowered.length - 1)) : null; length = length ? length : null; const params = binaryConfig({ length }); let out = params ? `${casing(name)}: varbinary(${ dbColumnName({ name, casing: rawCasing, withMode: params !== undefined }) }${params})` : `${casing(name)}: varbinary(${dbColumnName({ name, casing: rawCasing })})`; defaultValue = defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } console.log('uknown', type); return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`; }; const createTableColumns = ( columns: Column[], fks: ForeignKey[], casing: (val: string) => string, rawCasing: Casing, tableName: string, schema: MySqlSchemaInternal, ): string => { let statement = ''; // no self refs and no cyclic const oneColumnsFKs = Object.values(fks) .filter((it) => { return !isSelf(it); }) .filter((it) => it.columnsFrom.length === 1); const fkByColumnName = oneColumnsFKs.reduce((res, it) => { const arr = res[it.columnsFrom[0]] || []; arr.push(it); res[it.columnsFrom[0]] = arr; return res; }, {} as Record); columns.forEach((it) => { statement += '\t'; statement += column( it.type, it.name, casing, rawCasing, it.default, it.autoincrement, it.onUpdate, schema.internal?.tables![tableName]?.columns[it.name] ?.isDefaultAnExpression ?? false, ); statement += it.primaryKey ? '.primaryKey()' : ''; statement += it.notNull ? '.notNull()' : ''; statement += it.generated ? `.generatedAlwaysAs(sql\`${ it.generated.as.replace( /`/g, '\\`', ) }\`, { mode: "${it.generated.type}" })` : ''; const fks = fkByColumnName[it.name]; if (fks) { const fksStatement = fks .map((it) => { const onDelete = it.onDelete && it.onDelete !== 'no action' ? it.onDelete : null; const onUpdate = it.onUpdate && it.onUpdate !== 'no action' ? it.onUpdate : null; const params = { onDelete, onUpdate }; const typeSuffix = isCyclic(it) ? ': AnyMySqlColumn' : ''; const paramsStr = objToStatement2(params); if (paramsStr) { return `.references(()${typeSuffix} => ${ casing( it.tableTo, ) }.${casing(it.columnsTo[0])}, ${paramsStr} )`; } return `.references(()${typeSuffix} => ${casing(it.tableTo)}.${ casing( it.columnsTo[0], ) })`; }) .join(''); statement += fksStatement; } statement += ',\n'; }); return statement; }; const createTableIndexes = ( tableName: string, idxs: Index[], casing: (value: string) => string, ): string => { let statement = ''; idxs.forEach((it) => { let idxKey = it.name.startsWith(tableName) && it.name !== tableName ? it.name.slice(tableName.length + 1) : it.name; idxKey = idxKey.endsWith('_index') ? idxKey.slice(0, -'_index'.length) + '_idx' : idxKey; idxKey = casing(idxKey); statement += `\n\t`; statement += it.isUnique ? 'uniqueIndex(' : 'index('; statement += `"${it.name}")`; statement += `.on(${ it.columns .map((it) => `table.${casing(it)}`) .join(', ') }),`; }); return statement; }; const createTableUniques = ( unqs: UniqueConstraint[], casing: (value: string) => string, ): string => { let statement = ''; unqs.forEach((it) => { const idxKey = casing(it.name); statement += `\n\t`; statement += 'unique('; statement += `"${it.name}")`; statement += `.on(${ it.columns .map((it) => `table.${casing(it)}`) .join(', ') }),`; }); return statement; }; const createTableChecks = ( checks: CheckConstraint[], casing: (value: string) => string, ): string => { let statement = ''; checks.forEach((it) => { statement += `\n\t`; statement += 'check('; statement += `"${it.name}", `; statement += `sql\`${it.value.replace(/`/g, '\\`')}\`)`; statement += `,`; }); return statement; }; const createTablePKs = ( pks: PrimaryKey[], casing: (value: string) => string, ): string => { let statement = ''; pks.forEach((it) => { let idxKey = casing(it.name); statement += `\n\t`; statement += 'primaryKey({ columns: ['; statement += `${ it.columns .map((c) => { return `table.${casing(c)}`; }) .join(', ') }]${it.name ? `, name: "${it.name}"` : ''}}`; statement += '),'; }); return statement; }; const createTableFKs = ( fks: ForeignKey[], casing: (value: string) => string, ): string => { let statement = ''; fks.forEach((it) => { const isSelf = it.tableTo === it.tableFrom; const tableTo = isSelf ? 'table' : `${casing(it.tableTo)}`; statement += `\n\t`; statement += `foreignKey({\n`; statement += `\t\t\tcolumns: [${ it.columnsFrom .map((i) => `table.${casing(i)}`) .join(', ') }],\n`; statement += `\t\t\tforeignColumns: [${ it.columnsTo .map((i) => `${tableTo}.${casing(i)}`) .join(', ') }],\n`; statement += `\t\t\tname: "${it.name}"\n`; statement += `\t\t})`; statement += it.onUpdate && it.onUpdate !== 'no action' ? `.onUpdate("${it.onUpdate}")` : ''; statement += it.onDelete && it.onDelete !== 'no action' ? `.onDelete("${it.onDelete}")` : ''; statement += `,`; }); return statement; }; ================================================ FILE: drizzle-kit/src/introspect-pg.ts ================================================ import { getTableName, is } from 'drizzle-orm'; import { AnyPgTable } from 'drizzle-orm/pg-core'; import { createTableRelationsHelpers, extractTablesRelationalConfig, Many, One, Relation, Relations, } from 'drizzle-orm/relations'; import './@types/utils'; import { toCamelCase } from 'drizzle-orm/casing'; import { Casing } from './cli/validations/common'; import { assertUnreachable } from './global'; import { CheckConstraint, Column, ForeignKey, Index, PgKitInternals, PgSchemaInternal, Policy, PrimaryKey, UniqueConstraint, } from './serializer/pgSchema'; import { indexName } from './serializer/pgSerializer'; import { unescapeSingleQuotes } from './utils'; const pgImportsList = new Set([ 'pgTable', 'pgEnum', 'smallint', 'integer', 'bigint', 'boolean', 'text', 'varchar', 'char', 'serial', 'smallserial', 'bigserial', 'decimal', 'numeric', 'real', 'json', 'jsonb', 'time', 'timestamp', 'date', 'interval', 'cidr', 'inet', 'macaddr', 'macaddr8', 'bigint', 'doublePrecision', 'uuid', 'vector', 'point', 'line', 'geometry', ]); const objToStatement2 = (json: { [s: string]: unknown }) => { json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys statement += ' }'; return statement; }; const timeConfig = (json: { [s: string]: unknown }) => { json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); statement += ' }'; return statement; }; const possibleIntervals = [ 'year', 'month', 'day', 'hour', 'minute', 'second', 'year to month', 'day to hour', 'day to minute', 'day to second', 'hour to minute', 'hour to second', 'minute to second', ]; const intervalStrToObj = (str: string) => { if (str.startsWith('interval(')) { return { precision: Number(str.substring('interval('.length, str.length - 1)), }; } const splitted = str.split(' '); if (splitted.length === 1) { return {}; } const rest = splitted.slice(1, splitted.length).join(' '); if (possibleIntervals.includes(rest)) { return { fields: `"${rest}"` }; } for (const s of possibleIntervals) { if (rest.startsWith(`${s}(`)) { return { fields: `"${s}"`, precision: Number(rest.substring(s.length + 1, rest.length - 1)), }; } } return {}; }; const intervalConfig = (str: string) => { const json = intervalStrToObj(str); // json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it: keyof typeof json) => `${it}: ${json[it]}`).join(', '); statement += ' }'; return statement; }; const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { if (isExpression) { return `sql\`${defaultValue}\``; } return defaultValue; }; const importsPatch = { 'double precision': 'doublePrecision', 'timestamp without time zone': 'timestamp', 'timestamp with time zone': 'timestamp', 'time without time zone': 'time', 'time with time zone': 'time', } as Record; const relations = new Set(); const escapeColumnKey = (value: string) => { if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { return `"${value}"`; } return value; }; const withCasing = (value: string, casing: Casing) => { if (casing === 'preserve') { return escapeColumnKey(value); } if (casing === 'camel') { return escapeColumnKey(value.camelCase()); } assertUnreachable(casing); }; const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { if (casing === 'preserve') { return ''; } if (casing === 'camel') { return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; } assertUnreachable(casing); }; export const relationsToTypeScriptForStudio = ( schema: Record>>, relations: Record>>>, ) => { const relationalSchema: Record = { ...Object.fromEntries( Object.entries(schema) .map(([key, val]) => { // have unique keys across schemas const mappedTableEntries = Object.entries(val).map((tableEntry) => { return [`__${key}__.${tableEntry[0]}`, tableEntry[1]]; }); return mappedTableEntries; }) .flat(), ), ...relations, }; const relationsConfig = extractTablesRelationalConfig(relationalSchema, createTableRelationsHelpers); let result = ''; function findColumnKey(table: AnyPgTable, columnName: string) { for (const tableEntry of Object.entries(table)) { const key = tableEntry[0]; const value = tableEntry[1]; if (value.name === columnName) { return key; } } } Object.values(relationsConfig.tables).forEach((table) => { const tableName = table.tsName.split('.')[1]; const relations = table.relations; let hasRelations = false; let relationsObjAsStr = ''; let hasOne = false; let hasMany = false; Object.values(relations).forEach((relation) => { hasRelations = true; if (is(relation, Many)) { hasMany = true; relationsObjAsStr += `\t\t${relation.fieldName}: many(${ relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] }${typeof relation.relationName !== 'undefined' ? `, { relationName: "${relation.relationName}"}` : ''}),`; } if (is(relation, One)) { hasOne = true; relationsObjAsStr += `\t\t${relation.fieldName}: one(${ relationsConfig.tableNamesMap[relation.referencedTableName].split('.')[1] }, { fields: [${ relation.config?.fields.map( (c) => `${relationsConfig.tableNamesMap[getTableName(relation.sourceTable)].split('.')[1]}.${ findColumnKey(relation.sourceTable, c.name) }`, ) }], references: [${ relation.config?.references.map( (c) => `${relationsConfig.tableNamesMap[getTableName(relation.referencedTable)].split('.')[1]}.${ findColumnKey(relation.referencedTable, c.name) }`, ) }]${typeof relation.relationName !== 'undefined' ? `, relationName: "${relation.relationName}"` : ''}}),`; } }); if (hasRelations) { result += `export const ${tableName}Relation = relations(${tableName}, ({${hasOne ? 'one' : ''}${ hasOne && hasMany ? ', ' : '' }${hasMany ? 'many' : ''}}) => ({ ${relationsObjAsStr} }));\n`; } }); return result; }; function generateIdentityParams(identity: Column['identity']) { let paramsObj = `{ name: "${identity!.name}"`; if (identity?.startWith) { paramsObj += `, startWith: ${identity.startWith}`; } if (identity?.increment) { paramsObj += `, increment: ${identity.increment}`; } if (identity?.minValue) { paramsObj += `, minValue: ${identity.minValue}`; } if (identity?.maxValue) { paramsObj += `, maxValue: ${identity.maxValue}`; } if (identity?.cache) { paramsObj += `, cache: ${identity.cache}`; } if (identity?.cycle) { paramsObj += `, cycle: true`; } paramsObj += ' }'; if (identity?.type === 'always') { return `.generatedAlwaysAsIdentity(${paramsObj})`; } return `.generatedByDefaultAsIdentity(${paramsObj})`; } export const paramNameFor = (name: string, schema?: string) => { const schemaSuffix = schema && schema !== 'public' ? `In${schema.capitalise()}` : ''; return `${name}${schemaSuffix}`; }; export const schemaToTypeScript = (schema: PgSchemaInternal, casing: Casing) => { // collectFKs Object.values(schema.tables).forEach((table) => { Object.values(table.foreignKeys).forEach((fk) => { const relation = `${fk.tableFrom}-${fk.tableTo}`; relations.add(relation); }); }); const schemas = Object.fromEntries( Object.entries(schema.schemas).map((it) => { return [it[0], withCasing(it[1], casing)]; }), ); const enumTypes = Object.values(schema.enums).reduce((acc, cur) => { acc.add(`${cur.schema}.${cur.name}`); return acc; }, new Set()); const imports = Object.values(schema.tables).reduce( (res, it) => { const idxImports = Object.values(it.indexes).map((idx) => (idx.isUnique ? 'uniqueIndex' : 'index')); const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); if (Object.values(it.foreignKeys).some((it) => isCyclic(it) && !isSelf(it))) { res.pg.push('type AnyPgColumn'); } const pkImports = Object.values(it.compositePrimaryKeys).map((it) => 'primaryKey'); const uniqueImports = Object.values(it.uniqueConstraints).map((it) => 'unique'); const checkImports = Object.values(it.checkConstraints).map( (it) => 'check', ); const policiesImports = Object.values(it.policies).map( (it) => 'pgPolicy', ); if (it.schema && it.schema !== 'public' && it.schema !== '') { res.pg.push('pgSchema'); } res.pg.push(...idxImports); res.pg.push(...fkImpots); res.pg.push(...pkImports); res.pg.push(...uniqueImports); res.pg.push(...policiesImports); res.pg.push(...checkImports); const columnImports = Object.values(it.columns) .map((col) => { let patched: string = (importsPatch[col.type] || col.type).replace('[]', ''); patched = patched === 'double precision' ? 'doublePrecision' : patched; patched = patched.startsWith('varchar(') ? 'varchar' : patched; patched = patched.startsWith('char(') ? 'char' : patched; patched = patched.startsWith('numeric(') ? 'numeric' : patched; patched = patched.startsWith('time(') ? 'time' : patched; patched = patched.startsWith('timestamp(') ? 'timestamp' : patched; patched = patched.startsWith('vector(') ? 'vector' : patched; patched = patched.startsWith('geometry(') ? 'geometry' : patched; return patched; }) .filter((type) => { return pgImportsList.has(type); }); res.pg.push(...columnImports); return res; }, { pg: [] as string[] }, ); Object.values(schema.views).forEach((it) => { if (it.schema && it.schema !== 'public' && it.schema !== '') { imports.pg.push('pgSchema'); } else if (it.schema === 'public') { it.materialized ? imports.pg.push('pgMaterializedView') : imports.pg.push('pgView'); } Object.values(it.columns).forEach(() => { const columnImports = Object.values(it.columns) .map((col) => { let patched: string = (importsPatch[col.type] || col.type).replace('[]', ''); patched = patched === 'double precision' ? 'doublePrecision' : patched; patched = patched.startsWith('varchar(') ? 'varchar' : patched; patched = patched.startsWith('char(') ? 'char' : patched; patched = patched.startsWith('numeric(') ? 'numeric' : patched; patched = patched.startsWith('time(') ? 'time' : patched; patched = patched.startsWith('timestamp(') ? 'timestamp' : patched; patched = patched.startsWith('vector(') ? 'vector' : patched; patched = patched.startsWith('geometry(') ? 'geometry' : patched; return patched; }) .filter((type) => { return pgImportsList.has(type); }); imports.pg.push(...columnImports); }); }); Object.values(schema.sequences).forEach((it) => { if (it.schema && it.schema !== 'public' && it.schema !== '') { imports.pg.push('pgSchema'); } else if (it.schema === 'public') { imports.pg.push('pgSequence'); } }); Object.values(schema.enums).forEach((it) => { if (it.schema && it.schema !== 'public' && it.schema !== '') { imports.pg.push('pgSchema'); } else if (it.schema === 'public') { imports.pg.push('pgEnum'); } }); if (Object.keys(schema.roles).length > 0) { imports.pg.push('pgRole'); } const enumStatements = Object.values(schema.enums) .map((it) => { const enumSchema = schemas[it.schema]; // const func = schema || schema === "public" ? "pgTable" : schema; const paramName = paramNameFor(it.name, enumSchema); const func = enumSchema ? `${enumSchema}.enum` : 'pgEnum'; const values = Object.values(it.values) .map((it) => `'${unescapeSingleQuotes(it, false)}'`) .join(', '); return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", [${values}])\n`; }) .join('') .concat('\n'); const sequencesStatements = Object.values(schema.sequences) .map((it) => { const seqSchema = schemas[it.schema]; const paramName = paramNameFor(it.name, seqSchema); const func = seqSchema ? `${seqSchema}.sequence` : 'pgSequence'; let params = ''; if (it.startWith) { params += `, startWith: "${it.startWith}"`; } if (it.increment) { params += `, increment: "${it.increment}"`; } if (it.minValue) { params += `, minValue: "${it.minValue}"`; } if (it.maxValue) { params += `, maxValue: "${it.maxValue}"`; } if (it.cache) { params += `, cache: "${it.cache}"`; } if (it.cycle) { params += `, cycle: true`; } else { params += `, cycle: false`; } return `export const ${withCasing(paramName, casing)} = ${func}("${it.name}"${ params ? `, { ${params.trimChar(',')} }` : '' })\n`; }) .join('') .concat(''); const schemaStatements = Object.entries(schemas) // .filter((it) => it[0] !== "public") .map((it) => { return `export const ${it[1]} = pgSchema("${it[0]}");\n`; }) .join(''); const rolesNameToTsKey: Record = {}; const rolesStatements = Object.entries(schema.roles) .map((it) => { const fields = it[1]; rolesNameToTsKey[fields.name] = it[0]; return `export const ${withCasing(it[0], casing)} = pgRole("${fields.name}", ${ !fields.createDb && !fields.createRole && fields.inherit ? '' : `${ `, { ${fields.createDb ? `createDb: true,` : ''}${fields.createRole ? ` createRole: true,` : ''}${ !fields.inherit ? ` inherit: false ` : '' }`.trimChar(',') }}` } );\n`; }) .join(''); const tableStatements = Object.values(schema.tables).map((table) => { const tableSchema = schemas[table.schema]; const paramName = paramNameFor(table.name, tableSchema); const func = tableSchema ? `${tableSchema}.table` : 'pgTable'; let statement = `export const ${withCasing(paramName, casing)} = ${func}("${table.name}", {\n`; statement += createTableColumns( table.name, Object.values(table.columns), Object.values(table.foreignKeys), enumTypes, schemas, casing, schema.internal, ); statement += '}'; // more than 2 fields or self reference or cyclic // Andrii: I switched this one off until we will get custom names in .references() // const filteredFKs = Object.values(table.foreignKeys).filter((it) => { // return it.columnsFrom.length > 1 || isSelf(it); // }); if ( Object.keys(table.indexes).length > 0 || Object.values(table.foreignKeys).length > 0 || Object.values(table.policies).length > 0 || Object.keys(table.compositePrimaryKeys).length > 0 || Object.keys(table.uniqueConstraints).length > 0 || Object.keys(table.checkConstraints).length > 0 ) { statement += ', '; statement += '(table) => ['; statement += createTableIndexes(table.name, Object.values(table.indexes), casing); statement += createTableFKs(Object.values(table.foreignKeys), schemas, casing); statement += createTablePKs( Object.values(table.compositePrimaryKeys), casing, ); statement += createTableUniques( Object.values(table.uniqueConstraints), casing, ); statement += createTablePolicies( Object.values(table.policies), casing, rolesNameToTsKey, ); statement += createTableChecks( Object.values(table.checkConstraints), casing, ); statement += '\n]'; } statement += ');'; return statement; }); const viewsStatements = Object.values(schema.views) .map((it) => { const viewSchema = schemas[it.schema]; const paramName = paramNameFor(it.name, viewSchema); const func = viewSchema ? (it.materialized ? `${viewSchema}.materializedView` : `${viewSchema}.view`) : it.materialized ? 'pgMaterializedView' : 'pgView'; const withOption = it.with ?? ''; const as = `sql\`${it.definition}\``; const tablespace = it.tablespace ?? ''; const columns = createTableColumns( '', Object.values(it.columns), [], enumTypes, schemas, casing, schema.internal, ); let statement = `export const ${withCasing(paramName, casing)} = ${func}("${it.name}", {${columns}})`; statement += tablespace ? `.tablespace("${tablespace}")` : ''; statement += withOption ? `.with(${JSON.stringify(withOption)})` : ''; statement += `.as(${as});`; return statement; }) .join('\n\n'); const uniquePgImports = ['pgTable', ...new Set(imports.pg)]; const importsTs = `import { ${ uniquePgImports.join( ', ', ) } } from "drizzle-orm/pg-core" import { sql } from "drizzle-orm"\n\n`; let decalrations = schemaStatements; decalrations += rolesStatements; decalrations += enumStatements; decalrations += sequencesStatements; decalrations += '\n'; decalrations += tableStatements.join('\n\n'); decalrations += '\n'; decalrations += viewsStatements; const file = importsTs + decalrations; // for drizzle studio query runner const schemaEntry = ` { ${ Object.values(schema.tables) .map((it) => withCasing(it.name, casing)) .join(',\n') } } `; return { file, imports: importsTs, decalrations, schemaEntry }; }; const isCyclic = (fk: ForeignKey) => { const key = `${fk.tableFrom}-${fk.tableTo}`; const reverse = `${fk.tableTo}-${fk.tableFrom}`; return relations.has(key) && relations.has(reverse); }; const isSelf = (fk: ForeignKey) => { return fk.tableFrom === fk.tableTo; }; const buildArrayDefault = (defaultValue: string, typeName: string): string => { if (typeof defaultValue === 'string' && !(defaultValue.startsWith('{') || defaultValue.startsWith("'{"))) { return `sql\`${defaultValue}\``; } defaultValue = defaultValue.substring(2, defaultValue.length - 2); return `[${ defaultValue .split(/\s*,\s*/g) .map((value) => { // if (['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(typeName)) { // return value; // } else if (typeName === 'interval') { // return value.replaceAll('"', "'"); // } else if (typeName === 'boolean') { // return value === 't' ? 'true' : 'false'; if (typeName === 'json' || typeName === 'jsonb') { return value.substring(1, value.length - 1).replaceAll('\\', ''); } return value; // } }) .join(', ') }]`; }; const mapDefault = ( tableName: string, type: string, name: string, enumTypes: Set, typeSchema: string, defaultValue?: any, internals?: PgKitInternals, ) => { const isExpression = internals?.tables[tableName]?.columns[name]?.isDefaultAnExpression ?? false; const isArray = internals?.tables[tableName]?.columns[name]?.isArray ?? false; const lowered = type.toLowerCase().replace('[]', ''); if (isArray) { return typeof defaultValue !== 'undefined' ? `.default(${buildArrayDefault(defaultValue, lowered)})` : ''; } if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(unescapeSingleQuotes(defaultValue, true), isExpression)})` : ''; } if (lowered.startsWith('integer')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; } if (lowered.startsWith('smallint')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; } if (lowered.startsWith('bigint')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; } if (lowered.startsWith('boolean')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; } if (lowered.startsWith('double precision')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; } if (lowered.startsWith('real')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; } if (lowered.startsWith('uuid')) { return defaultValue === 'gen_random_uuid()' ? '.defaultRandom()' : defaultValue ? `.default(sql\`${defaultValue}\`)` : ''; } if (lowered.startsWith('numeric')) { defaultValue = defaultValue ? (defaultValue.startsWith(`'`) && defaultValue.endsWith(`'`) ? defaultValue.substring(1, defaultValue.length - 1) : defaultValue) : undefined; return defaultValue ? `.default('${mapColumnDefault(defaultValue, isExpression)}')` : ''; } if (lowered.startsWith('timestamp')) { return defaultValue === 'now()' ? '.defaultNow()' : /^'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}(\.\d+)?([+-]\d{2}(:\d{2})?)?'$/.test(defaultValue) // Matches 'YYYY-MM-DD HH:MI:SS', 'YYYY-MM-DD HH:MI:SS.FFFFFF', 'YYYY-MM-DD HH:MI:SS+TZ', 'YYYY-MM-DD HH:MI:SS.FFFFFF+TZ' and 'YYYY-MM-DD HH:MI:SS+HH:MI' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : defaultValue ? `.default(sql\`${defaultValue}\`)` : ''; } if (lowered.startsWith('time')) { return defaultValue === 'now()' ? '.defaultNow()' : /^'\d{2}:\d{2}(:\d{2})?(\.\d+)?'$/.test(defaultValue) // Matches 'HH:MI', 'HH:MI:SS' and 'HH:MI:SS.FFFFFF' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : defaultValue ? `.default(sql\`${defaultValue}\`)` : ''; } if (lowered.startsWith('interval')) { return defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; } if (lowered === 'date') { return defaultValue === 'now()' ? '.defaultNow()' : /^'\d{4}-\d{2}-\d{2}'$/.test(defaultValue) // Matches 'YYYY-MM-DD' ? `.default(${defaultValue})` : defaultValue ? `.default(sql\`${defaultValue}\`)` : ''; } if (lowered.startsWith('text')) { return typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(unescapeSingleQuotes(defaultValue, true), isExpression)})` : ''; } if (lowered.startsWith('jsonb')) { const def = typeof defaultValue !== 'undefined' ? defaultValue.replace(/::(.*?)(?, typeSchema: string, casing: Casing, defaultValue?: any, internals?: PgKitInternals, ) => { const isExpression = internals?.tables[tableName]?.columns[name]?.isDefaultAnExpression ?? false; const lowered = type.toLowerCase().replace('[]', ''); if (enumTypes.has(`${typeSchema}.${type.replace('[]', '')}`)) { let out = `${withCasing(name, casing)}: ${withCasing(paramNameFor(type.replace('[]', ''), typeSchema), casing)}(${ dbColumnName({ name, casing }) })`; return out; } if (lowered.startsWith('serial')) { return `${withCasing(name, casing)}: serial(${dbColumnName({ name, casing })})`; } if (lowered.startsWith('smallserial')) { return `${withCasing(name, casing)}: smallserial(${dbColumnName({ name, casing })})`; } if (lowered.startsWith('bigserial')) { return `${withCasing(name, casing)}: bigserial(${ dbColumnName({ name, casing, withMode: true }) }{ mode: "bigint" })`; } if (lowered.startsWith('integer')) { let out = `${withCasing(name, casing)}: integer(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('smallint')) { let out = `${withCasing(name, casing)}: smallint(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('bigint')) { let out = `// You can use { mode: "bigint" } if numbers are exceeding js number limitations\n\t`; out += `${withCasing(name, casing)}: bigint(${dbColumnName({ name, casing, withMode: true })}{ mode: "number" })`; return out; } if (lowered.startsWith('boolean')) { let out = `${withCasing(name, casing)}: boolean(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('double precision')) { let out = `${withCasing(name, casing)}: doublePrecision(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('real')) { let out = `${withCasing(name, casing)}: real(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('uuid')) { let out = `${withCasing(name, casing)}: uuid(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('numeric')) { let params: { precision: string | undefined; scale: string | undefined } | undefined; if (lowered.length > 7) { const [precision, scale] = lowered.slice(8, lowered.length - 1).split(','); params = { precision, scale }; } let out = params ? `${withCasing(name, casing)}: numeric(${dbColumnName({ name, casing, withMode: true })}${timeConfig(params)})` : `${withCasing(name, casing)}: numeric(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('timestamp')) { const withTimezone = lowered.includes('with time zone'); // const split = lowered.split(" "); let precision = lowered.startsWith('timestamp(') ? Number(lowered.split(' ')[0].substring('timestamp('.length, lowered.split(' ')[0].length - 1)) : null; precision = precision ? precision : null; const params = timeConfig({ precision, withTimezone, mode: "'string'", }); let out = params ? `${withCasing(name, casing)}: timestamp(${dbColumnName({ name, casing, withMode: true })}${params})` : `${withCasing(name, casing)}: timestamp(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('time')) { const withTimezone = lowered.includes('with time zone'); let precision = lowered.startsWith('time(') ? Number(lowered.split(' ')[0].substring('time('.length, lowered.split(' ')[0].length - 1)) : null; precision = precision ? precision : null; const params = timeConfig({ precision, withTimezone }); let out = params ? `${withCasing(name, casing)}: time(${dbColumnName({ name, casing, withMode: true })}${params})` : `${withCasing(name, casing)}: time(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('interval')) { // const withTimezone = lowered.includes("with time zone"); // const split = lowered.split(" "); // let precision = split.length >= 2 ? Number(split[1].substring(1, 2)) : null; // precision = precision ? precision : null; const params = intervalConfig(lowered); let out = params ? `${withCasing(name, casing)}: interval(${dbColumnName({ name, casing, withMode: true })}${params})` : `${withCasing(name, casing)}: interval(${dbColumnName({ name, casing })})`; return out; } if (lowered === 'date') { let out = `${withCasing(name, casing)}: date(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('text')) { let out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('jsonb')) { let out = `${withCasing(name, casing)}: jsonb(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('json')) { let out = `${withCasing(name, casing)}: json(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('inet')) { let out = `${withCasing(name, casing)}: inet(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('cidr')) { let out = `${withCasing(name, casing)}: cidr(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('macaddr8')) { let out = `${withCasing(name, casing)}: macaddr8(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('macaddr')) { let out = `${withCasing(name, casing)}: macaddr(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('varchar')) { let out: string; if (lowered.length !== 7) { out = `${withCasing(name, casing)}: varchar(${dbColumnName({ name, casing, withMode: true })}{ length: ${ lowered.substring(8, lowered.length - 1) } })`; } else { out = `${withCasing(name, casing)}: varchar(${dbColumnName({ name, casing })})`; } return out; } if (lowered.startsWith('point')) { let out: string = `${withCasing(name, casing)}: point(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('line')) { let out: string = `${withCasing(name, casing)}: point(${dbColumnName({ name, casing })})`; return out; } if (lowered.startsWith('geometry')) { let out: string = ''; let isGeoUnknown = false; if (lowered.length !== 8) { const geometryOptions = lowered.slice(9, -1).split(','); if (geometryOptions.length === 1 && geometryOptions[0] !== '') { out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing, withMode: true })}{ type: "${ geometryOptions[0] }" })`; } else if (geometryOptions.length === 2) { out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing, withMode: true })}{ type: "${ geometryOptions[0] }", srid: ${geometryOptions[1]} })`; } else { isGeoUnknown = true; } } else { out = `${withCasing(name, casing)}: geometry(${dbColumnName({ name, casing })})`; } if (isGeoUnknown) { let unknown = `// TODO: failed to parse geometry type because found more than 2 options inside geometry function '${type}'\n// Introspect is currently supporting only type and srid options\n`; unknown += `\t${withCasing(name, casing)}: unknown("${name}")`; return unknown; } return out; } if (lowered.startsWith('vector')) { let out: string; if (lowered.length !== 6) { out = `${withCasing(name, casing)}: vector(${dbColumnName({ name, casing, withMode: true })}{ dimensions: ${ lowered.substring(7, lowered.length - 1) } })`; } else { out = `${withCasing(name, casing)}: vector(${dbColumnName({ name, casing })})`; } return out; } if (lowered.startsWith('char')) { let out: string; if (lowered.length !== 4) { out = `${withCasing(name, casing)}: char(${dbColumnName({ name, casing, withMode: true })}{ length: ${ lowered.substring(5, lowered.length - 1) } })`; } else { out = `${withCasing(name, casing)}: char(${dbColumnName({ name, casing })})`; } return out; } let unknown = `// TODO: failed to parse database type '${type}'\n`; unknown += `\t${withCasing(name, casing)}: unknown("${name}")`; return unknown; }; const dimensionsInArray = (size?: number): string => { let res = ''; if (typeof size === 'undefined') return res; for (let i = 0; i < size; i++) { res += '.array()'; } return res; }; const createTableColumns = ( tableName: string, columns: Column[], fks: ForeignKey[], enumTypes: Set, schemas: Record, casing: Casing, internals: PgKitInternals, ): string => { let statement = ''; // no self refs and no cyclic const oneColumnsFKs = Object.values(fks) .filter((it) => { return !isSelf(it); }) .filter((it) => it.columnsFrom.length === 1); const fkByColumnName = oneColumnsFKs.reduce((res, it) => { const arr = res[it.columnsFrom[0]] || []; arr.push(it); res[it.columnsFrom[0]] = arr; return res; }, {} as Record); columns.forEach((it) => { const columnStatement = column( tableName, it.type, it.name, enumTypes, it.typeSchema ?? 'public', casing, it.default, internals, ); statement += '\t'; statement += columnStatement; // Provide just this in column function if (internals?.tables[tableName]?.columns[it.name]?.isArray) { statement += dimensionsInArray(internals?.tables[tableName]?.columns[it.name]?.dimensions); } statement += mapDefault(tableName, it.type, it.name, enumTypes, it.typeSchema ?? 'public', it.default, internals); statement += it.primaryKey ? '.primaryKey()' : ''; statement += it.notNull && !it.identity ? '.notNull()' : ''; statement += it.identity ? generateIdentityParams(it.identity) : ''; statement += it.generated ? `.generatedAlwaysAs(sql\`${it.generated.as}\`)` : ''; // const fks = fkByColumnName[it.name]; // Andrii: I switched it off until we will get a custom naem setting in references // if (fks) { // const fksStatement = fks // .map((it) => { // const onDelete = it.onDelete && it.onDelete !== 'no action' ? it.onDelete : null; // const onUpdate = it.onUpdate && it.onUpdate !== 'no action' ? it.onUpdate : null; // const params = { onDelete, onUpdate }; // const typeSuffix = isCyclic(it) ? ': AnyPgColumn' : ''; // const paramsStr = objToStatement2(params); // const tableSchema = schemas[it.schemaTo || '']; // const paramName = paramNameFor(it.tableTo, tableSchema); // if (paramsStr) { // return `.references(()${typeSuffix} => ${ // withCasing( // paramName, // casing, // ) // }.${withCasing(it.columnsTo[0], casing)}, ${paramsStr} )`; // } // return `.references(()${typeSuffix} => ${ // withCasing( // paramName, // casing, // ) // }.${withCasing(it.columnsTo[0], casing)})`; // }) // .join(''); // statement += fksStatement; // } statement += ',\n'; }); return statement; }; const createTableIndexes = (tableName: string, idxs: Index[], casing: Casing): string => { let statement = ''; idxs.forEach((it) => { // we have issue when index is called as table called let idxKey = it.name.startsWith(tableName) && it.name !== tableName ? it.name.slice(tableName.length + 1) : it.name; idxKey = idxKey.endsWith('_index') ? idxKey.slice(0, -'_index'.length) + '_idx' : idxKey; idxKey = withCasing(idxKey, casing); const indexGeneratedName = indexName( tableName, it.columns.map((it) => it.expression), ); const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; statement += `\n\t`; statement += it.isUnique ? 'uniqueIndex(' : 'index('; statement += `${escapedIndexName})`; statement += `${it.concurrently ? `.concurrently()` : ''}`; statement += `.using("${it.method}", ${ it.columns .map((it) => { if (it.isExpression) { return `sql\`${it.expression}\``; } else { return `table.${withCasing(it.expression, casing)}${it.asc ? '.asc()' : '.desc()'}${ it.nulls === 'first' ? '.nullsFirst()' : '.nullsLast()' }${ it.opclass ? `.op("${it.opclass}")` : '' }`; } }) .join(', ') })`; statement += it.where ? `.where(sql\`${it.where}\`)` : ''; function reverseLogic(mappedWith: Record): string { let reversedString = '{'; for (const key in mappedWith) { if (mappedWith.hasOwnProperty(key)) { reversedString += `${key}: "${mappedWith[key]}",`; } } reversedString = reversedString.length > 1 ? reversedString.slice(0, reversedString.length - 1) : reversedString; return `${reversedString}}`; } statement += it.with && Object.keys(it.with).length > 0 ? `.with(${reverseLogic(it.with)})` : ''; statement += `,`; }); return statement; }; const createTablePKs = (pks: PrimaryKey[], casing: Casing): string => { let statement = ''; pks.forEach((it) => { statement += `\n\t`; statement += 'primaryKey({ columns: ['; statement += `${ it.columns .map((c) => { return `table.${withCasing(c, casing)}`; }) .join(', ') }]${it.name ? `, name: "${it.name}"` : ''}}`; statement += ')'; statement += `,`; }); return statement; }; // get a map of db role name to ts key // if to by key is in this map - no quotes, otherwise - quotes const createTablePolicies = ( policies: Policy[], casing: Casing, rolesNameToTsKey: Record = {}, ): string => { let statement = ''; policies.forEach((it) => { const idxKey = withCasing(it.name, casing); const mappedItTo = it.to?.map((v) => { return rolesNameToTsKey[v] ? withCasing(rolesNameToTsKey[v], casing) : `"${v}"`; }); statement += `\n\t`; statement += 'pgPolicy('; statement += `"${it.name}", { `; statement += `as: "${it.as?.toLowerCase()}", for: "${it.for?.toLowerCase()}", to: [${mappedItTo?.join(', ')}]${ it.using ? `, using: sql\`${it.using}\`` : '' }${it.withCheck ? `, withCheck: sql\`${it.withCheck}\` ` : ''}`; statement += ` }),`; }); return statement; }; const createTableUniques = ( unqs: UniqueConstraint[], casing: Casing, ): string => { let statement = ''; unqs.forEach((it) => { statement += `\n\t`; statement += 'unique('; statement += `"${it.name}")`; statement += `.on(${it.columns.map((it) => `table.${withCasing(it, casing)}`).join(', ')})`; statement += it.nullsNotDistinct ? `.nullsNotDistinct()` : ''; statement += `,`; }); return statement; }; const createTableChecks = ( checkConstraints: CheckConstraint[], casing: Casing, ) => { let statement = ''; checkConstraints.forEach((it) => { statement += `\n\t`; statement += 'check('; statement += `"${it.name}", `; statement += `sql\`${it.value}\`)`; statement += `,`; }); return statement; }; const createTableFKs = (fks: ForeignKey[], schemas: Record, casing: Casing): string => { let statement = ''; fks.forEach((it) => { const tableSchema = schemas[it.schemaTo || '']; const paramName = paramNameFor(it.tableTo, tableSchema); const isSelf = it.tableTo === it.tableFrom; const tableTo = isSelf ? 'table' : `${withCasing(paramName, casing)}`; statement += `\n\t`; statement += `foreignKey({\n`; statement += `\t\t\tcolumns: [${it.columnsFrom.map((i) => `table.${withCasing(i, casing)}`).join(', ')}],\n`; statement += `\t\t\tforeignColumns: [${ it.columnsTo.map((i) => `${tableTo}.${withCasing(i, casing)}`).join(', ') }],\n`; statement += `\t\t\tname: "${it.name}"\n`; statement += `\t\t})`; statement += it.onUpdate && it.onUpdate !== 'no action' ? `.onUpdate("${it.onUpdate}")` : ''; statement += it.onDelete && it.onDelete !== 'no action' ? `.onDelete("${it.onDelete}")` : ''; statement += `,`; }); return statement; }; ================================================ FILE: drizzle-kit/src/introspect-singlestore.ts ================================================ /* eslint-disable @typescript-eslint/no-unsafe-argument */ import { toCamelCase } from 'drizzle-orm/casing'; import './@types/utils'; import type { Casing } from './cli/validations/common'; import { assertUnreachable } from './global'; import { Column, Index, PrimaryKey, SingleStoreSchema, SingleStoreSchemaInternal, UniqueConstraint, } from './serializer/singlestoreSchema'; import { indexName } from './serializer/singlestoreSerializer'; // time precision to fsp // {mode: "string"} for timestamp by default const singlestoreImportsList = new Set([ 'singlestoreTable', 'singlestoreEnum', 'bigint', 'binary', 'boolean', 'char', 'date', 'datetime', 'decimal', 'double', 'float', 'int', 'json', // TODO: add new type BSON // TODO: add new type Blob // TODO: add new type UUID // TODO: add new type GUID // TODO: add new type Vector // TODO: add new type GeoPoint 'mediumint', 'real', 'serial', 'smallint', 'text', 'tinytext', 'mediumtext', 'longtext', 'time', 'timestamp', 'tinyint', 'varbinary', 'varchar', 'vector', 'year', 'enum', ]); const objToStatement = (json: any) => { json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it) => `"${it}": "${json[it]}"`).join(', '); statement += ' }'; return statement; }; const objToStatement2 = (json: any) => { json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys statement += ' }'; return statement; }; const timeConfig = (json: any) => { json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); statement += ' }'; return statement; }; const binaryConfig = (json: any) => { json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it) => `${it}: ${json[it]}`).join(', '); statement += ' }'; return statement; }; const importsPatch = { 'double precision': 'doublePrecision', 'timestamp without time zone': 'timestamp', } as Record; const escapeColumnKey = (value: string) => { if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { return `"${value}"`; } return value; }; const prepareCasing = (casing?: Casing) => (value: string) => { if (casing === 'preserve') { return escapeColumnKey(value); } if (casing === 'camel') { return escapeColumnKey(value.camelCase()); } assertUnreachable(casing); }; const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { if (casing === 'preserve') { return ''; } if (casing === 'camel') { return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; } assertUnreachable(casing); }; export const schemaToTypeScript = ( schema: SingleStoreSchemaInternal, casing: Casing, ) => { const withCasing = prepareCasing(casing); const imports = Object.values(schema.tables).reduce( (res, it) => { const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 'uniqueIndex' : 'index'); const pkImports = Object.values(it.compositePrimaryKeys).map( (it) => 'primaryKey', ); const uniqueImports = Object.values(it.uniqueConstraints).map( (it) => 'unique', ); res.singlestore.push(...idxImports); res.singlestore.push(...pkImports); res.singlestore.push(...uniqueImports); const columnImports = Object.values(it.columns) .map((col) => { let patched = importsPatch[col.type] ?? col.type; patched = patched.startsWith('varchar(') ? 'varchar' : patched; patched = patched.startsWith('char(') ? 'char' : patched; patched = patched.startsWith('binary(') ? 'binary' : patched; patched = patched.startsWith('decimal(') ? 'decimal' : patched; patched = patched.startsWith('smallint(') ? 'smallint' : patched; patched = patched.startsWith('enum(') ? 'singlestoreEnum' : patched; patched = patched.startsWith('datetime(') ? 'datetime' : patched; patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; patched = patched.startsWith('int(') ? 'int' : patched; patched = patched.startsWith('double(') ? 'double' : patched; patched = patched.startsWith('float(') ? 'float' : patched; patched = patched.startsWith('int unsigned') ? 'int' : patched; patched = patched.startsWith('tinyint(') ? 'tinyint' : patched; patched = patched.startsWith('mediumint(') ? 'mediumint' : patched; patched = patched.startsWith('bigint(') ? 'bigint' : patched; patched = patched.startsWith('tinyint unsigned') ? 'tinyint' : patched; patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched; patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched; patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched; return patched; }) .filter((type) => { return singlestoreImportsList.has(type); }); res.singlestore.push(...columnImports); return res; }, { singlestore: [] as string[] }, ); /* Object.values(schema.views).forEach((it) => { imports.singlestore.push('singlestoreView'); const columnImports = Object.values(it.columns) .map((col) => { let patched = importsPatch[col.type] ?? col.type; patched = patched.startsWith('varchar(') ? 'varchar' : patched; patched = patched.startsWith('char(') ? 'char' : patched; patched = patched.startsWith('binary(') ? 'binary' : patched; patched = patched.startsWith('decimal(') ? 'decimal' : patched; patched = patched.startsWith('smallint(') ? 'smallint' : patched; patched = patched.startsWith('enum(') ? 'singlestoreEnum' : patched; patched = patched.startsWith('datetime(') ? 'datetime' : patched; patched = patched.startsWith('varbinary(') ? 'varbinary' : patched; patched = patched.startsWith('int(') ? 'int' : patched; patched = patched.startsWith('double(') ? 'double' : patched; patched = patched.startsWith('float(') ? 'float' : patched; patched = patched.startsWith('int unsigned') ? 'int' : patched; patched = patched.startsWith('tinyint(') ? 'tinyint' : patched; patched = patched.startsWith('mediumint(') ? 'mediumint' : patched; patched = patched.startsWith('bigint(') ? 'bigint' : patched; patched = patched.startsWith('tinyint unsigned') ? 'tinyint' : patched; patched = patched.startsWith('smallint unsigned') ? 'smallint' : patched; patched = patched.startsWith('mediumint unsigned') ? 'mediumint' : patched; patched = patched.startsWith('bigint unsigned') ? 'bigint' : patched; return patched; }) .filter((type) => { return singlestoreImportsList.has(type); }); imports.singlestore.push(...columnImports); }); */ const tableStatements = Object.values(schema.tables).map((table) => { const func = 'singlestoreTable'; let statement = ''; if (imports.singlestore.includes(withCasing(table.name))) { statement = `// Table name is in conflict with ${ withCasing( table.name, ) } import.\n// Please change to any other name, that is not in imports list\n`; } statement += `export const ${withCasing(table.name)} = ${func}("${table.name}", {\n`; statement += createTableColumns( Object.values(table.columns), withCasing, casing, table.name, schema, ); statement += '}'; if ( Object.keys(table.indexes).length > 0 || Object.keys(table.compositePrimaryKeys).length > 0 || Object.keys(table.uniqueConstraints).length > 0 ) { statement += ',\n'; statement += '(table) => ['; statement += createTableIndexes( table.name, Object.values(table.indexes), withCasing, ); statement += createTablePKs( Object.values(table.compositePrimaryKeys), withCasing, ); statement += createTableUniques( Object.values(table.uniqueConstraints), withCasing, ); statement += '\n]'; } statement += ');'; return statement; }); /* const viewsStatements = Object.values(schema.views).map((view) => { const { columns, name, algorithm, definition, sqlSecurity, withCheckOption } = view; const func = 'singlestoreView'; let statement = ''; if (imports.singlestore.includes(withCasing(name))) { statement = `// Table name is in conflict with ${ withCasing( view.name, ) } import.\n// Please change to any other name, that is not in imports list\n`; } statement += `export const ${withCasing(name)} = ${func}("${name}", {\n`; statement += createTableColumns( Object.values(columns), withCasing, casing, name, schema, ); statement += '})'; statement += algorithm ? `.algorithm("${algorithm}")` : ''; statement += sqlSecurity ? `.sqlSecurity("${sqlSecurity}")` : ''; statement += withCheckOption ? `.withCheckOption("${withCheckOption}")` : ''; statement += `.as(sql\`${definition?.replaceAll('`', '\\`')}\`);`; return statement; }); */ const uniqueSingleStoreImports = [ 'singlestoreTable', 'singlestoreSchema', 'AnySingleStoreColumn', ...new Set(imports.singlestore), ]; const importsTs = `import { ${ uniqueSingleStoreImports.join( ', ', ) } } from "drizzle-orm/singlestore-core"\nimport { sql } from "drizzle-orm"\n\n`; let decalrations = ''; decalrations += tableStatements.join('\n\n'); decalrations += '\n'; /* decalrations += viewsStatements.join('\n\n'); */ const file = importsTs + decalrations; const schemaEntry = ` { ${ Object.values(schema.tables) .map((it) => withCasing(it.name)) .join(',') } } `; return { file, // backward compatible, print to file imports: importsTs, decalrations, schemaEntry, }; }; const mapColumnDefault = (defaultValue: any, isExpression?: boolean) => { if (isExpression) { return `sql\`${defaultValue}\``; } return defaultValue; }; const mapColumnDefaultForJson = (defaultValue: any) => { if ( typeof defaultValue === 'string' && defaultValue.startsWith("('") && defaultValue.endsWith("')") ) { return defaultValue.substring(2, defaultValue.length - 2); } return defaultValue; }; const column = ( type: string, name: string, casing: (value: string) => string, rawCasing: Casing, defaultValue?: any, autoincrement?: boolean, onUpdate?: boolean, isExpression?: boolean, ) => { let lowered = type; if (!type.startsWith('enum(')) { lowered = type.toLowerCase(); } if (lowered === 'serial') { return `${casing(name)}: serial(${dbColumnName({ name, casing: rawCasing })})`; } if (lowered.startsWith('int')) { const isUnsigned = lowered.includes('unsigned'); const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); let out = `${casing(name)}: int(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; out += autoincrement ? `.autoincrement()` : ''; out += typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('tinyint')) { const isUnsigned = lowered.includes('unsigned'); const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); let out: string = `${casing(name)}: tinyint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; out += autoincrement ? `.autoincrement()` : ''; out += typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('smallint')) { const isUnsigned = lowered.includes('unsigned'); const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); let out = `${casing(name)}: smallint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; out += autoincrement ? `.autoincrement()` : ''; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('mediumint')) { const isUnsigned = lowered.includes('unsigned'); const columnName = dbColumnName({ name, casing: rawCasing, withMode: isUnsigned }); let out = `${casing(name)}: mediumint(${columnName}${isUnsigned ? '{ unsigned: true }' : ''})`; out += autoincrement ? `.autoincrement()` : ''; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('bigint')) { const isUnsigned = lowered.includes('unsigned'); let out = `${casing(name)}: bigint(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: "number"${ isUnsigned ? ', unsigned: true' : '' } })`; out += autoincrement ? `.autoincrement()` : ''; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered === 'boolean') { let out = `${casing(name)}: boolean(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('double')) { let params: | { precision?: string; scale?: string; unsigned?: boolean } | undefined; if (lowered.length > (lowered.includes('unsigned') ? 15 : 6)) { const [precision, scale] = lowered .slice(7, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) .split(','); params = { precision, scale }; } if (lowered.includes('unsigned')) { params = { ...(params ?? {}), unsigned: true }; } const timeConfigParams = params ? timeConfig(params) : undefined; let out = params ? `${casing(name)}: double(${ dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined }) }${timeConfig(params)})` : `${casing(name)}: double(${dbColumnName({ name, casing: rawCasing })})`; // let out = `${name.camelCase()}: double("${name}")`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('float')) { let params: | { precision?: string; scale?: string; unsigned?: boolean } | undefined; if (lowered.length > (lowered.includes('unsigned') ? 14 : 5)) { const [precision, scale] = lowered .slice(6, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) .split(','); params = { precision, scale }; } if (lowered.includes('unsigned')) { params = { ...(params ?? {}), unsigned: true }; } let out = `${casing(name)}: float(${dbColumnName({ name, casing: rawCasing })}${params ? timeConfig(params) : ''})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered === 'real') { let out = `${casing(name)}: real(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('timestamp')) { const keyLength = 'timestamp'.length + 1; let fsp = lowered.length > keyLength ? Number(lowered.substring(keyLength, lowered.length - 1)) : null; fsp = fsp ? fsp : null; const params = timeConfig({ fsp, mode: "'string'" }); let out = params ? `${casing(name)}: timestamp(${ dbColumnName({ name, casing: rawCasing, withMode: params !== undefined }) }${params})` : `${casing(name)}: timestamp(${dbColumnName({ name, casing: rawCasing })})`; // singlestore has only CURRENT_TIMESTAMP, as I found from docs. But will leave now() for just a case defaultValue = defaultValue === 'now()' || defaultValue === 'CURRENT_TIMESTAMP' ? '.defaultNow()' : defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; let onUpdateNow = onUpdate ? '.onUpdateNow()' : ''; out += onUpdateNow; return out; } if (lowered.startsWith('time')) { const keyLength = 'time'.length + 1; let fsp = lowered.length > keyLength ? Number(lowered.substring(keyLength, lowered.length - 1)) : null; fsp = fsp ? fsp : null; const params = timeConfig({ fsp }); let out = params ? `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})` : `${casing(name)}: time(${dbColumnName({ name, casing: rawCasing })})`; defaultValue = defaultValue === 'now()' ? '.defaultNow()' : defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } if (lowered === 'date') { let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t${ casing( name, ) }: date(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string' })`; defaultValue = defaultValue === 'now()' ? '.defaultNow()' : defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } // in singlestore text can't have default value. Will leave it in case smth ;) if (lowered === 'text') { let out = `${casing(name)}: text(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } // in singlestore text can't have default value. Will leave it in case smth ;) if (lowered === 'tinytext') { let out = `${casing(name)}: tinytext(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } // in singlestore text can't have default value. Will leave it in case smth ;) if (lowered === 'mediumtext') { let out = `${casing(name)}: mediumtext(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } // in singlestore text can't have default value. Will leave it in case smth ;) if (lowered === 'longtext') { let out = `${casing(name)}: longtext(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered === 'year') { let out = `${casing(name)}: year(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } // in singlestore json can't have default value. Will leave it in case smth ;) if (lowered === 'json') { let out = `${casing(name)}: json(${dbColumnName({ name, casing: rawCasing })})`; out += defaultValue ? `.default(${mapColumnDefaultForJson(defaultValue)})` : ''; return out; } if (lowered.startsWith('varchar')) { let out: string = `${ casing( name, ) }: varchar(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${ lowered.substring( 'varchar'.length + 1, lowered.length - 1, ) } })`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('char')) { let out: string = `${ casing( name, ) }: char(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ length: ${ lowered.substring( 'char'.length + 1, lowered.length - 1, ) } })`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('datetime')) { let out = `// you can use { mode: 'date' }, if you want to have Date as type for this column\n\t`; const fsp = lowered.startsWith('datetime(') ? lowered.substring('datetime'.length + 1, lowered.length - 1) : undefined; out = fsp ? `${ casing( name, ) }: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string', fsp: ${ lowered.substring( 'datetime'.length + 1, lowered.length - 1, ) } })` : `${casing(name)}: datetime(${dbColumnName({ name, casing: rawCasing, withMode: true })}{ mode: 'string'})`; defaultValue = defaultValue === 'now()' ? '.defaultNow()' : defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } if (lowered.startsWith('decimal')) { let params: | { precision?: string; scale?: string; unsigned?: boolean } | undefined; if (lowered.length > (lowered.includes('unsigned') ? 16 : 7)) { const [precision, scale] = lowered .slice(8, lowered.length - (1 + (lowered.includes('unsigned') ? 9 : 0))) .split(','); params = { precision, scale }; } if (lowered.includes('unsigned')) { params = { ...(params ?? {}), unsigned: true }; } const timeConfigParams = params ? timeConfig(params) : undefined; let out = params ? `${casing(name)}: decimal(${ dbColumnName({ name, casing: rawCasing, withMode: timeConfigParams !== undefined }) }${timeConfigParams})` : `${casing(name)}: decimal(${dbColumnName({ name, casing: rawCasing })})`; defaultValue = typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } if (lowered.startsWith('binary')) { const keyLength = 'binary'.length + 1; let length = lowered.length > keyLength ? Number(lowered.substring(keyLength, lowered.length - 1)) : null; length = length ? length : null; const params = binaryConfig({ length }); let out = params ? `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing, withMode: params !== undefined })}${params})` : `${casing(name)}: binary(${dbColumnName({ name, casing: rawCasing })})`; defaultValue = defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } if (lowered.startsWith('enum')) { const values = lowered.substring('enum'.length + 1, lowered.length - 1); let out = `${casing(name)}: singlestoreEnum(${ dbColumnName({ name, casing: rawCasing, withMode: true }) }[${values}])`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } if (lowered.startsWith('varbinary')) { const keyLength = 'varbinary'.length + 1; let length = lowered.length > keyLength ? Number(lowered.substring(keyLength, lowered.length - 1)) : null; length = length ? length : null; const params = binaryConfig({ length }); let out = params ? `${casing(name)}: varbinary(${ dbColumnName({ name, casing: rawCasing, withMode: params !== undefined }) }${params})` : `${casing(name)}: varbinary(${dbColumnName({ name, casing: rawCasing })})`; defaultValue = defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; out += defaultValue; return out; } if (lowered.startsWith('vector')) { const [dimensions, elementType] = lowered.substring('vector'.length + 1, lowered.length - 1).split(','); let out = `${casing(name)}: vector(${ dbColumnName({ name, casing: rawCasing, withMode: true }) }{ dimensions: ${dimensions}, elementType: ${elementType} })`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue, isExpression)})` : ''; return out; } console.log('uknown', type); return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`; }; const createTableColumns = ( columns: Column[], casing: (val: string) => string, rawCasing: Casing, tableName: string, schema: SingleStoreSchemaInternal, ): string => { let statement = ''; columns.forEach((it) => { statement += '\t'; statement += column( it.type, it.name, casing, rawCasing, it.default, it.autoincrement, it.onUpdate, schema.internal?.tables![tableName]?.columns[it.name] ?.isDefaultAnExpression ?? false, ); statement += it.primaryKey ? '.primaryKey()' : ''; statement += it.notNull ? '.notNull()' : ''; statement += it.generated ? `.generatedAlwaysAs(sql\`${ it.generated.as.replace( /`/g, '\\`', ) }\`, { mode: "${it.generated.type}" })` : ''; statement += ',\n'; }); return statement; }; const createTableIndexes = ( tableName: string, idxs: Index[], casing: (value: string) => string, ): string => { let statement = ''; idxs.forEach((it) => { let idxKey = it.name.startsWith(tableName) && it.name !== tableName ? it.name.slice(tableName.length + 1) : it.name; idxKey = idxKey.endsWith('_index') ? idxKey.slice(0, -'_index'.length) + '_idx' : idxKey; idxKey = casing(idxKey); const indexGeneratedName = indexName(tableName, it.columns); const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; statement += `\n\t`; statement += it.isUnique ? 'uniqueIndex(' : 'index('; statement += `${escapedIndexName})`; statement += `.on(${ it.columns .map((it) => `table.${casing(it)}`) .join(', ') }),`; }); return statement; }; const createTableUniques = ( unqs: UniqueConstraint[], casing: (value: string) => string, ): string => { let statement = ''; unqs.forEach((it) => { statement += `\n\t`; statement += 'unique('; statement += `"${it.name}")`; statement += `.on(${ it.columns .map((it) => `table.${casing(it)}`) .join(', ') }),`; }); return statement; }; const createTablePKs = ( pks: PrimaryKey[], casing: (value: string) => string, ): string => { let statement = ''; pks.forEach((it) => { let idxKey = casing(it.name); statement += `\n\t`; statement += 'primaryKey({ columns: ['; statement += `${ it.columns .map((c) => { return `table.${casing(c)}`; }) .join(', ') }]${it.name ? `, name: "${it.name}"` : ''}}`; statement += '),'; }); return statement; }; ================================================ FILE: drizzle-kit/src/introspect-sqlite.ts ================================================ /* eslint-disable @typescript-eslint/no-unsafe-argument */ import { toCamelCase } from 'drizzle-orm/casing'; import './@types/utils'; import type { Casing } from './cli/validations/common'; import { assertUnreachable } from './global'; import { CheckConstraint } from './serializer/mysqlSchema'; import type { Column, ForeignKey, Index, PrimaryKey, SQLiteSchema, SQLiteSchemaInternal, UniqueConstraint, } from './serializer/sqliteSchema'; const sqliteImportsList = new Set([ 'sqliteTable', 'integer', 'real', 'text', 'numeric', 'blob', ]); export const indexName = (tableName: string, columns: string[]) => { return `${tableName}_${columns.join('_')}_index`; }; const objToStatement2 = (json: any) => { json = Object.fromEntries(Object.entries(json).filter((it) => it[1])); const keys = Object.keys(json); if (keys.length === 0) return; let statement = '{ '; statement += keys.map((it) => `${it}: "${json[it]}"`).join(', '); // no "" for keys statement += ' }'; return statement; }; const relations = new Set(); const escapeColumnKey = (value: string) => { if (/^(?![a-zA-Z_$][a-zA-Z0-9_$]*$).+$/.test(value)) { return `"${value}"`; } return value; }; const withCasing = (value: string, casing?: Casing) => { if (casing === 'preserve') { return escapeColumnKey(value); } if (casing === 'camel') { return escapeColumnKey(value.camelCase()); } return value; }; const dbColumnName = ({ name, casing, withMode = false }: { name: string; casing: Casing; withMode?: boolean }) => { if (casing === 'preserve') { return ''; } if (casing === 'camel') { return toCamelCase(name) === name ? '' : withMode ? `"${name}", ` : `"${name}"`; } assertUnreachable(casing); }; export const schemaToTypeScript = ( schema: SQLiteSchemaInternal, casing: Casing, ) => { // collectFKs Object.values(schema.tables).forEach((table) => { Object.values(table.foreignKeys).forEach((fk) => { const relation = `${fk.tableFrom}-${fk.tableTo}`; relations.add(relation); }); }); const imports = Object.values(schema.tables).reduce( (res, it) => { const idxImports = Object.values(it.indexes).map((idx) => idx.isUnique ? 'uniqueIndex' : 'index'); const fkImpots = Object.values(it.foreignKeys).map((it) => 'foreignKey'); const pkImports = Object.values(it.compositePrimaryKeys).map( (it) => 'primaryKey', ); const uniqueImports = Object.values(it.uniqueConstraints).map( (it) => 'unique', ); const checkImports = Object.values(it.checkConstraints).map( (it) => 'check', ); res.sqlite.push(...idxImports); res.sqlite.push(...fkImpots); res.sqlite.push(...pkImports); res.sqlite.push(...uniqueImports); res.sqlite.push(...checkImports); const columnImports = Object.values(it.columns) .map((col) => { return col.type; }) .filter((type) => { return sqliteImportsList.has(type); }); res.sqlite.push(...columnImports); return res; }, { sqlite: [] as string[] }, ); Object.values(schema.views).forEach((it) => { imports.sqlite.push('sqliteView'); const columnImports = Object.values(it.columns) .map((col) => { return col.type; }) .filter((type) => { return sqliteImportsList.has(type); }); imports.sqlite.push(...columnImports); }); const tableStatements = Object.values(schema.tables).map((table) => { const func = 'sqliteTable'; let statement = ''; if (imports.sqlite.includes(withCasing(table.name, casing))) { statement = `// Table name is in conflict with ${ withCasing( table.name, casing, ) } import.\n// Please change to any other name, that is not in imports list\n`; } statement += `export const ${withCasing(table.name, casing)} = ${func}("${table.name}", {\n`; statement += createTableColumns( Object.values(table.columns), Object.values(table.foreignKeys), casing, ); statement += '}'; // more than 2 fields or self reference or cyclic const filteredFKs = Object.values(table.foreignKeys).filter((it) => { return it.columnsFrom.length > 1 || isSelf(it); }); if ( Object.keys(table.indexes).length > 0 || filteredFKs.length > 0 || Object.keys(table.compositePrimaryKeys).length > 0 || Object.keys(table.uniqueConstraints).length > 0 || Object.keys(table.checkConstraints).length > 0 ) { statement += ',\n'; statement += '(table) => ['; statement += createTableIndexes( table.name, Object.values(table.indexes), casing, ); statement += createTableFKs(Object.values(filteredFKs), casing); statement += createTablePKs( Object.values(table.compositePrimaryKeys), casing, ); statement += createTableUniques( Object.values(table.uniqueConstraints), casing, ); statement += createTableChecks( Object.values(table.checkConstraints), casing, ); statement += '\n]'; } statement += ');'; return statement; }); const viewsStatements = Object.values(schema.views).map((view) => { const func = 'sqliteView'; let statement = ''; if (imports.sqlite.includes(withCasing(view.name, casing))) { statement = `// Table name is in conflict with ${ withCasing( view.name, casing, ) } import.\n// Please change to any other name, that is not in imports list\n`; } statement += `export const ${withCasing(view.name, casing)} = ${func}("${view.name}", {\n`; statement += createTableColumns( Object.values(view.columns), [], casing, ); statement += '})'; statement += `.as(sql\`${view.definition?.replaceAll('`', '\\`')}\`);`; return statement; }); const uniqueSqliteImports = [ 'sqliteTable', 'AnySQLiteColumn', ...new Set(imports.sqlite), ]; const importsTs = `import { ${ uniqueSqliteImports.join( ', ', ) } } from "drizzle-orm/sqlite-core" import { sql } from "drizzle-orm"\n\n`; let decalrations = tableStatements.join('\n\n'); decalrations += '\n\n'; decalrations += viewsStatements.join('\n\n'); const file = importsTs + decalrations; // for drizzle studio query runner const schemaEntry = ` { ${ Object.values(schema.tables) .map((it) => withCasing(it.name, casing)) .join(',') } } `; return { file, imports: importsTs, decalrations, schemaEntry }; }; const isCyclic = (fk: ForeignKey) => { const key = `${fk.tableFrom}-${fk.tableTo}`; const reverse = `${fk.tableTo}-${fk.tableFrom}`; return relations.has(key) && relations.has(reverse); }; const isSelf = (fk: ForeignKey) => { return fk.tableFrom === fk.tableTo; }; const mapColumnDefault = (defaultValue: any) => { if ( typeof defaultValue === 'string' && defaultValue.startsWith('(') && defaultValue.endsWith(')') ) { return `sql\`${defaultValue}\``; } // If default value is NULL as string it will come back from db as "'NULL'" and not just "NULL" if (defaultValue === 'NULL') { return `sql\`NULL\``; } if ( typeof defaultValue === 'string' ) { return defaultValue.substring(1, defaultValue.length - 1).replaceAll('"', '\\"').replaceAll("''", "'"); } return defaultValue; }; const column = ( type: string, name: string, defaultValue?: any, autoincrement?: boolean, casing?: Casing, ) => { let lowered = type; casing = casing!; if (lowered === 'integer') { let out = `${withCasing(name, casing)}: integer(${dbColumnName({ name, casing })})`; // out += autoincrement ? `.autoincrement()` : ""; out += typeof defaultValue !== 'undefined' ? `.default(${mapColumnDefault(defaultValue)})` : ''; return out; } if (lowered === 'real') { let out = `${withCasing(name, casing)}: real(${dbColumnName({ name, casing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : ''; return out; } if (lowered.startsWith('text')) { const match = lowered.match(/\d+/); let out: string; if (match) { out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing, withMode: true })}{ length: ${ match[0] } })`; } else { out = `${withCasing(name, casing)}: text(${dbColumnName({ name, casing })})`; } out += defaultValue ? `.default("${mapColumnDefault(defaultValue)}")` : ''; return out; } if (lowered === 'blob') { let out = `${withCasing(name, casing)}: blob(${dbColumnName({ name, casing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : ''; return out; } if (lowered === 'numeric') { let out = `${withCasing(name, casing)}: numeric(${dbColumnName({ name, casing })})`; out += defaultValue ? `.default(${mapColumnDefault(defaultValue)})` : ''; return out; } // console.log("uknown", type); return `// Warning: Can't parse ${type} from database\n\t// ${type}Type: ${type}("${name}")`; }; const createTableColumns = ( columns: Column[], fks: ForeignKey[], casing: Casing, ): string => { let statement = ''; // no self refs and no cyclic const oneColumnsFKs = Object.values(fks) .filter((it) => { return !isSelf(it); }) .filter((it) => it.columnsFrom.length === 1); const fkByColumnName = oneColumnsFKs.reduce((res, it) => { const arr = res[it.columnsFrom[0]] || []; arr.push(it); res[it.columnsFrom[0]] = arr; return res; }, {} as Record); columns.forEach((it) => { statement += '\t'; statement += column(it.type, it.name, it.default, it.autoincrement, casing); statement += it.primaryKey ? `.primaryKey(${it.autoincrement ? '{ autoIncrement: true }' : ''})` : ''; statement += it.notNull ? '.notNull()' : ''; statement += it.generated ? `.generatedAlwaysAs(sql\`${ it.generated.as .replace(/`/g, '\\`') .slice(1, -1) }\`, { mode: "${it.generated.type}" })` : ''; const fks = fkByColumnName[it.name]; if (fks) { const fksStatement = fks .map((it) => { const onDelete = it.onDelete && it.onDelete !== 'no action' ? it.onDelete : null; const onUpdate = it.onUpdate && it.onUpdate !== 'no action' ? it.onUpdate : null; const params = { onDelete, onUpdate }; const typeSuffix = isCyclic(it) ? ': AnySQLiteColumn' : ''; const paramsStr = objToStatement2(params); if (paramsStr) { return `.references(()${typeSuffix} => ${ withCasing( it.tableTo, casing, ) }.${withCasing(it.columnsTo[0], casing)}, ${paramsStr} )`; } return `.references(()${typeSuffix} => ${ withCasing( it.tableTo, casing, ) }.${withCasing(it.columnsTo[0], casing)})`; }) .join(''); statement += fksStatement; } statement += ',\n'; }); return statement; }; const createTableIndexes = ( tableName: string, idxs: Index[], casing: Casing, ): string => { let statement = ''; idxs.forEach((it) => { let idxKey = it.name.startsWith(tableName) && it.name !== tableName ? it.name.slice(tableName.length + 1) : it.name; idxKey = idxKey.endsWith('_index') ? idxKey.slice(0, -'_index'.length) + '_idx' : idxKey; idxKey = withCasing(idxKey, casing); const indexGeneratedName = indexName(tableName, it.columns); const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; statement += `\n\t`; statement += it.isUnique ? 'uniqueIndex(' : 'index('; statement += `${escapedIndexName})`; statement += `.on(${ it.columns .map((it) => `table.${withCasing(it, casing)}`) .join(', ') }),`; }); return statement; }; const createTableUniques = ( unqs: UniqueConstraint[], casing: Casing, ): string => { let statement = ''; unqs.forEach((it) => { const idxKey = withCasing(it.name, casing); statement += `\n\t`; statement += 'unique('; statement += `"${it.name}")`; statement += `.on(${ it.columns .map((it) => `table.${withCasing(it, casing)}`) .join(', ') }),`; }); return statement; }; const createTableChecks = ( checks: CheckConstraint[], casing: Casing, ): string => { let statement = ''; checks.forEach((it) => { statement += `\n\t`; statement += 'check('; statement += `"${it.name}", `; statement += `sql\`${it.value}\`)`; statement += `,`; }); return statement; }; const createTablePKs = (pks: PrimaryKey[], casing: Casing): string => { let statement = ''; pks.forEach((it, i) => { statement += `\n\t`; statement += 'primaryKey({ columns: ['; statement += `${ it.columns .map((c) => { return `table.${withCasing(c, casing)}`; }) .join(', ') }]${it.name ? `, name: "${it.name}"` : ''}}`; statement += ')'; }); return statement; }; const createTableFKs = (fks: ForeignKey[], casing: Casing): string => { let statement = ''; fks.forEach((it) => { const isSelf = it.tableTo === it.tableFrom; const tableTo = isSelf ? 'table' : `${withCasing(it.tableTo, casing)}`; statement += `\n\t`; statement += `foreignKey(() => ({\n`; statement += `\t\t\tcolumns: [${ it.columnsFrom .map((i) => `table.${withCasing(i, casing)}`) .join(', ') }],\n`; statement += `\t\t\tforeignColumns: [${ it.columnsTo .map((i) => `${tableTo}.${withCasing(i, casing)}`) .join(', ') }],\n`; statement += `\t\t\tname: "${it.name}"\n`; statement += `\t\t}))`; statement += it.onUpdate && it.onUpdate !== 'no action' ? `.onUpdate("${it.onUpdate}")` : ''; statement += it.onDelete && it.onDelete !== 'no action' ? `.onDelete("${it.onDelete}")` : ''; statement += `,`; }); return statement; }; ================================================ FILE: drizzle-kit/src/jsonDiffer.js ================================================ 'use-strict'; import { diff } from 'json-diff'; export function diffForRenamedTables(pairs) { // raname table1 to name of table2, so we can apply diffs const renamed = pairs.map((it) => { const from = it.from; const to = it.to; const newFrom = { ...from, name: to.name }; return [newFrom, to]; }); // find any alternations made to a renamed table const altered = renamed.map((pair) => { return diffForRenamedTable(pair[0], pair[1]); }); return altered; } function diffForRenamedTable(t1, t2) { t1.name = t2.name; const diffed = diff(t1, t2) || {}; diffed.name = t2.name; return findAlternationsInTable(diffed, t2.schema); } export function diffForRenamedColumn(t1, t2) { const renamed = { ...t1, name: t2.name }; const diffed = diff(renamed, t2) || {}; diffed.name = t2.name; return alternationsInColumn(diffed); } const update1to2 = (json) => { Object.entries(json).forEach(([key, val]) => { if ('object' !== typeof val) return; if (val.hasOwnProperty('references')) { const ref = val['references']; const fkName = ref['foreignKeyName']; const table = ref['table']; const column = ref['column']; const onDelete = ref['onDelete']; const onUpdate = ref['onUpdate']; const newRef = `${fkName};${table};${column};${onDelete ?? ''};${onUpdate ?? ''}`; val['references'] = newRef; } else { update1to2(val); } }); }; const mapArraysDiff = (source, diff) => { const sequence = []; let sourceIndex = 0; for (let i = 0; i < diff.length; i++) { const it = diff[i]; if (it.length === 1) { sequence.push({ type: 'same', value: source[sourceIndex] }); sourceIndex += 1; } else { if (it[0] === '-') { sequence.push({ type: 'removed', value: it[1] }); } else { sequence.push({ type: 'added', value: it[1], before: '' }); } } } const result = sequence.reverse().reduce( (acc, it) => { if (it.type === 'same') { acc.prev = it.value; } if (it.type === 'added' && acc.prev) { it.before = acc.prev; } acc.result.push(it); return acc; }, { result: [] }, ); return result.result.reverse(); }; export function diffSchemasOrTables(left, right) { left = JSON.parse(JSON.stringify(left)); right = JSON.parse(JSON.stringify(right)); const result = Object.entries(diff(left, right) ?? {}); const added = result .filter((it) => it[0].endsWith('__added')) .map((it) => it[1]); const deleted = result .filter((it) => it[0].endsWith('__deleted')) .map((it) => it[1]); return { added, deleted }; } export function diffIndPolicies(left, right) { left = JSON.parse(JSON.stringify(left)); right = JSON.parse(JSON.stringify(right)); const result = Object.entries(diff(left, right) ?? {}); const added = result .filter((it) => it[0].endsWith('__added')) .map((it) => it[1]); const deleted = result .filter((it) => it[0].endsWith('__deleted')) .map((it) => it[1]); return { added, deleted }; } export function diffColumns(left, right) { left = JSON.parse(JSON.stringify(left)); right = JSON.parse(JSON.stringify(right)); const result = diff(left, right) ?? {}; const alteredTables = Object.fromEntries( Object.entries(result) .filter((it) => { return !(it[0].includes('__added') || it[0].includes('__deleted')); }) .map((tableEntry) => { // const entry = { name: it, ...result[it] } const deletedColumns = Object.entries(tableEntry[1].columns ?? {}) .filter((it) => { return it[0].endsWith('__deleted'); }) .map((it) => { return it[1]; }); const addedColumns = Object.entries(tableEntry[1].columns ?? {}) .filter((it) => { return it[0].endsWith('__added'); }) .map((it) => { return it[1]; }); tableEntry[1].columns = { added: addedColumns, deleted: deletedColumns, }; const table = left[tableEntry[0]]; return [ tableEntry[0], { name: table.name, schema: table.schema, ...tableEntry[1] }, ]; }), ); return alteredTables; } export function diffPolicies(left, right) { left = JSON.parse(JSON.stringify(left)); right = JSON.parse(JSON.stringify(right)); const result = diff(left, right) ?? {}; const alteredTables = Object.fromEntries( Object.entries(result) .filter((it) => { return !(it[0].includes('__added') || it[0].includes('__deleted')); }) .map((tableEntry) => { // const entry = { name: it, ...result[it] } const deletedPolicies = Object.entries(tableEntry[1].policies ?? {}) .filter((it) => { return it[0].endsWith('__deleted'); }) .map((it) => { return it[1]; }); const addedPolicies = Object.entries(tableEntry[1].policies ?? {}) .filter((it) => { return it[0].endsWith('__added'); }) .map((it) => { return it[1]; }); tableEntry[1].policies = { added: addedPolicies, deleted: deletedPolicies, }; const table = left[tableEntry[0]]; return [ tableEntry[0], { name: table.name, schema: table.schema, ...tableEntry[1] }, ]; }), ); return alteredTables; } export function applyJsonDiff(json1, json2) { json1 = JSON.parse(JSON.stringify(json1)); json2 = JSON.parse(JSON.stringify(json2)); // deep copy, needed because of the bug in diff library const rawDiff = diff(json1, json2); const difference = JSON.parse(JSON.stringify(rawDiff || {})); difference.schemas = difference.schemas || {}; difference.tables = difference.tables || {}; difference.enums = difference.enums || {}; difference.sequences = difference.sequences || {}; difference.roles = difference.roles || {}; difference.policies = difference.policies || {}; difference.views = difference.views || {}; // remove added/deleted schemas const schemaKeys = Object.keys(difference.schemas); for (let key of schemaKeys) { if (key.endsWith('__added') || key.endsWith('__deleted')) { delete difference.schemas[key]; continue; } } // remove added/deleted tables const tableKeys = Object.keys(difference.tables); for (let key of tableKeys) { if (key.endsWith('__added') || key.endsWith('__deleted')) { delete difference.tables[key]; continue; } // supply table name and schema for altered tables const table = json1.tables[key]; difference.tables[key] = { name: table.name, schema: table.schema, ...difference.tables[key], }; } for (let [tableKey, tableValue] of Object.entries(difference.tables)) { const table = difference.tables[tableKey]; const columns = tableValue.columns || {}; const columnKeys = Object.keys(columns); for (let key of columnKeys) { if (key.endsWith('__added') || key.endsWith('__deleted')) { delete table.columns[key]; continue; } } if (Object.keys(columns).length === 0) { delete table['columns']; } if ( 'name' in table && 'schema' in table && Object.keys(table).length === 2 ) { delete difference.tables[tableKey]; } } const enumsEntries = Object.entries(difference.enums); const alteredEnums = enumsEntries .filter((it) => !(it[0].includes('__added') || it[0].includes('__deleted'))) .map((it) => { const enumEntry = json1.enums[it[0]]; const { name, schema, values } = enumEntry; const sequence = mapArraysDiff(values, it[1].values); const addedValues = sequence .filter((it) => it.type === 'added') .map((it) => { return { before: it.before, value: it.value, }; }); const deletedValues = sequence .filter((it) => it.type === 'removed') .map((it) => it.value); return { name, schema, addedValues, deletedValues }; }); const sequencesEntries = Object.entries(difference.sequences); const alteredSequences = sequencesEntries .filter((it) => !(it[0].includes('__added') || it[0].includes('__deleted')) && 'values' in it[1]) .map((it) => { return json2.sequences[it[0]]; }); const rolesEntries = Object.entries(difference.roles); const alteredRoles = rolesEntries .filter((it) => !(it[0].includes('__added') || it[0].includes('__deleted'))) .map((it) => { return json2.roles[it[0]]; }); const policiesEntries = Object.entries(difference.policies); const alteredPolicies = policiesEntries .filter((it) => !(it[0].includes('__added') || it[0].includes('__deleted'))) .map((it) => { return json2.policies[it[0]]; }); const viewsEntries = Object.entries(difference.views); const alteredViews = viewsEntries.filter((it) => !(it[0].includes('__added') || it[0].includes('__deleted'))).map( ([nameWithSchema, view]) => { const deletedWithOption = view.with__deleted; const addedWithOption = view.with__added; const deletedWith = Object.fromEntries( Object.entries(view.with || {}).filter((it) => it[0].endsWith('__deleted')).map(([key, value]) => { return [key.replace('__deleted', ''), value]; }), ); const addedWith = Object.fromEntries( Object.entries(view.with || {}).filter((it) => it[0].endsWith('__added')).map(([key, value]) => { return [key.replace('__added', ''), value]; }), ); const alterWith = Object.fromEntries( Object.entries(view.with || {}).filter((it) => typeof it[1].__old !== 'undefined' && typeof it[1].__new !== 'undefined' ).map( (it) => { return [it[0], it[1].__new]; }, ), ); const alteredSchema = view.schema; const alteredDefinition = view.definition; const alteredExisting = view.isExisting; const addedTablespace = view.tablespace__added; const droppedTablespace = view.tablespace__deleted; const alterTablespaceTo = view.tablespace; let alteredTablespace; if (addedTablespace) alteredTablespace = { __new: addedTablespace, __old: 'pg_default' }; if (droppedTablespace) alteredTablespace = { __new: 'pg_default', __old: droppedTablespace }; if (alterTablespaceTo) alteredTablespace = alterTablespaceTo; const addedUsing = view.using__added; const droppedUsing = view.using__deleted; const alterUsingTo = view.using; let alteredUsing; if (addedUsing) alteredUsing = { __new: addedUsing, __old: 'heap' }; if (droppedUsing) alteredUsing = { __new: 'heap', __old: droppedUsing }; if (alterUsingTo) alteredUsing = alterUsingTo; const alteredMeta = view.meta; return Object.fromEntries( Object.entries({ name: json2.views[nameWithSchema].name, schema: json2.views[nameWithSchema].schema, // pg deletedWithOption: deletedWithOption, addedWithOption: addedWithOption, deletedWith: Object.keys(deletedWith).length ? deletedWith : undefined, addedWith: Object.keys(addedWith).length ? addedWith : undefined, alteredWith: Object.keys(alterWith).length ? alterWith : undefined, alteredSchema, alteredTablespace, alteredUsing, // mysql alteredMeta, // common alteredDefinition, alteredExisting, }).filter(([_, value]) => value !== undefined), ); }, ); const alteredTablesWithColumns = Object.values(difference.tables).map( (table) => { return findAlternationsInTable(table); }, ); return { alteredTablesWithColumns, alteredEnums, alteredSequences, alteredRoles, alteredViews, alteredPolicies, }; } const findAlternationsInTable = (table) => { // map each table to have altered, deleted or renamed columns // in case no columns were altered, but indexes were const columns = table.columns ?? {}; const altered = Object.keys(columns) .filter((it) => !(it.includes('__deleted') || it.includes('__added'))) .map((it) => { return { name: it, ...columns[it] }; }); const deletedIndexes = Object.fromEntries( Object.entries(table.indexes__deleted || {}) .concat( Object.entries(table.indexes || {}).filter((it) => it[0].includes('__deleted')), ) .map((entry) => [entry[0].replace('__deleted', ''), entry[1]]), ); const addedIndexes = Object.fromEntries( Object.entries(table.indexes__added || {}) .concat( Object.entries(table.indexes || {}).filter((it) => it[0].includes('__added')), ) .map((entry) => [entry[0].replace('__added', ''), entry[1]]), ); const alteredIndexes = Object.fromEntries( Object.entries(table.indexes || {}).filter((it) => { return !it[0].endsWith('__deleted') && !it[0].endsWith('__added'); }), ); const deletedPolicies = Object.fromEntries( Object.entries(table.policies__deleted || {}) .concat( Object.entries(table.policies || {}).filter((it) => it[0].includes('__deleted')), ) .map((entry) => [entry[0].replace('__deleted', ''), entry[1]]), ); const addedPolicies = Object.fromEntries( Object.entries(table.policies__added || {}) .concat( Object.entries(table.policies || {}).filter((it) => it[0].includes('__added')), ) .map((entry) => [entry[0].replace('__added', ''), entry[1]]), ); const alteredPolicies = Object.fromEntries( Object.entries(table.policies || {}).filter((it) => { return !it[0].endsWith('__deleted') && !it[0].endsWith('__added'); }), ); const deletedForeignKeys = Object.fromEntries( Object.entries(table.foreignKeys__deleted || {}) .concat( Object.entries(table.foreignKeys || {}).filter((it) => it[0].includes('__deleted')), ) .map((entry) => [entry[0].replace('__deleted', ''), entry[1]]), ); const addedForeignKeys = Object.fromEntries( Object.entries(table.foreignKeys__added || {}) .concat( Object.entries(table.foreignKeys || {}).filter((it) => it[0].includes('__added')), ) .map((entry) => [entry[0].replace('__added', ''), entry[1]]), ); const alteredForeignKeys = Object.fromEntries( Object.entries(table.foreignKeys || {}) .filter( (it) => !it[0].endsWith('__added') && !it[0].endsWith('__deleted'), ) .map((entry) => [entry[0], entry[1]]), ); const addedCompositePKs = Object.fromEntries( Object.entries(table.compositePrimaryKeys || {}).filter((it) => { return it[0].endsWith('__added'); }), ); const deletedCompositePKs = Object.fromEntries( Object.entries(table.compositePrimaryKeys || {}).filter((it) => { return it[0].endsWith('__deleted'); }), ); const alteredCompositePKs = Object.fromEntries( Object.entries(table.compositePrimaryKeys || {}).filter((it) => { return !it[0].endsWith('__deleted') && !it[0].endsWith('__added'); }), ); const addedUniqueConstraints = Object.fromEntries( Object.entries(table.uniqueConstraints || {}).filter((it) => { return it[0].endsWith('__added'); }), ); const deletedUniqueConstraints = Object.fromEntries( Object.entries(table.uniqueConstraints || {}).filter((it) => { return it[0].endsWith('__deleted'); }), ); const alteredUniqueConstraints = Object.fromEntries( Object.entries(table.uniqueConstraints || {}).filter((it) => { return !it[0].endsWith('__deleted') && !it[0].endsWith('__added'); }), ); const addedCheckConstraints = Object.fromEntries( Object.entries(table.checkConstraints || {}).filter((it) => { return it[0].endsWith('__added'); }), ); const deletedCheckConstraints = Object.fromEntries( Object.entries(table.checkConstraints || {}).filter((it) => { return it[0].endsWith('__deleted'); }), ); const alteredCheckConstraints = Object.fromEntries( Object.entries(table.checkConstraints || {}).filter((it) => { return !it[0].endsWith('__deleted') && !it[0].endsWith('__added'); }), ); const mappedAltered = altered.map((it) => alternationsInColumn(it)).filter(Boolean); return { name: table.name, schema: table.schema || '', altered: mappedAltered, addedIndexes, deletedIndexes, alteredIndexes, addedForeignKeys, deletedForeignKeys, alteredForeignKeys, addedCompositePKs, deletedCompositePKs, alteredCompositePKs, addedUniqueConstraints, deletedUniqueConstraints, alteredUniqueConstraints, deletedPolicies, addedPolicies, alteredPolicies, addedCheckConstraints, deletedCheckConstraints, alteredCheckConstraints, }; }; const alternationsInColumn = (column) => { const altered = [column]; const result = altered .filter((it) => { if ('type' in it && it.type.__old.replace(' (', '(') === it.type.__new.replace(' (', '(')) { return false; } return true; }) .map((it) => { if (typeof it.name !== 'string' && '__old' in it.name) { // rename return { ...it, name: { type: 'changed', old: it.name.__old, new: it.name.__new }, }; } return it; }) .map((it) => { if ('type' in it) { // type change return { ...it, type: { type: 'changed', old: it.type.__old, new: it.type.__new }, }; } return it; }) .map((it) => { if ('default' in it) { return { ...it, default: { type: 'changed', old: it.default.__old, new: it.default.__new, }, }; } if ('default__added' in it) { const { default__added, ...others } = it; return { ...others, default: { type: 'added', value: it.default__added }, }; } if ('default__deleted' in it) { const { default__deleted, ...others } = it; return { ...others, default: { type: 'deleted', value: it.default__deleted }, }; } return it; }) .map((it) => { if ('generated' in it) { if ('as' in it.generated && 'type' in it.generated) { return { ...it, generated: { type: 'changed', old: { as: it.generated.as.__old, type: it.generated.type.__old }, new: { as: it.generated.as.__new, type: it.generated.type.__new }, }, }; } else if ('as' in it.generated) { return { ...it, generated: { type: 'changed', old: { as: it.generated.as.__old }, new: { as: it.generated.as.__new }, }, }; } else { return { ...it, generated: { type: 'changed', old: { as: it.generated.type.__old }, new: { as: it.generated.type.__new }, }, }; } } if ('generated__added' in it) { const { generated__added, ...others } = it; return { ...others, generated: { type: 'added', value: it.generated__added }, }; } if ('generated__deleted' in it) { const { generated__deleted, ...others } = it; return { ...others, generated: { type: 'deleted', value: it.generated__deleted }, }; } return it; }) .map((it) => { if ('identity' in it) { return { ...it, identity: { type: 'changed', old: it.identity.__old, new: it.identity.__new, }, }; } if ('identity__added' in it) { const { identity__added, ...others } = it; return { ...others, identity: { type: 'added', value: it.identity__added }, }; } if ('identity__deleted' in it) { const { identity__deleted, ...others } = it; return { ...others, identity: { type: 'deleted', value: it.identity__deleted }, }; } return it; }) .map((it) => { if ('notNull' in it) { return { ...it, notNull: { type: 'changed', old: it.notNull.__old, new: it.notNull.__new, }, }; } if ('notNull__added' in it) { const { notNull__added, ...others } = it; return { ...others, notNull: { type: 'added', value: it.notNull__added }, }; } if ('notNull__deleted' in it) { const { notNull__deleted, ...others } = it; return { ...others, notNull: { type: 'deleted', value: it.notNull__deleted }, }; } return it; }) .map((it) => { if ('primaryKey' in it) { return { ...it, primaryKey: { type: 'changed', old: it.primaryKey.__old, new: it.primaryKey.__new, }, }; } if ('primaryKey__added' in it) { const { notNull__added, ...others } = it; return { ...others, primaryKey: { type: 'added', value: it.primaryKey__added }, }; } if ('primaryKey__deleted' in it) { const { notNull__deleted, ...others } = it; return { ...others, primaryKey: { type: 'deleted', value: it.primaryKey__deleted }, }; } return it; }) .map((it) => { if ('typeSchema' in it) { return { ...it, typeSchema: { type: 'changed', old: it.typeSchema.__old, new: it.typeSchema.__new, }, }; } if ('typeSchema__added' in it) { const { typeSchema__added, ...others } = it; return { ...others, typeSchema: { type: 'added', value: it.typeSchema__added }, }; } if ('typeSchema__deleted' in it) { const { typeSchema__deleted, ...others } = it; return { ...others, typeSchema: { type: 'deleted', value: it.typeSchema__deleted }, }; } return it; }) .map((it) => { if ('onUpdate' in it) { return { ...it, onUpdate: { type: 'changed', old: it.onUpdate.__old, new: it.onUpdate.__new, }, }; } if ('onUpdate__added' in it) { const { onUpdate__added, ...others } = it; return { ...others, onUpdate: { type: 'added', value: it.onUpdate__added }, }; } if ('onUpdate__deleted' in it) { const { onUpdate__deleted, ...others } = it; return { ...others, onUpdate: { type: 'deleted', value: it.onUpdate__deleted }, }; } return it; }) .map((it) => { if ('autoincrement' in it) { return { ...it, autoincrement: { type: 'changed', old: it.autoincrement.__old, new: it.autoincrement.__new, }, }; } if ('autoincrement__added' in it) { const { autoincrement__added, ...others } = it; return { ...others, autoincrement: { type: 'added', value: it.autoincrement__added }, }; } if ('autoincrement__deleted' in it) { const { autoincrement__deleted, ...others } = it; return { ...others, autoincrement: { type: 'deleted', value: it.autoincrement__deleted }, }; } return it; }) .map((it) => { if ('' in it) { return { ...it, autoincrement: { type: 'changed', old: it.autoincrement.__old, new: it.autoincrement.__new, }, }; } if ('autoincrement__added' in it) { const { autoincrement__added, ...others } = it; return { ...others, autoincrement: { type: 'added', value: it.autoincrement__added }, }; } if ('autoincrement__deleted' in it) { const { autoincrement__deleted, ...others } = it; return { ...others, autoincrement: { type: 'deleted', value: it.autoincrement__deleted }, }; } return it; }) .filter(Boolean); return result[0]; }; ================================================ FILE: drizzle-kit/src/jsonStatements.ts ================================================ import chalk from 'chalk'; import { getNewTableName } from './cli/commands/sqlitePushUtils'; import { warning } from './cli/views'; import { CommonSquashedSchema } from './schemaValidator'; import { MySqlKitInternals, MySqlSchema, MySqlSquasher, View as MySqlView } from './serializer/mysqlSchema'; import { Index, MatViewWithOption, PgSchema, PgSchemaSquashed, PgSquasher, Policy, Role, View as PgView, ViewWithOption, } from './serializer/pgSchema'; import { SingleStoreKitInternals, SingleStoreSchema, SingleStoreSquasher } from './serializer/singlestoreSchema'; import { SQLiteKitInternals, SQLiteSchemaInternal, SQLiteSchemaSquashed, SQLiteSquasher, View as SqliteView, } from './serializer/sqliteSchema'; import { AlteredColumn, Column, Sequence, Table } from './snapshotsDiffer'; export interface JsonSqliteCreateTableStatement { type: 'sqlite_create_table'; tableName: string; columns: Column[]; referenceData: { name: string; tableFrom: string; columnsFrom: string[]; tableTo: string; columnsTo: string[]; onUpdate?: string | undefined; onDelete?: string | undefined; }[]; compositePKs: string[][]; uniqueConstraints?: string[]; checkConstraints?: string[]; } export interface JsonCreateTableStatement { type: 'create_table'; tableName: string; schema: string; columns: Column[]; compositePKs: string[]; compositePkName?: string; uniqueConstraints?: string[]; policies?: string[]; checkConstraints?: string[]; internals?: MySqlKitInternals | SingleStoreKitInternals; isRLSEnabled?: boolean; } export interface JsonRecreateTableStatement { type: 'recreate_table'; tableName: string; columns: Column[]; referenceData: { name: string; tableFrom: string; columnsFrom: string[]; tableTo: string; columnsTo: string[]; onUpdate?: string | undefined; onDelete?: string | undefined; }[]; compositePKs: string[][]; uniqueConstraints?: string[]; checkConstraints: string[]; } export interface JsonRecreateSingleStoreTableStatement { type: 'singlestore_recreate_table'; tableName: string; columns: Column[]; compositePKs: string[]; uniqueConstraints?: string[]; } export interface JsonDropTableStatement { type: 'drop_table'; tableName: string; schema: string; policies?: string[]; } export interface JsonRenameTableStatement { type: 'rename_table'; fromSchema: string; toSchema: string; tableNameFrom: string; tableNameTo: string; } export interface JsonCreateEnumStatement { type: 'create_type_enum'; name: string; schema: string; values: string[]; } export interface JsonDropEnumStatement { type: 'drop_type_enum'; name: string; schema: string; } export interface JsonMoveEnumStatement { type: 'move_type_enum'; name: string; schemaFrom: string; schemaTo: string; } export interface JsonRenameEnumStatement { type: 'rename_type_enum'; nameFrom: string; nameTo: string; schema: string; } export interface JsonAddValueToEnumStatement { type: 'alter_type_add_value'; name: string; schema: string; value: string; before: string; } ////// export interface JsonCreateRoleStatement { type: 'create_role'; name: string; values: { inherit?: boolean; createDb?: boolean; createRole?: boolean; }; } export interface JsonDropRoleStatement { type: 'drop_role'; name: string; } export interface JsonRenameRoleStatement { type: 'rename_role'; nameFrom: string; nameTo: string; } export interface JsonAlterRoleStatement { type: 'alter_role'; name: string; values: { inherit?: boolean; createDb?: boolean; createRole?: boolean; }; } ////// export interface JsonDropValueFromEnumStatement { type: 'alter_type_drop_value'; name: string; enumSchema: string; deletedValues: string[]; newValues: string[]; columnsWithEnum: { tableSchema: string; table: string; column: string; default?: string; columnType: string }[]; } export interface JsonCreateSequenceStatement { type: 'create_sequence'; name: string; schema: string; values: { increment?: string | undefined; minValue?: string | undefined; maxValue?: string | undefined; startWith?: string | undefined; cache?: string | undefined; cycle?: boolean | undefined; }; } export interface JsonDropSequenceStatement { type: 'drop_sequence'; name: string; schema: string; } export interface JsonMoveSequenceStatement { type: 'move_sequence'; name: string; schemaFrom: string; schemaTo: string; } export interface JsonRenameSequenceStatement { type: 'rename_sequence'; nameFrom: string; nameTo: string; schema: string; } export interface JsonAlterSequenceStatement { type: 'alter_sequence'; name: string; schema: string; values: { increment?: string | undefined; minValue?: string | undefined; maxValue?: string | undefined; startWith?: string | undefined; cache?: string | undefined; cycle?: boolean | undefined; }; } export interface JsonDropColumnStatement { type: 'alter_table_drop_column'; tableName: string; columnName: string; schema: string; } export interface JsonAddColumnStatement { type: 'alter_table_add_column'; tableName: string; column: Column; schema: string; } export interface JsonSqliteAddColumnStatement { type: 'sqlite_alter_table_add_column'; tableName: string; column: Column; referenceData?: string; } export interface JsonCreatePolicyStatement { type: 'create_policy'; tableName: string; data: Policy; schema: string; } export interface JsonCreateIndPolicyStatement { type: 'create_ind_policy'; tableName: string; data: Policy; } export interface JsonDropPolicyStatement { type: 'drop_policy'; tableName: string; data: Policy; schema: string; } export interface JsonDropIndPolicyStatement { type: 'drop_ind_policy'; tableName: string; data: Policy; } export interface JsonRenamePolicyStatement { type: 'rename_policy'; tableName: string; oldName: string; newName: string; schema: string; } export interface JsonIndRenamePolicyStatement { type: 'rename_ind_policy'; tableKey: string; oldName: string; newName: string; } export interface JsonEnableRLSStatement { type: 'enable_rls'; tableName: string; schema: string; } export interface JsonDisableRLSStatement { type: 'disable_rls'; tableName: string; schema: string; } export interface JsonAlterPolicyStatement { type: 'alter_policy'; tableName: string; oldData: string; newData: string; schema: string; } export interface JsonAlterIndPolicyStatement { type: 'alter_ind_policy'; oldData: Policy; newData: Policy; } export interface JsonCreateIndexStatement { type: 'create_index'; tableName: string; data: string; schema: string; internal?: MySqlKitInternals | SQLiteKitInternals | SingleStoreKitInternals; } export interface JsonPgCreateIndexStatement { type: 'create_index_pg'; tableName: string; data: Index; schema: string; } export interface JsonReferenceStatement { type: 'create_reference' | 'alter_reference' | 'delete_reference'; data: string; schema: string; tableName: string; isMulticolumn?: boolean; columnNotNull?: boolean; columnDefault?: string; columnType?: string; // fromTable: string; // fromColumns: string[]; // toTable: string; // toColumns: string[]; // foreignKeyName: string; // onDelete?: string; // onUpdate?: string; } export interface JsonCreateUniqueConstraint { type: 'create_unique_constraint'; tableName: string; data: string; schema?: string; constraintName?: string; } export interface JsonDeleteUniqueConstraint { type: 'delete_unique_constraint'; tableName: string; data: string; schema?: string; constraintName?: string; } export interface JsonAlterUniqueConstraint { type: 'alter_unique_constraint'; tableName: string; old: string; new: string; schema?: string; oldConstraintName?: string; newConstraintName?: string; } export interface JsonCreateCheckConstraint { type: 'create_check_constraint'; tableName: string; data: string; schema?: string; } export interface JsonDeleteCheckConstraint { type: 'delete_check_constraint'; tableName: string; constraintName: string; schema?: string; } export interface JsonCreateCompositePK { type: 'create_composite_pk'; tableName: string; data: string; schema?: string; constraintName?: string; } export interface JsonDeleteCompositePK { type: 'delete_composite_pk'; tableName: string; data: string; schema?: string; constraintName?: string; } export interface JsonAlterCompositePK { type: 'alter_composite_pk'; tableName: string; old: string; new: string; schema?: string; oldConstraintName?: string; newConstraintName?: string; } export interface JsonAlterTableSetSchema { type: 'alter_table_set_schema'; tableName: string; schemaFrom: string; schemaTo: string; } export interface JsonAlterTableRemoveFromSchema { type: 'alter_table_remove_from_schema'; tableName: string; schema: string; } export interface JsonAlterTableSetNewSchema { type: 'alter_table_set_new_schema'; tableName: string; from: string; to: string; } export interface JsonCreateReferenceStatement extends JsonReferenceStatement { type: 'create_reference'; } export interface JsonAlterReferenceStatement extends JsonReferenceStatement { type: 'alter_reference'; oldFkey: string; } export interface JsonDeleteReferenceStatement extends JsonReferenceStatement { type: 'delete_reference'; } export interface JsonDropIndexStatement { type: 'drop_index'; tableName: string; data: string; schema: string; } export interface JsonRenameColumnStatement { type: 'alter_table_rename_column'; tableName: string; oldColumnName: string; newColumnName: string; schema: string; } export interface JsonAlterColumnTypeStatement { type: 'alter_table_alter_column_set_type'; tableName: string; columnName: string; newDataType: string; oldDataType: string; schema: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; columnGenerated?: { as: string; type: 'stored' | 'virtual' }; } export interface JsonAlterColumnPgTypeStatement { type: 'pg_alter_table_alter_column_set_type'; tableName: string; columnName: string; typeSchema: string | undefined; newDataType: { name: string; isEnum: boolean }; oldDataType: { name: string; isEnum: boolean }; schema: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; columnGenerated?: { as: string; type: 'stored' | 'virtual' }; } export interface JsonAlterColumnSetPrimaryKeyStatement { type: 'alter_table_alter_column_set_pk'; tableName: string; schema: string; columnName: string; } export interface JsonAlterColumnDropPrimaryKeyStatement { type: 'alter_table_alter_column_drop_pk'; tableName: string; columnName: string; schema: string; } export interface JsonAlterColumnSetDefaultStatement { type: 'alter_table_alter_column_set_default'; tableName: string; columnName: string; newDefaultValue: any; oldDefaultValue?: any; schema: string; newDataType: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; } export interface JsonAlterColumnDropDefaultStatement { type: 'alter_table_alter_column_drop_default'; tableName: string; columnName: string; schema: string; newDataType: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; } export interface JsonAlterColumnSetNotNullStatement { type: 'alter_table_alter_column_set_notnull'; tableName: string; columnName: string; schema: string; newDataType: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; } export interface JsonAlterColumnDropNotNullStatement { type: 'alter_table_alter_column_drop_notnull'; tableName: string; columnName: string; schema: string; newDataType: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; } export interface JsonAlterColumnSetGeneratedStatement { type: 'alter_table_alter_column_set_generated'; tableName: string; columnName: string; schema: string; newDataType: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; columnGenerated?: { as: string; type: 'stored' | 'virtual' }; } export interface JsonAlterColumnSetIdentityStatement { type: 'alter_table_alter_column_set_identity'; tableName: string; columnName: string; schema: string; identity: string; } export interface JsonAlterColumnDropIdentityStatement { type: 'alter_table_alter_column_drop_identity'; tableName: string; columnName: string; schema: string; } export interface JsonAlterColumnAlterIdentityStatement { type: 'alter_table_alter_column_change_identity'; tableName: string; columnName: string; schema: string; identity: string; oldIdentity: string; } export interface JsonAlterColumnDropGeneratedStatement { type: 'alter_table_alter_column_drop_generated'; tableName: string; columnName: string; schema: string; newDataType: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; columnGenerated?: { as: string; type: 'stored' | 'virtual' }; oldColumn?: Column; } export interface JsonAlterColumnAlterGeneratedStatement { type: 'alter_table_alter_column_alter_generated'; tableName: string; columnName: string; schema: string; newDataType: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; columnGenerated?: { as: string; type: 'stored' | 'virtual' }; } export interface JsonAlterColumnSetOnUpdateStatement { type: 'alter_table_alter_column_set_on_update'; tableName: string; columnName: string; schema: string; newDataType: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; } export interface JsonAlterColumnDropOnUpdateStatement { type: 'alter_table_alter_column_drop_on_update'; tableName: string; columnName: string; schema: string; newDataType: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; } export interface JsonAlterColumnSetAutoincrementStatement { type: 'alter_table_alter_column_set_autoincrement'; tableName: string; columnName: string; schema: string; newDataType: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; } export interface JsonAlterColumnDropAutoincrementStatement { type: 'alter_table_alter_column_drop_autoincrement'; tableName: string; columnName: string; schema: string; newDataType: string; columnDefault: string; columnOnUpdate: boolean; columnNotNull: boolean; columnAutoIncrement: boolean; columnPk: boolean; } export interface JsonCreateSchema { type: 'create_schema'; name: string; } export interface JsonDropSchema { type: 'drop_schema'; name: string; } export interface JsonRenameSchema { type: 'rename_schema'; from: string; to: string; } export type JsonCreatePgViewStatement = { type: 'create_view'; } & Omit; export type JsonCreateMySqlViewStatement = { type: 'mysql_create_view'; replace: boolean; } & Omit; /* export type JsonCreateSingleStoreViewStatement = { type: 'singlestore_create_view'; replace: boolean; } & Omit; */ export type JsonCreateSqliteViewStatement = { type: 'sqlite_create_view'; } & Omit; export interface JsonDropViewStatement { type: 'drop_view'; name: string; schema?: string; materialized?: boolean; } export interface JsonRenameViewStatement { type: 'rename_view'; nameTo: string; nameFrom: string; schema: string; materialized?: boolean; } export interface JsonRenameMySqlViewStatement { type: 'rename_view'; nameTo: string; nameFrom: string; schema: string; materialized?: boolean; } export interface JsonAlterViewAlterSchemaStatement { type: 'alter_view_alter_schema'; fromSchema: string; toSchema: string; name: string; materialized?: boolean; } export type JsonAlterViewAddWithOptionStatement = & { type: 'alter_view_add_with_option'; schema: string; name: string; } & ({ materialized: true; with: MatViewWithOption; } | { materialized: false; with: ViewWithOption; }); export type JsonAlterViewDropWithOptionStatement = & { type: 'alter_view_drop_with_option'; schema: string; name: string; } & ({ materialized: true; with: MatViewWithOption; } | { materialized: false; with: ViewWithOption; }); export interface JsonAlterViewAlterTablespaceStatement { type: 'alter_view_alter_tablespace'; toTablespace: string; name: string; schema: string; materialized: true; } export interface JsonAlterViewAlterUsingStatement { type: 'alter_view_alter_using'; toUsing: string; name: string; schema: string; materialized: true; } export type JsonAlterMySqlViewStatement = { type: 'alter_mysql_view'; } & Omit; /* export type JsonAlterSingleStoreViewStatement = { type: 'alter_singlestore_view'; } & Omit; */ export type JsonAlterViewStatement = | JsonAlterViewAlterSchemaStatement | JsonAlterViewAddWithOptionStatement | JsonAlterViewDropWithOptionStatement | JsonAlterViewAlterTablespaceStatement | JsonAlterViewAlterUsingStatement; export type JsonAlterColumnStatement = | JsonRenameColumnStatement | JsonAlterColumnTypeStatement | JsonAlterColumnPgTypeStatement | JsonAlterColumnSetDefaultStatement | JsonAlterColumnDropDefaultStatement | JsonAlterColumnSetNotNullStatement | JsonAlterColumnDropNotNullStatement | JsonAlterColumnDropOnUpdateStatement | JsonAlterColumnSetOnUpdateStatement | JsonAlterColumnDropAutoincrementStatement | JsonAlterColumnSetAutoincrementStatement | JsonAlterColumnSetPrimaryKeyStatement | JsonAlterColumnDropPrimaryKeyStatement | JsonAlterColumnSetGeneratedStatement | JsonAlterColumnDropGeneratedStatement | JsonAlterColumnAlterGeneratedStatement | JsonAlterColumnSetIdentityStatement | JsonAlterColumnAlterIdentityStatement | JsonAlterColumnDropIdentityStatement; export type JsonStatement = | JsonRecreateSingleStoreTableStatement | JsonRecreateTableStatement | JsonAlterColumnStatement | JsonCreateTableStatement | JsonDropTableStatement | JsonRenameTableStatement | JsonCreateEnumStatement | JsonDropEnumStatement | JsonMoveEnumStatement | JsonRenameEnumStatement | JsonAddValueToEnumStatement | JsonDropColumnStatement | JsonAddColumnStatement | JsonCreateIndexStatement | JsonCreateReferenceStatement | JsonAlterReferenceStatement | JsonDeleteReferenceStatement | JsonDropIndexStatement | JsonReferenceStatement | JsonSqliteCreateTableStatement | JsonSqliteAddColumnStatement | JsonCreateCompositePK | JsonDeleteCompositePK | JsonAlterCompositePK | JsonCreateUniqueConstraint | JsonDeleteUniqueConstraint | JsonAlterUniqueConstraint | JsonCreateSchema | JsonDropSchema | JsonRenameSchema | JsonAlterTableSetSchema | JsonAlterTableRemoveFromSchema | JsonAlterTableSetNewSchema | JsonPgCreateIndexStatement | JsonAlterSequenceStatement | JsonDropSequenceStatement | JsonCreateSequenceStatement | JsonMoveSequenceStatement | JsonRenameSequenceStatement | JsonDropPolicyStatement | JsonCreatePolicyStatement | JsonAlterPolicyStatement | JsonRenamePolicyStatement | JsonEnableRLSStatement | JsonDisableRLSStatement | JsonRenameRoleStatement | JsonCreateRoleStatement | JsonDropRoleStatement | JsonAlterRoleStatement | JsonCreatePgViewStatement | JsonDropViewStatement | JsonRenameViewStatement | JsonAlterViewStatement | JsonCreateMySqlViewStatement | JsonAlterMySqlViewStatement /* | JsonCreateSingleStoreViewStatement | JsonAlterSingleStoreViewStatement */ | JsonCreateSqliteViewStatement | JsonCreateCheckConstraint | JsonDeleteCheckConstraint | JsonDropValueFromEnumStatement | JsonIndRenamePolicyStatement | JsonDropIndPolicyStatement | JsonCreateIndPolicyStatement | JsonAlterIndPolicyStatement; export const preparePgCreateTableJson = ( table: Table, // TODO: remove? json2: PgSchema, ): JsonCreateTableStatement => { const { name, schema, columns, compositePrimaryKeys, uniqueConstraints, checkConstraints, policies, isRLSEnabled } = table; const tableKey = `${schema || 'public'}.${name}`; // TODO: @AndriiSherman. We need this, will add test cases const compositePkName = Object.values(compositePrimaryKeys).length > 0 ? json2.tables[tableKey].compositePrimaryKeys[ `${PgSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]).name}` ].name : ''; return { type: 'create_table', tableName: name, schema, columns: Object.values(columns), compositePKs: Object.values(compositePrimaryKeys), compositePkName: compositePkName, uniqueConstraints: Object.values(uniqueConstraints), policies: Object.values(policies), checkConstraints: Object.values(checkConstraints), isRLSEnabled: isRLSEnabled ?? false, }; }; export const prepareMySqlCreateTableJson = ( table: Table, // TODO: remove? json2: MySqlSchema, // we need it to know if some of the indexes(and in future other parts) are expressions or columns // didn't change mysqlserialaizer, because it will break snapshots and diffs and it's hard to detect // if previously it was an expression or column internals: MySqlKitInternals, ): JsonCreateTableStatement => { const { name, schema, columns, compositePrimaryKeys, uniqueConstraints, checkConstraints } = table; return { type: 'create_table', tableName: name, schema, columns: Object.values(columns), compositePKs: Object.values(compositePrimaryKeys), compositePkName: Object.values(compositePrimaryKeys).length > 0 ? json2.tables[name].compositePrimaryKeys[ MySqlSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]) .name ].name : '', uniqueConstraints: Object.values(uniqueConstraints), internals, checkConstraints: Object.values(checkConstraints), }; }; export const prepareSingleStoreCreateTableJson = ( table: Table, // TODO: remove? json2: SingleStoreSchema, // we need it to know if some of the indexes(and in future other parts) are expressions or columns // didn't change singlestoreserialaizer, because it will break snapshots and diffs and it's hard to detect // if previously it was an expression or column internals: SingleStoreKitInternals, ): JsonCreateTableStatement => { const { name, schema, columns, compositePrimaryKeys, uniqueConstraints } = table; return { type: 'create_table', tableName: name, schema, columns: Object.values(columns), compositePKs: Object.values(compositePrimaryKeys), compositePkName: Object.values(compositePrimaryKeys).length > 0 ? json2.tables[name].compositePrimaryKeys[ SingleStoreSquasher.unsquashPK(Object.values(compositePrimaryKeys)[0]) .name ].name : '', uniqueConstraints: Object.values(uniqueConstraints), internals, }; }; export const prepareSQLiteCreateTable = ( table: Table, action?: 'push' | undefined, ): JsonSqliteCreateTableStatement => { const { name, columns, uniqueConstraints, checkConstraints } = table; const references: string[] = Object.values(table.foreignKeys); const composites: string[][] = Object.values(table.compositePrimaryKeys).map( (it) => SQLiteSquasher.unsquashPK(it), ); const fks = references.map((it) => action === 'push' ? SQLiteSquasher.unsquashPushFK(it) : SQLiteSquasher.unsquashFK(it) ); return { type: 'sqlite_create_table', tableName: name, columns: Object.values(columns), referenceData: fks, compositePKs: composites, uniqueConstraints: Object.values(uniqueConstraints), checkConstraints: Object.values(checkConstraints), }; }; export const prepareDropTableJson = (table: Table): JsonDropTableStatement => { return { type: 'drop_table', tableName: table.name, schema: table.schema, policies: table.policies ? Object.values(table.policies) : [], }; }; export const prepareRenameTableJson = ( tableFrom: Table, tableTo: Table, ): JsonRenameTableStatement => { return { type: 'rename_table', fromSchema: tableTo.schema, toSchema: tableTo.schema, tableNameFrom: tableFrom.name, tableNameTo: tableTo.name, }; }; export const prepareCreateEnumJson = ( name: string, schema: string, values: string[], ): JsonCreateEnumStatement => { return { type: 'create_type_enum', name: name, schema: schema, values, }; }; // https://blog.yo1.dog/updating-enum-values-in-postgresql-the-safe-and-easy-way/ export const prepareAddValuesToEnumJson = ( name: string, schema: string, values: { value: string; before: string }[], ): JsonAddValueToEnumStatement[] => { return values.map((it) => { return { type: 'alter_type_add_value', name: name, schema: schema, value: it.value, before: it.before, }; }); }; export const prepareDropEnumValues = ( name: string, schema: string, removedValues: string[], json2: PgSchema, ): JsonDropValueFromEnumStatement[] => { if (!removedValues.length) return []; const affectedColumns: JsonDropValueFromEnumStatement['columnsWithEnum'] = []; for (const tableKey in json2.tables) { const table = json2.tables[tableKey]; for (const columnKey in table.columns) { const column = table.columns[columnKey]; const arrayDefinitionRegex = /\[\d*(?:\[\d*\])*\]/g; const parsedColumnType = column.type.replace(arrayDefinitionRegex, ''); if (parsedColumnType === name && column.typeSchema === schema) { affectedColumns.push({ tableSchema: table.schema, table: table.name, column: column.name, columnType: column.type, default: column.default, }); } } } return [{ type: 'alter_type_drop_value', name: name, enumSchema: schema, deletedValues: removedValues, newValues: json2.enums[`${schema}.${name}`].values, columnsWithEnum: affectedColumns, }]; }; export const prepareDropEnumJson = ( name: string, schema: string, ): JsonDropEnumStatement => { return { type: 'drop_type_enum', name: name, schema: schema, }; }; export const prepareMoveEnumJson = ( name: string, schemaFrom: string, schemaTo: string, ): JsonMoveEnumStatement => { return { type: 'move_type_enum', name: name, schemaFrom, schemaTo, }; }; export const prepareRenameEnumJson = ( nameFrom: string, nameTo: string, schema: string, ): JsonRenameEnumStatement => { return { type: 'rename_type_enum', nameFrom, nameTo, schema, }; }; //////////// export const prepareCreateSequenceJson = ( seq: Sequence, ): JsonCreateSequenceStatement => { const values = PgSquasher.unsquashSequence(seq.values); return { type: 'create_sequence', name: seq.name, schema: seq.schema, values, }; }; export const prepareAlterSequenceJson = ( seq: Sequence, ): JsonAlterSequenceStatement[] => { const values = PgSquasher.unsquashSequence(seq.values); return [ { type: 'alter_sequence', schema: seq.schema, name: seq.name, values, }, ]; }; export const prepareDropSequenceJson = ( name: string, schema: string, ): JsonDropSequenceStatement => { return { type: 'drop_sequence', name: name, schema: schema, }; }; export const prepareMoveSequenceJson = ( name: string, schemaFrom: string, schemaTo: string, ): JsonMoveSequenceStatement => { return { type: 'move_sequence', name: name, schemaFrom, schemaTo, }; }; export const prepareRenameSequenceJson = ( nameFrom: string, nameTo: string, schema: string, ): JsonRenameSequenceStatement => { return { type: 'rename_sequence', nameFrom, nameTo, schema, }; }; //////////// export const prepareCreateRoleJson = ( role: Role, ): JsonCreateRoleStatement => { return { type: 'create_role', name: role.name, values: { createDb: role.createDb, createRole: role.createRole, inherit: role.inherit, }, }; }; export const prepareAlterRoleJson = ( role: Role, ): JsonAlterRoleStatement => { return { type: 'alter_role', name: role.name, values: { createDb: role.createDb, createRole: role.createRole, inherit: role.inherit, }, }; }; export const prepareDropRoleJson = ( name: string, ): JsonDropRoleStatement => { return { type: 'drop_role', name: name, }; }; export const prepareRenameRoleJson = ( nameFrom: string, nameTo: string, ): JsonRenameRoleStatement => { return { type: 'rename_role', nameFrom, nameTo, }; }; ////////// export const prepareCreateSchemasJson = ( values: string[], ): JsonCreateSchema[] => { return values.map((it) => { return { type: 'create_schema', name: it, } as JsonCreateSchema; }); }; export const prepareRenameSchemasJson = ( values: { from: string; to: string }[], ): JsonRenameSchema[] => { return values.map((it) => { return { type: 'rename_schema', from: it.from, to: it.to, } as JsonRenameSchema; }); }; export const prepareDeleteSchemasJson = ( values: string[], ): JsonDropSchema[] => { return values.map((it) => { return { type: 'drop_schema', name: it, } as JsonDropSchema; }); }; export const prepareRenameColumns = ( tableName: string, // TODO: split for pg and mysql+sqlite and singlestore without schema schema: string, pairs: { from: Column; to: Column }[], ): JsonRenameColumnStatement[] => { return pairs.map((it) => { return { type: 'alter_table_rename_column', tableName: tableName, oldColumnName: it.from.name, newColumnName: it.to.name, schema, }; }); }; export const _prepareDropColumns = ( taleName: string, schema: string, columns: Column[], ): JsonDropColumnStatement[] => { return columns.map((it) => { return { type: 'alter_table_drop_column', tableName: taleName, columnName: it.name, schema, }; }); }; export const _prepareAddColumns = ( tableName: string, schema: string, columns: Column[], ): JsonAddColumnStatement[] => { return columns.map((it) => { return { type: 'alter_table_add_column', tableName: tableName, column: it, schema, }; }); }; export const _prepareSqliteAddColumns = ( tableName: string, columns: Column[], referenceData: string[], ): JsonSqliteAddColumnStatement[] => { const unsquashed = referenceData.map((addedFkValue) => SQLiteSquasher.unsquashFK(addedFkValue)); return columns .map((it) => { const columnsWithReference = unsquashed.find((t) => t.columnsFrom.includes(it.name)); if (it.generated?.type === 'stored') { warning( `As SQLite docs mention: "It is not possible to ALTER TABLE ADD COLUMN a STORED column. One can add a VIRTUAL column, however", source: "https://www.sqlite.org/gencol.html"`, ); return undefined; } return { type: 'sqlite_alter_table_add_column', tableName: tableName, column: it, referenceData: columnsWithReference ? SQLiteSquasher.squashFK(columnsWithReference) : undefined, }; }) .filter(Boolean) as JsonSqliteAddColumnStatement[]; }; export const prepareAlterColumnsMysql = ( tableName: string, schema: string, columns: AlteredColumn[], // TODO: remove? json1: CommonSquashedSchema, json2: CommonSquashedSchema, action?: 'push' | undefined, ): JsonAlterColumnStatement[] => { let statements: JsonAlterColumnStatement[] = []; let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = []; let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = []; for (const column of columns) { const columnName = typeof column.name !== 'string' ? column.name.new : column.name; const table = json2.tables[tableName]; const snapshotColumn = table.columns[columnName]; const columnType = snapshotColumn.type; const columnDefault = snapshotColumn.default; const columnOnUpdate = 'onUpdate' in snapshotColumn ? snapshotColumn.onUpdate : undefined; const columnNotNull = table.columns[columnName].notNull; const columnAutoIncrement = 'autoincrement' in snapshotColumn ? snapshotColumn.autoincrement ?? false : false; const columnPk = table.columns[columnName].primaryKey; if (column.autoincrement?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_autoincrement', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.autoincrement?.type === 'changed') { const type = column.autoincrement.new ? 'alter_table_alter_column_set_autoincrement' : 'alter_table_alter_column_drop_autoincrement'; statements.push({ type, tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.autoincrement?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_autoincrement', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } } for (const column of columns) { const columnName = typeof column.name !== 'string' ? column.name.new : column.name; // I used any, because those fields are available only for mysql dialect // For other dialects it will become undefined, that is fine for json statements const columnType = json2.tables[tableName].columns[columnName].type; const columnDefault = json2.tables[tableName].columns[columnName].default; const columnGenerated = json2.tables[tableName].columns[columnName].generated; const columnOnUpdate = (json2.tables[tableName].columns[columnName] as any) .onUpdate; const columnNotNull = json2.tables[tableName].columns[columnName].notNull; const columnAutoIncrement = ( json2.tables[tableName].columns[columnName] as any ).autoincrement; const columnPk = (json2.tables[tableName].columns[columnName] as any) .primaryKey; const compositePk = json2.tables[tableName].compositePrimaryKeys[ `${tableName}_${columnName}` ]; if (typeof column.name !== 'string') { statements.push({ type: 'alter_table_rename_column', tableName, oldColumnName: column.name.old, newColumnName: column.name.new, schema, }); } if (column.type?.type === 'changed') { statements.push({ type: 'alter_table_alter_column_set_type', tableName, columnName, newDataType: column.type.new, oldDataType: column.type.old, schema, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } if ( column.primaryKey?.type === 'deleted' || (column.primaryKey?.type === 'changed' && !column.primaryKey.new && typeof compositePk === 'undefined') ) { dropPkStatements.push({ //// type: 'alter_table_alter_column_drop_pk', tableName, columnName, schema, }); } if (column.default?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_default', tableName, columnName, newDefaultValue: column.default.value, schema, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.default?.type === 'changed') { statements.push({ type: 'alter_table_alter_column_set_default', tableName, columnName, newDefaultValue: column.default.new, oldDefaultValue: column.default.old, schema, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.default?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_default', tableName, columnName, schema, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.notNull?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_notnull', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.notNull?.type === 'changed') { const type = column.notNull.new ? 'alter_table_alter_column_set_notnull' : 'alter_table_alter_column_drop_notnull'; statements.push({ type: type, tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.notNull?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_notnull', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.generated?.type === 'added') { if (columnGenerated?.type === 'virtual') { warning( `You are trying to add virtual generated constraint to ${ chalk.blue( columnName, ) } column. As MySQL docs mention: "Nongenerated columns can be altered to stored but not virtual generated columns". We will drop an existing column and add it with a virtual generated statement. This means that the data previously stored in this column will be wiped, and new data will be generated on each read for this column\n`, ); } statements.push({ type: 'alter_table_alter_column_set_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } if (column.generated?.type === 'changed' && action !== 'push') { statements.push({ type: 'alter_table_alter_column_alter_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } if (column.generated?.type === 'deleted') { if (columnGenerated?.type === 'virtual') { warning( `You are trying to remove virtual generated constraint from ${ chalk.blue( columnName, ) } column. As MySQL docs mention: "Stored but not virtual generated columns can be altered to nongenerated columns. The stored generated values become the values of the nongenerated column". We will drop an existing column and add it without a virtual generated statement. This means that this column will have no data after migration\n`, ); } statements.push({ type: 'alter_table_alter_column_drop_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, oldColumn: json1.tables[tableName].columns[columnName], }); } if ( column.primaryKey?.type === 'added' || (column.primaryKey?.type === 'changed' && column.primaryKey.new) ) { const wasAutoincrement = statements.filter( (it) => it.type === 'alter_table_alter_column_set_autoincrement', ); if (wasAutoincrement.length === 0) { setPkStatements.push({ type: 'alter_table_alter_column_set_pk', tableName, schema, columnName, }); } } if (column.onUpdate?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_on_update', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.onUpdate?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_on_update', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } } return [...dropPkStatements, ...setPkStatements, ...statements]; }; export const prepareAlterColumnsSingleStore = ( tableName: string, schema: string, columns: AlteredColumn[], // TODO: remove? json1: CommonSquashedSchema, json2: CommonSquashedSchema, action?: 'push' | undefined, ): JsonAlterColumnStatement[] => { let statements: JsonAlterColumnStatement[] = []; let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = []; let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = []; for (const column of columns) { const columnName = typeof column.name !== 'string' ? column.name.new : column.name; const table = json2.tables[tableName]; const snapshotColumn = table.columns[columnName]; const columnType = snapshotColumn.type; const columnDefault = snapshotColumn.default; const columnOnUpdate = 'onUpdate' in snapshotColumn ? snapshotColumn.onUpdate : undefined; const columnNotNull = table.columns[columnName].notNull; const columnAutoIncrement = 'autoincrement' in snapshotColumn ? snapshotColumn.autoincrement ?? false : false; const columnPk = table.columns[columnName].primaryKey; if (column.autoincrement?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_autoincrement', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.autoincrement?.type === 'changed') { const type = column.autoincrement.new ? 'alter_table_alter_column_set_autoincrement' : 'alter_table_alter_column_drop_autoincrement'; statements.push({ type, tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.autoincrement?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_autoincrement', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } } for (const column of columns) { const columnName = typeof column.name !== 'string' ? column.name.new : column.name; // I used any, because those fields are available only for mysql and singlestore dialect // For other dialects it will become undefined, that is fine for json statements const columnType = json2.tables[tableName].columns[columnName].type; const columnDefault = json2.tables[tableName].columns[columnName].default; const columnGenerated = json2.tables[tableName].columns[columnName].generated; const columnOnUpdate = (json2.tables[tableName].columns[columnName] as any) .onUpdate; const columnNotNull = json2.tables[tableName].columns[columnName].notNull; const columnAutoIncrement = ( json2.tables[tableName].columns[columnName] as any ).autoincrement; const columnPk = (json2.tables[tableName].columns[columnName] as any) .primaryKey; const compositePk = json2.tables[tableName].compositePrimaryKeys[ `${tableName}_${columnName}` ]; if (typeof column.name !== 'string') { statements.push({ type: 'alter_table_rename_column', tableName, oldColumnName: column.name.old, newColumnName: column.name.new, schema, }); } if (column.type?.type === 'changed') { statements.push({ type: 'alter_table_alter_column_set_type', tableName, columnName, newDataType: column.type.new, oldDataType: column.type.old, schema, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } if ( column.primaryKey?.type === 'deleted' || (column.primaryKey?.type === 'changed' && !column.primaryKey.new && typeof compositePk === 'undefined') ) { dropPkStatements.push({ //// type: 'alter_table_alter_column_drop_pk', tableName, columnName, schema, }); } if (column.default?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_default', tableName, columnName, newDefaultValue: column.default.value, schema, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.default?.type === 'changed') { statements.push({ type: 'alter_table_alter_column_set_default', tableName, columnName, newDefaultValue: column.default.new, oldDefaultValue: column.default.old, schema, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.default?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_default', tableName, columnName, schema, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.notNull?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_notnull', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.notNull?.type === 'changed') { const type = column.notNull.new ? 'alter_table_alter_column_set_notnull' : 'alter_table_alter_column_drop_notnull'; statements.push({ type: type, tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.notNull?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_notnull', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.generated?.type === 'added') { if (columnGenerated?.type === 'virtual') { // TODO: Change warning message according to SingleStore docs warning( `You are trying to add virtual generated constraint to ${ chalk.blue( columnName, ) } column. As MySQL docs mention: "Nongenerated columns can be altered to stored but not virtual generated columns". We will drop an existing column and add it with a virtual generated statement. This means that the data previously stored in this column will be wiped, and new data will be generated on each read for this column\n`, ); } statements.push({ type: 'alter_table_alter_column_set_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } if (column.generated?.type === 'changed' && action !== 'push') { statements.push({ type: 'alter_table_alter_column_alter_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } if (column.generated?.type === 'deleted') { if (columnGenerated?.type === 'virtual') { // TODO: Change warning message according to SingleStore docs warning( `You are trying to remove virtual generated constraint from ${ chalk.blue( columnName, ) } column. As MySQL docs mention: "Stored but not virtual generated columns can be altered to nongenerated columns. The stored generated values become the values of the nongenerated column". We will drop an existing column and add it without a virtual generated statement. This means that this column will have no data after migration\n`, ); } statements.push({ type: 'alter_table_alter_column_drop_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, oldColumn: json1.tables[tableName].columns[columnName], }); } if ( column.primaryKey?.type === 'added' || (column.primaryKey?.type === 'changed' && column.primaryKey.new) ) { const wasAutoincrement = statements.filter( (it) => it.type === 'alter_table_alter_column_set_autoincrement', ); if (wasAutoincrement.length === 0) { setPkStatements.push({ type: 'alter_table_alter_column_set_pk', tableName, schema, columnName, }); } } if (column.onUpdate?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_on_update', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.onUpdate?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_on_update', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } } return [...dropPkStatements, ...setPkStatements, ...statements]; }; export const preparePgAlterColumns = ( _tableName: string, schema: string, columns: AlteredColumn[], // TODO: remove? json2: PgSchemaSquashed, json1: PgSchemaSquashed, action?: 'push' | undefined, ): JsonAlterColumnStatement[] => { const tableKey = `${schema || 'public'}.${_tableName}`; let statements: JsonAlterColumnStatement[] = []; let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = []; let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = []; for (const column of columns) { const columnName = typeof column.name !== 'string' ? column.name.new : column.name; const tableName = json2.tables[tableKey].name; // I used any, because those fields are available only for mysql dialect // For other dialects it will become undefined, that is fine for json statements const columnType = json2.tables[tableKey].columns[columnName].type; const columnDefault = json2.tables[tableKey].columns[columnName].default; const columnGenerated = json2.tables[tableKey].columns[columnName].generated; const columnOnUpdate = (json2.tables[tableKey].columns[columnName] as any) .onUpdate; const columnNotNull = json2.tables[tableKey].columns[columnName].notNull; const columnAutoIncrement = ( json2.tables[tableKey].columns[columnName] as any ).autoincrement; const columnPk = (json2.tables[tableKey].columns[columnName] as any) .primaryKey; const typeSchema = json2.tables[tableKey].columns[columnName].typeSchema; const json1ColumnTypeSchema = json1.tables[tableKey].columns[columnName].typeSchema; const compositePk = json2.tables[tableKey].compositePrimaryKeys[`${tableName}_${columnName}`]; if (typeof column.name !== 'string') { statements.push({ type: 'alter_table_rename_column', tableName, oldColumnName: column.name.old, newColumnName: column.name.new, schema, }); } if (column.type?.type === 'changed') { const arrayDefinitionRegex = /\[\d*(?:\[\d*\])*\]/g; const parsedNewColumnType = column.type.new.replace(arrayDefinitionRegex, ''); const parsedOldColumnType = column.type.old.replace(arrayDefinitionRegex, ''); const isNewTypeIsEnum = json2.enums[`${typeSchema}.${parsedNewColumnType}`]; const isOldTypeIsEnum = json1.enums[`${json1ColumnTypeSchema}.${parsedOldColumnType}`]; statements.push({ type: 'pg_alter_table_alter_column_set_type', tableName, columnName, typeSchema: typeSchema, newDataType: { name: column.type.new, isEnum: isNewTypeIsEnum ? true : false, }, oldDataType: { name: column.type.old, isEnum: isOldTypeIsEnum ? true : false, }, schema, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if ( column.primaryKey?.type === 'deleted' || (column.primaryKey?.type === 'changed' && !column.primaryKey.new && typeof compositePk === 'undefined') ) { dropPkStatements.push({ //// type: 'alter_table_alter_column_drop_pk', tableName, columnName, schema, }); } if (column.default?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_default', tableName, columnName, newDefaultValue: column.default.value, schema, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.default?.type === 'changed') { statements.push({ type: 'alter_table_alter_column_set_default', tableName, columnName, newDefaultValue: column.default.new, oldDefaultValue: column.default.old, schema, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.default?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_default', tableName, columnName, schema, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.notNull?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_notnull', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.notNull?.type === 'changed') { const type = column.notNull.new ? 'alter_table_alter_column_set_notnull' : 'alter_table_alter_column_drop_notnull'; statements.push({ type: type, tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.notNull?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_notnull', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.identity?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_identity', tableName, columnName, schema, identity: column.identity.value, }); } if (column.identity?.type === 'changed') { statements.push({ type: 'alter_table_alter_column_change_identity', tableName, columnName, schema, identity: column.identity.new, oldIdentity: column.identity.old, }); } if (column.identity?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_identity', tableName, columnName, schema, }); } if (column.generated?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } if (column.generated?.type === 'changed' && action !== 'push') { statements.push({ type: 'alter_table_alter_column_alter_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } if (column.generated?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } if ( column.primaryKey?.type === 'added' || (column.primaryKey?.type === 'changed' && column.primaryKey.new) ) { const wasAutoincrement = statements.filter( (it) => it.type === 'alter_table_alter_column_set_autoincrement', ); if (wasAutoincrement.length === 0) { setPkStatements.push({ type: 'alter_table_alter_column_set_pk', tableName, schema, columnName, }); } } // if (column.primaryKey?.type === "added") { // statements.push({ // type: "alter_table_alter_column_set_primarykey", // tableName, // columnName, // schema, // newDataType: columnType, // columnDefault, // columnOnUpdate, // columnNotNull, // columnAutoIncrement, // }); // } // if (column.primaryKey?.type === "changed") { // const type = column.primaryKey.new // ? "alter_table_alter_column_set_primarykey" // : "alter_table_alter_column_drop_primarykey"; // statements.push({ // type, // tableName, // columnName, // schema, // newDataType: columnType, // columnDefault, // columnOnUpdate, // columnNotNull, // columnAutoIncrement, // }); // } // if (column.primaryKey?.type === "deleted") { // statements.push({ // type: "alter_table_alter_column_drop_primarykey", // tableName, // columnName, // schema, // newDataType: columnType, // columnDefault, // columnOnUpdate, // columnNotNull, // columnAutoIncrement, // }); // } if (column.onUpdate?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_on_update', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.onUpdate?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_on_update', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } } return [...dropPkStatements, ...setPkStatements, ...statements]; }; export const prepareSqliteAlterColumns = ( tableName: string, schema: string, columns: AlteredColumn[], // TODO: remove? json2: CommonSquashedSchema, ): JsonAlterColumnStatement[] => { let statements: JsonAlterColumnStatement[] = []; let dropPkStatements: JsonAlterColumnDropPrimaryKeyStatement[] = []; let setPkStatements: JsonAlterColumnSetPrimaryKeyStatement[] = []; for (const column of columns) { const columnName = typeof column.name !== 'string' ? column.name.new : column.name; // I used any, because those fields are available only for mysql dialect // For other dialects it will become undefined, that is fine for json statements const columnType = json2.tables[tableName].columns[columnName].type; const columnDefault = json2.tables[tableName].columns[columnName].default; const columnOnUpdate = (json2.tables[tableName].columns[columnName] as any) .onUpdate; const columnNotNull = json2.tables[tableName].columns[columnName].notNull; const columnAutoIncrement = ( json2.tables[tableName].columns[columnName] as any ).autoincrement; const columnPk = (json2.tables[tableName].columns[columnName] as any) .primaryKey; const columnGenerated = json2.tables[tableName].columns[columnName].generated; const compositePk = json2.tables[tableName].compositePrimaryKeys[ `${tableName}_${columnName}` ]; if (column.autoincrement?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_autoincrement', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.autoincrement?.type === 'changed') { const type = column.autoincrement.new ? 'alter_table_alter_column_set_autoincrement' : 'alter_table_alter_column_drop_autoincrement'; statements.push({ type, tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.autoincrement?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_autoincrement', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (typeof column.name !== 'string') { statements.push({ type: 'alter_table_rename_column', tableName, oldColumnName: column.name.old, newColumnName: column.name.new, schema, }); } if (column.type?.type === 'changed') { statements.push({ type: 'alter_table_alter_column_set_type', tableName, columnName, newDataType: column.type.new, oldDataType: column.type.old, schema, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if ( column.primaryKey?.type === 'deleted' || (column.primaryKey?.type === 'changed' && !column.primaryKey.new && typeof compositePk === 'undefined') ) { dropPkStatements.push({ //// type: 'alter_table_alter_column_drop_pk', tableName, columnName, schema, }); } if (column.default?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_default', tableName, columnName, newDefaultValue: column.default.value, schema, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.default?.type === 'changed') { statements.push({ type: 'alter_table_alter_column_set_default', tableName, columnName, newDefaultValue: column.default.new, oldDefaultValue: column.default.old, schema, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.default?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_default', tableName, columnName, schema, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, newDataType: columnType, columnPk, }); } if (column.notNull?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_notnull', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.notNull?.type === 'changed') { const type = column.notNull.new ? 'alter_table_alter_column_set_notnull' : 'alter_table_alter_column_drop_notnull'; statements.push({ type: type, tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.notNull?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_notnull', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.generated?.type === 'added') { if (columnGenerated?.type === 'virtual') { statements.push({ type: 'alter_table_alter_column_set_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } else { warning( `As SQLite docs mention: "It is not possible to ALTER TABLE ADD COLUMN a STORED column. One can add a VIRTUAL column, however", source: "https://www.sqlite.org/gencol.html"`, ); } } if (column.generated?.type === 'changed') { if (columnGenerated?.type === 'virtual') { statements.push({ type: 'alter_table_alter_column_alter_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } else { warning( `As SQLite docs mention: "It is not possible to ALTER TABLE ADD COLUMN a STORED column. One can add a VIRTUAL column, however", source: "https://www.sqlite.org/gencol.html"`, ); } } if (column.generated?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_generated', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, columnGenerated, }); } if ( column.primaryKey?.type === 'added' || (column.primaryKey?.type === 'changed' && column.primaryKey.new) ) { const wasAutoincrement = statements.filter( (it) => it.type === 'alter_table_alter_column_set_autoincrement', ); if (wasAutoincrement.length === 0) { setPkStatements.push({ type: 'alter_table_alter_column_set_pk', tableName, schema, columnName, }); } } if (column.onUpdate?.type === 'added') { statements.push({ type: 'alter_table_alter_column_set_on_update', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } if (column.onUpdate?.type === 'deleted') { statements.push({ type: 'alter_table_alter_column_drop_on_update', tableName, columnName, schema, newDataType: columnType, columnDefault, columnOnUpdate, columnNotNull, columnAutoIncrement, columnPk, }); } } return [...dropPkStatements, ...setPkStatements, ...statements]; }; export const prepareRenamePolicyJsons = ( tableName: string, schema: string, renames: { from: Policy; to: Policy; }[], ): JsonRenamePolicyStatement[] => { return renames.map((it) => { return { type: 'rename_policy', tableName: tableName, oldName: it.from.name, newName: it.to.name, schema, }; }); }; export const prepareRenameIndPolicyJsons = ( renames: { from: Policy; to: Policy; }[], ): JsonIndRenamePolicyStatement[] => { return renames.map((it) => { return { type: 'rename_ind_policy', tableKey: it.from.on!, oldName: it.from.name, newName: it.to.name, }; }); }; export const prepareCreatePolicyJsons = ( tableName: string, schema: string, policies: Policy[], ): JsonCreatePolicyStatement[] => { return policies.map((it) => { return { type: 'create_policy', tableName, data: it, schema, }; }); }; export const prepareCreateIndPolicyJsons = ( policies: Policy[], ): JsonCreateIndPolicyStatement[] => { return policies.map((it) => { return { type: 'create_ind_policy', tableName: it.on!, data: it, }; }); }; export const prepareDropPolicyJsons = ( tableName: string, schema: string, policies: Policy[], ): JsonDropPolicyStatement[] => { return policies.map((it) => { return { type: 'drop_policy', tableName, data: it, schema, }; }); }; export const prepareDropIndPolicyJsons = ( policies: Policy[], ): JsonDropIndPolicyStatement[] => { return policies.map((it) => { return { type: 'drop_ind_policy', tableName: it.on!, data: it, }; }); }; export const prepareAlterPolicyJson = ( tableName: string, schema: string, oldPolicy: string, newPolicy: string, ): JsonAlterPolicyStatement => { return { type: 'alter_policy', tableName, oldData: oldPolicy, newData: newPolicy, schema, }; }; export const prepareAlterIndPolicyJson = ( oldPolicy: Policy, newPolicy: Policy, ): JsonAlterIndPolicyStatement => { return { type: 'alter_ind_policy', oldData: oldPolicy, newData: newPolicy, }; }; export const preparePgCreateIndexesJson = ( tableName: string, schema: string, indexes: Record, fullSchema: PgSchema, action?: 'push' | undefined, ): JsonPgCreateIndexStatement[] => { if (action === 'push') { return Object.values(indexes).map((indexData) => { const unsquashedIndex = PgSquasher.unsquashIdxPush(indexData); const data = fullSchema.tables[`${schema === '' ? 'public' : schema}.${tableName}`] .indexes[unsquashedIndex.name]; return { type: 'create_index_pg', tableName, data, schema, }; }); } return Object.values(indexes).map((indexData) => { return { type: 'create_index_pg', tableName, data: PgSquasher.unsquashIdx(indexData), schema, }; }); }; export const prepareCreateIndexesJson = ( tableName: string, schema: string, indexes: Record, internal?: MySqlKitInternals | SQLiteKitInternals, ): JsonCreateIndexStatement[] => { return Object.values(indexes).map((indexData) => { return { type: 'create_index', tableName, data: indexData, schema, internal, }; }); }; export const prepareCreateReferencesJson = ( tableName: string, schema: string, foreignKeys: Record, ): JsonCreateReferenceStatement[] => { return Object.values(foreignKeys).map((fkData) => { return { type: 'create_reference', tableName, data: fkData, schema, }; }); }; export const prepareLibSQLCreateReferencesJson = ( tableName: string, schema: string, foreignKeys: Record, json2: SQLiteSchemaSquashed, action?: 'push', ): JsonCreateReferenceStatement[] => { return Object.values(foreignKeys).map((fkData) => { const { columnsFrom, tableFrom, columnsTo } = action === 'push' ? SQLiteSquasher.unsquashPushFK(fkData) : SQLiteSquasher.unsquashFK(fkData); // When trying to alter table in lib sql it is necessary to pass all config for column like "NOT NULL", "DEFAULT", etc. // If it is multicolumn reference it is not possible to pass this data for all columns // Pass multicolumn flag for sql statements to not generate migration let isMulticolumn = false; if (columnsFrom.length > 1 || columnsTo.length > 1) { isMulticolumn = true; return { type: 'create_reference', tableName, data: fkData, schema, isMulticolumn, }; } const columnFrom = columnsFrom[0]; const { notNull: columnNotNull, default: columnDefault, type: columnType, } = json2.tables[tableFrom].columns[columnFrom]; return { type: 'create_reference', tableName, data: fkData, schema, columnNotNull, columnDefault, columnType, }; }); }; export const prepareDropReferencesJson = ( tableName: string, schema: string, foreignKeys: Record, ): JsonDeleteReferenceStatement[] => { return Object.values(foreignKeys).map((fkData) => { return { type: 'delete_reference', tableName, data: fkData, schema, }; }); }; export const prepareLibSQLDropReferencesJson = ( tableName: string, schema: string, foreignKeys: Record, json2: SQLiteSchemaSquashed, meta: SQLiteSchemaInternal['_meta'], action?: 'push', ): JsonDeleteReferenceStatement[] => { const statements = Object.values(foreignKeys).map((fkData) => { const { columnsFrom, tableFrom, columnsTo, name, tableTo, onDelete, onUpdate } = action === 'push' ? SQLiteSquasher.unsquashPushFK(fkData) : SQLiteSquasher.unsquashFK(fkData); // If all columns from where were references were deleted -> skip this logic // Drop columns will cover this scenario const keys = Object.keys(json2.tables[tableName].columns); const filtered = columnsFrom.filter((it) => keys.includes(it)); const fullDrop = filtered.length === 0; if (fullDrop) return; // When trying to alter table in lib sql it is necessary to pass all config for column like "NOT NULL", "DEFAULT", etc. // If it is multicolumn reference it is not possible to pass this data for all columns // Pass multicolumn flag for sql statements to not generate migration let isMulticolumn = false; if (columnsFrom.length > 1 || columnsTo.length > 1) { isMulticolumn = true; return { type: 'delete_reference', tableName, data: fkData, schema, isMulticolumn, }; } const columnFrom = columnsFrom[0]; const newTableName = getNewTableName(tableFrom, meta); const { notNull: columnNotNull, default: columnDefault, type: columnType, } = json2.tables[newTableName].columns[columnFrom]; const fkToSquash = { columnsFrom, columnsTo, name, tableFrom: newTableName, tableTo, onDelete, onUpdate, }; const foreignKey = action === 'push' ? SQLiteSquasher.squashPushFK(fkToSquash) : SQLiteSquasher.squashFK(fkToSquash); return { type: 'delete_reference', tableName, data: foreignKey, schema, columnNotNull, columnDefault, columnType, }; }); return statements.filter((it) => it) as JsonDeleteReferenceStatement[]; }; // alter should create 2 statements. It's important to make only 1 sql per statement(for breakpoints) export const prepareAlterReferencesJson = ( tableName: string, schema: string, foreignKeys: Record, ): JsonReferenceStatement[] => { const stmts: JsonReferenceStatement[] = []; Object.values(foreignKeys).map((val) => { stmts.push({ type: 'delete_reference', tableName, schema, data: val.__old, }); stmts.push({ type: 'create_reference', tableName, schema, data: val.__new, }); }); return stmts; }; export const prepareDropIndexesJson = ( tableName: string, schema: string, indexes: Record, ): JsonDropIndexStatement[] => { return Object.values(indexes).map((indexData) => { return { type: 'drop_index', tableName, data: indexData, schema, }; }); }; export const prepareAddCompositePrimaryKeySqlite = ( tableName: string, pks: Record, ): JsonCreateCompositePK[] => { return Object.values(pks).map((it) => { return { type: 'create_composite_pk', tableName, data: it, } as JsonCreateCompositePK; }); }; export const prepareDeleteCompositePrimaryKeySqlite = ( tableName: string, pks: Record, ): JsonDeleteCompositePK[] => { return Object.values(pks).map((it) => { return { type: 'delete_composite_pk', tableName, data: it, } as JsonDeleteCompositePK; }); }; export const prepareAlterCompositePrimaryKeySqlite = ( tableName: string, pks: Record, ): JsonAlterCompositePK[] => { return Object.values(pks).map((it) => { return { type: 'alter_composite_pk', tableName, old: it.__old, new: it.__new, } as JsonAlterCompositePK; }); }; export const prepareAddCompositePrimaryKeyPg = ( tableName: string, schema: string, pks: Record, // TODO: remove? json2: PgSchema, ): JsonCreateCompositePK[] => { return Object.values(pks).map((it) => { const unsquashed = PgSquasher.unsquashPK(it); return { type: 'create_composite_pk', tableName, data: it, schema, constraintName: PgSquasher.unsquashPK(it).name, } as JsonCreateCompositePK; }); }; export const prepareDeleteCompositePrimaryKeyPg = ( tableName: string, schema: string, pks: Record, // TODO: remove? json1: PgSchema, ): JsonDeleteCompositePK[] => { return Object.values(pks).map((it) => { return { type: 'delete_composite_pk', tableName, data: it, schema, constraintName: PgSquasher.unsquashPK(it).name, } as JsonDeleteCompositePK; }); }; export const prepareAlterCompositePrimaryKeyPg = ( tableName: string, schema: string, pks: Record, // TODO: remove? json1: PgSchema, json2: PgSchema, ): JsonAlterCompositePK[] => { return Object.values(pks).map((it) => { return { type: 'alter_composite_pk', tableName, old: it.__old, new: it.__new, schema, oldConstraintName: PgSquasher.unsquashPK(it.__old).name, newConstraintName: PgSquasher.unsquashPK(it.__new).name, } as JsonAlterCompositePK; }); }; export const prepareAddUniqueConstraintPg = ( tableName: string, schema: string, unqs: Record, ): JsonCreateUniqueConstraint[] => { return Object.values(unqs).map((it) => { return { type: 'create_unique_constraint', tableName, data: it, schema, } as JsonCreateUniqueConstraint; }); }; export const prepareDeleteUniqueConstraintPg = ( tableName: string, schema: string, unqs: Record, ): JsonDeleteUniqueConstraint[] => { return Object.values(unqs).map((it) => { return { type: 'delete_unique_constraint', tableName, data: it, schema, } as JsonDeleteUniqueConstraint; }); }; export const prepareAddCheckConstraint = ( tableName: string, schema: string, check: Record, ): JsonCreateCheckConstraint[] => { return Object.values(check).map((it) => { return { type: 'create_check_constraint', tableName, data: it, schema, } as JsonCreateCheckConstraint; }); }; export const prepareDeleteCheckConstraint = ( tableName: string, schema: string, check: Record, ): JsonDeleteCheckConstraint[] => { return Object.values(check).map((it) => { return { type: 'delete_check_constraint', tableName, constraintName: PgSquasher.unsquashCheck(it).name, schema, } as JsonDeleteCheckConstraint; }); }; // add create table changes // add handler to make drop and add and not alter(looking at __old and __new) // add serializer for mysql and sqlite + types // add introspect serializer for pg+sqlite+mysql // add introspect actual code // add push sqlite handler // add push mysql warning if data exists and may have unique conflict // add release notes // add docs changes export const prepareAlterUniqueConstraintPg = ( tableName: string, schema: string, unqs: Record, ): JsonAlterUniqueConstraint[] => { return Object.values(unqs).map((it) => { return { type: 'alter_unique_constraint', tableName, old: it.__old, new: it.__new, schema, } as JsonAlterUniqueConstraint; }); }; export const prepareAddCompositePrimaryKeyMySql = ( tableName: string, pks: Record, // TODO: remove? json1: MySqlSchema, json2: MySqlSchema, ): JsonCreateCompositePK[] => { const res: JsonCreateCompositePK[] = []; for (const it of Object.values(pks)) { const unsquashed = MySqlSquasher.unsquashPK(it); if ( unsquashed.columns.length === 1 && json1.tables[tableName]?.columns[unsquashed.columns[0]]?.primaryKey ) { continue; } res.push({ type: 'create_composite_pk', tableName, data: it, constraintName: unsquashed.name, } as JsonCreateCompositePK); } return res; }; export const prepareDeleteCompositePrimaryKeyMySql = ( tableName: string, pks: Record, // TODO: remove? json1: MySqlSchema, ): JsonDeleteCompositePK[] => { return Object.values(pks).map((it) => { const unsquashed = MySqlSquasher.unsquashPK(it); return { type: 'delete_composite_pk', tableName, data: it, } as JsonDeleteCompositePK; }); }; export const prepareAlterCompositePrimaryKeyMySql = ( tableName: string, pks: Record, // TODO: remove? json1: MySqlSchema, json2: MySqlSchema, ): JsonAlterCompositePK[] => { return Object.values(pks).map((it) => { return { type: 'alter_composite_pk', tableName, old: it.__old, new: it.__new, oldConstraintName: json1.tables[tableName].compositePrimaryKeys[ MySqlSquasher.unsquashPK(it.__old).name ].name, newConstraintName: json2.tables[tableName].compositePrimaryKeys[ MySqlSquasher.unsquashPK(it.__new).name ].name, } as JsonAlterCompositePK; }); }; export const preparePgCreateViewJson = ( name: string, schema: string, definition: string, materialized: boolean, withNoData: boolean = false, withOption?: any, using?: string, tablespace?: string, ): JsonCreatePgViewStatement => { return { type: 'create_view', name: name, schema: schema, definition: definition, with: withOption, materialized: materialized, withNoData, using, tablespace, }; }; export const prepareMySqlCreateViewJson = ( name: string, definition: string, meta: string, replace: boolean = false, ): JsonCreateMySqlViewStatement => { const { algorithm, sqlSecurity, withCheckOption } = MySqlSquasher.unsquashView(meta); return { type: 'mysql_create_view', name: name, definition: definition, algorithm, sqlSecurity, withCheckOption, replace, }; }; /* export const prepareSingleStoreCreateViewJson = ( name: string, definition: string, meta: string, replace: boolean = false, ): JsonCreateSingleStoreViewStatement => { const { algorithm, sqlSecurity, withCheckOption } = SingleStoreSquasher.unsquashView(meta); return { type: 'singlestore_create_view', name: name, definition: definition, algorithm, sqlSecurity, withCheckOption, replace, }; }; */ export const prepareSqliteCreateViewJson = ( name: string, definition: string, ): JsonCreateSqliteViewStatement => { return { type: 'sqlite_create_view', name: name, definition: definition, }; }; export const prepareDropViewJson = ( name: string, schema?: string, materialized?: boolean, ): JsonDropViewStatement => { const resObject: JsonDropViewStatement = { name, type: 'drop_view' }; if (schema) resObject['schema'] = schema; if (materialized) resObject['materialized'] = materialized; return resObject; }; export const prepareRenameViewJson = ( to: string, from: string, schema?: string, materialized?: boolean, ): JsonRenameViewStatement => { const resObject: JsonRenameViewStatement = { type: 'rename_view', nameTo: to, nameFrom: from, }; if (schema) resObject['schema'] = schema; if (materialized) resObject['materialized'] = materialized; return resObject; }; export const preparePgAlterViewAlterSchemaJson = ( to: string, from: string, name: string, materialized?: boolean, ): JsonAlterViewAlterSchemaStatement => { const returnObject: JsonAlterViewAlterSchemaStatement = { type: 'alter_view_alter_schema', fromSchema: from, toSchema: to, name, }; if (materialized) returnObject['materialized'] = materialized; return returnObject; }; export const preparePgAlterViewAddWithOptionJson = ( name: string, schema: string, materialized: boolean, withOption: MatViewWithOption | ViewWithOption, ): JsonAlterViewAddWithOptionStatement => { return { type: 'alter_view_add_with_option', name, schema, materialized: materialized, with: withOption, } as JsonAlterViewAddWithOptionStatement; }; export const preparePgAlterViewDropWithOptionJson = ( name: string, schema: string, materialized: boolean, withOption: MatViewWithOption | ViewWithOption, ): JsonAlterViewDropWithOptionStatement => { return { type: 'alter_view_drop_with_option', name, schema, materialized: materialized, with: withOption, } as JsonAlterViewDropWithOptionStatement; }; export const preparePgAlterViewAlterTablespaceJson = ( name: string, schema: string, materialized: boolean, to: string, ): JsonAlterViewAlterTablespaceStatement => { return { type: 'alter_view_alter_tablespace', name, schema, materialized: materialized, toTablespace: to, } as JsonAlterViewAlterTablespaceStatement; }; export const preparePgAlterViewAlterUsingJson = ( name: string, schema: string, materialized: boolean, to: string, ): JsonAlterViewAlterUsingStatement => { return { type: 'alter_view_alter_using', name, schema, materialized: materialized, toUsing: to, } as JsonAlterViewAlterUsingStatement; }; export const prepareMySqlAlterView = ( view: Omit, ): JsonAlterMySqlViewStatement => { return { type: 'alter_mysql_view', ...view }; }; /* export const prepareSingleStoreAlterView = ( view: Omit, ): JsonAlterSingleStoreViewStatement => { return { type: 'alter_singlestore_view', ...view }; }; */ ================================================ FILE: drizzle-kit/src/loader.mjs ================================================ import esbuild from 'esbuild'; import { readFileSync } from 'fs'; import * as path from 'path'; const parse = (it) => { if (!it) return { drizzle: false }; if (it.endsWith('__drizzle__')) { const offset = it.startsWith('file://') ? 'file://'.length : 0; const clean = it.slice(offset, -'__drizzle__'.length); return { drizzle: true, clean, original: it }; } return { drizzle: false, clean: it }; }; export function resolve(specifier, context, nextResolve) { const { drizzle, clean } = parse(specifier); if (drizzle && !clean.endsWith('.ts') && !clean.endsWith('.mts')) { return nextResolve(clean); } if (drizzle) { return { shortCircuit: true, url: `file://${specifier}`, }; } const parsedParent = parse(context.parentURL); const parentURL = parsedParent.drizzle ? new URL(`file://${path.resolve(parsedParent.clean)}`) : context.parentURL; // Let Node.js handle all other specifiers. return nextResolve(specifier, { ...context, parentURL }); } export async function load(url, context, defaultLoad) { const { drizzle, clean } = parse(url); if (drizzle) { const file = readFileSync(clean, 'utf-8'); if (clean.endsWith('.ts') || clean.endsWith('.mts')) { const source = esbuild.transformSync(file, { loader: 'ts', format: 'esm', }); return { format: 'module', shortCircuit: true, source: source.code, }; } } // let Node.js handle all other URLs return defaultLoad(url, context, defaultLoad); } ================================================ FILE: drizzle-kit/src/migrationPreparator.ts ================================================ import { randomUUID } from 'crypto'; import fs from 'fs'; import { CasingType } from './cli/validations/common'; import { serializeMySql, serializePg, serializeSingleStore, serializeSQLite } from './serializer'; import { dryMySql, MySqlSchema, mysqlSchema } from './serializer/mysqlSchema'; import { dryPg, PgSchema, pgSchema } from './serializer/pgSchema'; import { drySingleStore, SingleStoreSchema, singlestoreSchema } from './serializer/singlestoreSchema'; import { drySQLite, SQLiteSchema, sqliteSchema } from './serializer/sqliteSchema'; export const prepareMySqlDbPushSnapshot = async ( prev: MySqlSchema, schemaPath: string | string[], casing: CasingType | undefined, ): Promise<{ prev: MySqlSchema; cur: MySqlSchema }> => { const serialized = await serializeMySql(schemaPath, casing); const id = randomUUID(); const idPrev = prev.id; const { version, dialect, ...rest } = serialized; const result: MySqlSchema = { version, dialect, id, prevId: idPrev, ...rest }; return { prev, cur: result }; }; export const prepareSingleStoreDbPushSnapshot = async ( prev: SingleStoreSchema, schemaPath: string | string[], casing: CasingType | undefined, ): Promise<{ prev: SingleStoreSchema; cur: SingleStoreSchema }> => { const serialized = await serializeSingleStore(schemaPath, casing); const id = randomUUID(); const idPrev = prev.id; const { version, dialect, ...rest } = serialized; const result: SingleStoreSchema = { version, dialect, id, prevId: idPrev, ...rest }; return { prev, cur: result }; }; export const prepareSQLiteDbPushSnapshot = async ( prev: SQLiteSchema, schemaPath: string | string[], casing: CasingType | undefined, ): Promise<{ prev: SQLiteSchema; cur: SQLiteSchema }> => { const serialized = await serializeSQLite(schemaPath, casing); const id = randomUUID(); const idPrev = prev.id; const { version, dialect, ...rest } = serialized; const result: SQLiteSchema = { version, dialect, id, prevId: idPrev, ...rest, }; return { prev, cur: result }; }; export const preparePgDbPushSnapshot = async ( prev: PgSchema, schemaPath: string | string[], casing: CasingType | undefined, schemaFilter: string[] = ['public'], ): Promise<{ prev: PgSchema; cur: PgSchema }> => { const serialized = await serializePg(schemaPath, casing, schemaFilter); const id = randomUUID(); const idPrev = prev.id; const { version, dialect, ...rest } = serialized; const result: PgSchema = { version, dialect, id, prevId: idPrev, ...rest }; return { prev, cur: result }; }; export const prepareMySqlMigrationSnapshot = async ( migrationFolders: string[], schemaPath: string | string[], casing: CasingType | undefined, ): Promise<{ prev: MySqlSchema; cur: MySqlSchema; custom: MySqlSchema }> => { const prevSnapshot = mysqlSchema.parse( preparePrevSnapshot(migrationFolders, dryMySql), ); const serialized = await serializeMySql(schemaPath, casing); const id = randomUUID(); const idPrev = prevSnapshot.id; const { version, dialect, ...rest } = serialized; const result: MySqlSchema = { version, dialect, id, prevId: idPrev, ...rest }; const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; // that's for custom migrations, when we need new IDs, but old snapshot const custom: MySqlSchema = { id, prevId: idPrev, ...prevRest, }; return { prev: prevSnapshot, cur: result, custom }; }; export const prepareSingleStoreMigrationSnapshot = async ( migrationFolders: string[], schemaPath: string | string[], casing: CasingType | undefined, ): Promise<{ prev: SingleStoreSchema; cur: SingleStoreSchema; custom: SingleStoreSchema }> => { const prevSnapshot = singlestoreSchema.parse( preparePrevSnapshot(migrationFolders, drySingleStore), ); const serialized = await serializeSingleStore(schemaPath, casing); const id = randomUUID(); const idPrev = prevSnapshot.id; const { version, dialect, ...rest } = serialized; const result: SingleStoreSchema = { version, dialect, id, prevId: idPrev, ...rest }; const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; // that's for custom migrations, when we need new IDs, but old snapshot const custom: SingleStoreSchema = { id, prevId: idPrev, ...prevRest, }; return { prev: prevSnapshot, cur: result, custom }; }; export const prepareSqliteMigrationSnapshot = async ( snapshots: string[], schemaPath: string | string[], casing: CasingType | undefined, ): Promise<{ prev: SQLiteSchema; cur: SQLiteSchema; custom: SQLiteSchema }> => { const prevSnapshot = sqliteSchema.parse( preparePrevSnapshot(snapshots, drySQLite), ); const serialized = await serializeSQLite(schemaPath, casing); const id = randomUUID(); const idPrev = prevSnapshot.id; const { version, dialect, ...rest } = serialized; const result: SQLiteSchema = { version, dialect, id, prevId: idPrev, ...rest, }; const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; // that's for custom migrations, when we need new IDs, but old snapshot const custom: SQLiteSchema = { id, prevId: idPrev, ...prevRest, }; return { prev: prevSnapshot, cur: result, custom }; }; export const preparePgMigrationSnapshot = async ( snapshots: string[], schemaPath: string | string[], casing: CasingType | undefined, ): Promise<{ prev: PgSchema; cur: PgSchema; custom: PgSchema }> => { const prevSnapshot = pgSchema.parse(preparePrevSnapshot(snapshots, dryPg)); const serialized = await serializePg(schemaPath, casing); const id = randomUUID(); const idPrev = prevSnapshot.id; // const { version, dialect, ...rest } = serialized; const result: PgSchema = { id, prevId: idPrev, ...serialized }; const { id: _ignoredId, prevId: _ignoredPrevId, ...prevRest } = prevSnapshot; // that's for custom migrations, when we need new IDs, but old snapshot const custom: PgSchema = { id, prevId: idPrev, ...prevRest, }; return { prev: prevSnapshot, cur: result, custom }; }; const preparePrevSnapshot = (snapshots: string[], defaultPrev: any) => { let prevSnapshot: any; if (snapshots.length === 0) { prevSnapshot = defaultPrev; } else { const lastSnapshot = snapshots[snapshots.length - 1]; prevSnapshot = JSON.parse(fs.readFileSync(lastSnapshot).toString()); } return prevSnapshot; }; ================================================ FILE: drizzle-kit/src/schemaValidator.ts ================================================ import { enum as enumType, TypeOf, union } from 'zod'; import { mysqlSchema, mysqlSchemaSquashed } from './serializer/mysqlSchema'; import { pgSchema, pgSchemaSquashed } from './serializer/pgSchema'; import { singlestoreSchema, singlestoreSchemaSquashed } from './serializer/singlestoreSchema'; import { sqliteSchema, SQLiteSchemaSquashed } from './serializer/sqliteSchema'; export const dialects = ['postgresql', 'mysql', 'sqlite', 'turso', 'singlestore', 'gel'] as const; export const dialect = enumType(dialects); export type Dialect = (typeof dialects)[number]; const _: Dialect = '' as TypeOf; const commonSquashedSchema = union([ pgSchemaSquashed, mysqlSchemaSquashed, SQLiteSchemaSquashed, singlestoreSchemaSquashed, ]); const commonSchema = union([pgSchema, mysqlSchema, sqliteSchema, singlestoreSchema]); export type CommonSquashedSchema = TypeOf; export type CommonSchema = TypeOf; ================================================ FILE: drizzle-kit/src/serializer/gelSchema.ts ================================================ import { mapValues, originUUID, snapshotVersion } from '../global'; import { any, array, boolean, enum as enumType, literal, number, object, record, string, TypeOf, union } from 'zod'; const enumSchema = object({ name: string(), schema: string(), values: string().array(), }).strict(); const enumSchemaV1 = object({ name: string(), values: record(string(), string()), }).strict(); const indexColumn = object({ expression: string(), isExpression: boolean(), asc: boolean(), nulls: string().optional(), opclass: string().optional(), }); export type IndexColumnType = TypeOf; const index = object({ name: string(), columns: indexColumn.array(), isUnique: boolean(), with: record(string(), any()).optional(), method: string().default('btree'), where: string().optional(), concurrently: boolean().default(false), }).strict(); const fk = object({ name: string(), tableFrom: string(), columnsFrom: string().array(), tableTo: string(), schemaTo: string().optional(), columnsTo: string().array(), onUpdate: string().optional(), onDelete: string().optional(), }).strict(); export const sequenceSchema = object({ name: string(), increment: string().optional(), minValue: string().optional(), maxValue: string().optional(), startWith: string().optional(), cache: string().optional(), cycle: boolean().optional(), schema: string(), }).strict(); export const roleSchema = object({ name: string(), createDb: boolean().optional(), createRole: boolean().optional(), inherit: boolean().optional(), }).strict(); export const sequenceSquashed = object({ name: string(), schema: string(), values: string(), }).strict(); const column = object({ name: string(), type: string(), typeSchema: string().optional(), primaryKey: boolean(), notNull: boolean(), default: any().optional(), isUnique: any().optional(), uniqueName: string().optional(), nullsNotDistinct: boolean().optional(), generated: object({ type: literal('stored'), as: string(), }).optional(), identity: sequenceSchema .merge(object({ type: enumType(['always', 'byDefault']) })) .optional(), }).strict(); const checkConstraint = object({ name: string(), value: string(), }).strict(); const columnSquashed = object({ name: string(), type: string(), typeSchema: string().optional(), primaryKey: boolean(), notNull: boolean(), default: any().optional(), isUnique: any().optional(), uniqueName: string().optional(), nullsNotDistinct: boolean().optional(), generated: object({ type: literal('stored'), as: string(), }).optional(), identity: string().optional(), }).strict(); const compositePK = object({ name: string(), columns: string().array(), }).strict(); const uniqueConstraint = object({ name: string(), columns: string().array(), nullsNotDistinct: boolean(), }).strict(); export const policy = object({ name: string(), as: enumType(['PERMISSIVE', 'RESTRICTIVE']).optional(), for: enumType(['ALL', 'SELECT', 'INSERT', 'UPDATE', 'DELETE']).optional(), to: string().array().optional(), using: string().optional(), withCheck: string().optional(), on: string().optional(), schema: string().optional(), }).strict(); export const policySquashed = object({ name: string(), values: string(), }).strict(); const viewWithOption = object({ checkOption: enumType(['local', 'cascaded']).optional(), securityBarrier: boolean().optional(), securityInvoker: boolean().optional(), }).strict(); const matViewWithOption = object({ fillfactor: number().optional(), toastTupleTarget: number().optional(), parallelWorkers: number().optional(), autovacuumEnabled: boolean().optional(), vacuumIndexCleanup: enumType(['auto', 'off', 'on']).optional(), vacuumTruncate: boolean().optional(), autovacuumVacuumThreshold: number().optional(), autovacuumVacuumScaleFactor: number().optional(), autovacuumVacuumCostDelay: number().optional(), autovacuumVacuumCostLimit: number().optional(), autovacuumFreezeMinAge: number().optional(), autovacuumFreezeMaxAge: number().optional(), autovacuumFreezeTableAge: number().optional(), autovacuumMultixactFreezeMinAge: number().optional(), autovacuumMultixactFreezeMaxAge: number().optional(), autovacuumMultixactFreezeTableAge: number().optional(), logAutovacuumMinDuration: number().optional(), userCatalogTable: boolean().optional(), }).strict(); export const mergedViewWithOption = viewWithOption.merge(matViewWithOption).strict(); export const view = object({ name: string(), schema: string(), columns: record(string(), column), definition: string().optional(), materialized: boolean(), with: mergedViewWithOption.optional(), isExisting: boolean(), withNoData: boolean().optional(), using: string().optional(), tablespace: string().optional(), }).strict(); const table = object({ name: string(), schema: string(), columns: record(string(), column), indexes: record(string(), index), foreignKeys: record(string(), fk), compositePrimaryKeys: record(string(), compositePK), uniqueConstraints: record(string(), uniqueConstraint).default({}), policies: record(string(), policy).default({}), checkConstraints: record(string(), checkConstraint).default({}), isRLSEnabled: boolean().default(false), }).strict(); const schemaHash = object({ id: string(), prevId: string(), }); export const kitInternals = object({ tables: record( string(), object({ columns: record( string(), object({ isArray: boolean().optional(), dimensions: number().optional(), rawType: string().optional(), isDefaultAnExpression: boolean().optional(), }).optional(), ), }).optional(), ), }).optional(); export const gelSchemaExternal = object({ version: literal('1'), dialect: literal('gel'), tables: array(table), enums: array(enumSchemaV1), schemas: array(object({ name: string() })), _meta: object({ schemas: record(string(), string()), tables: record(string(), string()), columns: record(string(), string()), }), }).strict(); export const gelSchemaInternal = object({ version: literal('1'), dialect: literal('gel'), tables: record(string(), table), enums: record(string(), enumSchema), schemas: record(string(), string()), views: record(string(), view).default({}), sequences: record(string(), sequenceSchema).default({}), roles: record(string(), roleSchema).default({}), policies: record(string(), policy).default({}), _meta: object({ schemas: record(string(), string()), tables: record(string(), string()), columns: record(string(), string()), }), internal: kitInternals, }).strict(); const tableSquashed = object({ name: string(), schema: string(), columns: record(string(), columnSquashed), indexes: record(string(), string()), foreignKeys: record(string(), string()), compositePrimaryKeys: record(string(), string()), uniqueConstraints: record(string(), string()), policies: record(string(), string()), checkConstraints: record(string(), string()), isRLSEnabled: boolean().default(false), }).strict(); export const gelSchemaSquashed = object({ version: literal('1'), dialect: literal('gel'), tables: record(string(), tableSquashed), enums: record(string(), enumSchema), schemas: record(string(), string()), views: record(string(), view), sequences: record(string(), sequenceSquashed), roles: record(string(), roleSchema).default({}), policies: record(string(), policySquashed).default({}), }).strict(); export const gelSchema = gelSchemaInternal.merge(schemaHash); export type Enum = TypeOf; export type Sequence = TypeOf; export type Role = TypeOf; export type Column = TypeOf; export type Table = TypeOf; export type GelSchema = TypeOf; export type GelSchemaInternal = TypeOf; export type GelSchemaExternal = TypeOf; export type GelSchemaSquashed = TypeOf; export type Index = TypeOf; export type ForeignKey = TypeOf; export type PrimaryKey = TypeOf; export type UniqueConstraint = TypeOf; export type Policy = TypeOf; export type View = TypeOf; export type MatViewWithOption = TypeOf; export type ViewWithOption = TypeOf; export type GelKitInternals = TypeOf; export type CheckConstraint = TypeOf; // no prev version export const backwardCompatibleGelSchema = gelSchema; export const GelSquasher = { squashIdx: (idx: Index) => { index.parse(idx); return `${idx.name};${ idx.columns .map( (c) => `${c.expression}--${c.isExpression}--${c.asc}--${c.nulls}--${c.opclass ? c.opclass : ''}`, ) .join(',,') };${idx.isUnique};${idx.concurrently};${idx.method};${idx.where};${JSON.stringify(idx.with)}`; }, unsquashIdx: (input: string): Index => { const [ name, columnsString, isUnique, concurrently, method, where, idxWith, ] = input.split(';'); const columnString = columnsString.split(',,'); const columns: IndexColumnType[] = []; for (const column of columnString) { const [expression, isExpression, asc, nulls, opclass] = column.split('--'); columns.push({ nulls: nulls as IndexColumnType['nulls'], isExpression: isExpression === 'true', asc: asc === 'true', expression: expression, opclass: opclass === 'undefined' ? undefined : opclass, }); } const result: Index = index.parse({ name, columns: columns, isUnique: isUnique === 'true', concurrently: concurrently === 'true', method, where: where === 'undefined' ? undefined : where, with: !idxWith || idxWith === 'undefined' ? undefined : JSON.parse(idxWith), }); return result; }, squashIdxPush: (idx: Index) => { index.parse(idx); return `${idx.name};${ idx.columns .map((c) => `${c.isExpression ? '' : c.expression}--${c.asc}--${c.nulls}`) .join(',,') };${idx.isUnique};${idx.method};${JSON.stringify(idx.with)}`; }, unsquashIdxPush: (input: string): Index => { const [name, columnsString, isUnique, method, idxWith] = input.split(';'); const columnString = columnsString.split('--'); const columns: IndexColumnType[] = []; for (const column of columnString) { const [expression, asc, nulls, opclass] = column.split(','); columns.push({ nulls: nulls as IndexColumnType['nulls'], isExpression: expression === '', asc: asc === 'true', expression: expression, }); } const result: Index = index.parse({ name, columns: columns, isUnique: isUnique === 'true', concurrently: false, method, with: idxWith === 'undefined' ? undefined : JSON.parse(idxWith), }); return result; }, squashFK: (fk: ForeignKey) => { return `${fk.name};${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${ fk.onUpdate ?? '' };${fk.onDelete ?? ''};${fk.schemaTo || 'public'}`; }, squashPolicy: (policy: Policy) => { return `${policy.name}--${policy.as}--${policy.for}--${ policy.to?.join(',') }--${policy.using}--${policy.withCheck}--${policy.on}`; }, unsquashPolicy: (policy: string): Policy => { const splitted = policy.split('--'); return { name: splitted[0], as: splitted[1] as Policy['as'], for: splitted[2] as Policy['for'], to: splitted[3].split(','), using: splitted[4] !== 'undefined' ? splitted[4] : undefined, withCheck: splitted[5] !== 'undefined' ? splitted[5] : undefined, on: splitted[6] !== 'undefined' ? splitted[6] : undefined, }; }, squashPolicyPush: (policy: Policy) => { return `${policy.name}--${policy.as}--${policy.for}--${policy.to?.join(',')}--${policy.on}`; }, unsquashPolicyPush: (policy: string): Policy => { const splitted = policy.split('--'); return { name: splitted[0], as: splitted[1] as Policy['as'], for: splitted[2] as Policy['for'], to: splitted[3].split(','), on: splitted[4] !== 'undefined' ? splitted[4] : undefined, }; }, squashPK: (pk: PrimaryKey) => { return `${pk.columns.join(',')};${pk.name}`; }, unsquashPK: (pk: string): PrimaryKey => { const splitted = pk.split(';'); return { name: splitted[1], columns: splitted[0].split(',') }; }, squashUnique: (unq: UniqueConstraint) => { return `${unq.name};${unq.columns.join(',')};${unq.nullsNotDistinct}`; }, unsquashUnique: (unq: string): UniqueConstraint => { const [name, columns, nullsNotDistinct] = unq.split(';'); return { name, columns: columns.split(','), nullsNotDistinct: nullsNotDistinct === 'true', }; }, unsquashFK: (input: string): ForeignKey => { const [ name, tableFrom, columnsFromStr, tableTo, columnsToStr, onUpdate, onDelete, schemaTo, ] = input.split(';'); const result: ForeignKey = fk.parse({ name, tableFrom, columnsFrom: columnsFromStr.split(','), schemaTo: schemaTo, tableTo, columnsTo: columnsToStr.split(','), onUpdate, onDelete, }); return result; }, squashSequence: (seq: Omit) => { return `${seq.minValue};${seq.maxValue};${seq.increment};${seq.startWith};${seq.cache};${seq.cycle ?? ''}`; }, unsquashSequence: (seq: string): Omit => { const splitted = seq.split(';'); return { minValue: splitted[0] !== 'undefined' ? splitted[0] : undefined, maxValue: splitted[1] !== 'undefined' ? splitted[1] : undefined, increment: splitted[2] !== 'undefined' ? splitted[2] : undefined, startWith: splitted[3] !== 'undefined' ? splitted[3] : undefined, cache: splitted[4] !== 'undefined' ? splitted[4] : undefined, cycle: splitted[5] === 'true', }; }, squashIdentity: ( seq: Omit & { type: 'always' | 'byDefault' }, ) => { return `${seq.name};${seq.type};${seq.minValue};${seq.maxValue};${seq.increment};${seq.startWith};${seq.cache};${ seq.cycle ?? '' }`; }, unsquashIdentity: ( seq: string, ): Omit & { type: 'always' | 'byDefault' } => { const splitted = seq.split(';'); return { name: splitted[0], type: splitted[1] as 'always' | 'byDefault', minValue: splitted[2] !== 'undefined' ? splitted[2] : undefined, maxValue: splitted[3] !== 'undefined' ? splitted[3] : undefined, increment: splitted[4] !== 'undefined' ? splitted[4] : undefined, startWith: splitted[5] !== 'undefined' ? splitted[5] : undefined, cache: splitted[6] !== 'undefined' ? splitted[6] : undefined, cycle: splitted[7] === 'true', }; }, squashCheck: (check: CheckConstraint) => { return `${check.name};${check.value}`; }, unsquashCheck: (input: string): CheckConstraint => { const [ name, value, ] = input.split(';'); return { name, value }; }, }; export const squashGelScheme = ( json: GelSchema, action?: 'push' | undefined, ): GelSchemaSquashed => { const mappedTables = Object.fromEntries( Object.entries(json.tables).map((it) => { const squashedIndexes = mapValues(it[1].indexes, (index) => { return action === 'push' ? GelSquasher.squashIdxPush(index) : GelSquasher.squashIdx(index); }); const squashedFKs = mapValues(it[1].foreignKeys, (fk) => { return GelSquasher.squashFK(fk); }); const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { return GelSquasher.squashPK(pk); }); const mappedColumns = Object.fromEntries( Object.entries(it[1].columns).map((it) => { const mappedIdentity = it[1].identity ? GelSquasher.squashIdentity(it[1].identity) : undefined; return [ it[0], { ...it[1], identity: mappedIdentity, }, ]; }), ); const squashedUniqueConstraints = mapValues( it[1].uniqueConstraints, (unq) => { return GelSquasher.squashUnique(unq); }, ); const squashedPolicies = mapValues(it[1].policies, (policy) => { return action === 'push' ? GelSquasher.squashPolicyPush(policy) : GelSquasher.squashPolicy(policy); }); const squashedChecksContraints = mapValues( it[1].checkConstraints, (check) => { return GelSquasher.squashCheck(check); }, ); return [ it[0], { name: it[1].name, schema: it[1].schema, columns: mappedColumns, indexes: squashedIndexes, foreignKeys: squashedFKs, compositePrimaryKeys: squashedPKs, uniqueConstraints: squashedUniqueConstraints, policies: squashedPolicies, checkConstraints: squashedChecksContraints, isRLSEnabled: it[1].isRLSEnabled ?? false, }, ]; }), ); const mappedSequences = Object.fromEntries( Object.entries(json.sequences).map((it) => { return [ it[0], { name: it[1].name, schema: it[1].schema, values: GelSquasher.squashSequence(it[1]), }, ]; }), ); const mappedPolicies = Object.fromEntries( Object.entries(json.policies).map((it) => { return [ it[0], { name: it[1].name, values: action === 'push' ? GelSquasher.squashPolicyPush(it[1]) : GelSquasher.squashPolicy(it[1]), }, ]; }), ); return { version: '1', dialect: json.dialect, tables: mappedTables, enums: json.enums, schemas: json.schemas, views: json.views, policies: mappedPolicies, sequences: mappedSequences, roles: json.roles, }; }; export const dryGel = gelSchema.parse({ version: '1', dialect: 'gel', id: originUUID, prevId: '', tables: {}, enums: {}, schemas: {}, policies: {}, roles: {}, sequences: {}, _meta: { schemas: {}, tables: {}, columns: {}, }, }); ================================================ FILE: drizzle-kit/src/serializer/gelSerializer.ts ================================================ import chalk from 'chalk'; import { getTableName, is, SQL } from 'drizzle-orm'; import { AnyGelTable, GelColumn, GelDialect, GelMaterializedView, GelPolicy, GelRole, GelSchema, GelSequence, GelView, getMaterializedViewConfig, getTableConfig, getViewConfig, IndexedColumn, } from 'drizzle-orm/gel-core'; import { CasingType } from 'src/cli/validations/common'; import { IntrospectStage, IntrospectStatus } from 'src/cli/views'; import { vectorOps } from 'src/extensions/vector'; import { withStyle } from '../cli/validations/outputs'; import { type DB, escapeSingleQuotes } from '../utils'; import { GelSchemaInternal } from './gelSchema'; import type { Column, ForeignKey, GelKitInternals, Index, IndexColumnType, Policy, PrimaryKey, Role, Sequence, Table, UniqueConstraint, View, } from './gelSchema'; import { getColumnCasing, sqlToStr } from './utils'; export const indexName = (tableName: string, columns: string[]) => { return `${tableName}_${columns.join('_')}_index`; }; function stringFromIdentityProperty(field: string | number | undefined): string | undefined { return typeof field === 'string' ? (field as string) : typeof field === 'undefined' ? undefined : String(field); } function maxRangeForIdentityBasedOn(columnType: string) { return columnType === 'integer' ? '2147483647' : columnType === 'bigint' ? '9223372036854775807' : '32767'; } function minRangeForIdentityBasedOn(columnType: string) { return columnType === 'integer' ? '-2147483648' : columnType === 'bigint' ? '-9223372036854775808' : '-32768'; } function stringFromDatabaseIdentityProperty(field: any): string | undefined { return typeof field === 'string' ? (field as string) : typeof field === 'undefined' ? undefined : typeof field === 'bigint' ? field.toString() : String(field); } export function buildArrayString(array: any[], sqlType: string): string { sqlType = sqlType.split('[')[0]; const values = array .map((value) => { if (typeof value === 'number' || typeof value === 'bigint') { return value.toString(); } else if (typeof value === 'boolean') { return value ? 'true' : 'false'; } else if (Array.isArray(value)) { return buildArrayString(value, sqlType); } else if (value instanceof Date) { if (sqlType === 'date') { return `"${value.toISOString().split('T')[0]}"`; } else if (sqlType === 'timestamp') { return `"${value.toISOString().replace('T', ' ').slice(0, 23)}"`; } else { return `"${value.toISOString()}"`; } } else if (typeof value === 'object') { return `"${JSON.stringify(value).replaceAll('"', '\\"')}"`; } return `"${value}"`; }) .join(','); return `{${values}}`; } const generateGelSnapshot = ( tables: AnyGelTable[], // enums: GelEnum[], schemas: GelSchema[], sequences: GelSequence[], roles: GelRole[], policies: GelPolicy[], views: GelView[], matViews: GelMaterializedView[], casing: CasingType | undefined, schemaFilter?: string[], ): GelSchemaInternal => { const dialect = new GelDialect({ casing }); const result: Record = {}; const resultViews: Record = {}; const sequencesToReturn: Record = {}; const rolesToReturn: Record = {}; // this policies are a separate objects that were linked to a table outside of it const policiesToReturn: Record = {}; // This object stores unique names for indexes and will be used to detect if you have the same names for indexes // within the same PostgreSQL schema const indexesInSchema: Record = {}; for (const table of tables) { // This object stores unique names for checks and will be used to detect if you have the same names for checks // within the same PostgreSQL table const checksInTable: Record = {}; const { name: tableName, columns, indexes, foreignKeys, checks, schema, primaryKeys, uniqueConstraints, policies, enableRLS, } = getTableConfig(table); if (schemaFilter && !schemaFilter.includes(schema ?? 'public')) { continue; } const columnsObject: Record = {}; const indexesObject: Record = {}; // const checksObject: Record = {}; const foreignKeysObject: Record = {}; const primaryKeysObject: Record = {}; // const uniqueConstraintObject: Record = {}; const policiesObject: Record = {}; columns.forEach((column) => { const name = getColumnCasing(column, casing); const notNull: boolean = column.notNull; const primaryKey: boolean = column.primary; const sqlTypeLowered = column.getSQLType().toLowerCase(); // const typeSchema = is(column, GelEnumColumn) ? column.enum.schema || 'public' : undefined; const generated = column.generated; const identity = column.generatedIdentity; const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue); const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; const columnToSet: Column = { name, type: column.getSQLType(), typeSchema: undefined, primaryKey, notNull, generated: generated ? { as: is(generated.as, SQL) ? dialect.sqlToQuery(generated.as as SQL).sql : typeof generated.as === 'function' ? dialect.sqlToQuery(generated.as() as SQL).sql : (generated.as as any), type: 'stored', } : undefined, identity: identity ? { type: identity.type, name: identity.sequenceName ?? `${tableName}_${name}_seq`, schema: schema ?? 'public', increment, startWith, minValue, maxValue, cache, cycle: identity?.sequenceOptions?.cycle ?? false, } : undefined, }; // if (column.isUnique) { // const existingUnique = uniqueConstraintObject[column.uniqueName!]; // if (typeof existingUnique !== 'undefined') { // console.log( // `\n${ // withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ // chalk.underline.blue( // tableName, // ) // } table. // The unique constraint ${ // chalk.underline.blue( // column.uniqueName, // ) // } on the ${ // chalk.underline.blue( // name, // ) // } column is conflicting with a unique constraint name already defined for ${ // chalk.underline.blue( // existingUnique.columns.join(','), // ) // } columns\n`) // }`, // ); // process.exit(1); // } // uniqueConstraintObject[column.uniqueName!] = { // name: column.uniqueName!, // nullsNotDistinct: column.uniqueType === 'not distinct', // columns: [columnToSet.name], // }; // } if (column.default !== undefined) { if (is(column.default, SQL)) { columnToSet.default = sqlToStr(column.default, casing); } else { if (typeof column.default === 'string') { columnToSet.default = `'${escapeSingleQuotes(column.default)}'`; } else { if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; } else if (column.default instanceof Date) { if (sqlTypeLowered === 'date') { columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; } else if (sqlTypeLowered === 'timestamp') { columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; } else { columnToSet.default = `'${column.default.toISOString()}'`; } } else if (Array.isArray(column.default)) { columnToSet.default = columnToSet.default; } else { // Should do for all types // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; columnToSet.default = column.default; } } } } columnsObject[name] = columnToSet; }); primaryKeys.map((pk) => { const originalColumnNames = pk.columns.map((c) => c.name); const columnNames = pk.columns.map((c) => getColumnCasing(c, casing)); let name = pk.getName(); if (casing !== undefined) { for (let i = 0; i < originalColumnNames.length; i++) { name = name.replace(originalColumnNames[i], columnNames[i]); } } primaryKeysObject[name] = { name, columns: columnNames, }; }); // uniqueConstraints?.map((unq) => { // const columnNames = unq.columns.map((c) => getColumnCasing(c, casing)); // const name = unq.name ?? uniqueKeyName(table, columnNames); // // const existingUnique = uniqueConstraintObject[name]; // // if (typeof existingUnique !== 'undefined') { // // console.log( // // `\n${ // // withStyle.errorWarning( // // `We\'ve found duplicated unique constraint names in ${chalk.underline.blue(tableName)} table. // // The unique constraint ${chalk.underline.blue(name)} on the ${ // // chalk.underline.blue( // // columnNames.join(','), // // ) // // } columns is confilcting with a unique constraint name already defined for ${ // // chalk.underline.blue(existingUnique.columns.join(',')) // // } columns\n`, // // ) // // }`, // // ); // // process.exit(1); // // } // // uniqueConstraintObject[name] = { // // name: unq.name!, // // nullsNotDistinct: unq.nullsNotDistinct, // // columns: columnNames, // // }; // }); const fks: ForeignKey[] = foreignKeys.map((fk) => { const tableFrom = tableName; const onDelete = fk.onDelete; const onUpdate = fk.onUpdate; const reference = fk.reference(); const tableTo = getTableName(reference.foreignTable); // TODO: resolve issue with schema undefined/public for db push(or squasher) // getTableConfig(reference.foreignTable).schema || "public"; const schemaTo = getTableConfig(reference.foreignTable).schema; const originalColumnsFrom = reference.columns.map((it) => it.name); const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); const originalColumnsTo = reference.foreignColumns.map((it) => it.name); const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); let name = fk.getName(); if (casing !== undefined) { for (let i = 0; i < originalColumnsFrom.length; i++) { name = name.replace(originalColumnsFrom[i], columnsFrom[i]); } for (let i = 0; i < originalColumnsTo.length; i++) { name = name.replace(originalColumnsTo[i], columnsTo[i]); } } return { name, tableFrom, tableTo, schemaTo, columnsFrom, columnsTo, onDelete, onUpdate, } as ForeignKey; }); fks.forEach((it) => { foreignKeysObject[it.name] = it; }); indexes.forEach((value) => { const columns = value.config.columns; let indexColumnNames: string[] = []; columns.forEach((it) => { if (is(it, SQL)) { if (typeof value.config.name === 'undefined') { console.log( `\n${ withStyle.errorWarning( `Please specify an index name in ${getTableName(value.config.table)} table that has "${ dialect.sqlToQuery(it).sql }" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`, ) }`, ); process.exit(1); } } it = it as IndexedColumn; const name = getColumnCasing(it as IndexedColumn, casing); if ( !is(it, SQL) && typeof it.indexConfig!.opClass === 'undefined' ) { console.log( `\n${ withStyle.errorWarning( `You are specifying an index on the ${ chalk.blueBright( name, ) } column inside the ${ chalk.blueBright( tableName, ) } table with the ${ chalk.blueBright( 'vector', ) } type without specifying an operator class. Vector extension doesn't have a default operator class, so you need to specify one of the available options. Here is a list of available op classes for the vector extension: [${ vectorOps .map((it) => `${chalk.underline(`${it}`)}`) .join(', ') }].\n\nYou can specify it using current syntax: ${ chalk.underline( `index("${value.config.name}").using("${value.config.method}", table.${name}.op("${ vectorOps[0] }"))`, ) }\n\nYou can check the "pg_vector" docs for more info: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing\n`, ) }`, ); process.exit(1); } indexColumnNames.push(name); }); const name = value.config.name ? value.config.name : indexName(tableName, indexColumnNames); let indexColumns: IndexColumnType[] = columns.map( (it): IndexColumnType => { if (is(it, SQL)) { return { expression: dialect.sqlToQuery(it, 'indexes').sql, asc: true, isExpression: true, nulls: 'last', }; } else { it = it as IndexedColumn; return { expression: getColumnCasing(it as IndexedColumn, casing), isExpression: false, asc: it.indexConfig?.order === 'asc', nulls: it.indexConfig?.nulls ? it.indexConfig?.nulls : it.indexConfig?.order === 'desc' ? 'first' : 'last', opclass: it.indexConfig?.opClass, }; } }, ); // check for index names duplicates if (typeof indexesInSchema[schema ?? 'public'] !== 'undefined') { if (indexesInSchema[schema ?? 'public'].includes(name)) { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated index name across ${ chalk.underline.blue(schema ?? 'public') } schema. Please rename your index in either the ${ chalk.underline.blue( tableName, ) } table or the table with the duplicated index name`, ) }`, ); process.exit(1); } indexesInSchema[schema ?? 'public'].push(name); } else { indexesInSchema[schema ?? 'public'] = [name]; } indexesObject[name] = { name, columns: indexColumns, isUnique: value.config.unique ?? false, where: value.config.where ? dialect.sqlToQuery(value.config.where).sql : undefined, concurrently: value.config.concurrently ?? false, method: value.config.method ?? 'btree', with: value.config.with ?? {}, }; }); policies.forEach((policy) => { const mappedTo = []; if (!policy.to) { mappedTo.push('public'); } else { if (policy.to && typeof policy.to === 'string') { mappedTo.push(policy.to); } else if (policy.to && is(policy.to, GelRole)) { mappedTo.push(policy.to.name); } else if (policy.to && Array.isArray(policy.to)) { policy.to.forEach((it) => { if (typeof it === 'string') { mappedTo.push(it); } else if (is(it, GelRole)) { mappedTo.push(it.name); } }); } } if (policiesObject[policy.name] !== undefined) { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated policy name across ${ chalk.underline.blue(tableKey) } table. Please rename one of the policies with ${ chalk.underline.blue( policy.name, ) } name`, ) }`, ); process.exit(1); } policiesObject[policy.name] = { name: policy.name, as: policy.as?.toUpperCase() as Policy['as'] ?? 'PERMISSIVE', for: policy.for?.toUpperCase() as Policy['for'] ?? 'ALL', to: mappedTo.sort(), using: is(policy.using, SQL) ? dialect.sqlToQuery(policy.using).sql : undefined, withCheck: is(policy.withCheck, SQL) ? dialect.sqlToQuery(policy.withCheck).sql : undefined, }; }); // checks.forEach((check) => { // const checkName = check.name; // if (typeof checksInTable[`"${schema ?? 'public'}"."${tableName}"`] !== 'undefined') { // if (checksInTable[`"${schema ?? 'public'}"."${tableName}"`].includes(check.name)) { // console.log( // `\n${ // withStyle.errorWarning( // `We\'ve found duplicated check constraint name across ${ // chalk.underline.blue( // schema ?? 'public', // ) // } schema in ${ // chalk.underline.blue( // tableName, // ) // }. Please rename your check constraint in either the ${ // chalk.underline.blue( // tableName, // ) // } table or the table with the duplicated check contraint name`, // ) // }`, // ); // process.exit(1); // } // checksInTable[`"${schema ?? 'public'}"."${tableName}"`].push(checkName); // } else { // checksInTable[`"${schema ?? 'public'}"."${tableName}"`] = [check.name]; // } // checksObject[checkName] = { // name: checkName, // value: dialect.sqlToQuery(check.value).sql, // }; // }); const tableKey = `${schema ?? 'public'}.${tableName}`; result[tableKey] = { name: tableName, schema: schema ?? '', columns: columnsObject, indexes: indexesObject, foreignKeys: foreignKeysObject, compositePrimaryKeys: primaryKeysObject, uniqueConstraints: {}, // uniqueConstraintObject, policies: policiesObject, checkConstraints: {}, // checksObject, isRLSEnabled: enableRLS, }; } for (const policy of policies) { // @ts-ignore if (!policy._linkedTable) { console.log( `\n${ withStyle.errorWarning( `"Policy ${policy.name} was skipped because it was not linked to any table. You should either include the policy in a table or use .link() on the policy to link it to any table you have. For more information, please check:`, ) }`, ); continue; } // @ts-ignore const tableConfig = getTableConfig(policy._linkedTable); const tableKey = `${tableConfig.schema ?? 'public'}.${tableConfig.name}`; const mappedTo = []; if (!policy.to) { mappedTo.push('public'); } else { if (policy.to && typeof policy.to === 'string') { mappedTo.push(policy.to); } else if (policy.to && is(policy.to, GelRole)) { mappedTo.push(policy.to.name); } else if (policy.to && Array.isArray(policy.to)) { policy.to.forEach((it) => { if (typeof it === 'string') { mappedTo.push(it); } else if (is(it, GelRole)) { mappedTo.push(it.name); } }); } } // add separate policies object, that will be only responsible for policy creation // but we would need to track if a policy was enabled for a specific table or not // enable only if jsonStatements for enable rls was not already there + filter it if (result[tableKey]?.policies[policy.name] !== undefined || policiesToReturn[policy.name] !== undefined) { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated policy name across ${ chalk.underline.blue(tableKey) } table. Please rename one of the policies with ${ chalk.underline.blue( policy.name, ) } name`, ) }`, ); process.exit(1); } const mappedPolicy = { name: policy.name, as: policy.as?.toUpperCase() as Policy['as'] ?? 'PERMISSIVE', for: policy.for?.toUpperCase() as Policy['for'] ?? 'ALL', to: mappedTo.sort(), using: is(policy.using, SQL) ? dialect.sqlToQuery(policy.using).sql : undefined, withCheck: is(policy.withCheck, SQL) ? dialect.sqlToQuery(policy.withCheck).sql : undefined, }; if (result[tableKey]) { result[tableKey].policies[policy.name] = mappedPolicy; } else { policiesToReturn[policy.name] = { ...mappedPolicy, schema: tableConfig.schema ?? 'public', on: `"${tableConfig.schema ?? 'public'}"."${tableConfig.name}"`, }; } } for (const sequence of sequences) { const name = sequence.seqName!; if (typeof sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] === 'undefined') { const increment = stringFromIdentityProperty(sequence?.seqOptions?.increment) ?? '1'; const minValue = stringFromIdentityProperty(sequence?.seqOptions?.minValue) ?? (parseFloat(increment) < 0 ? '-9223372036854775808' : '1'); const maxValue = stringFromIdentityProperty(sequence?.seqOptions?.maxValue) ?? (parseFloat(increment) < 0 ? '-1' : '9223372036854775807'); const startWith = stringFromIdentityProperty(sequence?.seqOptions?.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue); const cache = stringFromIdentityProperty(sequence?.seqOptions?.cache) ?? '1'; sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] = { name, schema: sequence.schema ?? 'public', increment, startWith, minValue, maxValue, cache, cycle: sequence.seqOptions?.cycle ?? false, }; } else { // duplicate seq error } } for (const role of roles) { if (!(role as any)._existing) { rolesToReturn[role.name] = { name: role.name, createDb: (role as any).createDb === undefined ? false : (role as any).createDb, createRole: (role as any).createRole === undefined ? false : (role as any).createRole, inherit: (role as any).inherit === undefined ? true : (role as any).inherit, }; } } const combinedViews = [...views, ...matViews]; for (const view of combinedViews) { let viewName; let schema; let query; let selectedFields; let isExisting; let withOption; let tablespace; let using; let withNoData; let materialized: boolean = false; if (is(view, GelView)) { ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption } = getViewConfig(view)); } else { ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption, tablespace, using, withNoData } = getMaterializedViewConfig(view)); materialized = true; } const viewSchema = schema ?? 'public'; const viewKey = `${viewSchema}.${viewName}`; const columnsObject: Record = {}; const uniqueConstraintObject: Record = {}; const existingView = resultViews[viewKey]; if (typeof existingView !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated view name across ${ chalk.underline.blue(schema ?? 'public') } schema. Please rename your view`, ) }`, ); process.exit(1); } for (const key in selectedFields) { if (is(selectedFields[key], GelColumn)) { const column = selectedFields[key]; const notNull: boolean = column.notNull; const primaryKey: boolean = column.primary; const sqlTypeLowered = column.getSQLType().toLowerCase(); // const typeSchema = is(column, GelEnumColumn) ? column.enum.schema || 'public' : undefined; const generated = column.generated; const identity = column.generatedIdentity; const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue); const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; const columnToSet: Column = { name: column.name, type: column.getSQLType(), typeSchema: undefined, primaryKey, notNull, generated: generated ? { as: is(generated.as, SQL) ? dialect.sqlToQuery(generated.as as SQL).sql : typeof generated.as === 'function' ? dialect.sqlToQuery(generated.as() as SQL).sql : (generated.as as any), type: 'stored', } : undefined, identity: identity ? { type: identity.type, name: identity.sequenceName ?? `${viewName}_${column.name}_seq`, schema: schema ?? 'public', increment, startWith, minValue, maxValue, cache, cycle: identity?.sequenceOptions?.cycle ?? false, } : undefined, }; if (column.isUnique) { const existingUnique = uniqueConstraintObject[column.uniqueName!]; if (typeof existingUnique !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated unique constraint names in ${chalk.underline.blue(viewName)} table. The unique constraint ${chalk.underline.blue(column.uniqueName)} on the ${ chalk.underline.blue( column.name, ) } column is confilcting with a unique constraint name already defined for ${ chalk.underline.blue(existingUnique.columns.join(',')) } columns\n`, ) }`, ); process.exit(1); } uniqueConstraintObject[column.uniqueName!] = { name: column.uniqueName!, nullsNotDistinct: column.uniqueType === 'not distinct', columns: [columnToSet.name], }; } if (column.default !== undefined) { if (is(column.default, SQL)) { columnToSet.default = sqlToStr(column.default, casing); } else { if (typeof column.default === 'string') { columnToSet.default = `'${column.default}'`; } else { if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; } else if (column.default instanceof Date) { if (sqlTypeLowered === 'date') { columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; } else if (sqlTypeLowered === 'timestamp') { columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; } else { columnToSet.default = `'${column.default.toISOString()}'`; } } else if (Array.isArray(column.default)) { columnToSet.default = `'${buildArrayString(column.default, sqlTypeLowered)}'`; } else { // Should do for all types // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; columnToSet.default = column.default; } } } } columnsObject[column.name] = columnToSet; } } resultViews[viewKey] = { columns: columnsObject, definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, name: viewName, schema: viewSchema, isExisting, with: withOption, withNoData, materialized, tablespace, using, }; } // const enumsToReturn: Record = enums.reduce<{ // [key: string]: Enum; // }>((map, obj) => { // const enumSchema = obj.schema || 'public'; // const key = `${enumSchema}.${obj.enumName}`; // map[key] = { // name: obj.enumName, // schema: enumSchema, // values: obj.enumValues, // }; // return map; // }, {}); const schemasObject = Object.fromEntries( schemas .filter((it) => { if (schemaFilter) { return schemaFilter.includes(it.schemaName) && it.schemaName !== 'public'; } else { return it.schemaName !== 'public'; } }) .map((it) => [it.schemaName, it.schemaName]), ); return { version: '1', dialect: 'gel', tables: result, enums: {}, schemas: schemasObject, sequences: sequencesToReturn, roles: rolesToReturn, policies: policiesToReturn, views: resultViews, _meta: { schemas: {}, tables: {}, columns: {}, }, }; }; const trimChar = (str: string, char: string) => { let start = 0; let end = str.length; while (start < end && str[start] === char) ++start; while (end > start && str[end - 1] === char) --end; // this.toString() due to ava deep equal issue with String { "value" } return start > 0 || end < str.length ? str.substring(start, end) : str.toString(); }; function prepareRoles(entities?: { roles: boolean | { provider?: string | undefined; include?: string[] | undefined; exclude?: string[] | undefined; }; }) { let useRoles: boolean = false; const includeRoles: string[] = []; const excludeRoles: string[] = []; if (entities && entities.roles) { if (typeof entities.roles === 'object') { if (entities.roles.provider) { if (entities.roles.provider === 'supabase') { excludeRoles.push(...[ 'anon', 'authenticator', 'authenticated', 'service_role', 'supabase_auth_admin', 'supabase_storage_admin', 'dashboard_user', 'supabase_admin', ]); } else if (entities.roles.provider === 'neon') { excludeRoles.push(...['authenticated', 'anonymous']); } } if (entities.roles.include) { includeRoles.push(...entities.roles.include); } if (entities.roles.exclude) { excludeRoles.push(...entities.roles.exclude); } } else { useRoles = entities.roles; } } return { useRoles, includeRoles, excludeRoles }; } export const fromDatabase = async ( db: DB, tablesFilter: (table: string) => boolean = () => true, schemaFilters: string[], entities?: { roles: boolean | { provider?: string | undefined; include?: string[] | undefined; exclude?: string[] | undefined; }; }, progressCallback?: ( stage: IntrospectStage, count: number, status: IntrospectStatus, ) => void, tsSchema?: GelSchemaInternal, ): Promise => { const result: Record = {}; // const views: Record = {}; const policies: Record = {}; const internals: GelKitInternals = { tables: {} }; const where = schemaFilters.map((t) => `n.nspname = '${t}'`).join(' or '); const allTables = await db.query<{ table_schema: string; table_name: string; type: string; rls_enabled: boolean }>( `SELECT n.nspname::text AS table_schema, c.relname::text AS table_name, CASE WHEN c.relkind = 'r' THEN 'table' WHEN c.relkind = 'v' THEN 'view' WHEN c.relkind = 'm' THEN 'materialized_view' END AS type, c.relrowsecurity AS rls_enabled FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid::text = c.relnamespace::text WHERE c.relkind IN ('r', 'v', 'm') ${where === '' ? '' : ` AND ${where}`};`, ); const schemas = new Set(allTables.map((it) => it.table_schema)); const allSchemas = await db.query<{ table_schema: string; }>(`select s.nspname::text as table_schema from pg_catalog.pg_namespace s join pg_catalog.pg_user u on u.usesysid::text = s.nspowner::text where nspname not in ('information_schema', 'pg_catalog', 'public') and nspname::text not like 'pg_toast%' and nspname::text not like 'pg_temp_%' order by 1;`); allSchemas.forEach((item) => { if (schemaFilters.includes(item.table_schema)) { schemas.add(item.table_schema); } }); let columnsCount = 0; let indexesCount = 0; let foreignKeysCount = 0; let tableCount = 0; const sequencesToReturn: Record = {}; const all = allTables .filter((it) => it.type === 'table') .map((row) => { return new Promise(async (res, rej) => { const tableName = row.table_name as string; if (!tablesFilter(tableName)) return res(''); tableCount += 1; const tableSchema = row.table_schema; try { const columnToReturn: Record = {}; const indexToReturn: Record = {}; const foreignKeysToReturn: Record = {}; const primaryKeys: Record = {}; // const uniqueConstrains: Record = {}; // const checkConstraints: Record = {}; const tableResponse = await getColumnsInfoQuery({ schema: tableSchema, table: tableName, db }); // const tableConstraints = await db.query( // `SELECT c.column_name::text, c.data_type::text, constraint_type::text, constraint_name::text, constraint_schema::text // FROM information_schema.table_constraints tc // JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_name) // JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema // AND tc.table_name = c.table_name AND ccu.column_name = c.column_name // WHERE tc.table_name = '${tableName}' and constraint_schema = '${tableSchema}';`, // ); // const tableChecks = await db.query(`SELECT // tc.constraint_name::text, // tc.constraint_type::text, // pg_get_constraintdef(con.oid) AS constraint_definition // FROM // information_schema.table_constraints AS tc // JOIN pg_constraint AS con // ON tc.constraint_name = con.conname // AND con.conrelid = ( // SELECT oid // FROM pg_class // WHERE relname = tc.table_name // AND relnamespace = ( // SELECT oid // FROM pg_namespace // WHERE nspname = tc.constraint_schema // ) // ) // WHERE // tc.table_name = '${tableName}' // AND tc.constraint_schema = '${tableSchema}' // AND tc.constraint_type = 'CHECK';`); columnsCount += tableResponse.length; if (progressCallback) { progressCallback('columns', columnsCount, 'fetching'); } const tableForeignKeys = await db.query( `SELECT con.contype::text AS constraint_type, nsp.nspname::text AS constraint_schema, con.conname::text AS constraint_name, rel.relname::text AS table_name, att.attname::text AS column_name, fnsp.nspname::text AS foreign_table_schema, frel.relname::text AS foreign_table_name, fatt.attname::text AS foreign_column_name, CASE con.confupdtype WHEN 'a' THEN 'NO ACTION' WHEN 'r' THEN 'RESTRICT' WHEN 'n' THEN 'SET NULL' WHEN 'c' THEN 'CASCADE' WHEN 'd' THEN 'SET DEFAULT' END AS update_rule, CASE con.confdeltype WHEN 'a' THEN 'NO ACTION' WHEN 'r' THEN 'RESTRICT' WHEN 'n' THEN 'SET NULL' WHEN 'c' THEN 'CASCADE' WHEN 'd' THEN 'SET DEFAULT' END AS delete_rule FROM pg_catalog.pg_constraint con JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid JOIN pg_catalog.pg_namespace nsp ON nsp.oid = con.connamespace LEFT JOIN pg_catalog.pg_attribute att ON att.attnum = ANY (con.conkey) AND att.attrelid = con.conrelid LEFT JOIN pg_catalog.pg_class frel ON frel.oid = con.confrelid LEFT JOIN pg_catalog.pg_namespace fnsp ON fnsp.oid = frel.relnamespace LEFT JOIN pg_catalog.pg_attribute fatt ON fatt.attnum = ANY (con.confkey) AND fatt.attrelid = con.confrelid WHERE nsp.nspname = '${tableSchema}' AND rel.relname = '${tableName}' AND con.contype IN ('f');`, ); foreignKeysCount += tableForeignKeys.length; if (progressCallback) { progressCallback('fks', foreignKeysCount, 'fetching'); } for (const fk of tableForeignKeys) { // const tableFrom = fk.table_name; const columnFrom: string = fk.column_name; const tableTo = fk.foreign_table_name; const columnTo: string = fk.foreign_column_name; const schemaTo: string = fk.foreign_table_schema; const foreignKeyName = fk.constraint_name; const onUpdate = fk.update_rule?.toLowerCase(); const onDelete = fk.delete_rule?.toLowerCase(); if (typeof foreignKeysToReturn[foreignKeyName] !== 'undefined') { foreignKeysToReturn[foreignKeyName].columnsFrom.push(columnFrom); foreignKeysToReturn[foreignKeyName].columnsTo.push(columnTo); } else { foreignKeysToReturn[foreignKeyName] = { name: foreignKeyName, tableFrom: tableName, tableTo, schemaTo, columnsFrom: [columnFrom], columnsTo: [columnTo], onDelete, onUpdate, }; } foreignKeysToReturn[foreignKeyName].columnsFrom = [ ...new Set(foreignKeysToReturn[foreignKeyName].columnsFrom), ]; foreignKeysToReturn[foreignKeyName].columnsTo = [...new Set(foreignKeysToReturn[foreignKeyName].columnsTo)]; } // const uniqueConstrainsRows = tableConstraints.filter((mapRow) => mapRow.constraint_type === 'UNIQUE'); // for (const unqs of uniqueConstrainsRows) { // // const tableFrom = fk.table_name; // const columnName: string = unqs.column_name; // const constraintName: string = unqs.constraint_name; // if (typeof uniqueConstrains[constraintName] !== 'undefined') { // uniqueConstrains[constraintName].columns.push(columnName); // } else { // uniqueConstrains[constraintName] = { // columns: [columnName], // nullsNotDistinct: false, // name: constraintName, // }; // } // } // checksCount += tableChecks.length; // if (progressCallback) { // progressCallback('checks', checksCount, 'fetching'); // } // for (const checks of tableChecks) { // // CHECK (((email)::text <> 'test@gmail.com'::text)) // // Where (email) is column in table // let checkValue: string = checks.constraint_definition; // const constraintName: string = checks.constraint_name; // checkValue = checkValue.replace(/^CHECK\s*\(\(/, '').replace(/\)\)\s*$/, ''); // checkConstraints[constraintName] = { // name: constraintName, // value: checkValue, // }; // } for (const columnResponse of tableResponse) { const columnName = columnResponse.column_name; if (columnName === '__type__') continue; const columnAdditionalDT = columnResponse.additional_dt; const columnDimensions = columnResponse.array_dimensions; const enumType: string = columnResponse.enum_name; let columnType: string = columnResponse.data_type; // const typeSchema = columnResponse.type_schema; const defaultValueRes: string = columnResponse.column_default; const isGenerated = columnResponse.is_generated === 'ALWAYS'; const generationExpression = columnResponse.generation_expression; const isIdentity = columnResponse.is_identity === 'YES'; const identityGeneration = columnResponse.identity_generation === 'ALWAYS' ? 'always' : 'byDefault'; const identityStart = columnResponse.identity_start; const identityIncrement = columnResponse.identity_increment; const identityMaximum = columnResponse.identity_maximum; const identityMinimum = columnResponse.identity_minimum; const identityCycle = columnResponse.identity_cycle === 'YES'; const identityName = columnResponse.seq_name; // const primaryKey = tableConstraints.filter((mapRow) => // columnName === mapRow.column_name && mapRow.constraint_type === 'PRIMARY KEY' // ); // const cprimaryKey = tableConstraints.filter((mapRow) => mapRow.constraint_type === 'PRIMARY KEY'); // if (cprimaryKey.length > 1) { // const tableCompositePkName = await db.query( // `SELECT conname::text AS primary_key // FROM pg_constraint join pg_class on (pg_class.oid = conrelid) // WHERE contype = 'p' // AND connamespace = $1::regnamespace // AND pg_class.relname = $2;`, // [tableSchema, tableName], // ); // primaryKeys[tableCompositePkName[0].primary_key] = { // name: tableCompositePkName[0].primary_key, // columns: cprimaryKey.map((c: any) => c.column_name), // }; // } let columnTypeMapped = columnType; // Set default to internal object if (columnAdditionalDT === 'ARRAY') { if (typeof internals.tables[tableName] === 'undefined') { internals.tables[tableName] = { columns: { [columnName]: { isArray: true, dimensions: columnDimensions, rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), }, }, }; } else { if (typeof internals.tables[tableName]!.columns[columnName] === 'undefined') { internals.tables[tableName]!.columns[columnName] = { isArray: true, dimensions: columnDimensions, rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), }; } } } const defaultValue = defaultForColumn(columnResponse, internals, tableName); if ( defaultValue === 'NULL' || (defaultValueRes && defaultValueRes.startsWith('(') && defaultValueRes.endsWith(')')) ) { if (typeof internals!.tables![tableName] === 'undefined') { internals!.tables![tableName] = { columns: { [columnName]: { isDefaultAnExpression: true, }, }, }; } else { if (typeof internals!.tables![tableName]!.columns[columnName] === 'undefined') { internals!.tables![tableName]!.columns[columnName] = { isDefaultAnExpression: true, }; } else { internals!.tables![tableName]!.columns[columnName]!.isDefaultAnExpression = true; } } } if (columnTypeMapped.startsWith('numeric(')) { columnTypeMapped = columnTypeMapped.replace(',', ', '); } if (columnAdditionalDT === 'ARRAY') { for (let i = 1; i < Number(columnDimensions); i++) { columnTypeMapped += '[]'; } } // TODO check if correct // skip range and tuples if (columnTypeMapped.includes('tuple<') || columnTypeMapped.includes('range')) continue; columnTypeMapped = trimChar(columnTypeMapped, '"'); columnTypeMapped = columnTypeMapped.replace('pg_catalog.', ''); // patching array types columnTypeMapped = columnTypeMapped.replace('float4[]', 'real[]').replace('float8[]', 'double precision[]') .replace('"numeric"[]', 'numeric[]').replace('"time"[]', 'time without time zone[]').replace( 'int2[]', 'smallint[]', ).replace( 'int4[]', 'integer[]', ).replace( 'int8[]', 'bigint[]', ).replace( 'bool[]', 'boolean[]', ); columnToReturn[columnName] = { name: columnName, type: // filter vectors, but in future we should filter any extension that was installed by user columnAdditionalDT === 'USER-DEFINED' && !['vector', 'geometry'].includes(enumType) ? enumType : columnTypeMapped, typeSchema: undefined, // typeSchema: enumsToReturn[`${typeSchema}.${enumType}`] !== undefined // ? enumsToReturn[`${typeSchema}.${enumType}`].schema // : undefined, primaryKey: columnName === 'id', default: defaultValue, notNull: columnResponse.is_nullable === 'NO', generated: isGenerated ? { as: generationExpression, type: 'stored' } : undefined, identity: isIdentity ? { type: identityGeneration, name: identityName, increment: stringFromDatabaseIdentityProperty(identityIncrement), minValue: stringFromDatabaseIdentityProperty(identityMinimum), maxValue: stringFromDatabaseIdentityProperty(identityMaximum), startWith: stringFromDatabaseIdentityProperty(identityStart), cache: sequencesToReturn[identityName]?.cache ? sequencesToReturn[identityName]?.cache : sequencesToReturn[`${tableSchema}.${identityName}`]?.cache ? sequencesToReturn[`${tableSchema}.${identityName}`]?.cache : undefined, cycle: identityCycle, schema: tableSchema, } : undefined, }; if (identityName && typeof identityName === 'string') { // remove "" from sequence name delete sequencesToReturn[ `${tableSchema}.${ identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName }` ]; delete sequencesToReturn[identityName]; } } const dbIndexes = await db.query( `SELECT DISTINCT ON (t.relname, ic.relname, k.i) t.relname::text as table_name, ic.relname::text AS indexname, k.i AS index_order, i.indisunique as is_unique, am.amname::text as method, ic.reloptions as with, coalesce(a.attname, (('{' || pg_get_expr( i.indexprs, i.indrelid ) || '}')::text[] )[k.i] )::text AS column_name, CASE WHEN pg_get_expr(i.indexprs, i.indrelid) IS NOT NULL THEN 1 ELSE 0 END AS is_expression, i.indoption[k.i-1] & 1 = 1 AS descending, i.indoption[k.i-1] & 2 = 2 AS nulls_first, pg_get_expr( i.indpred, i.indrelid ) as where, opc.opcname::text FROM pg_class t LEFT JOIN pg_index i ON t.oid = i.indrelid LEFT JOIN pg_class ic ON ic.oid = i.indexrelid CROSS JOIN LATERAL (SELECT unnest(i.indkey), generate_subscripts(i.indkey, 1) + 1) AS k(attnum, i) LEFT JOIN pg_attribute AS a ON i.indrelid = a.attrelid AND k.attnum = a.attnum JOIN pg_namespace c on c.oid = t.relnamespace LEFT JOIN pg_am AS am ON ic.relam = am.oid JOIN pg_opclass opc ON opc.oid = ANY(i.indclass) WHERE c.nspname = '${tableSchema}' AND t.relname = '${tableName}';`, ); const dbIndexFromConstraint = await db.query( `SELECT idx.indexrelname::text AS index_name, idx.relname::text AS table_name, schemaname::text, CASE WHEN con.conname IS NOT NULL THEN 1 ELSE 0 END AS generated_by_constraint FROM pg_stat_user_indexes idx LEFT JOIN pg_constraint con ON con.conindid = idx.indexrelid WHERE idx.relname = '${tableName}' and schemaname = '${tableSchema}' group by index_name, table_name,schemaname, generated_by_constraint;`, ); const idxsInConsteraint = dbIndexFromConstraint.filter((it) => it.generated_by_constraint === 1).map((it) => it.index_name ); for (const dbIndex of dbIndexes) { const indexName: string = dbIndex.indexname; const indexColumnName: string = dbIndex.column_name; const indexIsUnique = dbIndex.is_unique; const indexMethod = dbIndex.method; const indexWith: string[] = dbIndex.with; const indexWhere: string = dbIndex.where; const opclass: string = dbIndex.opcname; const isExpression = dbIndex.is_expression === 1; const desc: boolean = dbIndex.descending; const nullsFirst: boolean = dbIndex.nulls_first; const mappedWith: Record = {}; if (indexWith !== null) { indexWith // .slice(1, indexWith.length - 1) // .split(",") .forEach((it) => { const splitted = it.split('='); mappedWith[splitted[0]] = splitted[1]; }); } if (idxsInConsteraint.includes(indexName)) continue; if (typeof indexToReturn[indexName] !== 'undefined') { indexToReturn[indexName].columns.push({ expression: indexColumnName, asc: !desc, nulls: nullsFirst ? 'first' : 'last', opclass, isExpression, }); } else { indexToReturn[indexName] = { name: indexName, columns: [ { expression: indexColumnName, asc: !desc, nulls: nullsFirst ? 'first' : 'last', opclass, isExpression, }, ], isUnique: indexIsUnique, // should not be a part of diff detects concurrently: false, method: indexMethod, where: indexWhere === null ? undefined : indexWhere, with: mappedWith, }; } } indexesCount += Object.keys(indexToReturn).length; if (progressCallback) { progressCallback('indexes', indexesCount, 'fetching'); } result[`${tableSchema}.${tableName}`] = { name: tableName, schema: tableSchema !== 'public' ? tableSchema : '', columns: columnToReturn, indexes: indexToReturn, foreignKeys: foreignKeysToReturn, compositePrimaryKeys: primaryKeys, uniqueConstraints: {}, // uniqueConstrains, checkConstraints: {}, // checkConstraints, policies: {}, // policiesByTable[`${tableSchema}.${tableName}`] ?? {}, isRLSEnabled: row.rls_enabled, }; } catch (e) { rej(e); return; } res(''); }); }); if (progressCallback) { progressCallback('tables', tableCount, 'done'); } for await (const _ of all) { } if (progressCallback) { progressCallback('columns', columnsCount, 'done'); progressCallback('indexes', indexesCount, 'done'); progressCallback('fks', foreignKeysCount, 'done'); } const schemasObject = Object.fromEntries([...schemas].map((it) => [it, it])); return { version: '1', dialect: 'gel', tables: result, enums: {}, schemas: schemasObject, sequences: sequencesToReturn, roles: {}, // rolesToReturn, policies, views: {}, // views, _meta: { schemas: {}, tables: {}, columns: {}, }, internal: internals, }; }; const defaultForColumn = (column: any, internals: GelKitInternals, tableName: string) => { const columnName = column.column_name; const isArray = internals?.tables[tableName]?.columns[columnName]?.isArray ?? false; if (column.column_default === null || column.column_default === undefined) return undefined; if (column.column_default.endsWith('[]')) { column.column_default = column.column_default.slice(0, -2); } column.column_default = column.column_default.replace(/::(.*?)(? psql stores like '99'::numeric return columnDefaultAsString.includes("'") ? columnDefaultAsString : `'${columnDefaultAsString}'`; } else if (column.data_type === 'json' || column.data_type === 'jsonb') { return `'${columnDefaultAsString}'`; } else if (column.data_type === 'boolean') { return column.column_default === 'true'; } else if (columnDefaultAsString === 'NULL') { return `NULL`; } else if (columnDefaultAsString.startsWith("'") && columnDefaultAsString.endsWith("'")) { return columnDefaultAsString; } else { return `${columnDefaultAsString.replace(/\\/g, '`\\')}`; } }; const getColumnsInfoQuery = ({ schema, table, db }: { schema: string; table: string; db: DB }) => { return db.query( `SELECT a.attrelid::regclass::text AS table_name, -- Table, view, or materialized view name a.attname::text AS column_name, -- Column name CASE WHEN NOT a.attisdropped THEN CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END ELSE NULL END AS is_nullable, -- NULL or NOT NULL constraint a.attndims AS array_dimensions, -- Array dimensions CASE WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) AND EXISTS ( SELECT FROM pg_attrdef ad WHERE ad.adrelid = a.attrelid AND ad.adnum = a.attnum AND pg_get_expr(ad.adbin, ad.adrelid) = 'nextval(''' || pg_get_serial_sequence(a.attrelid::regclass::text, a.attname)::regclass || '''::regclass)' ) THEN CASE a.atttypid WHEN 'int'::regtype THEN 'serial' WHEN 'int8'::regtype THEN 'bigserial' WHEN 'int2'::regtype THEN 'smallserial' END ELSE format_type(a.atttypid, a.atttypmod) END AS data_type, -- Column data type -- ns.nspname AS type_schema, -- Schema name c.column_default::text, -- Column default value c.data_type::text AS additional_dt, -- Data type from information_schema c.udt_name::text AS enum_name, -- Enum type (if applicable) c.is_generated::text, -- Is it a generated column? c.generation_expression::text, -- Generation expression (if generated) c.is_identity::text, -- Is it an identity column? c.identity_generation::text, -- Identity generation strategy (ALWAYS or BY DEFAULT) c.identity_start::text, -- Start value of identity column c.identity_increment::text, -- Increment for identity column c.identity_maximum::text, -- Maximum value for identity column c.identity_minimum::text, -- Minimum value for identity column c.identity_cycle::text, -- Does the identity column cycle? ns.nspname::text AS type_schema -- Schema of the enum type FROM pg_attribute a JOIN pg_class cls ON cls.oid = a.attrelid -- Join pg_class to get table/view/materialized view info JOIN pg_namespace ns ON ns.oid = cls.relnamespace -- Join namespace to get schema info LEFT JOIN information_schema.columns c ON c.column_name = a.attname AND c.table_schema = ns.nspname AND c.table_name = cls.relname -- Match schema and table/view name LEFT JOIN pg_type enum_t ON enum_t.oid = a.atttypid -- Join to get the type info LEFT JOIN pg_namespace enum_ns ON enum_ns.oid = enum_t.typnamespace -- Join to get the enum schema WHERE a.attnum > 0 -- Valid column numbers only AND NOT a.attisdropped -- Skip dropped columns AND cls.relkind IN ('r', 'v', 'm') -- Include regular tables ('r'), views ('v'), and materialized views ('m') AND ns.nspname::text = '${schema}' -- Filter by schema AND cls.relname::text = '${table}' -- Filter by table name ORDER BY a.attnum; -- Order by column number`, ); }; ================================================ FILE: drizzle-kit/src/serializer/index.ts ================================================ import chalk from 'chalk'; import fs from 'fs'; import * as glob from 'glob'; import Path from 'path'; import { CasingType } from 'src/cli/validations/common'; import { error } from '../cli/views'; import type { MySqlSchemaInternal } from './mysqlSchema'; import type { PgSchemaInternal } from './pgSchema'; import { SingleStoreSchemaInternal } from './singlestoreSchema'; import type { SQLiteSchemaInternal } from './sqliteSchema'; export const serializeMySql = async ( path: string | string[], casing: CasingType | undefined, ): Promise => { const filenames = prepareFilenames(path); console.log(chalk.gray(`Reading schema files:\n${filenames.join('\n')}\n`)); const { prepareFromMySqlImports } = await import('./mysqlImports'); const { generateMySqlSnapshot } = await import('./mysqlSerializer'); const { tables, views } = await prepareFromMySqlImports(filenames); return generateMySqlSnapshot(tables, views, casing); }; export const serializePg = async ( path: string | string[], casing: CasingType | undefined, schemaFilter?: string[], ): Promise => { const filenames = prepareFilenames(path); const { prepareFromPgImports } = await import('./pgImports'); const { generatePgSnapshot } = await import('./pgSerializer'); const { tables, enums, schemas, sequences, views, matViews, roles, policies } = await prepareFromPgImports( filenames, ); return generatePgSnapshot(tables, enums, schemas, sequences, roles, policies, views, matViews, casing, schemaFilter); }; export const serializeSQLite = async ( path: string | string[], casing: CasingType | undefined, ): Promise => { const filenames = prepareFilenames(path); const { prepareFromSqliteImports } = await import('./sqliteImports'); const { generateSqliteSnapshot } = await import('./sqliteSerializer'); const { tables, views } = await prepareFromSqliteImports(filenames); return generateSqliteSnapshot(tables, views, casing); }; export const serializeSingleStore = async ( path: string | string[], casing: CasingType | undefined, ): Promise => { const filenames = prepareFilenames(path); console.log(chalk.gray(`Reading schema files:\n${filenames.join('\n')}\n`)); const { prepareFromSingleStoreImports } = await import('./singlestoreImports'); const { generateSingleStoreSnapshot } = await import('./singlestoreSerializer'); const { tables /* views */ } = await prepareFromSingleStoreImports(filenames); return generateSingleStoreSnapshot(tables, /* views, */ casing); }; export const prepareFilenames = (path: string | string[]) => { if (typeof path === 'string') { path = [path]; } const prefix = process.env.TEST_CONFIG_PATH_PREFIX || ''; const result = path.reduce((result, cur) => { const globbed = glob.sync(`${prefix}${cur}`); globbed.forEach((it) => { const fileName = fs.lstatSync(it).isDirectory() ? null : Path.resolve(it); const filenames = fileName ? [fileName!] : fs.readdirSync(it).map((file) => Path.join(Path.resolve(it), file)); filenames .filter((file) => !fs.lstatSync(file).isDirectory()) .forEach((file) => result.add(file)); }); return result; }, new Set()); const res = [...result]; // TODO: properly handle and test const errors = res.filter((it) => { return !( it.endsWith('.ts') || it.endsWith('.js') || it.endsWith('.cjs') || it.endsWith('.mjs') || it.endsWith('.mts') || it.endsWith('.cts') ); }); // when schema: "./schema" and not "./schema.ts" if (res.length === 0) { console.log( error( `No schema files found for path config [${ path .map((it) => `'${it}'`) .join(', ') }]`, ), ); console.log( error( `If path represents a file - please make sure to use .ts or other extension in the path`, ), ); process.exit(1); } return res; }; ================================================ FILE: drizzle-kit/src/serializer/mysqlImports.ts ================================================ import { is } from 'drizzle-orm'; import type { AnyMySqlTable } from 'drizzle-orm/mysql-core'; import { MySqlTable, MySqlView } from 'drizzle-orm/mysql-core'; import { safeRegister } from '../cli/commands/utils'; export const prepareFromExports = (exports: Record) => { const tables: AnyMySqlTable[] = []; const views: MySqlView[] = []; const i0values = Object.values(exports); i0values.forEach((t) => { if (is(t, MySqlTable)) { tables.push(t); } if (is(t, MySqlView)) { views.push(t); } }); return { tables, views }; }; export const prepareFromMySqlImports = async (imports: string[]) => { const tables: AnyMySqlTable[] = []; const views: MySqlView[] = []; await safeRegister(async () => { for (let i = 0; i < imports.length; i++) { const it = imports[i]; const i0: Record = require(`${it}`); const prepared = prepareFromExports(i0); tables.push(...prepared.tables); views.push(...prepared.views); } }); return { tables: Array.from(new Set(tables)), views }; }; ================================================ FILE: drizzle-kit/src/serializer/mysqlSchema.ts ================================================ import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; import { mapValues, originUUID } from '../global'; // ------- V3 -------- const index = object({ name: string(), columns: string().array(), isUnique: boolean(), using: enumType(['btree', 'hash']).optional(), algorithm: enumType(['default', 'inplace', 'copy']).optional(), lock: enumType(['default', 'none', 'shared', 'exclusive']).optional(), }).strict(); const fk = object({ name: string(), tableFrom: string(), columnsFrom: string().array(), tableTo: string(), columnsTo: string().array(), onUpdate: string().optional(), onDelete: string().optional(), }).strict(); const column = object({ name: string(), type: string(), primaryKey: boolean(), notNull: boolean(), autoincrement: boolean().optional(), default: any().optional(), onUpdate: any().optional(), generated: object({ type: enumType(['stored', 'virtual']), as: string(), }).optional(), }).strict(); const tableV3 = object({ name: string(), columns: record(string(), column), indexes: record(string(), index), foreignKeys: record(string(), fk), }).strict(); const compositePK = object({ name: string(), columns: string().array(), }).strict(); const uniqueConstraint = object({ name: string(), columns: string().array(), }).strict(); const checkConstraint = object({ name: string(), value: string(), }).strict(); const tableV4 = object({ name: string(), schema: string().optional(), columns: record(string(), column), indexes: record(string(), index), foreignKeys: record(string(), fk), }).strict(); const table = object({ name: string(), columns: record(string(), column), indexes: record(string(), index), foreignKeys: record(string(), fk), compositePrimaryKeys: record(string(), compositePK), uniqueConstraints: record(string(), uniqueConstraint).default({}), checkConstraint: record(string(), checkConstraint).default({}), }).strict(); const viewMeta = object({ algorithm: enumType(['undefined', 'merge', 'temptable']), sqlSecurity: enumType(['definer', 'invoker']), withCheckOption: enumType(['local', 'cascaded']).optional(), }).strict(); export const view = object({ name: string(), columns: record(string(), column), definition: string().optional(), isExisting: boolean(), }).strict().merge(viewMeta); type SquasherViewMeta = Omit, 'definer'>; export const kitInternals = object({ tables: record( string(), object({ columns: record( string(), object({ isDefaultAnExpression: boolean().optional() }).optional(), ), }).optional(), ).optional(), indexes: record( string(), object({ columns: record( string(), object({ isExpression: boolean().optional() }).optional(), ), }).optional(), ).optional(), }).optional(); // use main dialect const dialect = literal('mysql'); const schemaHash = object({ id: string(), prevId: string(), }); export const schemaInternalV3 = object({ version: literal('3'), dialect: dialect, tables: record(string(), tableV3), }).strict(); export const schemaInternalV4 = object({ version: literal('4'), dialect: dialect, tables: record(string(), tableV4), schemas: record(string(), string()), }).strict(); export const schemaInternalV5 = object({ version: literal('5'), dialect: dialect, tables: record(string(), table), schemas: record(string(), string()), _meta: object({ schemas: record(string(), string()), tables: record(string(), string()), columns: record(string(), string()), }), internal: kitInternals, }).strict(); export const schemaInternal = object({ version: literal('5'), dialect: dialect, tables: record(string(), table), views: record(string(), view).default({}), _meta: object({ tables: record(string(), string()), columns: record(string(), string()), }), internal: kitInternals, }).strict(); export const schemaV3 = schemaInternalV3.merge(schemaHash); export const schemaV4 = schemaInternalV4.merge(schemaHash); export const schemaV5 = schemaInternalV5.merge(schemaHash); export const schema = schemaInternal.merge(schemaHash); const tableSquashedV4 = object({ name: string(), schema: string().optional(), columns: record(string(), column), indexes: record(string(), string()), foreignKeys: record(string(), string()), }).strict(); const tableSquashed = object({ name: string(), columns: record(string(), column), indexes: record(string(), string()), foreignKeys: record(string(), string()), compositePrimaryKeys: record(string(), string()), uniqueConstraints: record(string(), string()).default({}), checkConstraints: record(string(), string()).default({}), }).strict(); const viewSquashed = view.omit({ algorithm: true, sqlSecurity: true, withCheckOption: true, }).extend({ meta: string() }); export const schemaSquashed = object({ version: literal('5'), dialect: dialect, tables: record(string(), tableSquashed), views: record(string(), viewSquashed), }).strict(); export const schemaSquashedV4 = object({ version: literal('4'), dialect: dialect, tables: record(string(), tableSquashedV4), schemas: record(string(), string()), }).strict(); export type Dialect = TypeOf; export type Column = TypeOf; export type Table = TypeOf; export type TableV4 = TypeOf; export type MySqlSchema = TypeOf; export type MySqlSchemaV3 = TypeOf; export type MySqlSchemaV4 = TypeOf; export type MySqlSchemaV5 = TypeOf; export type MySqlSchemaInternal = TypeOf; export type MySqlKitInternals = TypeOf; export type MySqlSchemaSquashed = TypeOf; export type MySqlSchemaSquashedV4 = TypeOf; export type Index = TypeOf; export type ForeignKey = TypeOf; export type PrimaryKey = TypeOf; export type UniqueConstraint = TypeOf; export type CheckConstraint = TypeOf; export type View = TypeOf; export type ViewSquashed = TypeOf; export const MySqlSquasher = { squashIdx: (idx: Index) => { index.parse(idx); return `${idx.name};${idx.columns.join(',')};${idx.isUnique};${idx.using ?? ''};${idx.algorithm ?? ''};${ idx.lock ?? '' }`; }, unsquashIdx: (input: string): Index => { const [name, columnsString, isUnique, using, algorithm, lock] = input.split(';'); const destructed = { name, columns: columnsString.split(','), isUnique: isUnique === 'true', using: using ? using : undefined, algorithm: algorithm ? algorithm : undefined, lock: lock ? lock : undefined, }; return index.parse(destructed); }, squashPK: (pk: PrimaryKey) => { return `${pk.name};${pk.columns.join(',')}`; }, unsquashPK: (pk: string): PrimaryKey => { const splitted = pk.split(';'); return { name: splitted[0], columns: splitted[1].split(',') }; }, squashUnique: (unq: UniqueConstraint) => { return `${unq.name};${unq.columns.join(',')}`; }, unsquashUnique: (unq: string): UniqueConstraint => { const [name, columns] = unq.split(';'); return { name, columns: columns.split(',') }; }, squashFK: (fk: ForeignKey) => { return `${fk.name};${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${ fk.onUpdate ?? '' };${fk.onDelete ?? ''}`; }, unsquashFK: (input: string): ForeignKey => { const [ name, tableFrom, columnsFromStr, tableTo, columnsToStr, onUpdate, onDelete, ] = input.split(';'); const result: ForeignKey = fk.parse({ name, tableFrom, columnsFrom: columnsFromStr.split(','), tableTo, columnsTo: columnsToStr.split(','), onUpdate, onDelete, }); return result; }, squashCheck: (input: CheckConstraint): string => { return `${input.name};${input.value}`; }, unsquashCheck: (input: string): CheckConstraint => { const [name, value] = input.split(';'); return { name, value }; }, squashView: (view: View): string => { return `${view.algorithm};${view.sqlSecurity};${view.withCheckOption}`; }, unsquashView: (meta: string): SquasherViewMeta => { const [algorithm, sqlSecurity, withCheckOption] = meta.split(';'); const toReturn = { algorithm: algorithm, sqlSecurity: sqlSecurity, withCheckOption: withCheckOption !== 'undefined' ? withCheckOption : undefined, }; return viewMeta.parse(toReturn); }, }; export const squashMysqlSchemeV4 = ( json: MySqlSchemaV4, ): MySqlSchemaSquashedV4 => { const mappedTables = Object.fromEntries( Object.entries(json.tables).map((it) => { const squashedIndexes = mapValues(it[1].indexes, (index) => { return MySqlSquasher.squashIdx(index); }); const squashedFKs = mapValues(it[1].foreignKeys, (fk) => { return MySqlSquasher.squashFK(fk); }); return [ it[0], { name: it[1].name, schema: it[1].schema, columns: it[1].columns, indexes: squashedIndexes, foreignKeys: squashedFKs, }, ]; }), ); return { version: '4', dialect: json.dialect, tables: mappedTables, schemas: json.schemas, }; }; export const squashMysqlScheme = (json: MySqlSchema): MySqlSchemaSquashed => { const mappedTables = Object.fromEntries( Object.entries(json.tables).map((it) => { const squashedIndexes = mapValues(it[1].indexes, (index) => { return MySqlSquasher.squashIdx(index); }); const squashedFKs = mapValues(it[1].foreignKeys, (fk) => { return MySqlSquasher.squashFK(fk); }); const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { return MySqlSquasher.squashPK(pk); }); const squashedUniqueConstraints = mapValues( it[1].uniqueConstraints, (unq) => { return MySqlSquasher.squashUnique(unq); }, ); const squashedCheckConstraints = mapValues(it[1].checkConstraint, (check) => { return MySqlSquasher.squashCheck(check); }); return [ it[0], { name: it[1].name, columns: it[1].columns, indexes: squashedIndexes, foreignKeys: squashedFKs, compositePrimaryKeys: squashedPKs, uniqueConstraints: squashedUniqueConstraints, checkConstraints: squashedCheckConstraints, }, ]; }), ); const mappedViews = Object.fromEntries( Object.entries(json.views).map(([key, value]) => { const meta = MySqlSquasher.squashView(value); return [key, { name: value.name, isExisting: value.isExisting, columns: value.columns, definition: value.definition, meta, }]; }), ); return { version: '5', dialect: json.dialect, tables: mappedTables, views: mappedViews, }; }; export const mysqlSchema = schema; export const mysqlSchemaV3 = schemaV3; export const mysqlSchemaV4 = schemaV4; export const mysqlSchemaV5 = schemaV5; export const mysqlSchemaSquashed = schemaSquashed; // no prev version export const backwardCompatibleMysqlSchema = union([mysqlSchemaV5, schema]); export const dryMySql = mysqlSchema.parse({ version: '5', dialect: 'mysql', id: originUUID, prevId: '', tables: {}, schemas: {}, views: {}, _meta: { schemas: {}, tables: {}, columns: {}, }, }); ================================================ FILE: drizzle-kit/src/serializer/mysqlSerializer.ts ================================================ import chalk from 'chalk'; import { getTableName, is, SQL } from 'drizzle-orm'; import { AnyMySqlTable, getTableConfig, getViewConfig, MySqlColumn, MySqlDialect, MySqlView, type PrimaryKey as PrimaryKeyORM, uniqueKeyName, } from 'drizzle-orm/mysql-core'; import { RowDataPacket } from 'mysql2/promise'; import { CasingType } from 'src/cli/validations/common'; import { withStyle } from '../cli/validations/outputs'; import { IntrospectStage, IntrospectStatus } from '../cli/views'; import { CheckConstraint, Column, ForeignKey, Index, MySqlKitInternals, MySqlSchemaInternal, PrimaryKey, Table, UniqueConstraint, View, } from '../serializer/mysqlSchema'; import { type DB, escapeSingleQuotes } from '../utils'; import { getColumnCasing, sqlToStr } from './utils'; export const indexName = (tableName: string, columns: string[]) => { return `${tableName}_${columns.join('_')}_index`; }; const handleEnumType = (type: string) => { let str = type.split('(')[1]; str = str.substring(0, str.length - 1); const values = str.split(',').map((v) => `'${escapeSingleQuotes(v.substring(1, v.length - 1))}'`); return `enum(${values.join(',')})`; }; export const generateMySqlSnapshot = ( tables: AnyMySqlTable[], views: MySqlView[], casing: CasingType | undefined, ): MySqlSchemaInternal => { const dialect = new MySqlDialect({ casing }); const result: Record = {}; const resultViews: Record = {}; const internal: MySqlKitInternals = { tables: {}, indexes: {} }; for (const table of tables) { const { name: tableName, columns, indexes, foreignKeys, schema, checks, primaryKeys, uniqueConstraints, } = getTableConfig(table); const columnsObject: Record = {}; const indexesObject: Record = {}; const foreignKeysObject: Record = {}; const primaryKeysObject: Record = {}; const uniqueConstraintObject: Record = {}; const checkConstraintObject: Record = {}; // this object will help to identify same check names let checksInTable: Record = {}; columns.forEach((column) => { const name = getColumnCasing(column, casing); const notNull: boolean = column.notNull; const sqlType = column.getSQLType(); const sqlTypeLowered = sqlType.toLowerCase(); const autoIncrement = typeof (column as any).autoIncrement === 'undefined' ? false : (column as any).autoIncrement; const generated = column.generated; const columnToSet: Column = { name, type: sqlType.startsWith('enum') ? handleEnumType(sqlType) : sqlType, primaryKey: false, // If field is autoincrement it's notNull by default // notNull: autoIncrement ? true : notNull, notNull, autoincrement: autoIncrement, onUpdate: (column as any).hasOnUpdateNow, generated: generated ? { as: is(generated.as, SQL) ? dialect.sqlToQuery(generated.as as SQL).sql : typeof generated.as === 'function' ? dialect.sqlToQuery(generated.as() as SQL).sql : (generated.as as any), type: generated.mode ?? 'stored', } : undefined, }; if (column.primary) { primaryKeysObject[`${tableName}_${name}`] = { name: `${tableName}_${name}`, columns: [name], }; } if (column.isUnique) { const existingUnique = uniqueConstraintObject[column.uniqueName!]; if (typeof existingUnique !== 'undefined') { console.log( `\n${ withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ chalk.underline.blue( tableName, ) } table. The unique constraint ${ chalk.underline.blue( column.uniqueName, ) } on the ${ chalk.underline.blue( name, ) } column is confilcting with a unique constraint name already defined for ${ chalk.underline.blue( existingUnique.columns.join(','), ) } columns\n`) }`, ); process.exit(1); } uniqueConstraintObject[column.uniqueName!] = { name: column.uniqueName!, columns: [columnToSet.name], }; } if (column.default !== undefined) { if (is(column.default, SQL)) { columnToSet.default = sqlToStr(column.default, casing); } else { if (typeof column.default === 'string') { columnToSet.default = `'${escapeSingleQuotes(column.default)}'`; } else { if (sqlTypeLowered === 'json') { columnToSet.default = `'${JSON.stringify(column.default)}'`; } else if (column.default instanceof Date) { if (sqlTypeLowered === 'date') { columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; } else if ( sqlTypeLowered.startsWith('datetime') || sqlTypeLowered.startsWith('timestamp') ) { columnToSet.default = `'${ column.default .toISOString() .replace('T', ' ') .slice(0, 23) }'`; } } else { columnToSet.default = column.default; } } if (['blob', 'text', 'json'].includes(column.getSQLType())) { columnToSet.default = `(${columnToSet.default})`; } } } columnsObject[name] = columnToSet; }); primaryKeys.map((pk: PrimaryKeyORM) => { const originalColumnNames = pk.columns.map((c) => c.name); const columnNames = pk.columns.map((c: any) => getColumnCasing(c, casing)); let name = pk.getName(); if (casing !== undefined) { for (let i = 0; i < originalColumnNames.length; i++) { name = name.replace(originalColumnNames[i], columnNames[i]); } } primaryKeysObject[name] = { name, columns: columnNames, }; // all composite pk's should be treated as notNull for (const column of pk.columns) { columnsObject[getColumnCasing(column, casing)].notNull = true; } }); uniqueConstraints?.map((unq) => { const columnNames = unq.columns.map((c) => getColumnCasing(c, casing)); const name = unq.name ?? uniqueKeyName(table, columnNames); const existingUnique = uniqueConstraintObject[name]; if (typeof existingUnique !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated unique constraint names in ${ chalk.underline.blue( tableName, ) } table. \nThe unique constraint ${ chalk.underline.blue( name, ) } on the ${ chalk.underline.blue( columnNames.join(','), ) } columns is confilcting with a unique constraint name already defined for ${ chalk.underline.blue( existingUnique.columns.join(','), ) } columns\n`, ) }`, ); process.exit(1); } uniqueConstraintObject[name] = { name: unq.name!, columns: columnNames, }; }); const fks: ForeignKey[] = foreignKeys.map((fk) => { const tableFrom = tableName; const onDelete = fk.onDelete ?? 'no action'; const onUpdate = fk.onUpdate ?? 'no action'; const reference = fk.reference(); const referenceFT = reference.foreignTable; // eslint-disable-next-line @typescript-eslint/no-unsafe-argument const tableTo = getTableName(referenceFT); const originalColumnsFrom = reference.columns.map((it) => it.name); const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); const originalColumnsTo = reference.foreignColumns.map((it) => it.name); const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); let name = fk.getName(); if (casing !== undefined) { for (let i = 0; i < originalColumnsFrom.length; i++) { name = name.replace(originalColumnsFrom[i], columnsFrom[i]); } for (let i = 0; i < originalColumnsTo.length; i++) { name = name.replace(originalColumnsTo[i], columnsTo[i]); } } return { name, tableFrom, tableTo, columnsFrom, columnsTo, onDelete, onUpdate, } as ForeignKey; }); fks.forEach((it) => { foreignKeysObject[it.name] = it; }); indexes.forEach((value) => { const columns = value.config.columns; const name = value.config.name; let indexColumns = columns.map((it) => { if (is(it, SQL)) { const sql = dialect.sqlToQuery(it, 'indexes').sql; if (typeof internal!.indexes![name] === 'undefined') { internal!.indexes![name] = { columns: { [sql]: { isExpression: true, }, }, }; } else { if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') { internal!.indexes![name]!.columns[sql] = { isExpression: true, }; } else { internal!.indexes![name]!.columns[sql]!.isExpression = true; } } return sql; } else { return `${getColumnCasing(it, casing)}`; } }); if (value.config.unique) { if (typeof uniqueConstraintObject[name] !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated unique constraint names in ${ chalk.underline.blue( tableName, ) } table. \nThe unique index ${ chalk.underline.blue( name, ) } on the ${ chalk.underline.blue( indexColumns.join(','), ) } columns is confilcting with a unique constraint name already defined for ${ chalk.underline.blue( uniqueConstraintObject[name].columns.join(','), ) } columns\n`, ) }`, ); process.exit(1); } } else { if (typeof foreignKeysObject[name] !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `In MySQL, when creating a foreign key, an index is automatically generated with the same name as the foreign key constraint.\n\nWe have encountered a collision between the index name on columns ${ chalk.underline.blue( indexColumns.join(','), ) } and the foreign key on columns ${ chalk.underline.blue( foreignKeysObject[name].columnsFrom.join(','), ) }. Please change either the index name or the foreign key name. For more information, please refer to https://dev.mysql.com/doc/refman/8.0/en/constraint-foreign-key.html\n `, ) }`, ); process.exit(1); } } indexesObject[name] = { name, columns: indexColumns, isUnique: value.config.unique ?? false, using: value.config.using, algorithm: value.config.algorithm, lock: value.config.lock, }; }); checks.forEach((check) => { check; const checkName = check.name; if (typeof checksInTable[tableName] !== 'undefined') { if (checksInTable[tableName].includes(check.name)) { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated check constraint name in ${ chalk.underline.blue( tableName, ) }. Please rename your check constraint in the ${ chalk.underline.blue( tableName, ) } table`, ) }`, ); process.exit(1); } checksInTable[tableName].push(checkName); } else { checksInTable[tableName] = [check.name]; } checkConstraintObject[checkName] = { name: checkName, value: dialect.sqlToQuery(check.value).sql, }; }); // only handle tables without schemas if (!schema) { result[tableName] = { name: tableName, columns: columnsObject, indexes: indexesObject, foreignKeys: foreignKeysObject, compositePrimaryKeys: primaryKeysObject, uniqueConstraints: uniqueConstraintObject, checkConstraint: checkConstraintObject, }; } } for (const view of views) { const { isExisting, name, query, schema, selectedFields, algorithm, sqlSecurity, withCheckOption, } = getViewConfig(view); const columnsObject: Record = {}; const existingView = resultViews[name]; if (typeof existingView !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated view name across ${ chalk.underline.blue( schema ?? 'public', ) } schema. Please rename your view`, ) }`, ); process.exit(1); } for (const key in selectedFields) { if (is(selectedFields[key], MySqlColumn)) { const column = selectedFields[key]; const notNull: boolean = column.notNull; const sqlTypeLowered = column.getSQLType().toLowerCase(); const autoIncrement = typeof (column as any).autoIncrement === 'undefined' ? false : (column as any).autoIncrement; const generated = column.generated; const columnToSet: Column = { name: column.name, type: column.getSQLType(), primaryKey: false, // If field is autoincrement it's notNull by default // notNull: autoIncrement ? true : notNull, notNull, autoincrement: autoIncrement, onUpdate: (column as any).hasOnUpdateNow, generated: generated ? { as: is(generated.as, SQL) ? dialect.sqlToQuery(generated.as as SQL).sql : typeof generated.as === 'function' ? dialect.sqlToQuery(generated.as() as SQL).sql : (generated.as as any), type: generated.mode ?? 'stored', } : undefined, }; if (column.default !== undefined) { if (is(column.default, SQL)) { columnToSet.default = sqlToStr(column.default, casing); } else { if (typeof column.default === 'string') { columnToSet.default = `'${column.default}'`; } else { if (sqlTypeLowered === 'json') { columnToSet.default = `'${JSON.stringify(column.default)}'`; } else if (column.default instanceof Date) { if (sqlTypeLowered === 'date') { columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; } else if ( sqlTypeLowered.startsWith('datetime') || sqlTypeLowered.startsWith('timestamp') ) { columnToSet.default = `'${ column.default .toISOString() .replace('T', ' ') .slice(0, 23) }'`; } } else { columnToSet.default = column.default; } } if (['blob', 'text', 'json'].includes(column.getSQLType())) { columnToSet.default = `(${columnToSet.default})`; } } } columnsObject[column.name] = columnToSet; } } resultViews[name] = { columns: columnsObject, name, isExisting, definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, withCheckOption, algorithm: algorithm ?? 'undefined', // set default values sqlSecurity: sqlSecurity ?? 'definer', // set default values }; } return { version: '5', dialect: 'mysql', tables: result, views: resultViews, _meta: { tables: {}, columns: {}, }, internal, }; }; function clearDefaults(defaultValue: any, collate: string) { if (typeof collate === 'undefined' || collate === null) { collate = `utf8mb4`; } let resultDefault = defaultValue; collate = `_${collate}`; if (defaultValue.startsWith(collate)) { resultDefault = resultDefault .substring(collate.length, defaultValue.length) .replace(/\\/g, ''); if (resultDefault.startsWith("'") && resultDefault.endsWith("'")) { return `('${escapeSingleQuotes(resultDefault.substring(1, resultDefault.length - 1))}')`; } else { return `'${escapeSingleQuotes(resultDefault.substring(1, resultDefault.length - 1))}'`; } } else { return `(${resultDefault})`; } } export const fromDatabase = async ( db: DB, inputSchema: string, tablesFilter: (table: string) => boolean = (table) => true, progressCallback?: ( stage: IntrospectStage, count: number, status: IntrospectStatus, ) => void, ): Promise => { const result: Record = {}; const internals: MySqlKitInternals = { tables: {}, indexes: {} }; const columns = await db.query(`select * from information_schema.columns where table_schema = '${inputSchema}' and table_name != '__drizzle_migrations' order by table_name, ordinal_position;`); const response = columns as RowDataPacket[]; const schemas: string[] = []; let columnsCount = 0; let tablesCount = new Set(); let indexesCount = 0; let foreignKeysCount = 0; let checksCount = 0; let viewsCount = 0; const idxs = await db.query( `select * from INFORMATION_SCHEMA.STATISTICS WHERE INFORMATION_SCHEMA.STATISTICS.TABLE_SCHEMA = '${inputSchema}' and INFORMATION_SCHEMA.STATISTICS.INDEX_NAME != 'PRIMARY';`, ); const idxRows = idxs as RowDataPacket[]; for (const column of response) { if (!tablesFilter(column['TABLE_NAME'] as string)) continue; columnsCount += 1; if (progressCallback) { progressCallback('columns', columnsCount, 'fetching'); } const schema: string = column['TABLE_SCHEMA']; const tableName = column['TABLE_NAME']; tablesCount.add(`${schema}.${tableName}`); if (progressCallback) { progressCallback('columns', tablesCount.size, 'fetching'); } const columnName: string = column['COLUMN_NAME']; const isNullable = column['IS_NULLABLE'] === 'YES'; // 'YES', 'NO' const dataType = column['DATA_TYPE']; // varchar const columnType = column['COLUMN_TYPE']; // varchar(256) const isPrimary = column['COLUMN_KEY'] === 'PRI'; // 'PRI', '' const columnDefault: string = column['COLUMN_DEFAULT']; const collation: string = column['CHARACTER_SET_NAME']; const geenratedExpression: string = column['GENERATION_EXPRESSION']; let columnExtra = column['EXTRA']; let isAutoincrement = false; // 'auto_increment', '' let isDefaultAnExpression = false; // 'auto_increment', '' if (typeof column['EXTRA'] !== 'undefined') { columnExtra = column['EXTRA']; isAutoincrement = column['EXTRA'] === 'auto_increment'; // 'auto_increment', '' isDefaultAnExpression = column['EXTRA'].includes('DEFAULT_GENERATED'); // 'auto_increment', '' } // if (isPrimary) { // if (typeof tableToPk[tableName] === "undefined") { // tableToPk[tableName] = [columnName]; // } else { // tableToPk[tableName].push(columnName); // } // } if (schema !== inputSchema) { schemas.push(schema); } const table = result[tableName]; // let changedType = columnType.replace("bigint unsigned", "serial") let changedType = columnType; if (columnType === 'bigint unsigned' && !isNullable && isAutoincrement) { // check unique here const uniqueIdx = idxRows.filter( (it) => it['COLUMN_NAME'] === columnName && it['TABLE_NAME'] === tableName && it['NON_UNIQUE'] === 0, ); if (uniqueIdx && uniqueIdx.length === 1) { changedType = columnType.replace('bigint unsigned', 'serial'); } } if (columnType.includes('decimal(10,0)')) { changedType = columnType.replace('decimal(10,0)', 'decimal'); } let onUpdate: boolean | undefined = undefined; if ( columnType.startsWith('timestamp') && typeof columnExtra !== 'undefined' && columnExtra.includes('on update CURRENT_TIMESTAMP') ) { onUpdate = true; } const newColumn: Column = { default: columnDefault === null || columnDefault === undefined ? undefined : /^-?[\d.]+(?:e-?\d+)?$/.test(columnDefault) && !['decimal', 'char', 'varchar'].some((type) => columnType.startsWith(type)) ? Number(columnDefault) : isDefaultAnExpression ? clearDefaults(columnDefault, collation) : `'${escapeSingleQuotes(columnDefault)}'`, autoincrement: isAutoincrement, name: columnName, type: changedType, primaryKey: false, notNull: !isNullable, onUpdate, generated: geenratedExpression ? { as: geenratedExpression, type: columnExtra === 'VIRTUAL GENERATED' ? 'virtual' : 'stored', } : undefined, }; // Set default to internal object if (isDefaultAnExpression) { if (typeof internals!.tables![tableName] === 'undefined') { internals!.tables![tableName] = { columns: { [columnName]: { isDefaultAnExpression: true, }, }, }; } else { if ( typeof internals!.tables![tableName]!.columns[columnName] === 'undefined' ) { internals!.tables![tableName]!.columns[columnName] = { isDefaultAnExpression: true, }; } else { internals!.tables![tableName]!.columns[ columnName ]!.isDefaultAnExpression = true; } } } if (!table) { result[tableName] = { name: tableName, columns: { [columnName]: newColumn, }, compositePrimaryKeys: {}, indexes: {}, foreignKeys: {}, uniqueConstraints: {}, checkConstraint: {}, }; } else { result[tableName]!.columns[columnName] = newColumn; } } const tablePks = await db.query( `SELECT table_name, column_name, ordinal_position FROM information_schema.table_constraints t LEFT JOIN information_schema.key_column_usage k USING(constraint_name,table_schema,table_name) WHERE t.constraint_type='PRIMARY KEY' and table_name != '__drizzle_migrations' AND t.table_schema = '${inputSchema}' ORDER BY ordinal_position`, ); const tableToPk: { [tname: string]: string[] } = {}; const tableToPkRows = tablePks as RowDataPacket[]; for (const tableToPkRow of tableToPkRows) { const tableName: string = tableToPkRow['TABLE_NAME']; const columnName: string = tableToPkRow['COLUMN_NAME']; const position: string = tableToPkRow['ordinal_position']; if (typeof result[tableName] === 'undefined') { continue; } if (typeof tableToPk[tableName] === 'undefined') { tableToPk[tableName] = [columnName]; } else { tableToPk[tableName].push(columnName); } } for (const [key, value] of Object.entries(tableToPk)) { // if (value.length > 1) { result[key].compositePrimaryKeys = { [`${key}_${value.join('_')}`]: { name: `${key}_${value.join('_')}`, columns: value, }, }; // } else if (value.length === 1) { // result[key].columns[value[0]].primaryKey = true; // } else { // } } if (progressCallback) { progressCallback('columns', columnsCount, 'done'); progressCallback('tables', tablesCount.size, 'done'); } try { const fks = await db.query( `SELECT kcu.TABLE_SCHEMA, kcu.TABLE_NAME, kcu.CONSTRAINT_NAME, kcu.COLUMN_NAME, kcu.REFERENCED_TABLE_SCHEMA, kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME, rc.UPDATE_RULE, rc.DELETE_RULE FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu LEFT JOIN information_schema.referential_constraints rc ON kcu.CONSTRAINT_NAME = rc.CONSTRAINT_NAME WHERE kcu.TABLE_SCHEMA = '${inputSchema}' AND kcu.CONSTRAINT_NAME != 'PRIMARY' AND kcu.REFERENCED_TABLE_NAME IS NOT NULL;`, ); const fkRows = fks as RowDataPacket[]; for (const fkRow of fkRows) { foreignKeysCount += 1; if (progressCallback) { progressCallback('fks', foreignKeysCount, 'fetching'); } const tableSchema = fkRow['TABLE_SCHEMA']; const tableName: string = fkRow['TABLE_NAME']; const constraintName = fkRow['CONSTRAINT_NAME']; const columnName: string = fkRow['COLUMN_NAME']; const refTableSchema = fkRow['REFERENCED_TABLE_SCHEMA']; const refTableName = fkRow['REFERENCED_TABLE_NAME']; const refColumnName: string = fkRow['REFERENCED_COLUMN_NAME']; const updateRule: string = fkRow['UPDATE_RULE']; const deleteRule = fkRow['DELETE_RULE']; const tableInResult = result[tableName]; if (typeof tableInResult === 'undefined') continue; if (typeof tableInResult.foreignKeys[constraintName] !== 'undefined') { tableInResult.foreignKeys[constraintName]!.columnsFrom.push(columnName); tableInResult.foreignKeys[constraintName]!.columnsTo.push( refColumnName, ); } else { tableInResult.foreignKeys[constraintName] = { name: constraintName, tableFrom: tableName, tableTo: refTableName, columnsFrom: [columnName], columnsTo: [refColumnName], onDelete: deleteRule?.toLowerCase(), onUpdate: updateRule?.toLowerCase(), }; } tableInResult.foreignKeys[constraintName]!.columnsFrom = [ ...new Set(tableInResult.foreignKeys[constraintName]!.columnsFrom), ]; tableInResult.foreignKeys[constraintName]!.columnsTo = [ ...new Set(tableInResult.foreignKeys[constraintName]!.columnsTo), ]; } } catch (e) { // console.log(`Can't proccess foreign keys`); } if (progressCallback) { progressCallback('fks', foreignKeysCount, 'done'); } for (const idxRow of idxRows) { const tableSchema = idxRow['TABLE_SCHEMA']; const tableName = idxRow['TABLE_NAME']; const constraintName = idxRow['INDEX_NAME']; const columnName: string = idxRow['COLUMN_NAME']; const isUnique = idxRow['NON_UNIQUE'] === 0; const tableInResult = result[tableName]; if (typeof tableInResult === 'undefined') continue; // if (tableInResult.columns[columnName].type === "serial") continue; indexesCount += 1; if (progressCallback) { progressCallback('indexes', indexesCount, 'fetching'); } if (isUnique) { if ( typeof tableInResult.uniqueConstraints[constraintName] !== 'undefined' ) { tableInResult.uniqueConstraints[constraintName]!.columns.push( columnName, ); } else { tableInResult.uniqueConstraints[constraintName] = { name: constraintName, columns: [columnName], }; } } else { // in MySQL FK creates index by default. Name of index is the same as fk constraint name // so for introspect we will just skip it if (typeof tableInResult.foreignKeys[constraintName] === 'undefined') { if (typeof tableInResult.indexes[constraintName] !== 'undefined') { tableInResult.indexes[constraintName]!.columns.push(columnName); } else { tableInResult.indexes[constraintName] = { name: constraintName, columns: [columnName], isUnique: isUnique, }; } } } } const views = await db.query( `select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema = '${inputSchema}';`, ); const resultViews: Record = {}; viewsCount = views.length; if (progressCallback) { progressCallback('views', viewsCount, 'fetching'); } for await (const view of views) { const viewName = view['TABLE_NAME']; const definition = view['VIEW_DEFINITION']; const withCheckOption = view['CHECK_OPTION'] === 'NONE' ? undefined : view['CHECK_OPTION'].toLowerCase(); const sqlSecurity = view['SECURITY_TYPE'].toLowerCase(); const [createSqlStatement] = await db.query(`SHOW CREATE VIEW \`${viewName}\`;`); const algorithmMatch = createSqlStatement['Create View'].match(/ALGORITHM=([^ ]+)/); const algorithm = algorithmMatch ? algorithmMatch[1].toLowerCase() : undefined; const columns = result[viewName].columns; delete result[viewName]; resultViews[viewName] = { columns: columns, isExisting: false, name: viewName, algorithm, definition, sqlSecurity, withCheckOption, }; } if (progressCallback) { progressCallback('indexes', indexesCount, 'done'); // progressCallback("enums", 0, "fetching"); progressCallback('enums', 0, 'done'); progressCallback('views', viewsCount, 'done'); } const checkConstraints = await db.query( `SELECT tc.table_name, tc.constraint_name, cc.check_clause FROM information_schema.table_constraints tc JOIN information_schema.check_constraints cc ON tc.constraint_name = cc.constraint_name WHERE tc.constraint_schema = '${inputSchema}' AND tc.constraint_type = 'CHECK';`, ); checksCount += checkConstraints.length; if (progressCallback) { progressCallback('checks', checksCount, 'fetching'); } for (const checkConstraintRow of checkConstraints) { const constraintName = checkConstraintRow['CONSTRAINT_NAME']; const constraintValue = checkConstraintRow['CHECK_CLAUSE']; const tableName = checkConstraintRow['TABLE_NAME']; const tableInResult = result[tableName]; // if (typeof tableInResult === 'undefined') continue; tableInResult.checkConstraint[constraintName] = { name: constraintName, value: constraintValue, }; } if (progressCallback) { progressCallback('checks', checksCount, 'done'); } return { version: '5', dialect: 'mysql', tables: result, views: resultViews, _meta: { tables: {}, columns: {}, }, internal: internals, }; }; ================================================ FILE: drizzle-kit/src/serializer/pgImports.ts ================================================ import { is } from 'drizzle-orm'; import type { AnyPgTable, PgEnum, PgMaterializedView, PgSequence, PgView } from 'drizzle-orm/pg-core'; import { isPgEnum, isPgMaterializedView, isPgSequence, isPgView, PgPolicy, PgRole, PgSchema, PgTable, } from 'drizzle-orm/pg-core'; import { Relations } from 'drizzle-orm/relations'; import { safeRegister } from '../cli/commands/utils'; export const prepareFromExports = (exports: Record) => { const tables: AnyPgTable[] = []; const enums: PgEnum[] = []; const schemas: PgSchema[] = []; const sequences: PgSequence[] = []; const roles: PgRole[] = []; const policies: PgPolicy[] = []; const views: PgView[] = []; const matViews: PgMaterializedView[] = []; const relations: Relations[] = []; const i0values = Object.values(exports); i0values.forEach((t) => { if (isPgEnum(t)) { enums.push(t); return; } if (is(t, PgTable)) { tables.push(t); } if (is(t, PgSchema)) { schemas.push(t); } if (isPgView(t)) { views.push(t); } if (isPgMaterializedView(t)) { matViews.push(t); } if (isPgSequence(t)) { sequences.push(t); } if (is(t, PgRole)) { roles.push(t); } if (is(t, PgPolicy)) { policies.push(t); } if (is(t, Relations)) { relations.push(t); } }); return { tables, enums, schemas, sequences, views, matViews, roles, policies, relations }; }; export const prepareFromPgImports = async (imports: string[]) => { const tables: AnyPgTable[] = []; const enums: PgEnum[] = []; const schemas: PgSchema[] = []; const sequences: PgSequence[] = []; const views: PgView[] = []; const roles: PgRole[] = []; const policies: PgPolicy[] = []; const matViews: PgMaterializedView[] = []; const relations: Relations[] = []; await safeRegister(async () => { for (let i = 0; i < imports.length; i++) { const it = imports[i]; const i0: Record = require(`${it}`); const prepared = prepareFromExports(i0); tables.push(...prepared.tables); enums.push(...prepared.enums); schemas.push(...prepared.schemas); sequences.push(...prepared.sequences); views.push(...prepared.views); matViews.push(...prepared.matViews); roles.push(...prepared.roles); policies.push(...prepared.policies); relations.push(...prepared.relations); } }); return { tables: Array.from(new Set(tables)), enums, schemas, sequences, views, matViews, roles, policies, relations, }; }; ================================================ FILE: drizzle-kit/src/serializer/pgSchema.ts ================================================ import { mapValues, originUUID, snapshotVersion } from '../global'; import { any, array, boolean, enum as enumType, literal, number, object, record, string, TypeOf, union } from 'zod'; const indexV2 = object({ name: string(), columns: record( string(), object({ name: string(), }), ), isUnique: boolean(), }).strict(); const columnV2 = object({ name: string(), type: string(), primaryKey: boolean(), notNull: boolean(), default: any().optional(), references: string().optional(), }).strict(); const tableV2 = object({ name: string(), columns: record(string(), columnV2), indexes: record(string(), indexV2), }).strict(); const enumSchemaV1 = object({ name: string(), values: record(string(), string()), }).strict(); const enumSchema = object({ name: string(), schema: string(), values: string().array(), }).strict(); export const pgSchemaV2 = object({ version: literal('2'), tables: record(string(), tableV2), enums: record(string(), enumSchemaV1), }).strict(); // ------- V1 -------- const references = object({ foreignKeyName: string(), table: string(), column: string(), onDelete: string().optional(), onUpdate: string().optional(), }).strict(); const columnV1 = object({ name: string(), type: string(), primaryKey: boolean(), notNull: boolean(), default: any().optional(), references: references.optional(), }).strict(); const tableV1 = object({ name: string(), columns: record(string(), columnV1), indexes: record(string(), indexV2), }).strict(); export const pgSchemaV1 = object({ version: literal('1'), tables: record(string(), tableV1), enums: record(string(), enumSchemaV1), }).strict(); const indexColumn = object({ expression: string(), isExpression: boolean(), asc: boolean(), nulls: string().optional(), opclass: string().optional(), }); export type IndexColumnType = TypeOf; const index = object({ name: string(), columns: indexColumn.array(), isUnique: boolean(), with: record(string(), any()).optional(), method: string().default('btree'), where: string().optional(), concurrently: boolean().default(false), }).strict(); const indexV4 = object({ name: string(), columns: string().array(), isUnique: boolean(), with: record(string(), string()).optional(), method: string().default('btree'), where: string().optional(), concurrently: boolean().default(false), }).strict(); const indexV5 = object({ name: string(), columns: string().array(), isUnique: boolean(), with: record(string(), string()).optional(), method: string().default('btree'), where: string().optional(), concurrently: boolean().default(false), }).strict(); const indexV6 = object({ name: string(), columns: string().array(), isUnique: boolean(), with: record(string(), string()).optional(), method: string().default('btree'), where: string().optional(), concurrently: boolean().default(false), }).strict(); const fk = object({ name: string(), tableFrom: string(), columnsFrom: string().array(), tableTo: string(), schemaTo: string().optional(), columnsTo: string().array(), onUpdate: string().optional(), onDelete: string().optional(), }).strict(); export const sequenceSchema = object({ name: string(), increment: string().optional(), minValue: string().optional(), maxValue: string().optional(), startWith: string().optional(), cache: string().optional(), cycle: boolean().optional(), schema: string(), }).strict(); export const roleSchema = object({ name: string(), createDb: boolean().optional(), createRole: boolean().optional(), inherit: boolean().optional(), }).strict(); export const sequenceSquashed = object({ name: string(), schema: string(), values: string(), }).strict(); const columnV7 = object({ name: string(), type: string(), typeSchema: string().optional(), primaryKey: boolean(), notNull: boolean(), default: any().optional(), isUnique: any().optional(), uniqueName: string().optional(), nullsNotDistinct: boolean().optional(), }).strict(); const column = object({ name: string(), type: string(), typeSchema: string().optional(), primaryKey: boolean(), notNull: boolean(), default: any().optional(), isUnique: any().optional(), uniqueName: string().optional(), nullsNotDistinct: boolean().optional(), generated: object({ type: literal('stored'), as: string(), }).optional(), identity: sequenceSchema .merge(object({ type: enumType(['always', 'byDefault']) })) .optional(), }).strict(); const checkConstraint = object({ name: string(), value: string(), }).strict(); const columnSquashed = object({ name: string(), type: string(), typeSchema: string().optional(), primaryKey: boolean(), notNull: boolean(), default: any().optional(), isUnique: any().optional(), uniqueName: string().optional(), nullsNotDistinct: boolean().optional(), generated: object({ type: literal('stored'), as: string(), }).optional(), identity: string().optional(), }).strict(); const tableV3 = object({ name: string(), columns: record(string(), column), indexes: record(string(), index), foreignKeys: record(string(), fk), }).strict(); const compositePK = object({ name: string(), columns: string().array(), }).strict(); const uniqueConstraint = object({ name: string(), columns: string().array(), nullsNotDistinct: boolean(), }).strict(); export const policy = object({ name: string(), as: enumType(['PERMISSIVE', 'RESTRICTIVE']).optional(), for: enumType(['ALL', 'SELECT', 'INSERT', 'UPDATE', 'DELETE']).optional(), to: string().array().optional(), using: string().optional(), withCheck: string().optional(), on: string().optional(), schema: string().optional(), }).strict(); export const policySquashed = object({ name: string(), values: string(), }).strict(); const viewWithOption = object({ checkOption: enumType(['local', 'cascaded']).optional(), securityBarrier: boolean().optional(), securityInvoker: boolean().optional(), }).strict(); const matViewWithOption = object({ fillfactor: number().optional(), toastTupleTarget: number().optional(), parallelWorkers: number().optional(), autovacuumEnabled: boolean().optional(), vacuumIndexCleanup: enumType(['auto', 'off', 'on']).optional(), vacuumTruncate: boolean().optional(), autovacuumVacuumThreshold: number().optional(), autovacuumVacuumScaleFactor: number().optional(), autovacuumVacuumCostDelay: number().optional(), autovacuumVacuumCostLimit: number().optional(), autovacuumFreezeMinAge: number().optional(), autovacuumFreezeMaxAge: number().optional(), autovacuumFreezeTableAge: number().optional(), autovacuumMultixactFreezeMinAge: number().optional(), autovacuumMultixactFreezeMaxAge: number().optional(), autovacuumMultixactFreezeTableAge: number().optional(), logAutovacuumMinDuration: number().optional(), userCatalogTable: boolean().optional(), }).strict(); export const mergedViewWithOption = viewWithOption.merge(matViewWithOption).strict(); export const view = object({ name: string(), schema: string(), columns: record(string(), column), definition: string().optional(), materialized: boolean(), with: mergedViewWithOption.optional(), isExisting: boolean(), withNoData: boolean().optional(), using: string().optional(), tablespace: string().optional(), }).strict(); const tableV4 = object({ name: string(), schema: string(), columns: record(string(), column), indexes: record(string(), indexV4), foreignKeys: record(string(), fk), }).strict(); const tableV5 = object({ name: string(), schema: string(), columns: record(string(), column), indexes: record(string(), indexV5), foreignKeys: record(string(), fk), compositePrimaryKeys: record(string(), compositePK), uniqueConstraints: record(string(), uniqueConstraint).default({}), }).strict(); const tableV6 = object({ name: string(), schema: string(), columns: record(string(), column), indexes: record(string(), indexV6), foreignKeys: record(string(), fk), compositePrimaryKeys: record(string(), compositePK), uniqueConstraints: record(string(), uniqueConstraint).default({}), }).strict(); const tableV7 = object({ name: string(), schema: string(), columns: record(string(), columnV7), indexes: record(string(), index), foreignKeys: record(string(), fk), compositePrimaryKeys: record(string(), compositePK), uniqueConstraints: record(string(), uniqueConstraint).default({}), }).strict(); const table = object({ name: string(), schema: string(), columns: record(string(), column), indexes: record(string(), index), foreignKeys: record(string(), fk), compositePrimaryKeys: record(string(), compositePK), uniqueConstraints: record(string(), uniqueConstraint).default({}), policies: record(string(), policy).default({}), checkConstraints: record(string(), checkConstraint).default({}), isRLSEnabled: boolean().default(false), }).strict(); const schemaHash = object({ id: string(), prevId: string(), }); export const kitInternals = object({ tables: record( string(), object({ columns: record( string(), object({ isArray: boolean().optional(), dimensions: number().optional(), rawType: string().optional(), isDefaultAnExpression: boolean().optional(), }).optional(), ), }).optional(), ), }).optional(); export const pgSchemaInternalV3 = object({ version: literal('3'), dialect: literal('pg'), tables: record(string(), tableV3), enums: record(string(), enumSchemaV1), }).strict(); export const pgSchemaInternalV4 = object({ version: literal('4'), dialect: literal('pg'), tables: record(string(), tableV4), enums: record(string(), enumSchemaV1), schemas: record(string(), string()), }).strict(); // "table" -> "schema.table" for schema proper support export const pgSchemaInternalV5 = object({ version: literal('5'), dialect: literal('pg'), tables: record(string(), tableV5), enums: record(string(), enumSchemaV1), schemas: record(string(), string()), _meta: object({ schemas: record(string(), string()), tables: record(string(), string()), columns: record(string(), string()), }), internal: kitInternals, }).strict(); export const pgSchemaInternalV6 = object({ version: literal('6'), dialect: literal('postgresql'), tables: record(string(), tableV6), enums: record(string(), enumSchema), schemas: record(string(), string()), _meta: object({ schemas: record(string(), string()), tables: record(string(), string()), columns: record(string(), string()), }), internal: kitInternals, }).strict(); export const pgSchemaExternal = object({ version: literal('5'), dialect: literal('pg'), tables: array(table), enums: array(enumSchemaV1), schemas: array(object({ name: string() })), _meta: object({ schemas: record(string(), string()), tables: record(string(), string()), columns: record(string(), string()), }), }).strict(); export const pgSchemaInternalV7 = object({ version: literal('7'), dialect: literal('postgresql'), tables: record(string(), tableV7), enums: record(string(), enumSchema), schemas: record(string(), string()), sequences: record(string(), sequenceSchema), _meta: object({ schemas: record(string(), string()), tables: record(string(), string()), columns: record(string(), string()), }), internal: kitInternals, }).strict(); export const pgSchemaInternal = object({ version: literal('7'), dialect: literal('postgresql'), tables: record(string(), table), enums: record(string(), enumSchema), schemas: record(string(), string()), views: record(string(), view).default({}), sequences: record(string(), sequenceSchema).default({}), roles: record(string(), roleSchema).default({}), policies: record(string(), policy).default({}), _meta: object({ schemas: record(string(), string()), tables: record(string(), string()), columns: record(string(), string()), }), internal: kitInternals, }).strict(); const tableSquashed = object({ name: string(), schema: string(), columns: record(string(), columnSquashed), indexes: record(string(), string()), foreignKeys: record(string(), string()), compositePrimaryKeys: record(string(), string()), uniqueConstraints: record(string(), string()), policies: record(string(), string()), checkConstraints: record(string(), string()), isRLSEnabled: boolean().default(false), }).strict(); const tableSquashedV4 = object({ name: string(), schema: string(), columns: record(string(), column), indexes: record(string(), string()), foreignKeys: record(string(), string()), }).strict(); export const pgSchemaSquashedV4 = object({ version: literal('4'), dialect: literal('pg'), tables: record(string(), tableSquashedV4), enums: record(string(), enumSchemaV1), schemas: record(string(), string()), }).strict(); export const pgSchemaSquashedV6 = object({ version: literal('6'), dialect: literal('postgresql'), tables: record(string(), tableSquashed), enums: record(string(), enumSchema), schemas: record(string(), string()), }).strict(); export const pgSchemaSquashed = object({ version: literal('7'), dialect: literal('postgresql'), tables: record(string(), tableSquashed), enums: record(string(), enumSchema), schemas: record(string(), string()), views: record(string(), view), sequences: record(string(), sequenceSquashed), roles: record(string(), roleSchema).default({}), policies: record(string(), policySquashed).default({}), }).strict(); export const pgSchemaV3 = pgSchemaInternalV3.merge(schemaHash); export const pgSchemaV4 = pgSchemaInternalV4.merge(schemaHash); export const pgSchemaV5 = pgSchemaInternalV5.merge(schemaHash); export const pgSchemaV6 = pgSchemaInternalV6.merge(schemaHash); export const pgSchemaV7 = pgSchemaInternalV7.merge(schemaHash); export const pgSchema = pgSchemaInternal.merge(schemaHash); export type Enum = TypeOf; export type Sequence = TypeOf; export type Role = TypeOf; export type Column = TypeOf; export type TableV3 = TypeOf; export type TableV4 = TypeOf; export type TableV5 = TypeOf; export type Table = TypeOf; export type PgSchema = TypeOf; export type PgSchemaInternal = TypeOf; export type PgSchemaV6Internal = TypeOf; export type PgSchemaExternal = TypeOf; export type PgSchemaSquashed = TypeOf; export type PgSchemaSquashedV4 = TypeOf; export type PgSchemaSquashedV6 = TypeOf; export type Index = TypeOf; export type ForeignKey = TypeOf; export type PrimaryKey = TypeOf; export type UniqueConstraint = TypeOf; export type Policy = TypeOf; export type View = TypeOf; export type MatViewWithOption = TypeOf; export type ViewWithOption = TypeOf; export type PgKitInternals = TypeOf; export type CheckConstraint = TypeOf; export type PgSchemaV1 = TypeOf; export type PgSchemaV2 = TypeOf; export type PgSchemaV3 = TypeOf; export type PgSchemaV4 = TypeOf; export type PgSchemaV5 = TypeOf; export type PgSchemaV6 = TypeOf; export const backwardCompatiblePgSchema = union([ pgSchemaV5, pgSchemaV6, pgSchema, ]); export const PgSquasher = { squashIdx: (idx: Index) => { index.parse(idx); return `${idx.name};${ idx.columns .map( (c) => `${c.expression}--${c.isExpression}--${c.asc}--${c.nulls}--${c.opclass ? c.opclass : ''}`, ) .join(',,') };${idx.isUnique};${idx.concurrently};${idx.method};${idx.where};${JSON.stringify(idx.with)}`; }, unsquashIdx: (input: string): Index => { const [ name, columnsString, isUnique, concurrently, method, where, idxWith, ] = input.split(';'); const columnString = columnsString.split(',,'); const columns: IndexColumnType[] = []; for (const column of columnString) { const [expression, isExpression, asc, nulls, opclass] = column.split('--'); columns.push({ nulls: nulls as IndexColumnType['nulls'], isExpression: isExpression === 'true', asc: asc === 'true', expression: expression, opclass: opclass === 'undefined' ? undefined : opclass, }); } const result: Index = index.parse({ name, columns: columns, isUnique: isUnique === 'true', concurrently: concurrently === 'true', method, where: where === 'undefined' ? undefined : where, with: !idxWith || idxWith === 'undefined' ? undefined : JSON.parse(idxWith), }); return result; }, squashIdxPush: (idx: Index) => { index.parse(idx); return `${idx.name};${ idx.columns .map((c) => `${c.isExpression ? '' : c.expression}--${c.asc}--${c.nulls}`) .join(',,') };${idx.isUnique};${idx.method};${JSON.stringify(idx.with)}`; }, unsquashIdxPush: (input: string): Index => { const [name, columnsString, isUnique, method, idxWith] = input.split(';'); const columnString = columnsString.split('--'); const columns: IndexColumnType[] = []; for (const column of columnString) { const [expression, asc, nulls, opclass] = column.split(','); columns.push({ nulls: nulls as IndexColumnType['nulls'], isExpression: expression === '', asc: asc === 'true', expression: expression, }); } const result: Index = index.parse({ name, columns: columns, isUnique: isUnique === 'true', concurrently: false, method, with: idxWith === 'undefined' ? undefined : JSON.parse(idxWith), }); return result; }, squashFK: (fk: ForeignKey) => { return `${fk.name};${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${ fk.onUpdate ?? '' };${fk.onDelete ?? ''};${fk.schemaTo || 'public'}`; }, squashPolicy: (policy: Policy) => { return `${policy.name}--${policy.as}--${policy.for}--${ policy.to?.join(',') }--${policy.using}--${policy.withCheck}--${policy.on}`; }, unsquashPolicy: (policy: string): Policy => { const splitted = policy.split('--'); return { name: splitted[0], as: splitted[1] as Policy['as'], for: splitted[2] as Policy['for'], to: splitted[3].split(','), using: splitted[4] !== 'undefined' ? splitted[4] : undefined, withCheck: splitted[5] !== 'undefined' ? splitted[5] : undefined, on: splitted[6] !== 'undefined' ? splitted[6] : undefined, }; }, squashPolicyPush: (policy: Policy) => { return `${policy.name}--${policy.as}--${policy.for}--${policy.to?.join(',')}--${policy.on}`; }, unsquashPolicyPush: (policy: string): Policy => { const splitted = policy.split('--'); return { name: splitted[0], as: splitted[1] as Policy['as'], for: splitted[2] as Policy['for'], to: splitted[3].split(','), on: splitted[4] !== 'undefined' ? splitted[4] : undefined, }; }, squashPK: (pk: PrimaryKey) => { return `${pk.columns.join(',')};${pk.name}`; }, unsquashPK: (pk: string): PrimaryKey => { const splitted = pk.split(';'); return { name: splitted[1], columns: splitted[0].split(',') }; }, squashUnique: (unq: UniqueConstraint) => { return `${unq.name};${unq.columns.join(',')};${unq.nullsNotDistinct}`; }, unsquashUnique: (unq: string): UniqueConstraint => { const [name, columns, nullsNotDistinct] = unq.split(';'); return { name, columns: columns.split(','), nullsNotDistinct: nullsNotDistinct === 'true', }; }, unsquashFK: (input: string): ForeignKey => { const [ name, tableFrom, columnsFromStr, tableTo, columnsToStr, onUpdate, onDelete, schemaTo, ] = input.split(';'); const result: ForeignKey = fk.parse({ name, tableFrom, columnsFrom: columnsFromStr.split(','), schemaTo: schemaTo, tableTo, columnsTo: columnsToStr.split(','), onUpdate, onDelete, }); return result; }, squashSequence: (seq: Omit) => { return `${seq.minValue};${seq.maxValue};${seq.increment};${seq.startWith};${seq.cache};${seq.cycle ?? ''}`; }, unsquashSequence: (seq: string): Omit => { const splitted = seq.split(';'); return { minValue: splitted[0] !== 'undefined' ? splitted[0] : undefined, maxValue: splitted[1] !== 'undefined' ? splitted[1] : undefined, increment: splitted[2] !== 'undefined' ? splitted[2] : undefined, startWith: splitted[3] !== 'undefined' ? splitted[3] : undefined, cache: splitted[4] !== 'undefined' ? splitted[4] : undefined, cycle: splitted[5] === 'true', }; }, squashIdentity: ( seq: Omit & { type: 'always' | 'byDefault' }, ) => { return `${seq.name};${seq.type};${seq.minValue};${seq.maxValue};${seq.increment};${seq.startWith};${seq.cache};${ seq.cycle ?? '' }`; }, unsquashIdentity: ( seq: string, ): Omit & { type: 'always' | 'byDefault' } => { const splitted = seq.split(';'); return { name: splitted[0], type: splitted[1] as 'always' | 'byDefault', minValue: splitted[2] !== 'undefined' ? splitted[2] : undefined, maxValue: splitted[3] !== 'undefined' ? splitted[3] : undefined, increment: splitted[4] !== 'undefined' ? splitted[4] : undefined, startWith: splitted[5] !== 'undefined' ? splitted[5] : undefined, cache: splitted[6] !== 'undefined' ? splitted[6] : undefined, cycle: splitted[7] === 'true', }; }, squashCheck: (check: CheckConstraint) => { return `${check.name};${check.value}`; }, unsquashCheck: (input: string): CheckConstraint => { const [ name, value, ] = input.split(';'); return { name, value }; }, }; export const squashPgScheme = ( json: PgSchema, action?: 'push' | undefined, ): PgSchemaSquashed => { const mappedTables = Object.fromEntries( Object.entries(json.tables).map((it) => { const squashedIndexes = mapValues(it[1].indexes, (index) => { return action === 'push' ? PgSquasher.squashIdxPush(index) : PgSquasher.squashIdx(index); }); const squashedFKs = mapValues(it[1].foreignKeys, (fk) => { return PgSquasher.squashFK(fk); }); const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { return PgSquasher.squashPK(pk); }); const mappedColumns = Object.fromEntries( Object.entries(it[1].columns).map((it) => { const mappedIdentity = it[1].identity ? PgSquasher.squashIdentity(it[1].identity) : undefined; return [ it[0], { ...it[1], identity: mappedIdentity, }, ]; }), ); const squashedUniqueConstraints = mapValues( it[1].uniqueConstraints, (unq) => { return PgSquasher.squashUnique(unq); }, ); const squashedPolicies = mapValues(it[1].policies, (policy) => { return action === 'push' ? PgSquasher.squashPolicyPush(policy) : PgSquasher.squashPolicy(policy); }); const squashedChecksContraints = mapValues( it[1].checkConstraints, (check) => { return PgSquasher.squashCheck(check); }, ); return [ it[0], { name: it[1].name, schema: it[1].schema, columns: mappedColumns, indexes: squashedIndexes, foreignKeys: squashedFKs, compositePrimaryKeys: squashedPKs, uniqueConstraints: squashedUniqueConstraints, policies: squashedPolicies, checkConstraints: squashedChecksContraints, isRLSEnabled: it[1].isRLSEnabled ?? false, }, ]; }), ); const mappedSequences = Object.fromEntries( Object.entries(json.sequences).map((it) => { return [ it[0], { name: it[1].name, schema: it[1].schema, values: PgSquasher.squashSequence(it[1]), }, ]; }), ); const mappedPolicies = Object.fromEntries( Object.entries(json.policies).map((it) => { return [ it[0], { name: it[1].name, values: action === 'push' ? PgSquasher.squashPolicyPush(it[1]) : PgSquasher.squashPolicy(it[1]), }, ]; }), ); return { version: '7', dialect: json.dialect, tables: mappedTables, enums: json.enums, schemas: json.schemas, views: json.views, policies: mappedPolicies, sequences: mappedSequences, roles: json.roles, }; }; export const dryPg = pgSchema.parse({ version: snapshotVersion, dialect: 'postgresql', id: originUUID, prevId: '', tables: {}, enums: {}, schemas: {}, policies: {}, roles: {}, sequences: {}, _meta: { schemas: {}, tables: {}, columns: {}, }, }); ================================================ FILE: drizzle-kit/src/serializer/pgSerializer.ts ================================================ import chalk from 'chalk'; import { getTableName, is, SQL } from 'drizzle-orm'; import { AnyPgTable, getMaterializedViewConfig, getTableConfig, getViewConfig, IndexedColumn, PgArray, PgColumn, PgDialect, PgEnum, PgEnumColumn, PgMaterializedView, PgPolicy, PgRole, PgSchema, PgSequence, PgView, uniqueKeyName, } from 'drizzle-orm/pg-core'; import { CasingType } from 'src/cli/validations/common'; import { vectorOps } from 'src/extensions/vector'; import { withStyle } from '../cli/validations/outputs'; import type { IntrospectStage, IntrospectStatus } from '../cli/views'; import type { CheckConstraint, Column, Enum, ForeignKey, Index, IndexColumnType, PgKitInternals, PgSchemaInternal, Policy, PrimaryKey, Role, Sequence, Table, UniqueConstraint, View, } from '../serializer/pgSchema'; import { type DB, escapeSingleQuotes, isPgArrayType } from '../utils'; import { getColumnCasing, sqlToStr } from './utils'; export const indexName = (tableName: string, columns: string[]) => { return `${tableName}_${columns.join('_')}_index`; }; function stringFromIdentityProperty(field: string | number | undefined): string | undefined { return typeof field === 'string' ? (field as string) : typeof field === 'undefined' ? undefined : String(field); } function maxRangeForIdentityBasedOn(columnType: string) { return columnType === 'integer' ? '2147483647' : columnType === 'bigint' ? '9223372036854775807' : '32767'; } function minRangeForIdentityBasedOn(columnType: string) { return columnType === 'integer' ? '-2147483648' : columnType === 'bigint' ? '-9223372036854775808' : '-32768'; } function stringFromDatabaseIdentityProperty(field: any): string | undefined { return typeof field === 'string' ? (field as string) : typeof field === 'undefined' ? undefined : typeof field === 'bigint' ? field.toString() : String(field); } export function buildArrayString(array: any[], sqlType: string): string { sqlType = sqlType.split('[')[0]; const values = array .map((value) => { if (typeof value === 'number' || typeof value === 'bigint') { return value.toString(); } else if (typeof value === 'boolean') { return value ? 'true' : 'false'; } else if (Array.isArray(value)) { return buildArrayString(value, sqlType); } else if (value instanceof Date) { if (sqlType === 'date') { return `"${value.toISOString().split('T')[0]}"`; } else if (sqlType === 'timestamp') { return `"${value.toISOString().replace('T', ' ').slice(0, 23)}"`; } else { return `"${value.toISOString()}"`; } } else if (typeof value === 'object') { return `"${JSON.stringify(value).replaceAll('"', '\\"')}"`; } return `"${value}"`; }) .join(','); return `{${values}}`; } export const generatePgSnapshot = ( tables: AnyPgTable[], enums: PgEnum[], schemas: PgSchema[], sequences: PgSequence[], roles: PgRole[], policies: PgPolicy[], views: PgView[], matViews: PgMaterializedView[], casing: CasingType | undefined, schemaFilter?: string[], ): PgSchemaInternal => { const dialect = new PgDialect({ casing }); const result: Record = {}; const resultViews: Record = {}; const sequencesToReturn: Record = {}; const rolesToReturn: Record = {}; // this policies are a separate objects that were linked to a table outside of it const policiesToReturn: Record = {}; // This object stores unique names for indexes and will be used to detect if you have the same names for indexes // within the same PostgreSQL schema const indexesInSchema: Record = {}; for (const table of tables) { // This object stores unique names for checks and will be used to detect if you have the same names for checks // within the same PostgreSQL table const checksInTable: Record = {}; const { name: tableName, columns, indexes, foreignKeys, checks, schema, primaryKeys, uniqueConstraints, policies, enableRLS, } = getTableConfig(table); if (schemaFilter && !schemaFilter.includes(schema ?? 'public')) { continue; } const columnsObject: Record = {}; const indexesObject: Record = {}; const checksObject: Record = {}; const foreignKeysObject: Record = {}; const primaryKeysObject: Record = {}; const uniqueConstraintObject: Record = {}; const policiesObject: Record = {}; columns.forEach((column) => { const name = getColumnCasing(column, casing); const notNull: boolean = column.notNull; const primaryKey: boolean = column.primary; const sqlTypeLowered = column.getSQLType().toLowerCase(); const getEnumSchema = (column: PgColumn) => { while (is(column, PgArray)) { column = column.baseColumn; } return is(column, PgEnumColumn) ? column.enum.schema || 'public' : undefined; }; const typeSchema: string | undefined = getEnumSchema(column); const generated = column.generated; const identity = column.generatedIdentity; const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue); const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; const columnToSet: Column = { name, type: column.getSQLType(), typeSchema: typeSchema, primaryKey, notNull, generated: generated ? { as: is(generated.as, SQL) ? dialect.sqlToQuery(generated.as as SQL).sql : typeof generated.as === 'function' ? dialect.sqlToQuery(generated.as() as SQL).sql : (generated.as as any), type: 'stored', } : undefined, identity: identity ? { type: identity.type, name: identity.sequenceName ?? `${tableName}_${name}_seq`, schema: schema ?? 'public', increment, startWith, minValue, maxValue, cache, cycle: identity?.sequenceOptions?.cycle ?? false, } : undefined, }; if (column.isUnique) { const existingUnique = uniqueConstraintObject[column.uniqueName!]; if (typeof existingUnique !== 'undefined') { console.log( `\n${ withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ chalk.underline.blue( tableName, ) } table. The unique constraint ${ chalk.underline.blue( column.uniqueName, ) } on the ${ chalk.underline.blue( name, ) } column is conflicting with a unique constraint name already defined for ${ chalk.underline.blue( existingUnique.columns.join(','), ) } columns\n`) }`, ); process.exit(1); } uniqueConstraintObject[column.uniqueName!] = { name: column.uniqueName!, nullsNotDistinct: column.uniqueType === 'not distinct', columns: [columnToSet.name], }; } if (column.default !== undefined) { if (is(column.default, SQL)) { columnToSet.default = sqlToStr(column.default, casing); } else { if (typeof column.default === 'string') { columnToSet.default = `'${escapeSingleQuotes(column.default)}'`; } else { if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; } else if (column.default instanceof Date) { if (sqlTypeLowered === 'date') { columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; } else if (sqlTypeLowered === 'timestamp') { columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; } else { columnToSet.default = `'${column.default.toISOString()}'`; } } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column.default)) { columnToSet.default = `'${buildArrayString(column.default, sqlTypeLowered)}'`; } else { // Should do for all types // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; columnToSet.default = column.default; } } } } columnsObject[name] = columnToSet; }); primaryKeys.map((pk) => { const originalColumnNames = pk.columns.map((c) => c.name); const columnNames = pk.columns.map((c) => getColumnCasing(c, casing)); let name = pk.getName(); if (casing !== undefined) { for (let i = 0; i < originalColumnNames.length; i++) { name = name.replace(originalColumnNames[i], columnNames[i]); } } primaryKeysObject[name] = { name, columns: columnNames, }; }); uniqueConstraints?.map((unq) => { const columnNames = unq.columns.map((c) => getColumnCasing(c, casing)); const name = unq.name ?? uniqueKeyName(table, columnNames); const existingUnique = uniqueConstraintObject[name]; if (typeof existingUnique !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated unique constraint names in ${chalk.underline.blue(tableName)} table. The unique constraint ${chalk.underline.blue(name)} on the ${ chalk.underline.blue( columnNames.join(','), ) } columns is confilcting with a unique constraint name already defined for ${ chalk.underline.blue(existingUnique.columns.join(',')) } columns\n`, ) }`, ); process.exit(1); } uniqueConstraintObject[name] = { name: unq.name!, nullsNotDistinct: unq.nullsNotDistinct, columns: columnNames, }; }); const fks: ForeignKey[] = foreignKeys.map((fk) => { const tableFrom = tableName; const onDelete = fk.onDelete; const onUpdate = fk.onUpdate; const reference = fk.reference(); const tableTo = getTableName(reference.foreignTable); // TODO: resolve issue with schema undefined/public for db push(or squasher) // getTableConfig(reference.foreignTable).schema || "public"; const schemaTo = getTableConfig(reference.foreignTable).schema; const originalColumnsFrom = reference.columns.map((it) => it.name); const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); const originalColumnsTo = reference.foreignColumns.map((it) => it.name); const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); let name = fk.getName(); if (casing !== undefined) { for (let i = 0; i < originalColumnsFrom.length; i++) { name = name.replace(originalColumnsFrom[i], columnsFrom[i]); } for (let i = 0; i < originalColumnsTo.length; i++) { name = name.replace(originalColumnsTo[i], columnsTo[i]); } } return { name, tableFrom, tableTo, schemaTo, columnsFrom, columnsTo, onDelete, onUpdate, } as ForeignKey; }); fks.forEach((it) => { foreignKeysObject[it.name] = it; }); indexes.forEach((value) => { const columns = value.config.columns; let indexColumnNames: string[] = []; columns.forEach((it) => { if (is(it, SQL)) { if (typeof value.config.name === 'undefined') { console.log( `\n${ withStyle.errorWarning( `Please specify an index name in ${getTableName(value.config.table)} table that has "${ dialect.sqlToQuery(it).sql }" expression. We can generate index names for indexes on columns only; for expressions in indexes, you need to specify the name yourself.`, ) }`, ); process.exit(1); } } it = it as IndexedColumn; const name = getColumnCasing(it as IndexedColumn, casing); if ( !is(it, SQL) && it.type! === 'PgVector' && typeof it.indexConfig!.opClass === 'undefined' ) { console.log( `\n${ withStyle.errorWarning( `You are specifying an index on the ${ chalk.blueBright( name, ) } column inside the ${ chalk.blueBright( tableName, ) } table with the ${ chalk.blueBright( 'vector', ) } type without specifying an operator class. Vector extension doesn't have a default operator class, so you need to specify one of the available options. Here is a list of available op classes for the vector extension: [${ vectorOps .map((it) => `${chalk.underline(`${it}`)}`) .join(', ') }].\n\nYou can specify it using current syntax: ${ chalk.underline( `index("${value.config.name}").using("${value.config.method}", table.${name}.op("${ vectorOps[0] }"))`, ) }\n\nYou can check the "pg_vector" docs for more info: https://github.com/pgvector/pgvector?tab=readme-ov-file#indexing\n`, ) }`, ); process.exit(1); } indexColumnNames.push(name); }); const name = value.config.name ? value.config.name : indexName(tableName, indexColumnNames); let indexColumns: IndexColumnType[] = columns.map( (it): IndexColumnType => { if (is(it, SQL)) { return { expression: dialect.sqlToQuery(it, 'indexes').sql, asc: true, isExpression: true, nulls: 'last', }; } else { it = it as IndexedColumn; return { expression: getColumnCasing(it as IndexedColumn, casing), isExpression: false, asc: it.indexConfig?.order === 'asc', nulls: it.indexConfig?.nulls ? it.indexConfig?.nulls : it.indexConfig?.order === 'desc' ? 'first' : 'last', opclass: it.indexConfig?.opClass, }; } }, ); // check for index names duplicates if (typeof indexesInSchema[schema ?? 'public'] !== 'undefined') { if (indexesInSchema[schema ?? 'public'].includes(name)) { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated index name across ${ chalk.underline.blue(schema ?? 'public') } schema. Please rename your index in either the ${ chalk.underline.blue( tableName, ) } table or the table with the duplicated index name`, ) }`, ); process.exit(1); } indexesInSchema[schema ?? 'public'].push(name); } else { indexesInSchema[schema ?? 'public'] = [name]; } indexesObject[name] = { name, columns: indexColumns, isUnique: value.config.unique ?? false, where: value.config.where ? dialect.sqlToQuery(value.config.where).sql : undefined, concurrently: value.config.concurrently ?? false, method: value.config.method ?? 'btree', with: value.config.with ?? {}, }; }); policies.forEach((policy) => { const mappedTo = []; if (!policy.to) { mappedTo.push('public'); } else { if (policy.to && typeof policy.to === 'string') { mappedTo.push(policy.to); } else if (policy.to && is(policy.to, PgRole)) { mappedTo.push(policy.to.name); } else if (policy.to && Array.isArray(policy.to)) { policy.to.forEach((it) => { if (typeof it === 'string') { mappedTo.push(it); } else if (is(it, PgRole)) { mappedTo.push(it.name); } }); } } if (policiesObject[policy.name] !== undefined) { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated policy name across ${ chalk.underline.blue(tableKey) } table. Please rename one of the policies with ${ chalk.underline.blue( policy.name, ) } name`, ) }`, ); process.exit(1); } policiesObject[policy.name] = { name: policy.name, as: policy.as?.toUpperCase() as Policy['as'] ?? 'PERMISSIVE', for: policy.for?.toUpperCase() as Policy['for'] ?? 'ALL', to: mappedTo.sort(), using: is(policy.using, SQL) ? dialect.sqlToQuery(policy.using).sql : undefined, withCheck: is(policy.withCheck, SQL) ? dialect.sqlToQuery(policy.withCheck).sql : undefined, }; }); checks.forEach((check) => { const checkName = check.name; if (typeof checksInTable[`"${schema ?? 'public'}"."${tableName}"`] !== 'undefined') { if (checksInTable[`"${schema ?? 'public'}"."${tableName}"`].includes(check.name)) { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated check constraint name across ${ chalk.underline.blue( schema ?? 'public', ) } schema in ${ chalk.underline.blue( tableName, ) }. Please rename your check constraint in either the ${ chalk.underline.blue( tableName, ) } table or the table with the duplicated check contraint name`, ) }`, ); process.exit(1); } checksInTable[`"${schema ?? 'public'}"."${tableName}"`].push(checkName); } else { checksInTable[`"${schema ?? 'public'}"."${tableName}"`] = [check.name]; } checksObject[checkName] = { name: checkName, value: dialect.sqlToQuery(check.value).sql, }; }); const tableKey = `${schema ?? 'public'}.${tableName}`; result[tableKey] = { name: tableName, schema: schema ?? '', columns: columnsObject, indexes: indexesObject, foreignKeys: foreignKeysObject, compositePrimaryKeys: primaryKeysObject, uniqueConstraints: uniqueConstraintObject, policies: policiesObject, checkConstraints: checksObject, isRLSEnabled: enableRLS, }; } for (const policy of policies) { // @ts-ignore if (!policy._linkedTable) { console.log( `\n${ withStyle.errorWarning( `"Policy ${policy.name} was skipped because it was not linked to any table. You should either include the policy in a table or use .link() on the policy to link it to any table you have. For more information, please check:`, ) }`, ); continue; } // @ts-ignore const tableConfig = getTableConfig(policy._linkedTable); const tableKey = `${tableConfig.schema ?? 'public'}.${tableConfig.name}`; const mappedTo = []; if (!policy.to) { mappedTo.push('public'); } else { if (policy.to && typeof policy.to === 'string') { mappedTo.push(policy.to); } else if (policy.to && is(policy.to, PgRole)) { mappedTo.push(policy.to.name); } else if (policy.to && Array.isArray(policy.to)) { policy.to.forEach((it) => { if (typeof it === 'string') { mappedTo.push(it); } else if (is(it, PgRole)) { mappedTo.push(it.name); } }); } } // add separate policies object, that will be only responsible for policy creation // but we would need to track if a policy was enabled for a specific table or not // enable only if jsonStatements for enable rls was not already there + filter it if (result[tableKey]?.policies[policy.name] !== undefined || policiesToReturn[policy.name] !== undefined) { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated policy name across ${ chalk.underline.blue(tableKey) } table. Please rename one of the policies with ${ chalk.underline.blue( policy.name, ) } name`, ) }`, ); process.exit(1); } const mappedPolicy = { name: policy.name, as: policy.as?.toUpperCase() as Policy['as'] ?? 'PERMISSIVE', for: policy.for?.toUpperCase() as Policy['for'] ?? 'ALL', to: mappedTo.sort(), using: is(policy.using, SQL) ? dialect.sqlToQuery(policy.using).sql : undefined, withCheck: is(policy.withCheck, SQL) ? dialect.sqlToQuery(policy.withCheck).sql : undefined, }; if (result[tableKey]) { result[tableKey].policies[policy.name] = mappedPolicy; } else { policiesToReturn[policy.name] = { ...mappedPolicy, schema: tableConfig.schema ?? 'public', on: `"${tableConfig.schema ?? 'public'}"."${tableConfig.name}"`, }; } } for (const sequence of sequences) { const name = sequence.seqName!; if (typeof sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] === 'undefined') { const increment = stringFromIdentityProperty(sequence?.seqOptions?.increment) ?? '1'; const minValue = stringFromIdentityProperty(sequence?.seqOptions?.minValue) ?? (parseFloat(increment) < 0 ? '-9223372036854775808' : '1'); const maxValue = stringFromIdentityProperty(sequence?.seqOptions?.maxValue) ?? (parseFloat(increment) < 0 ? '-1' : '9223372036854775807'); const startWith = stringFromIdentityProperty(sequence?.seqOptions?.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue); const cache = stringFromIdentityProperty(sequence?.seqOptions?.cache) ?? '1'; sequencesToReturn[`${sequence.schema ?? 'public'}.${name}`] = { name, schema: sequence.schema ?? 'public', increment, startWith, minValue, maxValue, cache, cycle: sequence.seqOptions?.cycle ?? false, }; } else { // duplicate seq error } } for (const role of roles) { if (!(role as any)._existing) { rolesToReturn[role.name] = { name: role.name, createDb: (role as any).createDb === undefined ? false : (role as any).createDb, createRole: (role as any).createRole === undefined ? false : (role as any).createRole, inherit: (role as any).inherit === undefined ? true : (role as any).inherit, }; } } const combinedViews = [...views, ...matViews]; for (const view of combinedViews) { let viewName; let schema; let query; let selectedFields; let isExisting; let withOption; let tablespace; let using; let withNoData; let materialized: boolean = false; if (is(view, PgView)) { ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption } = getViewConfig(view)); } else { ({ name: viewName, schema, query, selectedFields, isExisting, with: withOption, tablespace, using, withNoData } = getMaterializedViewConfig(view)); materialized = true; } const viewSchema = schema ?? 'public'; const viewKey = `${viewSchema}.${viewName}`; const columnsObject: Record = {}; const uniqueConstraintObject: Record = {}; const existingView = resultViews[viewKey]; if (typeof existingView !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated view name across ${ chalk.underline.blue(schema ?? 'public') } schema. Please rename your view`, ) }`, ); process.exit(1); } for (const key in selectedFields) { if (is(selectedFields[key], PgColumn)) { const column = selectedFields[key]; const notNull: boolean = column.notNull; const primaryKey: boolean = column.primary; const sqlTypeLowered = column.getSQLType().toLowerCase(); const typeSchema = is(column, PgEnumColumn) ? column.enum.schema || 'public' : undefined; const generated = column.generated; const identity = column.generatedIdentity; const increment = stringFromIdentityProperty(identity?.sequenceOptions?.increment) ?? '1'; const minValue = stringFromIdentityProperty(identity?.sequenceOptions?.minValue) ?? (parseFloat(increment) < 0 ? minRangeForIdentityBasedOn(column.columnType) : '1'); const maxValue = stringFromIdentityProperty(identity?.sequenceOptions?.maxValue) ?? (parseFloat(increment) < 0 ? '-1' : maxRangeForIdentityBasedOn(column.getSQLType())); const startWith = stringFromIdentityProperty(identity?.sequenceOptions?.startWith) ?? (parseFloat(increment) < 0 ? maxValue : minValue); const cache = stringFromIdentityProperty(identity?.sequenceOptions?.cache) ?? '1'; const columnToSet: Column = { name: column.name, type: column.getSQLType(), typeSchema: typeSchema, primaryKey, notNull, generated: generated ? { as: is(generated.as, SQL) ? dialect.sqlToQuery(generated.as as SQL).sql : typeof generated.as === 'function' ? dialect.sqlToQuery(generated.as() as SQL).sql : (generated.as as any), type: 'stored', } : undefined, identity: identity ? { type: identity.type, name: identity.sequenceName ?? `${viewName}_${column.name}_seq`, schema: schema ?? 'public', increment, startWith, minValue, maxValue, cache, cycle: identity?.sequenceOptions?.cycle ?? false, } : undefined, }; if (column.isUnique) { const existingUnique = uniqueConstraintObject[column.uniqueName!]; if (typeof existingUnique !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated unique constraint names in ${chalk.underline.blue(viewName)} table. The unique constraint ${chalk.underline.blue(column.uniqueName)} on the ${ chalk.underline.blue( column.name, ) } column is confilcting with a unique constraint name already defined for ${ chalk.underline.blue(existingUnique.columns.join(',')) } columns\n`, ) }`, ); process.exit(1); } uniqueConstraintObject[column.uniqueName!] = { name: column.uniqueName!, nullsNotDistinct: column.uniqueType === 'not distinct', columns: [columnToSet.name], }; } if (column.default !== undefined) { if (is(column.default, SQL)) { columnToSet.default = sqlToStr(column.default, casing); } else { if (typeof column.default === 'string') { columnToSet.default = `'${column.default}'`; } else { if (sqlTypeLowered === 'jsonb' || sqlTypeLowered === 'json') { columnToSet.default = `'${JSON.stringify(column.default)}'::${sqlTypeLowered}`; } else if (column.default instanceof Date) { if (sqlTypeLowered === 'date') { columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; } else if (sqlTypeLowered === 'timestamp') { columnToSet.default = `'${column.default.toISOString().replace('T', ' ').slice(0, 23)}'`; } else { columnToSet.default = `'${column.default.toISOString()}'`; } } else if (isPgArrayType(sqlTypeLowered) && Array.isArray(column.default)) { columnToSet.default = `'${buildArrayString(column.default, sqlTypeLowered)}'`; } else { // Should do for all types // columnToSet.default = `'${column.default}'::${sqlTypeLowered}`; columnToSet.default = column.default; } } } } columnsObject[column.name] = columnToSet; } } resultViews[viewKey] = { columns: columnsObject, definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, name: viewName, schema: viewSchema, isExisting, with: withOption, withNoData, materialized, tablespace, using, }; } const enumsToReturn: Record = enums.reduce<{ [key: string]: Enum; }>((map, obj) => { const enumSchema = obj.schema || 'public'; const key = `${enumSchema}.${obj.enumName}`; map[key] = { name: obj.enumName, schema: enumSchema, values: obj.enumValues, }; return map; }, {}); const schemasObject = Object.fromEntries( schemas .filter((it) => { if (schemaFilter) { return schemaFilter.includes(it.schemaName) && it.schemaName !== 'public'; } else { return it.schemaName !== 'public'; } }) .map((it) => [it.schemaName, it.schemaName]), ); return { version: '7', dialect: 'postgresql', tables: result, enums: enumsToReturn, schemas: schemasObject, sequences: sequencesToReturn, roles: rolesToReturn, policies: policiesToReturn, views: resultViews, _meta: { schemas: {}, tables: {}, columns: {}, }, }; }; const trimChar = (str: string, char: string) => { let start = 0; let end = str.length; while (start < end && str[start] === char) ++start; while (end > start && str[end - 1] === char) --end; // this.toString() due to ava deep equal issue with String { "value" } return start > 0 || end < str.length ? str.substring(start, end) : str.toString(); }; function prepareRoles(entities?: { roles: boolean | { provider?: string | undefined; include?: string[] | undefined; exclude?: string[] | undefined; }; }) { let useRoles: boolean = false; const includeRoles: string[] = []; const excludeRoles: string[] = []; if (entities && entities.roles) { if (typeof entities.roles === 'object') { if (entities.roles.provider) { if (entities.roles.provider === 'supabase') { excludeRoles.push(...[ 'anon', 'authenticator', 'authenticated', 'service_role', 'supabase_auth_admin', 'supabase_storage_admin', 'dashboard_user', 'supabase_admin', ]); } else if (entities.roles.provider === 'neon') { excludeRoles.push(...['authenticated', 'anonymous']); } } if (entities.roles.include) { includeRoles.push(...entities.roles.include); } if (entities.roles.exclude) { excludeRoles.push(...entities.roles.exclude); } } else { useRoles = entities.roles; } } return { useRoles, includeRoles, excludeRoles }; } export const fromDatabase = async ( db: DB, tablesFilter: (table: string) => boolean = () => true, schemaFilters: string[], entities?: { roles: boolean | { provider?: string | undefined; include?: string[] | undefined; exclude?: string[] | undefined; }; }, progressCallback?: ( stage: IntrospectStage, count: number, status: IntrospectStatus, ) => void, tsSchema?: PgSchemaInternal, ): Promise => { const result: Record = {}; const views: Record = {}; const policies: Record = {}; const internals: PgKitInternals = { tables: {} }; const where = schemaFilters.map((t) => `n.nspname = '${t}'`).join(' or '); const allTables = await db.query<{ table_schema: string; table_name: string; type: string; rls_enabled: boolean }>( `SELECT n.nspname AS table_schema, c.relname AS table_name, CASE WHEN c.relkind = 'r' THEN 'table' WHEN c.relkind = 'v' THEN 'view' WHEN c.relkind = 'm' THEN 'materialized_view' END AS type, c.relrowsecurity AS rls_enabled FROM pg_catalog.pg_class c JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace WHERE c.relkind IN ('r', 'v', 'm') ${where === '' ? '' : ` AND ${where}`};`, ); const schemas = new Set(allTables.map((it) => it.table_schema)); schemas.delete('public'); const allSchemas = await db.query<{ table_schema: string; }>(`select s.nspname as table_schema from pg_catalog.pg_namespace s join pg_catalog.pg_user u on u.usesysid = s.nspowner where nspname not in ('information_schema', 'pg_catalog', 'public') and nspname not like 'pg_toast%' and nspname not like 'pg_temp_%' order by table_schema;`); allSchemas.forEach((item) => { if (schemaFilters.includes(item.table_schema)) { schemas.add(item.table_schema); } }); let columnsCount = 0; let indexesCount = 0; let foreignKeysCount = 0; let tableCount = 0; let checksCount = 0; let viewsCount = 0; const sequencesToReturn: Record = {}; const seqWhere = schemaFilters.map((t) => `schemaname = '${t}'`).join(' or '); const allSequences = await db.query( `select schemaname, sequencename, start_value, min_value, max_value, increment_by, cycle, cache_size from pg_sequences as seq${ seqWhere === '' ? '' : ` WHERE ${seqWhere}` };`, ); for (const dbSeq of allSequences) { const schemaName = dbSeq.schemaname; const sequenceName = dbSeq.sequencename; const startValue = stringFromDatabaseIdentityProperty(dbSeq.start_value); const minValue = stringFromDatabaseIdentityProperty(dbSeq.min_value); const maxValue = stringFromDatabaseIdentityProperty(dbSeq.max_value); const incrementBy = stringFromDatabaseIdentityProperty(dbSeq.increment_by); const cycle = dbSeq.cycle; const cacheSize = stringFromDatabaseIdentityProperty(dbSeq.cache_size); const key = `${schemaName}.${sequenceName}`; sequencesToReturn[key] = { name: sequenceName, schema: schemaName, startWith: startValue, minValue, maxValue, increment: incrementBy, cycle, cache: cacheSize, }; } const whereEnums = schemaFilters.map((t) => `n.nspname = '${t}'`).join(' or '); const allEnums = await db.query( `select n.nspname as enum_schema, t.typname as enum_name, e.enumlabel as enum_value, e.enumsortorder as sort_order from pg_type t join pg_enum e on t.oid = e.enumtypid join pg_catalog.pg_namespace n ON n.oid = t.typnamespace ${whereEnums === '' ? '' : ` WHERE ${whereEnums}`} order by enum_schema, enum_name, sort_order;`, ); const enumsToReturn: Record = {}; for (const dbEnum of allEnums) { const enumName = dbEnum.enum_name; const enumValue = dbEnum.enum_value as string; const enumSchema: string = dbEnum.enum_schema || 'public'; const key = `${enumSchema}.${enumName}`; if (enumsToReturn[key] !== undefined && enumsToReturn[key] !== null) { enumsToReturn[key].values.push(enumValue); } else { enumsToReturn[key] = { name: enumName, values: [enumValue], schema: enumSchema, }; } } if (progressCallback) { progressCallback('enums', Object.keys(enumsToReturn).length, 'done'); } const allRoles = await db.query< { rolname: string; rolinherit: boolean; rolcreatedb: boolean; rolcreaterole: boolean } >( `SELECT rolname, rolinherit, rolcreatedb, rolcreaterole FROM pg_roles;`, ); const rolesToReturn: Record = {}; const preparedRoles = prepareRoles(entities); if ( preparedRoles.useRoles || !(preparedRoles.includeRoles.length === 0 && preparedRoles.excludeRoles.length === 0) ) { for (const dbRole of allRoles) { if ( preparedRoles.useRoles ) { rolesToReturn[dbRole.rolname] = { createDb: dbRole.rolcreatedb, createRole: dbRole.rolcreatedb, inherit: dbRole.rolinherit, name: dbRole.rolname, }; } else { if (preparedRoles.includeRoles.length === 0 && preparedRoles.excludeRoles.length === 0) continue; if ( preparedRoles.includeRoles.includes(dbRole.rolname) && preparedRoles.excludeRoles.includes(dbRole.rolname) ) continue; if (preparedRoles.excludeRoles.includes(dbRole.rolname)) continue; if (!preparedRoles.includeRoles.includes(dbRole.rolname)) continue; rolesToReturn[dbRole.rolname] = { createDb: dbRole.rolcreatedb, createRole: dbRole.rolcreaterole, inherit: dbRole.rolinherit, name: dbRole.rolname, }; } } } const schemasForLinkedPoliciesInSchema = Object.values(tsSchema?.policies ?? {}).map((it) => it.schema!); const wherePolicies = [...schemaFilters, ...schemasForLinkedPoliciesInSchema] .map((t) => `schemaname = '${t}'`) .join(' or '); const policiesByTable: Record> = {}; const allPolicies = await db.query< { schemaname: string; tablename: string; name: string; as: string; to: string; for: string; using: string; withCheck: string; } >(`SELECT schemaname, tablename, policyname as name, permissive as "as", roles as to, cmd as for, qual as using, with_check as "withCheck" FROM pg_policies${ wherePolicies === '' ? '' : ` WHERE ${wherePolicies}` };`); for (const dbPolicy of allPolicies) { const { tablename, schemaname, to, withCheck, using, ...rest } = dbPolicy; const tableForPolicy = policiesByTable[`${schemaname}.${tablename}`]; const parsedTo = typeof to === 'string' ? to.slice(1, -1).split(',') : to; const parsedWithCheck = withCheck === null ? undefined : withCheck; const parsedUsing = using === null ? undefined : using; if (tableForPolicy) { tableForPolicy[dbPolicy.name] = { ...rest, to: parsedTo } as Policy; } else { policiesByTable[`${schemaname}.${tablename}`] = { [dbPolicy.name]: { ...rest, to: parsedTo, withCheck: parsedWithCheck, using: parsedUsing } as Policy, }; } if (tsSchema?.policies[dbPolicy.name]) { policies[dbPolicy.name] = { ...rest, to: parsedTo, withCheck: parsedWithCheck, using: parsedUsing, on: tsSchema?.policies[dbPolicy.name].on, } as Policy; } } if (progressCallback) { progressCallback( 'policies', Object.values(policiesByTable).reduce((total, innerRecord) => { return total + Object.keys(innerRecord).length; }, 0), 'done', ); } const sequencesInColumns: string[] = []; const all = allTables .filter((it) => it.type === 'table') .map((row) => { return new Promise(async (res, rej) => { const tableName = row.table_name as string; if (!tablesFilter(tableName)) return res(''); tableCount += 1; const tableSchema = row.table_schema; try { const columnToReturn: Record = {}; const indexToReturn: Record = {}; const foreignKeysToReturn: Record = {}; const primaryKeys: Record = {}; const uniqueConstrains: Record = {}; const checkConstraints: Record = {}; const tableResponse = await getColumnsInfoQuery({ schema: tableSchema, table: tableName, db }); const tableConstraints = await db.query( `SELECT c.column_name, c.data_type, constraint_type, constraint_name, constraint_schema FROM information_schema.table_constraints tc JOIN information_schema.constraint_column_usage AS ccu USING (constraint_schema, constraint_name) JOIN information_schema.columns AS c ON c.table_schema = tc.constraint_schema AND tc.table_name = c.table_name AND ccu.column_name = c.column_name WHERE tc.table_name = '${tableName}' and constraint_schema = '${tableSchema}';`, ); const tableChecks = await db.query(`SELECT tc.constraint_name, tc.constraint_type, pg_get_constraintdef(con.oid) AS constraint_definition FROM information_schema.table_constraints AS tc JOIN pg_constraint AS con ON tc.constraint_name = con.conname AND con.conrelid = ( SELECT oid FROM pg_class WHERE relname = tc.table_name AND relnamespace = ( SELECT oid FROM pg_namespace WHERE nspname = tc.constraint_schema ) ) WHERE tc.table_name = '${tableName}' AND tc.constraint_schema = '${tableSchema}' AND tc.constraint_type = 'CHECK' AND con.contype = 'c';`); columnsCount += tableResponse.length; if (progressCallback) { progressCallback('columns', columnsCount, 'fetching'); } const tableForeignKeys = await db.query( `SELECT con.contype AS constraint_type, nsp.nspname AS constraint_schema, con.conname AS constraint_name, rel.relname AS table_name, att.attname AS column_name, fnsp.nspname AS foreign_table_schema, frel.relname AS foreign_table_name, fatt.attname AS foreign_column_name, CASE con.confupdtype WHEN 'a' THEN 'NO ACTION' WHEN 'r' THEN 'RESTRICT' WHEN 'n' THEN 'SET NULL' WHEN 'c' THEN 'CASCADE' WHEN 'd' THEN 'SET DEFAULT' END AS update_rule, CASE con.confdeltype WHEN 'a' THEN 'NO ACTION' WHEN 'r' THEN 'RESTRICT' WHEN 'n' THEN 'SET NULL' WHEN 'c' THEN 'CASCADE' WHEN 'd' THEN 'SET DEFAULT' END AS delete_rule FROM pg_catalog.pg_constraint con JOIN pg_catalog.pg_class rel ON rel.oid = con.conrelid JOIN pg_catalog.pg_namespace nsp ON nsp.oid = con.connamespace LEFT JOIN pg_catalog.pg_attribute att ON att.attnum = ANY (con.conkey) AND att.attrelid = con.conrelid LEFT JOIN pg_catalog.pg_class frel ON frel.oid = con.confrelid LEFT JOIN pg_catalog.pg_namespace fnsp ON fnsp.oid = frel.relnamespace LEFT JOIN pg_catalog.pg_attribute fatt ON fatt.attnum = ANY (con.confkey) AND fatt.attrelid = con.confrelid WHERE nsp.nspname = '${tableSchema}' AND rel.relname = '${tableName}' AND con.contype IN ('f');`, ); foreignKeysCount += tableForeignKeys.length; if (progressCallback) { progressCallback('fks', foreignKeysCount, 'fetching'); } for (const fk of tableForeignKeys) { // const tableFrom = fk.table_name; const columnFrom: string = fk.column_name; const tableTo = fk.foreign_table_name; const columnTo: string = fk.foreign_column_name; const schemaTo: string = fk.foreign_table_schema; const foreignKeyName = fk.constraint_name; const onUpdate = fk.update_rule?.toLowerCase(); const onDelete = fk.delete_rule?.toLowerCase(); if (typeof foreignKeysToReturn[foreignKeyName] !== 'undefined') { foreignKeysToReturn[foreignKeyName].columnsFrom.push(columnFrom); foreignKeysToReturn[foreignKeyName].columnsTo.push(columnTo); } else { foreignKeysToReturn[foreignKeyName] = { name: foreignKeyName, tableFrom: tableName, tableTo, schemaTo, columnsFrom: [columnFrom], columnsTo: [columnTo], onDelete, onUpdate, }; } foreignKeysToReturn[foreignKeyName].columnsFrom = [ ...new Set(foreignKeysToReturn[foreignKeyName].columnsFrom), ]; foreignKeysToReturn[foreignKeyName].columnsTo = [...new Set(foreignKeysToReturn[foreignKeyName].columnsTo)]; } const uniqueConstrainsRows = tableConstraints.filter((mapRow) => mapRow.constraint_type === 'UNIQUE'); for (const unqs of uniqueConstrainsRows) { // const tableFrom = fk.table_name; const columnName: string = unqs.column_name; const constraintName: string = unqs.constraint_name; if (typeof uniqueConstrains[constraintName] !== 'undefined') { uniqueConstrains[constraintName].columns.push(columnName); } else { uniqueConstrains[constraintName] = { columns: [columnName], nullsNotDistinct: false, name: constraintName, }; } } checksCount += tableChecks.length; if (progressCallback) { progressCallback('checks', checksCount, 'fetching'); } for (const checks of tableChecks) { // CHECK (((email)::text <> 'test@gmail.com'::text)) // Where (email) is column in table let checkValue: string = checks.constraint_definition; const constraintName: string = checks.constraint_name; checkValue = checkValue.replace(/^CHECK\s*\(\(/, '').replace(/\)\)\s*$/, ''); checkConstraints[constraintName] = { name: constraintName, value: checkValue, }; } for (const columnResponse of tableResponse) { const columnName = columnResponse.column_name; const columnAdditionalDT = columnResponse.additional_dt; const columnDimensions = columnResponse.array_dimensions; const enumType: string = columnResponse.enum_name; let columnType: string = columnResponse.data_type; const typeSchema = columnResponse.type_schema; const defaultValueRes: string = columnResponse.column_default; const isGenerated = columnResponse.is_generated === 'ALWAYS'; const generationExpression = columnResponse.generation_expression; const isIdentity = columnResponse.is_identity === 'YES'; const identityGeneration = columnResponse.identity_generation === 'ALWAYS' ? 'always' : 'byDefault'; const identityStart = columnResponse.identity_start; const identityIncrement = columnResponse.identity_increment; const identityMaximum = columnResponse.identity_maximum; const identityMinimum = columnResponse.identity_minimum; const identityCycle = columnResponse.identity_cycle === 'YES'; const identityName = columnResponse.seq_name; const primaryKey = tableConstraints.filter((mapRow) => columnName === mapRow.column_name && mapRow.constraint_type === 'PRIMARY KEY' ); const cprimaryKey = tableConstraints.filter((mapRow) => mapRow.constraint_type === 'PRIMARY KEY'); if (cprimaryKey.length > 1) { const tableCompositePkName = await db.query( `SELECT conname AS primary_key FROM pg_constraint join pg_class on (pg_class.oid = conrelid) WHERE contype = 'p' AND connamespace = $1::regnamespace AND pg_class.relname = $2;`, [tableSchema, tableName], ); primaryKeys[tableCompositePkName[0].primary_key] = { name: tableCompositePkName[0].primary_key, columns: cprimaryKey.map((c: any) => c.column_name), }; } let columnTypeMapped = columnType; // Set default to internal object if (columnAdditionalDT === 'ARRAY') { if (typeof internals.tables[tableName] === 'undefined') { internals.tables[tableName] = { columns: { [columnName]: { isArray: true, dimensions: columnDimensions, rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), }, }, }; } else { if (typeof internals.tables[tableName]!.columns[columnName] === 'undefined') { internals.tables[tableName]!.columns[columnName] = { isArray: true, dimensions: columnDimensions, rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), }; } } } const defaultValue = defaultForColumn(columnResponse, internals, tableName); if ( defaultValue === 'NULL' || (defaultValueRes && defaultValueRes.startsWith('(') && defaultValueRes.endsWith(')')) ) { if (typeof internals!.tables![tableName] === 'undefined') { internals!.tables![tableName] = { columns: { [columnName]: { isDefaultAnExpression: true, }, }, }; } else { if (typeof internals!.tables![tableName]!.columns[columnName] === 'undefined') { internals!.tables![tableName]!.columns[columnName] = { isDefaultAnExpression: true, }; } else { internals!.tables![tableName]!.columns[columnName]!.isDefaultAnExpression = true; } } } const isSerial = columnType === 'serial'; if (columnTypeMapped.startsWith('numeric(')) { columnTypeMapped = columnTypeMapped.replace(',', ', '); } if (columnAdditionalDT === 'ARRAY') { for (let i = 1; i < Number(columnDimensions); i++) { columnTypeMapped += '[]'; } } columnTypeMapped = columnTypeMapped .replace('character varying', 'varchar') .replace(' without time zone', '') // .replace("timestamp without time zone", "timestamp") .replace('character', 'char'); columnTypeMapped = trimChar(columnTypeMapped, '"'); columnToReturn[columnName] = { name: columnName, type: // filter vectors, but in future we should filter any extension that was installed by user columnAdditionalDT === 'USER-DEFINED' && !['vector', 'geometry', 'halfvec', 'sparsevec', 'bit'].includes(enumType) ? enumType : columnTypeMapped, typeSchema: enumsToReturn[`${typeSchema}.${enumType}`] !== undefined ? enumsToReturn[`${typeSchema}.${enumType}`].schema : undefined, primaryKey: primaryKey.length === 1 && cprimaryKey.length < 2, // default: isSerial ? undefined : defaultValue, notNull: columnResponse.is_nullable === 'NO', generated: isGenerated ? { as: generationExpression, type: 'stored' } : undefined, identity: isIdentity ? { type: identityGeneration, name: identityName, increment: stringFromDatabaseIdentityProperty(identityIncrement), minValue: stringFromDatabaseIdentityProperty(identityMinimum), maxValue: stringFromDatabaseIdentityProperty(identityMaximum), startWith: stringFromDatabaseIdentityProperty(identityStart), cache: sequencesToReturn[identityName]?.cache ? sequencesToReturn[identityName]?.cache : sequencesToReturn[`${tableSchema}.${identityName}`]?.cache ? sequencesToReturn[`${tableSchema}.${identityName}`]?.cache : undefined, cycle: identityCycle, schema: tableSchema, } : undefined, }; if (identityName && typeof identityName === 'string') { // remove "" from sequence name delete sequencesToReturn[ `${tableSchema}.${ identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName }` ]; delete sequencesToReturn[identityName]; } if (!isSerial && typeof defaultValue !== 'undefined') { columnToReturn[columnName].default = defaultValue; } } const dbIndexes = await db.query( `SELECT DISTINCT ON (t.relname, ic.relname, k.i) t.relname as table_name, ic.relname AS indexname, k.i AS index_order, i.indisunique as is_unique, am.amname as method, ic.reloptions as with, coalesce(a.attname, pg_get_indexdef(i.indexrelid, k.i, false)) AS column_name, CASE WHEN pg_get_expr(i.indexprs, i.indrelid) IS NOT NULL THEN 1 ELSE 0 END AS is_expression, i.indoption[k.i-1] & 1 = 1 AS descending, i.indoption[k.i-1] & 2 = 2 AS nulls_first, pg_get_expr( i.indpred, i.indrelid ) as where, opc.opcname FROM pg_class t LEFT JOIN pg_index i ON t.oid = i.indrelid LEFT JOIN pg_class ic ON ic.oid = i.indexrelid CROSS JOIN LATERAL (SELECT unnest(i.indkey), generate_subscripts(i.indkey, 1) + 1) AS k(attnum, i) LEFT JOIN pg_attribute AS a ON i.indrelid = a.attrelid AND k.attnum = a.attnum JOIN pg_namespace c on c.oid = t.relnamespace LEFT JOIN pg_am AS am ON ic.relam = am.oid JOIN pg_opclass opc ON opc.oid = ANY(i.indclass) WHERE c.nspname = '${tableSchema}' AND t.relname = '${tableName}';`, ); const dbIndexFromConstraint = await db.query( `SELECT idx.indexrelname AS index_name, idx.relname AS table_name, schemaname, CASE WHEN con.conname IS NOT NULL THEN 1 ELSE 0 END AS generated_by_constraint FROM pg_stat_user_indexes idx LEFT JOIN pg_constraint con ON con.conindid = idx.indexrelid WHERE idx.relname = '${tableName}' and schemaname = '${tableSchema}' group by index_name, table_name,schemaname, generated_by_constraint;`, ); const idxsInConsteraint = dbIndexFromConstraint.filter((it) => it.generated_by_constraint === 1).map((it) => it.index_name ); for (const dbIndex of dbIndexes) { const indexName: string = dbIndex.indexname; const indexColumnName: string = dbIndex.column_name; const indexIsUnique = dbIndex.is_unique; const indexMethod = dbIndex.method; const indexWith: string[] = dbIndex.with; const indexWhere: string = dbIndex.where; const opclass: string = dbIndex.opcname; const isExpression = dbIndex.is_expression === 1; const desc: boolean = dbIndex.descending; const nullsFirst: boolean = dbIndex.nulls_first; const mappedWith: Record = {}; if (indexWith !== null) { indexWith // .slice(1, indexWith.length - 1) // .split(",") .forEach((it) => { const splitted = it.split('='); mappedWith[splitted[0]] = splitted[1]; }); } if (idxsInConsteraint.includes(indexName)) continue; if (typeof indexToReturn[indexName] !== 'undefined') { indexToReturn[indexName].columns.push({ expression: indexColumnName, asc: !desc, nulls: nullsFirst ? 'first' : 'last', opclass, isExpression, }); } else { indexToReturn[indexName] = { name: indexName, columns: [ { expression: indexColumnName, asc: !desc, nulls: nullsFirst ? 'first' : 'last', opclass, isExpression, }, ], isUnique: indexIsUnique, // should not be a part of diff detects concurrently: false, method: indexMethod, where: indexWhere === null ? undefined : indexWhere, with: mappedWith, }; } } indexesCount += Object.keys(indexToReturn).length; if (progressCallback) { progressCallback('indexes', indexesCount, 'fetching'); } result[`${tableSchema}.${tableName}`] = { name: tableName, schema: tableSchema !== 'public' ? tableSchema : '', columns: columnToReturn, indexes: indexToReturn, foreignKeys: foreignKeysToReturn, compositePrimaryKeys: primaryKeys, uniqueConstraints: uniqueConstrains, checkConstraints: checkConstraints, policies: policiesByTable[`${tableSchema}.${tableName}`] ?? {}, isRLSEnabled: row.rls_enabled, }; } catch (e) { rej(e); return; } res(''); }); }); if (progressCallback) { progressCallback('tables', tableCount, 'done'); } for await (const _ of all) { } const allViews = allTables .filter((it) => it.type === 'view' || it.type === 'materialized_view') .map((row) => { return new Promise(async (res, rej) => { const viewName = row.table_name as string; if (!tablesFilter(viewName)) return res(''); tableCount += 1; const viewSchema = row.table_schema; try { const columnToReturn: Record = {}; const viewResponses = await getColumnsInfoQuery({ schema: viewSchema, table: viewName, db }); for (const viewResponse of viewResponses) { const columnName = viewResponse.column_name; const columnAdditionalDT = viewResponse.additional_dt; const columnDimensions = viewResponse.array_dimensions; const enumType: string = viewResponse.enum_name; let columnType: string = viewResponse.data_type; const typeSchema = viewResponse.type_schema; // const defaultValueRes: string = viewResponse.column_default; const isGenerated = viewResponse.is_generated === 'ALWAYS'; const generationExpression = viewResponse.generation_expression; const isIdentity = viewResponse.is_identity === 'YES'; const identityGeneration = viewResponse.identity_generation === 'ALWAYS' ? 'always' : 'byDefault'; const identityStart = viewResponse.identity_start; const identityIncrement = viewResponse.identity_increment; const identityMaximum = viewResponse.identity_maximum; const identityMinimum = viewResponse.identity_minimum; const identityCycle = viewResponse.identity_cycle === 'YES'; const identityName = viewResponse.seq_name; const defaultValueRes = viewResponse.column_default; const primaryKey = viewResponse.constraint_type === 'PRIMARY KEY'; let columnTypeMapped = columnType; // Set default to internal object if (columnAdditionalDT === 'ARRAY') { if (typeof internals.tables[viewName] === 'undefined') { internals.tables[viewName] = { columns: { [columnName]: { isArray: true, dimensions: columnDimensions, rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), }, }, }; } else { if (typeof internals.tables[viewName]!.columns[columnName] === 'undefined') { internals.tables[viewName]!.columns[columnName] = { isArray: true, dimensions: columnDimensions, rawType: columnTypeMapped.substring(0, columnTypeMapped.length - 2), }; } } } const defaultValue = defaultForColumn(viewResponse, internals, viewName); if ( defaultValue === 'NULL' || (defaultValueRes && defaultValueRes.startsWith('(') && defaultValueRes.endsWith(')')) ) { if (typeof internals!.tables![viewName] === 'undefined') { internals!.tables![viewName] = { columns: { [columnName]: { isDefaultAnExpression: true, }, }, }; } else { if (typeof internals!.tables![viewName]!.columns[columnName] === 'undefined') { internals!.tables![viewName]!.columns[columnName] = { isDefaultAnExpression: true, }; } else { internals!.tables![viewName]!.columns[columnName]!.isDefaultAnExpression = true; } } } const isSerial = columnType === 'serial'; if (columnTypeMapped.startsWith('numeric(')) { columnTypeMapped = columnTypeMapped.replace(',', ', '); } if (columnAdditionalDT === 'ARRAY') { for (let i = 1; i < Number(columnDimensions); i++) { columnTypeMapped += '[]'; } } columnTypeMapped = columnTypeMapped .replace('character varying', 'varchar') .replace(' without time zone', '') // .replace("timestamp without time zone", "timestamp") .replace('character', 'char'); columnTypeMapped = trimChar(columnTypeMapped, '"'); columnToReturn[columnName] = { name: columnName, type: // filter vectors, but in future we should filter any extension that was installed by user columnAdditionalDT === 'USER-DEFINED' && !['vector', 'geometry', 'halfvec', 'sparsevec', 'bit'].includes(enumType) ? enumType : columnTypeMapped, typeSchema: enumsToReturn[`${typeSchema}.${enumType}`] !== undefined ? enumsToReturn[`${typeSchema}.${enumType}`].schema : undefined, primaryKey: primaryKey, notNull: viewResponse.is_nullable === 'NO', generated: isGenerated ? { as: generationExpression, type: 'stored' } : undefined, identity: isIdentity ? { type: identityGeneration, name: identityName, increment: stringFromDatabaseIdentityProperty(identityIncrement), minValue: stringFromDatabaseIdentityProperty(identityMinimum), maxValue: stringFromDatabaseIdentityProperty(identityMaximum), startWith: stringFromDatabaseIdentityProperty(identityStart), cache: sequencesToReturn[identityName]?.cache ? sequencesToReturn[identityName]?.cache : sequencesToReturn[`${viewSchema}.${identityName}`]?.cache ? sequencesToReturn[`${viewSchema}.${identityName}`]?.cache : undefined, cycle: identityCycle, schema: viewSchema, } : undefined, }; if (identityName) { // remove "" from sequence name delete sequencesToReturn[ `${viewSchema}.${ identityName.startsWith('"') && identityName.endsWith('"') ? identityName.slice(1, -1) : identityName }` ]; delete sequencesToReturn[identityName]; } if (!isSerial && typeof defaultValue !== 'undefined') { columnToReturn[columnName].default = defaultValue; } } const [viewInfo] = await db.query<{ view_name: string; schema_name: string; definition: string; tablespace_name: string | null; options: string[] | null; location: string | null; }>(` SELECT c.relname AS view_name, n.nspname AS schema_name, pg_get_viewdef(c.oid, true) AS definition, ts.spcname AS tablespace_name, c.reloptions AS options, pg_tablespace_location(ts.oid) AS location FROM pg_class c JOIN pg_namespace n ON c.relnamespace = n.oid LEFT JOIN pg_tablespace ts ON c.reltablespace = ts.oid WHERE (c.relkind = 'm' OR c.relkind = 'v') AND n.nspname = '${viewSchema}' AND c.relname = '${viewName}';`); const resultWith: { [key: string]: string | boolean | number } = {}; if (viewInfo.options) { viewInfo.options.forEach((pair) => { const splitted = pair.split('='); const key = splitted[0]; const value = splitted[1]; if (value === 'true') { resultWith[key] = true; } else if (value === 'false') { resultWith[key] = false; } else if (!isNaN(Number(value))) { resultWith[key] = Number(value); } else { resultWith[key] = value; } }); } const definition = viewInfo.definition.replace(/\s+/g, ' ').replace(';', '').trim(); // { "check_option":"cascaded","security_barrier":true} -> // { "checkOption":"cascaded","securityBarrier":true} const withOption = Object.values(resultWith).length ? Object.fromEntries(Object.entries(resultWith).map(([key, value]) => [key.camelCase(), value])) : undefined; const materialized = row.type === 'materialized_view'; views[`${viewSchema}.${viewName}`] = { name: viewName, schema: viewSchema, columns: columnToReturn, isExisting: false, definition: definition, materialized: materialized, with: withOption, tablespace: viewInfo.tablespace_name ?? undefined, }; } catch (e) { rej(e); return; } res(''); }); }); viewsCount = allViews.length; for await (const _ of allViews) { } if (progressCallback) { progressCallback('columns', columnsCount, 'done'); progressCallback('indexes', indexesCount, 'done'); progressCallback('fks', foreignKeysCount, 'done'); progressCallback('checks', checksCount, 'done'); progressCallback('views', viewsCount, 'done'); } const schemasObject = Object.fromEntries([...schemas].map((it) => [it, it])); return { version: '7', dialect: 'postgresql', tables: result, enums: enumsToReturn, schemas: schemasObject, sequences: sequencesToReturn, roles: rolesToReturn, policies, views: views, _meta: { schemas: {}, tables: {}, columns: {}, }, internal: internals, }; }; const defaultForColumn = (column: any, internals: PgKitInternals, tableName: string) => { const columnName = column.column_name; const isArray = internals?.tables[tableName]?.columns[columnName]?.isArray ?? false; if ( column.column_default === null || column.column_default === undefined || column.data_type === 'serial' || column.data_type === 'smallserial' || column.data_type === 'bigserial' ) { return undefined; } if (column.column_default.endsWith('[]')) { column.column_default = column.column_default.slice(0, -2); } // if ( // !['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type) // ) { column.column_default = column.column_default.replace(/::(.*?)(? { if (['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type.slice(0, -2))) { return value; } else if (column.data_type.startsWith('timestamp')) { return `${value}`; } else if (column.data_type.slice(0, -2) === 'interval') { return value.replaceAll('"', `\"`); } else if (column.data_type.slice(0, -2) === 'boolean') { return value === 't' ? 'true' : 'false'; } else if (['json', 'jsonb'].includes(column.data_type.slice(0, -2))) { return JSON.stringify(JSON.stringify(JSON.parse(JSON.parse(value)), null, 0)); } else { return `\"${value}\"`; } }) .join(',') }}'`; } if (['integer', 'smallint', 'bigint', 'double precision', 'real'].includes(column.data_type)) { if (/^-?[\d.]+(?:e-?\d+)?$/.test(columnDefaultAsString)) { return Number(columnDefaultAsString); } else { if (typeof internals!.tables![tableName] === 'undefined') { internals!.tables![tableName] = { columns: { [columnName]: { isDefaultAnExpression: true, }, }, }; } else { if (typeof internals!.tables![tableName]!.columns[columnName] === 'undefined') { internals!.tables![tableName]!.columns[columnName] = { isDefaultAnExpression: true, }; } else { internals!.tables![tableName]!.columns[columnName]!.isDefaultAnExpression = true; } } return columnDefaultAsString; } } else if (column.data_type.includes('numeric')) { // if numeric(1,1) and used '99' -> psql stores like '99'::numeric return columnDefaultAsString.includes("'") ? columnDefaultAsString : `'${columnDefaultAsString}'`; } else if (column.data_type === 'json' || column.data_type === 'jsonb') { const jsonWithoutSpaces = JSON.stringify(JSON.parse(columnDefaultAsString.slice(1, -1))); return `'${jsonWithoutSpaces}'::${column.data_type}`; } else if (column.data_type === 'boolean') { return column.column_default === 'true'; } else if (columnDefaultAsString === 'NULL') { return `NULL`; } else if (columnDefaultAsString.startsWith("'") && columnDefaultAsString.endsWith("'")) { return columnDefaultAsString; } else { return `${columnDefaultAsString.replace(/\\/g, '`\\')}`; } }; const getColumnsInfoQuery = ({ schema, table, db }: { schema: string; table: string; db: DB }) => { return db.query( `SELECT a.attrelid::regclass::text AS table_name, -- Table, view, or materialized view name a.attname AS column_name, -- Column name CASE WHEN NOT a.attisdropped THEN CASE WHEN a.attnotnull THEN 'NO' ELSE 'YES' END ELSE NULL END AS is_nullable, -- NULL or NOT NULL constraint a.attndims AS array_dimensions, -- Array dimensions CASE WHEN a.atttypid = ANY ('{int,int8,int2}'::regtype[]) AND EXISTS ( SELECT FROM pg_attrdef ad WHERE ad.adrelid = a.attrelid AND ad.adnum = a.attnum AND pg_get_expr(ad.adbin, ad.adrelid) = 'nextval(''' || pg_get_serial_sequence(a.attrelid::regclass::text, a.attname)::regclass || '''::regclass)' ) THEN CASE a.atttypid WHEN 'int'::regtype THEN 'serial' WHEN 'int8'::regtype THEN 'bigserial' WHEN 'int2'::regtype THEN 'smallserial' END ELSE format_type(a.atttypid, a.atttypmod) END AS data_type, -- Column data type -- ns.nspname AS type_schema, -- Schema name pg_get_serial_sequence('"${schema}"."${table}"', a.attname)::regclass AS seq_name, -- Serial sequence (if any) c.column_default, -- Column default value c.data_type AS additional_dt, -- Data type from information_schema c.udt_name AS enum_name, -- Enum type (if applicable) c.is_generated, -- Is it a generated column? c.generation_expression, -- Generation expression (if generated) c.is_identity, -- Is it an identity column? c.identity_generation, -- Identity generation strategy (ALWAYS or BY DEFAULT) c.identity_start, -- Start value of identity column c.identity_increment, -- Increment for identity column c.identity_maximum, -- Maximum value for identity column c.identity_minimum, -- Minimum value for identity column c.identity_cycle, -- Does the identity column cycle? enum_ns.nspname AS type_schema -- Schema of the enum type FROM pg_attribute a JOIN pg_class cls ON cls.oid = a.attrelid -- Join pg_class to get table/view/materialized view info JOIN pg_namespace ns ON ns.oid = cls.relnamespace -- Join namespace to get schema info LEFT JOIN information_schema.columns c ON c.column_name = a.attname AND c.table_schema = ns.nspname AND c.table_name = cls.relname -- Match schema and table/view name LEFT JOIN pg_type enum_t ON enum_t.oid = a.atttypid -- Join to get the type info LEFT JOIN pg_namespace enum_ns ON enum_ns.oid = enum_t.typnamespace -- Join to get the enum schema WHERE a.attnum > 0 -- Valid column numbers only AND NOT a.attisdropped -- Skip dropped columns AND cls.relkind IN ('r', 'v', 'm') -- Include regular tables ('r'), views ('v'), and materialized views ('m') AND ns.nspname = '${schema}' -- Filter by schema AND cls.relname = '${table}' -- Filter by table name ORDER BY a.attnum; -- Order by column number`, ); }; ================================================ FILE: drizzle-kit/src/serializer/singlestoreImports.ts ================================================ import { is } from 'drizzle-orm'; import type { AnySingleStoreTable } from 'drizzle-orm/singlestore-core'; import { SingleStoreTable } from 'drizzle-orm/singlestore-core'; import { safeRegister } from '../cli/commands/utils'; export const prepareFromExports = (exports: Record) => { const tables: AnySingleStoreTable[] = []; /* const views: SingleStoreView[] = []; */ const i0values = Object.values(exports); i0values.forEach((t) => { if (is(t, SingleStoreTable)) { tables.push(t); } /* if (is(t, SingleStoreView)) { views.push(t); } */ }); return { tables /* views */ }; }; export const prepareFromSingleStoreImports = async (imports: string[]) => { const tables: AnySingleStoreTable[] = []; /* const views: SingleStoreView[] = []; */ await safeRegister(async () => { for (let i = 0; i < imports.length; i++) { const it = imports[i]; const i0: Record = require(`${it}`); const prepared = prepareFromExports(i0); tables.push(...prepared.tables); /* views.push(...prepared.views); */ } }); return { tables: Array.from(new Set(tables)) /* , views */ }; }; ================================================ FILE: drizzle-kit/src/serializer/singlestoreSchema.ts ================================================ import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; import { mapValues, originUUID, snapshotVersion } from '../global'; // ------- V3 -------- const index = object({ name: string(), columns: string().array(), isUnique: boolean(), using: enumType(['btree', 'hash']).optional(), algorithm: enumType(['default', 'inplace', 'copy']).optional(), lock: enumType(['default', 'none', 'shared', 'exclusive']).optional(), }).strict(); const column = object({ name: string(), type: string(), primaryKey: boolean(), notNull: boolean(), autoincrement: boolean().optional(), default: any().optional(), onUpdate: any().optional(), generated: object({ type: enumType(['stored', 'virtual']), as: string(), }).optional(), }).strict(); const compositePK = object({ name: string(), columns: string().array(), }).strict(); const uniqueConstraint = object({ name: string(), columns: string().array(), }).strict(); const table = object({ name: string(), columns: record(string(), column), indexes: record(string(), index), compositePrimaryKeys: record(string(), compositePK), uniqueConstraints: record(string(), uniqueConstraint).default({}), }).strict(); const viewMeta = object({ algorithm: enumType(['undefined', 'merge', 'temptable']), sqlSecurity: enumType(['definer', 'invoker']), withCheckOption: enumType(['local', 'cascaded']).optional(), }).strict(); /* export const view = object({ name: string(), columns: record(string(), column), definition: string().optional(), isExisting: boolean(), }).strict().merge(viewMeta); type SquasherViewMeta = Omit, 'definer'>; */ export const kitInternals = object({ tables: record( string(), object({ columns: record( string(), object({ isDefaultAnExpression: boolean().optional() }).optional(), ), }).optional(), ).optional(), indexes: record( string(), object({ columns: record( string(), object({ isExpression: boolean().optional() }).optional(), ), }).optional(), ).optional(), }).optional(); // use main dialect const dialect = literal('singlestore'); const schemaHash = object({ id: string(), prevId: string(), }); export const schemaInternal = object({ version: literal('1'), dialect: dialect, tables: record(string(), table), /* views: record(string(), view).default({}), */ _meta: object({ tables: record(string(), string()), columns: record(string(), string()), }), internal: kitInternals, }).strict(); export const schema = schemaInternal.merge(schemaHash); const tableSquashed = object({ name: string(), columns: record(string(), column), indexes: record(string(), string()), compositePrimaryKeys: record(string(), string()), uniqueConstraints: record(string(), string()).default({}), }).strict(); /* const viewSquashed = view.omit({ algorithm: true, sqlSecurity: true, withCheckOption: true, }).extend({ meta: string() }); */ export const schemaSquashed = object({ version: literal('1'), dialect: dialect, tables: record(string(), tableSquashed), /* views: record(string(), viewSquashed), */ }).strict(); export type Dialect = TypeOf; export type Column = TypeOf; export type Table = TypeOf; export type SingleStoreSchema = TypeOf; export type SingleStoreSchemaInternal = TypeOf; export type SingleStoreKitInternals = TypeOf; export type SingleStoreSchemaSquashed = TypeOf; export type Index = TypeOf; export type PrimaryKey = TypeOf; export type UniqueConstraint = TypeOf; /* export type View = TypeOf; */ /* export type ViewSquashed = TypeOf; */ export const SingleStoreSquasher = { squashIdx: (idx: Index) => { index.parse(idx); return `${idx.name};${idx.columns.join(',')};${idx.isUnique};${idx.using ?? ''};${idx.algorithm ?? ''};${ idx.lock ?? '' }`; }, unsquashIdx: (input: string): Index => { const [name, columnsString, isUnique, using, algorithm, lock] = input.split(';'); const destructed = { name, columns: columnsString.split(','), isUnique: isUnique === 'true', using: using ? using : undefined, algorithm: algorithm ? algorithm : undefined, lock: lock ? lock : undefined, }; return index.parse(destructed); }, squashPK: (pk: PrimaryKey) => { return `${pk.name};${pk.columns.join(',')}`; }, unsquashPK: (pk: string): PrimaryKey => { const splitted = pk.split(';'); return { name: splitted[0], columns: splitted[1].split(',') }; }, squashUnique: (unq: UniqueConstraint) => { return `${unq.name};${unq.columns.join(',')}`; }, unsquashUnique: (unq: string): UniqueConstraint => { const [name, columns] = unq.split(';'); return { name, columns: columns.split(',') }; }, /* squashView: (view: View): string => { return `${view.algorithm};${view.sqlSecurity};${view.withCheckOption}`; }, unsquashView: (meta: string): SquasherViewMeta => { const [algorithm, sqlSecurity, withCheckOption] = meta.split(';'); const toReturn = { algorithm: algorithm, sqlSecurity: sqlSecurity, withCheckOption: withCheckOption !== 'undefined' ? withCheckOption : undefined, }; return viewMeta.parse(toReturn); }, */ }; export const squashSingleStoreScheme = (json: SingleStoreSchema): SingleStoreSchemaSquashed => { const mappedTables = Object.fromEntries( Object.entries(json.tables).map((it) => { const squashedIndexes = mapValues(it[1].indexes, (index) => { return SingleStoreSquasher.squashIdx(index); }); const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { return SingleStoreSquasher.squashPK(pk); }); const squashedUniqueConstraints = mapValues( it[1].uniqueConstraints, (unq) => { return SingleStoreSquasher.squashUnique(unq); }, ); return [ it[0], { name: it[1].name, columns: it[1].columns, indexes: squashedIndexes, compositePrimaryKeys: squashedPKs, uniqueConstraints: squashedUniqueConstraints, }, ]; }), ); /* const mappedViews = Object.fromEntries( Object.entries(json.views).map(([key, value]) => { const meta = SingleStoreSquasher.squashView(value); return [key, { name: value.name, isExisting: value.isExisting, columns: value.columns, definition: value.definition, meta, }]; }), ); */ return { version: '1', dialect: json.dialect, tables: mappedTables, /* views: mappedViews, */ }; }; export const singlestoreSchema = schema; export const singlestoreSchemaSquashed = schemaSquashed; // no prev version export const backwardCompatibleSingleStoreSchema = union([singlestoreSchema, schema]); export const drySingleStore = singlestoreSchema.parse({ version: '1', dialect: 'singlestore', id: originUUID, prevId: '', tables: {}, schemas: {}, /* views: {}, */ _meta: { schemas: {}, tables: {}, columns: {}, }, }); ================================================ FILE: drizzle-kit/src/serializer/singlestoreSerializer.ts ================================================ import chalk from 'chalk'; import { is, SQL } from 'drizzle-orm'; import { AnySingleStoreTable, getTableConfig, type PrimaryKey as PrimaryKeyORM, SingleStoreDialect, uniqueKeyName, } from 'drizzle-orm/singlestore-core'; import { RowDataPacket } from 'mysql2/promise'; import { withStyle } from '../cli/validations/outputs'; import { IntrospectStage, IntrospectStatus } from '../cli/views'; import { CasingType } from 'src/cli/validations/common'; import type { DB } from '../utils'; import { Column, Index, PrimaryKey, SingleStoreKitInternals, SingleStoreSchemaInternal, Table, UniqueConstraint, } from './singlestoreSchema'; import { sqlToStr } from './utils'; const dialect = new SingleStoreDialect(); export const indexName = (tableName: string, columns: string[]) => { return `${tableName}_${columns.join('_')}_index`; }; export const generateSingleStoreSnapshot = ( tables: AnySingleStoreTable[], /* views: SingleStoreView[], */ casing: CasingType | undefined, ): SingleStoreSchemaInternal => { const dialect = new SingleStoreDialect({ casing }); const result: Record = {}; /* const resultViews: Record = {}; */ const internal: SingleStoreKitInternals = { tables: {}, indexes: {} }; for (const table of tables) { const { name: tableName, columns, indexes, schema, primaryKeys, uniqueConstraints, } = getTableConfig(table); const columnsObject: Record = {}; const indexesObject: Record = {}; const primaryKeysObject: Record = {}; const uniqueConstraintObject: Record = {}; columns.forEach((column) => { const notNull: boolean = column.notNull; const sqlTypeLowered = column.getSQLType().toLowerCase(); const autoIncrement = typeof (column as any).autoIncrement === 'undefined' ? false : (column as any).autoIncrement; const generated = column.generated; const columnToSet: Column = { name: column.name, type: column.getSQLType(), primaryKey: false, // If field is autoincrement it's notNull by default // notNull: autoIncrement ? true : notNull, notNull, autoincrement: autoIncrement, onUpdate: (column as any).hasOnUpdateNow, generated: generated ? { as: is(generated.as, SQL) ? dialect.sqlToQuery(generated.as as SQL).sql : typeof generated.as === 'function' ? dialect.sqlToQuery(generated.as() as SQL).sql : (generated.as as any), type: generated.mode ?? 'stored', } : undefined, }; if (column.primary) { primaryKeysObject[`${tableName}_${column.name}`] = { name: `${tableName}_${column.name}`, columns: [column.name], }; } if (column.isUnique) { const existingUnique = uniqueConstraintObject[column.uniqueName!]; if (typeof existingUnique !== 'undefined') { console.log( `\n${ withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ chalk.underline.blue( tableName, ) } table. The unique constraint ${ chalk.underline.blue( column.uniqueName, ) } on the ${ chalk.underline.blue( column.name, ) } column is confilcting with a unique constraint name already defined for ${ chalk.underline.blue( existingUnique.columns.join(','), ) } columns\n`) }`, ); process.exit(1); } uniqueConstraintObject[column.uniqueName!] = { name: column.uniqueName!, columns: [columnToSet.name], }; } if (column.default !== undefined) { if (is(column.default, SQL)) { columnToSet.default = sqlToStr(column.default, casing); } else { if (typeof column.default === 'string') { columnToSet.default = `'${column.default}'`; } else { if (sqlTypeLowered === 'json' || Array.isArray(column.default)) { columnToSet.default = `'${JSON.stringify(column.default)}'`; } else if (column.default instanceof Date) { if (sqlTypeLowered === 'date') { columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; } else if ( sqlTypeLowered.startsWith('datetime') || sqlTypeLowered.startsWith('timestamp') ) { columnToSet.default = `'${ column.default .toISOString() .replace('T', ' ') .slice(0, 23) }'`; } } else { columnToSet.default = column.default; } } // if (['blob', 'text', 'json'].includes(column.getSQLType())) { // columnToSet.default = `(${columnToSet.default})`; // } } } columnsObject[column.name] = columnToSet; }); primaryKeys.map((pk: PrimaryKeyORM) => { const columnNames = pk.columns.map((c: any) => c.name); primaryKeysObject[pk.getName()] = { name: pk.getName(), columns: columnNames, }; // all composite pk's should be treated as notNull for (const column of pk.columns) { columnsObject[column.name].notNull = true; } }); uniqueConstraints?.map((unq) => { const columnNames = unq.columns.map((c) => c.name); const name = unq.name ?? uniqueKeyName(table, columnNames); const existingUnique = uniqueConstraintObject[name]; if (typeof existingUnique !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated unique constraint names in ${ chalk.underline.blue( tableName, ) } table. \nThe unique constraint ${ chalk.underline.blue( name, ) } on the ${ chalk.underline.blue( columnNames.join(','), ) } columns is confilcting with a unique constraint name already defined for ${ chalk.underline.blue( existingUnique.columns.join(','), ) } columns\n`, ) }`, ); process.exit(1); } uniqueConstraintObject[name] = { name: unq.name!, columns: columnNames, }; }); indexes.forEach((value) => { const columns = value.config.columns; const name = value.config.name; let indexColumns = columns.map((it) => { if (is(it, SQL)) { const sql = dialect.sqlToQuery(it, 'indexes').sql; if (typeof internal!.indexes![name] === 'undefined') { internal!.indexes![name] = { columns: { [sql]: { isExpression: true, }, }, }; } else { if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') { internal!.indexes![name]!.columns[sql] = { isExpression: true, }; } else { internal!.indexes![name]!.columns[sql]!.isExpression = true; } } return sql; } else { return `${it.name}`; } }); if (value.config.unique) { if (typeof uniqueConstraintObject[name] !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated unique constraint names in ${ chalk.underline.blue( tableName, ) } table. \nThe unique index ${ chalk.underline.blue( name, ) } on the ${ chalk.underline.blue( indexColumns.join(','), ) } columns is confilcting with a unique constraint name already defined for ${ chalk.underline.blue( uniqueConstraintObject[name].columns.join(','), ) } columns\n`, ) }`, ); process.exit(1); } } indexesObject[name] = { name, columns: indexColumns, isUnique: value.config.unique ?? false, using: value.config.using, algorithm: value.config.algorithm, lock: value.config.lock, }; }); // only handle tables without schemas if (!schema) { result[tableName] = { name: tableName, columns: columnsObject, indexes: indexesObject, compositePrimaryKeys: primaryKeysObject, uniqueConstraints: uniqueConstraintObject, }; } } /* for (const view of views) { const { isExisting, name, query, schema, selectedFields, algorithm, sqlSecurity, withCheckOption, } = getViewConfig(view); const columnsObject: Record = {}; const existingView = resultViews[name]; if (typeof existingView !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated view name across ${ chalk.underline.blue( schema ?? 'public', ) } schema. Please rename your view`, ) }`, ); process.exit(1); } for (const key in selectedFields) { if (is(selectedFields[key], SingleStoreColumn)) { const column = selectedFields[key]; const notNull: boolean = column.notNull; const sqlTypeLowered = column.getSQLType().toLowerCase(); const autoIncrement = typeof (column as any).autoIncrement === 'undefined' ? false : (column as any).autoIncrement; const generated = column.generated; const columnToSet: Column = { name: column.name, type: column.getSQLType(), primaryKey: false, // If field is autoincrement it's notNull by default // notNull: autoIncrement ? true : notNull, notNull, autoincrement: autoIncrement, onUpdate: (column as any).hasOnUpdateNow, generated: generated ? { as: is(generated.as, SQL) ? dialect.sqlToQuery(generated.as as SQL).sql : typeof generated.as === 'function' ? dialect.sqlToQuery(generated.as() as SQL).sql : (generated.as as any), type: generated.mode ?? 'stored', } : undefined, }; if (column.default !== undefined) { if (is(column.default, SQL)) { columnToSet.default = sqlToStr(column.default, casing); } else { if (typeof column.default === 'string') { columnToSet.default = `'${column.default}'`; } else { if (sqlTypeLowered === 'json') { columnToSet.default = `'${JSON.stringify(column.default)}'`; } else if (column.default instanceof Date) { if (sqlTypeLowered === 'date') { columnToSet.default = `'${column.default.toISOString().split('T')[0]}'`; } else if ( sqlTypeLowered.startsWith('datetime') || sqlTypeLowered.startsWith('timestamp') ) { columnToSet.default = `'${ column.default .toISOString() .replace('T', ' ') .slice(0, 23) }'`; } } else { columnToSet.default = column.default; } } } } columnsObject[column.name] = columnToSet; } } resultViews[name] = { columns: columnsObject, name, isExisting, definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, withCheckOption, algorithm: algorithm ?? 'undefined', // set default values sqlSecurity: sqlSecurity ?? 'definer', // set default values }; } */ return { version: '1', dialect: 'singlestore', tables: result, /* views: resultViews, */ _meta: { tables: {}, columns: {}, }, internal, }; }; function clearDefaults(defaultValue: any, collate: string) { if (typeof collate === 'undefined' || collate === null) { collate = `utf8mb4`; } let resultDefault = defaultValue; collate = `_${collate}`; if (defaultValue.startsWith(collate)) { resultDefault = resultDefault .substring(collate.length, defaultValue.length) .replace(/\\/g, ''); if (resultDefault.startsWith("'") && resultDefault.endsWith("'")) { return `('${resultDefault.substring(1, resultDefault.length - 1)}')`; } else { return `'${resultDefault}'`; } } else { return `(${resultDefault})`; } } export const fromDatabase = async ( db: DB, inputSchema: string, tablesFilter: (table: string) => boolean = (table) => true, progressCallback?: ( stage: IntrospectStage, count: number, status: IntrospectStatus, ) => void, ): Promise => { const result: Record = {}; const internals: SingleStoreKitInternals = { tables: {}, indexes: {} }; const columns = await db.query(`select * from information_schema.columns where table_schema = '${inputSchema}' and table_name != '__drizzle_migrations' order by table_name, ordinal_position;`); const response = columns as RowDataPacket[]; const schemas: string[] = []; let columnsCount = 0; let tablesCount = new Set(); let indexesCount = 0; /* let viewsCount = 0; */ const idxs = await db.query( `select * from INFORMATION_SCHEMA.STATISTICS WHERE INFORMATION_SCHEMA.STATISTICS.TABLE_SCHEMA = '${inputSchema}' and INFORMATION_SCHEMA.STATISTICS.INDEX_NAME != 'PRIMARY';`, ); const idxRows = idxs as RowDataPacket[]; for (const column of response) { if (!tablesFilter(column['TABLE_NAME'] as string)) continue; columnsCount += 1; if (progressCallback) { progressCallback('columns', columnsCount, 'fetching'); } const schema: string = column['TABLE_SCHEMA']; const tableName = column['TABLE_NAME']; tablesCount.add(`${schema}.${tableName}`); if (progressCallback) { progressCallback('columns', tablesCount.size, 'fetching'); } const columnName: string = column['COLUMN_NAME']; const isNullable = column['IS_NULLABLE'] === 'YES'; // 'YES', 'NO' const dataType = column['DATA_TYPE']; // varchar const columnType = column['COLUMN_TYPE']; // varchar(256) // const columnType = column["DATA_TYPE"]; const isPrimary = column['COLUMN_KEY'] === 'PRI'; // 'PRI', '' let columnDefault: string | null = column['COLUMN_DEFAULT']; const collation: string = column['CHARACTER_SET_NAME']; const geenratedExpression: string = column['GENERATION_EXPRESSION']; let columnExtra = column['EXTRA']; let isAutoincrement = false; // 'auto_increment', '' let isDefaultAnExpression = false; // 'auto_increment', '' if (typeof column['EXTRA'] !== 'undefined') { columnExtra = column['EXTRA']; isAutoincrement = column['EXTRA'] === 'auto_increment'; // 'auto_increment', '' isDefaultAnExpression = column['EXTRA'].includes('DEFAULT_GENERATED'); // 'auto_increment', '' } // if (isPrimary) { // if (typeof tableToPk[tableName] === "undefined") { // tableToPk[tableName] = [columnName]; // } else { // tableToPk[tableName].push(columnName); // } // } if (schema !== inputSchema) { schemas.push(schema); } const table = result[tableName]; // let changedType = columnType.replace("bigint unsigned", "serial") let changedType = columnType; if (columnType === 'bigint unsigned' && !isNullable && isAutoincrement) { // check unique here const uniqueIdx = idxRows.filter( (it) => it['COLUMN_NAME'] === columnName && it['TABLE_NAME'] === tableName && it['NON_UNIQUE'] === 0, ); if (uniqueIdx && uniqueIdx.length === 1) { changedType = columnType.replace('bigint unsigned', 'serial'); } } if ( columnType.startsWith('bigint(') || columnType.startsWith('tinyint(') || columnType.startsWith('date(') || columnType.startsWith('int(') || columnType.startsWith('mediumint(') || columnType.startsWith('smallint(') || columnType.startsWith('text(') || columnType.startsWith('time(') || columnType.startsWith('year(') ) { changedType = columnType.replace(/\(\s*[^)]*\)$/, ''); } if (columnType.includes('decimal(10,0)')) { changedType = columnType.replace('decimal(10,0)', 'decimal'); } if (columnDefault?.endsWith('.')) { columnDefault = columnDefault.slice(0, -1); } let onUpdate: boolean | undefined = undefined; if ( columnType.startsWith('timestamp') && typeof columnExtra !== 'undefined' && columnExtra.includes('on update CURRENT_TIMESTAMP') ) { onUpdate = true; } const newColumn: Column = { default: columnDefault === null ? undefined : /^-?[\d.]+(?:e-?\d+)?$/.test(columnDefault) && !['decimal', 'char', 'varchar'].some((type) => columnType.startsWith(type)) ? Number(columnDefault) : isDefaultAnExpression ? clearDefaults(columnDefault, collation) : columnDefault.startsWith('CURRENT_TIMESTAMP') ? 'CURRENT_TIMESTAMP' : `'${columnDefault}'`, autoincrement: isAutoincrement, name: columnName, type: changedType, primaryKey: false, notNull: !isNullable, onUpdate, generated: geenratedExpression ? { as: geenratedExpression, type: columnExtra === 'VIRTUAL GENERATED' ? 'virtual' : 'stored', } : undefined, }; // Set default to internal object if (isDefaultAnExpression) { if (typeof internals!.tables![tableName] === 'undefined') { internals!.tables![tableName] = { columns: { [columnName]: { isDefaultAnExpression: true, }, }, }; } else { if ( typeof internals!.tables![tableName]!.columns[columnName] === 'undefined' ) { internals!.tables![tableName]!.columns[columnName] = { isDefaultAnExpression: true, }; } else { internals!.tables![tableName]!.columns[ columnName ]!.isDefaultAnExpression = true; } } } if (!table) { result[tableName] = { name: tableName, columns: { [columnName]: newColumn, }, compositePrimaryKeys: {}, indexes: {}, uniqueConstraints: {}, }; } else { result[tableName]!.columns[columnName] = newColumn; } } const tablePks = await db.query( `SELECT table_name, column_name, ordinal_position FROM information_schema.table_constraints t LEFT JOIN information_schema.key_column_usage k USING(constraint_name,table_schema,table_name) WHERE t.constraint_type='UNIQUE' and table_name != '__drizzle_migrations' AND t.table_schema = '${inputSchema}' ORDER BY ordinal_position`, ); const tableToPk: { [tname: string]: string[] } = {}; const tableToPkRows = tablePks as RowDataPacket[]; for (const tableToPkRow of tableToPkRows) { const tableName: string = tableToPkRow['table_name']; const columnName: string = tableToPkRow['column_name']; const position: string = tableToPkRow['ordinal_position']; if (typeof result[tableName] === 'undefined') { continue; } if (typeof tableToPk[tableName] === 'undefined') { tableToPk[tableName] = [columnName]; } else { tableToPk[tableName].push(columnName); } } for (const [key, value] of Object.entries(tableToPk)) { // if (value.length > 1) { result[key].compositePrimaryKeys = { [`${key}_${value.join('_')}`]: { name: `${key}_${value.join('_')}`, columns: value, }, }; // } else if (value.length === 1) { // result[key].columns[value[0]].primaryKey = true; // } else { // } } if (progressCallback) { progressCallback('columns', columnsCount, 'done'); progressCallback('tables', tablesCount.size, 'done'); } for (const idxRow of idxRows) { const tableSchema = idxRow['TABLE_SCHEMA']; const tableName = idxRow['TABLE_NAME']; const constraintName = idxRow['INDEX_NAME']; const columnName: string = idxRow['COLUMN_NAME']; const isUnique = idxRow['NON_UNIQUE'] === 0; const tableInResult = result[tableName]; if (typeof tableInResult === 'undefined') continue; // if (tableInResult.columns[columnName].type === "serial") continue; indexesCount += 1; if (progressCallback) { progressCallback('indexes', indexesCount, 'fetching'); } if (isUnique) { if ( typeof tableInResult.uniqueConstraints[constraintName] !== 'undefined' ) { tableInResult.uniqueConstraints[constraintName]!.columns.push( columnName, ); } else { tableInResult.uniqueConstraints[constraintName] = { name: constraintName, columns: [columnName], }; } } } /* const views = await db.query( `select * from INFORMATION_SCHEMA.VIEWS WHERE table_schema = '${inputSchema}';`, ); */ /* const resultViews: Record = {}; */ /* viewsCount = views.length; if (progressCallback) { progressCallback('views', viewsCount, 'fetching'); } for await (const view of views) { const viewName = view['TABLE_NAME']; const definition = view['VIEW_DEFINITION']; const withCheckOption = view['CHECK_OPTION'] === 'NONE' ? undefined : view['CHECK_OPTION'].toLowerCase(); const sqlSecurity = view['SECURITY_TYPE'].toLowerCase(); const [createSqlStatement] = await db.query( `SHOW CREATE VIEW \`${viewName}\`;`, ); const algorithmMatch = createSqlStatement['Create View'].match(/ALGORITHM=([^ ]+)/); const algorithm = algorithmMatch ? algorithmMatch[1].toLowerCase() : undefined; const columns = result[viewName].columns; delete result[viewName]; resultViews[viewName] = { columns: columns, isExisting: false, name: viewName, algorithm, definition, sqlSecurity, withCheckOption, }; } */ if (progressCallback) { progressCallback('indexes', indexesCount, 'done'); // progressCallback("enums", 0, "fetching"); progressCallback('enums', 0, 'done'); } return { version: '1', dialect: 'singlestore', tables: result, /* views: resultViews, */ _meta: { tables: {}, columns: {}, }, internal: internals, }; }; ================================================ FILE: drizzle-kit/src/serializer/sqliteImports.ts ================================================ import { is } from 'drizzle-orm'; import type { AnySQLiteTable } from 'drizzle-orm/sqlite-core'; import { SQLiteTable, SQLiteView } from 'drizzle-orm/sqlite-core'; import { safeRegister } from '../cli/commands/utils'; export const prepareFromExports = (exports: Record) => { const tables: AnySQLiteTable[] = []; const views: SQLiteView[] = []; const i0values = Object.values(exports); i0values.forEach((t) => { if (is(t, SQLiteTable)) { tables.push(t); } if (is(t, SQLiteView)) { views.push(t); } }); return { tables, views }; }; export const prepareFromSqliteImports = async (imports: string[]) => { const tables: AnySQLiteTable[] = []; const views: SQLiteView[] = []; await safeRegister(async () => { for (let i = 0; i < imports.length; i++) { const it = imports[i]; const i0: Record = require(`${it}`); const prepared = prepareFromExports(i0); tables.push(...prepared.tables); views.push(...prepared.views); } }); return { tables: Array.from(new Set(tables)), views }; }; ================================================ FILE: drizzle-kit/src/serializer/sqliteSchema.ts ================================================ import { any, boolean, enum as enumType, literal, object, record, string, TypeOf, union } from 'zod'; import { customMapEntries, mapValues, originUUID } from '../global'; // ------- V3 -------- const index = object({ name: string(), columns: string().array(), where: string().optional(), isUnique: boolean(), }).strict(); const fk = object({ name: string(), tableFrom: string(), columnsFrom: string().array(), tableTo: string(), columnsTo: string().array(), onUpdate: string().optional(), onDelete: string().optional(), }).strict(); const compositePK = object({ columns: string().array(), name: string().optional(), }).strict(); const column = object({ name: string(), type: string(), primaryKey: boolean(), notNull: boolean(), autoincrement: boolean().optional(), default: any().optional(), generated: object({ type: enumType(['stored', 'virtual']), as: string(), }).optional(), }).strict(); const tableV3 = object({ name: string(), columns: record(string(), column), indexes: record(string(), index), foreignKeys: record(string(), fk), }).strict(); const uniqueConstraint = object({ name: string(), columns: string().array(), }).strict(); const checkConstraint = object({ name: string(), value: string(), }).strict(); const table = object({ name: string(), columns: record(string(), column), indexes: record(string(), index), foreignKeys: record(string(), fk), compositePrimaryKeys: record(string(), compositePK), uniqueConstraints: record(string(), uniqueConstraint).default({}), checkConstraints: record(string(), checkConstraint).default({}), }).strict(); export const view = object({ name: string(), columns: record(string(), column), definition: string().optional(), isExisting: boolean(), }).strict(); // use main dialect const dialect = enumType(['sqlite']); const schemaHash = object({ id: string(), prevId: string(), }).strict(); export const schemaInternalV3 = object({ version: literal('3'), dialect: dialect, tables: record(string(), tableV3), enums: object({}), }).strict(); export const schemaInternalV4 = object({ version: literal('4'), dialect: dialect, tables: record(string(), table), views: record(string(), view).default({}), enums: object({}), }).strict(); export const schemaInternalV5 = object({ version: literal('5'), dialect: dialect, tables: record(string(), table), enums: object({}), _meta: object({ tables: record(string(), string()), columns: record(string(), string()), }), }).strict(); export const kitInternals = object({ indexes: record( string(), object({ columns: record( string(), object({ isExpression: boolean().optional() }).optional(), ), }).optional(), ).optional(), }).optional(); const latestVersion = literal('6'); export const schemaInternal = object({ version: latestVersion, dialect: dialect, tables: record(string(), table), views: record(string(), view).default({}), enums: object({}), _meta: object({ tables: record(string(), string()), columns: record(string(), string()), }), internal: kitInternals, }).strict(); export const schemaV3 = schemaInternalV3.merge(schemaHash).strict(); export const schemaV4 = schemaInternalV4.merge(schemaHash).strict(); export const schemaV5 = schemaInternalV5.merge(schemaHash).strict(); export const schema = schemaInternal.merge(schemaHash).strict(); const tableSquashed = object({ name: string(), columns: record(string(), column), indexes: record(string(), string()), foreignKeys: record(string(), string()), compositePrimaryKeys: record(string(), string()), uniqueConstraints: record(string(), string()).default({}), checkConstraints: record(string(), string()).default({}), }).strict(); export const schemaSquashed = object({ version: latestVersion, dialect: dialect, tables: record(string(), tableSquashed), views: record(string(), view), enums: any(), }).strict(); export type Dialect = TypeOf; export type Column = TypeOf; export type Table = TypeOf; export type SQLiteSchema = TypeOf; export type SQLiteSchemaV3 = TypeOf; export type SQLiteSchemaV4 = TypeOf; export type SQLiteSchemaInternal = TypeOf; export type SQLiteSchemaSquashed = TypeOf; export type SQLiteKitInternals = TypeOf; export type Index = TypeOf; export type ForeignKey = TypeOf; export type PrimaryKey = TypeOf; export type UniqueConstraint = TypeOf; export type CheckConstraint = TypeOf; export type View = TypeOf; export const SQLiteSquasher = { squashIdx: (idx: Index) => { index.parse(idx); return `${idx.name};${idx.columns.join(',')};${idx.isUnique};${idx.where ?? ''}`; }, unsquashIdx: (input: string): Index => { const [name, columnsString, isUnique, where] = input.split(';'); const result: Index = index.parse({ name, columns: columnsString.split(','), isUnique: isUnique === 'true', where: where ?? undefined, }); return result; }, squashUnique: (unq: UniqueConstraint) => { return `${unq.name};${unq.columns.join(',')}`; }, unsquashUnique: (unq: string): UniqueConstraint => { const [name, columns] = unq.split(';'); return { name, columns: columns.split(',') }; }, squashFK: (fk: ForeignKey) => { return `${fk.name};${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${ fk.onUpdate ?? '' };${fk.onDelete ?? ''}`; }, unsquashFK: (input: string): ForeignKey => { const [ name, tableFrom, columnsFromStr, tableTo, columnsToStr, onUpdate, onDelete, ] = input.split(';'); const result: ForeignKey = fk.parse({ name, tableFrom, columnsFrom: columnsFromStr.split(','), tableTo, columnsTo: columnsToStr.split(','), onUpdate, onDelete, }); return result; }, squashPushFK: (fk: ForeignKey) => { return `${fk.tableFrom};${fk.columnsFrom.join(',')};${fk.tableTo};${fk.columnsTo.join(',')};${fk.onUpdate ?? ''};${ fk.onDelete ?? '' }`; }, unsquashPushFK: (input: string): ForeignKey => { const [ tableFrom, columnsFromStr, tableTo, columnsToStr, onUpdate, onDelete, ] = input.split(';'); const result: ForeignKey = fk.parse({ name: '', tableFrom, columnsFrom: columnsFromStr.split(','), tableTo, columnsTo: columnsToStr.split(','), onUpdate, onDelete, }); return result; }, squashPK: (pk: PrimaryKey) => { return pk.columns.join(','); }, unsquashPK: (pk: string) => { return pk.split(','); }, squashCheck: (check: CheckConstraint) => { return `${check.name};${check.value}`; }, unsquashCheck: (input: string): CheckConstraint => { const [ name, value, ] = input.split(';'); return { name, value }; }, }; export const squashSqliteScheme = ( json: SQLiteSchema | SQLiteSchemaV4, action?: 'push' | undefined, ): SQLiteSchemaSquashed => { const mappedTables = Object.fromEntries( Object.entries(json.tables).map((it) => { const squashedIndexes = mapValues(it[1].indexes, (index: Index) => { return SQLiteSquasher.squashIdx(index); }); const squashedFKs = customMapEntries( it[1].foreignKeys, (key, value) => { return action === 'push' ? [ SQLiteSquasher.squashPushFK(value), SQLiteSquasher.squashPushFK(value), ] : [key, SQLiteSquasher.squashFK(value)]; }, ); const squashedPKs = mapValues(it[1].compositePrimaryKeys, (pk) => { return SQLiteSquasher.squashPK(pk); }); const squashedUniqueConstraints = mapValues( it[1].uniqueConstraints, (unq) => { return SQLiteSquasher.squashUnique(unq); }, ); const squashedCheckConstraints = mapValues( it[1].checkConstraints, (check) => { return SQLiteSquasher.squashCheck(check); }, ); return [ it[0], { name: it[1].name, columns: it[1].columns, indexes: squashedIndexes, foreignKeys: squashedFKs, compositePrimaryKeys: squashedPKs, uniqueConstraints: squashedUniqueConstraints, checkConstraints: squashedCheckConstraints, }, ]; }), ); return { version: '6', dialect: json.dialect, tables: mappedTables, views: json.views, enums: json.enums, }; }; export const drySQLite = schema.parse({ version: '6', dialect: 'sqlite', id: originUUID, prevId: '', tables: {}, views: {}, enums: {}, _meta: { tables: {}, columns: {}, }, }); export const sqliteSchemaV3 = schemaV3; export const sqliteSchemaV4 = schemaV4; export const sqliteSchemaV5 = schemaV5; export const sqliteSchema = schema; export const SQLiteSchemaSquashed = schemaSquashed; export const backwardCompatibleSqliteSchema = union([sqliteSchemaV5, schema]); ================================================ FILE: drizzle-kit/src/serializer/sqliteSerializer.ts ================================================ import chalk from 'chalk'; import { getTableName, is, SQL } from 'drizzle-orm'; import { AnySQLiteTable, getTableConfig, getViewConfig, SQLiteBaseInteger, SQLiteColumn, SQLiteSyncDialect, SQLiteView, uniqueKeyName, } from 'drizzle-orm/sqlite-core'; import { CasingType } from 'src/cli/validations/common'; import { withStyle } from '../cli/validations/outputs'; import type { IntrospectStage, IntrospectStatus } from '../cli/views'; import type { CheckConstraint, Column, ForeignKey, Index, PrimaryKey, SQLiteKitInternals, SQLiteSchemaInternal, Table, UniqueConstraint, View, } from '../serializer/sqliteSchema'; import { escapeSingleQuotes, type SQLiteDB } from '../utils'; import { getColumnCasing, sqlToStr } from './utils'; export const generateSqliteSnapshot = ( tables: AnySQLiteTable[], views: SQLiteView[], casing: CasingType | undefined, ): SQLiteSchemaInternal => { const dialect = new SQLiteSyncDialect({ casing }); const result: Record = {}; const resultViews: Record = {}; const internal: SQLiteKitInternals = { indexes: {} }; for (const table of tables) { // const tableName = getTableName(table); const columnsObject: Record = {}; const indexesObject: Record = {}; const foreignKeysObject: Record = {}; const primaryKeysObject: Record = {}; const uniqueConstraintObject: Record = {}; const checkConstraintObject: Record = {}; const checksInTable: Record = {}; const { name: tableName, columns, indexes, checks, foreignKeys: tableForeignKeys, primaryKeys, uniqueConstraints, } = getTableConfig(table); columns.forEach((column) => { const name = getColumnCasing(column, casing); const notNull: boolean = column.notNull; const primaryKey: boolean = column.primary; const generated = column.generated; const columnToSet: Column = { name, type: column.getSQLType(), primaryKey, notNull, autoincrement: is(column, SQLiteBaseInteger) ? column.autoIncrement : false, generated: generated ? { as: is(generated.as, SQL) ? `(${dialect.sqlToQuery(generated.as as SQL, 'indexes').sql})` : typeof generated.as === 'function' ? `(${dialect.sqlToQuery(generated.as() as SQL, 'indexes').sql})` : `(${generated.as as any})`, type: generated.mode ?? 'virtual', } : undefined, }; if (column.default !== undefined) { if (is(column.default, SQL)) { columnToSet.default = sqlToStr(column.default, casing); } else { columnToSet.default = typeof column.default === 'string' ? `'${escapeSingleQuotes(column.default)}'` : typeof column.default === 'object' || Array.isArray(column.default) ? `'${JSON.stringify(column.default)}'` : column.default; } } columnsObject[name] = columnToSet; if (column.isUnique) { const existingUnique = indexesObject[column.uniqueName!]; if (typeof existingUnique !== 'undefined') { console.log( `\n${ withStyle.errorWarning(`We\'ve found duplicated unique constraint names in ${ chalk.underline.blue( tableName, ) } table. The unique constraint ${ chalk.underline.blue( column.uniqueName, ) } on the ${ chalk.underline.blue( name, ) } column is confilcting with a unique constraint name already defined for ${ chalk.underline.blue( existingUnique.columns.join(','), ) } columns\n`) }`, ); process.exit(1); } indexesObject[column.uniqueName!] = { name: column.uniqueName!, columns: [columnToSet.name], isUnique: true, }; } }); const foreignKeys: ForeignKey[] = tableForeignKeys.map((fk) => { const tableFrom = tableName; const onDelete = fk.onDelete ?? 'no action'; const onUpdate = fk.onUpdate ?? 'no action'; const reference = fk.reference(); const referenceFT = reference.foreignTable; // eslint-disable-next-line @typescript-eslint/no-unsafe-argument const tableTo = getTableName(referenceFT); const originalColumnsFrom = reference.columns.map((it) => it.name); const columnsFrom = reference.columns.map((it) => getColumnCasing(it, casing)); const originalColumnsTo = reference.foreignColumns.map((it) => it.name); const columnsTo = reference.foreignColumns.map((it) => getColumnCasing(it, casing)); let name = fk.getName(); if (casing !== undefined) { for (let i = 0; i < originalColumnsFrom.length; i++) { name = name.replace(originalColumnsFrom[i], columnsFrom[i]); } for (let i = 0; i < originalColumnsTo.length; i++) { name = name.replace(originalColumnsTo[i], columnsTo[i]); } } return { name, tableFrom, tableTo, columnsFrom, columnsTo, onDelete, onUpdate, } as ForeignKey; }); foreignKeys.forEach((it) => { foreignKeysObject[it.name] = it; }); indexes.forEach((value) => { const columns = value.config.columns; const name = value.config.name; let indexColumns = columns.map((it) => { if (is(it, SQL)) { const sql = dialect.sqlToQuery(it, 'indexes').sql; if (typeof internal!.indexes![name] === 'undefined') { internal!.indexes![name] = { columns: { [sql]: { isExpression: true, }, }, }; } else { if (typeof internal!.indexes![name]?.columns[sql] === 'undefined') { internal!.indexes![name]!.columns[sql] = { isExpression: true, }; } else { internal!.indexes![name]!.columns[sql]!.isExpression = true; } } return sql; } else { return getColumnCasing(it, casing); } }); let where: string | undefined = undefined; if (value.config.where !== undefined) { if (is(value.config.where, SQL)) { where = dialect.sqlToQuery(value.config.where).sql; } } indexesObject[name] = { name, columns: indexColumns, isUnique: value.config.unique ?? false, where, }; }); uniqueConstraints?.map((unq) => { const columnNames = unq.columns.map((c) => getColumnCasing(c, casing)); const name = unq.name ?? uniqueKeyName(table, columnNames); const existingUnique = indexesObject[name]; if (typeof existingUnique !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated unique constraint names in ${ chalk.underline.blue( tableName, ) } table. \nThe unique constraint ${ chalk.underline.blue( name, ) } on the ${ chalk.underline.blue( columnNames.join(','), ) } columns is confilcting with a unique constraint name already defined for ${ chalk.underline.blue( existingUnique.columns.join(','), ) } columns\n`, ) }`, ); process.exit(1); } indexesObject[name] = { name: unq.name!, columns: columnNames, isUnique: true, }; }); primaryKeys.forEach((it) => { if (it.columns.length > 1) { const originalColumnNames = it.columns.map((c) => c.name); const columnNames = it.columns.map((c) => getColumnCasing(c, casing)); let name = it.getName(); if (casing !== undefined) { for (let i = 0; i < originalColumnNames.length; i++) { name = name.replace(originalColumnNames[i], columnNames[i]); } } primaryKeysObject[name] = { columns: columnNames, name, }; } else { columnsObject[getColumnCasing(it.columns[0], casing)].primaryKey = true; } }); checks.forEach((check) => { const checkName = check.name; if (typeof checksInTable[tableName] !== 'undefined') { if (checksInTable[tableName].includes(check.name)) { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated check constraint name in ${ chalk.underline.blue( tableName, ) }. Please rename your check constraint in the ${ chalk.underline.blue( tableName, ) } table`, ) }`, ); process.exit(1); } checksInTable[tableName].push(checkName); } else { checksInTable[tableName] = [check.name]; } checkConstraintObject[checkName] = { name: checkName, value: dialect.sqlToQuery(check.value).sql, }; }); result[tableName] = { name: tableName, columns: columnsObject, indexes: indexesObject, foreignKeys: foreignKeysObject, compositePrimaryKeys: primaryKeysObject, uniqueConstraints: uniqueConstraintObject, checkConstraints: checkConstraintObject, }; } for (const view of views) { const { name, isExisting, selectedFields, query, schema } = getViewConfig(view); const columnsObject: Record = {}; const existingView = resultViews[name]; if (typeof existingView !== 'undefined') { console.log( `\n${ withStyle.errorWarning( `We\'ve found duplicated view name across ${ chalk.underline.blue( schema ?? 'public', ) } schema. Please rename your view`, ) }`, ); process.exit(1); } for (const key in selectedFields) { if (is(selectedFields[key], SQLiteColumn)) { const column = selectedFields[key]; const notNull: boolean = column.notNull; const primaryKey: boolean = column.primary; const generated = column.generated; const columnToSet: Column = { name: column.name, type: column.getSQLType(), primaryKey, notNull, autoincrement: is(column, SQLiteBaseInteger) ? column.autoIncrement : false, generated: generated ? { as: is(generated.as, SQL) ? `(${dialect.sqlToQuery(generated.as as SQL, 'indexes').sql})` : typeof generated.as === 'function' ? `(${dialect.sqlToQuery(generated.as() as SQL, 'indexes').sql})` : `(${generated.as as any})`, type: generated.mode ?? 'virtual', } : undefined, }; if (column.default !== undefined) { if (is(column.default, SQL)) { columnToSet.default = sqlToStr(column.default, casing); } else { columnToSet.default = typeof column.default === 'string' ? `'${column.default}'` : typeof column.default === 'object' || Array.isArray(column.default) ? `'${JSON.stringify(column.default)}'` : column.default; } } columnsObject[column.name] = columnToSet; } } resultViews[name] = { columns: columnsObject, name, isExisting, definition: isExisting ? undefined : dialect.sqlToQuery(query!).sql, }; } return { version: '6', dialect: 'sqlite', tables: result, views: resultViews, enums: {}, _meta: { tables: {}, columns: {}, }, internal, }; }; function mapSqlToSqliteType(sqlType: string): string { const lowered = sqlType.toLowerCase(); if ( [ 'int', 'integer', 'integer auto_increment', 'tinyint', 'smallint', 'mediumint', 'bigint', 'unsigned big int', 'int2', 'int8', ].some((it) => lowered.startsWith(it)) ) { return 'integer'; } else if ( [ 'character', 'varchar', 'varying character', 'national varying character', 'nchar', 'native character', 'nvarchar', 'text', 'clob', ].some((it) => lowered.startsWith(it)) ) { const match = lowered.match(/\d+/); if (match) { return `text(${match[0]})`; } return 'text'; } else if (lowered.startsWith('blob')) { return 'blob'; } else if ( ['real', 'double', 'double precision', 'float'].some((it) => lowered.startsWith(it)) ) { return 'real'; } else { return 'numeric'; } } interface ColumnInfo { columnName: string; expression: string; type: 'stored' | 'virtual'; } function extractGeneratedColumns(input: string): Record { const columns: Record = {}; const lines = input.split(/,\s*(?![^()]*\))/); // Split by commas outside parentheses for (const line of lines) { if (line.includes('GENERATED ALWAYS AS')) { const parts = line.trim().split(/\s+/); const columnName = parts[0].replace(/[`'"]/g, ''); // Remove quotes around the column name const expression = line .substring(line.indexOf('('), line.indexOf(')') + 1) .trim(); // Extract type ensuring to remove any trailing characters like ')' const typeIndex = parts.findIndex((part) => part.match(/(stored|virtual)/i)); let type: ColumnInfo['type'] = 'virtual'; if (typeIndex !== -1) { type = parts[typeIndex] .replace(/[^a-z]/gi, '') .toLowerCase() as ColumnInfo['type']; } columns[columnName] = { columnName: columnName, expression: expression, type, }; } } return columns; } function filterIgnoredTablesByField(fieldName: string) { // _cf_ is a prefix for internal Cloudflare D1 tables (e.g. _cf_KV, _cf_METADATA) // _litestream_ is a prefix for internal Litestream tables (e.g. _litestream_seq, _litestream_lock) // libsql_ is a prefix for internal libSQL tables (e.g. libsql_wasm_func_table) // sqlite_ is a prefix for internal SQLite tables (e.g. sqlite_sequence, sqlite_stat1) return `${fieldName} != '__drizzle_migrations' AND ${fieldName} NOT LIKE '\\_cf\\_%' ESCAPE '\\' AND ${fieldName} NOT LIKE '\\_litestream\\_%' ESCAPE '\\' AND ${fieldName} NOT LIKE 'libsql\\_%' ESCAPE '\\' AND ${fieldName} NOT LIKE 'sqlite\\_%' ESCAPE '\\'`; } export const fromDatabase = async ( db: SQLiteDB, tablesFilter: (table: string) => boolean = (table) => true, progressCallback?: ( stage: IntrospectStage, count: number, status: IntrospectStatus, ) => void, ): Promise => { const result: Record = {}; const resultViews: Record = {}; const columns = await db.query<{ tableName: string; columnName: string; columnType: string; notNull: number; defaultValue: string; pk: number; seq: number; hidden: number; sql: string; type: 'view' | 'table'; }>(`SELECT m.name as "tableName", p.name as "columnName", p.type as "columnType", p."notnull" as "notNull", p.dflt_value as "defaultValue", p.pk as pk, p.hidden as hidden, m.sql, m.type as type FROM sqlite_master AS m JOIN pragma_table_xinfo(m.name) AS p WHERE (m.type = 'table' OR m.type = 'view') AND ${filterIgnoredTablesByField('m.tbl_name')};`); const tablesWithSeq: string[] = []; const seq = await db.query<{ name: string; }>(`SELECT * FROM sqlite_master WHERE sql GLOB '*[ *' || CHAR(9) || CHAR(10) || CHAR(13) || ']AUTOINCREMENT[^'']*' AND ${filterIgnoredTablesByField('tbl_name')};`); for (const s of seq) { tablesWithSeq.push(s.name); } let columnsCount = 0; let tablesCount = new Set(); let indexesCount = 0; let foreignKeysCount = 0; let checksCount = 0; let viewsCount = 0; // append primaryKeys by table const tableToPk: { [tname: string]: string[] } = {}; let tableToGeneratedColumnsInfo: Record< string, Record > = {}; for (const column of columns) { if (!tablesFilter(column.tableName)) continue; // TODO if (column.type !== 'view') { columnsCount += 1; } if (progressCallback) { progressCallback('columns', columnsCount, 'fetching'); } const tableName = column.tableName; tablesCount.add(tableName); if (progressCallback) { progressCallback('tables', tablesCount.size, 'fetching'); } const columnName = column.columnName; const isNotNull = column.notNull === 1; // 'YES', 'NO' const columnType = column.columnType; // varchar(256) const isPrimary = column.pk !== 0; // 'PRI', '' const columnDefault: string = column.defaultValue; const isAutoincrement = isPrimary && tablesWithSeq.includes(tableName); if (isPrimary) { if (typeof tableToPk[tableName] === 'undefined') { tableToPk[tableName] = [columnName]; } else { tableToPk[tableName].push(columnName); } } const table = result[tableName]; if (column.hidden === 2 || column.hidden === 3) { if ( typeof tableToGeneratedColumnsInfo[column.tableName] === 'undefined' ) { tableToGeneratedColumnsInfo[column.tableName] = extractGeneratedColumns( column.sql, ); } } const newColumn: Column = { default: columnDefault === null ? undefined : /^-?[\d.]+(?:e-?\d+)?$/.test(columnDefault) ? Number(columnDefault) : ['CURRENT_TIME', 'CURRENT_DATE', 'CURRENT_TIMESTAMP'].includes( columnDefault, ) ? `(${columnDefault})` : columnDefault === 'false' ? false : columnDefault === 'true' ? true : columnDefault.startsWith("'") && columnDefault.endsWith("'") ? columnDefault // ? columnDefault.substring(1, columnDefault.length - 1) : `(${columnDefault})`, autoincrement: isAutoincrement, name: columnName, type: mapSqlToSqliteType(columnType), primaryKey: false, notNull: isNotNull, generated: tableToGeneratedColumnsInfo[tableName] && tableToGeneratedColumnsInfo[tableName][columnName] ? { type: tableToGeneratedColumnsInfo[tableName][columnName].type, as: tableToGeneratedColumnsInfo[tableName][columnName].expression, } : undefined, }; if (!table) { result[tableName] = { name: tableName, columns: { [columnName]: newColumn, }, compositePrimaryKeys: {}, indexes: {}, foreignKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }; } else { result[tableName]!.columns[columnName] = newColumn; } } for (const [key, value] of Object.entries(tableToPk)) { if (value.length > 1) { result[key].compositePrimaryKeys = { [`${key}_${value.join('_')}_pk`]: { columns: value, name: `${key}_${value.join('_')}_pk`, }, }; } else if (value.length === 1) { result[key].columns[value[0]].primaryKey = true; } else { } } if (progressCallback) { progressCallback('columns', columnsCount, 'done'); progressCallback('tables', tablesCount.size, 'done'); } try { const fks = await db.query<{ tableFrom: string; tableTo: string; from: string; to: string; onUpdate: string; onDelete: string; seq: number; id: number; }>(`SELECT m.name as "tableFrom", f.id as "id", f."table" as "tableTo", f."from", f."to", f."on_update" as "onUpdate", f."on_delete" as "onDelete", f.seq as "seq" FROM sqlite_master m, pragma_foreign_key_list(m.name) as f WHERE ${filterIgnoredTablesByField('m.tbl_name')};`); const fkByTableName: Record = {}; for (const fkRow of fks) { foreignKeysCount += 1; if (progressCallback) { progressCallback('fks', foreignKeysCount, 'fetching'); } const tableName: string = fkRow.tableFrom; const columnName: string = fkRow.from; const refTableName = fkRow.tableTo; const refColumnName: string = fkRow.to; const updateRule: string = fkRow.onUpdate; const deleteRule = fkRow.onDelete; const sequence = fkRow.seq; const id = fkRow.id; const tableInResult = result[tableName]; if (typeof tableInResult === 'undefined') continue; if (typeof fkByTableName[`${tableName}_${id}`] !== 'undefined') { fkByTableName[`${tableName}_${id}`]!.columnsFrom.push(columnName); fkByTableName[`${tableName}_${id}`]!.columnsTo.push(refColumnName); } else { fkByTableName[`${tableName}_${id}`] = { name: '', tableFrom: tableName, tableTo: refTableName, columnsFrom: [columnName], columnsTo: [refColumnName], onDelete: deleteRule?.toLowerCase(), onUpdate: updateRule?.toLowerCase(), }; } const columnsFrom = fkByTableName[`${tableName}_${id}`].columnsFrom; const columnsTo = fkByTableName[`${tableName}_${id}`].columnsTo; fkByTableName[ `${tableName}_${id}` ].name = `${tableName}_${ columnsFrom.join( '_', ) }_${refTableName}_${columnsTo.join('_')}_fk`; } for (const idx of Object.keys(fkByTableName)) { const value = fkByTableName[idx]; result[value.tableFrom].foreignKeys[value.name] = value; } } catch (e) { // console.log(`Can't proccess foreign keys`); } if (progressCallback) { progressCallback('fks', foreignKeysCount, 'done'); } const idxs = await db.query<{ tableName: string; indexName: string; columnName: string; isUnique: number; seq: string; }>(`SELECT m.tbl_name as tableName, il.name as indexName, ii.name as columnName, il.[unique] as isUnique, il.seq as seq FROM sqlite_master AS m, pragma_index_list(m.name) AS il, pragma_index_info(il.name) AS ii WHERE m.type = 'table' AND il.name NOT LIKE 'sqlite\\_autoindex\\_%' ESCAPE '\\' AND ${filterIgnoredTablesByField('m.tbl_name')};`); for (const idxRow of idxs) { const tableName = idxRow.tableName; const constraintName = idxRow.indexName; const columnName: string = idxRow.columnName; const isUnique = idxRow.isUnique === 1; const tableInResult = result[tableName]; if (typeof tableInResult === 'undefined') continue; indexesCount += 1; if (progressCallback) { progressCallback('indexes', indexesCount, 'fetching'); } if ( typeof tableInResult.indexes[constraintName] !== 'undefined' && columnName ) { tableInResult.indexes[constraintName]!.columns.push(columnName); } else { tableInResult.indexes[constraintName] = { name: constraintName, columns: columnName ? [columnName] : [], isUnique: isUnique, }; } // if (isUnique) { // if (typeof tableInResult.uniqueConstraints[constraintName] !== "undefined") { // tableInResult.uniqueConstraints[constraintName]!.columns.push(columnName); // } else { // tableInResult.uniqueConstraints[constraintName] = { // name: constraintName, // columns: [columnName], // }; // } // } else { // if (typeof tableInResult.indexes[constraintName] !== "undefined") { // tableInResult.indexes[constraintName]!.columns.push(columnName); // } else { // tableInResult.indexes[constraintName] = { // name: constraintName, // columns: [columnName], // isUnique: isUnique, // }; // } // } } if (progressCallback) { progressCallback('indexes', indexesCount, 'done'); // progressCallback("enums", 0, "fetching"); progressCallback('enums', 0, 'done'); } const views = await db.query( `SELECT name AS view_name, sql AS sql FROM sqlite_master WHERE type = 'view';`, ); viewsCount = views.length; if (progressCallback) { progressCallback('views', viewsCount, 'fetching'); } for (const view of views) { const viewName = view['view_name']; const sql = view['sql']; const regex = new RegExp(`\\bAS\\b\\s+(SELECT.+)$`, 'i'); const match = sql.match(regex); if (!match) { console.log('Could not process view'); process.exit(1); } const viewDefinition = match[1] as string; const columns = result[viewName].columns; delete result[viewName]; resultViews[viewName] = { columns: columns, isExisting: false, name: viewName, definition: viewDefinition, }; } if (progressCallback) { progressCallback('views', viewsCount, 'done'); } const namedCheckPattern = /CONSTRAINT\s*["']?(\w+)["']?\s*CHECK\s*\((.*?)\)/gi; const unnamedCheckPattern = /CHECK\s*\((.*?)\)/gi; let checkCounter = 0; const checkConstraints: Record = {}; const checks = await db.query<{ tableName: string; sql: string; }>(`SELECT name as "tableName", sql as "sql" FROM sqlite_master WHERE type = 'table' AND ${filterIgnoredTablesByField('tbl_name')};`); for (const check of checks) { if (!tablesFilter(check.tableName)) continue; const { tableName, sql } = check; // Find named CHECK constraints let namedChecks = [...sql.matchAll(namedCheckPattern)]; if (namedChecks.length > 0) { namedChecks.forEach(([_, checkName, checkValue]) => { checkConstraints[checkName] = { name: checkName, value: checkValue.trim(), }; }); } else { // If no named constraints, find unnamed CHECK constraints and assign names let unnamedChecks = [...sql.matchAll(unnamedCheckPattern)]; unnamedChecks.forEach(([_, checkValue]) => { let checkName = `${tableName}_check_${++checkCounter}`; checkConstraints[checkName] = { name: checkName, value: checkValue.trim(), }; }); } checksCount += Object.values(checkConstraints).length; if (progressCallback) { progressCallback('checks', checksCount, 'fetching'); } const table = result[tableName]; if (!table) { result[tableName] = { name: tableName, columns: {}, compositePrimaryKeys: {}, indexes: {}, foreignKeys: {}, uniqueConstraints: {}, checkConstraints: checkConstraints, }; } else { result[tableName]!.checkConstraints = checkConstraints; } } if (progressCallback) { progressCallback('checks', checksCount, 'done'); } return { version: '6', dialect: 'sqlite', tables: result, views: resultViews, enums: {}, _meta: { tables: {}, columns: {}, }, }; }; ================================================ FILE: drizzle-kit/src/serializer/studio.ts ================================================ /// import type { PGlite } from '@electric-sql/pglite'; import { serve } from '@hono/node-server'; import { zValidator } from '@hono/zod-validator'; import { createHash } from 'crypto'; import type { AnyColumn, AnyTable } from 'drizzle-orm'; import { is } from 'drizzle-orm'; import type { AnyMySqlTable } from 'drizzle-orm/mysql-core'; import { getTableConfig as mysqlTableConfig, MySqlTable } from 'drizzle-orm/mysql-core'; import type { AnyPgTable } from 'drizzle-orm/pg-core'; import { getTableConfig as pgTableConfig, PgTable } from 'drizzle-orm/pg-core'; import type { TablesRelationalConfig } from 'drizzle-orm/relations'; import { createTableRelationsHelpers, extractTablesRelationalConfig, Many, normalizeRelation, One, Relations, } from 'drizzle-orm/relations'; import type { AnySingleStoreTable } from 'drizzle-orm/singlestore-core'; import { getTableConfig as singlestoreTableConfig, SingleStoreTable } from 'drizzle-orm/singlestore-core'; import type { AnySQLiteTable } from 'drizzle-orm/sqlite-core'; import { getTableConfig as sqliteTableConfig, SQLiteTable } from 'drizzle-orm/sqlite-core'; import fs from 'fs'; import { Hono } from 'hono'; import { compress } from 'hono/compress'; import { cors } from 'hono/cors'; import { createServer } from 'node:https'; import type { CasingType } from 'src/cli/validations/common'; import type { LibSQLCredentials } from 'src/cli/validations/libsql'; import { assertUnreachable } from 'src/global'; import { z } from 'zod'; import { safeRegister } from '../cli/commands/utils'; import type { MysqlCredentials } from '../cli/validations/mysql'; import type { PostgresCredentials } from '../cli/validations/postgres'; import type { SingleStoreCredentials } from '../cli/validations/singlestore'; import type { SqliteCredentials } from '../cli/validations/sqlite'; import type { Proxy, TransactionProxy } from '../utils'; import { prepareFilenames } from '.'; import { getColumnCasing } from './utils'; type CustomDefault = { schema: string; table: string; column: string; func: () => unknown; }; type SchemaFile = { name: string; content: string; }; export type Setup = { dbHash: string; dialect: 'postgresql' | 'mysql' | 'sqlite' | 'singlestore'; packageName: | '@aws-sdk/client-rds-data' | 'pglite' | 'pg' | 'postgres' | '@vercel/postgres' | '@neondatabase/serverless' | 'gel' | 'mysql2' | '@planetscale/database' | 'd1-http' | 'd1' | '@libsql/client' | 'better-sqlite3'; driver?: 'aws-data-api' | 'd1-http' | 'd1' | 'turso' | 'pglite'; databaseName?: string; // for planetscale (driver remove database name from connection string) proxy: Proxy; transactionProxy: TransactionProxy; customDefaults: CustomDefault[]; schema: Record>>; relations: Record; casing?: CasingType; schemaFiles?: SchemaFile[]; }; export type ProxyParams = { sql: string; params?: any[]; typings?: any[]; mode: 'array' | 'object'; method: 'values' | 'get' | 'all' | 'run' | 'execute'; }; export const preparePgSchema = async (path: string | string[]) => { const imports = prepareFilenames(path); const pgSchema: Record> = {}; const relations: Record = {}; // files content as string const files = imports.map((it, index) => ({ // get the file name from the path name: it.split('/').pop() || `schema${index}.ts`, content: fs.readFileSync(it, 'utf-8'), })); await safeRegister(async () => { for (let i = 0; i < imports.length; i++) { const it = imports[i]; const i0: Record = require(`${it}`); const i0values = Object.entries(i0); i0values.forEach(([k, t]) => { if (is(t, PgTable)) { const schema = pgTableConfig(t).schema || 'public'; pgSchema[schema] = pgSchema[schema] || {}; pgSchema[schema][k] = t; } if (is(t, Relations)) { relations[k] = t; } }); } }); return { schema: pgSchema, relations, files }; }; export const prepareMySqlSchema = async (path: string | string[]) => { const imports = prepareFilenames(path); const mysqlSchema: Record> = { public: {}, }; const relations: Record = {}; // files content as string const files = imports.map((it, index) => ({ // get the file name from the path name: it.split('/').pop() || `schema${index}.ts`, content: fs.readFileSync(it, 'utf-8'), })); await safeRegister(async () => { for (let i = 0; i < imports.length; i++) { const it = imports[i]; const i0: Record = require(`${it}`); const i0values = Object.entries(i0); i0values.forEach(([k, t]) => { if (is(t, MySqlTable)) { const schema = mysqlTableConfig(t).schema || 'public'; mysqlSchema[schema][k] = t; } if (is(t, Relations)) { relations[k] = t; } }); } }); return { schema: mysqlSchema, relations, files }; }; export const prepareSQLiteSchema = async (path: string | string[]) => { const imports = prepareFilenames(path); const sqliteSchema: Record> = { public: {}, }; const relations: Record = {}; // files content as string const files = imports.map((it, index) => ({ // get the file name from the path name: it.split('/').pop() || `schema${index}.ts`, content: fs.readFileSync(it, 'utf-8'), })); await safeRegister(async () => { for (let i = 0; i < imports.length; i++) { const it = imports[i]; const i0: Record = require(`${it}`); const i0values = Object.entries(i0); i0values.forEach(([k, t]) => { if (is(t, SQLiteTable)) { const schema = 'public'; // sqlite does not have schemas sqliteSchema[schema][k] = t; } if (is(t, Relations)) { relations[k] = t; } }); } }); return { schema: sqliteSchema, relations, files }; }; export const prepareSingleStoreSchema = async (path: string | string[]) => { const imports = prepareFilenames(path); const singlestoreSchema: Record< string, Record > = { public: {}, }; const relations: Record = {}; // files content as string const files = imports.map((it, index) => ({ // get the file name from the path name: it.split('/').pop() || `schema${index}.ts`, content: fs.readFileSync(it, 'utf-8'), })); await safeRegister(async () => { for (let i = 0; i < imports.length; i++) { const it = imports[i]; const i0: Record = require(`${it}`); const i0values = Object.entries(i0); i0values.forEach(([k, t]) => { if (is(t, SingleStoreTable)) { const schema = singlestoreTableConfig(t).schema || 'public'; singlestoreSchema[schema][k] = t; } if (is(t, Relations)) { relations[k] = t; } }); } }); return { schema: singlestoreSchema, relations, files }; }; const getCustomDefaults = >( schema: Record>, casing?: CasingType, ): CustomDefault[] => { const customDefaults: CustomDefault[] = []; Object.entries(schema).map(([schema, tables]) => { Object.entries(tables).map(([, table]) => { let tableConfig: { name: string; columns: AnyColumn[]; }; if (is(table, PgTable)) { tableConfig = pgTableConfig(table); } else if (is(table, MySqlTable)) { tableConfig = mysqlTableConfig(table); } else if (is(table, SQLiteTable)) { tableConfig = sqliteTableConfig(table); } else { tableConfig = singlestoreTableConfig(table as SingleStoreTable); } tableConfig.columns.map((column) => { if (column.defaultFn) { customDefaults.push({ schema, table: tableConfig.name, column: getColumnCasing(column, casing), func: column.defaultFn, }); } }); }); }); return customDefaults; }; export const drizzleForPostgres = async ( credentials: PostgresCredentials | { driver: 'pglite'; client: PGlite; }, pgSchema: Record>, relations: Record, schemaFiles?: SchemaFile[], casing?: CasingType, ): Promise => { const { preparePostgresDB } = await import('../cli/connections'); const db = await preparePostgresDB(credentials); const customDefaults = getCustomDefaults(pgSchema, casing); let dbUrl: string; if ('driver' in credentials) { const { driver } = credentials; if (driver === 'aws-data-api') { dbUrl = `aws-data-api://${credentials.database}/${credentials.secretArn}/${credentials.resourceArn}`; } else if (driver === 'pglite') { dbUrl = 'client' in credentials ? credentials.client.dataDir || 'pglite://custom-client' : credentials.url; } else { assertUnreachable(driver); } } else if ('url' in credentials) { dbUrl = credentials.url; } else { dbUrl = `postgresql://${credentials.user}:${credentials.password}@${credentials.host}:${credentials.port}/${credentials.database}`; } const dbHash = createHash('sha256').update(dbUrl).digest('hex'); return { dbHash, dialect: 'postgresql', driver: 'driver' in credentials ? credentials.driver : undefined, packageName: db.packageName, proxy: db.proxy, transactionProxy: db.transactionProxy, customDefaults, schema: pgSchema, relations, schemaFiles, casing, }; }; export const drizzleForMySQL = async ( credentials: MysqlCredentials, mysqlSchema: Record>, relations: Record, schemaFiles?: SchemaFile[], casing?: CasingType, ): Promise => { const { connectToMySQL } = await import('../cli/connections'); const { proxy, transactionProxy, database, packageName } = await connectToMySQL(credentials); const customDefaults = getCustomDefaults(mysqlSchema, casing); let dbUrl: string; if ('url' in credentials) { dbUrl = credentials.url; } else { dbUrl = `mysql://${credentials.user}:${credentials.password}@${credentials.host}:${credentials.port}/${credentials.database}`; } const dbHash = createHash('sha256').update(dbUrl).digest('hex'); return { dbHash, dialect: 'mysql', packageName, databaseName: database, proxy, transactionProxy, customDefaults, schema: mysqlSchema, relations, schemaFiles, casing, }; }; // D1 binding credentials type (mirrors the one in connections.ts) type D1BindingCredentials = { driver: 'd1'; binding: D1Database; }; export const drizzleForSQLite = async ( credentials: SqliteCredentials | D1BindingCredentials, sqliteSchema: Record>, relations: Record, schemaFiles?: SchemaFile[], casing?: CasingType, ): Promise => { const customDefaults = getCustomDefaults(sqliteSchema, casing); if ('driver' in credentials && credentials.driver === 'd1') { const { connectToD1 } = await import('../cli/connections'); const sqliteDB = await connectToD1(credentials.binding); const dbUrl = 'd1://binding'; const dbHash = createHash('sha256').update(dbUrl).digest('hex'); return { dbHash, dialect: 'sqlite', driver: 'd1', packageName: 'd1', proxy: sqliteDB.proxy, transactionProxy: sqliteDB.transactionProxy, customDefaults, schema: sqliteSchema, relations, schemaFiles, casing, }; } const { connectToSQLite } = await import('../cli/connections'); const sqliteDB = await connectToSQLite(credentials); let dbUrl: string; if ('driver' in credentials) { const { driver } = credentials; if (driver === 'd1-http') { dbUrl = `d1-http://${credentials.accountId}/${credentials.databaseId}/${credentials.token}`; } else { assertUnreachable(driver); } } else { dbUrl = credentials.url; } const dbHash = createHash('sha256').update(dbUrl).digest('hex'); return { dbHash, dialect: 'sqlite', driver: 'driver' in credentials ? credentials.driver : undefined, packageName: sqliteDB.packageName, proxy: sqliteDB.proxy, transactionProxy: sqliteDB.transactionProxy, customDefaults, schema: sqliteSchema, relations, schemaFiles, casing, }; }; export const drizzleForLibSQL = async ( credentials: LibSQLCredentials, sqliteSchema: Record>, relations: Record, schemaFiles?: SchemaFile[], casing?: CasingType, ): Promise => { const { connectToLibSQL } = await import('../cli/connections'); const sqliteDB = await connectToLibSQL(credentials); const customDefaults = getCustomDefaults(sqliteSchema, casing); let dbUrl: string = `turso://${credentials.url}/${credentials.authToken}`; const dbHash = createHash('sha256').update(dbUrl).digest('hex'); return { dbHash, dialect: 'sqlite', driver: undefined, packageName: sqliteDB.packageName, proxy: sqliteDB.proxy, transactionProxy: sqliteDB.transactionProxy, customDefaults, schema: sqliteSchema, relations, schemaFiles, casing, }; }; export const drizzleForSingleStore = async ( credentials: SingleStoreCredentials, singlestoreSchema: Record>, relations: Record, schemaFiles?: SchemaFile[], casing?: CasingType, ): Promise => { const { connectToSingleStore } = await import('../cli/connections'); const { proxy, transactionProxy, database, packageName } = await connectToSingleStore(credentials); const customDefaults = getCustomDefaults(singlestoreSchema, casing); let dbUrl: string; if ('url' in credentials) { dbUrl = credentials.url; } else { dbUrl = `singlestore://${credentials.user}:${credentials.password}@${credentials.host}:${credentials.port}/${credentials.database}`; } const dbHash = createHash('sha256').update(dbUrl).digest('hex'); return { dbHash, dialect: 'singlestore', databaseName: database, packageName, proxy, transactionProxy, customDefaults, schema: singlestoreSchema, relations, schemaFiles, casing, }; }; type Relation = { name: string; type: 'one' | 'many'; table: string; schema: string; columns: string[]; refTable: string; refSchema: string; refColumns: string[]; }; export const extractRelations = ( tablesConfig: { tables: TablesRelationalConfig; tableNamesMap: Record; }, casing?: CasingType, ): Relation[] => { const relations = Object.values(tablesConfig.tables) .map((it) => Object.entries(it.relations).map(([name, relation]) => { try { const normalized = normalizeRelation( tablesConfig.tables, tablesConfig.tableNamesMap, relation, ); const rel = relation; const refTableName = rel.referencedTableName; const refTable = rel.referencedTable; const fields = normalized.fields .map((it) => getColumnCasing(it, casing)) .flat(); const refColumns = normalized.references .map((it) => getColumnCasing(it, casing)) .flat(); let refSchema: string | undefined; if (is(refTable, PgTable)) { refSchema = pgTableConfig(refTable).schema; } else if (is(refTable, MySqlTable)) { refSchema = mysqlTableConfig(refTable).schema; } else if (is(refTable, SQLiteTable)) { refSchema = undefined; } else if (is(refTable, SingleStoreTable)) { refSchema = singlestoreTableConfig(refTable).schema; } else { throw new Error('unsupported dialect'); } let type: 'one' | 'many'; if (is(rel, One)) { type = 'one'; } else if (is(rel, Many)) { type = 'many'; } else { throw new Error('unsupported relation type'); } return { name, type, table: it.dbName, schema: it.schema || 'public', columns: fields, refTable: refTableName, refSchema: refSchema || 'public', refColumns: refColumns, }; } catch { throw new Error( `Invalid relation "${relation.fieldName}" for table "${ it.schema ? `${it.schema}.${it.dbName}` : it.dbName }"`, ); } }) ) .flat(); return relations; }; const init = z.object({ type: z.literal('init'), }); const proxySchema = z.object({ type: z.literal('proxy'), data: z.object({ sql: z.string(), params: z.array(z.any()).optional(), typings: z.string().array().optional(), mode: z.enum(['array', 'object']).default('object'), method: z.union([ z.literal('values'), z.literal('get'), z.literal('all'), z.literal('run'), z.literal('execute'), ]), }), }); const transactionProxySchema = z.object({ type: z.literal('tproxy'), data: z .object({ sql: z.string(), method: z .union([ z.literal('values'), z.literal('get'), z.literal('all'), z.literal('run'), z.literal('execute'), ]) .optional(), }) .array(), }); const defaultsSchema = z.object({ type: z.literal('defaults'), data: z .array( z.object({ schema: z.string(), table: z.string(), column: z.string(), }), ) .min(1), }); const schema = z.union([ init, proxySchema, transactionProxySchema, defaultsSchema, ]); const jsonStringify = (data: any) => { return JSON.stringify(data, (_key, value) => { // Convert Error to object if (value instanceof Error) { return { error: value.message, }; } // Convert BigInt to string if (typeof value === 'bigint') { return value.toString(); } // Convert Buffer and ArrayBuffer to base64 if ( (value && typeof value === 'object' && 'type' in value && 'data' in value && value.type === 'Buffer') || value instanceof ArrayBuffer || value instanceof Buffer ) { return Buffer.from(value).toString('base64'); } return value; }); }; export type Server = { start: (params: { host: string; port: number; key?: string; cert?: string; cb: (err: Error | null, address: string) => void; }) => void; }; export const prepareServer = async ( { dialect, driver, packageName, databaseName, proxy, transactionProxy, customDefaults, schema: drizzleSchema, relations, dbHash, casing, schemaFiles, }: Setup, app?: Hono, ): Promise => { app = app !== undefined ? app : new Hono(); app.use(compress()); app.use(async (ctx, next) => { await next(); // * https://wicg.github.io/private-network-access/#headers // * https://github.com/drizzle-team/drizzle-orm/issues/1857#issuecomment-2395724232 ctx.header('Access-Control-Allow-Private-Network', 'true'); }); app.use(cors()); app.onError((err, ctx) => { console.error(err); return ctx.json({ status: 'error', error: err.message, }); }); const relationalSchema: Record = { ...Object.fromEntries( Object.entries(drizzleSchema) .map(([schemaName, schema]) => { // have unique keys across schemas const mappedTableEntries = Object.entries(schema).map( ([tableName, table]) => { return [`__${schemaName}__.${tableName}`, table]; }, ); return mappedTableEntries; }) .flat(), ), ...relations, }; const relationsConfig = extractTablesRelationalConfig( relationalSchema, createTableRelationsHelpers, ); app.post('/', zValidator('json', schema), async (c) => { const body = c.req.valid('json'); const { type } = body; if (type === 'init') { const preparedDefaults = customDefaults.map((d) => ({ schema: d.schema, table: d.table, column: d.column, })); let relations: Relation[] = []; // Attempt to extract relations from the relational config. // An error may occur if the relations are ambiguous or misconfigured. try { relations = extractRelations(relationsConfig, casing); } catch (error) { console.warn( 'Failed to extract relations. This is likely due to ambiguous or misconfigured relations.', ); console.warn( 'Please check your schema and ensure that all relations are correctly defined.', ); console.warn( 'See: https://orm.drizzle.team/docs/relations#disambiguating-relations', ); console.warn('Error message:', (error as Error).message); } return c.json({ version: '6.2', dialect, driver, packageName, schemaFiles, customDefaults: preparedDefaults, relations, dbHash, databaseName, }); } if (type === 'proxy') { const result = await proxy({ ...body.data, params: body.data.params || [], }); return c.json(JSON.parse(jsonStringify(result))); } if (type === 'tproxy') { const result = await transactionProxy(body.data); return c.json(JSON.parse(jsonStringify(result))); } if (type === 'defaults') { const columns = body.data; const result = columns.map((column) => { const found = customDefaults.find((d) => { return ( d.schema === column.schema && d.table === column.table && d.column === column.column ); }); if (!found) { throw new Error( `Custom default not found for ${column.schema}.${column.table}.${column.column}`, ); } const value = found.func(); return { ...column, value, }; }); return c.json(JSON.parse(jsonStringify(result))); } throw new Error(`Unknown type: ${type}`); }); return { start: (params: Parameters[0]) => { serve( { fetch: app!.fetch, createServer: params.key ? createServer : undefined, hostname: params.host, port: params.port, serverOptions: { key: params.key, cert: params.cert, }, }, () => params.cb(null, `${params.host}:${params.port}`), ); }, }; }; ================================================ FILE: drizzle-kit/src/serializer/utils.ts ================================================ import { SQL } from 'drizzle-orm'; import { CasingCache, toCamelCase, toSnakeCase } from 'drizzle-orm/casing'; import { CasingType } from '../cli/validations/common'; export function getColumnCasing( column: { keyAsName: boolean; name: string | undefined }, casing: CasingType | undefined, ) { if (!column.name) return ''; return !column.keyAsName || casing === undefined ? column.name : casing === 'camelCase' ? toCamelCase(column.name) : toSnakeCase(column.name); } export const sqlToStr = (sql: SQL, casing: CasingType | undefined) => { return sql.toQuery({ escapeName: () => { throw new Error("we don't support params for `sql` default values"); }, escapeParam: () => { throw new Error("we don't support params for `sql` default values"); }, escapeString: () => { throw new Error("we don't support params for `sql` default values"); }, casing: new CasingCache(casing), }).sql; }; export const sqlToStrGenerated = (sql: SQL, casing: CasingType | undefined) => { return sql.toQuery({ escapeName: () => { throw new Error("we don't support params for `sql` default values"); }, escapeParam: () => { throw new Error("we don't support params for `sql` default values"); }, escapeString: () => { throw new Error("we don't support params for `sql` default values"); }, casing: new CasingCache(casing), }).sql; }; ================================================ FILE: drizzle-kit/src/simulator.ts ================================================ declare global { interface Array { exactlyOne(): T; } } Array.prototype.exactlyOne = function() { if (this.length !== 1) { return undefined; } return this[0]; }; interface TablesHandler { can(added: T[], removed: T[]): boolean; handle(added: T[], removed: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] }; } interface ColumnsHandler { can(tableName: string, added: T[], removed: T[]): boolean; handle( tableName: string, added: T[], removed: T[], ): { tableName: string; created: T[]; deleted: T[]; renamed: { from: T; to: T }[] }; } class DryRun implements TablesHandler { can(added: T[], removed: T[]): boolean { return added.length === 0 && removed.length === 0; } handle(added: T[], _: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { return { created: added, deleted: [], renamed: [] }; } } // class Fallback implements Handler { // can(_: Table[], __: Table[]): boolean { // return true // } // handle(added: Table[], _: Table[]): { created: Table[]; deleted: Table[]; renamed: { from: Table; to: Table; }[]; } { // return { created: added, deleted: , renamed: [] } // } // } class Case1 implements TablesHandler { can(_: T[], removed: T[]): boolean { return removed.length === 1 && removed[0].name === 'citiess'; } handle(added: T[], removed: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { return { created: added, deleted: removed, renamed: [] }; } } class Case2 implements TablesHandler { // authOtp, deleted, users -> authOtp renamed, cities added, deleted deleted can(_: T[], removed: T[]): boolean { return removed.length === 3 && removed[0].name === 'auth_otp'; } handle(added: T[], removed: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { return { created: added.slice(1), deleted: removed.slice(1), renamed: [{ from: removed[0], to: added[0] }] }; } } type Named = { name: string }; const handlers: TablesHandler[] = []; handlers.push(new Case1()); handlers.push(new Case2()); handlers.push(new DryRun()); export const resolveTables = (added: T[], removed: T[]) => { const handler = handlers.filter((it) => { return it.can(added, removed); }).exactlyOne(); if (!handler) { console.log('added', added.map((it) => it.name).join()); console.log('removed', removed.map((it) => it.name).join()); throw new Error('No handler'); } console.log(`Simluated by ${handler.constructor.name}`); return handler.handle(added, removed); }; class LehaColumnsHandler implements ColumnsHandler { can(tableName: string, _: T[], __: T[]): boolean { return tableName === 'users'; } handle( tableName: string, added: T[], removed: T[], ): { tableName: string; created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { return { tableName, created: [], deleted: [], renamed: [{ from: removed[0], to: added[0] }] }; } } class DryRunColumnsHandler implements ColumnsHandler { can(tableName: string, _: T[], __: T[]): boolean { return true; } handle( tableName: string, added: T[], removed: T[], ): { tableName: string; created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { return { tableName, created: added, deleted: removed, renamed: [] }; } } class V1V2AuthOtpColumnsHandler implements ColumnsHandler { can(tableName: string, _: T[], __: T[]): boolean { return tableName === 'auth_otp'; } handle( tableName: string, added: T[], removed: T[], ): { tableName: string; created: T[]; deleted: T[]; renamed: { from: T; to: T }[] } { const phonePrev = removed.filter((it) => it.name === 'phone')[0]; const phoneNew = added.filter((it) => it.name === 'phone1')[0]; const newAdded = added.filter((it) => it.name !== 'phone1'); const newRemoved = removed.filter((it) => it.name !== 'phone'); return { tableName, created: newAdded, deleted: newRemoved, renamed: [{ from: phonePrev, to: phoneNew }] }; } // handle(tableName:string, added: T[], _: T[]): { created: T[]; deleted: T[]; renamed: { from: T; to: T; }[]; } { // return { created: added, deleted: [], renamed: [] } // } } const columnsHandlers: ColumnsHandler[] = []; columnsHandlers.push(new V1V2AuthOtpColumnsHandler()); columnsHandlers.push(new LehaColumnsHandler()); columnsHandlers.push(new DryRunColumnsHandler()); export const resolveColumns = (tableName: string, added: T[], removed: T[]) => { const handler = columnsHandlers.filter((it) => { return it.can(tableName, added, removed); })[0]; if (!handler) { console.log('added', added.map((it) => it.name).join()); console.log('removed', removed.map((it) => it.name).join()); throw new Error('No columns handler for table: ' + tableName); } console.log(`${tableName} columns simluated by ${handler.constructor.name}`); return handler.handle(tableName, added, removed); }; ================================================ FILE: drizzle-kit/src/snapshotsDiffer.ts ================================================ import { any, array, boolean, enum as enumType, literal, never, object, record, string, TypeOf, union, ZodTypeAny, } from 'zod'; import { applyJsonDiff, diffColumns, diffIndPolicies, diffPolicies, diffSchemasOrTables } from './jsonDiffer'; import { fromJson } from './sqlgenerator'; import { _prepareAddColumns, _prepareDropColumns, _prepareSqliteAddColumns, JsonAddColumnStatement, JsonAlterCompositePK, JsonAlterIndPolicyStatement, JsonAlterMySqlViewStatement, JsonAlterPolicyStatement, JsonAlterTableSetSchema, JsonAlterUniqueConstraint, JsonAlterViewStatement, JsonCreateCheckConstraint, JsonCreateCompositePK, JsonCreateIndPolicyStatement, JsonCreateMySqlViewStatement, JsonCreatePgViewStatement, JsonCreatePolicyStatement, JsonCreateReferenceStatement, JsonCreateSqliteViewStatement, JsonCreateUniqueConstraint, JsonDeleteCheckConstraint, JsonDeleteCompositePK, JsonDeleteUniqueConstraint, JsonDisableRLSStatement, JsonDropColumnStatement, JsonDropIndPolicyStatement, JsonDropPolicyStatement, JsonDropViewStatement, JsonEnableRLSStatement, JsonIndRenamePolicyStatement, JsonReferenceStatement, JsonRenameColumnStatement, JsonRenamePolicyStatement, JsonRenameRoleStatement, JsonRenameViewStatement, JsonSqliteAddColumnStatement, JsonStatement, prepareAddCheckConstraint, prepareAddCompositePrimaryKeyMySql, prepareAddCompositePrimaryKeyPg, prepareAddCompositePrimaryKeySqlite, prepareAddUniqueConstraintPg as prepareAddUniqueConstraint, prepareAddValuesToEnumJson, prepareAlterColumnsMysql, prepareAlterCompositePrimaryKeyMySql, prepareAlterCompositePrimaryKeyPg, prepareAlterCompositePrimaryKeySqlite, prepareAlterIndPolicyJson, prepareAlterPolicyJson, prepareAlterReferencesJson, prepareAlterRoleJson, prepareAlterSequenceJson, prepareCreateEnumJson, prepareCreateIndexesJson, prepareCreateIndPolicyJsons, prepareCreatePolicyJsons, prepareCreateReferencesJson, prepareCreateRoleJson, prepareCreateSchemasJson, prepareCreateSequenceJson, prepareDeleteCheckConstraint, prepareDeleteCompositePrimaryKeyMySql, prepareDeleteCompositePrimaryKeyPg, prepareDeleteCompositePrimaryKeySqlite, prepareDeleteSchemasJson as prepareDropSchemasJson, prepareDeleteUniqueConstraintPg as prepareDeleteUniqueConstraint, prepareDropEnumJson, prepareDropEnumValues, prepareDropIndexesJson, prepareDropIndPolicyJsons, prepareDropPolicyJsons, prepareDropReferencesJson, prepareDropRoleJson, prepareDropSequenceJson, prepareDropTableJson, prepareDropViewJson, prepareLibSQLCreateReferencesJson, prepareLibSQLDropReferencesJson, prepareMoveEnumJson, prepareMoveSequenceJson, prepareMySqlAlterView, prepareMySqlCreateTableJson, prepareMySqlCreateViewJson, preparePgAlterColumns, preparePgAlterViewAddWithOptionJson, preparePgAlterViewAlterSchemaJson, preparePgAlterViewAlterTablespaceJson, preparePgAlterViewAlterUsingJson, preparePgAlterViewDropWithOptionJson, preparePgCreateIndexesJson, preparePgCreateTableJson, preparePgCreateViewJson, prepareRenameColumns, prepareRenameEnumJson, prepareRenameIndPolicyJsons, prepareRenamePolicyJsons, prepareRenameRoleJson, prepareRenameSchemasJson, prepareRenameSequenceJson, prepareRenameTableJson, prepareRenameViewJson, prepareSingleStoreCreateTableJson, prepareSqliteAlterColumns, prepareSQLiteCreateTable, prepareSqliteCreateViewJson, } from './jsonStatements'; import { Named, NamedWithSchema } from './cli/commands/migrate'; import { mapEntries, mapKeys, mapValues } from './global'; import { MySqlSchema, MySqlSchemaSquashed, MySqlSquasher, ViewSquashed } from './serializer/mysqlSchema'; import { mergedViewWithOption, PgSchema, PgSchemaSquashed, PgSquasher, Policy, policy, policySquashed, Role, roleSchema, sequenceSquashed, View, } from './serializer/pgSchema'; import { SingleStoreSchema, SingleStoreSchemaSquashed, SingleStoreSquasher } from './serializer/singlestoreSchema'; import { SQLiteSchema, SQLiteSchemaSquashed, SQLiteSquasher, View as SqliteView } from './serializer/sqliteSchema'; import { libSQLCombineStatements, singleStoreCombineStatements, sqliteCombineStatements } from './statementCombiner'; import { copy, prepareMigrationMeta } from './utils'; const makeChanged = (schema: T) => { return object({ type: enumType(['changed']), old: schema, new: schema, }); }; const makeSelfOrChanged = (schema: T) => { return union([ schema, object({ type: enumType(['changed']), old: schema, new: schema, }), ]); }; export const makePatched = (schema: T) => { return union([ object({ type: literal('added'), value: schema, }), object({ type: literal('deleted'), value: schema, }), object({ type: literal('changed'), old: schema, new: schema, }), ]); }; export const makeSelfOrPatched = (schema: T) => { return union([ object({ type: literal('none'), value: schema, }), object({ type: literal('added'), value: schema, }), object({ type: literal('deleted'), value: schema, }), object({ type: literal('changed'), old: schema, new: schema, }), ]); }; const columnSchema = object({ name: string(), type: string(), typeSchema: string().optional(), primaryKey: boolean().optional(), default: any().optional(), notNull: boolean().optional(), // should it be optional? should if be here? autoincrement: boolean().optional(), onUpdate: boolean().optional(), isUnique: any().optional(), uniqueName: string().optional(), nullsNotDistinct: boolean().optional(), generated: object({ as: string(), type: enumType(['stored', 'virtual']).default('stored'), }).optional(), identity: string().optional(), }).strict(); const alteredColumnSchema = object({ name: makeSelfOrChanged(string()), type: makeChanged(string()).optional(), default: makePatched(any()).optional(), primaryKey: makePatched(boolean()).optional(), notNull: makePatched(boolean()).optional(), typeSchema: makePatched(string()).optional(), onUpdate: makePatched(boolean()).optional(), autoincrement: makePatched(boolean()).optional(), generated: makePatched( object({ as: string(), type: enumType(['stored', 'virtual']).default('stored'), }), ).optional(), identity: makePatched(string()).optional(), }).strict(); const enumSchema = object({ name: string(), schema: string(), values: array(string()), }).strict(); const changedEnumSchema = object({ name: string(), schema: string(), addedValues: object({ before: string(), value: string(), }).array(), deletedValues: array(string()), }).strict(); const tableScheme = object({ name: string(), schema: string().default(''), columns: record(string(), columnSchema), indexes: record(string(), string()), foreignKeys: record(string(), string()), compositePrimaryKeys: record(string(), string()).default({}), uniqueConstraints: record(string(), string()).default({}), policies: record(string(), string()).default({}), checkConstraints: record(string(), string()).default({}), isRLSEnabled: boolean().default(false), }).strict(); export const alteredTableScheme = object({ name: string(), schema: string(), altered: alteredColumnSchema.array(), addedIndexes: record(string(), string()), deletedIndexes: record(string(), string()), alteredIndexes: record( string(), object({ __new: string(), __old: string(), }).strict(), ), addedForeignKeys: record(string(), string()), deletedForeignKeys: record(string(), string()), alteredForeignKeys: record( string(), object({ __new: string(), __old: string(), }).strict(), ), addedCompositePKs: record(string(), string()), deletedCompositePKs: record(string(), string()), alteredCompositePKs: record( string(), object({ __new: string(), __old: string(), }), ), addedUniqueConstraints: record(string(), string()), deletedUniqueConstraints: record(string(), string()), alteredUniqueConstraints: record( string(), object({ __new: string(), __old: string(), }), ), addedPolicies: record(string(), string()), deletedPolicies: record(string(), string()), alteredPolicies: record( string(), object({ __new: string(), __old: string(), }), ), addedCheckConstraints: record( string(), string(), ), deletedCheckConstraints: record( string(), string(), ), alteredCheckConstraints: record( string(), object({ __new: string(), __old: string(), }), ), }).strict(); const alteredViewCommon = object({ name: string(), alteredDefinition: object({ __old: string(), __new: string(), }).strict().optional(), alteredExisting: object({ __old: boolean(), __new: boolean(), }).strict().optional(), }); export const alteredPgViewSchema = alteredViewCommon.merge( object({ schema: string(), deletedWithOption: mergedViewWithOption.optional(), addedWithOption: mergedViewWithOption.optional(), addedWith: mergedViewWithOption.optional(), deletedWith: mergedViewWithOption.optional(), alteredWith: mergedViewWithOption.optional(), alteredSchema: object({ __old: string(), __new: string(), }).strict().optional(), alteredTablespace: object({ __old: string(), __new: string(), }).strict().optional(), alteredUsing: object({ __old: string(), __new: string(), }).strict().optional(), }).strict(), ); const alteredMySqlViewSchema = alteredViewCommon.merge( object({ alteredMeta: object({ __old: string(), __new: string(), }).strict().optional(), }).strict(), ); export const diffResultScheme = object({ alteredTablesWithColumns: alteredTableScheme.array(), alteredEnums: changedEnumSchema.array(), alteredSequences: sequenceSquashed.array(), alteredRoles: roleSchema.array(), alteredPolicies: policySquashed.array(), alteredViews: alteredPgViewSchema.array(), }).strict(); export const diffResultSchemeMysql = object({ alteredTablesWithColumns: alteredTableScheme.array(), alteredEnums: never().array(), alteredViews: alteredMySqlViewSchema.array(), }); export const diffResultSchemeSingleStore = object({ alteredTablesWithColumns: alteredTableScheme.array(), alteredEnums: never().array(), }); export const diffResultSchemeSQLite = object({ alteredTablesWithColumns: alteredTableScheme.array(), alteredEnums: never().array(), alteredViews: alteredViewCommon.array(), }); export type Column = TypeOf; export type AlteredColumn = TypeOf; export type Enum = TypeOf; export type Sequence = TypeOf; export type Table = TypeOf; export type AlteredTable = TypeOf; export type DiffResult = TypeOf; export type DiffResultMysql = TypeOf; export type DiffResultSingleStore = TypeOf; export type DiffResultSQLite = TypeOf; export interface ResolverInput { created: T[]; deleted: T[]; } export interface ResolverOutput { created: T[]; renamed: { from: T; to: T }[]; deleted: T[]; } export interface ResolverOutputWithMoved { created: T[]; moved: { name: string; schemaFrom: string; schemaTo: string }[]; renamed: { from: T; to: T }[]; deleted: T[]; } export interface ColumnsResolverInput { tableName: string; schema: string; created: T[]; deleted: T[]; } export interface TablePolicyResolverInput { tableName: string; schema: string; created: T[]; deleted: T[]; } export interface TablePolicyResolverOutput { tableName: string; schema: string; created: T[]; renamed: { from: T; to: T }[]; deleted: T[]; } export interface PolicyResolverInput { created: T[]; deleted: T[]; } export interface PolicyResolverOutput { created: T[]; renamed: { from: T; to: T }[]; deleted: T[]; } export interface RolesResolverInput { created: T[]; deleted: T[]; } export interface RolesResolverOutput { created: T[]; renamed: { from: T; to: T }[]; deleted: T[]; } export interface ColumnsResolverOutput { tableName: string; schema: string; created: T[]; renamed: { from: T; to: T }[]; deleted: T[]; } const schemaChangeFor = ( table: NamedWithSchema, renamedSchemas: { from: Named; to: Named }[], ) => { for (let ren of renamedSchemas) { if (table.schema === ren.from.name) { return { key: `${ren.to.name}.${table.name}`, schema: ren.to.name }; } } return { key: `${table.schema || 'public'}.${table.name}`, schema: table.schema, }; }; const nameChangeFor = (table: Named, renamed: { from: Named; to: Named }[]) => { for (let ren of renamed) { if (table.name === ren.from.name) { return { name: ren.to.name }; } } return { name: table.name, }; }; const nameSchemaChangeFor = ( table: NamedWithSchema, renamedTables: { from: NamedWithSchema; to: NamedWithSchema }[], ) => { for (let ren of renamedTables) { if (table.name === ren.from.name && table.schema === ren.from.schema) { return { key: `${ren.to.schema || 'public'}.${ren.to.name}`, name: ren.to.name, schema: ren.to.schema, }; } } return { key: `${table.schema || 'public'}.${table.name}`, name: table.name, schema: table.schema, }; }; const columnChangeFor = ( column: string, renamedColumns: { from: Named; to: Named }[], ) => { for (let ren of renamedColumns) { if (column === ren.from.name) { return ren.to.name; } } return column; }; // resolve roles same as enums // create new json statements // sql generators // tests everything! export const applyPgSnapshotsDiff = async ( json1: PgSchemaSquashed, json2: PgSchemaSquashed, schemasResolver: ( input: ResolverInput, ) => Promise>, enumsResolver: ( input: ResolverInput, ) => Promise>, sequencesResolver: ( input: ResolverInput, ) => Promise>, policyResolver: ( input: TablePolicyResolverInput, ) => Promise>, indPolicyResolver: ( input: PolicyResolverInput, ) => Promise>, roleResolver: ( input: RolesResolverInput, ) => Promise>, tablesResolver: ( input: ResolverInput
, ) => Promise>, columnsResolver: ( input: ColumnsResolverInput, ) => Promise>, viewsResolver: ( input: ResolverInput, ) => Promise>, prevFull: PgSchema, curFull: PgSchema, action?: 'push' | undefined, ): Promise<{ statements: JsonStatement[]; sqlStatements: string[]; _meta: | { schemas: {}; tables: {}; columns: {}; } | undefined; }> => { const schemasDiff = diffSchemasOrTables(json1.schemas, json2.schemas); const { created: createdSchemas, deleted: deletedSchemas, renamed: renamedSchemas, } = await schemasResolver({ created: schemasDiff.added.map((it) => ({ name: it })), deleted: schemasDiff.deleted.map((it) => ({ name: it })), }); const schemasPatchedSnap1 = copy(json1); schemasPatchedSnap1.tables = mapEntries( schemasPatchedSnap1.tables, (_, it) => { const { key, schema } = schemaChangeFor(it, renamedSchemas); it.schema = schema; return [key, it]; }, ); schemasPatchedSnap1.enums = mapEntries(schemasPatchedSnap1.enums, (_, it) => { const { key, schema } = schemaChangeFor(it, renamedSchemas); it.schema = schema; return [key, it]; }); const enumsDiff = diffSchemasOrTables(schemasPatchedSnap1.enums, json2.enums); const { created: createdEnums, deleted: deletedEnums, renamed: renamedEnums, moved: movedEnums, } = await enumsResolver({ created: enumsDiff.added, deleted: enumsDiff.deleted, }); schemasPatchedSnap1.enums = mapEntries(schemasPatchedSnap1.enums, (_, it) => { const { key, name, schema } = nameSchemaChangeFor(it, renamedEnums); it.name = name; it.schema = schema; return [key, it]; }); const columnTypesChangeMap = renamedEnums.reduce( (acc, it) => { acc[`${it.from.schema}.${it.from.name}`] = { nameFrom: it.from.name, nameTo: it.to.name, schemaFrom: it.from.schema, schemaTo: it.to.schema, }; return acc; }, {} as Record< string, { nameFrom: string; nameTo: string; schemaFrom: string; schemaTo: string; } >, ); const columnTypesMovesMap = movedEnums.reduce( (acc, it) => { acc[`${it.schemaFrom}.${it.name}`] = { nameFrom: it.name, nameTo: it.name, schemaFrom: it.schemaFrom, schemaTo: it.schemaTo, }; return acc; }, {} as Record< string, { nameFrom: string; nameTo: string; schemaFrom: string; schemaTo: string; } >, ); schemasPatchedSnap1.tables = mapEntries( schemasPatchedSnap1.tables, (tableKey, tableValue) => { const patchedColumns = mapValues(tableValue.columns, (column) => { const key = `${column.typeSchema || 'public'}.${column.type}`; const change = columnTypesChangeMap[key] || columnTypesMovesMap[key]; if (change) { column.type = change.nameTo; column.typeSchema = change.schemaTo; } return column; }); tableValue.columns = patchedColumns; return [tableKey, tableValue]; }, ); schemasPatchedSnap1.sequences = mapEntries( schemasPatchedSnap1.sequences, (_, it) => { const { key, schema } = schemaChangeFor(it, renamedSchemas); it.schema = schema; return [key, it]; }, ); const sequencesDiff = diffSchemasOrTables( schemasPatchedSnap1.sequences, json2.sequences, ); const { created: createdSequences, deleted: deletedSequences, renamed: renamedSequences, moved: movedSequences, } = await sequencesResolver({ created: sequencesDiff.added, deleted: sequencesDiff.deleted, }); schemasPatchedSnap1.sequences = mapEntries( schemasPatchedSnap1.sequences, (_, it) => { const { key, name, schema } = nameSchemaChangeFor(it, renamedSequences); it.name = name; it.schema = schema; return [key, it]; }, ); const sequencesChangeMap = renamedSequences.reduce( (acc, it) => { acc[`${it.from.schema}.${it.from.name}`] = { nameFrom: it.from.name, nameTo: it.to.name, schemaFrom: it.from.schema, schemaTo: it.to.schema, }; return acc; }, {} as Record< string, { nameFrom: string; nameTo: string; schemaFrom: string; schemaTo: string; } >, ); const sequencesMovesMap = movedSequences.reduce( (acc, it) => { acc[`${it.schemaFrom}.${it.name}`] = { nameFrom: it.name, nameTo: it.name, schemaFrom: it.schemaFrom, schemaTo: it.schemaTo, }; return acc; }, {} as Record< string, { nameFrom: string; nameTo: string; schemaFrom: string; schemaTo: string; } >, ); schemasPatchedSnap1.tables = mapEntries( schemasPatchedSnap1.tables, (tableKey, tableValue) => { const patchedColumns = mapValues(tableValue.columns, (column) => { const key = `${column.typeSchema || 'public'}.${column.type}`; const change = sequencesChangeMap[key] || sequencesMovesMap[key]; if (change) { column.type = change.nameTo; column.typeSchema = change.schemaTo; } return column; }); tableValue.columns = patchedColumns; return [tableKey, tableValue]; }, ); const rolesDiff = diffSchemasOrTables( schemasPatchedSnap1.roles, json2.roles, ); const { created: createdRoles, deleted: deletedRoles, renamed: renamedRoles, } = await roleResolver({ created: rolesDiff.added, deleted: rolesDiff.deleted, }); schemasPatchedSnap1.roles = mapEntries( schemasPatchedSnap1.roles, (_, it) => { const { name } = nameChangeFor(it, renamedRoles); it.name = name; return [name, it]; }, ); const rolesChangeMap = renamedRoles.reduce( (acc, it) => { acc[it.from.name] = { nameFrom: it.from.name, nameTo: it.to.name, }; return acc; }, {} as Record< string, { nameFrom: string; nameTo: string; } >, ); schemasPatchedSnap1.roles = mapEntries( schemasPatchedSnap1.roles, (roleKey, roleValue) => { const key = roleKey; const change = rolesChangeMap[key]; if (change) { roleValue.name = change.nameTo; } return [roleKey, roleValue]; }, ); const tablesDiff = diffSchemasOrTables( schemasPatchedSnap1.tables as Record, json2.tables, ); const { created: createdTables, deleted: deletedTables, moved: movedTables, renamed: renamedTables, // renamed or moved } = await tablesResolver({ created: tablesDiff.added, deleted: tablesDiff.deleted, }); const tablesPatchedSnap1 = copy(schemasPatchedSnap1); tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { const { key, name, schema } = nameSchemaChangeFor(it, renamedTables); it.name = name; it.schema = schema; return [key, it]; }); const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); const columnRenames = [] as { table: string; schema: string; renames: { from: Column; to: Column }[]; }[]; const columnCreates = [] as { table: string; schema: string; columns: Column[]; }[]; const columnDeletes = [] as { table: string; schema: string; columns: Column[]; }[]; for (let entry of Object.values(res)) { const { renamed, created, deleted } = await columnsResolver({ tableName: entry.name, schema: entry.schema, deleted: entry.columns.deleted, created: entry.columns.added, }); if (created.length > 0) { columnCreates.push({ table: entry.name, schema: entry.schema, columns: created, }); } if (deleted.length > 0) { columnDeletes.push({ table: entry.name, schema: entry.schema, columns: deleted, }); } if (renamed.length > 0) { columnRenames.push({ table: entry.name, schema: entry.schema, renames: renamed, }); } } const columnRenamesDict = columnRenames.reduce( (acc, it) => { acc[`${it.schema || 'public'}.${it.table}`] = it.renames; return acc; }, {} as Record< string, { from: Named; to: Named; }[] >, ); const columnsPatchedSnap1 = copy(tablesPatchedSnap1); columnsPatchedSnap1.tables = mapEntries( columnsPatchedSnap1.tables, (tableKey, tableValue) => { const patchedColumns = mapKeys( tableValue.columns, (columnKey, column) => { const rens = columnRenamesDict[ `${tableValue.schema || 'public'}.${tableValue.name}` ] || []; const newName = columnChangeFor(columnKey, rens); column.name = newName; return newName; }, ); tableValue.columns = patchedColumns; return [tableKey, tableValue]; }, ); //// Policies const policyRes = diffPolicies(tablesPatchedSnap1.tables, json2.tables); const policyRenames = [] as { table: string; schema: string; renames: { from: Policy; to: Policy }[]; }[]; const policyCreates = [] as { table: string; schema: string; columns: Policy[]; }[]; const policyDeletes = [] as { table: string; schema: string; columns: Policy[]; }[]; for (let entry of Object.values(policyRes)) { const { renamed, created, deleted } = await policyResolver({ tableName: entry.name, schema: entry.schema, deleted: entry.policies.deleted.map( action === 'push' ? PgSquasher.unsquashPolicyPush : PgSquasher.unsquashPolicy, ), created: entry.policies.added.map(action === 'push' ? PgSquasher.unsquashPolicyPush : PgSquasher.unsquashPolicy), }); if (created.length > 0) { policyCreates.push({ table: entry.name, schema: entry.schema, columns: created, }); } if (deleted.length > 0) { policyDeletes.push({ table: entry.name, schema: entry.schema, columns: deleted, }); } if (renamed.length > 0) { policyRenames.push({ table: entry.name, schema: entry.schema, renames: renamed, }); } } const policyRenamesDict = columnRenames.reduce( (acc, it) => { acc[`${it.schema || 'public'}.${it.table}`] = it.renames; return acc; }, {} as Record< string, { from: Named; to: Named; }[] >, ); const policyPatchedSnap1 = copy(tablesPatchedSnap1); policyPatchedSnap1.tables = mapEntries( policyPatchedSnap1.tables, (tableKey, tableValue) => { const patchedPolicies = mapKeys( tableValue.policies, (policyKey, policy) => { const rens = policyRenamesDict[ `${tableValue.schema || 'public'}.${tableValue.name}` ] || []; const newName = columnChangeFor(policyKey, rens); const unsquashedPolicy = action === 'push' ? PgSquasher.unsquashPolicyPush(policy) : PgSquasher.unsquashPolicy(policy); unsquashedPolicy.name = newName; policy = PgSquasher.squashPolicy(unsquashedPolicy); return newName; }, ); tableValue.policies = patchedPolicies; return [tableKey, tableValue]; }, ); //// Individual policies const indPolicyRes = diffIndPolicies(policyPatchedSnap1.policies, json2.policies); const indPolicyCreates = [] as { policies: Policy[]; }[]; const indPolicyDeletes = [] as { policies: Policy[]; }[]; const { renamed: indPolicyRenames, created, deleted } = await indPolicyResolver({ deleted: indPolicyRes.deleted.map((t) => action === 'push' ? PgSquasher.unsquashPolicyPush(t.values) : PgSquasher.unsquashPolicy(t.values) ), created: indPolicyRes.added.map((t) => action === 'push' ? PgSquasher.unsquashPolicyPush(t.values) : PgSquasher.unsquashPolicy(t.values) ), }); if (created.length > 0) { indPolicyCreates.push({ policies: created, }); } if (deleted.length > 0) { indPolicyDeletes.push({ policies: deleted, }); } const indPolicyRenamesDict = indPolicyRenames.reduce( (acc, it) => { acc[it.from.name] = { nameFrom: it.from.name, nameTo: it.to.name, }; return acc; }, {} as Record< string, { nameFrom: string; nameTo: string; } >, ); const indPolicyPatchedSnap1 = copy(policyPatchedSnap1); indPolicyPatchedSnap1.policies = mapEntries( indPolicyPatchedSnap1.policies, (policyKey, policyValue) => { const key = policyKey; const change = indPolicyRenamesDict[key]; if (change) { policyValue.name = change.nameTo; } return [policyKey, policyValue]; }, ); //// const viewsDiff = diffSchemasOrTables(indPolicyPatchedSnap1.views, json2.views); const { created: createdViews, deleted: deletedViews, renamed: renamedViews, moved: movedViews, } = await viewsResolver({ created: viewsDiff.added, deleted: viewsDiff.deleted, }); const renamesViewDic: Record = {}; renamedViews.forEach((it) => { renamesViewDic[`${it.from.schema}.${it.from.name}`] = { to: it.to.name, from: it.from.name }; }); const movedViewDic: Record = {}; movedViews.forEach((it) => { movedViewDic[`${it.schemaFrom}.${it.name}`] = { to: it.schemaTo, from: it.schemaFrom }; }); const viewsPatchedSnap1 = copy(policyPatchedSnap1); viewsPatchedSnap1.views = mapEntries( viewsPatchedSnap1.views, (viewKey, viewValue) => { const rename = renamesViewDic[`${viewValue.schema}.${viewValue.name}`]; const moved = movedViewDic[`${viewValue.schema}.${viewValue.name}`]; if (rename) { viewValue.name = rename.to; viewKey = `${viewValue.schema}.${viewValue.name}`; } if (moved) viewKey = `${moved.to}.${viewValue.name}`; return [viewKey, viewValue]; }, ); const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); const typedResult: DiffResult = diffResultScheme.parse(diffResult); const jsonStatements: JsonStatement[] = []; const jsonCreateIndexesForCreatedTables = createdTables .map((it) => { return preparePgCreateIndexesJson( it.name, it.schema, it.indexes, curFull, action, ); }) .flat(); const jsonDropTables = deletedTables.map((it) => { return prepareDropTableJson(it); }); const jsonRenameTables = renamedTables.map((it) => { return prepareRenameTableJson(it.from, it.to); }); const alteredTables = typedResult.alteredTablesWithColumns; const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = []; const jsonDropColumnsStatemets: JsonDropColumnStatement[] = []; const jsonAddColumnsStatemets: JsonAddColumnStatement[] = []; for (let it of columnRenames) { jsonRenameColumnsStatements.push( ...prepareRenameColumns(it.table, it.schema, it.renames), ); } for (let it of columnDeletes) { jsonDropColumnsStatemets.push( ..._prepareDropColumns(it.table, it.schema, it.columns), ); } for (let it of columnCreates) { jsonAddColumnsStatemets.push( ..._prepareAddColumns(it.table, it.schema, it.columns), ); } const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; const jsonSetTableSchemas: JsonAlterTableSetSchema[] = []; if (movedTables) { for (let it of movedTables) { jsonSetTableSchemas.push({ type: 'alter_table_set_schema', tableName: it.name, schemaFrom: it.schemaFrom || 'public', schemaTo: it.schemaTo || 'public', }); } } const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; for (let it of alteredTables) { // This part is needed to make sure that same columns in a table are not triggered for change // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name // We double-check that pk with same set of columns are both in added and deleted diffs let addedColumns: { name: string; columns: string[] } | undefined; for (const addedPkName of Object.keys(it.addedCompositePKs)) { const addedPkColumns = it.addedCompositePKs[addedPkName]; addedColumns = PgSquasher.unsquashPK(addedPkColumns); } let deletedColumns: { name: string; columns: string[] } | undefined; for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; deletedColumns = PgSquasher.unsquashPK(deletedPkColumns); } // Don't need to sort, but need to add tests for it // addedColumns.sort(); // deletedColumns.sort(); const doPerformDeleteAndCreate = JSON.stringify(addedColumns ?? {}) !== JSON.stringify(deletedColumns ?? {}); let addedCompositePKs: JsonCreateCompositePK[] = []; let deletedCompositePKs: JsonDeleteCompositePK[] = []; let alteredCompositePKs: JsonAlterCompositePK[] = []; if (doPerformDeleteAndCreate) { addedCompositePKs = prepareAddCompositePrimaryKeyPg( it.name, it.schema, it.addedCompositePKs, curFull as PgSchema, ); deletedCompositePKs = prepareDeleteCompositePrimaryKeyPg( it.name, it.schema, it.deletedCompositePKs, prevFull as PgSchema, ); } alteredCompositePKs = prepareAlterCompositePrimaryKeyPg( it.name, it.schema, it.alteredCompositePKs, prevFull as PgSchema, curFull as PgSchema, ); // add logic for unique constraints let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; let createCheckConstraints: JsonCreateCheckConstraint[] = []; let deleteCheckConstraints: JsonDeleteCheckConstraint[] = []; addedUniqueConstraints = prepareAddUniqueConstraint( it.name, it.schema, it.addedUniqueConstraints, ); deletedUniqueConstraints = prepareDeleteUniqueConstraint( it.name, it.schema, it.deletedUniqueConstraints, ); if (it.alteredUniqueConstraints) { const added: Record = {}; const deleted: Record = {}; for (const k of Object.keys(it.alteredUniqueConstraints)) { added[k] = it.alteredUniqueConstraints[k].__new; deleted[k] = it.alteredUniqueConstraints[k].__old; } addedUniqueConstraints.push( ...prepareAddUniqueConstraint(it.name, it.schema, added), ); deletedUniqueConstraints.push( ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), ); } createCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); deleteCheckConstraints = prepareDeleteCheckConstraint( it.name, it.schema, it.deletedCheckConstraints, ); if (it.alteredCheckConstraints && action !== 'push') { const added: Record = {}; const deleted: Record = {}; for (const k of Object.keys(it.alteredCheckConstraints)) { added[k] = it.alteredCheckConstraints[k].__new; deleted[k] = it.alteredCheckConstraints[k].__old; } createCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); deleteCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); } jsonCreatedCheckConstraints.push(...createCheckConstraints); jsonDeletedCheckConstraints.push(...deleteCheckConstraints); jsonAddedCompositePKs.push(...addedCompositePKs); jsonDeletedCompositePKs.push(...deletedCompositePKs); jsonAlteredCompositePKs.push(...alteredCompositePKs); jsonAddedUniqueConstraints.push(...addedUniqueConstraints); jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); } const rColumns = jsonRenameColumnsStatements.map((it) => { const tableName = it.tableName; const schema = it.schema; return { from: { schema, table: tableName, column: it.oldColumnName }, to: { schema, table: tableName, column: it.newColumnName }, }; }); const jsonTableAlternations = alteredTables .map((it) => { return preparePgAlterColumns( it.name, it.schema, it.altered, json2, json1, action, ); }) .flat(); const jsonCreateIndexesFoAlteredTables = alteredTables .map((it) => { return preparePgCreateIndexesJson( it.name, it.schema, it.addedIndexes || {}, curFull, action, ); }) .flat(); const jsonDropIndexesForAllAlteredTables = alteredTables .map((it) => { return prepareDropIndexesJson( it.name, it.schema, it.deletedIndexes || {}, ); }) .flat(); const jsonCreatePoliciesStatements: JsonCreatePolicyStatement[] = []; const jsonDropPoliciesStatements: JsonDropPolicyStatement[] = []; const jsonAlterPoliciesStatements: JsonAlterPolicyStatement[] = []; const jsonRenamePoliciesStatements: JsonRenamePolicyStatement[] = []; const jsonRenameIndPoliciesStatements: JsonIndRenamePolicyStatement[] = []; const jsonCreateIndPoliciesStatements: JsonCreateIndPolicyStatement[] = []; const jsonDropIndPoliciesStatements: JsonDropIndPolicyStatement[] = []; const jsonAlterIndPoliciesStatements: JsonAlterIndPolicyStatement[] = []; const jsonEnableRLSStatements: JsonEnableRLSStatement[] = []; const jsonDisableRLSStatements: JsonDisableRLSStatement[] = []; for (let it of indPolicyRenames) { jsonRenameIndPoliciesStatements.push( ...prepareRenameIndPolicyJsons([it]), ); } for (const it of indPolicyCreates) { jsonCreateIndPoliciesStatements.push( ...prepareCreateIndPolicyJsons( it.policies, ), ); } for (const it of indPolicyDeletes) { jsonDropIndPoliciesStatements.push( ...prepareDropIndPolicyJsons( it.policies, ), ); } typedResult.alteredPolicies.forEach(({ values }) => { // return prepareAlterIndPolicyJson(json1.policies[it.name], json2.policies[it.name]); const policy = action === 'push' ? PgSquasher.unsquashPolicyPush(values) : PgSquasher.unsquashPolicy(values); const newPolicy = action === 'push' ? PgSquasher.unsquashPolicyPush(json2.policies[policy.name].values) : PgSquasher.unsquashPolicy(json2.policies[policy.name].values); const oldPolicy = action === 'push' ? PgSquasher.unsquashPolicyPush(json2.policies[policy.name].values) : PgSquasher.unsquashPolicy(json1.policies[policy.name].values); if (newPolicy.as !== oldPolicy.as) { jsonDropIndPoliciesStatements.push( ...prepareDropIndPolicyJsons( [oldPolicy], ), ); jsonCreateIndPoliciesStatements.push( ...prepareCreateIndPolicyJsons( [newPolicy], ), ); return; } if (newPolicy.for !== oldPolicy.for) { jsonDropIndPoliciesStatements.push( ...prepareDropIndPolicyJsons( [oldPolicy], ), ); jsonCreateIndPoliciesStatements.push( ...prepareCreateIndPolicyJsons( [newPolicy], ), ); return; } // alter jsonAlterIndPoliciesStatements.push( prepareAlterIndPolicyJson( oldPolicy, newPolicy, ), ); }); for (let it of policyRenames) { jsonRenamePoliciesStatements.push( ...prepareRenamePolicyJsons(it.table, it.schema, it.renames), ); } for (const it of policyCreates) { jsonCreatePoliciesStatements.push( ...prepareCreatePolicyJsons( it.table, it.schema, it.columns, ), ); } for (const it of policyDeletes) { jsonDropPoliciesStatements.push( ...prepareDropPolicyJsons( it.table, it.schema, it.columns, ), ); } alteredTables.forEach((it) => { // handle policies Object.keys(it.alteredPolicies).forEach((policyName: string) => { const newPolicy = action === 'push' ? PgSquasher.unsquashPolicyPush(it.alteredPolicies[policyName].__new) : PgSquasher.unsquashPolicy(it.alteredPolicies[policyName].__new); const oldPolicy = action === 'push' ? PgSquasher.unsquashPolicyPush(it.alteredPolicies[policyName].__old) : PgSquasher.unsquashPolicy(it.alteredPolicies[policyName].__old); if (newPolicy.as !== oldPolicy.as) { jsonDropPoliciesStatements.push( ...prepareDropPolicyJsons( it.name, it.schema, [oldPolicy], ), ); jsonCreatePoliciesStatements.push( ...prepareCreatePolicyJsons( it.name, it.schema, [newPolicy], ), ); return; } if (newPolicy.for !== oldPolicy.for) { jsonDropPoliciesStatements.push( ...prepareDropPolicyJsons( it.name, it.schema, [oldPolicy], ), ); jsonCreatePoliciesStatements.push( ...prepareCreatePolicyJsons( it.name, it.schema, [newPolicy], ), ); return; } // alter jsonAlterPoliciesStatements.push( prepareAlterPolicyJson( it.name, it.schema, it.alteredPolicies[policyName].__old, it.alteredPolicies[policyName].__new, ), ); }); // Handle enabling and disabling RLS for (const table of Object.values(json2.tables)) { const policiesInCurrentState = Object.keys(table.policies); const tableInPreviousState = columnsPatchedSnap1.tables[`${table.schema === '' ? 'public' : table.schema}.${table.name}`]; const policiesInPreviousState = tableInPreviousState ? Object.keys(tableInPreviousState.policies) : []; // const indPoliciesInCurrentState = Object.keys(table.policies); // const indPoliciesInPreviousState = Object.keys(columnsPatchedSnap1.policies); if ( (policiesInPreviousState.length === 0 && policiesInCurrentState.length > 0) && !table.isRLSEnabled ) { jsonEnableRLSStatements.push({ type: 'enable_rls', tableName: table.name, schema: table.schema }); } if ( (policiesInPreviousState.length > 0 && policiesInCurrentState.length === 0) && !table.isRLSEnabled ) { jsonDisableRLSStatements.push({ type: 'disable_rls', tableName: table.name, schema: table.schema }); } // handle table.isRLSEnabled const wasRlsEnabled = tableInPreviousState ? tableInPreviousState.isRLSEnabled : false; if (table.isRLSEnabled !== wasRlsEnabled) { if (table.isRLSEnabled) { // was force enabled jsonEnableRLSStatements.push({ type: 'enable_rls', tableName: table.name, schema: table.schema }); } else if ( !table.isRLSEnabled && policiesInCurrentState.length === 0 ) { // was force disabled jsonDisableRLSStatements.push({ type: 'disable_rls', tableName: table.name, schema: table.schema }); } } } for (const table of Object.values(columnsPatchedSnap1.tables)) { const tableInCurrentState = json2.tables[`${table.schema === '' ? 'public' : table.schema}.${table.name}`]; if (tableInCurrentState === undefined && !table.isRLSEnabled) { jsonDisableRLSStatements.push({ type: 'disable_rls', tableName: table.name, schema: table.schema }); } } // handle indexes const droppedIndexes = Object.keys(it.alteredIndexes).reduce( (current, item: string) => { current[item] = it.alteredIndexes[item].__old; return current; }, {} as Record, ); const createdIndexes = Object.keys(it.alteredIndexes).reduce( (current, item: string) => { current[item] = it.alteredIndexes[item].__new; return current; }, {} as Record, ); jsonCreateIndexesFoAlteredTables.push( ...preparePgCreateIndexesJson( it.name, it.schema, createdIndexes || {}, curFull, action, ), ); jsonDropIndexesForAllAlteredTables.push( ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), ); }); const jsonCreateReferencesForCreatedTables: JsonCreateReferenceStatement[] = createdTables .map((it) => { return prepareCreateReferencesJson(it.name, it.schema, it.foreignKeys); }) .flat(); const jsonReferencesForAlteredTables: JsonReferenceStatement[] = alteredTables .map((it) => { const forAdded = prepareCreateReferencesJson( it.name, it.schema, it.addedForeignKeys, ); const forAltered = prepareDropReferencesJson( it.name, it.schema, it.deletedForeignKeys, ); const alteredFKs = prepareAlterReferencesJson( it.name, it.schema, it.alteredForeignKeys, ); return [...forAdded, ...forAltered, ...alteredFKs]; }) .flat(); const jsonCreatedReferencesForAlteredTables = jsonReferencesForAlteredTables.filter((t) => t.type === 'create_reference' ); const jsonDroppedReferencesForAlteredTables = jsonReferencesForAlteredTables.filter((t) => t.type === 'delete_reference' ); // Sequences // - create sequence ✅ // - create sequence inside schema ✅ // - rename sequence ✅ // - change sequence schema ✅ // - change sequence schema + name ✅ // - drop sequence - check if sequence is in use. If yes - ??? // - change sequence values ✅ // Generated columns // - add generated // - drop generated // - create table with generated // - alter - should be not triggered, but should get warning const createEnums = createdEnums.map((it) => { return prepareCreateEnumJson(it.name, it.schema, it.values); }) ?? []; const dropEnums = deletedEnums.map((it) => { return prepareDropEnumJson(it.name, it.schema); }); const moveEnums = movedEnums.map((it) => { return prepareMoveEnumJson(it.name, it.schemaFrom, it.schemaTo); }); const renameEnums = renamedEnums.map((it) => { return prepareRenameEnumJson(it.from.name, it.to.name, it.to.schema); }); const jsonAlterEnumsWithAddedValues = typedResult.alteredEnums .map((it) => { return prepareAddValuesToEnumJson(it.name, it.schema, it.addedValues); }) .flat() ?? []; const jsonAlterEnumsWithDroppedValues = typedResult.alteredEnums .map((it) => { return prepareDropEnumValues(it.name, it.schema, it.deletedValues, curFull); }) .flat() ?? []; const createSequences = createdSequences.map((it) => { return prepareCreateSequenceJson(it); }) ?? []; const dropSequences = deletedSequences.map((it) => { return prepareDropSequenceJson(it.name, it.schema); }); const moveSequences = movedSequences.map((it) => { return prepareMoveSequenceJson(it.name, it.schemaFrom, it.schemaTo); }); const renameSequences = renamedSequences.map((it) => { return prepareRenameSequenceJson(it.from.name, it.to.name, it.to.schema); }); const jsonAlterSequences = typedResult.alteredSequences .map((it) => { return prepareAlterSequenceJson(it); }) .flat() ?? []; //////////// const createRoles = createdRoles.map((it) => { return prepareCreateRoleJson(it); }) ?? []; const dropRoles = deletedRoles.map((it) => { return prepareDropRoleJson(it.name); }); const renameRoles = renamedRoles.map((it) => { return prepareRenameRoleJson(it.from.name, it.to.name); }); const jsonAlterRoles = typedResult.alteredRoles .map((it) => { return prepareAlterRoleJson(it); }) .flat() ?? []; //////////// const createSchemas = prepareCreateSchemasJson( createdSchemas.map((it) => it.name), ); const renameSchemas = prepareRenameSchemasJson( renamedSchemas.map((it) => ({ from: it.from.name, to: it.to.name })), ); const dropSchemas = prepareDropSchemasJson( deletedSchemas.map((it) => it.name), ); const createTables = createdTables.map((it) => { return preparePgCreateTableJson(it, curFull); }); jsonCreatePoliciesStatements.push(...([] as JsonCreatePolicyStatement[]).concat( ...(createdTables.map((it) => prepareCreatePolicyJsons( it.name, it.schema, Object.values(it.policies).map(action === 'push' ? PgSquasher.unsquashPolicyPush : PgSquasher.unsquashPolicy), ) )), )); const createViews: JsonCreatePgViewStatement[] = []; const dropViews: JsonDropViewStatement[] = []; const renameViews: JsonRenameViewStatement[] = []; const alterViews: JsonAlterViewStatement[] = []; createViews.push( ...createdViews.filter((it) => !it.isExisting).map((it) => { return preparePgCreateViewJson( it.name, it.schema, it.definition!, it.materialized, it.withNoData, it.with, it.using, it.tablespace, ); }), ); dropViews.push( ...deletedViews.filter((it) => !it.isExisting).map((it) => { return prepareDropViewJson(it.name, it.schema, it.materialized); }), ); renameViews.push( ...renamedViews.filter((it) => !it.to.isExisting && !json1.views[`${it.from.schema}.${it.from.name}`].isExisting) .map((it) => { return prepareRenameViewJson(it.to.name, it.from.name, it.to.schema, it.to.materialized); }), ); alterViews.push( ...movedViews.filter((it) => !json2.views[`${it.schemaTo}.${it.name}`].isExisting && !json1.views[`${it.schemaFrom}.${it.name}`].isExisting ).map((it) => { return preparePgAlterViewAlterSchemaJson( it.schemaTo, it.schemaFrom, it.name, json2.views[`${it.schemaTo}.${it.name}`].materialized, ); }), ); const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[`${it.schema}.${it.name}`].isExisting); for (const alteredView of alteredViews) { const viewKey = `${alteredView.schema}.${alteredView.name}`; const { materialized, with: withOption, definition, withNoData, using, tablespace } = json2.views[viewKey]; if (alteredView.alteredExisting || (alteredView.alteredDefinition && action !== 'push')) { dropViews.push(prepareDropViewJson(alteredView.name, alteredView.schema, materialized)); createViews.push( preparePgCreateViewJson( alteredView.name, alteredView.schema, definition!, materialized, withNoData, withOption, using, tablespace, ), ); continue; } if (alteredView.addedWithOption) { alterViews.push( preparePgAlterViewAddWithOptionJson( alteredView.name, alteredView.schema, materialized, alteredView.addedWithOption, ), ); } if (alteredView.deletedWithOption) { alterViews.push( preparePgAlterViewDropWithOptionJson( alteredView.name, alteredView.schema, materialized, alteredView.deletedWithOption, ), ); } if (alteredView.addedWith) { alterViews.push( preparePgAlterViewAddWithOptionJson( alteredView.name, alteredView.schema, materialized, alteredView.addedWith, ), ); } if (alteredView.deletedWith) { alterViews.push( preparePgAlterViewDropWithOptionJson( alteredView.name, alteredView.schema, materialized, alteredView.deletedWith, ), ); } if (alteredView.alteredWith) { alterViews.push( preparePgAlterViewAddWithOptionJson( alteredView.name, alteredView.schema, materialized, alteredView.alteredWith, ), ); } if (alteredView.alteredTablespace) { alterViews.push( preparePgAlterViewAlterTablespaceJson( alteredView.name, alteredView.schema, materialized, alteredView.alteredTablespace.__new, ), ); } if (alteredView.alteredUsing) { alterViews.push( preparePgAlterViewAlterUsingJson( alteredView.name, alteredView.schema, materialized, alteredView.alteredUsing.__new, ), ); } } jsonStatements.push(...createSchemas); jsonStatements.push(...renameSchemas); jsonStatements.push(...createEnums); jsonStatements.push(...moveEnums); jsonStatements.push(...renameEnums); jsonStatements.push(...jsonAlterEnumsWithAddedValues); jsonStatements.push(...createSequences); jsonStatements.push(...moveSequences); jsonStatements.push(...renameSequences); jsonStatements.push(...jsonAlterSequences); jsonStatements.push(...renameRoles); jsonStatements.push(...dropRoles); jsonStatements.push(...createRoles); jsonStatements.push(...jsonAlterRoles); jsonStatements.push(...createTables); jsonStatements.push(...jsonEnableRLSStatements); jsonStatements.push(...jsonDisableRLSStatements); jsonStatements.push(...dropViews); jsonStatements.push(...renameViews); jsonStatements.push(...alterViews); jsonStatements.push(...jsonDropTables); jsonStatements.push(...jsonSetTableSchemas); jsonStatements.push(...jsonRenameTables); jsonStatements.push(...jsonRenameColumnsStatements); jsonStatements.push(...jsonDeletedUniqueConstraints); jsonStatements.push(...jsonDeletedCheckConstraints); jsonStatements.push(...jsonDroppedReferencesForAlteredTables); jsonStatements.push(...jsonAlterEnumsWithDroppedValues); // Will need to drop indexes before changing any columns in table // Then should go column alternations and then index creation jsonStatements.push(...jsonDropIndexesForAllAlteredTables); jsonStatements.push(...jsonDeletedCompositePKs); jsonStatements.push(...jsonTableAlternations); jsonStatements.push(...jsonAddedCompositePKs); jsonStatements.push(...jsonAddColumnsStatemets); jsonStatements.push(...jsonCreateReferencesForCreatedTables); jsonStatements.push(...jsonCreateIndexesForCreatedTables); jsonStatements.push(...jsonCreatedReferencesForAlteredTables); jsonStatements.push(...jsonCreateIndexesFoAlteredTables); jsonStatements.push(...jsonDropColumnsStatemets); jsonStatements.push(...jsonAlteredCompositePKs); jsonStatements.push(...jsonAddedUniqueConstraints); jsonStatements.push(...jsonCreatedCheckConstraints); jsonStatements.push(...jsonAlteredUniqueConstraints); jsonStatements.push(...createViews); jsonStatements.push(...jsonRenamePoliciesStatements); jsonStatements.push(...jsonDropPoliciesStatements); jsonStatements.push(...jsonCreatePoliciesStatements); jsonStatements.push(...jsonAlterPoliciesStatements); jsonStatements.push(...jsonRenameIndPoliciesStatements); jsonStatements.push(...jsonDropIndPoliciesStatements); jsonStatements.push(...jsonCreateIndPoliciesStatements); jsonStatements.push(...jsonAlterIndPoliciesStatements); jsonStatements.push(...dropEnums); jsonStatements.push(...dropSequences); jsonStatements.push(...dropSchemas); // generate filters const filteredJsonStatements = jsonStatements.filter((st) => { if (st.type === 'alter_table_alter_column_drop_notnull') { if ( jsonStatements.find( (it) => it.type === 'alter_table_alter_column_drop_identity' && it.tableName === st.tableName && it.schema === st.schema, ) ) { return false; } } if (st.type === 'alter_table_alter_column_set_notnull') { if ( jsonStatements.find( (it) => it.type === 'alter_table_alter_column_set_identity' && it.tableName === st.tableName && it.schema === st.schema, ) ) { return false; } } return true; }); // enum filters // Need to find add and drop enum values in same enum and remove add values const filteredEnumsJsonStatements = filteredJsonStatements.filter((st) => { if (st.type === 'alter_type_add_value') { if ( filteredJsonStatements.find( (it) => it.type === 'alter_type_drop_value' && it.name === st.name && it.enumSchema === st.schema, ) ) { return false; } } return true; }); // This is needed because in sql generator on type pg_alter_table_alter_column_set_type and alter_type_drop_value // drizzle kit checks whether column has defaults to cast them to new types properly const filteredEnums2JsonStatements = filteredEnumsJsonStatements.filter((st) => { if (st.type === 'alter_table_alter_column_set_default') { if ( filteredEnumsJsonStatements.find( (it) => it.type === 'pg_alter_table_alter_column_set_type' && it.columnDefault === st.newDefaultValue && it.columnName === st.columnName && it.tableName === st.tableName && it.schema === st.schema, ) ) { return false; } if ( filteredEnumsJsonStatements.find( (it) => it.type === 'alter_type_drop_value' && it.columnsWithEnum.find((column) => column.default === st.newDefaultValue && column.column === st.columnName && column.table === st.tableName && column.tableSchema === st.schema ), ) ) { return false; } } return true; }); const sqlStatements = fromJson(filteredEnums2JsonStatements, 'postgresql', action); const uniqueSqlStatements: string[] = []; sqlStatements.forEach((ss) => { if (!uniqueSqlStatements.includes(ss)) { uniqueSqlStatements.push(ss); } }); const rSchemas = renamedSchemas.map((it) => ({ from: it.from.name, to: it.to.name, })); const rTables = renamedTables.map((it) => { return { from: it.from, to: it.to }; }); const _meta = prepareMigrationMeta(rSchemas, rTables, rColumns); return { statements: filteredEnums2JsonStatements, sqlStatements: uniqueSqlStatements, _meta, }; }; export const applyMysqlSnapshotsDiff = async ( json1: MySqlSchemaSquashed, json2: MySqlSchemaSquashed, tablesResolver: ( input: ResolverInput
, ) => Promise>, columnsResolver: ( input: ColumnsResolverInput, ) => Promise>, viewsResolver: ( input: ResolverInput, ) => Promise>, prevFull: MySqlSchema, curFull: MySqlSchema, action?: 'push' | undefined, ): Promise<{ statements: JsonStatement[]; sqlStatements: string[]; _meta: | { schemas: {}; tables: {}; columns: {}; } | undefined; }> => { // squash indexes and fks // squash uniqueIndexes and uniqueConstraint into constraints object // it should be done for mysql only because it has no diffs for it // TODO: @AndriiSherman // Add an upgrade to v6 and move all snaphosts to this strcutre // After that we can generate mysql in 1 object directly(same as sqlite) for (const tableName in json1.tables) { const table = json1.tables[tableName]; for (const indexName in table.indexes) { const index = MySqlSquasher.unsquashIdx(table.indexes[indexName]); if (index.isUnique) { table.uniqueConstraints[indexName] = MySqlSquasher.squashUnique({ name: index.name, columns: index.columns, }); delete json1.tables[tableName].indexes[index.name]; } } } for (const tableName in json2.tables) { const table = json2.tables[tableName]; for (const indexName in table.indexes) { const index = MySqlSquasher.unsquashIdx(table.indexes[indexName]); if (index.isUnique) { table.uniqueConstraints[indexName] = MySqlSquasher.squashUnique({ name: index.name, columns: index.columns, }); delete json2.tables[tableName].indexes[index.name]; } } } const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); const { created: createdTables, deleted: deletedTables, renamed: renamedTables, // renamed or moved } = await tablesResolver({ created: tablesDiff.added, deleted: tablesDiff.deleted, }); const tablesPatchedSnap1 = copy(json1); tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { const { name } = nameChangeFor(it, renamedTables); it.name = name; return [name, it]; }); const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); const columnRenames = [] as { table: string; renames: { from: Column; to: Column }[]; }[]; const columnCreates = [] as { table: string; columns: Column[]; }[]; const columnDeletes = [] as { table: string; columns: Column[]; }[]; for (let entry of Object.values(res)) { const { renamed, created, deleted } = await columnsResolver({ tableName: entry.name, schema: entry.schema, deleted: entry.columns.deleted, created: entry.columns.added, }); if (created.length > 0) { columnCreates.push({ table: entry.name, columns: created, }); } if (deleted.length > 0) { columnDeletes.push({ table: entry.name, columns: deleted, }); } if (renamed.length > 0) { columnRenames.push({ table: entry.name, renames: renamed, }); } } const columnRenamesDict = columnRenames.reduce( (acc, it) => { acc[it.table] = it.renames; return acc; }, {} as Record< string, { from: Named; to: Named; }[] >, ); const columnsPatchedSnap1 = copy(tablesPatchedSnap1); columnsPatchedSnap1.tables = mapEntries( columnsPatchedSnap1.tables, (tableKey, tableValue) => { const patchedColumns = mapKeys( tableValue.columns, (columnKey, column) => { const rens = columnRenamesDict[tableValue.name] || []; const newName = columnChangeFor(columnKey, rens); column.name = newName; return newName; }, ); tableValue.columns = patchedColumns; return [tableKey, tableValue]; }, ); const viewsDiff = diffSchemasOrTables(json1.views, json2.views); const { created: createdViews, deleted: deletedViews, renamed: renamedViews, // renamed or moved } = await viewsResolver({ created: viewsDiff.added, deleted: viewsDiff.deleted, }); const renamesViewDic: Record = {}; renamedViews.forEach((it) => { renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; }); const viewsPatchedSnap1 = copy(columnsPatchedSnap1); viewsPatchedSnap1.views = mapEntries( viewsPatchedSnap1.views, (viewKey, viewValue) => { const rename = renamesViewDic[viewValue.name]; if (rename) { viewValue.name = rename.to; viewKey = rename.to; } return [viewKey, viewValue]; }, ); const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); const typedResult: DiffResultMysql = diffResultSchemeMysql.parse(diffResult); const jsonStatements: JsonStatement[] = []; const jsonCreateIndexesForCreatedTables = createdTables .map((it) => { return prepareCreateIndexesJson( it.name, it.schema, it.indexes, curFull.internal, ); }) .flat(); const jsonDropTables = deletedTables.map((it) => { return prepareDropTableJson(it); }); const jsonRenameTables = renamedTables.map((it) => { return prepareRenameTableJson(it.from, it.to); }); const alteredTables = typedResult.alteredTablesWithColumns; const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames .map((it) => prepareRenameColumns(it.table, '', it.renames)) .flat(); const jsonAddColumnsStatemets: JsonAddColumnStatement[] = columnCreates .map((it) => _prepareAddColumns(it.table, '', it.columns)) .flat(); const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes .map((it) => _prepareDropColumns(it.table, '', it.columns)) .flat(); alteredTables.forEach((it) => { // This part is needed to make sure that same columns in a table are not triggered for change // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name // We double-check that pk with same set of columns are both in added and deleted diffs let addedColumns: string[] = []; for (const addedPkName of Object.keys(it.addedCompositePKs)) { const addedPkColumns = it.addedCompositePKs[addedPkName]; addedColumns = MySqlSquasher.unsquashPK(addedPkColumns).columns; } let deletedColumns: string[] = []; for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; deletedColumns = MySqlSquasher.unsquashPK(deletedPkColumns).columns; } // Don't need to sort, but need to add tests for it // addedColumns.sort(); // deletedColumns.sort(); const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); let addedCompositePKs: JsonCreateCompositePK[] = []; let deletedCompositePKs: JsonDeleteCompositePK[] = []; let alteredCompositePKs: JsonAlterCompositePK[] = []; addedCompositePKs = prepareAddCompositePrimaryKeyMySql( it.name, it.addedCompositePKs, prevFull, curFull, ); deletedCompositePKs = prepareDeleteCompositePrimaryKeyMySql( it.name, it.deletedCompositePKs, prevFull, ); // } alteredCompositePKs = prepareAlterCompositePrimaryKeyMySql( it.name, it.alteredCompositePKs, prevFull, curFull, ); // add logic for unique constraints let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; let createdCheckConstraints: JsonCreateCheckConstraint[] = []; let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; addedUniqueConstraints = prepareAddUniqueConstraint( it.name, it.schema, it.addedUniqueConstraints, ); deletedUniqueConstraints = prepareDeleteUniqueConstraint( it.name, it.schema, it.deletedUniqueConstraints, ); if (it.alteredUniqueConstraints) { const added: Record = {}; const deleted: Record = {}; for (const k of Object.keys(it.alteredUniqueConstraints)) { added[k] = it.alteredUniqueConstraints[k].__new; deleted[k] = it.alteredUniqueConstraints[k].__old; } addedUniqueConstraints.push( ...prepareAddUniqueConstraint(it.name, it.schema, added), ); deletedUniqueConstraints.push( ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), ); } createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); deletedCheckConstraints = prepareDeleteCheckConstraint( it.name, it.schema, it.deletedCheckConstraints, ); // skip for push if (it.alteredCheckConstraints && action !== 'push') { const added: Record = {}; const deleted: Record = {}; for (const k of Object.keys(it.alteredCheckConstraints)) { added[k] = it.alteredCheckConstraints[k].__new; deleted[k] = it.alteredCheckConstraints[k].__old; } createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); } jsonAddedCompositePKs.push(...addedCompositePKs); jsonDeletedCompositePKs.push(...deletedCompositePKs); jsonAlteredCompositePKs.push(...alteredCompositePKs); jsonAddedUniqueConstraints.push(...addedUniqueConstraints); jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); jsonCreatedCheckConstraints.push(...createdCheckConstraints); jsonDeletedCheckConstraints.push(...deletedCheckConstraints); }); const rColumns = jsonRenameColumnsStatements.map((it) => { const tableName = it.tableName; const schema = it.schema; return { from: { schema, table: tableName, column: it.oldColumnName }, to: { schema, table: tableName, column: it.newColumnName }, }; }); const jsonTableAlternations = alteredTables .map((it) => { return prepareAlterColumnsMysql( it.name, it.schema, it.altered, json1, json2, action, ); }) .flat(); const jsonCreateIndexesForAllAlteredTables = alteredTables .map((it) => { return prepareCreateIndexesJson( it.name, it.schema, it.addedIndexes || {}, curFull.internal, ); }) .flat(); const jsonDropIndexesForAllAlteredTables = alteredTables .map((it) => { return prepareDropIndexesJson( it.name, it.schema, it.deletedIndexes || {}, ); }) .flat(); alteredTables.forEach((it) => { const droppedIndexes = Object.keys(it.alteredIndexes).reduce( (current, item: string) => { current[item] = it.alteredIndexes[item].__old; return current; }, {} as Record, ); const createdIndexes = Object.keys(it.alteredIndexes).reduce( (current, item: string) => { current[item] = it.alteredIndexes[item].__new; return current; }, {} as Record, ); jsonCreateIndexesForAllAlteredTables.push( ...prepareCreateIndexesJson(it.name, it.schema, createdIndexes || {}), ); jsonDropIndexesForAllAlteredTables.push( ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), ); }); const jsonCreateReferencesForCreatedTables: JsonCreateReferenceStatement[] = createdTables .map((it) => { return prepareCreateReferencesJson(it.name, it.schema, it.foreignKeys); }) .flat(); const jsonReferencesForAllAlteredTables: JsonReferenceStatement[] = alteredTables .map((it) => { const forAdded = prepareCreateReferencesJson( it.name, it.schema, it.addedForeignKeys, ); const forAltered = prepareDropReferencesJson( it.name, it.schema, it.deletedForeignKeys, ); const alteredFKs = prepareAlterReferencesJson( it.name, it.schema, it.alteredForeignKeys, ); return [...forAdded, ...forAltered, ...alteredFKs]; }) .flat(); const jsonCreatedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( (t) => t.type === 'create_reference', ); const jsonDroppedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( (t) => t.type === 'delete_reference', ); const jsonMySqlCreateTables = createdTables.map((it) => { return prepareMySqlCreateTableJson( it, curFull as MySqlSchema, curFull.internal, ); }); const createViews: JsonCreateMySqlViewStatement[] = []; const dropViews: JsonDropViewStatement[] = []; const renameViews: JsonRenameViewStatement[] = []; const alterViews: JsonAlterMySqlViewStatement[] = []; createViews.push( ...createdViews.filter((it) => !it.isExisting).map((it) => { return prepareMySqlCreateViewJson( it.name, it.definition!, it.meta, ); }), ); dropViews.push( ...deletedViews.filter((it) => !it.isExisting).map((it) => { return prepareDropViewJson(it.name); }), ); renameViews.push( ...renamedViews.filter((it) => !it.to.isExisting && !json1.views[it.from.name].isExisting).map((it) => { return prepareRenameViewJson(it.to.name, it.from.name); }), ); const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); for (const alteredView of alteredViews) { const { definition, meta } = json2.views[alteredView.name]; if (alteredView.alteredExisting) { dropViews.push(prepareDropViewJson(alteredView.name)); createViews.push( prepareMySqlCreateViewJson( alteredView.name, definition!, meta, ), ); continue; } if (alteredView.alteredDefinition && action !== 'push') { createViews.push( prepareMySqlCreateViewJson( alteredView.name, definition!, meta, true, ), ); continue; } if (alteredView.alteredMeta) { const view = curFull['views'][alteredView.name]; alterViews.push( prepareMySqlAlterView(view), ); } } jsonStatements.push(...jsonMySqlCreateTables); jsonStatements.push(...jsonDropTables); jsonStatements.push(...jsonRenameTables); jsonStatements.push(...jsonRenameColumnsStatements); jsonStatements.push(...dropViews); jsonStatements.push(...renameViews); jsonStatements.push(...alterViews); jsonStatements.push(...jsonDeletedUniqueConstraints); jsonStatements.push(...jsonDeletedCheckConstraints); jsonStatements.push(...jsonDroppedReferencesForAlteredTables); // Will need to drop indexes before changing any columns in table // Then should go column alternations and then index creation jsonStatements.push(...jsonDropIndexesForAllAlteredTables); jsonStatements.push(...jsonDeletedCompositePKs); jsonStatements.push(...jsonTableAlternations); jsonStatements.push(...jsonAddedCompositePKs); jsonStatements.push(...jsonAddColumnsStatemets); jsonStatements.push(...jsonAddedUniqueConstraints); jsonStatements.push(...jsonDeletedUniqueConstraints); jsonStatements.push(...jsonCreateReferencesForCreatedTables); jsonStatements.push(...jsonCreateIndexesForCreatedTables); jsonStatements.push(...jsonCreatedCheckConstraints); jsonStatements.push(...jsonCreatedReferencesForAlteredTables); jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); jsonStatements.push(...jsonDropColumnsStatemets); // jsonStatements.push(...jsonDeletedCompositePKs); // jsonStatements.push(...jsonAddedCompositePKs); jsonStatements.push(...jsonAlteredCompositePKs); jsonStatements.push(...createViews); jsonStatements.push(...jsonAlteredUniqueConstraints); const sqlStatements = fromJson(jsonStatements, 'mysql'); const uniqueSqlStatements: string[] = []; sqlStatements.forEach((ss) => { if (!uniqueSqlStatements.includes(ss)) { uniqueSqlStatements.push(ss); } }); const rTables = renamedTables.map((it) => { return { from: it.from, to: it.to }; }); const _meta = prepareMigrationMeta([], rTables, rColumns); return { statements: jsonStatements, sqlStatements: uniqueSqlStatements, _meta, }; }; export const applySingleStoreSnapshotsDiff = async ( json1: SingleStoreSchemaSquashed, json2: SingleStoreSchemaSquashed, tablesResolver: ( input: ResolverInput
, ) => Promise>, columnsResolver: ( input: ColumnsResolverInput, ) => Promise>, /* viewsResolver: ( input: ResolverInput, ) => Promise>, */ prevFull: SingleStoreSchema, curFull: SingleStoreSchema, action?: 'push' | undefined, ): Promise<{ statements: JsonStatement[]; sqlStatements: string[]; _meta: | { schemas: {}; tables: {}; columns: {}; } | undefined; }> => { // squash indexes and fks // squash uniqueIndexes and uniqueConstraint into constraints object // it should be done for singlestore only because it has no diffs for it // TODO: @AndriiSherman // Add an upgrade to v6 and move all snaphosts to this strcutre // After that we can generate singlestore in 1 object directly(same as sqlite) for (const tableName in json1.tables) { const table = json1.tables[tableName]; for (const indexName in table.indexes) { const index = SingleStoreSquasher.unsquashIdx(table.indexes[indexName]); if (index.isUnique) { table.uniqueConstraints[indexName] = SingleStoreSquasher.squashUnique({ name: index.name, columns: index.columns, }); delete json1.tables[tableName].indexes[index.name]; } } } for (const tableName in json2.tables) { const table = json2.tables[tableName]; for (const indexName in table.indexes) { const index = SingleStoreSquasher.unsquashIdx(table.indexes[indexName]); if (index.isUnique) { table.uniqueConstraints[indexName] = SingleStoreSquasher.squashUnique({ name: index.name, columns: index.columns, }); delete json2.tables[tableName].indexes[index.name]; } } } const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); const { created: createdTables, deleted: deletedTables, renamed: renamedTables, // renamed or moved } = await tablesResolver({ created: tablesDiff.added, deleted: tablesDiff.deleted, }); const tablesPatchedSnap1 = copy(json1); tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { const { name } = nameChangeFor(it, renamedTables); it.name = name; return [name, it]; }); const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); const columnRenames = [] as { table: string; renames: { from: Column; to: Column }[]; }[]; const columnCreates = [] as { table: string; columns: Column[]; }[]; const columnDeletes = [] as { table: string; columns: Column[]; }[]; for (let entry of Object.values(res)) { const { renamed, created, deleted } = await columnsResolver({ tableName: entry.name, schema: entry.schema, deleted: entry.columns.deleted, created: entry.columns.added, }); if (created.length > 0) { columnCreates.push({ table: entry.name, columns: created, }); } if (deleted.length > 0) { columnDeletes.push({ table: entry.name, columns: deleted, }); } if (renamed.length > 0) { columnRenames.push({ table: entry.name, renames: renamed, }); } } const columnRenamesDict = columnRenames.reduce( (acc, it) => { acc[it.table] = it.renames; return acc; }, {} as Record< string, { from: Named; to: Named; }[] >, ); const columnsPatchedSnap1 = copy(tablesPatchedSnap1); columnsPatchedSnap1.tables = mapEntries( columnsPatchedSnap1.tables, (tableKey, tableValue) => { const patchedColumns = mapKeys( tableValue.columns, (columnKey, column) => { const rens = columnRenamesDict[tableValue.name] || []; const newName = columnChangeFor(columnKey, rens); column.name = newName; return newName; }, ); tableValue.columns = patchedColumns; return [tableKey, tableValue]; }, ); /* const viewsDiff = diffSchemasOrTables(json1.views, json2.views); const { created: createdViews, deleted: deletedViews, renamed: renamedViews, // renamed or moved } = await viewsResolver({ created: viewsDiff.added, deleted: viewsDiff.deleted, }); const renamesViewDic: Record = {}; renamedViews.forEach((it) => { renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; }); const viewsPatchedSnap1 = copy(columnsPatchedSnap1); viewsPatchedSnap1.views = mapEntries( viewsPatchedSnap1.views, (viewKey, viewValue) => { const rename = renamesViewDic[viewValue.name]; if (rename) { viewValue.name = rename.to; viewKey = rename.to; } return [viewKey, viewValue]; }, ); */ const diffResult = applyJsonDiff(columnsPatchedSnap1, json2); // replace columnsPatchedSnap1 with viewsPatchedSnap1 const typedResult: DiffResultSingleStore = diffResultSchemeSingleStore.parse(diffResult); const jsonStatements: JsonStatement[] = []; const jsonCreateIndexesForCreatedTables = createdTables .map((it) => { return prepareCreateIndexesJson( it.name, it.schema, it.indexes, curFull.internal, ); }) .flat(); const jsonDropTables = deletedTables.map((it) => { return prepareDropTableJson(it); }); const jsonRenameTables = renamedTables.map((it) => { return prepareRenameTableJson(it.from, it.to); }); const alteredTables = typedResult.alteredTablesWithColumns; const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames .map((it) => prepareRenameColumns(it.table, '', it.renames)) .flat(); const jsonAddColumnsStatemets: JsonAddColumnStatement[] = columnCreates .map((it) => _prepareAddColumns(it.table, '', it.columns)) .flat(); const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes .map((it) => _prepareDropColumns(it.table, '', it.columns)) .flat(); alteredTables.forEach((it) => { // This part is needed to make sure that same columns in a table are not triggered for change // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name // We double-check that pk with same set of columns are both in added and deleted diffs let addedColumns: string[] = []; for (const addedPkName of Object.keys(it.addedCompositePKs)) { const addedPkColumns = it.addedCompositePKs[addedPkName]; addedColumns = SingleStoreSquasher.unsquashPK(addedPkColumns).columns; } let deletedColumns: string[] = []; for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; deletedColumns = SingleStoreSquasher.unsquashPK(deletedPkColumns).columns; } // Don't need to sort, but need to add tests for it // addedColumns.sort(); // deletedColumns.sort(); const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); // add logic for unique constraints let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; let createdCheckConstraints: JsonCreateCheckConstraint[] = []; let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; addedUniqueConstraints = prepareAddUniqueConstraint( it.name, it.schema, it.addedUniqueConstraints, ); deletedUniqueConstraints = prepareDeleteUniqueConstraint( it.name, it.schema, it.deletedUniqueConstraints, ); if (it.alteredUniqueConstraints) { const added: Record = {}; const deleted: Record = {}; for (const k of Object.keys(it.alteredUniqueConstraints)) { added[k] = it.alteredUniqueConstraints[k].__new; deleted[k] = it.alteredUniqueConstraints[k].__old; } addedUniqueConstraints.push( ...prepareAddUniqueConstraint(it.name, it.schema, added), ); deletedUniqueConstraints.push( ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), ); } createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); deletedCheckConstraints = prepareDeleteCheckConstraint( it.name, it.schema, it.deletedCheckConstraints, ); // skip for push if (it.alteredCheckConstraints && action !== 'push') { const added: Record = {}; const deleted: Record = {}; for (const k of Object.keys(it.alteredCheckConstraints)) { added[k] = it.alteredCheckConstraints[k].__new; deleted[k] = it.alteredCheckConstraints[k].__old; } createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); } jsonAddedUniqueConstraints.push(...addedUniqueConstraints); jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); }); const rColumns = jsonRenameColumnsStatements.map((it) => { const tableName = it.tableName; const schema = it.schema; return { from: { schema, table: tableName, column: it.oldColumnName }, to: { schema, table: tableName, column: it.newColumnName }, }; }); const jsonTableAlternations = alteredTables .map((it) => { return prepareAlterColumnsMysql( it.name, it.schema, it.altered, json1, json2, action, ); }) .flat(); const jsonCreateIndexesForAllAlteredTables = alteredTables .map((it) => { return prepareCreateIndexesJson( it.name, it.schema, it.addedIndexes || {}, curFull.internal, ); }) .flat(); const jsonDropIndexesForAllAlteredTables = alteredTables .map((it) => { return prepareDropIndexesJson( it.name, it.schema, it.deletedIndexes || {}, ); }) .flat(); alteredTables.forEach((it) => { const droppedIndexes = Object.keys(it.alteredIndexes).reduce( (current, item: string) => { current[item] = it.alteredIndexes[item].__old; return current; }, {} as Record, ); const createdIndexes = Object.keys(it.alteredIndexes).reduce( (current, item: string) => { current[item] = it.alteredIndexes[item].__new; return current; }, {} as Record, ); jsonCreateIndexesForAllAlteredTables.push( ...prepareCreateIndexesJson(it.name, it.schema, createdIndexes || {}), ); jsonDropIndexesForAllAlteredTables.push( ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), ); }); const jsonSingleStoreCreateTables = createdTables.map((it) => { return prepareSingleStoreCreateTableJson( it, curFull as SingleStoreSchema, curFull.internal, ); }); /* const createViews: JsonCreateSingleStoreViewStatement[] = []; const dropViews: JsonDropViewStatement[] = []; const renameViews: JsonRenameViewStatement[] = []; const alterViews: JsonAlterSingleStoreViewStatement[] = []; createViews.push( ...createdViews.filter((it) => !it.isExisting).map((it) => { return prepareSingleStoreCreateViewJson( it.name, it.definition!, it.meta, ); }), ); dropViews.push( ...deletedViews.filter((it) => !it.isExisting).map((it) => { return prepareDropViewJson(it.name); }), ); renameViews.push( ...renamedViews.filter((it) => !it.to.isExisting && !json1.views[it.from.name].isExisting).map((it) => { return prepareRenameViewJson(it.to.name, it.from.name); }), ); const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); for (const alteredView of alteredViews) { const { definition, meta } = json2.views[alteredView.name]; if (alteredView.alteredExisting) { dropViews.push(prepareDropViewJson(alteredView.name)); createViews.push( prepareSingleStoreCreateViewJson( alteredView.name, definition!, meta, ), ); continue; } if (alteredView.alteredDefinition && action !== 'push') { createViews.push( prepareSingleStoreCreateViewJson( alteredView.name, definition!, meta, true, ), ); continue; } if (alteredView.alteredMeta) { const view = curFull['views'][alteredView.name]; alterViews.push( prepareSingleStoreAlterView(view), ); } } */ jsonStatements.push(...jsonSingleStoreCreateTables); jsonStatements.push(...jsonDropTables); jsonStatements.push(...jsonRenameTables); jsonStatements.push(...jsonRenameColumnsStatements); /*jsonStatements.push(...createViews); jsonStatements.push(...dropViews); jsonStatements.push(...renameViews); jsonStatements.push(...alterViews); */ jsonStatements.push(...jsonDeletedUniqueConstraints); // Will need to drop indexes before changing any columns in table // Then should go column alternations and then index creation jsonStatements.push(...jsonDropIndexesForAllAlteredTables); jsonStatements.push(...jsonTableAlternations); jsonStatements.push(...jsonAddedCompositePKs); jsonStatements.push(...jsonAddedUniqueConstraints); jsonStatements.push(...jsonDeletedUniqueConstraints); jsonStatements.push(...jsonAddColumnsStatemets); jsonStatements.push(...jsonCreateIndexesForCreatedTables); jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); jsonStatements.push(...jsonDropColumnsStatemets); jsonStatements.push(...jsonAddedCompositePKs); jsonStatements.push(...jsonAlteredUniqueConstraints); const combinedJsonStatements = singleStoreCombineStatements(jsonStatements, json2); const sqlStatements = fromJson(combinedJsonStatements, 'singlestore'); const uniqueSqlStatements: string[] = []; sqlStatements.forEach((ss) => { if (!uniqueSqlStatements.includes(ss)) { uniqueSqlStatements.push(ss); } }); const rTables = renamedTables.map((it) => { return { from: it.from, to: it.to }; }); const _meta = prepareMigrationMeta([], rTables, rColumns); return { statements: combinedJsonStatements, sqlStatements: uniqueSqlStatements, _meta, }; }; export const applySqliteSnapshotsDiff = async ( json1: SQLiteSchemaSquashed, json2: SQLiteSchemaSquashed, tablesResolver: ( input: ResolverInput
, ) => Promise>, columnsResolver: ( input: ColumnsResolverInput, ) => Promise>, viewsResolver: ( input: ResolverInput, ) => Promise>, prevFull: SQLiteSchema, curFull: SQLiteSchema, action?: 'push' | undefined, ): Promise<{ statements: JsonStatement[]; sqlStatements: string[]; _meta: | { schemas: {}; tables: {}; columns: {}; } | undefined; }> => { const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); const { created: createdTables, deleted: deletedTables, renamed: renamedTables, } = await tablesResolver({ created: tablesDiff.added, deleted: tablesDiff.deleted, }); const tablesPatchedSnap1 = copy(json1); tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { const { name } = nameChangeFor(it, renamedTables); it.name = name; return [name, it]; }); const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); const columnRenames = [] as { table: string; renames: { from: Column; to: Column }[]; }[]; const columnCreates = [] as { table: string; columns: Column[]; }[]; const columnDeletes = [] as { table: string; columns: Column[]; }[]; for (let entry of Object.values(res)) { const { renamed, created, deleted } = await columnsResolver({ tableName: entry.name, schema: entry.schema, deleted: entry.columns.deleted, created: entry.columns.added, }); if (created.length > 0) { columnCreates.push({ table: entry.name, columns: created, }); } if (deleted.length > 0) { columnDeletes.push({ table: entry.name, columns: deleted, }); } if (renamed.length > 0) { columnRenames.push({ table: entry.name, renames: renamed, }); } } const columnRenamesDict = columnRenames.reduce( (acc, it) => { acc[it.table] = it.renames; return acc; }, {} as Record< string, { from: Named; to: Named; }[] >, ); const columnsPatchedSnap1 = copy(tablesPatchedSnap1); columnsPatchedSnap1.tables = mapEntries( columnsPatchedSnap1.tables, (tableKey, tableValue) => { const patchedColumns = mapKeys( tableValue.columns, (columnKey, column) => { const rens = columnRenamesDict[tableValue.name] || []; const newName = columnChangeFor(columnKey, rens); column.name = newName; return newName; }, ); tableValue.columns = patchedColumns; return [tableKey, tableValue]; }, ); const viewsDiff = diffSchemasOrTables(json1.views, json2.views); const { created: createdViews, deleted: deletedViews, renamed: renamedViews, // renamed or moved } = await viewsResolver({ created: viewsDiff.added, deleted: viewsDiff.deleted, }); const renamesViewDic: Record = {}; renamedViews.forEach((it) => { renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; }); const viewsPatchedSnap1 = copy(columnsPatchedSnap1); viewsPatchedSnap1.views = mapEntries( viewsPatchedSnap1.views, (viewKey, viewValue) => { const rename = renamesViewDic[viewValue.name]; if (rename) { viewValue.name = rename.to; } return [viewKey, viewValue]; }, ); const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); const typedResult = diffResultSchemeSQLite.parse(diffResult); // Map array of objects to map const tablesMap: { [key: string]: (typeof typedResult.alteredTablesWithColumns)[number]; } = {}; typedResult.alteredTablesWithColumns.forEach((obj) => { tablesMap[obj.name] = obj; }); const jsonCreateTables = createdTables.map((it) => { return prepareSQLiteCreateTable(it, action); }); const jsonCreateIndexesForCreatedTables = createdTables .map((it) => { return prepareCreateIndexesJson( it.name, it.schema, it.indexes, curFull.internal, ); }) .flat(); const jsonDropTables = deletedTables.map((it) => { return prepareDropTableJson(it); }); const jsonRenameTables = renamedTables.map((it) => { return prepareRenameTableJson(it.from, it.to); }); const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames .map((it) => prepareRenameColumns(it.table, '', it.renames)) .flat(); const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes .map((it) => _prepareDropColumns(it.table, '', it.columns)) .flat(); const jsonAddColumnsStatemets: JsonSqliteAddColumnStatement[] = columnCreates .map((it) => { return _prepareSqliteAddColumns( it.table, it.columns, tablesMap[it.table] && tablesMap[it.table].addedForeignKeys ? Object.values(tablesMap[it.table].addedForeignKeys) : [], ); }) .flat(); const allAltered = typedResult.alteredTablesWithColumns; const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; allAltered.forEach((it) => { // This part is needed to make sure that same columns in a table are not triggered for change // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name // We double-check that pk with same set of columns are both in added and deleted diffs let addedColumns: string[] = []; for (const addedPkName of Object.keys(it.addedCompositePKs)) { const addedPkColumns = it.addedCompositePKs[addedPkName]; addedColumns = SQLiteSquasher.unsquashPK(addedPkColumns); } let deletedColumns: string[] = []; for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; deletedColumns = SQLiteSquasher.unsquashPK(deletedPkColumns); } // Don't need to sort, but need to add tests for it // addedColumns.sort(); // deletedColumns.sort(); const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); let addedCompositePKs: JsonCreateCompositePK[] = []; let deletedCompositePKs: JsonDeleteCompositePK[] = []; let alteredCompositePKs: JsonAlterCompositePK[] = []; if (doPerformDeleteAndCreate) { addedCompositePKs = prepareAddCompositePrimaryKeySqlite( it.name, it.addedCompositePKs, ); deletedCompositePKs = prepareDeleteCompositePrimaryKeySqlite( it.name, it.deletedCompositePKs, ); } alteredCompositePKs = prepareAlterCompositePrimaryKeySqlite( it.name, it.alteredCompositePKs, ); // add logic for unique constraints let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; addedUniqueConstraints = prepareAddUniqueConstraint( it.name, it.schema, it.addedUniqueConstraints, ); deletedUniqueConstraints = prepareDeleteUniqueConstraint( it.name, it.schema, it.deletedUniqueConstraints, ); if (it.alteredUniqueConstraints) { const added: Record = {}; const deleted: Record = {}; for (const k of Object.keys(it.alteredUniqueConstraints)) { added[k] = it.alteredUniqueConstraints[k].__new; deleted[k] = it.alteredUniqueConstraints[k].__old; } addedUniqueConstraints.push( ...prepareAddUniqueConstraint(it.name, it.schema, added), ); deletedUniqueConstraints.push( ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), ); } let createdCheckConstraints: JsonCreateCheckConstraint[] = []; let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; addedUniqueConstraints = prepareAddUniqueConstraint( it.name, it.schema, it.addedUniqueConstraints, ); deletedUniqueConstraints = prepareDeleteUniqueConstraint( it.name, it.schema, it.deletedUniqueConstraints, ); if (it.alteredUniqueConstraints) { const added: Record = {}; const deleted: Record = {}; for (const k of Object.keys(it.alteredUniqueConstraints)) { added[k] = it.alteredUniqueConstraints[k].__new; deleted[k] = it.alteredUniqueConstraints[k].__old; } addedUniqueConstraints.push( ...prepareAddUniqueConstraint(it.name, it.schema, added), ); deletedUniqueConstraints.push( ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), ); } createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); deletedCheckConstraints = prepareDeleteCheckConstraint( it.name, it.schema, it.deletedCheckConstraints, ); // skip for push if (it.alteredCheckConstraints && action !== 'push') { const added: Record = {}; const deleted: Record = {}; for (const k of Object.keys(it.alteredCheckConstraints)) { added[k] = it.alteredCheckConstraints[k].__new; deleted[k] = it.alteredCheckConstraints[k].__old; } createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); } jsonAddedCompositePKs.push(...addedCompositePKs); jsonDeletedCompositePKs.push(...deletedCompositePKs); jsonAlteredCompositePKs.push(...alteredCompositePKs); jsonAddedUniqueConstraints.push(...addedUniqueConstraints); jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); jsonCreatedCheckConstraints.push(...createdCheckConstraints); jsonDeletedCheckConstraints.push(...deletedCheckConstraints); }); const rColumns = jsonRenameColumnsStatements.map((it) => { const tableName = it.tableName; const schema = it.schema; return { from: { schema, table: tableName, column: it.oldColumnName }, to: { schema, table: tableName, column: it.newColumnName }, }; }); const jsonTableAlternations = allAltered .map((it) => { return prepareSqliteAlterColumns(it.name, it.schema, it.altered, json2); }) .flat(); const jsonCreateIndexesForAllAlteredTables = allAltered .map((it) => { return prepareCreateIndexesJson( it.name, it.schema, it.addedIndexes || {}, curFull.internal, ); }) .flat(); const jsonDropIndexesForAllAlteredTables = allAltered .map((it) => { return prepareDropIndexesJson( it.name, it.schema, it.deletedIndexes || {}, ); }) .flat(); allAltered.forEach((it) => { const droppedIndexes = Object.keys(it.alteredIndexes).reduce( (current, item: string) => { current[item] = it.alteredIndexes[item].__old; return current; }, {} as Record, ); const createdIndexes = Object.keys(it.alteredIndexes).reduce( (current, item: string) => { current[item] = it.alteredIndexes[item].__new; return current; }, {} as Record, ); jsonCreateIndexesForAllAlteredTables.push( ...prepareCreateIndexesJson( it.name, it.schema, createdIndexes || {}, curFull.internal, ), ); jsonDropIndexesForAllAlteredTables.push( ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), ); }); const jsonReferencesForAllAlteredTables: JsonReferenceStatement[] = allAltered .map((it) => { const forAdded = prepareCreateReferencesJson( it.name, it.schema, it.addedForeignKeys, ); const forAltered = prepareDropReferencesJson( it.name, it.schema, it.deletedForeignKeys, ); const alteredFKs = prepareAlterReferencesJson( it.name, it.schema, it.alteredForeignKeys, ); return [...forAdded, ...forAltered, ...alteredFKs]; }) .flat(); const jsonCreatedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( (t) => t.type === 'create_reference', ); const jsonDroppedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( (t) => t.type === 'delete_reference', ); const createViews: JsonCreateSqliteViewStatement[] = []; const dropViews: JsonDropViewStatement[] = []; createViews.push( ...createdViews.filter((it) => !it.isExisting).map((it) => { return prepareSqliteCreateViewJson( it.name, it.definition!, ); }), ); dropViews.push( ...deletedViews.filter((it) => !it.isExisting).map((it) => { return prepareDropViewJson(it.name); }), ); dropViews.push( ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { return prepareDropViewJson(it.from.name); }), ); createViews.push( ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { return prepareSqliteCreateViewJson(it.to.name, it.to.definition!); }), ); const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); for (const alteredView of alteredViews) { const { definition } = json2.views[alteredView.name]; if (alteredView.alteredExisting || (alteredView.alteredDefinition && action !== 'push')) { dropViews.push(prepareDropViewJson(alteredView.name)); createViews.push( prepareSqliteCreateViewJson( alteredView.name, definition!, ), ); } } const jsonStatements: JsonStatement[] = []; jsonStatements.push(...jsonCreateTables); jsonStatements.push(...jsonDropTables); jsonStatements.push(...jsonRenameTables); jsonStatements.push(...jsonRenameColumnsStatements); jsonStatements.push(...jsonDroppedReferencesForAlteredTables); jsonStatements.push(...jsonDeletedCheckConstraints); // Will need to drop indexes before changing any columns in table // Then should go column alternations and then index creation jsonStatements.push(...jsonDropIndexesForAllAlteredTables); jsonStatements.push(...jsonDeletedCompositePKs); jsonStatements.push(...jsonTableAlternations); jsonStatements.push(...jsonAddedCompositePKs); jsonStatements.push(...jsonAddColumnsStatemets); jsonStatements.push(...jsonCreateIndexesForCreatedTables); jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); jsonStatements.push(...jsonCreatedCheckConstraints); jsonStatements.push(...jsonCreatedReferencesForAlteredTables); jsonStatements.push(...jsonDropColumnsStatemets); // jsonStatements.push(...jsonDeletedCompositePKs); // jsonStatements.push(...jsonAddedCompositePKs); jsonStatements.push(...jsonAlteredCompositePKs); jsonStatements.push(...jsonAlteredUniqueConstraints); jsonStatements.push(...dropViews); jsonStatements.push(...createViews); const combinedJsonStatements = sqliteCombineStatements(jsonStatements, json2, action); const sqlStatements = fromJson(combinedJsonStatements, 'sqlite'); const uniqueSqlStatements: string[] = []; sqlStatements.forEach((ss) => { if (!uniqueSqlStatements.includes(ss)) { uniqueSqlStatements.push(ss); } }); const rTables = renamedTables.map((it) => { return { from: it.from, to: it.to }; }); const _meta = prepareMigrationMeta([], rTables, rColumns); return { statements: combinedJsonStatements, sqlStatements: uniqueSqlStatements, _meta, }; }; export const applyLibSQLSnapshotsDiff = async ( json1: SQLiteSchemaSquashed, json2: SQLiteSchemaSquashed, tablesResolver: ( input: ResolverInput
, ) => Promise>, columnsResolver: ( input: ColumnsResolverInput, ) => Promise>, viewsResolver: ( input: ResolverInput, ) => Promise>, prevFull: SQLiteSchema, curFull: SQLiteSchema, action?: 'push', ): Promise<{ statements: JsonStatement[]; sqlStatements: string[]; _meta: | { schemas: {}; tables: {}; columns: {}; } | undefined; }> => { const tablesDiff = diffSchemasOrTables(json1.tables, json2.tables); const { created: createdTables, deleted: deletedTables, renamed: renamedTables, } = await tablesResolver({ created: tablesDiff.added, deleted: tablesDiff.deleted, }); const tablesPatchedSnap1 = copy(json1); tablesPatchedSnap1.tables = mapEntries(tablesPatchedSnap1.tables, (_, it) => { const { name } = nameChangeFor(it, renamedTables); it.name = name; return [name, it]; }); const res = diffColumns(tablesPatchedSnap1.tables, json2.tables); const columnRenames = [] as { table: string; renames: { from: Column; to: Column }[]; }[]; const columnCreates = [] as { table: string; columns: Column[]; }[]; const columnDeletes = [] as { table: string; columns: Column[]; }[]; for (let entry of Object.values(res)) { const { renamed, created, deleted } = await columnsResolver({ tableName: entry.name, schema: entry.schema, deleted: entry.columns.deleted, created: entry.columns.added, }); if (created.length > 0) { columnCreates.push({ table: entry.name, columns: created, }); } if (deleted.length > 0) { columnDeletes.push({ table: entry.name, columns: deleted, }); } if (renamed.length > 0) { columnRenames.push({ table: entry.name, renames: renamed, }); } } const columnRenamesDict = columnRenames.reduce( (acc, it) => { acc[it.table] = it.renames; return acc; }, {} as Record< string, { from: Named; to: Named; }[] >, ); const columnsPatchedSnap1 = copy(tablesPatchedSnap1); columnsPatchedSnap1.tables = mapEntries( columnsPatchedSnap1.tables, (tableKey, tableValue) => { const patchedColumns = mapKeys( tableValue.columns, (columnKey, column) => { const rens = columnRenamesDict[tableValue.name] || []; const newName = columnChangeFor(columnKey, rens); column.name = newName; return newName; }, ); tableValue.columns = patchedColumns; return [tableKey, tableValue]; }, ); const viewsDiff = diffSchemasOrTables(json1.views, json2.views); const { created: createdViews, deleted: deletedViews, renamed: renamedViews, // renamed or moved } = await viewsResolver({ created: viewsDiff.added, deleted: viewsDiff.deleted, }); const renamesViewDic: Record = {}; renamedViews.forEach((it) => { renamesViewDic[it.from.name] = { to: it.to.name, from: it.from.name }; }); const viewsPatchedSnap1 = copy(columnsPatchedSnap1); viewsPatchedSnap1.views = mapEntries( viewsPatchedSnap1.views, (viewKey, viewValue) => { const rename = renamesViewDic[viewValue.name]; if (rename) { viewValue.name = rename.to; } return [viewKey, viewValue]; }, ); const diffResult = applyJsonDiff(viewsPatchedSnap1, json2); const typedResult = diffResultSchemeSQLite.parse(diffResult); // Map array of objects to map const tablesMap: { [key: string]: (typeof typedResult.alteredTablesWithColumns)[number]; } = {}; typedResult.alteredTablesWithColumns.forEach((obj) => { tablesMap[obj.name] = obj; }); const jsonCreateTables = createdTables.map((it) => { return prepareSQLiteCreateTable(it, action); }); const jsonCreateIndexesForCreatedTables = createdTables .map((it) => { return prepareCreateIndexesJson( it.name, it.schema, it.indexes, curFull.internal, ); }) .flat(); const jsonDropTables = deletedTables.map((it) => { return prepareDropTableJson(it); }); const jsonRenameTables = renamedTables.map((it) => { return prepareRenameTableJson(it.from, it.to); }); const jsonRenameColumnsStatements: JsonRenameColumnStatement[] = columnRenames .map((it) => prepareRenameColumns(it.table, '', it.renames)) .flat(); const jsonDropColumnsStatemets: JsonDropColumnStatement[] = columnDeletes .map((it) => _prepareDropColumns(it.table, '', it.columns)) .flat(); const jsonAddColumnsStatemets: JsonSqliteAddColumnStatement[] = columnCreates .map((it) => { return _prepareSqliteAddColumns( it.table, it.columns, tablesMap[it.table] && tablesMap[it.table].addedForeignKeys ? Object.values(tablesMap[it.table].addedForeignKeys) : [], ); }) .flat(); const rColumns = jsonRenameColumnsStatements.map((it) => { const tableName = it.tableName; const schema = it.schema; return { from: { schema, table: tableName, column: it.oldColumnName }, to: { schema, table: tableName, column: it.newColumnName }, }; }); const rTables = renamedTables.map((it) => { return { from: it.from, to: it.to }; }); const _meta = prepareMigrationMeta([], rTables, rColumns); const allAltered = typedResult.alteredTablesWithColumns; const jsonAddedCompositePKs: JsonCreateCompositePK[] = []; const jsonDeletedCompositePKs: JsonDeleteCompositePK[] = []; const jsonAlteredCompositePKs: JsonAlterCompositePK[] = []; const jsonAddedUniqueConstraints: JsonCreateUniqueConstraint[] = []; const jsonDeletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; const jsonAlteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; const jsonDeletedCheckConstraints: JsonDeleteCheckConstraint[] = []; const jsonCreatedCheckConstraints: JsonCreateCheckConstraint[] = []; allAltered.forEach((it) => { // This part is needed to make sure that same columns in a table are not triggered for change // there is a case where orm and kit are responsible for pk name generation and one of them is not sorting name // We double-check that pk with same set of columns are both in added and deleted diffs let addedColumns: string[] = []; for (const addedPkName of Object.keys(it.addedCompositePKs)) { const addedPkColumns = it.addedCompositePKs[addedPkName]; addedColumns = SQLiteSquasher.unsquashPK(addedPkColumns); } let deletedColumns: string[] = []; for (const deletedPkName of Object.keys(it.deletedCompositePKs)) { const deletedPkColumns = it.deletedCompositePKs[deletedPkName]; deletedColumns = SQLiteSquasher.unsquashPK(deletedPkColumns); } // Don't need to sort, but need to add tests for it // addedColumns.sort(); // deletedColumns.sort(); const doPerformDeleteAndCreate = JSON.stringify(addedColumns) !== JSON.stringify(deletedColumns); let addedCompositePKs: JsonCreateCompositePK[] = []; let deletedCompositePKs: JsonDeleteCompositePK[] = []; let alteredCompositePKs: JsonAlterCompositePK[] = []; if (doPerformDeleteAndCreate) { addedCompositePKs = prepareAddCompositePrimaryKeySqlite( it.name, it.addedCompositePKs, ); deletedCompositePKs = prepareDeleteCompositePrimaryKeySqlite( it.name, it.deletedCompositePKs, ); } alteredCompositePKs = prepareAlterCompositePrimaryKeySqlite( it.name, it.alteredCompositePKs, ); // add logic for unique constraints let addedUniqueConstraints: JsonCreateUniqueConstraint[] = []; let deletedUniqueConstraints: JsonDeleteUniqueConstraint[] = []; let alteredUniqueConstraints: JsonAlterUniqueConstraint[] = []; let createdCheckConstraints: JsonCreateCheckConstraint[] = []; let deletedCheckConstraints: JsonDeleteCheckConstraint[] = []; addedUniqueConstraints = prepareAddUniqueConstraint( it.name, it.schema, it.addedUniqueConstraints, ); deletedUniqueConstraints = prepareDeleteUniqueConstraint( it.name, it.schema, it.deletedUniqueConstraints, ); if (it.alteredUniqueConstraints) { const added: Record = {}; const deleted: Record = {}; for (const k of Object.keys(it.alteredUniqueConstraints)) { added[k] = it.alteredUniqueConstraints[k].__new; deleted[k] = it.alteredUniqueConstraints[k].__old; } addedUniqueConstraints.push( ...prepareAddUniqueConstraint(it.name, it.schema, added), ); deletedUniqueConstraints.push( ...prepareDeleteUniqueConstraint(it.name, it.schema, deleted), ); } createdCheckConstraints = prepareAddCheckConstraint(it.name, it.schema, it.addedCheckConstraints); deletedCheckConstraints = prepareDeleteCheckConstraint( it.name, it.schema, it.deletedCheckConstraints, ); // skip for push if (it.alteredCheckConstraints && action !== 'push') { const added: Record = {}; const deleted: Record = {}; for (const k of Object.keys(it.alteredCheckConstraints)) { added[k] = it.alteredCheckConstraints[k].__new; deleted[k] = it.alteredCheckConstraints[k].__old; } createdCheckConstraints.push(...prepareAddCheckConstraint(it.name, it.schema, added)); deletedCheckConstraints.push(...prepareDeleteCheckConstraint(it.name, it.schema, deleted)); } jsonAddedCompositePKs.push(...addedCompositePKs); jsonDeletedCompositePKs.push(...deletedCompositePKs); jsonAlteredCompositePKs.push(...alteredCompositePKs); jsonAddedUniqueConstraints.push(...addedUniqueConstraints); jsonDeletedUniqueConstraints.push(...deletedUniqueConstraints); jsonAlteredUniqueConstraints.push(...alteredUniqueConstraints); jsonCreatedCheckConstraints.push(...createdCheckConstraints); jsonDeletedCheckConstraints.push(...deletedCheckConstraints); }); const jsonTableAlternations = allAltered .map((it) => { return prepareSqliteAlterColumns(it.name, it.schema, it.altered, json2); }) .flat(); const jsonCreateIndexesForAllAlteredTables = allAltered .map((it) => { return prepareCreateIndexesJson( it.name, it.schema, it.addedIndexes || {}, curFull.internal, ); }) .flat(); const jsonDropIndexesForAllAlteredTables = allAltered .map((it) => { return prepareDropIndexesJson( it.name, it.schema, it.deletedIndexes || {}, ); }) .flat(); allAltered.forEach((it) => { const droppedIndexes = Object.keys(it.alteredIndexes).reduce( (current, item: string) => { current[item] = it.alteredIndexes[item].__old; return current; }, {} as Record, ); const createdIndexes = Object.keys(it.alteredIndexes).reduce( (current, item: string) => { current[item] = it.alteredIndexes[item].__new; return current; }, {} as Record, ); jsonCreateIndexesForAllAlteredTables.push( ...prepareCreateIndexesJson( it.name, it.schema, createdIndexes || {}, curFull.internal, ), ); jsonDropIndexesForAllAlteredTables.push( ...prepareDropIndexesJson(it.name, it.schema, droppedIndexes || {}), ); }); const jsonReferencesForAllAlteredTables: JsonReferenceStatement[] = allAltered .map((it) => { const forAdded = prepareLibSQLCreateReferencesJson( it.name, it.schema, it.addedForeignKeys, json2, action, ); const forAltered = prepareLibSQLDropReferencesJson( it.name, it.schema, it.deletedForeignKeys, json2, _meta, action, ); const alteredFKs = prepareAlterReferencesJson(it.name, it.schema, it.alteredForeignKeys); return [...forAdded, ...forAltered, ...alteredFKs]; }) .flat(); const jsonCreatedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( (t) => t.type === 'create_reference', ); const jsonDroppedReferencesForAlteredTables = jsonReferencesForAllAlteredTables.filter( (t) => t.type === 'delete_reference', ); const createViews: JsonCreateSqliteViewStatement[] = []; const dropViews: JsonDropViewStatement[] = []; createViews.push( ...createdViews.filter((it) => !it.isExisting).map((it) => { return prepareSqliteCreateViewJson( it.name, it.definition!, ); }), ); dropViews.push( ...deletedViews.filter((it) => !it.isExisting).map((it) => { return prepareDropViewJson(it.name); }), ); // renames dropViews.push( ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { return prepareDropViewJson(it.from.name); }), ); createViews.push( ...renamedViews.filter((it) => !it.to.isExisting).map((it) => { return prepareSqliteCreateViewJson(it.to.name, it.to.definition!); }), ); const alteredViews = typedResult.alteredViews.filter((it) => !json2.views[it.name].isExisting); for (const alteredView of alteredViews) { const { definition } = json2.views[alteredView.name]; if (alteredView.alteredExisting || (alteredView.alteredDefinition && action !== 'push')) { dropViews.push(prepareDropViewJson(alteredView.name)); createViews.push( prepareSqliteCreateViewJson( alteredView.name, definition!, ), ); } } const jsonStatements: JsonStatement[] = []; jsonStatements.push(...jsonCreateTables); jsonStatements.push(...jsonDropTables); jsonStatements.push(...jsonRenameTables); jsonStatements.push(...jsonRenameColumnsStatements); jsonStatements.push(...jsonDroppedReferencesForAlteredTables); jsonStatements.push(...jsonDeletedCheckConstraints); // Will need to drop indexes before changing any columns in table // Then should go column alternations and then index creation jsonStatements.push(...jsonDropIndexesForAllAlteredTables); jsonStatements.push(...jsonDeletedCompositePKs); jsonStatements.push(...jsonTableAlternations); jsonStatements.push(...jsonAddedCompositePKs); jsonStatements.push(...jsonAddColumnsStatemets); jsonStatements.push(...jsonCreateIndexesForCreatedTables); jsonStatements.push(...jsonCreateIndexesForAllAlteredTables); jsonStatements.push(...jsonCreatedCheckConstraints); jsonStatements.push(...dropViews); jsonStatements.push(...createViews); jsonStatements.push(...jsonCreatedReferencesForAlteredTables); jsonStatements.push(...jsonDropColumnsStatemets); jsonStatements.push(...jsonAlteredCompositePKs); jsonStatements.push(...jsonAlteredUniqueConstraints); const combinedJsonStatements = libSQLCombineStatements(jsonStatements, json2, action); const sqlStatements = fromJson( combinedJsonStatements, 'turso', action, json2, ); const uniqueSqlStatements: string[] = []; sqlStatements.forEach((ss) => { if (!uniqueSqlStatements.includes(ss)) { uniqueSqlStatements.push(ss); } }); return { statements: combinedJsonStatements, sqlStatements: uniqueSqlStatements, _meta, }; }; // explicitely ask if tables were renamed, if yes - add those to altered tables, otherwise - deleted // double check if user wants to delete particular table and warn him on data loss ================================================ FILE: drizzle-kit/src/sqlgenerator.ts ================================================ import { BREAKPOINT } from './cli/commands/migrate'; import { JsonAddColumnStatement, JsonAddValueToEnumStatement, JsonAlterColumnAlterGeneratedStatement, JsonAlterColumnAlterIdentityStatement, JsonAlterColumnDropAutoincrementStatement, JsonAlterColumnDropDefaultStatement, JsonAlterColumnDropGeneratedStatement, JsonAlterColumnDropIdentityStatement, JsonAlterColumnDropNotNullStatement, JsonAlterColumnDropOnUpdateStatement, JsonAlterColumnDropPrimaryKeyStatement, JsonAlterColumnPgTypeStatement, JsonAlterColumnSetAutoincrementStatement, JsonAlterColumnSetDefaultStatement, JsonAlterColumnSetGeneratedStatement, JsonAlterColumnSetIdentityStatement, JsonAlterColumnSetNotNullStatement, JsonAlterColumnSetOnUpdateStatement, JsonAlterColumnSetPrimaryKeyStatement, JsonAlterColumnTypeStatement, JsonAlterCompositePK, JsonAlterIndPolicyStatement, JsonAlterMySqlViewStatement, JsonAlterPolicyStatement, JsonAlterReferenceStatement, JsonAlterRoleStatement, JsonAlterSequenceStatement, JsonAlterTableRemoveFromSchema, JsonAlterTableSetNewSchema, JsonAlterTableSetSchema, JsonAlterViewAddWithOptionStatement, JsonAlterViewAlterSchemaStatement, JsonAlterViewAlterTablespaceStatement, JsonAlterViewAlterUsingStatement, JsonAlterViewDropWithOptionStatement, JsonCreateCheckConstraint, JsonCreateCompositePK, JsonCreateEnumStatement, JsonCreateIndexStatement, JsonCreateIndPolicyStatement, JsonCreateMySqlViewStatement, JsonCreatePgViewStatement, JsonCreatePolicyStatement, JsonCreateReferenceStatement, JsonCreateRoleStatement, JsonCreateSchema, JsonCreateSequenceStatement, JsonCreateSqliteViewStatement, JsonCreateTableStatement, JsonCreateUniqueConstraint, JsonDeleteCheckConstraint, JsonDeleteCompositePK, JsonDeleteReferenceStatement, JsonDeleteUniqueConstraint, JsonDisableRLSStatement, JsonDropColumnStatement, JsonDropEnumStatement, JsonDropIndexStatement, JsonDropIndPolicyStatement, JsonDropPolicyStatement, JsonDropRoleStatement, JsonDropSequenceStatement, JsonDropTableStatement, JsonDropValueFromEnumStatement, JsonDropViewStatement, JsonEnableRLSStatement, JsonIndRenamePolicyStatement, JsonMoveEnumStatement, JsonMoveSequenceStatement, JsonPgCreateIndexStatement, JsonRecreateSingleStoreTableStatement, JsonRecreateTableStatement, JsonRenameColumnStatement, JsonRenameEnumStatement, JsonRenamePolicyStatement, JsonRenameRoleStatement, JsonRenameSchema, JsonRenameSequenceStatement, JsonRenameTableStatement, JsonRenameViewStatement, JsonSqliteAddColumnStatement, JsonSqliteCreateTableStatement, JsonStatement, } from './jsonStatements'; import { Dialect } from './schemaValidator'; import { MySqlSquasher } from './serializer/mysqlSchema'; import { PgSquasher, policy } from './serializer/pgSchema'; import { SingleStoreSquasher } from './serializer/singlestoreSchema'; import { SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema'; import { escapeSingleQuotes } from './utils'; const parseType = (schemaPrefix: string, type: string) => { const pgNativeTypes = [ 'uuid', 'smallint', 'integer', 'bigint', 'boolean', 'text', 'varchar', 'serial', 'bigserial', 'decimal', 'numeric', 'real', 'json', 'jsonb', 'time', 'time with time zone', 'time without time zone', 'time', 'timestamp', 'timestamp with time zone', 'timestamp without time zone', 'date', 'interval', 'bigint', 'bigserial', 'double precision', 'interval year', 'interval month', 'interval day', 'interval hour', 'interval minute', 'interval second', 'interval year to month', 'interval day to hour', 'interval day to minute', 'interval day to second', 'interval hour to minute', 'interval hour to second', 'interval minute to second', 'char', 'vector', 'geometry', 'halfvec', 'sparsevec', 'bit', ]; const arrayDefinitionRegex = /\[\d*(?:\[\d*\])*\]/g; const arrayDefinition = (type.match(arrayDefinitionRegex) ?? []).join(''); const withoutArrayDefinition = type.replace(arrayDefinitionRegex, ''); return pgNativeTypes.some((it) => type.startsWith(it)) ? `${withoutArrayDefinition}${arrayDefinition}` : `${schemaPrefix}"${withoutArrayDefinition}"${arrayDefinition}`; }; abstract class Convertor { abstract can( statement: JsonStatement, dialect: Dialect, ): boolean; abstract convert( statement: JsonStatement, json2?: SQLiteSchemaSquashed, action?: 'push', ): string | string[]; } class PgCreateRoleConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_role' && dialect === 'postgresql'; } override convert(statement: JsonCreateRoleStatement): string | string[] { return `CREATE ROLE "${statement.name}"${ statement.values.createDb || statement.values.createRole || !statement.values.inherit ? ` WITH${statement.values.createDb ? ' CREATEDB' : ''}${statement.values.createRole ? ' CREATEROLE' : ''}${ statement.values.inherit ? '' : ' NOINHERIT' }` : '' };`; } } class PgDropRoleConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_role' && dialect === 'postgresql'; } override convert(statement: JsonDropRoleStatement): string | string[] { return `DROP ROLE "${statement.name}";`; } } class PgRenameRoleConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_role' && dialect === 'postgresql'; } override convert(statement: JsonRenameRoleStatement): string | string[] { return `ALTER ROLE "${statement.nameFrom}" RENAME TO "${statement.nameTo}";`; } } class PgAlterRoleConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_role' && dialect === 'postgresql'; } override convert(statement: JsonAlterRoleStatement): string | string[] { return `ALTER ROLE "${statement.name}"${` WITH${statement.values.createDb ? ' CREATEDB' : ' NOCREATEDB'}${ statement.values.createRole ? ' CREATEROLE' : ' NOCREATEROLE' }${statement.values.inherit ? ' INHERIT' : ' NOINHERIT'}`};`; } } ///// class PgCreatePolicyConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_policy' && dialect === 'postgresql'; } override convert(statement: JsonCreatePolicyStatement): string | string[] { const policy = statement.data; const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; const usingPart = policy.using ? ` USING (${policy.using})` : ''; const withCheckPart = policy.withCheck ? ` WITH CHECK (${policy.withCheck})` : ''; const policyToPart = policy.to?.map((v) => ['current_user', 'current_role', 'session_user', 'public'].includes(v) ? v : `"${v}"` ).join(', '); return `CREATE POLICY "${policy.name}" ON ${tableNameWithSchema} AS ${policy.as?.toUpperCase()} FOR ${policy.for?.toUpperCase()} TO ${policyToPart}${usingPart}${withCheckPart};`; } } class PgDropPolicyConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_policy' && dialect === 'postgresql'; } override convert(statement: JsonDropPolicyStatement): string | string[] { const policy = statement.data; const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `DROP POLICY "${policy.name}" ON ${tableNameWithSchema} CASCADE;`; } } class PgRenamePolicyConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_policy' && dialect === 'postgresql'; } override convert(statement: JsonRenamePolicyStatement): string | string[] { const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER POLICY "${statement.oldName}" ON ${tableNameWithSchema} RENAME TO "${statement.newName}";`; } } class PgAlterPolicyConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_policy' && dialect === 'postgresql'; } override convert(statement: JsonAlterPolicyStatement, _dialect: any, action?: string): string | string[] { const newPolicy = action === 'push' ? PgSquasher.unsquashPolicyPush(statement.newData) : PgSquasher.unsquashPolicy(statement.newData); const oldPolicy = action === 'push' ? PgSquasher.unsquashPolicyPush(statement.oldData) : PgSquasher.unsquashPolicy(statement.oldData); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; const usingPart = newPolicy.using ? ` USING (${newPolicy.using})` : oldPolicy.using ? ` USING (${oldPolicy.using})` : ''; const withCheckPart = newPolicy.withCheck ? ` WITH CHECK (${newPolicy.withCheck})` : oldPolicy.withCheck ? ` WITH CHECK (${oldPolicy.withCheck})` : ''; return `ALTER POLICY "${oldPolicy.name}" ON ${tableNameWithSchema} TO ${newPolicy.to}${usingPart}${withCheckPart};`; } } //// class PgCreateIndPolicyConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_ind_policy' && dialect === 'postgresql'; } override convert(statement: JsonCreateIndPolicyStatement): string | string[] { const policy = statement.data; const usingPart = policy.using ? ` USING (${policy.using})` : ''; const withCheckPart = policy.withCheck ? ` WITH CHECK (${policy.withCheck})` : ''; const policyToPart = policy.to?.map((v) => ['current_user', 'current_role', 'session_user', 'public'].includes(v) ? v : `"${v}"` ).join(', '); return `CREATE POLICY "${policy.name}" ON ${policy.on} AS ${policy.as?.toUpperCase()} FOR ${policy.for?.toUpperCase()} TO ${policyToPart}${usingPart}${withCheckPart};`; } } class PgDropIndPolicyConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_ind_policy' && dialect === 'postgresql'; } override convert(statement: JsonDropIndPolicyStatement): string | string[] { const policy = statement.data; return `DROP POLICY "${policy.name}" ON ${policy.on} CASCADE;`; } } class PgRenameIndPolicyConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_ind_policy' && dialect === 'postgresql'; } override convert(statement: JsonIndRenamePolicyStatement): string | string[] { return `ALTER POLICY "${statement.oldName}" ON ${statement.tableKey} RENAME TO "${statement.newName}";`; } } class PgAlterIndPolicyConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_ind_policy' && dialect === 'postgresql'; } override convert(statement: JsonAlterIndPolicyStatement): string | string[] { const newPolicy = statement.newData; const oldPolicy = statement.oldData; const usingPart = newPolicy.using ? ` USING (${newPolicy.using})` : oldPolicy.using ? ` USING (${oldPolicy.using})` : ''; const withCheckPart = newPolicy.withCheck ? ` WITH CHECK (${newPolicy.withCheck})` : oldPolicy.withCheck ? ` WITH CHECK (${oldPolicy.withCheck})` : ''; return `ALTER POLICY "${oldPolicy.name}" ON ${oldPolicy.on} TO ${newPolicy.to}${usingPart}${withCheckPart};`; } } //// class PgEnableRlsConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'enable_rls' && dialect === 'postgresql'; } override convert(statement: JsonEnableRLSStatement): string { const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} ENABLE ROW LEVEL SECURITY;`; } } class PgDisableRlsConvertor extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'disable_rls' && dialect === 'postgresql'; } override convert(statement: JsonDisableRLSStatement): string { const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} DISABLE ROW LEVEL SECURITY;`; } } class PgCreateTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_table' && dialect === 'postgresql'; } convert(st: JsonCreateTableStatement) { const { tableName, schema, columns, compositePKs, uniqueConstraints, checkConstraints, policies, isRLSEnabled } = st; let statement = ''; const name = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; statement += `CREATE TABLE ${name} (\n`; for (let i = 0; i < columns.length; i++) { const column = columns[i]; const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; const notNullStatement = column.notNull && !column.identity ? ' NOT NULL' : ''; const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : ''; const uniqueConstraint = column.isUnique ? ` CONSTRAINT "${column.uniqueName}" UNIQUE${column.nullsNotDistinct ? ' NULLS NOT DISTINCT' : ''}` : ''; const schemaPrefix = column.typeSchema && column.typeSchema !== 'public' ? `"${column.typeSchema}".` : ''; const type = parseType(schemaPrefix, column.type); const generated = column.generated; const generatedStatement = generated ? ` GENERATED ALWAYS AS (${generated?.as}) STORED` : ''; const unsquashedIdentity = column.identity ? PgSquasher.unsquashIdentity(column.identity) : undefined; const identityWithSchema = schema ? `"${schema}"."${unsquashedIdentity?.name}"` : `"${unsquashedIdentity?.name}"`; const identity = unsquashedIdentity ? ` GENERATED ${ unsquashedIdentity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' } AS IDENTITY (sequence name ${identityWithSchema}${ unsquashedIdentity.increment ? ` INCREMENT BY ${unsquashedIdentity.increment}` : '' }${ unsquashedIdentity.minValue ? ` MINVALUE ${unsquashedIdentity.minValue}` : '' }${ unsquashedIdentity.maxValue ? ` MAXVALUE ${unsquashedIdentity.maxValue}` : '' }${ unsquashedIdentity.startWith ? ` START WITH ${unsquashedIdentity.startWith}` : '' }${unsquashedIdentity.cache ? ` CACHE ${unsquashedIdentity.cache}` : ''}${ unsquashedIdentity.cycle ? ` CYCLE` : '' })` : ''; statement += '\t' + `"${column.name}" ${type}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${uniqueConstraint}${identity}`; statement += i === columns.length - 1 ? '' : ',\n'; } if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) { statement += ',\n'; const compositePK = PgSquasher.unsquashPK(compositePKs[0]); statement += `\tCONSTRAINT "${st.compositePkName}" PRIMARY KEY(\"${compositePK.columns.join(`","`)}\")`; // statement += `\n`; } if ( typeof uniqueConstraints !== 'undefined' && uniqueConstraints.length > 0 ) { for (const uniqueConstraint of uniqueConstraints) { statement += ',\n'; const unsquashedUnique = PgSquasher.unsquashUnique(uniqueConstraint); statement += `\tCONSTRAINT "${unsquashedUnique.name}" UNIQUE${ unsquashedUnique.nullsNotDistinct ? ' NULLS NOT DISTINCT' : '' }(\"${unsquashedUnique.columns.join(`","`)}\")`; // statement += `\n`; } } if (typeof checkConstraints !== 'undefined' && checkConstraints.length > 0) { for (const checkConstraint of checkConstraints) { statement += ',\n'; const unsquashedCheck = PgSquasher.unsquashCheck(checkConstraint); statement += `\tCONSTRAINT "${unsquashedCheck.name}" CHECK (${unsquashedCheck.value})`; } } statement += `\n);`; statement += `\n`; const enableRls = new PgEnableRlsConvertor().convert({ type: 'enable_rls', tableName, schema, }); return [statement, ...(policies && policies.length > 0 || isRLSEnabled ? [enableRls] : [])]; } } class MySqlCreateTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_table' && dialect === 'mysql'; } convert(st: JsonCreateTableStatement) { const { tableName, columns, schema, checkConstraints, compositePKs, uniqueConstraints, internals, } = st; let statement = ''; statement += `CREATE TABLE \`${tableName}\` (\n`; for (let i = 0; i < columns.length; i++) { const column = columns[i]; const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; const notNullStatement = column.notNull ? ' NOT NULL' : ''; const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : ''; const onUpdateStatement = column.onUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; const autoincrementStatement = column.autoincrement ? ' AUTO_INCREMENT' : ''; const generatedStatement = column.generated ? ` GENERATED ALWAYS AS (${column.generated?.as}) ${column.generated?.type.toUpperCase()}` : ''; statement += '\t' + `\`${column.name}\` ${column.type}${autoincrementStatement}${primaryKeyStatement}${generatedStatement}${notNullStatement}${defaultStatement}${onUpdateStatement}`; statement += i === columns.length - 1 ? '' : ',\n'; } if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) { statement += ',\n'; const compositePK = MySqlSquasher.unsquashPK(compositePKs[0]); statement += `\tCONSTRAINT \`${st.compositePkName}\` PRIMARY KEY(\`${compositePK.columns.join(`\`,\``)}\`)`; } if ( typeof uniqueConstraints !== 'undefined' && uniqueConstraints.length > 0 ) { for (const uniqueConstraint of uniqueConstraints) { statement += ',\n'; const unsquashedUnique = MySqlSquasher.unsquashUnique(uniqueConstraint); const uniqueString = unsquashedUnique.columns .map((it) => { return internals?.indexes ? internals?.indexes[unsquashedUnique.name]?.columns[it] ?.isExpression ? it : `\`${it}\`` : `\`${it}\``; }) .join(','); statement += `\tCONSTRAINT \`${unsquashedUnique.name}\` UNIQUE(${uniqueString})`; } } if (typeof checkConstraints !== 'undefined' && checkConstraints.length > 0) { for (const checkConstraint of checkConstraints) { statement += ',\n'; const unsquashedCheck = MySqlSquasher.unsquashCheck(checkConstraint); statement += `\tCONSTRAINT \`${unsquashedCheck.name}\` CHECK(${unsquashedCheck.value})`; } } statement += `\n);`; statement += `\n`; return statement; } } export class SingleStoreCreateTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_table' && dialect === 'singlestore'; } convert(st: JsonCreateTableStatement) { const { tableName, columns, schema, compositePKs, uniqueConstraints, internals, } = st; let statement = ''; statement += `CREATE TABLE \`${tableName}\` (\n`; for (let i = 0; i < columns.length; i++) { const column = columns[i]; const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; const notNullStatement = column.notNull ? ' NOT NULL' : ''; const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : ''; const onUpdateStatement = column.onUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; const autoincrementStatement = column.autoincrement ? ' AUTO_INCREMENT' : ''; const generatedStatement = column.generated ? ` GENERATED ALWAYS AS (${column.generated?.as}) ${column.generated?.type.toUpperCase()}` : ''; statement += '\t' + `\`${column.name}\` ${column.type}${autoincrementStatement}${primaryKeyStatement}${notNullStatement}${defaultStatement}${onUpdateStatement}${generatedStatement}`; statement += i === columns.length - 1 ? '' : ',\n'; } if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) { statement += ',\n'; const compositePK = SingleStoreSquasher.unsquashPK(compositePKs[0]); statement += `\tCONSTRAINT \`${compositePK.name}\` PRIMARY KEY(\`${compositePK.columns.join(`\`,\``)}\`)`; } if ( typeof uniqueConstraints !== 'undefined' && uniqueConstraints.length > 0 ) { for (const uniqueConstraint of uniqueConstraints) { statement += ',\n'; const unsquashedUnique = SingleStoreSquasher.unsquashUnique(uniqueConstraint); const uniqueString = unsquashedUnique.columns .map((it) => { return internals?.indexes ? internals?.indexes[unsquashedUnique.name]?.columns[it] ?.isExpression ? it : `\`${it}\`` : `\`${it}\``; }) .join(','); statement += `\tCONSTRAINT \`${unsquashedUnique.name}\` UNIQUE(${uniqueString})`; } } statement += `\n);`; statement += `\n`; return statement; } } export class SQLiteCreateTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'sqlite_create_table' && (dialect === 'sqlite' || dialect === 'turso'); } convert(st: JsonSqliteCreateTableStatement) { const { tableName, columns, referenceData, compositePKs, uniqueConstraints, checkConstraints, } = st; let statement = ''; statement += `CREATE TABLE \`${tableName}\` (\n`; for (let i = 0; i < columns.length; i++) { const column = columns[i]; const primaryKeyStatement = column.primaryKey ? ' PRIMARY KEY' : ''; const notNullStatement = column.notNull ? ' NOT NULL' : ''; const defaultStatement = column.default !== undefined ? ` DEFAULT ${column.default}` : ''; const autoincrementStatement = column.autoincrement ? ' AUTOINCREMENT' : ''; const generatedStatement = column.generated ? ` GENERATED ALWAYS AS ${column.generated.as} ${column.generated.type.toUpperCase()}` : ''; statement += '\t'; statement += `\`${column.name}\` ${column.type}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${generatedStatement}${notNullStatement}`; statement += i === columns.length - 1 ? '' : ',\n'; } compositePKs.forEach((it) => { statement += ',\n\t'; statement += `PRIMARY KEY(${it.map((it) => `\`${it}\``).join(', ')})`; }); for (let i = 0; i < referenceData.length; i++) { const { name, tableFrom, tableTo, columnsFrom, columnsTo, onDelete, onUpdate, } = referenceData[i]; const onDeleteStatement = onDelete ? ` ON DELETE ${onDelete}` : ''; const onUpdateStatement = onUpdate ? ` ON UPDATE ${onUpdate}` : ''; const fromColumnsString = columnsFrom.map((it) => `\`${it}\``).join(','); const toColumnsString = columnsTo.map((it) => `\`${it}\``).join(','); statement += ','; statement += '\n\t'; statement += `FOREIGN KEY (${fromColumnsString}) REFERENCES \`${tableTo}\`(${toColumnsString})${onUpdateStatement}${onDeleteStatement}`; } if ( typeof uniqueConstraints !== 'undefined' && uniqueConstraints.length > 0 ) { for (const uniqueConstraint of uniqueConstraints) { statement += ',\n'; const unsquashedUnique = SQLiteSquasher.unsquashUnique(uniqueConstraint); statement += `\tCONSTRAINT ${unsquashedUnique.name} UNIQUE(\`${unsquashedUnique.columns.join(`\`,\``)}\`)`; } } if ( typeof checkConstraints !== 'undefined' && checkConstraints.length > 0 ) { for (const check of checkConstraints) { statement += ',\n'; const { value, name } = SQLiteSquasher.unsquashCheck(check); statement += `\tCONSTRAINT "${name}" CHECK(${value})`; } } statement += `\n`; statement += `);`; statement += `\n`; return statement; } } class PgCreateViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_view' && dialect === 'postgresql'; } convert(st: JsonCreatePgViewStatement) { const { definition, name: viewName, schema, with: withOption, materialized, withNoData, tablespace, using } = st; const name = schema ? `"${schema}"."${viewName}"` : `"${viewName}"`; let statement = materialized ? `CREATE MATERIALIZED VIEW ${name}` : `CREATE VIEW ${name}`; if (using) statement += ` USING "${using}"`; const options: string[] = []; if (withOption) { statement += ` WITH (`; Object.entries(withOption).forEach(([key, value]) => { if (typeof value === 'undefined') return; options.push(`${key.snake_case()} = ${value}`); }); statement += options.join(', '); statement += `)`; } if (tablespace) statement += ` TABLESPACE ${tablespace}`; statement += ` AS (${definition})`; if (withNoData) statement += ` WITH NO DATA`; statement += `;`; return statement; } } class MySqlCreateViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'mysql_create_view' && dialect === 'mysql'; } convert(st: JsonCreateMySqlViewStatement) { const { definition, name, algorithm, sqlSecurity, withCheckOption, replace } = st; let statement = `CREATE `; statement += replace ? `OR REPLACE ` : ''; statement += algorithm ? `ALGORITHM = ${algorithm}\n` : ''; statement += sqlSecurity ? `SQL SECURITY ${sqlSecurity}\n` : ''; statement += `VIEW \`${name}\` AS (${definition})`; statement += withCheckOption ? `\nWITH ${withCheckOption} CHECK OPTION` : ''; statement += ';'; return statement; } } class SqliteCreateViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'sqlite_create_view' && (dialect === 'sqlite' || dialect === 'turso'); } convert(st: JsonCreateSqliteViewStatement) { const { definition, name } = st; return `CREATE VIEW \`${name}\` AS ${definition};`; } } class PgDropViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_view' && dialect === 'postgresql'; } convert(st: JsonDropViewStatement) { const { name: viewName, schema, materialized } = st; const name = schema ? `"${schema}"."${viewName}"` : `"${viewName}"`; return `DROP${materialized ? ' MATERIALIZED' : ''} VIEW ${name};`; } } class MySqlDropViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_view' && dialect === 'mysql'; } convert(st: JsonDropViewStatement) { const { name } = st; return `DROP VIEW \`${name}\`;`; } } class SqliteDropViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_view' && (dialect === 'sqlite' || dialect === 'turso'); } convert(st: JsonDropViewStatement) { const { name } = st; return `DROP VIEW \`${name}\`;`; } } class MySqlAlterViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_mysql_view' && dialect === 'mysql'; } convert(st: JsonAlterMySqlViewStatement) { const { name, algorithm, definition, sqlSecurity, withCheckOption } = st; let statement = `ALTER `; statement += algorithm ? `ALGORITHM = ${algorithm}\n` : ''; statement += sqlSecurity ? `SQL SECURITY ${sqlSecurity}\n` : ''; statement += `VIEW \`${name}\` AS ${definition}`; statement += withCheckOption ? `\nWITH ${withCheckOption} CHECK OPTION` : ''; statement += ';'; return statement; } } class PgRenameViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_view' && dialect === 'postgresql'; } convert(st: JsonRenameViewStatement) { const { nameFrom: from, nameTo: to, schema, materialized } = st; const nameFrom = `"${schema}"."${from}"`; return `ALTER${materialized ? ' MATERIALIZED' : ''} VIEW ${nameFrom} RENAME TO "${to}";`; } } class MySqlRenameViewConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_view' && dialect === 'mysql'; } convert(st: JsonRenameViewStatement) { const { nameFrom: from, nameTo: to } = st; return `RENAME TABLE \`${from}\` TO \`${to}\`;`; } } class PgAlterViewSchemaConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_view_alter_schema' && dialect === 'postgresql'; } convert(st: JsonAlterViewAlterSchemaStatement) { const { fromSchema, toSchema, name, materialized } = st; const statement = `ALTER${ materialized ? ' MATERIALIZED' : '' } VIEW "${fromSchema}"."${name}" SET SCHEMA "${toSchema}";`; return statement; } } class PgAlterViewAddWithOptionConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_view_add_with_option' && dialect === 'postgresql'; } convert(st: JsonAlterViewAddWithOptionStatement) { const { schema, with: withOption, name, materialized } = st; let statement = `ALTER${materialized ? ' MATERIALIZED' : ''} VIEW "${schema}"."${name}" SET (`; const options: string[] = []; Object.entries(withOption).forEach(([key, value]) => { options.push(`${key.snake_case()} = ${value}`); }); statement += options.join(', '); statement += `);`; return statement; } } class PgAlterViewDropWithOptionConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_view_drop_with_option' && dialect === 'postgresql'; } convert(st: JsonAlterViewDropWithOptionStatement) { const { schema, name, materialized, with: withOptions } = st; let statement = `ALTER${materialized ? ' MATERIALIZED' : ''} VIEW "${schema}"."${name}" RESET (`; const options: string[] = []; Object.entries(withOptions).forEach(([key, value]) => { options.push(`${key.snake_case()}`); }); statement += options.join(', '); statement += ');'; return statement; } } class PgAlterViewAlterTablespaceConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_view_alter_tablespace' && dialect === 'postgresql'; } convert(st: JsonAlterViewAlterTablespaceStatement) { const { schema, name, toTablespace } = st; const statement = `ALTER MATERIALIZED VIEW "${schema}"."${name}" SET TABLESPACE ${toTablespace};`; return statement; } } class PgAlterViewAlterUsingConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_view_alter_using' && dialect === 'postgresql'; } convert(st: JsonAlterViewAlterUsingStatement) { const { schema, name, toUsing } = st; const statement = `ALTER MATERIALIZED VIEW "${schema}"."${name}" SET ACCESS METHOD "${toUsing}";`; return statement; } } class PgAlterTableAlterColumnSetGenerated extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_set_identity' && dialect === 'postgresql' ); } override convert( statement: JsonAlterColumnSetIdentityStatement, ): string | string[] { const { identity, tableName, columnName, schema } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; const unsquashedIdentity = PgSquasher.unsquashIdentity(identity); const identityWithSchema = schema ? `"${schema}"."${unsquashedIdentity?.name}"` : `"${unsquashedIdentity?.name}"`; const identityStatement = unsquashedIdentity ? ` GENERATED ${ unsquashedIdentity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' } AS IDENTITY (sequence name ${identityWithSchema}${ unsquashedIdentity.increment ? ` INCREMENT BY ${unsquashedIdentity.increment}` : '' }${ unsquashedIdentity.minValue ? ` MINVALUE ${unsquashedIdentity.minValue}` : '' }${ unsquashedIdentity.maxValue ? ` MAXVALUE ${unsquashedIdentity.maxValue}` : '' }${ unsquashedIdentity.startWith ? ` START WITH ${unsquashedIdentity.startWith}` : '' }${unsquashedIdentity.cache ? ` CACHE ${unsquashedIdentity.cache}` : ''}${ unsquashedIdentity.cycle ? ` CYCLE` : '' })` : ''; return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" ADD${identityStatement};`; } } class PgAlterTableAlterColumnDropGenerated extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_drop_identity' && dialect === 'postgresql' ); } override convert( statement: JsonAlterColumnDropIdentityStatement, ): string | string[] { const { tableName, columnName, schema } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP IDENTITY;`; } } class PgAlterTableAlterColumnAlterGenerated extends Convertor { override can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_change_identity' && dialect === 'postgresql' ); } override convert( statement: JsonAlterColumnAlterIdentityStatement, ): string | string[] { const { identity, oldIdentity, tableName, columnName, schema } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; const unsquashedIdentity = PgSquasher.unsquashIdentity(identity); const unsquashedOldIdentity = PgSquasher.unsquashIdentity(oldIdentity); const statementsToReturn: string[] = []; if (unsquashedOldIdentity.type !== unsquashedIdentity.type) { statementsToReturn.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET GENERATED ${ unsquashedIdentity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' };`, ); } if (unsquashedOldIdentity.minValue !== unsquashedIdentity.minValue) { statementsToReturn.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET MINVALUE ${unsquashedIdentity.minValue};`, ); } if (unsquashedOldIdentity.maxValue !== unsquashedIdentity.maxValue) { statementsToReturn.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET MAXVALUE ${unsquashedIdentity.maxValue};`, ); } if (unsquashedOldIdentity.increment !== unsquashedIdentity.increment) { statementsToReturn.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET INCREMENT BY ${unsquashedIdentity.increment};`, ); } if (unsquashedOldIdentity.startWith !== unsquashedIdentity.startWith) { statementsToReturn.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET START WITH ${unsquashedIdentity.startWith};`, ); } if (unsquashedOldIdentity.cache !== unsquashedIdentity.cache) { statementsToReturn.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET CACHE ${unsquashedIdentity.cache};`, ); } if (unsquashedOldIdentity.cycle !== unsquashedIdentity.cycle) { statementsToReturn.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET ${ unsquashedIdentity.cycle ? `CYCLE` : 'NO CYCLE' };`, ); } return statementsToReturn; } } class PgAlterTableAddUniqueConstraintConvertor extends Convertor { can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean { return ( statement.type === 'create_unique_constraint' && dialect === 'postgresql' ); } convert(statement: JsonCreateUniqueConstraint): string { const unsquashed = PgSquasher.unsquashUnique(statement.data); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${unsquashed.name}" UNIQUE${ unsquashed.nullsNotDistinct ? ' NULLS NOT DISTINCT' : '' }("${unsquashed.columns.join('","')}");`; } } class PgAlterTableDropUniqueConstraintConvertor extends Convertor { can(statement: JsonDeleteUniqueConstraint, dialect: Dialect): boolean { return ( statement.type === 'delete_unique_constraint' && dialect === 'postgresql' ); } convert(statement: JsonDeleteUniqueConstraint): string { const unsquashed = PgSquasher.unsquashUnique(statement.data); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${unsquashed.name}";`; } } class PgAlterTableAddCheckConstraintConvertor extends Convertor { can(statement: JsonCreateCheckConstraint, dialect: Dialect): boolean { return ( statement.type === 'create_check_constraint' && dialect === 'postgresql' ); } convert(statement: JsonCreateCheckConstraint): string { const unsquashed = PgSquasher.unsquashCheck(statement.data); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${unsquashed.name}" CHECK (${unsquashed.value});`; } } class PgAlterTableDeleteCheckConstraintConvertor extends Convertor { can(statement: JsonDeleteCheckConstraint, dialect: Dialect): boolean { return ( statement.type === 'delete_check_constraint' && dialect === 'postgresql' ); } convert(statement: JsonDeleteCheckConstraint): string { const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.constraintName}";`; } } class MySQLAlterTableAddUniqueConstraintConvertor extends Convertor { can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean { return statement.type === 'create_unique_constraint' && dialect === 'mysql'; } convert(statement: JsonCreateUniqueConstraint): string { const unsquashed = MySqlSquasher.unsquashUnique(statement.data); return `ALTER TABLE \`${statement.tableName}\` ADD CONSTRAINT \`${unsquashed.name}\` UNIQUE(\`${ unsquashed.columns.join('`,`') }\`);`; } } class MySQLAlterTableDropUniqueConstraintConvertor extends Convertor { can(statement: JsonDeleteUniqueConstraint, dialect: Dialect): boolean { return statement.type === 'delete_unique_constraint' && dialect === 'mysql'; } convert(statement: JsonDeleteUniqueConstraint): string { const unsquashed = MySqlSquasher.unsquashUnique(statement.data); return `ALTER TABLE \`${statement.tableName}\` DROP INDEX \`${unsquashed.name}\`;`; } } class MySqlAlterTableAddCheckConstraintConvertor extends Convertor { can(statement: JsonCreateCheckConstraint, dialect: Dialect): boolean { return ( statement.type === 'create_check_constraint' && dialect === 'mysql' ); } convert(statement: JsonCreateCheckConstraint): string { const unsquashed = MySqlSquasher.unsquashCheck(statement.data); const { tableName } = statement; return `ALTER TABLE \`${tableName}\` ADD CONSTRAINT \`${unsquashed.name}\` CHECK (${unsquashed.value});`; } } class SingleStoreAlterTableAddUniqueConstraintConvertor extends Convertor { can(statement: JsonCreateUniqueConstraint, dialect: Dialect): boolean { return statement.type === 'create_unique_constraint' && dialect === 'singlestore'; } convert(statement: JsonCreateUniqueConstraint): string { const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); return `ALTER TABLE \`${statement.tableName}\` ADD CONSTRAINT \`${unsquashed.name}\` UNIQUE(\`${ unsquashed.columns.join('`,`') }\`);`; } } class SingleStoreAlterTableDropUniqueConstraintConvertor extends Convertor { can(statement: JsonDeleteUniqueConstraint, dialect: Dialect): boolean { return statement.type === 'delete_unique_constraint' && dialect === 'singlestore'; } convert(statement: JsonDeleteUniqueConstraint): string { const unsquashed = SingleStoreSquasher.unsquashUnique(statement.data); return `ALTER TABLE \`${statement.tableName}\` DROP INDEX \`${unsquashed.name}\`;`; } } class MySqlAlterTableDeleteCheckConstraintConvertor extends Convertor { can(statement: JsonDeleteCheckConstraint, dialect: Dialect): boolean { return ( statement.type === 'delete_check_constraint' && dialect === 'mysql' ); } convert(statement: JsonDeleteCheckConstraint): string { const { tableName } = statement; return `ALTER TABLE \`${tableName}\` DROP CONSTRAINT \`${statement.constraintName}\`;`; } } class CreatePgSequenceConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_sequence' && dialect === 'postgresql'; } convert(st: JsonCreateSequenceStatement) { const { name, values, schema } = st; const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; return `CREATE SEQUENCE ${sequenceWithSchema}${values.increment ? ` INCREMENT BY ${values.increment}` : ''}${ values.minValue ? ` MINVALUE ${values.minValue}` : '' }${values.maxValue ? ` MAXVALUE ${values.maxValue}` : ''}${ values.startWith ? ` START WITH ${values.startWith}` : '' }${values.cache ? ` CACHE ${values.cache}` : ''}${values.cycle ? ` CYCLE` : ''};`; } } class DropPgSequenceConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_sequence' && dialect === 'postgresql'; } convert(st: JsonDropSequenceStatement) { const { name, schema } = st; const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; return `DROP SEQUENCE ${sequenceWithSchema};`; } } class RenamePgSequenceConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_sequence' && dialect === 'postgresql'; } convert(st: JsonRenameSequenceStatement) { const { nameFrom, nameTo, schema } = st; const sequenceWithSchemaFrom = schema ? `"${schema}"."${nameFrom}"` : `"${nameFrom}"`; const sequenceWithSchemaTo = schema ? `"${schema}"."${nameTo}"` : `"${nameTo}"`; return `ALTER SEQUENCE ${sequenceWithSchemaFrom} RENAME TO "${nameTo}";`; } } class MovePgSequenceConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'move_sequence' && dialect === 'postgresql'; } convert(st: JsonMoveSequenceStatement) { const { schemaFrom, schemaTo, name } = st; const sequenceWithSchema = schemaFrom ? `"${schemaFrom}"."${name}"` : `"${name}"`; const seqSchemaTo = schemaTo ? `"${schemaTo}"` : `public`; return `ALTER SEQUENCE ${sequenceWithSchema} SET SCHEMA ${seqSchemaTo};`; } } class AlterPgSequenceConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_sequence' && dialect === 'postgresql'; } convert(st: JsonAlterSequenceStatement) { const { name, schema, values } = st; const { increment, minValue, maxValue, startWith, cache, cycle } = values; const sequenceWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; return `ALTER SEQUENCE ${sequenceWithSchema}${increment ? ` INCREMENT BY ${increment}` : ''}${ minValue ? ` MINVALUE ${minValue}` : '' }${maxValue ? ` MAXVALUE ${maxValue}` : ''}${startWith ? ` START WITH ${startWith}` : ''}${ cache ? ` CACHE ${cache}` : '' }${cycle ? ` CYCLE` : ''};`; } } class CreateTypeEnumConvertor extends Convertor { can(statement: JsonStatement): boolean { return statement.type === 'create_type_enum'; } convert(st: JsonCreateEnumStatement) { const { name, values, schema } = st; const enumNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; let valuesStatement = '('; valuesStatement += values.map((it) => `'${escapeSingleQuotes(it)}'`).join(', '); valuesStatement += ')'; // TODO do we need this? // let statement = 'DO $$ BEGIN'; // statement += '\n'; let statement = `CREATE TYPE ${enumNameWithSchema} AS ENUM${valuesStatement};`; // statement += '\n'; // statement += 'EXCEPTION'; // statement += '\n'; // statement += ' WHEN duplicate_object THEN null;'; // statement += '\n'; // statement += 'END $$;'; // statement += '\n'; return statement; } } class DropTypeEnumConvertor extends Convertor { can(statement: JsonStatement): boolean { return statement.type === 'drop_type_enum'; } convert(st: JsonDropEnumStatement) { const { name, schema } = st; const enumNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; let statement = `DROP TYPE ${enumNameWithSchema};`; return statement; } } class AlterTypeAddValueConvertor extends Convertor { can(statement: JsonStatement): boolean { return statement.type === 'alter_type_add_value'; } convert(st: JsonAddValueToEnumStatement) { const { name, schema, value, before } = st; const enumNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; return `ALTER TYPE ${enumNameWithSchema} ADD VALUE '${value}'${before.length ? ` BEFORE '${before}'` : ''};`; } } class AlterTypeSetSchemaConvertor extends Convertor { can(statement: JsonStatement): boolean { return statement.type === 'move_type_enum'; } convert(st: JsonMoveEnumStatement) { const { name, schemaFrom, schemaTo } = st; const enumNameWithSchema = schemaFrom ? `"${schemaFrom}"."${name}"` : `"${name}"`; return `ALTER TYPE ${enumNameWithSchema} SET SCHEMA "${schemaTo}";`; } } class AlterRenameTypeConvertor extends Convertor { can(statement: JsonStatement): boolean { return statement.type === 'rename_type_enum'; } convert(st: JsonRenameEnumStatement) { const { nameTo, nameFrom, schema } = st; const enumNameWithSchema = schema ? `"${schema}"."${nameFrom}"` : `"${nameFrom}"`; return `ALTER TYPE ${enumNameWithSchema} RENAME TO "${nameTo}";`; } } class AlterTypeDropValueConvertor extends Convertor { can(statement: JsonDropValueFromEnumStatement): boolean { return statement.type === 'alter_type_drop_value'; } convert(st: JsonDropValueFromEnumStatement) { const { columnsWithEnum, name, newValues, enumSchema } = st; const statements: string[] = []; for (const withEnum of columnsWithEnum) { const tableNameWithSchema = withEnum.tableSchema ? `"${withEnum.tableSchema}"."${withEnum.table}"` : `"${withEnum.table}"`; statements.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${withEnum.column}" SET DATA TYPE text;`, ); if (withEnum.default) { statements.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${withEnum.column}" SET DEFAULT ${withEnum.default}::text;`, ); } } statements.push(new DropTypeEnumConvertor().convert({ name: name, schema: enumSchema, type: 'drop_type_enum' })); statements.push(new CreateTypeEnumConvertor().convert({ name: name, schema: enumSchema, values: newValues, type: 'create_type_enum', })); for (const withEnum of columnsWithEnum) { const tableNameWithSchema = withEnum.tableSchema ? `"${withEnum.tableSchema}"."${withEnum.table}"` : `"${withEnum.table}"`; const parsedType = parseType(`"${enumSchema}".`, withEnum.columnType); if (withEnum.default) { statements.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${withEnum.column}" SET DEFAULT ${withEnum.default}::${parsedType};`, ); } statements.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${withEnum.column}" SET DATA TYPE ${parsedType} USING "${withEnum.column}"::${parsedType};`, ); } return statements; } } class PgDropTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_table' && dialect === 'postgresql'; } convert(statement: JsonDropTableStatement, _d: any, action?: string) { const { tableName, schema, policies } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; const dropPolicyConvertor = new PgDropPolicyConvertor(); const droppedPolicies = policies?.map((p) => { return dropPolicyConvertor.convert({ type: 'drop_policy', tableName, data: action === 'push' ? PgSquasher.unsquashPolicyPush(p) : PgSquasher.unsquashPolicy(p), schema, }) as string; }) ?? []; return [ ...droppedPolicies, `DROP TABLE ${tableNameWithSchema} CASCADE;`, ]; } } class MySQLDropTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_table' && dialect === 'mysql'; } convert(statement: JsonDropTableStatement) { const { tableName } = statement; return `DROP TABLE \`${tableName}\`;`; } } export class SingleStoreDropTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_table' && dialect === 'singlestore'; } convert(statement: JsonDropTableStatement) { const { tableName } = statement; return `DROP TABLE \`${tableName}\`;`; } } export class SQLiteDropTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_table' && (dialect === 'sqlite' || dialect === 'turso'); } convert(statement: JsonDropTableStatement) { const { tableName } = statement; return `DROP TABLE \`${tableName}\`;`; } } class PgRenameTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_table' && dialect === 'postgresql'; } convert(statement: JsonRenameTableStatement) { const { tableNameFrom, tableNameTo, toSchema, fromSchema } = statement; const from = fromSchema ? `"${fromSchema}"."${tableNameFrom}"` : `"${tableNameFrom}"`; const to = `"${tableNameTo}"`; return `ALTER TABLE ${from} RENAME TO ${to};`; } } export class SqliteRenameTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_table' && (dialect === 'sqlite' || dialect === 'turso'); } convert(statement: JsonRenameTableStatement) { const { tableNameFrom, tableNameTo } = statement; return `ALTER TABLE \`${tableNameFrom}\` RENAME TO \`${tableNameTo}\`;`; } } class MySqlRenameTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_table' && dialect === 'mysql'; } convert(statement: JsonRenameTableStatement) { const { tableNameFrom, tableNameTo } = statement; return `RENAME TABLE \`${tableNameFrom}\` TO \`${tableNameTo}\`;`; } } export class SingleStoreRenameTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_table' && dialect === 'singlestore'; } convert(statement: JsonRenameTableStatement) { const { tableNameFrom, tableNameTo } = statement; return `ALTER TABLE \`${tableNameFrom}\` RENAME TO \`${tableNameTo}\`;`; } } class PgAlterTableRenameColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_rename_column' && dialect === 'postgresql' ); } convert(statement: JsonRenameColumnStatement) { const { tableName, oldColumnName, newColumnName, schema } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; return `ALTER TABLE ${tableNameWithSchema} RENAME COLUMN "${oldColumnName}" TO "${newColumnName}";`; } } class MySqlAlterTableRenameColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_rename_column' && dialect === 'mysql' ); } convert(statement: JsonRenameColumnStatement) { const { tableName, oldColumnName, newColumnName } = statement; return `ALTER TABLE \`${tableName}\` RENAME COLUMN \`${oldColumnName}\` TO \`${newColumnName}\`;`; } } class SingleStoreAlterTableRenameColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_rename_column' && dialect === 'singlestore' ); } convert(statement: JsonRenameColumnStatement) { const { tableName, oldColumnName, newColumnName } = statement; return `ALTER TABLE \`${tableName}\` CHANGE \`${oldColumnName}\` \`${newColumnName}\`;`; } } class SQLiteAlterTableRenameColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_rename_column' && (dialect === 'sqlite' || dialect === 'turso') ); } convert(statement: JsonRenameColumnStatement) { const { tableName, oldColumnName, newColumnName } = statement; return `ALTER TABLE \`${tableName}\` RENAME COLUMN "${oldColumnName}" TO "${newColumnName}";`; } } class PgAlterTableDropColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_drop_column' && dialect === 'postgresql' ); } convert(statement: JsonDropColumnStatement) { const { tableName, columnName, schema } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; return `ALTER TABLE ${tableNameWithSchema} DROP COLUMN "${columnName}";`; } } class MySqlAlterTableDropColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_table_drop_column' && dialect === 'mysql'; } convert(statement: JsonDropColumnStatement) { const { tableName, columnName } = statement; return `ALTER TABLE \`${tableName}\` DROP COLUMN \`${columnName}\`;`; } } class SingleStoreAlterTableDropColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_table_drop_column' && dialect === 'singlestore'; } convert(statement: JsonDropColumnStatement) { const { tableName, columnName } = statement; return `ALTER TABLE \`${tableName}\` DROP COLUMN \`${columnName}\`;`; } } class SQLiteAlterTableDropColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_table_drop_column' && (dialect === 'sqlite' || dialect === 'turso'); } convert(statement: JsonDropColumnStatement) { const { tableName, columnName } = statement; return `ALTER TABLE \`${tableName}\` DROP COLUMN \`${columnName}\`;`; } } class PgAlterTableAddColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_add_column' && dialect === 'postgresql' ); } convert(statement: JsonAddColumnStatement) { const { tableName, column, schema } = statement; const { name, type, notNull, generated, primaryKey, identity } = column; const primaryKeyStatement = primaryKey ? ' PRIMARY KEY' : ''; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; const defaultStatement = `${column.default !== undefined ? ` DEFAULT ${column.default}` : ''}`; const schemaPrefix = column.typeSchema && column.typeSchema !== 'public' ? `"${column.typeSchema}".` : ''; const fixedType = parseType(schemaPrefix, column.type); const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; const unsquashedIdentity = identity ? PgSquasher.unsquashIdentity(identity) : undefined; const identityWithSchema = schema ? `"${schema}"."${unsquashedIdentity?.name}"` : `"${unsquashedIdentity?.name}"`; const identityStatement = unsquashedIdentity ? ` GENERATED ${ unsquashedIdentity.type === 'always' ? 'ALWAYS' : 'BY DEFAULT' } AS IDENTITY (sequence name ${identityWithSchema}${ unsquashedIdentity.increment ? ` INCREMENT BY ${unsquashedIdentity.increment}` : '' }${ unsquashedIdentity.minValue ? ` MINVALUE ${unsquashedIdentity.minValue}` : '' }${ unsquashedIdentity.maxValue ? ` MAXVALUE ${unsquashedIdentity.maxValue}` : '' }${ unsquashedIdentity.startWith ? ` START WITH ${unsquashedIdentity.startWith}` : '' }${unsquashedIdentity.cache ? ` CACHE ${unsquashedIdentity.cache}` : ''}${ unsquashedIdentity.cycle ? ` CYCLE` : '' })` : ''; const generatedStatement = generated ? ` GENERATED ALWAYS AS (${generated?.as}) STORED` : ''; return `ALTER TABLE ${tableNameWithSchema} ADD COLUMN "${name}" ${fixedType}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${identityStatement};`; } } class MySqlAlterTableAddColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_table_add_column' && dialect === 'mysql'; } convert(statement: JsonAddColumnStatement) { const { tableName, column } = statement; const { name, type, notNull, primaryKey, autoincrement, onUpdate, generated, } = column; const defaultStatement = `${column.default !== undefined ? ` DEFAULT ${column.default}` : ''}`; const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; const primaryKeyStatement = `${primaryKey ? ' PRIMARY KEY' : ''}`; const autoincrementStatement = `${autoincrement ? ' AUTO_INCREMENT' : ''}`; const onUpdateStatement = `${onUpdate ? ' ON UPDATE CURRENT_TIMESTAMP' : ''}`; const generatedStatement = generated ? ` GENERATED ALWAYS AS (${generated?.as}) ${generated?.type.toUpperCase()}` : ''; return `ALTER TABLE \`${tableName}\` ADD \`${name}\` ${type}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${generatedStatement}${notNullStatement}${onUpdateStatement};`; } } class SingleStoreAlterTableAddColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_table_add_column' && dialect === 'singlestore'; } convert(statement: JsonAddColumnStatement) { const { tableName, column } = statement; const { name, type, notNull, primaryKey, autoincrement, onUpdate, generated, } = column; const defaultStatement = `${column.default !== undefined ? ` DEFAULT ${column.default}` : ''}`; const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; const primaryKeyStatement = `${primaryKey ? ' PRIMARY KEY' : ''}`; const autoincrementStatement = `${autoincrement ? ' AUTO_INCREMENT' : ''}`; const onUpdateStatement = `${onUpdate ? ' ON UPDATE CURRENT_TIMESTAMP' : ''}`; const generatedStatement = generated ? ` GENERATED ALWAYS AS (${generated?.as}) ${generated?.type.toUpperCase()}` : ''; return `ALTER TABLE \`${tableName}\` ADD \`${name}\` ${type}${primaryKeyStatement}${autoincrementStatement}${defaultStatement}${notNullStatement}${onUpdateStatement}${generatedStatement};`; } } export class SQLiteAlterTableAddColumnConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'sqlite_alter_table_add_column' && (dialect === 'sqlite' || dialect === 'turso') ); } convert(statement: JsonSqliteAddColumnStatement) { const { tableName, column, referenceData } = statement; const { name, type, notNull, primaryKey, generated } = column; const defaultStatement = `${column.default !== undefined ? ` DEFAULT ${column.default}` : ''}`; const notNullStatement = `${notNull ? ' NOT NULL' : ''}`; const primaryKeyStatement = `${primaryKey ? ' PRIMARY KEY' : ''}`; const referenceAsObject = referenceData ? SQLiteSquasher.unsquashFK(referenceData) : undefined; const referenceStatement = `${ referenceAsObject ? ` REFERENCES ${referenceAsObject.tableTo}(${referenceAsObject.columnsTo})` : '' }`; // const autoincrementStatement = `${autoincrement ? 'AUTO_INCREMENT' : ''}` const generatedStatement = generated ? ` GENERATED ALWAYS AS ${generated.as} ${generated.type.toUpperCase()}` : ''; return `ALTER TABLE \`${tableName}\` ADD \`${name}\` ${type}${primaryKeyStatement}${defaultStatement}${generatedStatement}${notNullStatement}${referenceStatement};`; } } class PgAlterTableAlterColumnSetTypeConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'pg_alter_table_alter_column_set_type' && dialect === 'postgresql' ); } convert(statement: JsonAlterColumnPgTypeStatement) { const { tableName, columnName, newDataType, schema, oldDataType, columnDefault, typeSchema } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; const statements: string[] = []; const type = parseType(`"${typeSchema}".`, newDataType.name); if (!oldDataType.isEnum && !newDataType.isEnum) { statements.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DATA TYPE ${type};`, ); if (columnDefault) { statements.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DEFAULT ${columnDefault};`, ); } } if (oldDataType.isEnum && !newDataType.isEnum) { statements.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DATA TYPE ${type};`, ); if (columnDefault) { statements.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DEFAULT ${columnDefault};`, ); } } if (!oldDataType.isEnum && newDataType.isEnum) { if (columnDefault) { statements.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DEFAULT ${columnDefault}::${type};`, ); } statements.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DATA TYPE ${type} USING "${columnName}"::${type};`, ); } if (oldDataType.isEnum && newDataType.isEnum) { const alterType = `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DATA TYPE ${type} USING "${columnName}"::text::${type};`; if (newDataType.name !== oldDataType.name && columnDefault) { statements.push( `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP DEFAULT;`, alterType, `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DEFAULT ${columnDefault};`, ); } else { statements.push(alterType); } } return statements; } } class PgAlterTableAlterColumnSetDefaultConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_set_default' && dialect === 'postgresql' ); } convert(statement: JsonAlterColumnSetDefaultStatement) { const { tableName, columnName, schema } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET DEFAULT ${statement.newDefaultValue};`; } } class PgAlterTableAlterColumnDropDefaultConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_drop_default' && dialect === 'postgresql' ); } convert(statement: JsonAlterColumnDropDefaultStatement) { const { tableName, columnName, schema } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP DEFAULT;`; } } class PgAlterTableAlterColumnDropGeneratedConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_drop_generated' && dialect === 'postgresql' ); } convert(statement: JsonAlterColumnDropGeneratedStatement) { const { tableName, columnName, schema } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP EXPRESSION;`; } } class PgAlterTableAlterColumnSetExpressionConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_set_generated' && dialect === 'postgresql' ); } convert(statement: JsonAlterColumnSetGeneratedStatement) { const { tableName, columnName, schema, columnNotNull: notNull, columnDefault, columnOnUpdate, columnAutoIncrement, columnPk, columnGenerated, } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; const addColumnStatement = new PgAlterTableAddColumnConvertor().convert({ schema, tableName, column: { name: columnName, type: statement.newDataType, notNull, default: columnDefault, onUpdate: columnOnUpdate, autoincrement: columnAutoIncrement, primaryKey: columnPk, generated: columnGenerated, }, type: 'alter_table_add_column', }); return [ `ALTER TABLE ${tableNameWithSchema} drop column "${columnName}";`, addColumnStatement, ]; } } class PgAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_alter_generated' && dialect === 'postgresql' ); } convert(statement: JsonAlterColumnAlterGeneratedStatement) { const { tableName, columnName, schema, columnNotNull: notNull, columnDefault, columnOnUpdate, columnAutoIncrement, columnPk, columnGenerated, } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; const addColumnStatement = new PgAlterTableAddColumnConvertor().convert({ schema, tableName, column: { name: columnName, type: statement.newDataType, notNull, default: columnDefault, onUpdate: columnOnUpdate, autoincrement: columnAutoIncrement, primaryKey: columnPk, generated: columnGenerated, }, type: 'alter_table_add_column', }); return [ `ALTER TABLE ${tableNameWithSchema} drop column "${columnName}";`, addColumnStatement, ]; } } //// class SqliteAlterTableAlterColumnDropGeneratedConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_drop_generated' && (dialect === 'sqlite' || dialect === 'turso') ); } convert(statement: JsonAlterColumnDropGeneratedStatement) { const { tableName, columnName, schema, columnDefault, columnOnUpdate, columnAutoIncrement, columnPk, columnGenerated, columnNotNull, } = statement; const addColumnStatement = new SQLiteAlterTableAddColumnConvertor().convert( { tableName, column: { name: columnName, type: statement.newDataType, notNull: columnNotNull, default: columnDefault, onUpdate: columnOnUpdate, autoincrement: columnAutoIncrement, primaryKey: columnPk, generated: columnGenerated, }, type: 'sqlite_alter_table_add_column', }, ); const dropColumnStatement = new SQLiteAlterTableDropColumnConvertor().convert({ tableName, columnName, schema, type: 'alter_table_drop_column', }); return [dropColumnStatement, addColumnStatement]; } } class SqliteAlterTableAlterColumnSetExpressionConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_set_generated' && (dialect === 'sqlite' || dialect === 'turso') ); } convert(statement: JsonAlterColumnSetGeneratedStatement) { const { tableName, columnName, schema, columnNotNull: notNull, columnDefault, columnOnUpdate, columnAutoIncrement, columnPk, columnGenerated, } = statement; const addColumnStatement = new SQLiteAlterTableAddColumnConvertor().convert( { tableName, column: { name: columnName, type: statement.newDataType, notNull, default: columnDefault, onUpdate: columnOnUpdate, autoincrement: columnAutoIncrement, primaryKey: columnPk, generated: columnGenerated, }, type: 'sqlite_alter_table_add_column', }, ); const dropColumnStatement = new SQLiteAlterTableDropColumnConvertor().convert({ tableName, columnName, schema, type: 'alter_table_drop_column', }); return [dropColumnStatement, addColumnStatement]; } } class SqliteAlterTableAlterColumnAlterGeneratedConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_alter_generated' && (dialect === 'sqlite' || dialect === 'turso') ); } convert(statement: JsonAlterColumnAlterGeneratedStatement) { const { tableName, columnName, schema, columnNotNull, columnDefault, columnOnUpdate, columnAutoIncrement, columnPk, columnGenerated, } = statement; const addColumnStatement = new SQLiteAlterTableAddColumnConvertor().convert( { tableName, column: { name: columnName, type: statement.newDataType, notNull: columnNotNull, default: columnDefault, onUpdate: columnOnUpdate, autoincrement: columnAutoIncrement, primaryKey: columnPk, generated: columnGenerated, }, type: 'sqlite_alter_table_add_column', }, ); const dropColumnStatement = new SQLiteAlterTableDropColumnConvertor().convert({ tableName, columnName, schema, type: 'alter_table_drop_column', }); return [dropColumnStatement, addColumnStatement]; } } //// class MySqlAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_alter_generated' && dialect === 'mysql' ); } convert(statement: JsonAlterColumnAlterGeneratedStatement) { const { tableName, columnName, schema, columnNotNull: notNull, columnDefault, columnOnUpdate, columnAutoIncrement, columnPk, columnGenerated, } = statement; const tableNameWithSchema = schema ? `\`${schema}\`.\`${tableName}\`` : `\`${tableName}\``; const addColumnStatement = new MySqlAlterTableAddColumnConvertor().convert({ schema, tableName, column: { name: columnName, type: statement.newDataType, notNull, default: columnDefault, onUpdate: columnOnUpdate, autoincrement: columnAutoIncrement, primaryKey: columnPk, generated: columnGenerated, }, type: 'alter_table_add_column', }); return [ `ALTER TABLE ${tableNameWithSchema} drop column \`${columnName}\`;`, addColumnStatement, ]; } } class MySqlAlterTableAlterColumnSetDefaultConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_set_default' && dialect === 'mysql' ); } convert(statement: JsonAlterColumnSetDefaultStatement) { const { tableName, columnName } = statement; return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` SET DEFAULT ${statement.newDefaultValue};`; } } class MySqlAlterTableAlterColumnDropDefaultConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_drop_default' && dialect === 'mysql' ); } convert(statement: JsonAlterColumnDropDefaultStatement) { const { tableName, columnName } = statement; return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` DROP DEFAULT;`; } } class MySqlAlterTableAddPk extends Convertor { can(statement: JsonStatement, dialect: string): boolean { return ( statement.type === 'alter_table_alter_column_set_pk' && dialect === 'mysql' ); } convert(statement: JsonAlterColumnSetPrimaryKeyStatement): string { return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY (\`${statement.columnName}\`);`; } } class MySqlAlterTableDropPk extends Convertor { can(statement: JsonStatement, dialect: string): boolean { return ( statement.type === 'alter_table_alter_column_drop_pk' && dialect === 'mysql' ); } convert(statement: JsonAlterColumnDropPrimaryKeyStatement): string { return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY`; } } type LibSQLModifyColumnStatement = | JsonAlterColumnTypeStatement | JsonAlterColumnDropNotNullStatement | JsonAlterColumnSetNotNullStatement | JsonAlterColumnSetDefaultStatement | JsonAlterColumnDropDefaultStatement; export class LibSQLModifyColumn extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( (statement.type === 'alter_table_alter_column_set_type' || statement.type === 'alter_table_alter_column_drop_notnull' || statement.type === 'alter_table_alter_column_set_notnull' || statement.type === 'alter_table_alter_column_set_default' || statement.type === 'alter_table_alter_column_drop_default' || statement.type === 'create_check_constraint' || statement.type === 'delete_check_constraint') && dialect === 'turso' ); } convert(statement: LibSQLModifyColumnStatement, json2: SQLiteSchemaSquashed) { const { tableName, columnName } = statement; let columnType = ``; let columnDefault: any = ''; let columnNotNull = ''; const sqlStatements: string[] = []; // collect index info const indexes: { name: string; tableName: string; columns: string[]; isUnique: boolean; where?: string | undefined; }[] = []; for (const table of Object.values(json2.tables)) { for (const index of Object.values(table.indexes)) { const unsquashed = SQLiteSquasher.unsquashIdx(index); sqlStatements.push(`DROP INDEX "${unsquashed.name}";`); indexes.push({ ...unsquashed, tableName: table.name }); } } switch (statement.type) { case 'alter_table_alter_column_set_type': columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; break; case 'alter_table_alter_column_drop_notnull': columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnNotNull = ''; break; case 'alter_table_alter_column_set_notnull': columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnNotNull = ` NOT NULL`; break; case 'alter_table_alter_column_set_default': columnType = ` ${statement.newDataType}`; columnDefault = ` DEFAULT ${statement.newDefaultValue}`; columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; break; case 'alter_table_alter_column_drop_default': columnType = ` ${statement.newDataType}`; columnDefault = ''; columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; break; } // Seems like getting value from simple json2 shanpshot makes dates be dates columnDefault = columnDefault instanceof Date ? columnDefault.toISOString() : columnDefault; sqlStatements.push( `ALTER TABLE \`${tableName}\` ALTER COLUMN "${columnName}" TO "${columnName}"${columnType}${columnNotNull}${columnDefault};`, ); for (const index of indexes) { const indexPart = index.isUnique ? 'UNIQUE INDEX' : 'INDEX'; const whereStatement = index.where ? ` WHERE ${index.where}` : ''; const uniqueString = index.columns.map((it) => `\`${it}\``).join(','); const tableName = index.tableName; sqlStatements.push( `CREATE ${indexPart} \`${index.name}\` ON \`${tableName}\` (${uniqueString})${whereStatement};`, ); } return sqlStatements; } } type MySqlModifyColumnStatement = | JsonAlterColumnDropNotNullStatement | JsonAlterColumnSetNotNullStatement | JsonAlterColumnTypeStatement | JsonAlterColumnDropOnUpdateStatement | JsonAlterColumnSetOnUpdateStatement | JsonAlterColumnDropAutoincrementStatement | JsonAlterColumnSetAutoincrementStatement | JsonAlterColumnSetDefaultStatement | JsonAlterColumnDropDefaultStatement | JsonAlterColumnSetGeneratedStatement | JsonAlterColumnDropGeneratedStatement; class MySqlModifyColumn extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( (statement.type === 'alter_table_alter_column_set_type' || statement.type === 'alter_table_alter_column_set_notnull' || statement.type === 'alter_table_alter_column_drop_notnull' || statement.type === 'alter_table_alter_column_drop_on_update' || statement.type === 'alter_table_alter_column_set_on_update' || statement.type === 'alter_table_alter_column_set_autoincrement' || statement.type === 'alter_table_alter_column_drop_autoincrement' || statement.type === 'alter_table_alter_column_set_default' || statement.type === 'alter_table_alter_column_drop_default' || statement.type === 'alter_table_alter_column_set_generated' || statement.type === 'alter_table_alter_column_drop_generated') && dialect === 'mysql' ); } convert(statement: MySqlModifyColumnStatement) { const { tableName, columnName } = statement; let columnType = ``; let columnDefault: any = ''; let columnNotNull = ''; let columnOnUpdate = ''; let columnAutoincrement = ''; let primaryKey = statement.columnPk ? ' PRIMARY KEY' : ''; let columnGenerated = ''; if (statement.type === 'alter_table_alter_column_drop_notnull') { columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if (statement.type === 'alter_table_alter_column_set_notnull') { columnNotNull = ` NOT NULL`; columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if (statement.type === 'alter_table_alter_column_drop_on_update') { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnOnUpdate = ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if (statement.type === 'alter_table_alter_column_set_on_update') { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = ` ON UPDATE CURRENT_TIMESTAMP`; columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if ( statement.type === 'alter_table_alter_column_set_autoincrement' ) { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = ' AUTO_INCREMENT'; } else if ( statement.type === 'alter_table_alter_column_drop_autoincrement' ) { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = ''; } else if (statement.type === 'alter_table_alter_column_set_default') { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnType = ` ${statement.newDataType}`; columnDefault = ` DEFAULT ${statement.newDefaultValue}`; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if (statement.type === 'alter_table_alter_column_drop_default') { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnType = ` ${statement.newDataType}`; columnDefault = ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if (statement.type === 'alter_table_alter_column_set_generated') { columnType = ` ${statement.newDataType}`; columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; if (statement.columnGenerated?.type === 'virtual') { return [ new MySqlAlterTableDropColumnConvertor().convert({ type: 'alter_table_drop_column', tableName: statement.tableName, columnName: statement.columnName, schema: statement.schema, }), new MySqlAlterTableAddColumnConvertor().convert({ tableName, column: { name: columnName, type: statement.newDataType, notNull: statement.columnNotNull, default: statement.columnDefault, onUpdate: statement.columnOnUpdate, autoincrement: statement.columnAutoIncrement, primaryKey: statement.columnPk, generated: statement.columnGenerated, }, schema: statement.schema, type: 'alter_table_add_column', }), ]; } else { columnGenerated = statement.columnGenerated ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}` : ''; } } else if (statement.type === 'alter_table_alter_column_drop_generated') { columnType = ` ${statement.newDataType}`; columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; if (statement.oldColumn?.generated?.type === 'virtual') { return [ new MySqlAlterTableDropColumnConvertor().convert({ type: 'alter_table_drop_column', tableName: statement.tableName, columnName: statement.columnName, schema: statement.schema, }), new MySqlAlterTableAddColumnConvertor().convert({ tableName, column: { name: columnName, type: statement.newDataType, notNull: statement.columnNotNull, default: statement.columnDefault, onUpdate: statement.columnOnUpdate, autoincrement: statement.columnAutoIncrement, primaryKey: statement.columnPk, generated: statement.columnGenerated, }, schema: statement.schema, type: 'alter_table_add_column', }), ]; } } else { columnType = ` ${statement.newDataType}`; columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; columnGenerated = statement.columnGenerated ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}` : ''; } // Seems like getting value from simple json2 shanpshot makes dates be dates columnDefault = columnDefault instanceof Date ? columnDefault.toISOString() : columnDefault; return `ALTER TABLE \`${tableName}\` MODIFY COLUMN \`${columnName}\`${columnType}${columnAutoincrement}${columnGenerated}${columnNotNull}${columnDefault}${columnOnUpdate};`; } } class SingleStoreAlterTableAlterColumnAlterrGeneratedConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_alter_generated' && dialect === 'singlestore' ); } convert(statement: JsonAlterColumnAlterGeneratedStatement) { const { tableName, columnName, schema, columnNotNull: notNull, columnDefault, columnOnUpdate, columnAutoIncrement, columnPk, columnGenerated, } = statement; const tableNameWithSchema = schema ? `\`${schema}\`.\`${tableName}\`` : `\`${tableName}\``; const addColumnStatement = new SingleStoreAlterTableAddColumnConvertor().convert({ schema, tableName, column: { name: columnName, type: statement.newDataType, notNull, default: columnDefault, onUpdate: columnOnUpdate, autoincrement: columnAutoIncrement, primaryKey: columnPk, generated: columnGenerated, }, type: 'alter_table_add_column', }); return [ `ALTER TABLE ${tableNameWithSchema} drop column \`${columnName}\`;`, addColumnStatement, ]; } } class SingleStoreAlterTableAlterColumnSetDefaultConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_set_default' && dialect === 'singlestore' ); } convert(statement: JsonAlterColumnSetDefaultStatement) { const { tableName, columnName } = statement; return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` SET DEFAULT ${statement.newDefaultValue};`; } } class SingleStoreAlterTableAlterColumnDropDefaultConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_drop_default' && dialect === 'singlestore' ); } convert(statement: JsonAlterColumnDropDefaultStatement) { const { tableName, columnName } = statement; return `ALTER TABLE \`${tableName}\` ALTER COLUMN \`${columnName}\` DROP DEFAULT;`; } } class SingleStoreAlterTableAddPk extends Convertor { can(statement: JsonStatement, dialect: string): boolean { return ( statement.type === 'alter_table_alter_column_set_pk' && dialect === 'singlestore' ); } convert(statement: JsonAlterColumnSetPrimaryKeyStatement): string { return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY (\`${statement.columnName}\`);`; } } class SingleStoreAlterTableDropPk extends Convertor { can(statement: JsonStatement, dialect: string): boolean { return ( statement.type === 'alter_table_alter_column_drop_pk' && dialect === 'singlestore' ); } convert(statement: JsonAlterColumnDropPrimaryKeyStatement): string { return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY`; } } type SingleStoreModifyColumnStatement = | JsonAlterColumnDropNotNullStatement | JsonAlterColumnSetNotNullStatement | JsonAlterColumnTypeStatement | JsonAlterColumnDropOnUpdateStatement | JsonAlterColumnSetOnUpdateStatement | JsonAlterColumnDropAutoincrementStatement | JsonAlterColumnSetAutoincrementStatement | JsonAlterColumnSetDefaultStatement | JsonAlterColumnDropDefaultStatement | JsonAlterColumnSetGeneratedStatement | JsonAlterColumnDropGeneratedStatement; class SingleStoreModifyColumn extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( (statement.type === 'alter_table_alter_column_set_type' || statement.type === 'alter_table_alter_column_set_notnull' || statement.type === 'alter_table_alter_column_drop_notnull' || statement.type === 'alter_table_alter_column_drop_on_update' || statement.type === 'alter_table_alter_column_set_on_update' || statement.type === 'alter_table_alter_column_set_autoincrement' || statement.type === 'alter_table_alter_column_drop_autoincrement' || statement.type === 'alter_table_alter_column_set_default' || statement.type === 'alter_table_alter_column_drop_default' || statement.type === 'alter_table_alter_column_set_generated' || statement.type === 'alter_table_alter_column_drop_generated') && dialect === 'singlestore' ); } convert(statement: SingleStoreModifyColumnStatement) { const { tableName, columnName } = statement; let columnType = ``; let columnDefault: any = ''; let columnNotNull = ''; let columnOnUpdate = ''; let columnAutoincrement = ''; let primaryKey = statement.columnPk ? ' PRIMARY KEY' : ''; let columnGenerated = ''; if (statement.type === 'alter_table_alter_column_drop_notnull') { columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if (statement.type === 'alter_table_alter_column_set_notnull') { columnNotNull = ` NOT NULL`; columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if (statement.type === 'alter_table_alter_column_drop_on_update') { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnOnUpdate = ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if (statement.type === 'alter_table_alter_column_set_on_update') { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = ` ON UPDATE CURRENT_TIMESTAMP`; columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if ( statement.type === 'alter_table_alter_column_set_autoincrement' ) { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = ' AUTO_INCREMENT'; } else if ( statement.type === 'alter_table_alter_column_drop_autoincrement' ) { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnType = ` ${statement.newDataType}`; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = ''; } else if (statement.type === 'alter_table_alter_column_set_default') { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnType = ` ${statement.newDataType}`; columnDefault = ` DEFAULT ${statement.newDefaultValue}`; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if (statement.type === 'alter_table_alter_column_drop_default') { columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnType = ` ${statement.newDataType}`; columnDefault = ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; } else if (statement.type === 'alter_table_alter_column_set_generated') { columnType = ` ${statement.newDataType}`; columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; if (statement.columnGenerated?.type === 'virtual') { return [ new SingleStoreAlterTableDropColumnConvertor().convert({ type: 'alter_table_drop_column', tableName: statement.tableName, columnName: statement.columnName, schema: statement.schema, }), new SingleStoreAlterTableAddColumnConvertor().convert({ tableName, column: { name: columnName, type: statement.newDataType, notNull: statement.columnNotNull, default: statement.columnDefault, onUpdate: statement.columnOnUpdate, autoincrement: statement.columnAutoIncrement, primaryKey: statement.columnPk, generated: statement.columnGenerated, }, schema: statement.schema, type: 'alter_table_add_column', }), ]; } else { columnGenerated = statement.columnGenerated ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}` : ''; } } else if (statement.type === 'alter_table_alter_column_drop_generated') { columnType = ` ${statement.newDataType}`; columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; if (statement.oldColumn?.generated?.type === 'virtual') { return [ new SingleStoreAlterTableDropColumnConvertor().convert({ type: 'alter_table_drop_column', tableName: statement.tableName, columnName: statement.columnName, schema: statement.schema, }), new SingleStoreAlterTableAddColumnConvertor().convert({ tableName, column: { name: columnName, type: statement.newDataType, notNull: statement.columnNotNull, default: statement.columnDefault, onUpdate: statement.columnOnUpdate, autoincrement: statement.columnAutoIncrement, primaryKey: statement.columnPk, generated: statement.columnGenerated, }, schema: statement.schema, type: 'alter_table_add_column', }), ]; } } else { columnType = ` ${statement.newDataType}`; columnNotNull = statement.columnNotNull ? ` NOT NULL` : ''; columnOnUpdate = columnOnUpdate = statement.columnOnUpdate ? ` ON UPDATE CURRENT_TIMESTAMP` : ''; columnDefault = statement.columnDefault ? ` DEFAULT ${statement.columnDefault}` : ''; columnAutoincrement = statement.columnAutoIncrement ? ' AUTO_INCREMENT' : ''; columnGenerated = statement.columnGenerated ? ` GENERATED ALWAYS AS (${statement.columnGenerated?.as}) ${statement.columnGenerated?.type.toUpperCase()}` : ''; } // Seems like getting value from simple json2 shanpshot makes dates be dates columnDefault = columnDefault instanceof Date ? columnDefault.toISOString() : columnDefault; return `ALTER TABLE \`${tableName}\` MODIFY COLUMN \`${columnName}\`${columnType}${columnAutoincrement}${columnNotNull}${columnDefault}${columnOnUpdate}${columnGenerated};`; } } class SqliteAlterTableAlterColumnDropDefaultConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_drop_default' && dialect === 'sqlite' ); } convert(statement: JsonAlterColumnDropDefaultStatement) { return ( '/*\n SQLite does not support "Drop default from column" out of the box, we do not generate automatic migration for that, so it has to be done manually' + '\n Please refer to: https://www.techonthenet.com/sqlite/tables/alter_table.php' + '\n https://www.sqlite.org/lang_altertable.html' + '\n https://stackoverflow.com/questions/2083543/modify-a-columns-type-in-sqlite3' + "\n\n Due to that we don't generate migration automatically and it has to be done manually" + '\n*/' ); } } class PgAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_composite_pk' && dialect === 'postgresql'; } convert(statement: JsonCreateCompositePK) { const { name, columns } = PgSquasher.unsquashPK(statement.data); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.constraintName}" PRIMARY KEY("${ columns.join('","') }");`; } } class PgAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'delete_composite_pk' && dialect === 'postgresql'; } convert(statement: JsonDeleteCompositePK) { const { name, columns } = PgSquasher.unsquashPK(statement.data); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.constraintName}";`; } } class PgAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_composite_pk' && dialect === 'postgresql'; } convert(statement: JsonAlterCompositePK) { const { name, columns } = PgSquasher.unsquashPK(statement.old); const { name: newName, columns: newColumns } = PgSquasher.unsquashPK( statement.new, ); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${statement.oldConstraintName}";\n${BREAKPOINT}ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${statement.newConstraintName}" PRIMARY KEY("${ newColumns.join('","') }");`; } } class MySqlAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_composite_pk' && dialect === 'mysql'; } convert(statement: JsonCreateCompositePK) { const { name, columns } = MySqlSquasher.unsquashPK(statement.data); return `ALTER TABLE \`${statement.tableName}\` ADD PRIMARY KEY(\`${columns.join('`,`')}\`);`; } } class MySqlAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'delete_composite_pk' && dialect === 'mysql'; } convert(statement: JsonDeleteCompositePK) { const { name, columns } = MySqlSquasher.unsquashPK(statement.data); return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY;`; } } class MySqlAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_composite_pk' && dialect === 'mysql'; } convert(statement: JsonAlterCompositePK) { const { name, columns } = MySqlSquasher.unsquashPK(statement.old); const { name: newName, columns: newColumns } = MySqlSquasher.unsquashPK( statement.new, ); return `ALTER TABLE \`${statement.tableName}\` DROP PRIMARY KEY, ADD PRIMARY KEY(\`${newColumns.join('`,`')}\`);`; } } class SqliteAlterTableCreateCompositePrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_composite_pk' && dialect === 'sqlite'; } convert(statement: JsonCreateCompositePK) { let msg = '/*\n'; msg += `You're trying to add PRIMARY KEY(${statement.data}) to '${statement.tableName}' table\n`; msg += 'SQLite does not support adding primary key to an already created table\n'; msg += 'You can do it in 3 steps with drizzle orm:\n'; msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n'; msg += ' - migrate old data from one table to another\n'; msg += ' - delete old_table in schema, generate sql\n\n'; msg += 'or create manual migration like below:\n\n'; msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; msg += 'CREATE TABLE table_name (\n'; msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; msg += '\t...\n'; msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; msg += ' );\n'; msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; msg += '*/\n'; return msg; } } class SqliteAlterTableDeleteCompositePrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'delete_composite_pk' && dialect === 'sqlite'; } convert(statement: JsonDeleteCompositePK) { let msg = '/*\n'; msg += `You're trying to delete PRIMARY KEY(${statement.data}) from '${statement.tableName}' table\n`; msg += 'SQLite does not supportprimary key deletion from existing table\n'; msg += 'You can do it in 3 steps with drizzle orm:\n'; msg += ' - create new mirror table table without pk, rename current table to old_table, generate SQL\n'; msg += ' - migrate old data from one table to another\n'; msg += ' - delete old_table in schema, generate sql\n\n'; msg += 'or create manual migration like below:\n\n'; msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; msg += 'CREATE TABLE table_name (\n'; msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; msg += '\t...\n'; msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; msg += ' );\n'; msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; msg += '*/\n'; return msg; } } class SqliteAlterTableAlterCompositePrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_composite_pk' && dialect === 'sqlite'; } convert(statement: JsonAlterCompositePK) { let msg = '/*\n'; msg += 'SQLite does not support altering primary key\n'; msg += 'You can do it in 3 steps with drizzle orm:\n'; msg += ' - create new mirror table with needed pk, rename current table to old_table, generate SQL\n'; msg += ' - migrate old data from one table to another\n'; msg += ' - delete old_table in schema, generate sql\n\n'; msg += 'or create manual migration like below:\n\n'; msg += 'ALTER TABLE table_name RENAME TO old_table;\n'; msg += 'CREATE TABLE table_name (\n'; msg += '\tcolumn1 datatype [ NULL | NOT NULL ],\n'; msg += '\tcolumn2 datatype [ NULL | NOT NULL ],\n'; msg += '\t...\n'; msg += '\tPRIMARY KEY (pk_col1, pk_col2, ... pk_col_n)\n'; msg += ' );\n'; msg += 'INSERT INTO table_name SELECT * FROM old_table;\n\n'; msg += "Due to that we don't generate migration automatically and it has to be done manually\n"; msg += '*/\n'; return msg; } } class PgAlterTableAlterColumnSetPrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_set_pk' && dialect === 'postgresql' ); } convert(statement: JsonAlterColumnSetPrimaryKeyStatement) { const { tableName, columnName } = statement; const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} ADD PRIMARY KEY ("${columnName}");`; } } class PgAlterTableAlterColumnDropPrimaryKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_drop_pk' && dialect === 'postgresql' ); } convert(statement: JsonAlterColumnDropPrimaryKeyStatement) { const { tableName, columnName, schema } = statement; return `/* Unfortunately in current drizzle-kit version we can't automatically get name for primary key. We are working on making it available! Meanwhile you can: 1. Check pk name in your database, by running SELECT constraint_name FROM information_schema.table_constraints WHERE table_schema = '${typeof schema === 'undefined' || schema === '' ? 'public' : schema}' AND table_name = '${tableName}' AND constraint_type = 'PRIMARY KEY'; 2. Uncomment code below and paste pk name manually Hope to release this update as soon as possible */ -- ALTER TABLE "${tableName}" DROP CONSTRAINT "";`; } } class PgAlterTableAlterColumnSetNotNullConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_set_notnull' && dialect === 'postgresql' ); } convert(statement: JsonAlterColumnSetNotNullStatement) { const { tableName, columnName } = statement; const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" SET NOT NULL;`; } } class PgAlterTableAlterColumnDropNotNullConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_alter_column_drop_notnull' && dialect === 'postgresql' ); } convert(statement: JsonAlterColumnDropNotNullStatement) { const { tableName, columnName } = statement; const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; return `ALTER TABLE ${tableNameWithSchema} ALTER COLUMN "${columnName}" DROP NOT NULL;`; } } // FK class PgCreateForeignKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_reference' && dialect === 'postgresql'; } convert(statement: JsonCreateReferenceStatement): string { const { name, tableFrom, tableTo, columnsFrom, columnsTo, onDelete, onUpdate, schemaTo, } = PgSquasher.unsquashFK(statement.data); const onDeleteStatement = onDelete ? ` ON DELETE ${onDelete}` : ''; const onUpdateStatement = onUpdate ? ` ON UPDATE ${onUpdate}` : ''; const fromColumnsString = columnsFrom.map((it) => `"${it}"`).join(','); const toColumnsString = columnsTo.map((it) => `"${it}"`).join(','); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${tableFrom}"` : `"${tableFrom}"`; const tableToNameWithSchema = schemaTo ? `"${schemaTo}"."${tableTo}"` : `"${tableTo}"`; const alterStatement = `ALTER TABLE ${tableNameWithSchema} ADD CONSTRAINT "${name}" FOREIGN KEY (${fromColumnsString}) REFERENCES ${tableToNameWithSchema}(${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; return alterStatement; } } class LibSQLCreateForeignKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'create_reference' && dialect === 'turso' ); } convert( statement: JsonCreateReferenceStatement, json2?: SQLiteSchemaSquashed, action?: 'push', ): string { const { columnsFrom, columnsTo, tableFrom, onDelete, onUpdate, tableTo } = action === 'push' ? SQLiteSquasher.unsquashPushFK(statement.data) : SQLiteSquasher.unsquashFK(statement.data); const { columnDefault, columnNotNull, columnType } = statement; const onDeleteStatement = onDelete ? ` ON DELETE ${onDelete}` : ''; const onUpdateStatement = onUpdate ? ` ON UPDATE ${onUpdate}` : ''; const columnsDefaultValue = columnDefault ? ` DEFAULT ${columnDefault}` : ''; const columnNotNullValue = columnNotNull ? ` NOT NULL` : ''; const columnTypeValue = columnType ? ` ${columnType}` : ''; const columnFrom = columnsFrom[0]; const columnTo = columnsTo[0]; return `ALTER TABLE \`${tableFrom}\` ALTER COLUMN "${columnFrom}" TO "${columnFrom}"${columnTypeValue}${columnNotNullValue}${columnsDefaultValue} REFERENCES ${tableTo}(${columnTo})${onDeleteStatement}${onUpdateStatement};`; } } class MySqlCreateForeignKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_reference' && dialect === 'mysql'; } convert(statement: JsonCreateReferenceStatement): string { const { name, tableFrom, tableTo, columnsFrom, columnsTo, onDelete, onUpdate, } = MySqlSquasher.unsquashFK(statement.data); const onDeleteStatement = onDelete ? ` ON DELETE ${onDelete}` : ''; const onUpdateStatement = onUpdate ? ` ON UPDATE ${onUpdate}` : ''; const fromColumnsString = columnsFrom.map((it) => `\`${it}\``).join(','); const toColumnsString = columnsTo.map((it) => `\`${it}\``).join(','); return `ALTER TABLE \`${tableFrom}\` ADD CONSTRAINT \`${name}\` FOREIGN KEY (${fromColumnsString}) REFERENCES \`${tableTo}\`(${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; } } class PgAlterForeignKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'alter_reference' && dialect === 'postgresql'; } convert(statement: JsonAlterReferenceStatement): string { const newFk = PgSquasher.unsquashFK(statement.data); const oldFk = PgSquasher.unsquashFK(statement.oldFkey); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${oldFk.tableFrom}"` : `"${oldFk.tableFrom}"`; let sql = `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${oldFk.name}";\n`; const onDeleteStatement = newFk.onDelete ? ` ON DELETE ${newFk.onDelete}` : ''; const onUpdateStatement = newFk.onUpdate ? ` ON UPDATE ${newFk.onUpdate}` : ''; const fromColumnsString = newFk.columnsFrom .map((it) => `"${it}"`) .join(','); const toColumnsString = newFk.columnsTo.map((it) => `"${it}"`).join(','); const tableFromNameWithSchema = oldFk.schemaTo ? `"${oldFk.schemaTo}"."${oldFk.tableFrom}"` : `"${oldFk.tableFrom}"`; const tableToNameWithSchema = newFk.schemaTo ? `"${newFk.schemaTo}"."${newFk.tableFrom}"` : `"${newFk.tableFrom}"`; const alterStatement = `ALTER TABLE ${tableFromNameWithSchema} ADD CONSTRAINT "${newFk.name}" FOREIGN KEY (${fromColumnsString}) REFERENCES ${tableToNameWithSchema}(${toColumnsString})${onDeleteStatement}${onUpdateStatement};`; sql += alterStatement; return sql; } } class PgDeleteForeignKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'delete_reference' && dialect === 'postgresql'; } convert(statement: JsonDeleteReferenceStatement): string { const tableFrom = statement.tableName; // delete fk from renamed table case const { name } = PgSquasher.unsquashFK(statement.data); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${tableFrom}"` : `"${tableFrom}"`; return `ALTER TABLE ${tableNameWithSchema} DROP CONSTRAINT "${name}";\n`; } } class MySqlDeleteForeignKeyConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'delete_reference' && dialect === 'mysql'; } convert(statement: JsonDeleteReferenceStatement): string { const tableFrom = statement.tableName; // delete fk from renamed table case const { name } = MySqlSquasher.unsquashFK(statement.data); return `ALTER TABLE \`${tableFrom}\` DROP FOREIGN KEY \`${name}\`;\n`; } } class CreatePgIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_index_pg' && dialect === 'postgresql'; } convert(statement: JsonPgCreateIndexStatement): string { const { name, columns, isUnique, concurrently, with: withMap, method, where, } = statement.data; // // since postgresql 9.5 const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; const value = columns .map( (it) => `${it.isExpression ? it.expression : `"${it.expression}"`}${ it.opclass ? ` ${it.opclass}` : it.asc ? '' : ' DESC' }${ (it.asc && it.nulls && it.nulls === 'last') || it.opclass ? '' : ` NULLS ${it.nulls!.toUpperCase()}` }`, ) .join(','); const tableNameWithSchema = statement.schema ? `"${statement.schema}"."${statement.tableName}"` : `"${statement.tableName}"`; function reverseLogic(mappedWith: Record): string { let reversedString = ''; for (const key in mappedWith) { if (mappedWith.hasOwnProperty(key)) { reversedString += `${key}=${mappedWith[key]},`; } } reversedString = reversedString.slice(0, -1); return reversedString; } return `CREATE ${indexPart}${ concurrently ? ' CONCURRENTLY' : '' } "${name}" ON ${tableNameWithSchema} USING ${method} (${value})${ Object.keys(withMap!).length !== 0 ? ` WITH (${reverseLogic(withMap!)})` : '' }${where ? ` WHERE ${where}` : ''};`; } } class CreateMySqlIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_index' && dialect === 'mysql'; } convert(statement: JsonCreateIndexStatement): string { // should be changed const { name, columns, isUnique } = MySqlSquasher.unsquashIdx( statement.data, ); const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; const uniqueString = columns .map((it) => { return statement.internal?.indexes ? statement.internal?.indexes[name]?.columns[it]?.isExpression ? it : `\`${it}\`` : `\`${it}\``; }) .join(','); return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString});`; } } export class CreateSingleStoreIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_index' && dialect === 'singlestore'; } convert(statement: JsonCreateIndexStatement): string { // should be changed const { name, columns, isUnique } = SingleStoreSquasher.unsquashIdx( statement.data, ); const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; const uniqueString = columns .map((it) => { return statement.internal?.indexes ? statement.internal?.indexes[name]?.columns[it]?.isExpression ? it : `\`${it}\`` : `\`${it}\``; }) .join(','); return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString});`; } } export class CreateSqliteIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_index' && (dialect === 'sqlite' || dialect === 'turso'); } convert(statement: JsonCreateIndexStatement): string { // should be changed const { name, columns, isUnique, where } = SQLiteSquasher.unsquashIdx( statement.data, ); // // since postgresql 9.5 const indexPart = isUnique ? 'UNIQUE INDEX' : 'INDEX'; const whereStatement = where ? ` WHERE ${where}` : ''; const uniqueString = columns .map((it) => { return statement.internal?.indexes ? statement.internal?.indexes[name]?.columns[it]?.isExpression ? it : `\`${it}\`` : `\`${it}\``; }) .join(','); return `CREATE ${indexPart} \`${name}\` ON \`${statement.tableName}\` (${uniqueString})${whereStatement};`; } } class PgDropIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_index' && dialect === 'postgresql'; } convert(statement: JsonDropIndexStatement): string { const { schema } = statement; const { name } = PgSquasher.unsquashIdx(statement.data); const indexNameWithSchema = schema ? `"${schema}"."${name}"` : `"${name}"`; return `DROP INDEX ${indexNameWithSchema};`; } } class PgCreateSchemaConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_schema' && dialect === 'postgresql'; } convert(statement: JsonCreateSchema) { const { name } = statement; return `CREATE SCHEMA "${name}";\n`; } } class PgRenameSchemaConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_schema' && dialect === 'postgresql'; } convert(statement: JsonRenameSchema) { const { from, to } = statement; return `ALTER SCHEMA "${from}" RENAME TO "${to}";\n`; } } class PgDropSchemaConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_schema' && dialect === 'postgresql'; } convert(statement: JsonCreateSchema) { const { name } = statement; return `DROP SCHEMA "${name}";\n`; } } class PgAlterTableSetSchemaConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_set_schema' && dialect === 'postgresql' ); } convert(statement: JsonAlterTableSetSchema) { const { tableName, schemaFrom, schemaTo } = statement; return `ALTER TABLE "${schemaFrom}"."${tableName}" SET SCHEMA "${schemaTo}";\n`; } } class PgAlterTableSetNewSchemaConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_set_new_schema' && dialect === 'postgresql' ); } convert(statement: JsonAlterTableSetNewSchema) { const { tableName, to, from } = statement; const tableNameWithSchema = from ? `"${from}"."${tableName}"` : `"${tableName}"`; return `ALTER TABLE ${tableNameWithSchema} SET SCHEMA "${to}";\n`; } } class PgAlterTableRemoveFromSchemaConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'alter_table_remove_from_schema' && dialect === 'postgresql' ); } convert(statement: JsonAlterTableRemoveFromSchema) { const { tableName, schema } = statement; const tableNameWithSchema = schema ? `"${schema}"."${tableName}"` : `"${tableName}"`; return `ALTER TABLE ${tableNameWithSchema} SET SCHEMA public;\n`; } } export class SqliteDropIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_index' && (dialect === 'sqlite' || dialect === 'turso'); } convert(statement: JsonDropIndexStatement): string { const { name } = PgSquasher.unsquashIdx(statement.data); return `DROP INDEX \`${name}\`;`; } } class MySqlDropIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_index' && dialect === 'mysql'; } convert(statement: JsonDropIndexStatement): string { const { name } = MySqlSquasher.unsquashIdx(statement.data); return `DROP INDEX \`${name}\` ON \`${statement.tableName}\`;`; } } class SingleStoreDropIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_index' && dialect === 'singlestore'; } convert(statement: JsonDropIndexStatement): string { const { name } = SingleStoreSquasher.unsquashIdx(statement.data); return `DROP INDEX \`${name}\` ON \`${statement.tableName}\`;`; } } class SQLiteRecreateTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'recreate_table' && dialect === 'sqlite' ); } convert(statement: JsonRecreateTableStatement): string | string[] { const { tableName, columns, compositePKs, referenceData, checkConstraints } = statement; const columnNames = columns.map((it) => `"${it.name}"`).join(', '); const newTableName = `__new_${tableName}`; const sqlStatements: string[] = []; sqlStatements.push(`PRAGMA foreign_keys=OFF;`); // map all possible variants const mappedCheckConstraints: string[] = checkConstraints.map((it) => it.replaceAll(`"${tableName}".`, `"${newTableName}".`).replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) .replaceAll(`${tableName}.`, `${newTableName}.`).replaceAll(`'${tableName}'.`, `'${newTableName}'.`) ); // create new table sqlStatements.push( new SQLiteCreateTableConvertor().convert({ type: 'sqlite_create_table', tableName: newTableName, columns, referenceData, compositePKs, checkConstraints: mappedCheckConstraints, }), ); // migrate data sqlStatements.push( `INSERT INTO \`${newTableName}\`(${columnNames}) SELECT ${columnNames} FROM \`${tableName}\`;`, ); // drop table sqlStatements.push( new SQLiteDropTableConvertor().convert({ type: 'drop_table', tableName: tableName, schema: '', }), ); // rename table sqlStatements.push( new SqliteRenameTableConvertor().convert({ fromSchema: '', tableNameFrom: newTableName, tableNameTo: tableName, toSchema: '', type: 'rename_table', }), ); sqlStatements.push(`PRAGMA foreign_keys=ON;`); return sqlStatements; } } class LibSQLRecreateTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'recreate_table' && dialect === 'turso' ); } convert(statement: JsonRecreateTableStatement): string[] { const { tableName, columns, compositePKs, referenceData, checkConstraints } = statement; const columnNames = columns.map((it) => `"${it.name}"`).join(', '); const newTableName = `__new_${tableName}`; const sqlStatements: string[] = []; const mappedCheckConstraints: string[] = checkConstraints.map((it) => it.replaceAll(`"${tableName}".`, `"${newTableName}".`).replaceAll(`\`${tableName}\`.`, `\`${newTableName}\`.`) .replaceAll(`${tableName}.`, `${newTableName}.`).replaceAll(`'${tableName}'.`, `\`${newTableName}\`.`) ); sqlStatements.push(`PRAGMA foreign_keys=OFF;`); // create new table sqlStatements.push( new SQLiteCreateTableConvertor().convert({ type: 'sqlite_create_table', tableName: newTableName, columns, referenceData, compositePKs, checkConstraints: mappedCheckConstraints, }), ); // migrate data sqlStatements.push( `INSERT INTO \`${newTableName}\`(${columnNames}) SELECT ${columnNames} FROM \`${tableName}\`;`, ); // drop table sqlStatements.push( new SQLiteDropTableConvertor().convert({ type: 'drop_table', tableName: tableName, schema: '', }), ); // rename table sqlStatements.push( new SqliteRenameTableConvertor().convert({ fromSchema: '', tableNameFrom: newTableName, tableNameTo: tableName, toSchema: '', type: 'rename_table', }), ); sqlStatements.push(`PRAGMA foreign_keys=ON;`); return sqlStatements; } } class SingleStoreRecreateTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return ( statement.type === 'singlestore_recreate_table' && dialect === 'singlestore' ); } convert(statement: JsonRecreateSingleStoreTableStatement): string[] { const { tableName, columns, compositePKs, uniqueConstraints } = statement; const columnNames = columns.map((it) => `\`${it.name}\``).join(', '); const newTableName = `__new_${tableName}`; const sqlStatements: string[] = []; // create new table sqlStatements.push( new SingleStoreCreateTableConvertor().convert({ type: 'create_table', tableName: newTableName, columns, compositePKs, uniqueConstraints, schema: '', }), ); // migrate data sqlStatements.push( `INSERT INTO \`${newTableName}\`(${columnNames}) SELECT ${columnNames} FROM \`${tableName}\`;`, ); // drop table sqlStatements.push( new SingleStoreDropTableConvertor().convert({ type: 'drop_table', tableName: tableName, schema: '', }), ); // rename table sqlStatements.push( new SingleStoreRenameTableConvertor().convert({ fromSchema: '', tableNameFrom: newTableName, tableNameTo: tableName, toSchema: '', type: 'rename_table', }), ); return sqlStatements; } } const convertors: Convertor[] = []; convertors.push(new PgCreateTableConvertor()); convertors.push(new MySqlCreateTableConvertor()); convertors.push(new SingleStoreCreateTableConvertor()); convertors.push(new SingleStoreRecreateTableConvertor()); convertors.push(new SQLiteCreateTableConvertor()); convertors.push(new SQLiteRecreateTableConvertor()); convertors.push(new LibSQLRecreateTableConvertor()); convertors.push(new PgCreateViewConvertor()); convertors.push(new PgDropViewConvertor()); convertors.push(new PgRenameViewConvertor()); convertors.push(new PgAlterViewSchemaConvertor()); convertors.push(new PgAlterViewAddWithOptionConvertor()); convertors.push(new PgAlterViewDropWithOptionConvertor()); convertors.push(new PgAlterViewAlterTablespaceConvertor()); convertors.push(new PgAlterViewAlterUsingConvertor()); convertors.push(new MySqlCreateViewConvertor()); convertors.push(new MySqlDropViewConvertor()); convertors.push(new MySqlRenameViewConvertor()); convertors.push(new MySqlAlterViewConvertor()); convertors.push(new SqliteCreateViewConvertor()); convertors.push(new SqliteDropViewConvertor()); convertors.push(new CreateTypeEnumConvertor()); convertors.push(new DropTypeEnumConvertor()); convertors.push(new AlterTypeAddValueConvertor()); convertors.push(new AlterTypeSetSchemaConvertor()); convertors.push(new AlterRenameTypeConvertor()); convertors.push(new AlterTypeDropValueConvertor()); convertors.push(new CreatePgSequenceConvertor()); convertors.push(new DropPgSequenceConvertor()); convertors.push(new RenamePgSequenceConvertor()); convertors.push(new MovePgSequenceConvertor()); convertors.push(new AlterPgSequenceConvertor()); convertors.push(new PgDropTableConvertor()); convertors.push(new MySQLDropTableConvertor()); convertors.push(new SingleStoreDropTableConvertor()); convertors.push(new SQLiteDropTableConvertor()); convertors.push(new PgRenameTableConvertor()); convertors.push(new MySqlRenameTableConvertor()); convertors.push(new SingleStoreRenameTableConvertor()); convertors.push(new SqliteRenameTableConvertor()); convertors.push(new PgAlterTableRenameColumnConvertor()); convertors.push(new MySqlAlterTableRenameColumnConvertor()); convertors.push(new SingleStoreAlterTableRenameColumnConvertor()); convertors.push(new SQLiteAlterTableRenameColumnConvertor()); convertors.push(new PgAlterTableDropColumnConvertor()); convertors.push(new MySqlAlterTableDropColumnConvertor()); convertors.push(new SingleStoreAlterTableDropColumnConvertor()); convertors.push(new SQLiteAlterTableDropColumnConvertor()); convertors.push(new PgAlterTableAddColumnConvertor()); convertors.push(new MySqlAlterTableAddColumnConvertor()); convertors.push(new SingleStoreAlterTableAddColumnConvertor()); convertors.push(new SQLiteAlterTableAddColumnConvertor()); convertors.push(new PgAlterTableAlterColumnSetTypeConvertor()); convertors.push(new PgAlterTableAddUniqueConstraintConvertor()); convertors.push(new PgAlterTableDropUniqueConstraintConvertor()); convertors.push(new PgAlterTableAddCheckConstraintConvertor()); convertors.push(new PgAlterTableDeleteCheckConstraintConvertor()); convertors.push(new MySqlAlterTableAddCheckConstraintConvertor()); convertors.push(new MySqlAlterTableDeleteCheckConstraintConvertor()); convertors.push(new MySQLAlterTableAddUniqueConstraintConvertor()); convertors.push(new MySQLAlterTableDropUniqueConstraintConvertor()); convertors.push(new SingleStoreAlterTableAddUniqueConstraintConvertor()); convertors.push(new SingleStoreAlterTableDropUniqueConstraintConvertor()); convertors.push(new CreatePgIndexConvertor()); convertors.push(new CreateMySqlIndexConvertor()); convertors.push(new CreateSingleStoreIndexConvertor()); convertors.push(new CreateSqliteIndexConvertor()); convertors.push(new PgDropIndexConvertor()); convertors.push(new SqliteDropIndexConvertor()); convertors.push(new MySqlDropIndexConvertor()); convertors.push(new SingleStoreDropIndexConvertor()); convertors.push(new PgAlterTableAlterColumnSetPrimaryKeyConvertor()); convertors.push(new PgAlterTableAlterColumnDropPrimaryKeyConvertor()); convertors.push(new PgAlterTableAlterColumnSetNotNullConvertor()); convertors.push(new PgAlterTableAlterColumnDropNotNullConvertor()); convertors.push(new PgAlterTableAlterColumnSetDefaultConvertor()); convertors.push(new PgAlterTableAlterColumnDropDefaultConvertor()); convertors.push(new PgAlterPolicyConvertor()); convertors.push(new PgCreatePolicyConvertor()); convertors.push(new PgDropPolicyConvertor()); convertors.push(new PgRenamePolicyConvertor()); convertors.push(new PgAlterIndPolicyConvertor()); convertors.push(new PgCreateIndPolicyConvertor()); convertors.push(new PgDropIndPolicyConvertor()); convertors.push(new PgRenameIndPolicyConvertor()); convertors.push(new PgEnableRlsConvertor()); convertors.push(new PgDisableRlsConvertor()); convertors.push(new PgDropRoleConvertor()); convertors.push(new PgAlterRoleConvertor()); convertors.push(new PgCreateRoleConvertor()); convertors.push(new PgRenameRoleConvertor()); /// generated convertors.push(new PgAlterTableAlterColumnSetExpressionConvertor()); convertors.push(new PgAlterTableAlterColumnDropGeneratedConvertor()); convertors.push(new PgAlterTableAlterColumnAlterrGeneratedConvertor()); convertors.push(new MySqlAlterTableAlterColumnAlterrGeneratedConvertor()); convertors.push(new SingleStoreAlterTableAlterColumnAlterrGeneratedConvertor()); convertors.push(new SqliteAlterTableAlterColumnDropGeneratedConvertor()); convertors.push(new SqliteAlterTableAlterColumnAlterGeneratedConvertor()); convertors.push(new SqliteAlterTableAlterColumnSetExpressionConvertor()); convertors.push(new MySqlModifyColumn()); convertors.push(new LibSQLModifyColumn()); // convertors.push(new MySqlAlterTableAlterColumnSetDefaultConvertor()); // convertors.push(new MySqlAlterTableAlterColumnDropDefaultConvertor()); convertors.push(new SingleStoreModifyColumn()); convertors.push(new PgCreateForeignKeyConvertor()); convertors.push(new MySqlCreateForeignKeyConvertor()); convertors.push(new PgAlterForeignKeyConvertor()); convertors.push(new PgDeleteForeignKeyConvertor()); convertors.push(new MySqlDeleteForeignKeyConvertor()); convertors.push(new PgCreateSchemaConvertor()); convertors.push(new PgRenameSchemaConvertor()); convertors.push(new PgDropSchemaConvertor()); convertors.push(new PgAlterTableSetSchemaConvertor()); convertors.push(new PgAlterTableSetNewSchemaConvertor()); convertors.push(new PgAlterTableRemoveFromSchemaConvertor()); convertors.push(new LibSQLCreateForeignKeyConvertor()); convertors.push(new PgAlterTableAlterColumnDropGenerated()); convertors.push(new PgAlterTableAlterColumnSetGenerated()); convertors.push(new PgAlterTableAlterColumnAlterGenerated()); convertors.push(new PgAlterTableCreateCompositePrimaryKeyConvertor()); convertors.push(new PgAlterTableDeleteCompositePrimaryKeyConvertor()); convertors.push(new PgAlterTableAlterCompositePrimaryKeyConvertor()); convertors.push(new MySqlAlterTableDeleteCompositePrimaryKeyConvertor()); convertors.push(new MySqlAlterTableDropPk()); convertors.push(new MySqlAlterTableCreateCompositePrimaryKeyConvertor()); convertors.push(new MySqlAlterTableAddPk()); convertors.push(new MySqlAlterTableAlterCompositePrimaryKeyConvertor()); convertors.push(new SingleStoreAlterTableDropPk()); convertors.push(new SingleStoreAlterTableAddPk()); export function fromJson( statements: JsonStatement[], dialect: Dialect, action?: 'push', json2?: SQLiteSchemaSquashed, ) { const result = statements .flatMap((statement) => { const filtered = convertors.filter((it) => { return it.can(statement, dialect); }); const convertor = filtered.length === 1 ? filtered[0] : undefined; if (!convertor) { return ''; } return convertor.convert(statement, json2, action); }) .filter((it) => it !== ''); return result; } // blog.yo1.dog/updating-enum-values-in-postgresql-the-safe-and-easy-way/ // test case for enum altering https: ` create table users ( id int, name character varying(128) ); create type venum as enum('one', 'two', 'three'); alter table users add column typed venum; insert into users(id, name, typed) values (1, 'name1', 'one'); insert into users(id, name, typed) values (2, 'name2', 'two'); insert into users(id, name, typed) values (3, 'name3', 'three'); alter type venum rename to __venum; create type venum as enum ('one', 'two', 'three', 'four', 'five'); ALTER TABLE users ALTER COLUMN typed TYPE venum USING typed::text::venum; insert into users(id, name, typed) values (4, 'name4', 'four'); insert into users(id, name, typed) values (5, 'name5', 'five'); drop type __venum; `; ================================================ FILE: drizzle-kit/src/statementCombiner.ts ================================================ import { JsonCreateIndexStatement, JsonRecreateTableStatement, JsonStatement, prepareCreateIndexesJson, } from './jsonStatements'; import { SingleStoreSchemaSquashed } from './serializer/singlestoreSchema'; import { SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema'; export const prepareLibSQLRecreateTable = ( table: SQLiteSchemaSquashed['tables'][keyof SQLiteSchemaSquashed['tables']], action?: 'push', ): (JsonRecreateTableStatement | JsonCreateIndexStatement)[] => { const { name, columns, uniqueConstraints, indexes, checkConstraints } = table; const composites: string[][] = Object.values(table.compositePrimaryKeys).map( (it) => SQLiteSquasher.unsquashPK(it), ); const references: string[] = Object.values(table.foreignKeys); const fks = references.map((it) => action === 'push' ? SQLiteSquasher.unsquashPushFK(it) : SQLiteSquasher.unsquashFK(it) ); const statements: (JsonRecreateTableStatement | JsonCreateIndexStatement)[] = [ { type: 'recreate_table', tableName: name, columns: Object.values(columns), compositePKs: composites, referenceData: fks, uniqueConstraints: Object.values(uniqueConstraints), checkConstraints: Object.values(checkConstraints), }, ]; if (Object.keys(indexes).length) { statements.push(...prepareCreateIndexesJson(name, '', indexes)); } return statements; }; export const prepareSQLiteRecreateTable = ( table: SQLiteSchemaSquashed['tables'][keyof SQLiteSchemaSquashed['tables']], action?: 'push', ): JsonStatement[] => { const { name, columns, uniqueConstraints, indexes, checkConstraints } = table; const composites: string[][] = Object.values(table.compositePrimaryKeys).map( (it) => SQLiteSquasher.unsquashPK(it), ); const references: string[] = Object.values(table.foreignKeys); const fks = references.map((it) => action === 'push' ? SQLiteSquasher.unsquashPushFK(it) : SQLiteSquasher.unsquashFK(it) ); const statements: JsonStatement[] = [ { type: 'recreate_table', tableName: name, columns: Object.values(columns), compositePKs: composites, referenceData: fks, uniqueConstraints: Object.values(uniqueConstraints), checkConstraints: Object.values(checkConstraints), }, ]; if (Object.keys(indexes).length) { statements.push(...prepareCreateIndexesJson(name, '', indexes)); } return statements; }; export const libSQLCombineStatements = ( statements: JsonStatement[], json2: SQLiteSchemaSquashed, action?: 'push', ) => { // const tablesContext: Record = {}; const newStatements: Record = {}; for (const statement of statements) { if ( statement.type === 'alter_table_alter_column_drop_autoincrement' || statement.type === 'alter_table_alter_column_set_autoincrement' || statement.type === 'alter_table_alter_column_drop_pk' || statement.type === 'alter_table_alter_column_set_pk' || statement.type === 'create_composite_pk' || statement.type === 'alter_composite_pk' || statement.type === 'delete_composite_pk' || statement.type === 'create_check_constraint' || statement.type === 'delete_check_constraint' ) { const tableName = statement.tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action); continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); if (wasRename) { newStatements[tableName].push(...preparedStatements); } else { newStatements[tableName] = preparedStatements; } continue; } continue; } if ( statement.type === 'alter_table_alter_column_set_type' || statement.type === 'alter_table_alter_column_drop_notnull' || statement.type === 'alter_table_alter_column_set_notnull' || statement.type === 'alter_table_alter_column_set_default' || statement.type === 'alter_table_alter_column_drop_default' ) { const { tableName, columnName, columnPk } = statement; const columnIsPartOfForeignKey = Object.values( json2.tables[tableName].foreignKeys, ).some((it) => { const unsquashFk = action === 'push' ? SQLiteSquasher.unsquashPushFK(it) : SQLiteSquasher.unsquashFK(it); return ( unsquashFk.columnsFrom.includes(columnName) ); }); const statementsForTable = newStatements[tableName]; if ( !statementsForTable && (columnIsPartOfForeignKey || columnPk) ) { newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action); continue; } if ( statementsForTable && (columnIsPartOfForeignKey || columnPk) ) { if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); if (wasRename) { newStatements[tableName].push(...preparedStatements); } else { newStatements[tableName] = preparedStatements; } } continue; } if ( statementsForTable && !(columnIsPartOfForeignKey || columnPk) ) { if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { newStatements[tableName].push(statement); } continue; } newStatements[tableName] = [statement]; continue; } if (statement.type === 'create_reference') { const tableName = statement.tableName; const data = action === 'push' ? SQLiteSquasher.unsquashPushFK(statement.data) : SQLiteSquasher.unsquashFK(statement.data); const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = statement.isMulticolumn ? prepareLibSQLRecreateTable(json2.tables[tableName], action) : [statement]; continue; } // if add column with reference -> skip create_reference statement if ( !statement.isMulticolumn && statementsForTable.some((st) => st.type === 'sqlite_alter_table_add_column' && st.column.name === data.columnsFrom[0] ) ) { continue; } if (statement.isMulticolumn) { if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); if (wasRename) { newStatements[tableName].push(...preparedStatements); } else { newStatements[tableName] = preparedStatements; } continue; } continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { newStatements[tableName].push(statement); } continue; } if (statement.type === 'delete_reference') { const tableName = statement.tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action); continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); if (wasRename) { newStatements[tableName].push(...preparedStatements); } else { newStatements[tableName] = preparedStatements; } continue; } continue; } if (statement.type === 'sqlite_alter_table_add_column' && statement.column.primaryKey) { const tableName = statement.tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = prepareLibSQLRecreateTable(json2.tables[tableName], action); continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); const preparedStatements = prepareLibSQLRecreateTable(json2.tables[tableName], action); if (wasRename) { newStatements[tableName].push(...preparedStatements); } else { newStatements[tableName] = preparedStatements; } continue; } continue; } const tableName = statement.type === 'rename_table' ? statement.tableNameTo : (statement as { tableName: string }).tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = [statement]; continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { newStatements[tableName].push(statement); } } const combinedStatements = Object.values(newStatements).flat(); const renamedTables = combinedStatements.filter((it) => it.type === 'rename_table'); const renamedColumns = combinedStatements.filter((it) => it.type === 'alter_table_rename_column'); const rest = combinedStatements.filter((it) => it.type !== 'rename_table' && it.type !== 'alter_table_rename_column'); return [...renamedTables, ...renamedColumns, ...rest]; }; export const sqliteCombineStatements = ( statements: JsonStatement[], json2: SQLiteSchemaSquashed, action?: 'push', ) => { // const tablesContext: Record = {}; const newStatements: Record = {}; for (const statement of statements) { if ( statement.type === 'alter_table_alter_column_set_type' || statement.type === 'alter_table_alter_column_set_default' || statement.type === 'alter_table_alter_column_drop_default' || statement.type === 'alter_table_alter_column_set_notnull' || statement.type === 'alter_table_alter_column_drop_notnull' || statement.type === 'alter_table_alter_column_drop_autoincrement' || statement.type === 'alter_table_alter_column_set_autoincrement' || statement.type === 'alter_table_alter_column_drop_pk' || statement.type === 'alter_table_alter_column_set_pk' || statement.type === 'delete_reference' || statement.type === 'alter_reference' || statement.type === 'create_composite_pk' || statement.type === 'alter_composite_pk' || statement.type === 'delete_composite_pk' || statement.type === 'create_unique_constraint' || statement.type === 'delete_unique_constraint' || statement.type === 'create_check_constraint' || statement.type === 'delete_check_constraint' ) { const tableName = statement.tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = prepareSQLiteRecreateTable(json2.tables[tableName], action); continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); const preparedStatements = prepareSQLiteRecreateTable(json2.tables[tableName], action); if (wasRename) { newStatements[tableName].push(...preparedStatements); } else { newStatements[tableName] = preparedStatements; } continue; } continue; } if (statement.type === 'sqlite_alter_table_add_column' && statement.column.primaryKey) { const tableName = statement.tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = prepareSQLiteRecreateTable(json2.tables[tableName], action); continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); const preparedStatements = prepareSQLiteRecreateTable(json2.tables[tableName], action); if (wasRename) { newStatements[tableName].push(...preparedStatements); } else { newStatements[tableName] = preparedStatements; } continue; } continue; } if (statement.type === 'create_reference') { const tableName = statement.tableName; const data = action === 'push' ? SQLiteSquasher.unsquashPushFK(statement.data) : SQLiteSquasher.unsquashFK(statement.data); const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = prepareSQLiteRecreateTable(json2.tables[tableName], action); continue; } // if add column with reference -> skip create_reference statement if ( data.columnsFrom.length === 1 && statementsForTable.some((st) => st.type === 'sqlite_alter_table_add_column' && st.column.name === data.columnsFrom[0] ) ) { continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); const preparedStatements = prepareSQLiteRecreateTable(json2.tables[tableName], action); if (wasRename) { newStatements[tableName].push(...preparedStatements); } else { newStatements[tableName] = preparedStatements; } continue; } continue; } const tableName = statement.type === 'rename_table' ? statement.tableNameTo : (statement as { tableName: string }).tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = [statement]; continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { newStatements[tableName].push(statement); } } const combinedStatements = Object.values(newStatements).flat(); const renamedTables = combinedStatements.filter((it) => it.type === 'rename_table'); const renamedColumns = combinedStatements.filter((it) => it.type === 'alter_table_rename_column'); const rest = combinedStatements.filter((it) => it.type !== 'rename_table' && it.type !== 'alter_table_rename_column'); return [...renamedTables, ...renamedColumns, ...rest]; }; export const prepareSingleStoreRecreateTable = ( table: SingleStoreSchemaSquashed['tables'][keyof SingleStoreSchemaSquashed['tables']], ): JsonStatement[] => { const { name, columns, uniqueConstraints, indexes, compositePrimaryKeys } = table; const composites: string[] = Object.values(compositePrimaryKeys); const statements: JsonStatement[] = [ { type: 'singlestore_recreate_table', tableName: name, columns: Object.values(columns), compositePKs: composites, uniqueConstraints: Object.values(uniqueConstraints), }, ]; if (Object.keys(indexes).length) { statements.push(...prepareCreateIndexesJson(name, '', indexes)); } return statements; }; export const singleStoreCombineStatements = ( statements: JsonStatement[], json2: SingleStoreSchemaSquashed, ) => { const newStatements: Record = {}; for (const statement of statements) { if ( statement.type === 'alter_table_alter_column_set_type' || statement.type === 'alter_table_alter_column_set_notnull' || statement.type === 'alter_table_alter_column_drop_notnull' || statement.type === 'alter_table_alter_column_drop_autoincrement' || statement.type === 'alter_table_alter_column_set_autoincrement' || statement.type === 'alter_table_alter_column_drop_pk' || statement.type === 'alter_table_alter_column_set_pk' || statement.type === 'create_composite_pk' || statement.type === 'alter_composite_pk' || statement.type === 'delete_composite_pk' ) { const tableName = statement.tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = prepareSingleStoreRecreateTable(json2.tables[tableName]); continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table' || type === 'alter_table_rename_column' ); const preparedStatements = prepareSingleStoreRecreateTable(json2.tables[tableName]); if (wasRename) { newStatements[tableName].push(...preparedStatements); } else { newStatements[tableName] = preparedStatements; } continue; } continue; } if ( (statement.type === 'alter_table_alter_column_drop_default' || statement.type === 'alter_table_alter_column_set_default') && statement.columnNotNull ) { const tableName = statement.tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = prepareSingleStoreRecreateTable(json2.tables[tableName]); continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); const preparedStatements = prepareSingleStoreRecreateTable(json2.tables[tableName]); if (wasRename) { newStatements[tableName].push(...preparedStatements); } else { newStatements[tableName] = preparedStatements; } continue; } continue; } if (statement.type === 'alter_table_add_column' && statement.column.primaryKey) { const tableName = statement.tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = prepareSingleStoreRecreateTable(json2.tables[tableName]); continue; } if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); const preparedStatements = prepareSingleStoreRecreateTable(json2.tables[tableName]); if (wasRename) { newStatements[tableName].push(...preparedStatements); } else { newStatements[tableName] = preparedStatements; } continue; } continue; } const tableName = statement.type === 'rename_table' ? statement.tableNameTo : (statement as { tableName: string }).tableName; const statementsForTable = newStatements[tableName]; if (!statementsForTable) { newStatements[tableName] = [statement]; continue; } if (!statementsForTable.some(({ type }) => type === 'singlestore_recreate_table')) { newStatements[tableName].push(statement); } } const combinedStatements = Object.values(newStatements).flat(); const renamedTables = combinedStatements.filter((it) => it.type === 'rename_table'); const renamedColumns = combinedStatements.filter((it) => it.type === 'alter_table_rename_column'); const rest = combinedStatements.filter((it) => it.type !== 'rename_table' && it.type !== 'alter_table_rename_column'); return [...renamedTables, ...renamedColumns, ...rest]; }; ================================================ FILE: drizzle-kit/src/utils/certs.ts ================================================ import envPaths from 'env-paths'; import { mkdirSync } from 'fs'; import { access, readFile } from 'fs/promises'; import { exec, ExecOptions } from 'node:child_process'; import { join } from 'path'; export function runCommand(command: string, options: ExecOptions = {}) { return new Promise<{ exitCode: number }>((resolve) => { exec(command, options, (error) => { return resolve({ exitCode: error?.code ?? 0 }); }); }); } export const certs = async () => { const res = await runCommand('mkcert --help'); if (res.exitCode === 0) { const p = envPaths('drizzle-studio', { suffix: '', }); // create ~/.local/share/drizzle-studio mkdirSync(p.data, { recursive: true }); // ~/.local/share/drizzle-studio const keyPath = join(p.data, 'localhost-key.pem'); const certPath = join(p.data, 'localhost.pem'); try { // check if the files exist await Promise.all([access(keyPath), access(certPath)]); } catch (e) { // if not create them await runCommand(`mkcert localhost`, { cwd: p.data }); } const [key, cert] = await Promise.all([ readFile(keyPath, { encoding: 'utf-8' }), readFile(certPath, { encoding: 'utf-8' }), ]); return key && cert ? { key, cert } : null; } return null; }; ================================================ FILE: drizzle-kit/src/utils/words.ts ================================================ import type { Prefix } from '../cli/validations/common'; export const prepareMigrationMetadata = ( idx: number, prefixMode: Prefix, name?: string, ) => { const prefix = prefixMode === 'index' ? idx.toFixed(0).padStart(4, '0') : prefixMode === 'timestamp' || prefixMode === 'supabase' ? new Date() .toISOString() .replace('T', '') .replaceAll('-', '') .replaceAll(':', '') .slice(0, 14) : prefixMode === 'unix' ? Math.floor(Date.now() / 1000) : ''; const suffix = name || `${adjectives.random()}_${heroes.random()}`; const tag = `${prefix}_${suffix}`; return { prefix, suffix, tag }; }; export const adjectives = [ 'abandoned', 'aberrant', 'abnormal', 'absent', 'absurd', 'acoustic', 'adorable', 'amazing', 'ambiguous', 'ambitious', 'amused', 'amusing', 'ancient', 'aromatic', 'aspiring', 'awesome', 'bent', 'big', 'bitter', 'bizarre', 'black', 'blue', 'blushing', 'bored', 'boring', 'bouncy', 'brainy', 'brave', 'breezy', 'brief', 'bright', 'broad', 'broken', 'brown', 'bumpy', 'burly', 'busy', 'calm', 'careful', 'careless', 'certain', 'charming', 'cheerful', 'chemical', 'chief', 'chilly', 'chubby', 'chunky', 'clammy', 'classy', 'clean', 'clear', 'clever', 'cloudy', 'closed', 'clumsy', 'cold', 'colorful', 'colossal', 'common', 'complete', 'complex', 'concerned', 'condemned', 'confused', 'conscious', 'cooing', 'cool', 'crazy', 'cuddly', 'cultured', 'curious', 'curly', 'curved', 'curvy', 'cute', 'cynical', 'daffy', 'daily', 'damp', 'dapper', 'dark', 'dashing', 'dazzling', 'dear', 'deep', 'demonic', 'dizzy', 'dry', 'dusty', 'eager', 'early', 'easy', 'elite', 'eminent', 'empty', 'equal', 'even', 'exotic', 'fair', 'faithful', 'familiar', 'famous', 'fancy', 'fantastic', 'far', 'fast', 'fat', 'faulty', 'fearless', 'fine', 'first', 'fixed', 'flaky', 'flashy', 'flat', 'flawless', 'flimsy', 'flippant', 'flowery', 'fluffy', 'foamy', 'free', 'freezing', 'fresh', 'friendly', 'funny', 'furry', 'futuristic', 'fuzzy', 'giant', 'gifted', 'gigantic', 'glamorous', 'glorious', 'glossy', 'good', 'goofy', 'gorgeous', 'graceful', 'gray', 'great', 'greedy', 'green', 'grey', 'groovy', 'handy', 'happy', 'hard', 'harsh', 'heavy', 'hesitant', 'high', 'hot', 'huge', 'icy', 'illegal', 'jazzy', 'jittery', 'keen', 'kind', 'known', 'lame', 'large', 'last', 'late', 'lazy', 'lean', 'left', 'legal', 'lethal', 'light', 'little', 'lively', 'living', 'lonely', 'long', 'loose', 'loud', 'lovely', 'loving', 'low', 'lowly', 'lucky', 'lumpy', 'lush', 'luxuriant', 'lying', 'lyrical', 'magenta', 'magical', 'majestic', 'many', 'massive', 'married', 'marvelous', 'material', 'mature', 'mean', 'medical', 'melodic', 'melted', 'messy', 'mighty', 'military', 'milky', 'minor', 'misty', 'mixed', 'moaning', 'modern', 'motionless', 'mushy', 'mute', 'mysterious', 'naive', 'nappy', 'narrow', 'nasty', 'natural', 'neat', 'nebulous', 'needy', 'nervous', 'new', 'next', 'nice', 'nifty', 'noisy', 'normal', 'nostalgic', 'nosy', 'numerous', 'odd', 'old', 'omniscient', 'open', 'opposite', 'optimal', 'orange', 'ordinary', 'organic', 'outgoing', 'outstanding', 'oval', 'overconfident', 'overjoyed', 'overrated', 'pale', 'panoramic', 'parallel', 'parched', 'past', 'peaceful', 'perfect', 'perpetual', 'petite', 'pink', 'plain', 'polite', 'powerful', 'premium', 'pretty', 'previous', 'productive', 'public', 'purple', 'puzzling', 'quick', 'quiet', 'rainy', 'rapid', 'rare', 'real', 'red', 'redundant', 'reflective', 'regular', 'remarkable', 'rich', 'right', 'robust', 'romantic', 'round', 'sad', 'safe', 'salty', 'same', 'secret', 'serious', 'shallow', 'sharp', 'shiny', 'shocking', 'short', 'silent', 'silky', 'silly', 'simple', 'skinny', 'sleepy', 'slim', 'slimy', 'slippery', 'sloppy', 'slow', 'small', 'smart', 'smiling', 'smooth', 'soft', 'solid', 'sour', 'sparkling', 'special', 'spicy', 'spooky', 'spotty', 'square', 'stale', 'steady', 'steep', 'sticky', 'stiff', 'stormy', 'strange', 'striped', 'strong', 'sturdy', 'sudden', 'superb', 'supreme', 'sweet', 'swift', 'talented', 'tan', 'tearful', 'tense', 'thankful', 'thick', 'thin', 'third', 'tidy', 'tiny', 'tired', 'tiresome', 'tough', 'tranquil', 'tricky', 'true', 'typical', 'uneven', 'unique', 'unknown', 'unusual', 'useful', 'vengeful', 'violet', 'volatile', 'wakeful', 'wandering', 'warm', 'watery', 'wealthy', 'wet', 'white', 'whole', 'wide', 'wild', 'windy', 'wise', 'wonderful', 'wooden', 'woozy', 'workable', 'worried', 'worthless', 'yellow', 'yielding', 'young', 'youthful', 'yummy', 'zippy', ]; export const heroes = [ 'aaron_stack', 'abomination', 'absorbing_man', 'adam_destine', 'adam_warlock', 'agent_brand', 'agent_zero', 'albert_cleary', 'alex_power', 'alex_wilder', 'alice', 'amazoness', 'amphibian', 'angel', 'anita_blake', 'annihilus', 'anthem', 'apocalypse', 'aqueduct', 'arachne', 'archangel', 'arclight', 'ares', 'argent', 'avengers', 'azazel', 'banshee', 'baron_strucker', 'baron_zemo', 'barracuda', 'bastion', 'beast', 'bedlam', 'ben_grimm', 'ben_parker', 'ben_urich', 'betty_brant', 'betty_ross', 'beyonder', 'big_bertha', 'bill_hollister', 'bishop', 'black_bird', 'black_bolt', 'black_cat', 'black_crow', 'black_knight', 'black_panther', 'black_queen', 'black_tarantula', 'black_tom', 'black_widow', 'blackheart', 'blacklash', 'blade', 'blazing_skull', 'blindfold', 'blink', 'blizzard', 'blob', 'blockbuster', 'blonde_phantom', 'bloodaxe', 'bloodscream', 'bloodstorm', 'bloodstrike', 'blue_blade', 'blue_marvel', 'blue_shield', 'blur', 'boom_boom', 'boomer', 'boomerang', 'bromley', 'brood', 'brother_voodoo', 'bruce_banner', 'bucky', 'bug', 'bulldozer', 'bullseye', 'bushwacker', 'butterfly', 'cable', 'callisto', 'calypso', 'cammi', 'cannonball', 'captain_america', 'captain_britain', 'captain_cross', 'captain_flint', 'captain_marvel', 'captain_midlands', 'captain_stacy', 'captain_universe', 'cardiac', 'caretaker', 'cargill', 'carlie_cooper', 'carmella_unuscione', 'carnage', 'cassandra_nova', 'catseye', 'celestials', 'centennial', 'cerebro', 'cerise', 'chamber', 'chameleon', 'champions', 'changeling', 'charles_xavier', 'chat', 'chimera', 'christian_walker', 'chronomancer', 'clea', 'clint_barton', 'cloak', 'cobalt_man', 'colleen_wing', 'colonel_america', 'colossus', 'corsair', 'crusher_hogan', 'crystal', 'cyclops', 'dagger', 'daimon_hellstrom', 'dakota_north', 'daredevil', 'dark_beast', 'dark_phoenix', 'darkhawk', 'darkstar', 'darwin', 'dazzler', 'deadpool', 'deathbird', 'deathstrike', 'demogoblin', 'devos', 'dexter_bennett', 'diamondback', 'doctor_doom', 'doctor_faustus', 'doctor_octopus', 'doctor_spectrum', 'doctor_strange', 'domino', 'donald_blake', 'doomsday', 'doorman', 'dorian_gray', 'dormammu', 'dracula', 'dragon_lord', 'dragon_man', 'drax', 'dreadnoughts', 'dreaming_celestial', 'dust', 'earthquake', 'echo', 'eddie_brock', 'edwin_jarvis', 'ego', 'electro', 'elektra', 'emma_frost', 'enchantress', 'ender_wiggin', 'energizer', 'epoch', 'eternals', 'eternity', 'excalibur', 'exiles', 'exodus', 'expediter', 'ezekiel', 'ezekiel_stane', 'fabian_cortez', 'falcon', 'fallen_one', 'famine', 'fantastic_four', 'fat_cobra', 'felicia_hardy', 'fenris', 'firebird', 'firebrand', 'firedrake', 'firelord', 'firestar', 'fixer', 'flatman', 'forge', 'forgotten_one', 'frank_castle', 'franklin_richards', 'franklin_storm', 'freak', 'frightful_four', 'frog_thor', 'gabe_jones', 'galactus', 'gambit', 'gamma_corps', 'gamora', 'gargoyle', 'garia', 'gateway', 'gauntlet', 'genesis', 'george_stacy', 'gertrude_yorkes', 'ghost_rider', 'giant_girl', 'giant_man', 'gideon', 'gladiator', 'glorian', 'goblin_queen', 'golden_guardian', 'goliath', 'gorgon', 'gorilla_man', 'grandmaster', 'gravity', 'green_goblin', 'gressill', 'grey_gargoyle', 'greymalkin', 'grim_reaper', 'groot', 'guardian', 'guardsmen', 'gunslinger', 'gwen_stacy', 'hairball', 'hammerhead', 'hannibal_king', 'hardball', 'harpoon', 'harrier', 'harry_osborn', 'havok', 'hawkeye', 'hedge_knight', 'hellcat', 'hellfire_club', 'hellion', 'hemingway', 'hercules', 'hex', 'hiroim', 'hitman', 'hobgoblin', 'hulk', 'human_cannonball', 'human_fly', 'human_robot', 'human_torch', 'husk', 'hydra', 'iceman', 'ikaris', 'imperial_guard', 'impossible_man', 'inertia', 'infant_terrible', 'inhumans', 'ink', 'invaders', 'invisible_woman', 'iron_fist', 'iron_lad', 'iron_man', 'iron_monger', 'iron_patriot', 'ironclad', 'jack_flag', 'jack_murdock', 'jack_power', 'jackal', 'jackpot', 'james_howlett', 'jamie_braddock', 'jane_foster', 'jasper_sitwell', 'jazinda', 'jean_grey', 'jetstream', 'jigsaw', 'jimmy_woo', 'jocasta', 'johnny_blaze', 'johnny_storm', 'joseph', 'joshua_kane', 'joystick', 'jubilee', 'juggernaut', 'junta', 'justice', 'justin_hammer', 'kabuki', 'kang', 'karen_page', 'karma', 'karnak', 'kat_farrell', 'kate_bishop', 'katie_power', 'ken_ellis', 'khan', 'kid_colt', 'killer_shrike', 'killmonger', 'killraven', 'king_bedlam', 'king_cobra', 'kingpin', 'kinsey_walden', 'kitty_pryde', 'klaw', 'komodo', 'korath', 'korg', 'korvac', 'kree', 'krista_starr', 'kronos', 'kulan_gath', 'kylun', 'la_nuit', 'lady_bullseye', 'lady_deathstrike', 'lady_mastermind', 'lady_ursula', 'lady_vermin', 'lake', 'landau', 'layla_miller', 'leader', 'leech', 'legion', 'lenny_balinger', 'leo', 'leopardon', 'leper_queen', 'lester', 'lethal_legion', 'lifeguard', 'lightspeed', 'lila_cheney', 'lilandra', 'lilith', 'lily_hollister', 'lionheart', 'living_lightning', 'living_mummy', 'living_tribunal', 'liz_osborn', 'lizard', 'loa', 'lockheed', 'lockjaw', 'logan', 'loki', 'loners', 'longshot', 'lord_hawal', 'lord_tyger', 'lorna_dane', 'luckman', 'lucky_pierre', 'luke_cage', 'luminals', 'lyja', 'ma_gnuci', 'mac_gargan', 'mach_iv', 'machine_man', 'mad_thinker', 'madame_hydra', 'madame_masque', 'madame_web', 'maddog', 'madelyne_pryor', 'madripoor', 'madrox', 'maelstrom', 'maestro', 'magdalene', 'maggott', 'magik', 'maginty', 'magma', 'magneto', 'magus', 'major_mapleleaf', 'makkari', 'malcolm_colcord', 'malice', 'mandarin', 'mandrill', 'mandroid', 'manta', 'mantis', 'marauders', 'maria_hill', 'mariko_yashida', 'marrow', 'marten_broadcloak', 'martin_li', 'marvel_apes', 'marvel_boy', 'marvel_zombies', 'marvex', 'masked_marvel', 'masque', 'master_chief', 'master_mold', 'mastermind', 'mathemanic', 'matthew_murdock', 'mattie_franklin', 'mauler', 'maverick', 'maximus', 'may_parker', 'medusa', 'meggan', 'meltdown', 'menace', 'mentallo', 'mentor', 'mephisto', 'mephistopheles', 'mercury', 'mesmero', 'metal_master', 'meteorite', 'micromacro', 'microbe', 'microchip', 'micromax', 'midnight', 'miek', 'mikhail_rasputin', 'millenium_guard', 'mimic', 'mindworm', 'miracleman', 'miss_america', 'mister_fear', 'mister_sinister', 'misty_knight', 'mockingbird', 'moira_mactaggert', 'mojo', 'mole_man', 'molecule_man', 'molly_hayes', 'molten_man', 'mongoose', 'mongu', 'monster_badoon', 'moon_knight', 'moondragon', 'moonstone', 'morbius', 'mordo', 'morg', 'morgan_stark', 'morlocks', 'morlun', 'morph', 'mother_askani', 'mulholland_black', 'multiple_man', 'mysterio', 'mystique', 'namor', 'namora', 'namorita', 'naoko', 'natasha_romanoff', 'nebula', 'nehzno', 'nekra', 'nemesis', 'network', 'newton_destine', 'next_avengers', 'nextwave', 'nick_fury', 'nico_minoru', 'nicolaos', 'night_nurse', 'night_thrasher', 'nightcrawler', 'nighthawk', 'nightmare', 'nightshade', 'nitro', 'nocturne', 'nomad', 'norman_osborn', 'norrin_radd', 'northstar', 'nova', 'nuke', 'obadiah_stane', 'odin', 'ogun', 'old_lace', 'omega_flight', 'omega_red', 'omega_sentinel', 'onslaught', 'oracle', 'orphan', 'otto_octavius', 'outlaw_kid', 'overlord', 'owl', 'ozymandias', 'paibok', 'paladin', 'pandemic', 'paper_doll', 'patch', 'patriot', 'payback', 'penance', 'pepper_potts', 'pestilence', 'pet_avengers', 'pete_wisdom', 'peter_parker', 'peter_quill', 'phalanx', 'phantom_reporter', 'phil_sheldon', 'photon', 'piledriver', 'pixie', 'plazm', 'polaris', 'post', 'power_man', 'power_pack', 'praxagora', 'preak', 'pretty_boy', 'pride', 'prima', 'princess_powerful', 'prism', 'prodigy', 'proemial_gods', 'professor_monster', 'proteus', 'proudstar', 'prowler', 'psylocke', 'psynapse', 'puck', 'puff_adder', 'puma', 'punisher', 'puppet_master', 'purifiers', 'purple_man', 'pyro', 'quasar', 'quasimodo', 'queen_noir', 'quentin_quire', 'quicksilver', 'rachel_grey', 'radioactive_man', 'rafael_vega', 'rage', 'raider', 'randall', 'randall_flagg', 'random', 'rattler', 'ravenous', 'rawhide_kid', 'raza', 'reaper', 'reavers', 'red_ghost', 'red_hulk', 'red_shift', 'red_skull', 'red_wolf', 'redwing', 'reptil', 'retro_girl', 'revanche', 'rhino', 'rhodey', 'richard_fisk', 'rick_jones', 'ricochet', 'rictor', 'riptide', 'risque', 'robbie_robertson', 'robin_chapel', 'rocket_raccoon', 'rocket_racer', 'rockslide', 'rogue', 'roland_deschain', 'romulus', 'ronan', 'roughhouse', 'roulette', 'roxanne_simpson', 'rumiko_fujikawa', 'runaways', 'sabra', 'sabretooth', 'sage', 'sally_floyd', 'salo', 'sandman', 'santa_claus', 'saracen', 'sasquatch', 'satana', 'sauron', 'scalphunter', 'scarecrow', 'scarlet_spider', 'scarlet_witch', 'scorpion', 'scourge', 'scrambler', 'scream', 'screwball', 'sebastian_shaw', 'secret_warriors', 'selene', 'senator_kelly', 'sentinel', 'sentinels', 'sentry', 'ser_duncan', 'serpent_society', 'sersi', 'shadow_king', 'shadowcat', 'shaman', 'shape', 'shard', 'sharon_carter', 'sharon_ventura', 'shatterstar', 'shen', 'sheva_callister', 'shinko_yamashiro', 'shinobi_shaw', 'shiva', 'shiver_man', 'shocker', 'shockwave', 'shooting_star', 'shotgun', 'shriek', 'silhouette', 'silk_fever', 'silver_centurion', 'silver_fox', 'silver_sable', 'silver_samurai', 'silver_surfer', 'silverclaw', 'silvermane', 'sinister_six', 'sir_ram', 'siren', 'sister_grimm', 'skaar', 'skin', 'skreet', 'skrulls', 'skullbuster', 'slapstick', 'slayback', 'sleeper', 'sleepwalker', 'slipstream', 'slyde', 'smasher', 'smiling_tiger', 'snowbird', 'solo', 'songbird', 'spacker_dave', 'spectrum', 'speed', 'speed_demon', 'speedball', 'spencer_smythe', 'sphinx', 'spiral', 'spirit', 'spitfire', 'spot', 'sprite', 'spyke', 'squadron_sinister', 'squadron_supreme', 'squirrel_girl', 'star_brand', 'starbolt', 'stardust', 'starfox', 'starhawk', 'starjammers', 'stark_industries', 'stature', 'steel_serpent', 'stellaris', 'stepford_cuckoos', 'stephen_strange', 'steve_rogers', 'stick', 'stingray', 'stone_men', 'storm', 'stranger', 'strong_guy', 'stryfe', 'sue_storm', 'sugar_man', 'sumo', 'sunfire', 'sunset_bain', 'sunspot', 'supernaut', 'supreme_intelligence', 'surge', 'susan_delgado', 'swarm', 'sway', 'switch', 'swordsman', 'synch', 'tag', 'talisman', 'talkback', 'talon', 'talos', 'tana_nile', 'tarantula', 'tarot', 'taskmaster', 'tattoo', 'ted_forrester', 'tempest', 'tenebrous', 'terrax', 'terror', 'texas_twister', 'thaddeus_ross', 'thanos', 'the_anarchist', 'the_call', 'the_captain', 'the_enforcers', 'the_executioner', 'the_fallen', 'the_fury', 'the_hand', 'the_hood', 'the_hunter', 'the_initiative', 'the_leader', 'the_liberteens', 'the_order', 'the_phantom', 'the_professor', 'the_renegades', 'the_santerians', 'the_spike', 'the_stranger', 'the_twelve', 'the_watchers', 'thena', 'thing', 'thor', 'thor_girl', 'thunderball', 'thunderbird', 'thunderbolt', 'thunderbolt_ross', 'thunderbolts', 'thundra', 'tiger_shark', 'tigra', 'timeslip', 'tinkerer', 'titania', 'titanium_man', 'toad', 'toad_men', 'tomas', 'tombstone', 'tomorrow_man', 'tony_stark', 'toro', 'toxin', 'trauma', 'triathlon', 'trish_tilby', 'triton', 'true_believers', 'turbo', 'tusk', 'tyger_tiger', 'typhoid_mary', 'tyrannus', 'ulik', 'ultimates', 'ultimatum', 'ultimo', 'ultragirl', 'ultron', 'umar', 'unicorn', 'union_jack', 'unus', 'valeria_richards', 'valkyrie', 'vampiro', 'vance_astro', 'vanisher', 'vapor', 'vargas', 'vector', 'veda', 'vengeance', 'venom', 'venus', 'vermin', 'vertigo', 'victor_mancha', 'vin_gonzales', 'vindicator', 'violations', 'viper', 'virginia_dare', 'vision', 'vivisector', 'vulcan', 'vulture', 'wallflower', 'wallop', 'wallow', 'war_machine', 'warbird', 'warbound', 'warhawk', 'warlock', 'warpath', 'warstar', 'wasp', 'weapon_omega', 'wendell_rand', 'wendell_vaughn', 'wendigo', 'whiplash', 'whirlwind', 'whistler', 'white_queen', 'white_tiger', 'whizzer', 'wiccan', 'wild_child', 'wild_pack', 'wildside', 'william_stryker', 'wilson_fisk', 'wind_dancer', 'winter_soldier', 'wither', 'wolf_cub', 'wolfpack', 'wolfsbane', 'wolverine', 'wonder_man', 'wong', 'wraith', 'wrecker', 'wrecking_crew', 'xavin', 'xorn', 'yellow_claw', 'yellowjacket', 'young_avengers', 'zaladane', 'zaran', 'zarda', 'zarek', 'zeigeist', 'zemo', 'zodiak', 'zombie', 'zuras', 'zzzax', ]; ================================================ FILE: drizzle-kit/src/utils.ts ================================================ import type { RunResult } from 'better-sqlite3'; import chalk from 'chalk'; import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from 'fs'; import { join } from 'path'; import { parse } from 'url'; import type { NamedWithSchema } from './cli/commands/migrate'; import { info } from './cli/views'; import { assertUnreachable, snapshotVersion } from './global'; import type { Dialect } from './schemaValidator'; import { backwardCompatibleGelSchema } from './serializer/gelSchema'; import { backwardCompatibleMysqlSchema } from './serializer/mysqlSchema'; import { backwardCompatiblePgSchema } from './serializer/pgSchema'; import { backwardCompatibleSingleStoreSchema } from './serializer/singlestoreSchema'; import { backwardCompatibleSqliteSchema } from './serializer/sqliteSchema'; import type { ProxyParams } from './serializer/studio'; export type Proxy = (params: ProxyParams) => Promise; export type TransactionProxy = (queries: { sql: string; method?: ProxyParams['method'] }[]) => Promise; export type DB = { query: (sql: string, params?: any[]) => Promise; }; export type SQLiteDB = { query: (sql: string, params?: any[]) => Promise; run(query: string): Promise; }; export type LibSQLDB = { query: (sql: string, params?: any[]) => Promise; run(query: string): Promise; batchWithPragma?(queries: string[]): Promise; }; export const copy = (it: T): T => { return JSON.parse(JSON.stringify(it)); }; export const objectValues = (obj: T): Array => { return Object.values(obj); }; export const assertV1OutFolder = (out: string) => { if (!existsSync(out)) return; const oldMigrationFolders = readdirSync(out).filter( (it) => it.length === 14 && /^\d+$/.test(it), ); if (oldMigrationFolders.length > 0) { console.log( `Your migrations folder format is outdated, please run ${ chalk.green.bold( `drizzle-kit up`, ) }`, ); process.exit(1); } }; export type Journal = { version: string; dialect: Dialect; entries: { idx: number; version: string; when: number; tag: string; breakpoints: boolean; }[]; }; export const dryJournal = (dialect: Dialect): Journal => { return { version: snapshotVersion, dialect, entries: [], }; }; // export const preparePushFolder = (dialect: Dialect) => { // const out = ".drizzle"; // let snapshot: string = ""; // if (!existsSync(join(out))) { // mkdirSync(out); // snapshot = JSON.stringify(dryJournal(dialect)); // } else { // snapshot = readdirSync(out)[0]; // } // return { snapshot }; // }; export const prepareOutFolder = (out: string, dialect: Dialect) => { const meta = join(out, 'meta'); const journalPath = join(meta, '_journal.json'); if (!existsSync(join(out, 'meta'))) { mkdirSync(meta, { recursive: true }); writeFileSync(journalPath, JSON.stringify(dryJournal(dialect))); } const journal = JSON.parse(readFileSync(journalPath).toString()); const snapshots = readdirSync(meta) .filter((it) => !it.startsWith('_')) .map((it) => join(meta, it)); snapshots.sort(); return { meta, snapshots, journal }; }; const validatorForDialect = (dialect: Dialect) => { switch (dialect) { case 'postgresql': return { validator: backwardCompatiblePgSchema, version: 7 }; case 'sqlite': return { validator: backwardCompatibleSqliteSchema, version: 6 }; case 'turso': return { validator: backwardCompatibleSqliteSchema, version: 6 }; case 'mysql': return { validator: backwardCompatibleMysqlSchema, version: 5 }; case 'singlestore': return { validator: backwardCompatibleSingleStoreSchema, version: 1 }; case 'gel': return { validator: backwardCompatibleGelSchema, version: 1 }; } }; export const validateWithReport = (snapshots: string[], dialect: Dialect) => { // ✅ check if drizzle-kit can handle snapshot version // ✅ check if snapshot is of the last version // ✅ check if id of the snapshot is valid // ✅ collect {} of prev id -> snapshotName[], if there's more than one - tell about collision const { validator, version } = validatorForDialect(dialect); const result = snapshots.reduce( (accum, it) => { const raw = JSON.parse(readFileSync(`./${it}`).toString()); accum.rawMap[it] = raw; if (raw['version'] && Number(raw['version']) > version) { console.log( info( `${it} snapshot is of unsupported version, please update drizzle-kit`, ), ); process.exit(0); } const result = validator.safeParse(raw); if (!result.success) { accum.malformed.push(it); return accum; } const snapshot = result.data; if (snapshot.version !== String(version)) { accum.nonLatest.push(it); return accum; } // only if latest version here const idEntry = accum.idsMap[snapshot['prevId']] ?? { parent: it, snapshots: [], }; idEntry.snapshots.push(it); accum.idsMap[snapshot['prevId']] = idEntry; return accum; }, { malformed: [], nonLatest: [], idToNameMap: {}, idsMap: {}, rawMap: {}, } as { malformed: string[]; nonLatest: string[]; idsMap: Record; rawMap: Record; }, ); return result; }; export const prepareMigrationFolder = ( outFolder: string = 'drizzle', dialect: Dialect, ) => { const { snapshots, journal } = prepareOutFolder(outFolder, dialect); const report = validateWithReport(snapshots, dialect); if (report.nonLatest.length > 0) { console.log( report.nonLatest .map((it) => { return `${it}/snapshot.json is not of the latest version`; }) .concat(`Run ${chalk.green.bold(`drizzle-kit up`)}`) .join('\n'), ); process.exit(0); } if (report.malformed.length) { const message = report.malformed .map((it) => { return `${it} data is malformed`; }) .join('\n'); console.log(message); } const collisionEntries = Object.entries(report.idsMap).filter( (it) => it[1].snapshots.length > 1, ); const message = collisionEntries .map((it) => { const data = it[1]; return `[${ data.snapshots.join( ', ', ) }] are pointing to a parent snapshot: ${data.parent}/snapshot.json which is a collision.`; }) .join('\n') .trim(); if (message) { console.log(chalk.red.bold('Error:'), message); } const abort = report.malformed.length!! || collisionEntries.length > 0; if (abort) { process.exit(0); } return { snapshots, journal }; }; export const prepareMigrationMeta = ( schemas: { from: string; to: string }[], tables: { from: NamedWithSchema; to: NamedWithSchema }[], columns: { from: { table: string; schema: string; column: string }; to: { table: string; schema: string; column: string }; }[], ) => { const _meta = { schemas: {} as Record, tables: {} as Record, columns: {} as Record, }; schemas.forEach((it) => { const from = schemaRenameKey(it.from); const to = schemaRenameKey(it.to); _meta.schemas[from] = to; }); tables.forEach((it) => { const from = tableRenameKey(it.from); const to = tableRenameKey(it.to); _meta.tables[from] = to; }); columns.forEach((it) => { const from = columnRenameKey(it.from.table, it.from.schema, it.from.column); const to = columnRenameKey(it.to.table, it.to.schema, it.to.column); _meta.columns[from] = to; }); return _meta; }; export const schemaRenameKey = (it: string) => { return it; }; export const tableRenameKey = (it: NamedWithSchema) => { const out = it.schema ? `"${it.schema}"."${it.name}"` : `"${it.name}"`; return out; }; export const columnRenameKey = ( table: string, schema: string, column: string, ) => { const out = schema ? `"${schema}"."${table}"."${column}"` : `"${table}"."${column}"`; return out; }; export const kloudMeta = () => { return { pg: [5], mysql: [] as number[], sqlite: [] as number[], }; }; export const normaliseSQLiteUrl = ( it: string, type: 'libsql' | 'better-sqlite', ) => { if (type === 'libsql') { if (it.startsWith('file:')) { return it; } try { const url = parse(it); if (url.protocol === null) { return `file:${it}`; } return it; } catch (e) { return `file:${it}`; } } if (type === 'better-sqlite') { if (it.startsWith('file:')) { return it.substring(5); } return it; } assertUnreachable(type); }; export const normalisePGliteUrl = ( it: string, ) => { if (it.startsWith('file:')) { return it.substring(5); } return it; }; export function isPgArrayType(sqlType: string) { return sqlType.match(/.*\[\d*\].*|.*\[\].*/g) !== null; } export function findAddedAndRemoved(columnNames1: string[], columnNames2: string[]) { const set1 = new Set(columnNames1); const set2 = new Set(columnNames2); const addedColumns = columnNames2.filter((it) => !set1.has(it)); const removedColumns = columnNames1.filter((it) => !set2.has(it)); return { addedColumns, removedColumns }; } export function escapeSingleQuotes(str: string) { return str.replace(/'/g, "''"); } export function unescapeSingleQuotes(str: string, ignoreFirstAndLastChar: boolean) { const regex = ignoreFirstAndLastChar ? /(? { const issues = analyzeImports({ basePath: '.', localPaths: ['src'], whiteList: [ '@drizzle-team/brocli', 'json-diff', 'path', 'fs', 'fs/*', 'url', 'zod', 'node:*', 'hono', 'glob', 'hono/*', 'hono/**/*', '@hono/*', 'crypto', 'hanji', 'chalk', 'dotenv/config', 'camelcase', 'semver', 'env-paths', ], entry: 'src/cli/index.ts', logger: true, ignoreTypes: true, }).issues; const chainToString = (chains: ChainLink[]) => { if (chains.length === 0) throw new Error(); let out = chains[0]!.file + '\n'; let indentation = 0; for (let chain of chains) { out += ' '.repeat(indentation) + '└' + chain.import + ` ${chalk.gray(chain.file)}\n`; indentation += 1; } return out; }; console.log(); for (const issue of issues) { console.log(chalk.red(issue.imports.map((it) => it.name).join('\n'))); console.log(issue.accessChains.map((it) => chainToString(it)).join('\n')); } assert.equal(issues.length, 0); }); ================================================ FILE: drizzle-kit/tests/cli/d1http.config.ts ================================================ import { defineConfig } from '../../src'; export default defineConfig({ schema: './schema.ts', dialect: 'sqlite', driver: 'd1-http', dbCredentials: { accountId: 'accid', databaseId: 'dbid', token: 'token', }, }); ================================================ FILE: drizzle-kit/tests/cli/drizzle.config.ts ================================================ import { defineConfig } from '../../src'; export default defineConfig({ schema: './schema.ts', dialect: 'postgresql', dbCredentials: { url: 'postgresql://postgres:postgres@127.0.0.1:5432/db', }, }); ================================================ FILE: drizzle-kit/tests/cli/durable-sqlite.config.ts ================================================ import { defineConfig } from '../../src'; export default defineConfig({ schema: './schema.ts', dialect: 'sqlite', driver: 'durable-sqlite', }); ================================================ FILE: drizzle-kit/tests/cli/expo.config.ts ================================================ import { defineConfig } from '../../src'; export default defineConfig({ schema: './schema.ts', dialect: 'sqlite', driver: 'expo', }); ================================================ FILE: drizzle-kit/tests/cli/postgres.config.ts ================================================ import { defineConfig } from '../../src'; export default defineConfig({ schema: './schema.ts', dialect: 'postgresql', dbCredentials: { host: '127.0.0.1', port: 5432, user: 'postgresql', password: 'postgres', database: 'db', }, }); ================================================ FILE: drizzle-kit/tests/cli/postgres2.config.ts ================================================ import { defineConfig } from '../../src'; export default defineConfig({ schema: './schema.ts', dialect: 'postgresql', dbCredentials: { host: '127.0.0.1', port: 5432, user: 'postgresql', password: 'postgres', database: 'db', }, migrations: { schema: 'custom', table: 'custom', }, }); ================================================ FILE: drizzle-kit/tests/cli/schema.ts ================================================ // mock ================================================ FILE: drizzle-kit/tests/cli/turso.config.ts ================================================ import { defineConfig } from '../../src'; export default defineConfig({ schema: './schema.ts', dialect: 'turso', dbCredentials: { url: 'turso.dev', authToken: 'token', }, }); ================================================ FILE: drizzle-kit/tests/cli-export.test.ts ================================================ import { test as brotest } from '@drizzle-team/brocli'; import { assert, expect, test } from 'vitest'; import { exportRaw } from '../src/cli/schema'; // good: // #1 drizzle-kit export --dialect=postgresql --schema=schema.ts // #3 drizzle-kit export // #3 drizzle-kit export --config=drizzle1.config.ts // errors: // #1 drizzle-kit export --schema=src/schema.ts // #2 drizzle-kit export --dialect=postgresql // #3 drizzle-kit export --dialect=postgresql2 // #4 drizzle-kit export --config=drizzle.config.ts --schema=schema.ts // #5 drizzle-kit export --config=drizzle.config.ts --dialect=postgresql test('export #1', async (t) => { const res = await brotest( exportRaw, '--dialect=postgresql --schema=schema.ts', ); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', schema: 'schema.ts', sql: true, }); }); test('export #2', async (t) => { const res = await brotest(exportRaw, ''); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', schema: './schema.ts', sql: true, }); }); // custom config path test('export #3', async (t) => { const res = await brotest(exportRaw, '--config=expo.config.ts'); assert.equal(res.type, 'handler'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'sqlite', schema: './schema.ts', sql: true, }); }); // --- errors --- test('err #1', async (t) => { const res = await brotest(exportRaw, '--schema=src/schema.ts'); assert.equal(res.type, 'error'); }); test('err #2', async (t) => { const res = await brotest(exportRaw, '--dialect=postgresql'); assert.equal(res.type, 'error'); }); test('err #3', async (t) => { const res = await brotest(exportRaw, '--dialect=postgresql2'); assert.equal(res.type, 'error'); }); test('err #4', async (t) => { const res = await brotest(exportRaw, '--config=drizzle.config.ts --schema=schema.ts'); assert.equal(res.type, 'error'); }); test('err #5', async (t) => { const res = await brotest(exportRaw, '--config=drizzle.config.ts --dialect=postgresql'); assert.equal(res.type, 'error'); }); ================================================ FILE: drizzle-kit/tests/cli-generate.test.ts ================================================ import { test as brotest } from '@drizzle-team/brocli'; import { assert, expect, test } from 'vitest'; import { generate } from '../src/cli/schema'; // good: // #1 drizzle-kit generate --dialect=postgresql --schema=schema.ts // #2 drizzle-kit generate --dialect=postgresql --schema=schema.ts --out=out // #3 drizzle-kit generate // #4 drizzle-kit generate --custom // #5 drizzle-kit generate --name=custom // #6 drizzle-kit generate --prefix=timestamp // #7 drizzle-kit generate --prefix=timestamp --name=custom --custom // #8 drizzle-kit generate --config=drizzle1.config.ts // #9 drizzle-kit generate --dialect=postgresql --schema=schema.ts --out=out --prefix=timestamp --name=custom --custom // errors: // #1 drizzle-kit generate --schema=src/schema.ts // #2 drizzle-kit generate --dialect=postgresql // #3 drizzle-kit generate --dialect=postgresql2 // #4 drizzle-kit generate --driver=expo // #5 drizzle-kit generate --dialect=postgresql --out=out // #6 drizzle-kit generate --config=drizzle.config.ts --out=out // #7 drizzle-kit generate --config=drizzle.config.ts --schema=schema.ts // #8 drizzle-kit generate --config=drizzle.config.ts --dialect=postgresql test('generate #1', async (t) => { const res = await brotest( generate, '--dialect=postgresql --schema=schema.ts', ); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', name: undefined, custom: false, prefix: 'index', breakpoints: true, schema: 'schema.ts', out: 'drizzle', bundle: false, casing: undefined, driver: undefined, }); }); test('generate #2', async (t) => { const res = await brotest( generate, '--dialect=postgresql --schema=schema.ts --out=out', ); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', name: undefined, custom: false, prefix: 'index', breakpoints: true, schema: 'schema.ts', out: 'out', bundle: false, casing: undefined, driver: undefined, }); }); test('generate #3', async (t) => { const res = await brotest(generate, ''); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', name: undefined, custom: false, prefix: 'index', breakpoints: true, schema: './schema.ts', out: 'drizzle', bundle: false, casing: undefined, driver: undefined, }); }); // config | pass through custom test('generate #4', async (t) => { const res = await brotest(generate, '--custom'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', name: undefined, custom: true, prefix: 'index', breakpoints: true, schema: './schema.ts', out: 'drizzle', bundle: false, casing: undefined, driver: undefined, }); }); // config | pass through name test('generate #5', async (t) => { const res = await brotest(generate, '--name=custom'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', name: 'custom', custom: false, prefix: 'index', breakpoints: true, schema: './schema.ts', out: 'drizzle', bundle: false, casing: undefined, driver: undefined, }); }); // config | pass through prefix test('generate #6', async (t) => { const res = await brotest(generate, '--prefix=timestamp'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', name: undefined, custom: false, prefix: 'timestamp', breakpoints: true, schema: './schema.ts', out: 'drizzle', bundle: false, casing: undefined, driver: undefined, }); }); // config | pass through name, prefix and custom test('generate #7', async (t) => { const res = await brotest( generate, '--prefix=timestamp --name=custom --custom', ); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', name: 'custom', custom: true, prefix: 'timestamp', breakpoints: true, schema: './schema.ts', out: 'drizzle', bundle: false, casing: undefined, driver: undefined, }); }); // custom config path test('generate #8', async (t) => { const res = await brotest(generate, '--config=expo.config.ts'); assert.equal(res.type, 'handler'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'sqlite', name: undefined, custom: false, prefix: 'index', breakpoints: true, schema: './schema.ts', out: 'drizzle', bundle: true, // expo driver casing: undefined, driver: 'expo', }); }); test('generate #9', async (t) => { const res = await brotest(generate, '--config=durable-sqlite.config.ts'); assert.equal(res.type, 'handler'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'sqlite', name: undefined, custom: false, prefix: 'index', breakpoints: true, schema: './schema.ts', out: 'drizzle', bundle: true, // expo driver casing: undefined, driver: 'durable-sqlite', }); }); // cli | pass through name, prefix and custom test('generate #9', async (t) => { const res = await brotest( generate, '--dialect=postgresql --schema=schema.ts --out=out --prefix=timestamp --name=custom --custom', ); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', name: 'custom', custom: true, prefix: 'timestamp', breakpoints: true, schema: 'schema.ts', out: 'out', bundle: false, casing: undefined, driver: undefined, }); }); // --- errors --- test('err #1', async (t) => { const res = await brotest(generate, '--schema=src/schema.ts'); assert.equal(res.type, 'error'); }); test('err #2', async (t) => { const res = await brotest(generate, '--dialect=postgresql'); assert.equal(res.type, 'error'); }); test('err #3', async (t) => { const res = await brotest(generate, '--dialect=postgresql2'); assert.equal(res.type, 'error'); }); test('err #4', async (t) => { const res = await brotest(generate, '--driver=expo'); assert.equal(res.type, 'error'); }); test('err #5', async (t) => { const res = await brotest(generate, '--dialect=postgresql --out=out'); assert.equal(res.type, 'error'); }); test('err #6', async (t) => { const res = await brotest(generate, '--config=drizzle.config.ts --out=out'); assert.equal(res.type, 'error'); }); test('err #7', async (t) => { const res = await brotest(generate, '--config=drizzle.config.ts --schema=schema.ts'); assert.equal(res.type, 'error'); }); test('err #8', async (t) => { const res = await brotest(generate, '--config=drizzle.config.ts --dialect=postgresql'); assert.equal(res.type, 'error'); }); ================================================ FILE: drizzle-kit/tests/cli-migrate.test.ts ================================================ import { test as brotest } from '@drizzle-team/brocli'; import { assert, expect, test } from 'vitest'; import { migrate } from '../src/cli/schema'; // good: // #1 drizzle-kit generate // #2 drizzle-kit generate --config=turso.config.ts // #3 drizzle-kit generate --config=d1http.config.ts // #4 drizzle-kit generate --config=postgres.config.ts ## spread connection params // #5 drizzle-kit generate --config=drizzle2.config.ts ## custom schema and table for migrations journal // errors: // #1 drizzle-kit generate --config=expo.config.ts // TODO: missing required params in config? test('migrate #1', async (t) => { const res = await brotest(migrate, ''); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', out: 'drizzle', credentials: { url: 'postgresql://postgres:postgres@127.0.0.1:5432/db', }, schema: undefined, // drizzle migrations table schema table: undefined, // drizzle migrations table name }); }); test('migrate #2', async (t) => { const res = await brotest(migrate, '--config=turso.config.ts'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'turso', out: 'drizzle', credentials: { authToken: 'token', url: 'turso.dev', }, schema: undefined, // drizzle migrations table schema table: undefined, // drizzle migrations table name }); }); test('migrate #3', async (t) => { const res = await brotest(migrate, '--config=d1http.config.ts'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'sqlite', out: 'drizzle', credentials: { driver: 'd1-http', accountId: 'accid', databaseId: 'dbid', token: 'token', }, schema: undefined, // drizzle migrations table schema table: undefined, // drizzle migrations table name }); }); test('migrate #4', async (t) => { const res = await brotest(migrate, '--config=postgres.config.ts'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', out: 'drizzle', credentials: { database: 'db', host: '127.0.0.1', password: 'postgres', port: 5432, user: 'postgresql', }, schema: undefined, // drizzle migrations table schema table: undefined, // drizzle migrations table name }); }); // catched a bug test('migrate #5', async (t) => { const res = await brotest(migrate, '--config=postgres2.config.ts'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', out: 'drizzle', credentials: { database: 'db', host: '127.0.0.1', password: 'postgres', port: 5432, user: 'postgresql', }, schema: 'custom', // drizzle migrations table schema table: 'custom', // drizzle migrations table name }); }); // --- errors --- test('err #1', async (t) => { const res = await brotest(migrate, '--config=expo.config.ts'); assert.equal(res.type, 'error'); }); ================================================ FILE: drizzle-kit/tests/cli-push.test.ts ================================================ import { test as brotest } from '@drizzle-team/brocli'; import { assert, expect, test } from 'vitest'; import { push } from '../src/cli/schema'; // good: // #1 drizzle-kit push // #2 drizzle-kit push --config=turso.config.ts // #3 drizzle-kit push --config=d1http.config.ts // #4 drizzle-kit push --config=postgres.config.ts ## spread connection params // #5 drizzle-kit push --config=drizzle2.config.ts ## custom schema and table for migrations journal // errors: // #1 drizzle-kit push --config=expo.config.ts // TODO: missing required params in config? test('push #1', async (t) => { const res = await brotest(push, ''); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', credentials: { url: 'postgresql://postgres:postgres@127.0.0.1:5432/db', }, force: false, schemaPath: './schema.ts', schemasFilter: ['public'], tablesFilter: [], entities: undefined, strict: false, verbose: false, casing: undefined, }); }); test('push #2', async (t) => { const res = await brotest(push, '--config=turso.config.ts'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'turso', credentials: { authToken: 'token', url: 'turso.dev', }, force: false, schemaPath: './schema.ts', schemasFilter: ['public'], tablesFilter: [], strict: false, verbose: false, casing: undefined, }); }); test('push #3', async (t) => { const res = await brotest(push, '--config=d1http.config.ts'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'sqlite', credentials: { driver: 'd1-http', accountId: 'accid', databaseId: 'dbid', token: 'token', }, force: false, schemaPath: './schema.ts', schemasFilter: ['public'], tablesFilter: [], strict: false, verbose: false, casing: undefined, }); }); test('push #4', async (t) => { const res = await brotest(push, '--config=postgres.config.ts'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', credentials: { database: 'db', host: '127.0.0.1', password: 'postgres', port: 5432, user: 'postgresql', }, force: false, schemaPath: './schema.ts', schemasFilter: ['public'], tablesFilter: [], entities: undefined, strict: false, verbose: false, casing: undefined, }); }); // catched a bug test('push #5', async (t) => { const res = await brotest(push, '--config=postgres2.config.ts'); if (res.type !== 'handler') assert.fail(res.type, 'handler'); expect(res.options).toStrictEqual({ dialect: 'postgresql', credentials: { database: 'db', host: '127.0.0.1', password: 'postgres', port: 5432, user: 'postgresql', }, schemaPath: './schema.ts', schemasFilter: ['public'], tablesFilter: [], strict: false, entities: undefined, force: false, verbose: false, casing: undefined, }); }); // --- errors --- test('err #1', async (t) => { const res = await brotest(push, '--config=expo.config.ts'); assert.equal(res.type, 'error'); }); ================================================ FILE: drizzle-kit/tests/common.ts ================================================ import { test } from 'vitest'; export interface DialectSuite { /** * 1 statement | create column: * * id int primary key autoincrement */ columns1(): Promise; } export const run = (suite: DialectSuite) => { test('add columns #1', suite.columns1); }; // test("add columns #1", suite.columns1) ================================================ FILE: drizzle-kit/tests/indexes/common.ts ================================================ import { afterAll, beforeAll, test } from 'vitest'; export interface DialectSuite { simpleIndex(context?: any): Promise; vectorIndex(context?: any): Promise; indexesToBeTriggered(context?: any): Promise; } export const run = ( suite: DialectSuite, beforeAllFn?: (context: any) => Promise, afterAllFn?: (context: any) => Promise, ) => { let context: any = {}; beforeAll(beforeAllFn ? () => beforeAllFn(context) : () => {}); test('index #1: simple index', () => suite.simpleIndex(context)); test('index #2: vector index', () => suite.vectorIndex(context)); test('index #3: fields that should be triggered on generate and not triggered on push', () => suite.indexesToBeTriggered(context)); afterAll(afterAllFn ? () => afterAllFn(context) : () => {}); }; ================================================ FILE: drizzle-kit/tests/indexes/pg.test.ts ================================================ import { sql } from 'drizzle-orm'; import { index, pgTable, serial, text, vector } from 'drizzle-orm/pg-core'; import { JsonCreateIndexStatement } from 'src/jsonStatements'; import { PgSquasher } from 'src/serializer/pgSchema'; import { diffTestSchemas } from 'tests/schemaDiffer'; import { expect } from 'vitest'; import { DialectSuite, run } from './common'; const pgSuite: DialectSuite = { async vectorIndex() { const schema1 = { users: pgTable('users', { id: serial('id').primaryKey(), name: vector('name', { dimensions: 3 }), }), }; const schema2 = { users: pgTable( 'users', { id: serial('id').primaryKey(), embedding: vector('name', { dimensions: 3 }), }, (t) => ({ indx2: index('vector_embedding_idx') .using('hnsw', t.embedding.op('vector_ip_ops')) .with({ m: 16, ef_construction: 64 }), }), ), }; const { statements, sqlStatements } = await diffTestSchemas( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ schema: '', tableName: 'users', type: 'create_index_pg', data: { columns: [ { asc: true, expression: 'name', isExpression: false, nulls: 'last', opclass: 'vector_ip_ops', }, ], concurrently: false, isUnique: false, method: 'hnsw', name: 'vector_embedding_idx', where: undefined, with: { ef_construction: 64, m: 16, }, }, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `CREATE INDEX "vector_embedding_idx" ON "users" USING hnsw ("name" vector_ip_ops) WITH (m=16,ef_construction=64);`, ); }, async indexesToBeTriggered() { const schema1 = { users: pgTable( 'users', { id: serial('id').primaryKey(), name: text('name'), }, (t) => ({ indx: index('indx').on(t.name.desc()).concurrently(), indx1: index('indx1') .on(t.name.desc()) .where(sql`true`), indx2: index('indx2') .on(t.name.op('text_ops')) .where(sql`true`), indx3: index('indx3') .on(sql`lower(name)`) .where(sql`true`), }), ), }; const schema2 = { users: pgTable( 'users', { id: serial('id').primaryKey(), name: text('name'), }, (t) => ({ indx: index('indx').on(t.name.desc()), indx1: index('indx1') .on(t.name.desc()) .where(sql`false`), indx2: index('indx2') .on(t.name.op('test')) .where(sql`true`), indx3: index('indx3') .on(sql`lower(${t.id})`) .where(sql`true`), indx4: index('indx4') .on(sql`lower(id)`) .where(sql`true`), }), ), }; const { statements, sqlStatements } = await diffTestSchemas( schema1, schema2, [], ); expect(sqlStatements).toStrictEqual([ 'DROP INDEX "indx";', 'DROP INDEX "indx1";', 'DROP INDEX "indx2";', 'DROP INDEX "indx3";', 'CREATE INDEX "indx4" ON "users" USING btree (lower(id)) WHERE true;', 'CREATE INDEX "indx" ON "users" USING btree ("name" DESC NULLS LAST);', 'CREATE INDEX "indx1" ON "users" USING btree ("name" DESC NULLS LAST) WHERE false;', 'CREATE INDEX "indx2" ON "users" USING btree ("name" test) WHERE true;', 'CREATE INDEX "indx3" ON "users" USING btree (lower("id")) WHERE true;', ]); }, async simpleIndex() { const schema1 = { users: pgTable('users', { id: serial('id').primaryKey(), name: text('name'), }), }; const schema2 = { users: pgTable( 'users', { id: serial('id').primaryKey(), name: text('name'), }, (t) => ({ indx: index() .on(t.name.desc(), t.id.asc().nullsLast()) .with({ fillfactor: 70 }) .where(sql`select 1`), indx1: index('indx1') .using('hash', t.name.desc(), sql`${t.name}`) .with({ fillfactor: 70 }), }), ), }; const { statements, sqlStatements } = await diffTestSchemas( schema1, schema2, [], ); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ schema: '', tableName: 'users', type: 'create_index_pg', data: { columns: [ { asc: false, expression: 'name', isExpression: false, nulls: 'last', opclass: '', }, { asc: true, expression: 'id', isExpression: false, nulls: 'last', opclass: '', }, ], concurrently: false, isUnique: false, method: 'btree', name: 'users_name_id_index', where: 'select 1', with: { fillfactor: 70, }, }, // data: 'users_name_id_index;name,false,last,undefined,,id,true,last,undefined;false;false;btree;select 1;{"fillfactor":70}', }); expect(statements[1]).toStrictEqual({ schema: '', tableName: 'users', type: 'create_index_pg', data: { columns: [ { asc: false, expression: 'name', isExpression: false, nulls: 'last', opclass: '', }, { asc: true, expression: '"name"', isExpression: true, nulls: 'last', opclass: '', }, ], concurrently: false, isUnique: false, method: 'hash', name: 'indx1', where: undefined, with: { fillfactor: 70, }, }, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `CREATE INDEX "users_name_id_index" ON "users" USING btree ("name" DESC NULLS LAST,"id") WITH (fillfactor=70) WHERE select 1;`, ); expect(sqlStatements[1]).toBe( `CREATE INDEX "indx1" ON "users" USING hash ("name" DESC NULLS LAST,"name") WITH (fillfactor=70);`, ); }, }; run(pgSuite); ================================================ FILE: drizzle-kit/tests/introspect/gel.ext.test.ts ================================================ import Docker from 'dockerode'; import { drizzle, GelJsDatabase } from 'drizzle-orm/gel'; import fs from 'fs'; import createClient, { type Client } from 'gel'; import getPort from 'get-port'; import { introspectGelToFile } from 'tests/schemaDiffer'; import { v4 as uuidV4 } from 'uuid'; import { afterAll, beforeAll, expect, test } from 'vitest'; import 'zx/globals'; if (!fs.existsSync('tests/introspect/gel')) { fs.mkdirSync('tests/introspect/gel'); } $.quiet = true; const ENABLE_LOGGING = false; let client: Client; let db: GelJsDatabase; const tlsSecurity: string = 'insecure'; let dsn: string; let container: Docker.Container | undefined; async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { const docker = new Docker(); const port = await getPort({ port: 5656 }); const image = 'geldata/gel:6'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); const gelContainer = await docker.createContainer({ Image: image, Env: [ 'GEL_CLIENT_SECURITY=insecure_dev_mode', 'GEL_SERVER_SECURITY=insecure_dev_mode', 'GEL_CLIENT_TLS_SECURITY=no_host_verification', 'GEL_SERVER_PASSWORD=password', ], name: `drizzle-integration-tests-${uuidV4()}`, HostConfig: { AutoRemove: true, PortBindings: { '5656/tcp': [{ HostPort: `${port}` }], }, }, }); await gelContainer.start(); return { connectionString: `gel://admin:password@localhost:${port}/main`, container: gelContainer }; } function sleep(ms: number) { return new Promise((resolve) => setTimeout(resolve, ms)); } beforeAll(async () => { let connectionString; if (process.env['GEL_CONNECTION_STRING']) { connectionString = process.env['GEL_CONNECTION_STRING']; } else { const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); connectionString = conStr; container = contrainerObj; } await sleep(15 * 1000); client = createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); db = drizzle(client, { logger: ENABLE_LOGGING }); dsn = connectionString; }); afterAll(async () => { await client?.close().catch(console.error); await container?.stop().catch(console.error); }); test('basic introspect test', async () => { await $`pnpm gel query 'CREATE EXTENSION pgcrypto VERSION "1.3"; CREATE EXTENSION auth VERSION "1.0"; CREATE TYPE default::User { CREATE REQUIRED LINK identity: ext::auth::Identity; CREATE REQUIRED PROPERTY email: std::str; CREATE REQUIRED PROPERTY username: std::str; }; CREATE GLOBAL default::current_user := (std::assert_single((SELECT default::User { id, username, email } FILTER (.identity = GLOBAL ext::auth::ClientTokenIdentity) )));' --tls-security=${tlsSecurity} --dsn=${dsn}`; const path = await introspectGelToFile( client, 'basic-ext-introspect', ['ext::auth', 'public'], ); const result = await $`pnpm exec tsc --noEmit --skipLibCheck ${path}`.nothrow(true); expect(result.exitCode).toBe(0); fs.rmSync(path); }); ================================================ FILE: drizzle-kit/tests/introspect/gel.test.ts ================================================ import Docker from 'dockerode'; import { drizzle, GelJsDatabase } from 'drizzle-orm/gel'; import fs from 'fs'; import createClient, { type Client } from 'gel'; import getPort from 'get-port'; import { introspectGelToFile } from 'tests/schemaDiffer'; import { v4 as uuidV4 } from 'uuid'; import { afterAll, beforeAll, expect, test } from 'vitest'; import 'zx/globals'; if (!fs.existsSync('tests/introspect/gel')) { fs.mkdirSync('tests/introspect/gel'); } $.quiet = true; const ENABLE_LOGGING = false; let client: Client; let db: GelJsDatabase; const tlsSecurity: string = 'insecure'; let dsn: string; let container: Docker.Container | undefined; async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { const docker = new Docker(); const port = await getPort({ port: 5656 }); const image = 'geldata/gel:6.0'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); const gelContainer = await docker.createContainer({ Image: image, Env: [ 'GEL_CLIENT_SECURITY=insecure_dev_mode', 'GEL_SERVER_SECURITY=insecure_dev_mode', 'GEL_CLIENT_TLS_SECURITY=no_host_verification', 'GEL_SERVER_PASSWORD=password', ], name: `drizzle-integration-tests-${uuidV4()}`, HostConfig: { AutoRemove: true, PortBindings: { '5656/tcp': [{ HostPort: `${port}` }], }, }, }); await gelContainer.start(); return { connectionString: `gel://admin:password@localhost:${port}/main`, container: gelContainer }; } function sleep(ms: number) { return new Promise((resolve) => setTimeout(resolve, ms)); } beforeAll(async () => { let connectionString; if (process.env['GEL_CONNECTION_STRING']) { connectionString = process.env['GEL_CONNECTION_STRING']; } else { const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); connectionString = conStr; container = contrainerObj; } await sleep(15 * 1000); client = createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); db = drizzle(client, { logger: ENABLE_LOGGING }); dsn = connectionString; }); afterAll(async () => { await client?.close().catch(console.error); await container?.stop().catch(console.error); }); test('basic introspect test', async () => { await $`pnpm gel query 'CREATE TYPE default::all_columns { create property stringColumn: str; create required property requiredStringColumn: str; create required property arrayRequiredStringColumn: array; create property defaultStringColumn: str { SET DEFAULT := "name"; }; create property boolColumn:bool; create required property requiredBoolColumn: bool; create required property arrayRequiredBoolColumn: array; create property defaultBoolColumn: bool { SET DEFAULT := true; }; create property int16Column:int16; create required property requiredint16Column: int16; create required property arrayRequiredint16Column: array; create property defaultint16Column: int16 { SET DEFAULT := 123; }; create property int32Column:int32; create required property requiredint32Column: int32; create required property arrayRequiredint32Column: array; create property defaultint32Column: int32 { SET DEFAULT := 123; }; create property int64Column:int64; create required property requiredint64Column: int64; create required property arrayRequiredint64Column: array; create property defaultint64Column: int64 { SET DEFAULT := 123; }; create property float32Column:float32; create required property requiredfloat32Column: float32; create required property arrayRequiredfloat32Column: array; create property defaultfloat32Column: float32 { SET DEFAULT := 123.123; }; create property float64Column:float64; create required property requiredfloat64Column: float64; create required property arrayRequiredfloat64Column: array; create property defaultfloat64Column: float64 { SET DEFAULT := 123.123; }; create property bigintColumn:bigint; create required property requiredbigintColumn: bigint; create required property arrayRequiredbigintColumn: array; create property defaultbigintColumn: bigint { SET DEFAULT := 123n; }; create property decimalColumn:decimal; create required property requireddecimalColumn: decimal; create required property arrayRequireddecimalColumn: array; create property defaultdecimalColumn: decimal { SET DEFAULT := 1.23n; }; create property uuidColumn:uuid; create required property requireduuidColumn: uuid; create required property arrayRequireduuidColumn: array; create property defaultuuidColumn: uuid { SET DEFAULT := uuid_generate_v4(); }; create property jsonColumn:json; create required property requiredjsonColumn: json; create required property arrayRequiredjsonColumn: array; create property defaultjsonColumn: json { SET DEFAULT := [1, 2]; }; create property datetimeColumn:datetime; create required property requireddatetimeColumn: datetime; create required property arrayRequireddatetimeColumn: array; create property defaultdatetimeColumn: datetime { SET DEFAULT := "2018-05-07T15:01:22.306916+00"; }; create property local_datetimeColumn:cal::local_datetime; create required property requiredlocal_datetimeColumn: cal::local_datetime; create required property arrayRequiredlocal_datetimeColumn: array; create property defaultlocal_datetimeColumn: cal::local_datetime { SET DEFAULT := "2018-05-07T15:01:22.306916"; }; create property local_dateColumn:cal::local_date; create required property requiredlocal_dateColumn: cal::local_date; create required property arrayRequiredlocal_dateColumn: array; create property defaultlocal_dateColumn: cal::local_date { SET DEFAULT := "2018-05-07"; }; create property local_timeColumn:cal::local_time; create required property requiredlocal_timeColumn: cal::local_time; create required property arrayRequiredlocal_timeColumn: array; create property defaultlocal_timeColumn: cal::local_time { SET DEFAULT := "15:01:22.306916"; }; create property durationColumn:duration; create required property requireddurationColumn: duration; create required property arrayRequireddurationColumn: array; create property defaultdurationColumn: duration { SET DEFAULT := "45.6 seconds"; }; create property relative_durationColumn:cal::relative_duration; create required property requiredrelative_durationColumn: cal::relative_duration; create required property arrayRequiredrelative_durationColumn: array; create property defaultrelative_durationColumn: cal::relative_duration { SET DEFAULT := "1 year"; }; create property dateDurationColumn:cal::date_duration; create required property requireddate_durationColumn: cal::date_duration; create required property arrayRequireddate_durationColumn: array; create property defaultdate_durationColumn: cal::date_duration { SET DEFAULT := "5 days"; }; create property bytesColumn:bytes; create required property requiredbytesColumn:bytes; create required property arrayRequiredbytesColumn: array; create property defaultbytesColumn: bytes { SET DEFAULT := b"Hello, world"; }; }' --tls-security=${tlsSecurity} --dsn=${dsn}`; const path = await introspectGelToFile( client, 'basic-introspect', ); const result = await $`pnpm exec tsc --noEmit --skipLibCheck ${path}`.nothrow(true); expect(result.exitCode).toBe(0); fs.rmSync(path); }); ================================================ FILE: drizzle-kit/tests/introspect/libsql.test.ts ================================================ import { createClient } from '@libsql/client'; import { sql } from 'drizzle-orm'; import { int, sqliteTable, sqliteView } from 'drizzle-orm/sqlite-core'; import fs from 'fs'; import { introspectLibSQLToFile, introspectMySQLToFile, introspectSQLiteToFile } from 'tests/schemaDiffer'; import { expect, test } from 'vitest'; if (!fs.existsSync('tests/introspect/libsql')) { fs.mkdirSync('tests/introspect/libsql'); } test('view #1', async () => { const turso = createClient({ url: ':memory:', }); const users = sqliteTable('users', { id: int('id') }); const testView = sqliteView('some_view', { id: int('id') }).as( sql`SELECT * FROM ${users}`, ); const schema = { users: users, testView, }; const { statements, sqlStatements } = await introspectLibSQLToFile( turso, schema, 'view-1', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); ================================================ FILE: drizzle-kit/tests/introspect/mysql.test.ts ================================================ import 'dotenv/config'; import Docker from 'dockerode'; import { SQL, sql } from 'drizzle-orm'; import { bigint, char, check, decimal, double, float, int, mediumint, mysqlEnum, mysqlTable, mysqlView, serial, smallint, text, tinyint, varchar, } from 'drizzle-orm/mysql-core'; import * as fs from 'fs'; import getPort from 'get-port'; import { Connection, createConnection } from 'mysql2/promise'; import { introspectMySQLToFile } from 'tests/schemaDiffer'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; let client: Connection; let mysqlContainer: Docker.Container; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => // eslint-disable-next-line @typescript-eslint/no-unsafe-argument docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } beforeAll(async () => { const connectionString = process.env.MYSQL_CONNECTION_STRING ?? await createDockerDB(); const sleep = 1000; let timeLeft = 20000; let connected = false; let lastError: unknown | undefined; do { try { client = await createConnection(connectionString); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); throw lastError; } }); afterAll(async () => { await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); }); beforeEach(async () => { await client.query(`drop database if exists \`drizzle\`;`); await client.query(`create database \`drizzle\`;`); await client.query(`use \`drizzle\`;`); }); if (!fs.existsSync('tests/introspect/mysql')) { fs.mkdirSync('tests/introspect/mysql'); } test('generated always column: link to another column', async () => { const schema = { users: mysqlTable('users', { id: int('id'), email: text('email'), generatedEmail: text('generatedEmail').generatedAlwaysAs( (): SQL => sql`\`email\``, ), }), }; const { statements, sqlStatements } = await introspectMySQLToFile( client, schema, 'generated-link-column', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('generated always column virtual: link to another column', async () => { const schema = { users: mysqlTable('users', { id: int('id'), email: text('email'), generatedEmail: text('generatedEmail').generatedAlwaysAs( (): SQL => sql`\`email\``, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await introspectMySQLToFile( client, schema, 'generated-link-column-virtual', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('Default value of character type column: char', async () => { const schema = { users: mysqlTable('users', { id: int('id'), sortKey: char('sortKey', { length: 255 }).default('0'), }), }; const { statements, sqlStatements } = await introspectMySQLToFile( client, schema, 'default-value-char-column', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('Default value of character type column: varchar', async () => { const schema = { users: mysqlTable('users', { id: int('id'), sortKey: varchar('sortKey', { length: 255 }).default('0'), }), }; const { statements, sqlStatements } = await introspectMySQLToFile( client, schema, 'default-value-varchar-column', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect checks', async () => { const schema = { users: mysqlTable('users', { id: serial('id'), name: varchar('name', { length: 255 }), age: int('age'), }, (table) => ({ someCheck: check('some_check', sql`${table.age} > 21`), })), }; const { statements, sqlStatements } = await introspectMySQLToFile( client, schema, 'introspect-checks', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('view #1', async () => { const users = mysqlTable('users', { id: int('id') }); const testView = mysqlView('some_view', { id: int('id') }).as( sql`select \`drizzle\`.\`users\`.\`id\` AS \`id\` from \`drizzle\`.\`users\``, ); const schema = { users: users, testView, }; const { statements, sqlStatements } = await introspectMySQLToFile( client, schema, 'view-1', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('view #2', async () => { const users = mysqlTable('some_users', { id: int('id') }); const testView = mysqlView('some_view', { id: int('id') }).algorithm('temptable').sqlSecurity('definer').as( sql`SELECT * FROM ${users}`, ); const schema = { users: users, testView, }; const { statements, sqlStatements } = await introspectMySQLToFile( client, schema, 'view-2', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('handle float type', async () => { const schema = { table: mysqlTable('table', { col1: float(), col2: float({ precision: 2 }), col3: float({ precision: 2, scale: 1 }), }), }; const { statements, sqlStatements } = await introspectMySQLToFile( client, schema, 'handle-float-type', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('handle unsigned numerical types', async () => { const schema = { table: mysqlTable('table', { col1: int({ unsigned: true }), col2: tinyint({ unsigned: true }), col3: smallint({ unsigned: true }), col4: mediumint({ unsigned: true }), col5: bigint({ mode: 'number', unsigned: true }), col6: float({ unsigned: true }), col7: float({ precision: 2, scale: 1, unsigned: true }), col8: double({ unsigned: true }), col9: double({ precision: 2, scale: 1, unsigned: true }), col10: decimal({ unsigned: true }), col11: decimal({ precision: 2, scale: 1, unsigned: true }), }), }; const { statements, sqlStatements } = await introspectMySQLToFile( client, schema, 'handle-unsigned-numerical-types', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('instrospect strings with single quotes', async () => { const schema = { columns: mysqlTable('columns', { enum: mysqlEnum('my_enum', ['escape\'s quotes "', 'escape\'s quotes 2 "']).default('escape\'s quotes "'), text: text('text').default('escape\'s quotes " '), varchar: varchar('varchar', { length: 255 }).default('escape\'s quotes " '), }), }; const { statements, sqlStatements } = await introspectMySQLToFile( client, schema, 'introspect-strings-with-single-quotes', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); await client.query(`drop table columns;`); }); ================================================ FILE: drizzle-kit/tests/introspect/pg.test.ts ================================================ import { PGlite } from '@electric-sql/pglite'; import { SQL, sql } from 'drizzle-orm'; import { bigint, bigserial, boolean, char, check, cidr, date, doublePrecision, index, inet, integer, interval, json, jsonb, macaddr, macaddr8, numeric, pgEnum, pgMaterializedView, pgPolicy, pgRole, pgSchema, pgTable, pgView, real, serial, smallint, smallserial, text, time, timestamp, uuid, varchar, } from 'drizzle-orm/pg-core'; import fs from 'fs'; import { introspectPgToFile } from 'tests/schemaDiffer'; import { expect, test } from 'vitest'; if (!fs.existsSync('tests/introspect/postgres')) { fs.mkdirSync('tests/introspect/postgres'); } test('basic introspect test', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').notNull(), email: text('email'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'basic-introspect', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('basic identity always test', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity(), email: text('email'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'basic-identity-always-introspect', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('basic identity by default test', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), email: text('email'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'basic-identity-default-introspect', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('basic index test', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { firstName: text('first_name'), lastName: text('last_name'), data: jsonb('data'), }, (table) => ({ singleColumn: index('single_column').on(table.firstName), multiColumn: index('multi_column').on(table.firstName, table.lastName), singleExpression: index('single_expression').on(sql`lower(${table.firstName})`), multiExpression: index('multi_expression').on(sql`lower(${table.firstName})`, sql`lower(${table.lastName})`), expressionWithComma: index('expression_with_comma').on( sql`(lower(${table.firstName}) || ', '::text || lower(${table.lastName}))`, ), expressionWithDoubleQuote: index('expression_with_double_quote').on(sql`('"'::text || ${table.firstName})`), expressionWithJsonbOperator: index('expression_with_jsonb_operator').on( sql`(${table.data} #>> '{a,b,1}'::text[])`, ), })), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'basic-index-introspect', ); expect(statements.length).toBe(10); expect(sqlStatements.length).toBe(10); }); test('identity always test: few params', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity({ startWith: 100, name: 'custom_name', }), email: text('email'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'identity-always-few-params-introspect', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('identity by default test: few params', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ maxValue: 10000, name: 'custom_name', }), email: text('email'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'identity-default-few-params-introspect', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('identity always test: all params', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity({ startWith: 10, increment: 4, minValue: 10, maxValue: 10000, cache: 100, cycle: true, }), email: text('email'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'identity-always-all-params-introspect', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('identity by default test: all params', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 10, increment: 4, minValue: 10, maxValue: 10000, cache: 100, cycle: true, }), email: text('email'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'identity-default-all-params-introspect', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('generated column: link to another column', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity(), email: text('email'), generatedEmail: text('generatedEmail').generatedAlwaysAs( (): SQL => sql`email`, ), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'generated-link-column', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('instrospect all column types', async () => { const client = new PGlite(); const myEnum = pgEnum('my_enum', ['a', 'b', 'c']); const schema = { enum_: myEnum, // NOTE: Types from extensions aren't tested due to PGlite not supporting at the moment columns: pgTable('columns', { enum: myEnum('my_enum').default('a'), smallint: smallint('smallint').default(10), integer: integer('integer').default(10), numeric: numeric('numeric', { precision: 3, scale: 1 }).default('99.9'), numeric2: numeric('numeric2', { precision: 1, scale: 1 }).default('99.9'), numeric3: numeric('numeric3').default('99.9'), bigint: bigint('bigint', { mode: 'number' }).default(100), boolean: boolean('boolean').default(true), text: text('test').default('abc'), varchar: varchar('varchar', { length: 25 }).default('abc'), char: char('char', { length: 3 }).default('abc'), serial: serial('serial'), bigserial: bigserial('bigserial', { mode: 'number' }), smallserial: smallserial('smallserial'), doublePrecision: doublePrecision('doublePrecision').default(100), real: real('real').default(100), json: json('json').$type<{ attr: string }>().default({ attr: 'value' }), jsonb: jsonb('jsonb').$type<{ attr: string }>().default({ attr: 'value' }), time1: time('time1').default('00:00:00'), time2: time('time2').defaultNow(), timestamp1: timestamp('timestamp1', { withTimezone: true, precision: 6 }).default(new Date()), timestamp2: timestamp('timestamp2', { withTimezone: true, precision: 6 }).defaultNow(), timestamp3: timestamp('timestamp3', { withTimezone: true, precision: 6 }).default( sql`timezone('utc'::text, now())`, ), date1: date('date1').default('2024-01-01'), date2: date('date2').defaultNow(), date3: date('date3').default(sql`CURRENT_TIMESTAMP`), uuid1: uuid('uuid1').default('a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11'), uuid2: uuid('uuid2').defaultRandom(), inet: inet('inet').default('127.0.0.1'), cidr: cidr('cidr').default('127.0.0.1/32'), macaddr: macaddr('macaddr').default('00:00:00:00:00:00'), macaddr8: macaddr8('macaddr8').default('00:00:00:ff:fe:00:00:00'), interval: interval('interval').default('1 day 01:00:00'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-all-columns-types', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('instrospect all column array types', async () => { const client = new PGlite(); const myEnum = pgEnum('my_enum', ['a', 'b', 'c']); const schema = { enum_: myEnum, // NOTE: Types from extensions aren't tested due to PGlite not supporting at the moment columns: pgTable('columns', { enum: myEnum('my_enum').array().default(['a', 'b']), smallint: smallint('smallint').array().default([10, 20]), integer: integer('integer').array().default([10, 20]), numeric: numeric('numeric', { precision: 3, scale: 1 }).array().default(['99.9', '88.8']), bigint: bigint('bigint', { mode: 'number' }).array().default([100, 200]), boolean: boolean('boolean').array().default([true, false]), text: text('test').array().default(['abc', 'def']), varchar: varchar('varchar', { length: 25 }).array().default(['abc', 'def']), char: char('char', { length: 3 }).array().default(['abc', 'def']), doublePrecision: doublePrecision('doublePrecision').array().default([100, 200]), real: real('real').array().default([100, 200]), json: json('json').$type<{ attr: string }>().array().default([{ attr: 'value1' }, { attr: 'value2' }]), jsonb: jsonb('jsonb').$type<{ attr: string }>().array().default([{ attr: 'value1' }, { attr: 'value2' }]), time: time('time').array().default(['00:00:00', '01:00:00']), timestamp: timestamp('timestamp', { withTimezone: true, precision: 6 }) .array() .default([new Date(), new Date()]), date: date('date').array().default(['2024-01-01', '2024-01-02']), uuid: uuid('uuid').array().default([ 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11', 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a12', ]), inet: inet('inet').array().default(['127.0.0.1', '127.0.0.2']), cidr: cidr('cidr').array().default(['127.0.0.1/32', '127.0.0.2/32']), macaddr: macaddr('macaddr').array().default(['00:00:00:00:00:00', '00:00:00:00:00:01']), macaddr8: macaddr8('macaddr8').array().default(['00:00:00:ff:fe:00:00:00', '00:00:00:ff:fe:00:00:01']), interval: interval('interval').array().default(['1 day 01:00:00', '1 day 02:00:00']), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-all-columns-array-types', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect columns with name with non-alphanumeric characters', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { 'not:allowed': integer('not:allowed'), 'nuh--uh': integer('nuh-uh'), '1_nope': integer('1_nope'), valid: integer('valid'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-column-with-name-with-non-alphanumeric-characters', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect enum from different schema', async () => { const client = new PGlite(); const schema2 = pgSchema('schema2'); const myEnumInSchema2 = schema2.enum('my_enum', ['a', 'b', 'c']); const schema = { schema2, myEnumInSchema2, users: pgTable('users', { col: myEnumInSchema2('col'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-enum-from-different-schema', ['public', 'schema2'], ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect enum with same names across different schema', async () => { const client = new PGlite(); const schema2 = pgSchema('schema2'); const myEnumInSchema2 = schema2.enum('my_enum', ['a', 'b', 'c']); const myEnum = pgEnum('my_enum', ['a', 'b', 'c']); const schema = { schema2, myEnumInSchema2, myEnum, users: pgTable('users', { col1: myEnumInSchema2('col1'), col2: myEnum('col2'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-enum-with-same-names-across-different-schema', ['public', 'schema2'], ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect enum with similar name to native type', async () => { const client = new PGlite(); const timeLeft = pgEnum('time_left', ['short', 'medium', 'long']); const schema = { timeLeft, auction: pgTable('auction', { col: timeLeft('col1'), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-enum-with-similar-name-to-native-type', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('instrospect strings with single quotes', async () => { const client = new PGlite(); const myEnum = pgEnum('my_enum', ['escape\'s quotes " ']); const schema = { enum_: myEnum, columns: pgTable('columns', { enum: myEnum('my_enum').default('escape\'s quotes " '), text: text('text').default('escape\'s quotes " '), varchar: varchar('varchar').default('escape\'s quotes " '), }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-strings-with-single-quotes', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect checks', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: serial('id'), name: varchar('name'), age: integer('age'), }, (table) => ({ someCheck: check('some_check', sql`${table.age} > 21`), })), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-checks', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect checks from different schemas with same names', async () => { const client = new PGlite(); const mySchema = pgSchema('schema2'); const schema = { mySchema, users: pgTable('users', { id: serial('id'), age: integer('age'), }, (table) => ({ someCheck: check('some_check', sql`${table.age} > 21`), })), usersInMySchema: mySchema.table('users', { id: serial('id'), age: integer('age'), }, (table) => ({ someCheck: check('some_check', sql`${table.age} < 1`), })), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-checks-diff-schema-same-names', ['public', 'schema2'], ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect view #1', async () => { const client = new PGlite(); const users = pgTable('users', { id: serial('id').primaryKey().notNull(), name: varchar('users'), }); const view = pgView('some_view').as((qb) => qb.select().from(users)); const schema = { view, users, }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-view', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect view #2', async () => { const client = new PGlite(); const users = pgTable('users', { id: serial('id').primaryKey().notNull(), name: varchar('users'), }); const view = pgView('some_view', { id: integer('asd') }).with({ checkOption: 'cascaded' }).as( sql`SELECT * FROM ${users}`, ); const schema = { view, users, }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-view-2', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect view in other schema', async () => { const client = new PGlite(); const newSchema = pgSchema('new_schema'); const users = pgTable('users', { id: serial('id').primaryKey().notNull(), name: varchar('users'), }); const view = newSchema.view('some_view', { id: integer('asd') }).with({ checkOption: 'cascaded' }).as( sql`SELECT * FROM ${users}`, ); const schema = { view, users, newSchema, }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-view-in-other-schema', ['new_schema'], ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect materialized view in other schema', async () => { const client = new PGlite(); const newSchema = pgSchema('new_schema'); const users = pgTable('users', { id: serial('id').primaryKey().notNull(), name: varchar('users'), }); const view = newSchema.materializedView('some_view', { id: integer('asd') }).with({ autovacuumEnabled: true }).as( sql`SELECT * FROM ${users}`, ); const schema = { view, users, newSchema, }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-mat-view-in-other-schema', ['new_schema'], ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect materialized view #1', async () => { const client = new PGlite(); const users = pgTable('users', { id: serial('id').primaryKey().notNull(), name: varchar('users'), }); const view = pgMaterializedView('some_view').using('heap').withNoData().as((qb) => qb.select().from(users)); const schema = { view, users, }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-materialized-view', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect materialized view #2', async () => { const client = new PGlite(); const users = pgTable('users', { id: serial('id').primaryKey().notNull(), name: varchar('users'), }); const view = pgMaterializedView('some_view', { id: integer('asd') }).with({ autovacuumFreezeMinAge: 1 }).as( sql`SELECT * FROM ${users}`, ); const schema = { view, users, }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'introspect-materialized-view-2', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('basic policy', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test'), })), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'basic-policy', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('basic policy with "as"', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'basic-policy-as', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test.todo('basic policy with CURRENT_USER role', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { to: 'current_user' }), })), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'basic-policy', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('basic policy with all fields except "using" and "with"', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive', for: 'all', to: ['postgres'] }), })), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'basic-policy-all-fields', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('basic policy with "using" and "with"', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { using: sql`true`, withCheck: sql`true` }), })), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'basic-policy-using-withcheck', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('multiple policies', async () => { const client = new PGlite(); const schema = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { using: sql`true`, withCheck: sql`true` }), rlsPolicy: pgPolicy('newRls'), })), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'multiple-policies', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('multiple policies with roles', async () => { const client = new PGlite(); client.query(`CREATE ROLE manager;`); const schema = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { using: sql`true`, withCheck: sql`true` }), rlsPolicy: pgPolicy('newRls', { to: ['postgres', 'manager'] }), })), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'multiple-policies-with-roles', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('basic roles', async () => { const client = new PGlite(); const schema = { usersRole: pgRole('user'), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'basic-roles', ['public'], { roles: { include: ['user'] } }, ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('role with properties', async () => { const client = new PGlite(); const schema = { usersRole: pgRole('user', { inherit: false, createDb: true, createRole: true }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'roles-with-properties', ['public'], { roles: { include: ['user'] } }, ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('role with a few properties', async () => { const client = new PGlite(); const schema = { usersRole: pgRole('user', { inherit: false, createRole: true }), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'roles-with-few-properties', ['public'], { roles: { include: ['user'] } }, ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('multiple policies with roles from schema', async () => { const client = new PGlite(); const usersRole = pgRole('user_role', { inherit: false, createRole: true }); const schema = { usersRole, users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { using: sql`true`, withCheck: sql`true` }), rlsPolicy: pgPolicy('newRls', { to: ['postgres', usersRole] }), })), }; const { statements, sqlStatements } = await introspectPgToFile( client, schema, 'multiple-policies-with-roles-from-schema', ['public'], { roles: { include: ['user_role'] } }, ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); ================================================ FILE: drizzle-kit/tests/introspect/singlestore.test.ts ================================================ import Docker from 'dockerode'; import 'dotenv/config'; import { SQL, sql } from 'drizzle-orm'; import { bigint, char, decimal, double, float, int, mediumint, singlestoreTable, smallint, text, tinyint, varchar, } from 'drizzle-orm/singlestore-core'; import * as fs from 'fs'; import getPort from 'get-port'; import { Connection, createConnection } from 'mysql2/promise'; import { introspectSingleStoreToFile } from 'tests/schemaDiffer'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; let client: Connection; let singlestoreContainer: Docker.Container; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); singlestoreContainer = await docker.createContainer({ Image: image, Env: ['ROOT_PASSWORD=singlestore'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await singlestoreContainer.start(); await new Promise((resolve) => setTimeout(resolve, 4000)); return `singlestore://root:singlestore@localhost:${port}/`; } beforeAll(async () => { const connectionString = process.env.SINGLESTORE_CONNECTION_STRING ?? await createDockerDB(); const sleep = 1000; let timeLeft = 20000; let connected = false; let lastError: unknown | undefined; do { try { client = await createConnection(connectionString); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to SingleStore'); await client?.end().catch(console.error); await singlestoreContainer?.stop().catch(console.error); throw lastError; } }); afterAll(async () => { await client?.end().catch(console.error); await singlestoreContainer?.stop().catch(console.error); }); beforeEach(async () => { await client.query(`drop database if exists \`drizzle\`;`); await client.query(`create database \`drizzle\`;`); await client.query(`use \`drizzle\`;`); }); if (!fs.existsSync('tests/introspect/singlestore')) { fs.mkdirSync('tests/introspect/singlestore'); } // TODO: Unskip this test when generated column is implemented /* test.skip('generated always column: link to another column', async () => { const schema = { users: singlestoreTable('users', { id: int('id'), email: text('email'), generatedEmail: text('generatedEmail').generatedAlwaysAs( (): SQL => sql`\`email\``, ), }), }; const { statements, sqlStatements } = await introspectSingleStoreToFile( client, schema, 'generated-link-column', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); */ // TODO: Unskip this test when generated column is implemented /* test.skip('generated always column virtual: link to another column', async () => { const schema = { users: singlestoreTable('users', { id: int('id'), email: text('email'), generatedEmail: text('generatedEmail').generatedAlwaysAs( (): SQL => sql`\`email\``, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await introspectSingleStoreToFile( client, schema, 'generated-link-column-virtual', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); */ test('Default value of character type column: char', async () => { const schema = { users: singlestoreTable('users', { id: int('id'), sortKey: char('sortKey', { length: 255 }).default('0'), }), }; const { statements, sqlStatements } = await introspectSingleStoreToFile( client, schema, 'default-value-char-column', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('Default value of character type column: varchar', async () => { const schema = { users: singlestoreTable('users', { id: int('id'), sortKey: varchar('sortKey', { length: 255 }).default('0'), }), }; const { statements, sqlStatements } = await introspectSingleStoreToFile( client, schema, 'default-value-varchar-column', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); // TODO: Unskip this test when views are implemented /* test('view #1', async () => { const users = singlestoreTable('users', { id: int('id') }); const testView = singlestoreView('some_view', { id: int('id') }).as( sql`select \`drizzle\`.\`users\`.\`id\` AS \`id\` from \`drizzle\`.\`users\``, ); const schema = { users: users, testView, }; const { statements, sqlStatements } = await introspectSingleStoreToFile( client, schema, 'view-1', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); */ // TODO: Unskip this test when views are implemented /* test('view #2', async () => { const users = singlestoreTable('some_users', { id: int('id') }); const testView = singlestoreView('some_view', { id: int('id') }).algorithm('temptable').sqlSecurity('definer').as( sql`SELECT * FROM ${users}`, ); const schema = { users: users, testView, }; const { statements, sqlStatements } = await introspectSingleStoreToFile( client, schema, 'view-2', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); */ test('handle float type', async () => { const schema = { table: singlestoreTable('table', { col1: float(), col2: float({ precision: 2 }), col3: float({ precision: 2, scale: 1 }), }), }; const { statements, sqlStatements } = await introspectSingleStoreToFile( client, schema, 'handle-float-type', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('handle unsigned numerical types', async () => { const schema = { table: singlestoreTable('table', { col1: int({ unsigned: true }), col2: tinyint({ unsigned: true }), col3: smallint({ unsigned: true }), col4: mediumint({ unsigned: true }), col5: bigint({ mode: 'number', unsigned: true }), col6: float({ unsigned: true }), col7: float({ precision: 2, scale: 1, unsigned: true }), col8: double({ unsigned: true }), col9: double({ precision: 2, scale: 1, unsigned: true }), col10: decimal({ unsigned: true }), col11: decimal({ precision: 2, scale: 1, unsigned: true }), }), }; const { statements, sqlStatements } = await introspectSingleStoreToFile( client, schema, 'handle-unsigned-numerical-types', 'drizzle', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); ================================================ FILE: drizzle-kit/tests/introspect/sqlite.test.ts ================================================ import Database from 'better-sqlite3'; import { SQL, sql } from 'drizzle-orm'; import { check, int, sqliteTable, sqliteView, text } from 'drizzle-orm/sqlite-core'; import * as fs from 'fs'; import { introspectSQLiteToFile } from 'tests/schemaDiffer'; import { expect, test } from 'vitest'; if (!fs.existsSync('tests/introspect/sqlite')) { fs.mkdirSync('tests/introspect/sqlite'); } test('generated always column: link to another column', async () => { const sqlite = new Database(':memory:'); const schema = { users: sqliteTable('users', { id: int('id'), email: text('email'), generatedEmail: text('generatedEmail').generatedAlwaysAs( (): SQL => sql`\`email\``, ), }), }; const { statements, sqlStatements } = await introspectSQLiteToFile( sqlite, schema, 'generated-link-column', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('generated always column virtual: link to another column', async () => { const sqlite = new Database(':memory:'); const schema = { users: sqliteTable('users', { id: int('id'), email: text('email'), generatedEmail: text('generatedEmail').generatedAlwaysAs( (): SQL => sql`\`email\``, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await introspectSQLiteToFile( sqlite, schema, 'generated-link-column-virtual', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('instrospect strings with single quotes', async () => { const sqlite = new Database(':memory:'); const schema = { columns: sqliteTable('columns', { text: text('text').default('escape\'s quotes " '), }), }; const { statements, sqlStatements } = await introspectSQLiteToFile( sqlite, schema, 'introspect-strings-with-single-quotes', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('introspect checks', async () => { const sqlite = new Database(':memory:'); const schema = { users: sqliteTable('users', { id: int('id'), name: text('name'), age: int('age'), }, (table) => ({ someCheck: check('some_check', sql`${table.age} > 21`), })), }; const { statements, sqlStatements } = await introspectSQLiteToFile( sqlite, schema, 'introspect-checks', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('view #1', async () => { const sqlite = new Database(':memory:'); const users = sqliteTable('users', { id: int('id') }); const testView = sqliteView('some_view', { id: int('id') }).as( sql`SELECT * FROM ${users}`, ); const schema = { users: users, testView, }; const { statements, sqlStatements } = await introspectSQLiteToFile( sqlite, schema, 'view-1', ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); ================================================ FILE: drizzle-kit/tests/libsql-checks.test.ts ================================================ import { sql } from 'drizzle-orm'; import { check, int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { expect, test } from 'vitest'; import { diffTestSchemasLibSQL } from './schemaDiffer'; test('create table with check', async (t) => { const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const { sqlStatements, statements } = await diffTestSchemasLibSQL({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [ { name: 'id', type: 'integer', notNull: true, primaryKey: true, autoincrement: false, }, { name: 'age', type: 'integer', notNull: false, primaryKey: false, autoincrement: false, }, ], compositePKs: [], checkConstraints: ['some_check_name;"users"."age" > 21'], referenceData: [], uniqueConstraints: [], }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`age\` integer, \tCONSTRAINT "some_check_name" CHECK("users"."age" > 21) );\n`); }); test('add check contraint to existing table', async (t) => { const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const from = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }), }; const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'age', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: ['some_check_name;"users"."age" > 21'], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`age\` integer, \tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 21) );\n`); expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('drop check contraint to existing table', async (t) => { const from = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }), }; const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'age', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`age\` integer );\n`); expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('rename check constraint', async (t) => { const from = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('new_some_check_name', sql`${table.age} > 21`), })), }; const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'age', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [`new_some_check_name;"users"."age" > 21`], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`age\` integer, \tCONSTRAINT "new_some_check_name" CHECK("__new_users"."age" > 21) );\n`); expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('rename check constraint', async (t) => { const from = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 10`), })), }; const { sqlStatements, statements } = await diffTestSchemasLibSQL(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'age', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [`some_check_name;"users"."age" > 10`], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`age\` integer, \tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 10) );\n`); expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('create checks with same names', async (t) => { const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), name: text('name'), }, (table) => ({ checkConstraint1: check('some_check_name', sql`${table.age} > 21`), checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), })), }; await expect(diffTestSchemasLibSQL({}, to, [])).rejects.toThrowError(); }); ================================================ FILE: drizzle-kit/tests/libsql-statements.test.ts ================================================ import { foreignKey, index, int, integer, sqliteTable, text, uniqueIndex } from 'drizzle-orm/sqlite-core'; import { JsonRecreateTableStatement } from 'src/jsonStatements'; import { expect, test } from 'vitest'; import { diffTestSchemasLibSQL } from './schemaDiffer'; test('drop autoincrement', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), }), }; const { statements } = await diffTestSchemasLibSQL(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [{ autoincrement: false, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); }); test('set autoincrement', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }), }; const { statements } = await diffTestSchemasLibSQL(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [{ autoincrement: true, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); }); test('set not null', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_alter_column_set_notnull', tableName: 'users', columnName: 'name', schema: '', newDataType: 'text', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: false, columnPk: false, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL;`, ); }); test('drop not null', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_alter_column_drop_notnull', tableName: 'users', columnName: 'name', schema: '', newDataType: 'text', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`, ); }); test('set default. set not null. add column', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull().default('name'), age: int('age').notNull(), }), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(3); expect(statements[0]).toStrictEqual({ type: 'alter_table_alter_column_set_default', tableName: 'users', columnName: 'name', newDefaultValue: "'name'", schema: '', newDataType: 'text', columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: false, columnPk: false, }); expect(statements[1]).toStrictEqual({ type: 'alter_table_alter_column_set_notnull', tableName: 'users', columnName: 'name', schema: '', newDataType: 'text', columnDefault: "'name'", columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: false, columnPk: false, }); expect(statements[2]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: undefined, column: { name: 'age', type: 'integer', primaryKey: false, notNull: true, autoincrement: false, }, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL DEFAULT 'name';`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE \`users\` ADD \`age\` integer NOT NULL;`, ); }); test('drop default. drop not null', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull().default('name'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'alter_table_alter_column_drop_default', tableName: 'users', columnName: 'name', schema: '', newDataType: 'text', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, }); expect(statements[1]).toStrictEqual({ type: 'alter_table_alter_column_drop_notnull', tableName: 'users', columnName: 'name', schema: '', newDataType: 'text', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`, ); }); test('set data type. set default', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: int('name').default(123), }), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'alter_table_alter_column_set_type', tableName: 'users', columnName: 'name', newDataType: 'integer', oldDataType: 'text', schema: '', columnDefault: 123, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, }); expect(statements[1]).toStrictEqual({ type: 'alter_table_alter_column_set_default', tableName: 'users', columnName: 'name', schema: '', newDataType: 'integer', newDefaultValue: 123, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" integer DEFAULT 123;`, ); }); test('add foriegn key', async (t) => { const schema = { table: sqliteTable('table', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), tableId: int('table_id'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), tableId: int('table_id').references(() => schema.table.id), }), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_reference', tableName: 'users', data: 'users_table_id_table_id_fk;users;table_id;table;id;no action;no action', schema: '', columnNotNull: false, columnDefault: undefined, columnType: 'integer', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE \`users\` ALTER COLUMN "table_id" TO "table_id" integer REFERENCES table(id) ON DELETE no action ON UPDATE no action;`, ); }); test('drop foriegn key', async (t) => { const schema = { table: sqliteTable('table', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), tableId: int('table_id').references(() => schema.table.id, { onDelete: 'cascade', }), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), tableId: int('table_id'), }), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: true, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'table_id', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \t\`table_id\` integer );\n`); expect(sqlStatements[2]).toBe( `INSERT INTO \`__new_users\`("id", "table_id") SELECT "id", "table_id" FROM \`users\`;`, ); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('alter foriegn key', async (t) => { const tableRef = sqliteTable('table', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }); const tableRef2 = sqliteTable('table2', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), tableId: int('table_id').references(() => tableRef.id, { onDelete: 'cascade', }), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), tableId: int('table_id').references(() => tableRef2.id), }), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: true, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'table_id', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [ { columnsFrom: ['table_id'], columnsTo: ['id'], name: 'users_table_id_table2_id_fk', onDelete: 'no action', onUpdate: 'no action', tableFrom: 'users', tableTo: 'table2', }, ], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \t\`table_id\` integer, \tFOREIGN KEY (\`table_id\`) REFERENCES \`table2\`(\`id\`) ON UPDATE no action ON DELETE no action );\n`); expect(sqlStatements[2]).toBe( `INSERT INTO \`__new_users\`("id", "table_id") SELECT "id", "table_id" FROM \`users\`;`, ); expect(sqlStatements[3]).toBe( 'DROP TABLE `users`;', ); expect(sqlStatements[4]).toBe( 'ALTER TABLE `__new_users` RENAME TO `users`;', ); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('add foriegn key for multiple columns', async (t) => { const tableRef = sqliteTable('table', { id: int('id').primaryKey({ autoIncrement: true }), age: int('age'), age1: int('age_1'), }); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), column: int('column'), column1: int('column_1'), }), tableRef, }; const schema2 = { tableRef, users: sqliteTable( 'users', { id: int('id').primaryKey({ autoIncrement: true }), column: int('column'), column1: int('column_1'), }, (table) => ({ foreignKey: foreignKey({ columns: [table.column, table.column1], foreignColumns: [tableRef.age, tableRef.age1], }), }), ), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: true, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'column', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'column_1', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [ { columnsFrom: ['column', 'column_1'], columnsTo: ['age', 'age_1'], name: 'users_column_column_1_table_age_age_1_fk', onDelete: 'no action', onUpdate: 'no action', tableFrom: 'users', tableTo: 'table', }, ], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], } as JsonRecreateTableStatement); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); expect(sqlStatements[1]).toBe( `CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \t\`column\` integer, \t\`column_1\` integer, \tFOREIGN KEY (\`column\`,\`column_1\`) REFERENCES \`table\`(\`age\`,\`age_1\`) ON UPDATE no action ON DELETE no action );\n`, ); expect(sqlStatements[2]).toBe( `INSERT INTO \`__new_users\`("id", "column", "column_1") SELECT "id", "column", "column_1" FROM \`users\`;`, ); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('drop foriegn key for multiple columns', async (t) => { const tableRef = sqliteTable('table', { id: int('id').primaryKey({ autoIncrement: true }), age: int('age'), age1: int('age_1'), }); const schema1 = { users: sqliteTable( 'users', { id: int('id').primaryKey({ autoIncrement: true }), column: int('column'), column1: int('column_1'), }, (table) => ({ foreignKey: foreignKey({ columns: [table.column, table.column1], foreignColumns: [tableRef.age, tableRef.age1], }), }), ), tableRef, }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), column: int('column'), column1: int('column_1'), }), tableRef, }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: true, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'column', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'column_1', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); expect(sqlStatements[1]).toBe( `CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \t\`column\` integer, \t\`column_1\` integer );\n`, ); expect(sqlStatements[2]).toBe( `INSERT INTO \`__new_users\`("id", "column", "column_1") SELECT "id", "column", "column_1" FROM \`users\`;`, ); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('alter column drop generated', async (t) => { const from = { users: sqliteTable('table', { id: int('id').primaryKey().notNull(), name: text('name').generatedAlwaysAs('drizzle is the best').notNull(), }), }; const to = { users: sqliteTable('table', { id: int('id').primaryKey().notNull(), name: text('name').notNull(), }), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( from, to, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'table', type: 'alter_table_alter_column_drop_generated', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`ALTER TABLE \`table\` DROP COLUMN \`name\`;`); expect(sqlStatements[1]).toBe( `ALTER TABLE \`table\` ADD \`name\` text NOT NULL;`, ); }); test('recreate table with nested references', async (t) => { let users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), age: integer('age'), }); let subscriptions = sqliteTable('subscriptions', { id: int('id').primaryKey({ autoIncrement: true }), userId: integer('user_id').references(() => users.id), customerId: text('customer_id'), }); const schema1 = { users: users, subscriptions: subscriptions, subscriptionMetadata: sqliteTable('subscriptions_metadata', { id: int('id').primaryKey({ autoIncrement: true }), subscriptionId: text('subscription_id').references( () => subscriptions.id, ), }), }; users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }); const schema2 = { users: users, subscriptions: subscriptions, subscriptionMetadata: sqliteTable('subscriptions_metadata', { id: int('id').primaryKey({ autoIncrement: true }), subscriptionId: text('subscription_id').references( () => subscriptions.id, ), }), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'name', notNull: false, primaryKey: false, type: 'text', }, { autoincrement: false, generated: undefined, name: 'age', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`name\` text, \t\`age\` integer );\n`); expect(sqlStatements[2]).toBe( `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`, ); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('set not null with index', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }, (table) => ({ someIndex: index('users_name_index').on(table.name), })), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }, (table) => ({ someIndex: index('users_name_index').on(table.name), })), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_alter_column_set_notnull', tableName: 'users', columnName: 'name', schema: '', newDataType: 'text', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: false, columnPk: false, }); expect(sqlStatements.length).toBe(3); expect(sqlStatements[0]).toBe( `DROP INDEX "users_name_index";`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL;`, ); expect(sqlStatements[2]).toBe( `CREATE INDEX \`users_name_index\` ON \`users\` (\`name\`);`, ); }); test('drop not null with two indexes', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), age: int('age').notNull(), }, (table) => ({ someUniqeIndex: uniqueIndex('users_name_unique').on(table.name), someIndex: index('users_age_index').on(table.age), })), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), age: int('age').notNull(), }, (table) => ({ someUniqeIndex: uniqueIndex('users_name_unique').on(table.name), someIndex: index('users_age_index').on(table.age), })), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_alter_column_drop_notnull', tableName: 'users', columnName: 'name', schema: '', newDataType: 'text', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, }); expect(sqlStatements.length).toBe(5); expect(sqlStatements[0]).toBe( `DROP INDEX "users_name_unique";`, ); expect(sqlStatements[1]).toBe( `DROP INDEX "users_age_index";`, ); expect(sqlStatements[2]).toBe( `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`, ); expect(sqlStatements[3]).toBe( `CREATE UNIQUE INDEX \`users_name_unique\` ON \`users\` (\`name\`);`, ); expect(sqlStatements[4]).toBe( `CREATE INDEX \`users_age_index\` ON \`users\` (\`age\`);`, ); }); ================================================ FILE: drizzle-kit/tests/libsql-views.test.ts ================================================ import { sql } from 'drizzle-orm'; import { int, sqliteTable, sqliteView } from 'drizzle-orm/sqlite-core'; import { expect, test } from 'vitest'; import { diffTestSchemasLibSQL } from './schemaDiffer'; test('create view', async () => { const users = sqliteTable('users', { id: int('id').default(1) }); const view = sqliteView('view').as((qb) => qb.select().from(users)); const to = { users: users, testView: view, }; const { statements, sqlStatements } = await diffTestSchemasLibSQL({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [{ autoincrement: false, default: 1, name: 'id', type: 'integer', primaryKey: false, notNull: false, }], compositePKs: [], uniqueConstraints: [], referenceData: [], checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'sqlite_create_view', name: 'view', definition: 'select "id" from "users"', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( \t\`id\` integer DEFAULT 1 );\n`); expect(sqlStatements[1]).toBe(`CREATE VIEW \`view\` AS select "id" from "users";`); }); test('drop view', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), }; const to = { users, }; const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'view', type: 'drop_view', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `DROP VIEW \`view\`;`, ); }); test('alter view', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), }; const to = { users, testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users WHERE users.id = 1`), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ name: 'view', type: 'drop_view', }); expect(statements[1]).toStrictEqual({ name: 'view', type: 'sqlite_create_view', definition: 'SELECT * FROM users WHERE users.id = 1', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `DROP VIEW \`view\`;`, ); expect(sqlStatements[1]).toBe( `CREATE VIEW \`view\` AS SELECT * FROM users WHERE users.id = 1;`, ); }); test('create view with existing flag', async () => { const view = sqliteView('view', {}).existing(); const to = { testView: view, }; const { statements, sqlStatements } = await diffTestSchemasLibSQL({}, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop view with existing flag', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).existing(), }; const to = { users, }; const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('rename view with existing flag', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).existing(), }; const to = { users, testView: sqliteView('new_view', { id: int('id') }).existing(), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, ['view->new_view']); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('rename view and drop existing flag', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).existing(), }; const to = { users, testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users`), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, ['view->new_view']); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ name: 'view', type: 'drop_view', }); expect(statements[1]).toStrictEqual({ type: 'sqlite_create_view', name: 'new_view', definition: 'SELECT * FROM users', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe('DROP VIEW `view`;'); expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users;`); }); test('rename view and alter ".as"', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), }; const to = { users, testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users WHERE 1=1`), }; const { statements, sqlStatements } = await diffTestSchemasLibSQL(from, to, ['view->new_view']); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ name: 'view', type: 'drop_view', }); expect(statements[1]).toStrictEqual({ type: 'sqlite_create_view', name: 'new_view', definition: 'SELECT * FROM users WHERE 1=1', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe('DROP VIEW `view`;'); expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users WHERE 1=1;`); }); ================================================ FILE: drizzle-kit/tests/migrate/libsq-schema.ts ================================================ import { integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; export const users = sqliteTable('users', { id: integer('id').primaryKey().notNull(), name: text('name').notNull(), }); ================================================ FILE: drizzle-kit/tests/migrate/libsql-migrate.test.ts ================================================ import { createClient } from '@libsql/client'; import { connectToLibSQL } from 'src/cli/connections'; import { expect, test } from 'vitest'; test('validate migrate function', async () => { const credentials = { url: ':memory:', }; const { migrate, query } = await connectToLibSQL(credentials); await migrate({ migrationsFolder: 'tests/migrate/migrations' }); const res = await query(`PRAGMA table_info("users");`); expect(res).toStrictEqual([{ cid: 0, name: 'id', type: 'INTEGER', notnull: 0, dflt_value: null, pk: 0, }, { cid: 1, name: 'name', type: 'INTEGER', notnull: 1, dflt_value: null, pk: 0, }]); }); // test('validate migrate function', async () => { // const credentials = { // url: '', // authToken: '', // }; // const { migrate, query } = await connectToLibSQL(credentials); // await migrate({ migrationsFolder: 'tests/migrate/migrations' }); // const res = await query(`PRAGMA table_info("users");`); // expect(res).toStrictEqual([{ // cid: 0, // name: 'id', // type: 'INTEGER', // notnull: 0, // dflt_value: null, // pk: 0, // }, { // cid: 1, // name: 'name', // type: 'INTEGER', // notnull: 1, // dflt_value: null, // pk: 0, // }]); // }); ================================================ FILE: drizzle-kit/tests/migrate/migrations/0000_little_blizzard.sql ================================================ CREATE TABLE `users` ( `id` integer PRIMARY KEY NOT NULL, `name` text NOT NULL ); ================================================ FILE: drizzle-kit/tests/migrate/migrations/0001_nebulous_storm.sql ================================================ PRAGMA foreign_keys=OFF;--> statement-breakpoint CREATE TABLE `__new_users` ( `id` integer, `name` integer NOT NULL ); --> statement-breakpoint INSERT INTO `__new_users`("id", "name") SELECT "id", "name" FROM `users`;--> statement-breakpoint DROP TABLE `users`;--> statement-breakpoint ALTER TABLE `__new_users` RENAME TO `users`;--> statement-breakpoint PRAGMA foreign_keys=ON; ================================================ FILE: drizzle-kit/tests/migrate/migrations/meta/0000_snapshot.json ================================================ { "version": "6", "dialect": "sqlite", "id": "2bd46776-9e41-4a6c-b617-5c600bb176f2", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "users": { "name": "users", "columns": { "id": { "name": "id", "type": "integer", "primaryKey": true, "notNull": true, "autoincrement": false }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {}, "uniqueConstraints": {} } }, "enums": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} }, "internal": { "indexes": {} } } ================================================ FILE: drizzle-kit/tests/migrate/migrations/meta/0001_snapshot.json ================================================ { "version": "6", "dialect": "sqlite", "id": "6c0ec455-42fd-47fd-a22c-4bb4551e1358", "prevId": "2bd46776-9e41-4a6c-b617-5c600bb176f2", "tables": { "users": { "name": "users", "columns": { "id": { "name": "id", "type": "integer", "primaryKey": false, "notNull": false, "autoincrement": false }, "name": { "name": "name", "type": "integer", "primaryKey": false, "notNull": true, "autoincrement": false } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {}, "uniqueConstraints": {} } }, "enums": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} }, "internal": { "indexes": {} } } ================================================ FILE: drizzle-kit/tests/migrate/migrations/meta/_journal.json ================================================ { "version": "7", "dialect": "sqlite", "entries": [ { "idx": 0, "version": "6", "when": 1725358702427, "tag": "0000_little_blizzard", "breakpoints": true }, { "idx": 1, "version": "6", "when": 1725358713033, "tag": "0001_nebulous_storm", "breakpoints": true } ] } ================================================ FILE: drizzle-kit/tests/mysql-checks.test.ts ================================================ import { sql } from 'drizzle-orm'; import { check, int, mysqlTable, serial, varchar } from 'drizzle-orm/mysql-core'; import { expect, test } from 'vitest'; import { diffTestSchemasMysql } from './schemaDiffer'; test('create table with check', async (t) => { const to = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const { sqlStatements, statements } = await diffTestSchemasMysql({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', columns: [ { name: 'id', type: 'serial', notNull: true, primaryKey: false, autoincrement: true, }, { name: 'age', type: 'int', notNull: false, primaryKey: false, autoincrement: false, }, ], compositePKs: [ 'users_id;id', ], checkConstraints: ['some_check_name;\`users\`.\`age\` > 21'], compositePkName: 'users_id', uniqueConstraints: [], schema: undefined, internals: { tables: {}, indexes: {}, }, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( \t\`id\` serial AUTO_INCREMENT NOT NULL, \t\`age\` int, \tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`), \tCONSTRAINT \`some_check_name\` CHECK(\`users\`.\`age\` > 21) );\n`); }); test('add check contraint to existing table', async (t) => { const from = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), }), }; const to = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_check_constraint', tableName: 'users', data: 'some_check_name;\`users\`.\`age\` > 21', schema: '', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name\` CHECK (\`users\`.\`age\` > 21);`, ); }); test('drop check contraint in existing table', async (t) => { const to = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), }), }; const from = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'delete_check_constraint', tableName: 'users', schema: '', constraintName: 'some_check_name', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, ); }); test('rename check constraint', async (t) => { const from = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const to = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('new_check_name', sql`${table.age} > 21`), })), }; const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ constraintName: 'some_check_name', schema: '', tableName: 'users', type: 'delete_check_constraint', }); expect(statements[1]).toStrictEqual({ data: 'new_check_name;\`users\`.\`age\` > 21', schema: '', tableName: 'users', type: 'create_check_constraint', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE \`users\` ADD CONSTRAINT \`new_check_name\` CHECK (\`users\`.\`age\` > 21);`, ); }); test('alter check constraint', async (t) => { const from = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const to = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('new_check_name', sql`${table.age} > 10`), })), }; const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ constraintName: 'some_check_name', schema: '', tableName: 'users', type: 'delete_check_constraint', }); expect(statements[1]).toStrictEqual({ data: 'new_check_name;\`users\`.\`age\` > 10', schema: '', tableName: 'users', type: 'create_check_constraint', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name\`;`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE \`users\` ADD CONSTRAINT \`new_check_name\` CHECK (\`users\`.\`age\` > 10);`, ); }); test('alter multiple check constraints', async (t) => { const from = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), name: varchar('name', { length: 255 }), }, (table) => ({ checkConstraint1: check('some_check_name_1', sql`${table.age} > 21`), checkConstraint2: check('some_check_name_2', sql`${table.name} != 'Alex'`), })), }; const to = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), name: varchar('name', { length: 255 }), }, (table) => ({ checkConstraint1: check('some_check_name_3', sql`${table.age} > 21`), checkConstraint2: check('some_check_name_4', sql`${table.name} != 'Alex'`), })), }; const { sqlStatements, statements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(4); expect(statements[0]).toStrictEqual({ constraintName: 'some_check_name_1', schema: '', tableName: 'users', type: 'delete_check_constraint', }); expect(statements[1]).toStrictEqual({ constraintName: 'some_check_name_2', schema: '', tableName: 'users', type: 'delete_check_constraint', }); expect(statements[2]).toStrictEqual({ data: 'some_check_name_3;\`users\`.\`age\` > 21', schema: '', tableName: 'users', type: 'create_check_constraint', }); expect(statements[3]).toStrictEqual({ data: "some_check_name_4;\`users\`.\`name\` != 'Alex'", schema: '', tableName: 'users', type: 'create_check_constraint', }); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name_1\`;`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE \`users\` DROP CONSTRAINT \`some_check_name_2\`;`, ); expect(sqlStatements[2]).toBe( `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name_3\` CHECK (\`users\`.\`age\` > 21);`, ); expect(sqlStatements[3]).toBe( `ALTER TABLE \`users\` ADD CONSTRAINT \`some_check_name_4\` CHECK (\`users\`.\`name\` != \'Alex\');`, ); }); test('create checks with same names', async (t) => { const to = { users: mysqlTable('users', { id: serial('id').primaryKey(), age: int('age'), name: varchar('name', { length: 255 }), }, (table) => ({ checkConstraint1: check('some_check_name', sql`${table.age} > 21`), checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), })), }; await expect(diffTestSchemasMysql({}, to, [])).rejects.toThrowError(); }); ================================================ FILE: drizzle-kit/tests/mysql-generated.test.ts ================================================ import { SQL, sql } from 'drizzle-orm'; import { int, mysqlTable, text } from 'drizzle-orm/mysql-core'; import { expect, test } from 'vitest'; import { diffTestSchemasMysql } from './schemaDiffer'; test('generated as callback: add column with generated constraint', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { column: { generated: { as: "`users`.`name` || 'hello'", type: 'stored', }, autoincrement: false, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as callback: add generated constraint to an exisiting column as stored', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { mode: 'stored', }), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'stored', }, columnAutoIncrement: false, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED NOT NULL;", ]); }); test('generated as callback: add generated constraint to an exisiting column as virtual', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { mode: 'virtual', }), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL NOT NULL;", ]); }); test('generated as callback: drop generated constraint as stored', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name} || 'to delete'`, { mode: 'stored' }, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'stored', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', ]); }); test('generated as callback: drop generated constraint as virtual', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name} || 'to delete'`, { mode: 'virtual' }, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'virtual', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); test('generated as callback: change generated constraint type from virtual to stored', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, { mode: 'virtual' }, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'stored', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as callback: change generated constraint type from stored to virtual', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); test('generated as callback: change generated constraint', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); // --- test('generated as sql: add column with generated constraint', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { column: { generated: { as: "`users`.`name` || 'hello'", type: 'stored', }, autoincrement: false, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as sql: add generated constraint to an exisiting column as stored', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, { mode: 'stored', }), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'stored', }, columnAutoIncrement: false, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED NOT NULL;", ]); }); test('generated as sql: add generated constraint to an exisiting column as virtual', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, { mode: 'virtual', }), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL NOT NULL;", ]); }); test('generated as sql: drop generated constraint as stored', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'to delete'`, { mode: 'stored' }, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'stored', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', ]); }); test('generated as sql: drop generated constraint as virtual', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'to delete'`, { mode: 'virtual' }, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'virtual', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); test('generated as sql: change generated constraint type from virtual to stored', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\``, { mode: 'virtual' }, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'stored', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as sql: change generated constraint type from stored to virtual', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\``, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); test('generated as sql: change generated constraint', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\``, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); // --- test('generated as string: add column with generated constraint', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { column: { generated: { as: "`users`.`name` || 'hello'", type: 'stored', }, autoincrement: false, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as string: add generated constraint to an exisiting column as stored', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, { mode: 'stored', }), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'stored', }, columnAutoIncrement: false, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED NOT NULL;", ]); }); test('generated as string: add generated constraint to an exisiting column as virtual', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, { mode: 'virtual', }), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL NOT NULL;", ]); }); test('generated as string: drop generated constraint as stored', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'to delete'`, { mode: 'stored' }, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'stored', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', ]); }); test('generated as string: drop generated constraint as virtual', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'to delete'`, { mode: 'virtual' }, ), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'virtual', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); test('generated as string: change generated constraint type from virtual to stored', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``, { mode: 'virtual', }), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'stored', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as string: change generated constraint type from stored to virtual', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); test('generated as string: change generated constraint', async () => { const from = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``), }), }; const to = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasMysql( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); ================================================ FILE: drizzle-kit/tests/mysql-schemas.test.ts ================================================ import { mysqlSchema, mysqlTable } from 'drizzle-orm/mysql-core'; import { expect, test } from 'vitest'; import { diffTestSchemasMysql } from './schemaDiffer'; // We don't manage databases(schemas) in MySQL with Drizzle Kit test('add schema #1', async () => { const to = { devSchema: mysqlSchema('dev'), }; const { statements } = await diffTestSchemasMysql({}, to, []); expect(statements.length).toBe(0); }); test('add schema #2', async () => { const from = { devSchema: mysqlSchema('dev'), }; const to = { devSchema: mysqlSchema('dev'), devSchema2: mysqlSchema('dev2'), }; const { statements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(0); }); test('delete schema #1', async () => { const from = { devSchema: mysqlSchema('dev'), }; const { statements } = await diffTestSchemasMysql(from, {}, []); expect(statements.length).toBe(0); }); test('delete schema #2', async () => { const from = { devSchema: mysqlSchema('dev'), devSchema2: mysqlSchema('dev2'), }; const to = { devSchema: mysqlSchema('dev'), }; const { statements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(0); }); test('rename schema #1', async () => { const from = { devSchema: mysqlSchema('dev'), }; const to = { devSchema2: mysqlSchema('dev2'), }; const { statements } = await diffTestSchemasMysql(from, to, ['dev->dev2']); expect(statements.length).toBe(0); }); test('rename schema #2', async () => { const from = { devSchema: mysqlSchema('dev'), devSchema1: mysqlSchema('dev1'), }; const to = { devSchema: mysqlSchema('dev'), devSchema2: mysqlSchema('dev2'), }; const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); test('add table to schema #1', async () => { const dev = mysqlSchema('dev'); const from = {}; const to = { dev, users: dev.table('users', {}), }; const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); test('add table to schema #2', async () => { const dev = mysqlSchema('dev'); const from = { dev }; const to = { dev, users: dev.table('users', {}), }; const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); test('add table to schema #3', async () => { const dev = mysqlSchema('dev'); const from = { dev }; const to = { dev, usersInDev: dev.table('users', {}), users: mysqlTable('users', {}), }; const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [], uniqueConstraints: [], internals: { tables: {}, indexes: {}, }, compositePkName: '', compositePKs: [], checkConstraints: [], }); }); test('remove table from schema #1', async () => { const dev = mysqlSchema('dev'); const from = { dev, users: dev.table('users', {}) }; const to = { dev, }; const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); test('remove table from schema #2', async () => { const dev = mysqlSchema('dev'); const from = { dev, users: dev.table('users', {}) }; const to = {}; const { statements } = await diffTestSchemasMysql(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); ================================================ FILE: drizzle-kit/tests/mysql-views.test.ts ================================================ import { sql } from 'drizzle-orm'; import { int, mysqlTable, mysqlView } from 'drizzle-orm/mysql-core'; import { expect, test } from 'vitest'; import { diffTestSchemasMysql } from './schemaDiffer'; test('create view #1', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, }; const to = { users: users, view: mysqlView('some_view').as((qb) => qb.select().from(users)), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'mysql_create_view', name: 'some_view', algorithm: 'undefined', replace: false, definition: 'select `id` from `users`', withCheckOption: undefined, sqlSecurity: 'definer', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE ALGORITHM = undefined SQL SECURITY definer VIEW \`some_view\` AS (select \`id\` from \`users\`);`); }); test('create view #2', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, }; const to = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'mysql_create_view', name: 'some_view', algorithm: 'merge', replace: false, definition: 'SELECT * FROM \`users\`', withCheckOption: 'cascaded', sqlSecurity: 'definer', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE ALGORITHM = merge SQL SECURITY definer VIEW \`some_view\` AS (SELECT * FROM \`users\`) WITH cascaded CHECK OPTION;`); }); test('create view with existing flag', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, }; const to = { users: users, view: mysqlView('some_view', {}).existing(), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop view', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const to = { users: users, }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'drop_view', name: 'some_view', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`DROP VIEW \`some_view\`;`); }); test('drop view with existing flag', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').existing(), }; const to = { users: users, }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('rename view', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const to = { users: users, view: mysqlView('new_some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ 'public.some_view->public.new_some_view', ]); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_view', nameFrom: 'some_view', nameTo: 'new_some_view', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`RENAME TABLE \`some_view\` TO \`new_some_view\`;`); }); test('rename view and alter meta options', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const to = { users: users, view: mysqlView('new_some_view', {}).sqlSecurity('definer') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ 'public.some_view->public.new_some_view', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'rename_view', nameFrom: 'some_view', nameTo: 'new_some_view', }); expect(statements[1]).toStrictEqual({ algorithm: 'undefined', columns: {}, definition: 'SELECT * FROM `users`', isExisting: false, name: 'new_some_view', sqlSecurity: 'definer', type: 'alter_mysql_view', withCheckOption: 'cascaded', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`RENAME TABLE \`some_view\` TO \`new_some_view\`;`); expect(sqlStatements[1]).toBe(`ALTER ALGORITHM = undefined SQL SECURITY definer VIEW \`new_some_view\` AS SELECT * FROM \`users\` WITH cascaded CHECK OPTION;`); }); test('rename view with existing flag', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').existing(), }; const to = { users: users, view: mysqlView('new_some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').existing(), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ 'public.some_view->public.new_some_view', ]); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('add meta to view', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).as(sql`SELECT * FROM ${users}`), }; const to = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ algorithm: 'merge', columns: {}, definition: 'SELECT * FROM `users`', isExisting: false, name: 'some_view', sqlSecurity: 'definer', type: 'alter_mysql_view', withCheckOption: 'cascaded', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER ALGORITHM = merge SQL SECURITY definer VIEW \`some_view\` AS SELECT * FROM \`users\` WITH cascaded CHECK OPTION;`); }); test('add meta to view with existing flag', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).existing(), }; const to = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').existing(), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('alter meta to view', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const to = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ algorithm: 'merge', columns: {}, definition: 'SELECT * FROM `users`', isExisting: false, name: 'some_view', sqlSecurity: 'definer', type: 'alter_mysql_view', withCheckOption: 'cascaded', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER ALGORITHM = merge SQL SECURITY definer VIEW \`some_view\` AS SELECT * FROM \`users\` WITH cascaded CHECK OPTION;`); }); test('alter meta to view with existing flag', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') .withCheckOption('cascaded').existing(), }; const to = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').existing(), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop meta from view', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const to = { users: users, view: mysqlView('some_view', {}).as(sql`SELECT * FROM ${users}`), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ algorithm: 'undefined', columns: {}, definition: 'SELECT * FROM `users`', isExisting: false, name: 'some_view', sqlSecurity: 'definer', type: 'alter_mysql_view', withCheckOption: undefined, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER ALGORITHM = undefined SQL SECURITY definer VIEW \`some_view\` AS SELECT * FROM \`users\`;`); }); test('drop meta from view existing flag', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('merge').sqlSecurity('definer') .withCheckOption('cascaded').existing(), }; const to = { users: users, view: mysqlView('some_view', {}).existing(), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('alter view ".as" value', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const to = { users: users, view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users} WHERE ${users.id} = 1`), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ algorithm: 'temptable', definition: 'SELECT * FROM `users` WHERE `users`.`id` = 1', name: 'some_view', sqlSecurity: 'invoker', type: 'mysql_create_view', withCheckOption: 'cascaded', replace: true, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE OR REPLACE ALGORITHM = temptable SQL SECURITY invoker VIEW \`some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) WITH cascaded CHECK OPTION;`); }); test('rename and alter view ".as" value', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const to = { users: users, view: mysqlView('new_some_view', {}).algorithm('temptable').sqlSecurity('invoker') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users} WHERE ${users.id} = 1`), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ 'public.some_view->public.new_some_view', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ nameFrom: 'some_view', nameTo: 'new_some_view', type: 'rename_view', }); expect(statements[1]).toStrictEqual({ algorithm: 'temptable', definition: 'SELECT * FROM `users` WHERE `users`.`id` = 1', name: 'new_some_view', sqlSecurity: 'invoker', type: 'mysql_create_view', withCheckOption: 'cascaded', replace: true, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`RENAME TABLE \`some_view\` TO \`new_some_view\`;`); expect(sqlStatements[1]).toBe(`CREATE OR REPLACE ALGORITHM = temptable SQL SECURITY invoker VIEW \`new_some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) WITH cascaded CHECK OPTION;`); }); test('set existing', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users}`), }; const to = { users: users, view: mysqlView('new_some_view', {}).algorithm('temptable').sqlSecurity('invoker') .withCheckOption('cascaded').existing(), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ 'public.some_view->public.new_some_view', ]); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop existing', async () => { const users = mysqlTable('users', { id: int('id').primaryKey().notNull(), }); const from = { users: users, view: mysqlView('some_view', {}).algorithm('temptable').sqlSecurity('invoker') .withCheckOption('cascaded').existing(), }; const to = { users: users, view: mysqlView('new_some_view', {}).algorithm('temptable').sqlSecurity('invoker') .withCheckOption('cascaded').as(sql`SELECT * FROM ${users} WHERE ${users.id} = 1`), }; const { statements, sqlStatements } = await diffTestSchemasMysql(from, to, [ 'public.some_view->public.new_some_view', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ name: 'new_some_view', type: 'drop_view', }); expect(statements[1]).toStrictEqual({ algorithm: 'temptable', definition: 'SELECT * FROM `users` WHERE `users`.`id` = 1', name: 'new_some_view', sqlSecurity: 'invoker', type: 'mysql_create_view', withCheckOption: 'cascaded', replace: false, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`DROP VIEW \`new_some_view\`;`); expect(sqlStatements[1]).toBe(`CREATE ALGORITHM = temptable SQL SECURITY invoker VIEW \`new_some_view\` AS (SELECT * FROM \`users\` WHERE \`users\`.\`id\` = 1) WITH cascaded CHECK OPTION;`); }); ================================================ FILE: drizzle-kit/tests/mysql.test.ts ================================================ import { sql } from 'drizzle-orm'; import { foreignKey, index, int, json, mysqlEnum, mysqlSchema, mysqlTable, primaryKey, serial, text, unique, uniqueIndex, varchar, } from 'drizzle-orm/mysql-core'; import { expect, test } from 'vitest'; import { diffTestSchemasMysql } from './schemaDiffer'; test('add table #1', async () => { const to = { users: mysqlTable('users', {}), }; const { statements } = await diffTestSchemasMysql({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [], compositePKs: [], internals: { tables: {}, indexes: {}, }, uniqueConstraints: [], compositePkName: '', checkConstraints: [], }); }); test('add table #2', async () => { const to = { users: mysqlTable('users', { id: serial('id').primaryKey(), }), }; const { statements } = await diffTestSchemasMysql({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [ { name: 'id', notNull: true, primaryKey: false, type: 'serial', autoincrement: true, }, ], compositePKs: ['users_id;id'], compositePkName: 'users_id', uniqueConstraints: [], checkConstraints: [], internals: { tables: {}, indexes: {}, }, }); }); test('add table #3', async () => { const to = { users: mysqlTable( 'users', { id: serial('id'), }, (t) => { return { pk: primaryKey({ name: 'users_pk', columns: [t.id], }), }; }, ), }; const { statements } = await diffTestSchemasMysql({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [ { name: 'id', notNull: true, primaryKey: false, type: 'serial', autoincrement: true, }, ], compositePKs: ['users_pk;id'], uniqueConstraints: [], compositePkName: 'users_pk', checkConstraints: [], internals: { tables: {}, indexes: {}, }, }); }); test('add table #4', async () => { const to = { users: mysqlTable('users', {}), posts: mysqlTable('posts', {}), }; const { statements } = await diffTestSchemasMysql({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [], internals: { tables: {}, indexes: {}, }, compositePKs: [], uniqueConstraints: [], compositePkName: '', checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'create_table', tableName: 'posts', schema: undefined, columns: [], compositePKs: [], internals: { tables: {}, indexes: {}, }, uniqueConstraints: [], compositePkName: '', checkConstraints: [], }); }); test('add table #5', async () => { const schema = mysqlSchema('folder'); const from = { schema, }; const to = { schema, users: schema.table('users', {}), }; const { statements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(0); }); test('add table #6', async () => { const from = { users1: mysqlTable('users1', {}), }; const to = { users2: mysqlTable('users2', {}), }; const { statements } = await diffTestSchemasMysql(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users2', schema: undefined, columns: [], internals: { tables: {}, indexes: {}, }, compositePKs: [], uniqueConstraints: [], compositePkName: '', checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'drop_table', policies: [], tableName: 'users1', schema: undefined, }); }); test('add table #7', async () => { const from = { users1: mysqlTable('users1', {}), }; const to = { users: mysqlTable('users', {}), users2: mysqlTable('users2', {}), }; const { statements } = await diffTestSchemasMysql(from, to, [ 'public.users1->public.users2', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [], compositePKs: [], uniqueConstraints: [], internals: { tables: {}, indexes: {}, }, compositePkName: '', checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'rename_table', tableNameFrom: 'users1', tableNameTo: 'users2', fromSchema: undefined, toSchema: undefined, }); }); test('add schema + table #1', async () => { const schema = mysqlSchema('folder'); const to = { schema, users: schema.table('users', {}), }; const { statements } = await diffTestSchemasMysql({}, to, []); expect(statements.length).toBe(0); }); test('change schema with tables #1', async () => { const schema = mysqlSchema('folder'); const schema2 = mysqlSchema('folder2'); const from = { schema, users: schema.table('users', {}), }; const to = { schema2, users: schema2.table('users', {}), }; const { statements } = await diffTestSchemasMysql(from, to, [ 'folder->folder2', ]); expect(statements.length).toBe(0); }); test('change table schema #1', async () => { const schema = mysqlSchema('folder'); const from = { schema, users: mysqlTable('users', {}), }; const to = { schema, users: schema.table('users', {}), }; const { statements } = await diffTestSchemasMysql(from, to, [ 'public.users->folder.users', ]); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'drop_table', policies: [], tableName: 'users', schema: undefined, }); }); test('change table schema #2', async () => { const schema = mysqlSchema('folder'); const from = { schema, users: schema.table('users', {}), }; const to = { schema, users: mysqlTable('users', {}), }; const { statements } = await diffTestSchemasMysql(from, to, [ 'folder.users->public.users', ]); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [], uniqueConstraints: [], compositePkName: '', compositePKs: [], checkConstraints: [], internals: { tables: {}, indexes: {}, }, }); }); test('change table schema #3', async () => { const schema1 = mysqlSchema('folder1'); const schema2 = mysqlSchema('folder2'); const from = { schema1, schema2, users: schema1.table('users', {}), }; const to = { schema1, schema2, users: schema2.table('users', {}), }; const { statements } = await diffTestSchemasMysql(from, to, [ 'folder1.users->folder2.users', ]); expect(statements.length).toBe(0); }); test('change table schema #4', async () => { const schema1 = mysqlSchema('folder1'); const schema2 = mysqlSchema('folder2'); const from = { schema1, users: schema1.table('users', {}), }; const to = { schema1, schema2, // add schema users: schema2.table('users', {}), // move table }; const { statements } = await diffTestSchemasMysql(from, to, [ 'folder1.users->folder2.users', ]); expect(statements.length).toBe(0); }); test('change table schema #5', async () => { const schema1 = mysqlSchema('folder1'); const schema2 = mysqlSchema('folder2'); const from = { schema1, // remove schema users: schema1.table('users', {}), }; const to = { schema2, // add schema users: schema2.table('users', {}), // move table }; const { statements } = await diffTestSchemasMysql(from, to, [ 'folder1.users->folder2.users', ]); expect(statements.length).toBe(0); }); test('change table schema #5', async () => { const schema1 = mysqlSchema('folder1'); const schema2 = mysqlSchema('folder2'); const from = { schema1, schema2, users: schema1.table('users', {}), }; const to = { schema1, schema2, users: schema2.table('users2', {}), // rename and move table }; const { statements } = await diffTestSchemasMysql(from, to, [ 'folder1.users->folder2.users2', ]); expect(statements.length).toBe(0); }); test('change table schema #6', async () => { const schema1 = mysqlSchema('folder1'); const schema2 = mysqlSchema('folder2'); const from = { schema1, users: schema1.table('users', {}), }; const to = { schema2, // rename schema users: schema2.table('users2', {}), // rename table }; const { statements } = await diffTestSchemasMysql(from, to, [ 'folder1->folder2', 'folder2.users->folder2.users2', ]); expect(statements.length).toBe(0); }); test('add table #10', async () => { const to = { users: mysqlTable('table', { json: json('json').default({}), }), }; const { sqlStatements } = await diffTestSchemasMysql({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` json DEFAULT ('{}')\n);\n", ); }); test('add table #11', async () => { const to = { users: mysqlTable('table', { json: json('json').default([]), }), }; const { sqlStatements } = await diffTestSchemasMysql({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` json DEFAULT ('[]')\n);\n", ); }); test('add table #12', async () => { const to = { users: mysqlTable('table', { json: json('json').default([1, 2, 3]), }), }; const { sqlStatements } = await diffTestSchemasMysql({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` json DEFAULT ('[1,2,3]')\n);\n", ); }); test('add table #13', async () => { const to = { users: mysqlTable('table', { json: json('json').default({ key: 'value' }), }), }; const { sqlStatements } = await diffTestSchemasMysql({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( 'CREATE TABLE `table` (\n\t`json` json DEFAULT (\'{"key":"value"}\')\n);\n', ); }); test('add table #14', async () => { const to = { users: mysqlTable('table', { json: json('json').default({ key: 'value', arr: [1, 2, 3], }), }), }; const { sqlStatements } = await diffTestSchemasMysql({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( 'CREATE TABLE `table` (\n\t`json` json DEFAULT (\'{"key":"value","arr":[1,2,3]}\')\n);\n', ); }); test('drop index', async () => { const from = { users: mysqlTable( 'table', { name: text('name'), }, (t) => { return { idx: index('name_idx').on(t.name), }; }, ), }; const to = { users: mysqlTable('table', { name: text('name'), }), }; const { sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe('DROP INDEX `name_idx` ON `table`;'); }); test('drop unique constraint', async () => { const from = { users: mysqlTable( 'table', { name: text('name'), }, (t) => { return { uq: unique('name_uq').on(t.name), }; }, ), }; const to = { users: mysqlTable('table', { name: text('name'), }), }; const { sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe('ALTER TABLE `table` DROP INDEX `name_uq`;'); }); test('add table with indexes', async () => { const from = {}; const to = { users: mysqlTable( 'users', { id: serial('id').primaryKey(), name: text('name'), email: text('email'), }, (t) => ({ uniqueExpr: uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), indexExpr: index('indexExpr').on(sql`(lower(${t.email}))`), indexExprMultiple: index('indexExprMultiple').on( sql`(lower(${t.email}))`, sql`(lower(${t.email}))`, ), uniqueCol: uniqueIndex('uniqueCol').on(t.email), indexCol: index('indexCol').on(t.email), indexColMultiple: index('indexColMultiple').on(t.email, t.email), indexColExpr: index('indexColExpr').on( sql`(lower(${t.email}))`, t.email, ), }), ), }; const { sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements).toStrictEqual([ `CREATE TABLE \`users\` (\n\t\`id\` serial AUTO_INCREMENT NOT NULL,\n\t\`name\` text,\n\t\`email\` text,\n\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`),\n\tCONSTRAINT \`uniqueExpr\` UNIQUE((lower(\`email\`))),\n\tCONSTRAINT \`uniqueCol\` UNIQUE(\`email\`) ); `, 'CREATE INDEX `indexExpr` ON `users` ((lower(`email`)));', 'CREATE INDEX `indexExprMultiple` ON `users` ((lower(`email`)),(lower(`email`)));', 'CREATE INDEX `indexCol` ON `users` (`email`);', 'CREATE INDEX `indexColMultiple` ON `users` (`email`,`email`);', 'CREATE INDEX `indexColExpr` ON `users` ((lower(`email`)),`email`);', ]); }); test('varchar and text default values escape single quotes', async (t) => { const schema1 = { table: mysqlTable('table', { id: serial('id').primaryKey(), }), }; const schem2 = { table: mysqlTable('table', { id: serial('id').primaryKey(), enum: mysqlEnum('enum', ["escape's quotes", "escape's quotes 2"]).default("escape's quotes"), text: text('text').default("escape's quotes"), varchar: varchar('varchar', { length: 255 }).default("escape's quotes"), }), }; const { sqlStatements } = await diffTestSchemasMysql(schema1, schem2, []); expect(sqlStatements.length).toBe(3); expect(sqlStatements[0]).toStrictEqual( "ALTER TABLE `table` ADD `enum` enum('escape''s quotes','escape''s quotes 2') DEFAULT 'escape''s quotes';", ); expect(sqlStatements[1]).toStrictEqual( "ALTER TABLE `table` ADD `text` text DEFAULT ('escape''s quotes');", ); expect(sqlStatements[2]).toStrictEqual( "ALTER TABLE `table` ADD `varchar` varchar(255) DEFAULT 'escape''s quotes';", ); }); test('composite primary key', async () => { const from = {}; const to = { table: mysqlTable('works_to_creators', { workId: int('work_id').notNull(), creatorId: int('creator_id').notNull(), classification: text('classification').notNull(), }, (t) => ({ pk: primaryKey({ columns: [t.workId, t.creatorId, t.classification], }), })), }; const { sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `works_to_creators` (\n\t`work_id` int NOT NULL,\n\t`creator_id` int NOT NULL,\n\t`classification` text NOT NULL,\n\tCONSTRAINT `works_to_creators_work_id_creator_id_classification_pk` PRIMARY KEY(`work_id`,`creator_id`,`classification`)\n);\n', ]); }); test('add column before creating unique constraint', async () => { const from = { table: mysqlTable('table', { id: serial('id').primaryKey(), }), }; const to = { table: mysqlTable('table', { id: serial('id').primaryKey(), name: text('name').notNull(), }, (t) => ({ uq: unique('uq').on(t.name), })), }; const { sqlStatements } = await diffTestSchemasMysql(from, to, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `table` ADD `name` text NOT NULL;', 'ALTER TABLE `table` ADD CONSTRAINT `uq` UNIQUE(`name`);', ]); }); test('optional db aliases (snake case)', async () => { const from = {}; const t1 = mysqlTable( 't1', { t1Id1: int().notNull().primaryKey(), t1Col2: int().notNull(), t1Col3: int().notNull(), t2Ref: int().notNull().references(() => t2.t2Id), t1Uni: int().notNull(), t1UniIdx: int().notNull(), t1Idx: int().notNull(), }, (table) => ({ uni: unique('t1_uni').on(table.t1Uni), uniIdx: uniqueIndex('t1_uni_idx').on(table.t1UniIdx), idx: index('t1_idx').on(table.t1Idx), fk: foreignKey({ columns: [table.t1Col2, table.t1Col3], foreignColumns: [t3.t3Id1, t3.t3Id2], }), }), ); const t2 = mysqlTable( 't2', { t2Id: serial().primaryKey(), }, ); const t3 = mysqlTable( 't3', { t3Id1: int(), t3Id2: int(), }, (table) => ({ pk: primaryKey({ columns: [table.t3Id1, table.t3Id2], }), }), ); const to = { t1, t2, t3, }; const { sqlStatements } = await diffTestSchemasMysql(from, to, [], false, 'snake_case'); const st1 = `CREATE TABLE \`t1\` ( \`t1_id1\` int NOT NULL, \`t1_col2\` int NOT NULL, \`t1_col3\` int NOT NULL, \`t2_ref\` int NOT NULL, \`t1_uni\` int NOT NULL, \`t1_uni_idx\` int NOT NULL, \`t1_idx\` int NOT NULL, CONSTRAINT \`t1_t1_id1\` PRIMARY KEY(\`t1_id1\`), CONSTRAINT \`t1_uni\` UNIQUE(\`t1_uni\`), CONSTRAINT \`t1_uni_idx\` UNIQUE(\`t1_uni_idx\`) ); `; const st2 = `CREATE TABLE \`t2\` ( \`t2_id\` serial AUTO_INCREMENT NOT NULL, CONSTRAINT \`t2_t2_id\` PRIMARY KEY(\`t2_id\`) ); `; const st3 = `CREATE TABLE \`t3\` ( \`t3_id1\` int NOT NULL, \`t3_id2\` int NOT NULL, CONSTRAINT \`t3_t3_id1_t3_id2_pk\` PRIMARY KEY(\`t3_id1\`,\`t3_id2\`) ); `; const st4 = `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t2_ref_t2_t2_id_fk\` FOREIGN KEY (\`t2_ref\`) REFERENCES \`t2\`(\`t2_id\`) ON DELETE no action ON UPDATE no action;`; const st5 = `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t1_col2_t1_col3_t3_t3_id1_t3_id2_fk\` FOREIGN KEY (\`t1_col2\`,\`t1_col3\`) REFERENCES \`t3\`(\`t3_id1\`,\`t3_id2\`) ON DELETE no action ON UPDATE no action;`; const st6 = `CREATE INDEX \`t1_idx\` ON \`t1\` (\`t1_idx\`);`; expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]); }); test('optional db aliases (camel case)', async () => { const from = {}; const t1 = mysqlTable( 't1', { t1_id1: int().notNull().primaryKey(), t1_col2: int().notNull(), t1_col3: int().notNull(), t2_ref: int().notNull().references(() => t2.t2_id), t1_uni: int().notNull(), t1_uni_idx: int().notNull(), t1_idx: int().notNull(), }, (table) => ({ uni: unique('t1Uni').on(table.t1_uni), uni_idx: uniqueIndex('t1UniIdx').on(table.t1_uni_idx), idx: index('t1Idx').on(table.t1_idx), fk: foreignKey({ columns: [table.t1_col2, table.t1_col3], foreignColumns: [t3.t3_id1, t3.t3_id2], }), }), ); const t2 = mysqlTable( 't2', { t2_id: serial().primaryKey(), }, ); const t3 = mysqlTable( 't3', { t3_id1: int(), t3_id2: int(), }, (table) => ({ pk: primaryKey({ columns: [table.t3_id1, table.t3_id2], }), }), ); const to = { t1, t2, t3, }; const { sqlStatements } = await diffTestSchemasMysql(from, to, [], false, 'camelCase'); const st1 = `CREATE TABLE \`t1\` ( \`t1Id1\` int NOT NULL, \`t1Col2\` int NOT NULL, \`t1Col3\` int NOT NULL, \`t2Ref\` int NOT NULL, \`t1Uni\` int NOT NULL, \`t1UniIdx\` int NOT NULL, \`t1Idx\` int NOT NULL, CONSTRAINT \`t1_t1Id1\` PRIMARY KEY(\`t1Id1\`), CONSTRAINT \`t1Uni\` UNIQUE(\`t1Uni\`), CONSTRAINT \`t1UniIdx\` UNIQUE(\`t1UniIdx\`) ); `; const st2 = `CREATE TABLE \`t2\` ( \`t2Id\` serial AUTO_INCREMENT NOT NULL, CONSTRAINT \`t2_t2Id\` PRIMARY KEY(\`t2Id\`) ); `; const st3 = `CREATE TABLE \`t3\` ( \`t3Id1\` int NOT NULL, \`t3Id2\` int NOT NULL, CONSTRAINT \`t3_t3Id1_t3Id2_pk\` PRIMARY KEY(\`t3Id1\`,\`t3Id2\`) ); `; const st4 = `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t2Ref_t2_t2Id_fk\` FOREIGN KEY (\`t2Ref\`) REFERENCES \`t2\`(\`t2Id\`) ON DELETE no action ON UPDATE no action;`; const st5 = `ALTER TABLE \`t1\` ADD CONSTRAINT \`t1_t1Col2_t1Col3_t3_t3Id1_t3Id2_fk\` FOREIGN KEY (\`t1Col2\`,\`t1Col3\`) REFERENCES \`t3\`(\`t3Id1\`,\`t3Id2\`) ON DELETE no action ON UPDATE no action;`; const st6 = `CREATE INDEX \`t1Idx\` ON \`t1\` (\`t1Idx\`);`; expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]); }); test('add table with ts enum', async () => { enum Test { value = 'value', } const to = { users: mysqlTable('users', { enum: mysqlEnum(Test), }), }; const { statements } = await diffTestSchemasMysql({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [{ autoincrement: false, name: 'enum', notNull: false, primaryKey: false, type: "enum('value')", }], compositePKs: [], internals: { tables: {}, indexes: {}, }, uniqueConstraints: [], compositePkName: '', checkConstraints: [], }); }); ================================================ FILE: drizzle-kit/tests/pg-array.test.ts ================================================ import { bigint, boolean, date, integer, json, pgEnum, pgTable, serial, text, timestamp, uuid, } from 'drizzle-orm/pg-core'; import { expect, test } from 'vitest'; import { diffTestSchemas } from './schemaDiffer'; test('array #1: empty array default', async (t) => { const from = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { test: pgTable('test', { id: serial('id').primaryKey(), values: integer('values').array().default([]), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{}'" }, }); }); test('array #2: integer array default', async (t) => { const from = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { test: pgTable('test', { id: serial('id').primaryKey(), values: integer('values').array().default([1, 2, 3]), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{1,2,3}'" }, }); }); test('array #3: bigint array default', async (t) => { const from = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { test: pgTable('test', { id: serial('id').primaryKey(), values: bigint('values', { mode: 'bigint' }).array().default([BigInt(1), BigInt(2), BigInt(3)]), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'bigint[]', primaryKey: false, notNull: false, default: "'{1,2,3}'" }, }); }); test('array #4: boolean array default', async (t) => { const from = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { test: pgTable('test', { id: serial('id').primaryKey(), values: boolean('values').array().default([true, false, true]), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'boolean[]', primaryKey: false, notNull: false, default: "'{true,false,true}'", }, }); }); test('array #5: multi-dimensional array default', async (t) => { const from = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { test: pgTable('test', { id: serial('id').primaryKey(), values: integer('values').array().array().default([[1, 2], [3, 4]]), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'integer[][]', primaryKey: false, notNull: false, default: "'{{1,2},{3,4}}'", }, }); }); test('array #6: date array default', async (t) => { const from = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { test: pgTable('test', { id: serial('id').primaryKey(), values: date('values').array().default(['2024-08-06', '2024-08-07']), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'date[]', primaryKey: false, notNull: false, default: '\'{"2024-08-06","2024-08-07"}\'', }, }); }); test('array #7: timestamp array default', async (t) => { const from = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { test: pgTable('test', { id: serial('id').primaryKey(), values: timestamp('values').array().default([new Date('2024-08-06'), new Date('2024-08-07')]), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'timestamp[]', primaryKey: false, notNull: false, default: '\'{"2024-08-06 00:00:00.000","2024-08-07 00:00:00.000"}\'', }, }); }); test('array #8: json array default', async (t) => { const from = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { test: pgTable('test', { id: serial('id').primaryKey(), values: json('values').array().default([{ a: 1 }, { b: 2 }]), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'json[]', primaryKey: false, notNull: false, default: '\'{"{\\"a\\":1}","{\\"b\\":2}"}\'', }, }); }); test('array #9: text array default', async (t) => { const from = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { test: pgTable('test', { id: serial('id').primaryKey(), values: text('values').array().default(['abc', 'def']), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'text[]', primaryKey: false, notNull: false, default: '\'{"abc","def"}\'', }, }); }); test('array #10: uuid array default', async (t) => { const from = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { test: pgTable('test', { id: serial('id').primaryKey(), values: uuid('values').array().default([ 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11', 'b0eebc99-9c0b-4ef8-bb6d-cbb9bd380a11', ]), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'uuid[]', primaryKey: false, notNull: false, default: '\'{"a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11","b0eebc99-9c0b-4ef8-bb6d-cbb9bd380a11"}\'', }, }); }); test('array #11: enum array default', async (t) => { const testEnum = pgEnum('test_enum', ['a', 'b', 'c']); const from = { enum: testEnum, test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { enum: testEnum, test: pgTable('test', { id: serial('id').primaryKey(), values: testEnum('values').array().default(['a', 'b', 'c']), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'test_enum[]', primaryKey: false, notNull: false, default: '\'{"a","b","c"}\'', typeSchema: 'public', }, }); }); test('array #12: enum empty array default', async (t) => { const testEnum = pgEnum('test_enum', ['a', 'b', 'c']); const from = { enum: testEnum, test: pgTable('test', { id: serial('id').primaryKey(), }), }; const to = { enum: testEnum, test: pgTable('test', { id: serial('id').primaryKey(), values: testEnum('values').array().default([]), }), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'test_enum[]', primaryKey: false, notNull: false, default: "'{}'", typeSchema: 'public', }, }); }); ================================================ FILE: drizzle-kit/tests/pg-checks.test.ts ================================================ import { sql } from 'drizzle-orm'; import { check, integer, pgTable, serial, varchar } from 'drizzle-orm/pg-core'; import { JsonCreateTableStatement } from 'src/jsonStatements'; import { expect, test } from 'vitest'; import { diffTestSchemas } from './schemaDiffer'; test('create table with check', async (t) => { const to = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const { sqlStatements, statements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [ { name: 'id', type: 'serial', notNull: true, primaryKey: true, }, { name: 'age', type: 'integer', notNull: false, primaryKey: false, }, ], compositePKs: [], checkConstraints: ['some_check_name;"users"."age" > 21'], compositePkName: '', uniqueConstraints: [], isRLSEnabled: false, policies: [], } as JsonCreateTableStatement); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( \t"id" serial PRIMARY KEY NOT NULL, \t"age" integer, \tCONSTRAINT "some_check_name" CHECK ("users"."age" > 21) );\n`); }); test('add check contraint to existing table', async (t) => { const from = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), }), }; const to = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const { sqlStatements, statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_check_constraint', tableName: 'users', schema: '', data: 'some_check_name;"users"."age" > 21', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "users" ADD CONSTRAINT "some_check_name" CHECK ("users"."age" > 21);`, ); }); test('drop check contraint in existing table', async (t) => { const from = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const to = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), }), }; const { sqlStatements, statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'delete_check_constraint', tableName: 'users', schema: '', constraintName: 'some_check_name', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`, ); }); test('rename check constraint', async (t) => { const from = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const to = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), }, (table) => ({ checkConstraint: check('new_check_name', sql`${table.age} > 21`), })), }; const { sqlStatements, statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ constraintName: 'some_check_name', schema: '', tableName: 'users', type: 'delete_check_constraint', }); expect(statements[1]).toStrictEqual({ data: 'new_check_name;"users"."age" > 21', schema: '', tableName: 'users', type: 'create_check_constraint', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "users" ADD CONSTRAINT "new_check_name" CHECK ("users"."age" > 21);`, ); }); test('alter check constraint', async (t) => { const from = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const to = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), }, (table) => ({ checkConstraint: check('new_check_name', sql`${table.age} > 10`), })), }; const { sqlStatements, statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ constraintName: 'some_check_name', schema: '', tableName: 'users', type: 'delete_check_constraint', }); expect(statements[1]).toStrictEqual({ data: 'new_check_name;"users"."age" > 10', schema: '', tableName: 'users', type: 'create_check_constraint', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE "users" DROP CONSTRAINT "some_check_name";`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "users" ADD CONSTRAINT "new_check_name" CHECK ("users"."age" > 10);`, ); }); test('alter multiple check constraints', async (t) => { const from = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), name: varchar('name'), }, (table) => ({ checkConstraint1: check('some_check_name_1', sql`${table.age} > 21`), checkConstraint2: check('some_check_name_2', sql`${table.name} != 'Alex'`), })), }; const to = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), name: varchar('name'), }, (table) => ({ checkConstraint1: check('some_check_name_3', sql`${table.age} > 21`), checkConstraint2: check('some_check_name_4', sql`${table.name} != 'Alex'`), })), }; const { sqlStatements, statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(4); expect(statements[0]).toStrictEqual({ constraintName: 'some_check_name_1', schema: '', tableName: 'users', type: 'delete_check_constraint', }); expect(statements[1]).toStrictEqual({ constraintName: 'some_check_name_2', schema: '', tableName: 'users', type: 'delete_check_constraint', }); expect(statements[2]).toStrictEqual({ data: 'some_check_name_3;"users"."age" > 21', schema: '', tableName: 'users', type: 'create_check_constraint', }); expect(statements[3]).toStrictEqual({ data: 'some_check_name_4;"users"."name" != \'Alex\'', schema: '', tableName: 'users', type: 'create_check_constraint', }); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `ALTER TABLE "users" DROP CONSTRAINT "some_check_name_1";`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "users" DROP CONSTRAINT "some_check_name_2";`, ); expect(sqlStatements[2]).toBe( `ALTER TABLE "users" ADD CONSTRAINT "some_check_name_3" CHECK ("users"."age" > 21);`, ); expect(sqlStatements[3]).toBe( `ALTER TABLE "users" ADD CONSTRAINT "some_check_name_4" CHECK ("users"."name" != \'Alex\');`, ); }); test('create checks with same names', async (t) => { const to = { users: pgTable('users', { id: serial('id').primaryKey(), age: integer('age'), name: varchar('name'), }, (table) => ({ checkConstraint1: check('some_check_name', sql`${table.age} > 21`), checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), })), }; await expect(diffTestSchemas({}, to, [])).rejects.toThrowError(); }); ================================================ FILE: drizzle-kit/tests/pg-columns.test.ts ================================================ import { integer, pgTable, primaryKey, serial, text, uuid, varchar } from 'drizzle-orm/pg-core'; import { expect, test } from 'vitest'; import { diffTestSchemas } from './schemaDiffer'; test('add columns #1', async (t) => { const schema1 = { users: pgTable('users', { id: serial('id').primaryKey(), }), }; const schema2 = { users: pgTable('users', { id: serial('id').primaryKey(), name: text('name'), }), }; const { statements } = await diffTestSchemas(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'users', schema: '', column: { name: 'name', type: 'text', primaryKey: false, notNull: false }, }); }); test('add columns #2', async (t) => { const schema1 = { users: pgTable('users', { id: serial('id').primaryKey(), }), }; const schema2 = { users: pgTable('users', { id: serial('id').primaryKey(), name: text('name'), email: text('email'), }), }; const { statements } = await diffTestSchemas(schema1, schema2, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'users', schema: '', column: { name: 'name', type: 'text', primaryKey: false, notNull: false }, }); expect(statements[1]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'users', schema: '', column: { name: 'email', type: 'text', primaryKey: false, notNull: false }, }); }); test('alter column change name #1', async (t) => { const schema1 = { users: pgTable('users', { id: serial('id').primaryKey(), name: text('name'), }), }; const schema2 = { users: pgTable('users', { id: serial('id').primaryKey(), name: text('name1'), }), }; const { statements } = await diffTestSchemas(schema1, schema2, [ 'public.users.name->public.users.name1', ]); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_rename_column', tableName: 'users', schema: '', oldColumnName: 'name', newColumnName: 'name1', }); }); test('alter column change name #2', async (t) => { const schema1 = { users: pgTable('users', { id: serial('id').primaryKey(), name: text('name'), }), }; const schema2 = { users: pgTable('users', { id: serial('id').primaryKey(), name: text('name1'), email: text('email'), }), }; const { statements } = await diffTestSchemas(schema1, schema2, [ 'public.users.name->public.users.name1', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'alter_table_rename_column', tableName: 'users', schema: '', oldColumnName: 'name', newColumnName: 'name1', }); expect(statements[1]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'users', schema: '', column: { name: 'email', notNull: false, primaryKey: false, type: 'text', }, }); }); test('alter table add composite pk', async (t) => { const schema1 = { table: pgTable('table', { id1: integer('id1'), id2: integer('id2'), }), }; const schema2 = { table: pgTable( 'table', { id1: integer('id1'), id2: integer('id2'), }, (t) => { return { pk: primaryKey({ columns: [t.id1, t.id2] }), }; }, ), }; const { statements, sqlStatements } = await diffTestSchemas( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_composite_pk', tableName: 'table', data: 'id1,id2;table_id1_id2_pk', schema: '', constraintName: 'table_id1_id2_pk', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( 'ALTER TABLE "table" ADD CONSTRAINT "table_id1_id2_pk" PRIMARY KEY("id1","id2");', ); }); test('rename table rename column #1', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id'), }), }; const schema2 = { users: pgTable('users1', { id: integer('id1'), }), }; const { statements } = await diffTestSchemas(schema1, schema2, [ 'public.users->public.users1', 'public.users1.id->public.users1.id1', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'rename_table', tableNameFrom: 'users', tableNameTo: 'users1', fromSchema: '', toSchema: '', }); expect(statements[1]).toStrictEqual({ type: 'alter_table_rename_column', oldColumnName: 'id', newColumnName: 'id1', schema: '', tableName: 'users1', }); }); test('with composite pks #1', async (t) => { const schema1 = { users: pgTable( 'users', { id1: integer('id1'), id2: integer('id2'), }, (t) => { return { pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), }; }, ), }; const schema2 = { users: pgTable( 'users', { id1: integer('id1'), id2: integer('id2'), text: text('text'), }, (t) => { return { pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), }; }, ), }; const { statements } = await diffTestSchemas(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'users', schema: '', column: { name: 'text', notNull: false, primaryKey: false, type: 'text', }, }); }); test('with composite pks #2', async (t) => { const schema1 = { users: pgTable('users', { id1: integer('id1'), id2: integer('id2'), }), }; const schema2 = { users: pgTable( 'users', { id1: integer('id1'), id2: integer('id2'), }, (t) => { return { pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), }; }, ), }; const { statements } = await diffTestSchemas(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_composite_pk', tableName: 'users', schema: '', constraintName: 'compositePK', data: 'id1,id2;compositePK', }); }); test('with composite pks #3', async (t) => { const schema1 = { users: pgTable( 'users', { id1: integer('id1'), id2: integer('id2'), }, (t) => { return { pk: primaryKey({ columns: [t.id1, t.id2], name: 'compositePK' }), }; }, ), }; const schema2 = { users: pgTable( 'users', { id1: integer('id1'), id3: integer('id3'), }, (t) => { return { pk: primaryKey({ columns: [t.id1, t.id3], name: 'compositePK' }), }; }, ), }; // TODO: remove redundand drop/create create constraint const { statements } = await diffTestSchemas(schema1, schema2, [ 'public.users.id2->public.users.id3', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'alter_table_rename_column', tableName: 'users', schema: '', newColumnName: 'id3', oldColumnName: 'id2', }); expect(statements[1]).toStrictEqual({ type: 'alter_composite_pk', tableName: 'users', schema: '', new: 'id1,id3;compositePK', old: 'id1,id2;compositePK', newConstraintName: 'compositePK', oldConstraintName: 'compositePK', }); }); test('add multiple constraints #1', async (t) => { const t1 = pgTable('t1', { id: uuid('id').primaryKey().defaultRandom(), }); const t2 = pgTable('t2', { id: uuid('id').primaryKey().defaultRandom(), }); const t3 = pgTable('t3', { id: uuid('id').primaryKey().defaultRandom(), }); const schema1 = { t1, t2, t3, ref1: pgTable('ref1', { id1: uuid('id1').references(() => t1.id), id2: uuid('id2').references(() => t2.id), id3: uuid('id3').references(() => t3.id), }), }; const schema2 = { t1, t2, t3, ref1: pgTable('ref1', { id1: uuid('id1').references(() => t1.id, { onDelete: 'cascade' }), id2: uuid('id2').references(() => t2.id, { onDelete: 'set null' }), id3: uuid('id3').references(() => t3.id, { onDelete: 'cascade' }), }), }; // TODO: remove redundand drop/create create constraint const { statements } = await diffTestSchemas(schema1, schema2, []); expect(statements.length).toBe(6); }); test('add multiple constraints #2', async (t) => { const t1 = pgTable('t1', { id1: uuid('id1').primaryKey().defaultRandom(), id2: uuid('id2').primaryKey().defaultRandom(), id3: uuid('id3').primaryKey().defaultRandom(), }); const schema1 = { t1, ref1: pgTable('ref1', { id1: uuid('id1').references(() => t1.id1), id2: uuid('id2').references(() => t1.id2), id3: uuid('id3').references(() => t1.id3), }), }; const schema2 = { t1, ref1: pgTable('ref1', { id1: uuid('id1').references(() => t1.id1, { onDelete: 'cascade' }), id2: uuid('id2').references(() => t1.id2, { onDelete: 'set null' }), id3: uuid('id3').references(() => t1.id3, { onDelete: 'cascade' }), }), }; // TODO: remove redundand drop/create create constraint const { statements } = await diffTestSchemas(schema1, schema2, []); expect(statements.length).toBe(6); }); test('add multiple constraints #3', async (t) => { const t1 = pgTable('t1', { id1: uuid('id1').primaryKey().defaultRandom(), id2: uuid('id2').primaryKey().defaultRandom(), id3: uuid('id3').primaryKey().defaultRandom(), }); const schema1 = { t1, ref1: pgTable('ref1', { id: uuid('id').references(() => t1.id1), }), ref2: pgTable('ref2', { id: uuid('id').references(() => t1.id2), }), ref3: pgTable('ref3', { id: uuid('id').references(() => t1.id3), }), }; const schema2 = { t1, ref1: pgTable('ref1', { id: uuid('id').references(() => t1.id1, { onDelete: 'cascade' }), }), ref2: pgTable('ref2', { id: uuid('id').references(() => t1.id2, { onDelete: 'set null' }), }), ref3: pgTable('ref3', { id: uuid('id').references(() => t1.id3, { onDelete: 'cascade' }), }), }; // TODO: remove redundand drop/create create constraint const { statements } = await diffTestSchemas(schema1, schema2, []); expect(statements.length).toBe(6); }); test('varchar and text default values escape single quotes', async (t) => { const schema1 = { table: pgTable('table', { id: serial('id').primaryKey(), }), }; const schem2 = { table: pgTable('table', { id: serial('id').primaryKey(), text: text('text').default("escape's quotes"), varchar: varchar('varchar').default("escape's quotes"), }), }; const { sqlStatements } = await diffTestSchemas(schema1, schem2, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toStrictEqual( 'ALTER TABLE "table" ADD COLUMN "text" text DEFAULT \'escape\'\'s quotes\';', ); expect(sqlStatements[1]).toStrictEqual( 'ALTER TABLE "table" ADD COLUMN "varchar" varchar DEFAULT \'escape\'\'s quotes\';', ); }); ================================================ FILE: drizzle-kit/tests/pg-enums.test.ts ================================================ import { integer, pgEnum, pgSchema, pgTable, serial, text, varchar } from 'drizzle-orm/pg-core'; import { expect, test } from 'vitest'; import { diffTestSchemas } from './schemaDiffer'; test('enums #1', async () => { const to = { enum: pgEnum('enum', ['value']), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value');`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'enum', schema: 'public', type: 'create_type_enum', values: ['value'], }); }); test('enums #2', async () => { const folder = pgSchema('folder'); const to = { enum: folder.enum('enum', ['value']), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE TYPE "folder"."enum" AS ENUM('value');`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'enum', schema: 'folder', type: 'create_type_enum', values: ['value'], }); }); test('enums #3', async () => { const from = { enum: pgEnum('enum', ['value']), }; const { statements, sqlStatements } = await diffTestSchemas(from, {}, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`DROP TYPE "public"."enum";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'drop_type_enum', name: 'enum', schema: 'public', }); }); test('enums #4', async () => { const folder = pgSchema('folder'); const from = { enum: folder.enum('enum', ['value']), }; const { statements, sqlStatements } = await diffTestSchemas(from, {}, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`DROP TYPE "folder"."enum";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'drop_type_enum', name: 'enum', schema: 'folder', }); }); test('enums #5', async () => { const folder1 = pgSchema('folder1'); const folder2 = pgSchema('folder2'); const from = { folder1, enum: folder1.enum('enum', ['value']), }; const to = { folder2, enum: folder2.enum('enum', ['value']), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['folder1->folder2']); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER SCHEMA "folder1" RENAME TO "folder2";\n`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_schema', from: 'folder1', to: 'folder2', }); }); test('enums #6', async () => { const folder1 = pgSchema('folder1'); const folder2 = pgSchema('folder2'); const from = { folder1, folder2, enum: folder1.enum('enum', ['value']), }; const to = { folder1, folder2, enum: folder2.enum('enum', ['value']), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'folder1.enum->folder2.enum', ]); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum" SET SCHEMA "folder2";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', name: 'enum', schemaFrom: 'folder1', schemaTo: 'folder2', }); }); test('enums #7', async () => { const from = { enum: pgEnum('enum', ['value1']), }; const to = { enum: pgEnum('enum', ['value1', 'value2']), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value2';`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_type_add_value', name: 'enum', schema: 'public', value: 'value2', before: '', }); }); test('enums #8', async () => { const from = { enum: pgEnum('enum', ['value1']), }; const to = { enum: pgEnum('enum', ['value1', 'value2', 'value3']), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value2';`); expect(sqlStatements[1]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value3';`); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'alter_type_add_value', name: 'enum', schema: 'public', value: 'value2', before: '', }); expect(statements[1]).toStrictEqual({ type: 'alter_type_add_value', name: 'enum', schema: 'public', value: 'value3', before: '', }); }); test('enums #9', async () => { const from = { enum: pgEnum('enum', ['value1', 'value3']), }; const to = { enum: pgEnum('enum', ['value1', 'value2', 'value3']), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" ADD VALUE 'value2' BEFORE 'value3';`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_type_add_value', name: 'enum', schema: 'public', value: 'value2', before: 'value3', }); }); test('enums #10', async () => { const schema = pgSchema('folder'); const from = { enum: schema.enum('enum', ['value1']), }; const to = { enum: schema.enum('enum', ['value1', 'value2']), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER TYPE "folder"."enum" ADD VALUE 'value2';`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_type_add_value', name: 'enum', schema: 'folder', value: 'value2', before: '', }); }); test('enums #11', async () => { const schema1 = pgSchema('folder1'); const from = { enum: schema1.enum('enum', ['value1']), }; const to = { enum: pgEnum('enum', ['value1']), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'folder1.enum->public.enum', ]); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum" SET SCHEMA "public";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', name: 'enum', schemaFrom: 'folder1', schemaTo: 'public', }); }); test('enums #12', async () => { const schema1 = pgSchema('folder1'); const from = { enum: pgEnum('enum', ['value1']), }; const to = { enum: schema1.enum('enum', ['value1']), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'public.enum->folder1.enum', ]); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum" SET SCHEMA "folder1";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', name: 'enum', schemaFrom: 'public', schemaTo: 'folder1', }); }); test('enums #13', async () => { const from = { enum: pgEnum('enum1', ['value1']), }; const to = { enum: pgEnum('enum2', ['value1']), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'public.enum1->public.enum2', ]); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum1" RENAME TO "enum2";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_type_enum', nameFrom: 'enum1', nameTo: 'enum2', schema: 'public', }); }); test('enums #14', async () => { const folder1 = pgSchema('folder1'); const folder2 = pgSchema('folder2'); const from = { enum: folder1.enum('enum1', ['value1']), }; const to = { enum: folder2.enum('enum2', ['value1']), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'folder1.enum1->folder2.enum2', ]); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum1" SET SCHEMA "folder2";`); expect(sqlStatements[1]).toBe(`ALTER TYPE "folder2"."enum1" RENAME TO "enum2";`); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', name: 'enum1', schemaFrom: 'folder1', schemaTo: 'folder2', }); expect(statements[1]).toStrictEqual({ type: 'rename_type_enum', nameFrom: 'enum1', nameTo: 'enum2', schema: 'folder2', }); }); test('enums #15', async () => { const folder1 = pgSchema('folder1'); const folder2 = pgSchema('folder2'); const from = { enum: folder1.enum('enum1', ['value1', 'value4']), }; const to = { enum: folder2.enum('enum2', ['value1', 'value2', 'value3', 'value4']), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'folder1.enum1->folder2.enum2', ]); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe(`ALTER TYPE "folder1"."enum1" SET SCHEMA "folder2";`); expect(sqlStatements[1]).toBe(`ALTER TYPE "folder2"."enum1" RENAME TO "enum2";`); expect(sqlStatements[2]).toBe(`ALTER TYPE "folder2"."enum2" ADD VALUE 'value2' BEFORE 'value4';`); expect(sqlStatements[3]).toBe(`ALTER TYPE "folder2"."enum2" ADD VALUE 'value3' BEFORE 'value4';`); expect(statements.length).toBe(4); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', name: 'enum1', schemaFrom: 'folder1', schemaTo: 'folder2', }); expect(statements[1]).toStrictEqual({ type: 'rename_type_enum', nameFrom: 'enum1', nameTo: 'enum2', schema: 'folder2', }); expect(statements[2]).toStrictEqual({ type: 'alter_type_add_value', name: 'enum2', schema: 'folder2', value: 'value2', before: 'value4', }); expect(statements[3]).toStrictEqual({ type: 'alter_type_add_value', name: 'enum2', schema: 'folder2', value: 'value3', before: 'value4', }); }); test('enums #16', async () => { const enum1 = pgEnum('enum1', ['value1']); const enum2 = pgEnum('enum2', ['value1']); const from = { enum1, table: pgTable('table', { column: enum1('column'), }), }; const to = { enum2, table: pgTable('table', { column: enum2('column'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'public.enum1->public.enum2', ]); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum1" RENAME TO "enum2";`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_type_enum', nameFrom: 'enum1', nameTo: 'enum2', schema: 'public', }); }); test('enums #17', async () => { const schema = pgSchema('schema'); const enum1 = pgEnum('enum1', ['value1']); const enum2 = schema.enum('enum1', ['value1']); const from = { enum1, table: pgTable('table', { column: enum1('column'), }), }; const to = { enum2, table: pgTable('table', { column: enum2('column'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'public.enum1->schema.enum1', ]); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER TYPE "public"."enum1" SET SCHEMA "schema";`); expect(sqlStatements.length).toBe(1); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', name: 'enum1', schemaFrom: 'public', schemaTo: 'schema', }); }); test('enums #18', async () => { const schema1 = pgSchema('schema1'); const schema2 = pgSchema('schema2'); const enum1 = schema1.enum('enum1', ['value1']); const enum2 = schema2.enum('enum2', ['value1']); const from = { enum1, table: pgTable('table', { column: enum1('column'), }), }; const to = { enum2, table: pgTable('table', { column: enum2('column'), }), }; // change name and schema of the enum, no table changes const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'schema1.enum1->schema2.enum2', ]); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`ALTER TYPE "schema1"."enum1" SET SCHEMA "schema2";`); expect(sqlStatements[1]).toBe(`ALTER TYPE "schema2"."enum1" RENAME TO "enum2";`); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'move_type_enum', name: 'enum1', schemaFrom: 'schema1', schemaTo: 'schema2', }); expect(statements[1]).toStrictEqual({ type: 'rename_type_enum', nameFrom: 'enum1', nameTo: 'enum2', schema: 'schema2', }); }); test('enums #19', async () => { const myEnum = pgEnum('my_enum', ["escape's quotes"]); const from = {}; const to = { myEnum }; const { sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toStrictEqual( 'CREATE TYPE "public"."my_enum" AS ENUM(\'escape\'\'s quotes\');', ); }); test('enums #20', async () => { const myEnum = pgEnum('my_enum', ['one', 'two', 'three']); const from = { myEnum, table: pgTable('table', { id: serial('id').primaryKey(), }), }; const to = { myEnum, table: pgTable('table', { id: serial('id').primaryKey(), col1: myEnum('col1'), col2: integer('col2'), }), }; const { sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "table" ADD COLUMN "col1" "my_enum";', 'ALTER TABLE "table" ADD COLUMN "col2" integer;', ]); }); test('enums #21', async () => { const myEnum = pgEnum('my_enum', ['one', 'two', 'three']); const from = { myEnum, table: pgTable('table', { id: serial('id').primaryKey(), }), }; const to = { myEnum, table: pgTable('table', { id: serial('id').primaryKey(), col1: myEnum('col1').array(), col2: integer('col2').array(), }), }; const { sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "table" ADD COLUMN "col1" "my_enum"[];', 'ALTER TABLE "table" ADD COLUMN "col2" integer[];', ]); }); test('drop enum value', async () => { const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); const from = { enum1, }; const enum2 = pgEnum('enum', ['value1', 'value3']); const to = { enum2, }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`DROP TYPE "public"."enum";`); expect(sqlStatements[1]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3');`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [], deletedValues: [ 'value2', ], name: 'enum', newValues: [ 'value1', 'value3', ], enumSchema: 'public', type: 'alter_type_drop_value', }); }); test('drop enum value. enum is columns data type', async () => { const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); const schema = pgSchema('new_schema'); const from = { schema, enum1, table: pgTable('table', { column: enum1('column'), }), table2: schema.table('table', { column: enum1('column'), }), }; const enum2 = pgEnum('enum', ['value1', 'value3']); const to = { schema, enum2, table: pgTable('table', { column: enum1('column'), }), table2: schema.table('table', { column: enum1('column'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3');`); expect(sqlStatements[4]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: '', table: 'table', default: undefined, columnType: 'enum', }, { column: 'column', tableSchema: 'new_schema', table: 'table', default: undefined, columnType: 'enum', }, ], deletedValues: [ 'value2', ], name: 'enum', newValues: [ 'value1', 'value3', ], enumSchema: 'public', type: 'alter_type_drop_value', }); }); test('shuffle enum values', async () => { const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); const schema = pgSchema('new_schema'); const from = { schema, enum1, table: pgTable('table', { column: enum1('column'), }), table2: schema.table('table', { column: enum1('column'), }), }; const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); const to = { schema, enum2, table: pgTable('table', { column: enum2('column'), }), table2: schema.table('table', { column: enum2('column'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[4]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: '', table: 'table', default: undefined, columnType: 'enum', }, { column: 'column', tableSchema: 'new_schema', table: 'table', columnType: 'enum', default: undefined, }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'public', type: 'alter_type_drop_value', }); }); test('enums as ts enum', async () => { enum Test { value = 'value', } const to = { enum: pgEnum('enum', Test), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value');`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'enum', schema: 'public', type: 'create_type_enum', values: ['value'], }); }); // + test('column is enum type with default value. shuffle enum', async () => { const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column').default('value2'), }), }; const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); const to = { enum2, table: pgTable('table', { column: enum2('column').default('value2'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::text;`); expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[4]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::"public"."enum";`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: '', table: 'table', default: "'value2'", columnType: 'enum', }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'public', type: 'alter_type_drop_value', }); }); // + test('column is array enum type with default value. shuffle enum', async () => { const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column').array().default(['value2']), }), }; const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); const to = { enum2, table: pgTable('table', { column: enum2('column').array().default(['value3']), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value3"}'::text;`); expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[4]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value3"}'::"public"."enum"[];`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[] USING "column"::"public"."enum"[];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: '', table: 'table', default: `'{"value3"}'`, columnType: 'enum[]', }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'public', type: 'alter_type_drop_value', }); }); // + test('column is array enum with custom size type with default value. shuffle enum', async () => { const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column').array(3).default(['value2']), }), }; const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); const to = { enum2, table: pgTable('table', { column: enum2('column').array(3).default(['value2']), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::text;`); expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[4]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::"public"."enum"[3];`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[3] USING "column"::"public"."enum"[3];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: '', table: 'table', default: `'{"value2"}'`, columnType: 'enum[3]', }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'public', type: 'alter_type_drop_value', }); }); // + test('column is array enum with custom size type. shuffle enum', async () => { const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column').array(3), }), }; const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); const to = { enum2, table: pgTable('table', { column: enum2('column').array(3), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`DROP TYPE "public"."enum";`); expect(sqlStatements[2]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[3]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[3] USING "column"::"public"."enum"[3];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: '', table: 'table', default: undefined, columnType: 'enum[3]', }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'public', type: 'alter_type_drop_value', }); }); // + test('column is array of enum with multiple dimenions with custom sizes type. shuffle enum', async () => { const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column').array(3).array(2), }), }; const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); const to = { enum2, table: pgTable('table', { column: enum2('column').array(3).array(2), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`DROP TYPE "public"."enum";`); expect(sqlStatements[2]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[3]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[3][2] USING "column"::"public"."enum"[3][2];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: '', table: 'table', default: undefined, columnType: 'enum[3][2]', }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'public', type: 'alter_type_drop_value', }); }); // + test('column is array of enum with multiple dimenions type with custom size with default value. shuffle enum', async () => { const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column').array(3).array(2).default([['value2']]), }), }; const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); const to = { enum2, table: pgTable('table', { column: enum2('column').array(3).array(2).default([['value2']]), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{{"value2"}}'::text;`); expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[4]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{{"value2"}}'::"public"."enum"[3][2];`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[3][2] USING "column"::"public"."enum"[3][2];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: '', table: 'table', default: `'{{\"value2\"}}'`, columnType: 'enum[3][2]', }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'public', type: 'alter_type_drop_value', }); }); // + test('column is enum type with default value. custom schema. shuffle enum', async () => { const schema = pgSchema('new_schema'); const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); const from = { schema, enum1, table: pgTable('table', { column: enum1('column').default('value2'), }), }; const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); const to = { schema, enum2, table: pgTable('table', { column: enum2('column').default('value2'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::text;`); expect(sqlStatements[2]).toBe(`DROP TYPE "new_schema"."enum";`); expect(sqlStatements[3]).toBe(`CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[4]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::"new_schema"."enum";`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum" USING "column"::"new_schema"."enum";`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: '', table: 'table', default: "'value2'", columnType: 'enum', }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'new_schema', type: 'alter_type_drop_value', }); }); // + test('column is array enum type with default value. custom schema. shuffle enum', async () => { const schema = pgSchema('new_schema'); const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); const from = { enum1, table: schema.table('table', { column: enum1('column').array().default(['value2']), }), }; const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); const to = { enum2, table: schema.table('table', { column: enum2('column').array().default(['value2']), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe( `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::text;`, ); expect(sqlStatements[2]).toBe(`DROP TYPE "new_schema"."enum";`); expect(sqlStatements[3]).toBe(`CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[4]).toBe( `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::"new_schema"."enum"[];`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum"[] USING "column"::"new_schema"."enum"[];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: 'new_schema', table: 'table', default: `'{"value2"}'`, columnType: 'enum[]', }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'new_schema', type: 'alter_type_drop_value', }); }); // + test('column is array enum type with custom size with default value. custom schema. shuffle enum', async () => { const schema = pgSchema('new_schema'); const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); const from = { enum1, table: schema.table('table', { column: enum1('column').array(3).default(['value2']), }), }; const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); const to = { enum2, table: schema.table('table', { column: enum2('column').array(3).default(['value2']), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe( `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::text;`, ); expect(sqlStatements[2]).toBe(`DROP TYPE "new_schema"."enum";`); expect(sqlStatements[3]).toBe(`CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[4]).toBe( `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DEFAULT '{"value2"}'::"new_schema"."enum"[3];`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum"[3] USING "column"::"new_schema"."enum"[3];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: 'new_schema', table: 'table', default: `'{"value2"}'`, columnType: 'enum[3]', }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'new_schema', type: 'alter_type_drop_value', }); }); // + test('column is array enum type with custom size. custom schema. shuffle enum', async () => { const schema = pgSchema('new_schema'); const enum1 = schema.enum('enum', ['value1', 'value2', 'value3']); const from = { enum1, table: schema.table('table', { column: enum1('column').array(3), }), }; const enum2 = schema.enum('enum', ['value1', 'value3', 'value2']); const to = { enum2, table: schema.table('table', { column: enum2('column').array(3), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe(`ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`DROP TYPE "new_schema"."enum";`); expect(sqlStatements[2]).toBe(`CREATE TYPE "new_schema"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[3]).toBe( `ALTER TABLE "new_schema"."table" ALTER COLUMN "column" SET DATA TYPE "new_schema"."enum"[3] USING "column"::"new_schema"."enum"[3];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: 'new_schema', table: 'table', default: undefined, columnType: 'enum[3]', }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'new_schema', type: 'alter_type_drop_value', }); }); // + test('column is enum type without default value. add default to column', async () => { const enum1 = pgEnum('enum', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column'), }), }; const enum2 = pgEnum('enum', ['value1', 'value3']); const to = { enum2, table: pgTable('table', { column: enum2('column').default('value3'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value3';`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'enum', newDefaultValue: "'value3'", schema: '', tableName: 'table', type: 'alter_table_alter_column_set_default', }); }); // + test('change data type from standart type to enum', async () => { const enum1 = pgEnum('enum', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: varchar('column'), }), }; const to = { enum1, table: pgTable('table', { column: enum1('column'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: undefined, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: true, name: 'enum', }, oldDataType: { isEnum: false, name: 'varchar', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: 'public', }); }); // + test('change data type from standart type to enum. column has default', async () => { const enum1 = pgEnum('enum', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: varchar('column').default('value2'), }), }; const to = { enum1, table: pgTable('table', { column: enum1('column').default('value3'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value3'::"public"."enum";`); expect(sqlStatements[1]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: "'value3'", columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: true, name: 'enum', }, oldDataType: { isEnum: false, name: 'varchar', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: 'public', }); }); // + test('change data type from array standart type to array enum. column has default', async () => { const enum1 = pgEnum('enum', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: varchar('column').array().default(['value2']), }), }; const to = { enum1, table: pgTable('table', { column: enum1('column').array().default(['value3']), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value3"}'::"public"."enum"[];`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[] USING "column"::"public"."enum"[];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: `'{"value3"}'`, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: true, name: 'enum[]', }, oldDataType: { isEnum: false, name: 'varchar[]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: 'public', }); }); // + test('change data type from array standart type to array enum. column without default', async () => { const enum1 = pgEnum('enum', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: varchar('column').array(), }), }; const to = { enum1, table: pgTable('table', { column: enum1('column').array(), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[] USING "column"::"public"."enum"[];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: undefined, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: true, name: 'enum[]', }, oldDataType: { isEnum: false, name: 'varchar[]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: 'public', }); }); // + test('change data type from array standart type with custom size to array enum with custom size. column has default', async () => { const enum1 = pgEnum('enum', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: varchar('column').array(3).default(['value2']), }), }; const to = { enum1, table: pgTable('table', { column: enum1('column').array(3).default(['value3']), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value3"}'::"public"."enum"[3];`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[3] USING "column"::"public"."enum"[3];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: `'{"value3"}'`, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: true, name: 'enum[3]', }, oldDataType: { isEnum: false, name: 'varchar[3]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: 'public', }); }); // + test('change data type from array standart type with custom size to array enum with custom size. column without default', async () => { const enum1 = pgEnum('enum', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: varchar('column').array(2), }), }; const to = { enum1, table: pgTable('table', { column: enum1('column').array(2), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum"[2] USING "column"::"public"."enum"[2];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: undefined, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: true, name: 'enum[2]', }, oldDataType: { isEnum: false, name: 'varchar[2]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: 'public', }); }); // + test('change data type from enum type to standart type', async () => { const enum1 = pgEnum('enum', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column'), }), }; const to = { enum1, table: pgTable('table', { column: varchar('column'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar;`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: undefined, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'varchar', }, oldDataType: { isEnum: true, name: 'enum', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // + test('change data type from enum type to standart type. column has default', async () => { const enum1 = pgEnum('enum', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column').default('value3'), }), }; const to = { enum1, table: pgTable('table', { column: varchar('column').default('value2'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar;`, ); expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2';`); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: "'value2'", columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'varchar', }, oldDataType: { isEnum: true, name: 'enum', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // + test('change data type from array enum type to array standart type', async () => { const enum1 = pgEnum('enum', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column').array(), }), }; const to = { enum1, table: pgTable('table', { column: varchar('column').array(), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: undefined, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'varchar[]', }, oldDataType: { isEnum: true, name: 'enum[]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // + test('change data type from array enum with custom size type to array standart type with custom size', async () => { const enum1 = pgEnum('enum', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column').array(2), }), }; const to = { enum1, table: pgTable('table', { column: varchar('column').array(2), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[2];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: undefined, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'varchar[2]', }, oldDataType: { isEnum: true, name: 'enum[2]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // test('change data type from array enum type to array standart type. column has default', async () => { const enum1 = pgEnum('enum', ['value1', 'value2']); const from = { enum1, table: pgTable('table', { column: enum1('column').array().default(['value2']), }), }; const to = { enum1, table: pgTable('table', { column: varchar('column').array().default(['value2']), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[];`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value2"}';`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: `'{"value2"}'`, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'varchar[]', }, oldDataType: { isEnum: true, name: 'enum[]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // + test('change data type from array enum type with custom size to array standart type with custom size. column has default', async () => { const enum1 = pgEnum('enum', ['value1', 'value2']); const from = { enum1, table: pgTable('table', { column: enum1('column').array(3).default(['value2']), }), }; const to = { enum1, table: pgTable('table', { column: varchar('column').array(3).default(['value2']), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE varchar[3];`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"value2"}';`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: `'{"value2"}'`, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'varchar[3]', }, oldDataType: { isEnum: true, name: 'enum[3]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // + test('change data type from standart type to standart type', async () => { const from = { table: pgTable('table', { column: varchar('column'), }), }; const to = { table: pgTable('table', { column: text('column'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: undefined, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'text', }, oldDataType: { isEnum: false, name: 'varchar', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // + test('change data type from standart type to standart type. column has default', async () => { const from = { table: pgTable('table', { column: varchar('column').default('value3'), }), }; const to = { table: pgTable('table', { column: text('column').default('value2'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2';`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: "'value2'", columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'text', }, oldDataType: { isEnum: false, name: 'varchar', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // + test('change data type from standart type to standart type. columns are arrays', async () => { const from = { table: pgTable('table', { column: varchar('column').array(), }), }; const to = { table: pgTable('table', { column: text('column').array(), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: undefined, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'text[]', }, oldDataType: { isEnum: false, name: 'varchar[]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // + test('change data type from standart type to standart type. columns are arrays with custom sizes', async () => { const from = { table: pgTable('table', { column: varchar('column').array(2), }), }; const to = { table: pgTable('table', { column: text('column').array(2), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[2];`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: undefined, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'text[2]', }, oldDataType: { isEnum: false, name: 'varchar[2]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // + test('change data type from standart type to standart type. columns are arrays. column has default', async () => { const from = { table: pgTable('table', { column: varchar('column').array().default(['hello']), }), }; const to = { table: pgTable('table', { column: text('column').array().default(['hello']), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[];`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"hello"}';`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: `'{"hello"}'`, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'text[]', }, oldDataType: { isEnum: false, name: 'varchar[]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // + test('change data type from standart type to standart type. columns are arrays with custom sizes.column has default', async () => { const from = { table: pgTable('table', { column: varchar('column').array(2).default(['hello']), }), }; const to = { table: pgTable('table', { column: text('column').array(2).default(['hello']), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text[2];`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT '{"hello"}';`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: `'{"hello"}'`, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: false, name: 'text[2]', }, oldDataType: { isEnum: false, name: 'varchar[2]', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: undefined, }); }); // + test('change data type from one enum to other', async () => { const enum1 = pgEnum('enum1', ['value1', 'value3']); const enum2 = pgEnum('enum2', ['value1', 'value3']); const from = { enum1, enum2, table: pgTable('table', { column: enum1('column'), }), }; const to = { enum1, enum2, table: pgTable('table', { column: enum2('column'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum2" USING "column"::text::"public"."enum2";`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: undefined, columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: true, name: 'enum2', }, oldDataType: { isEnum: true, name: 'enum1', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: 'public', }); }); // + test('change data type from one enum to other. column has default', async () => { const enum1 = pgEnum('enum1', ['value1', 'value3']); const enum2 = pgEnum('enum2', ['value1', 'value3']); const from = { enum1, enum2, table: pgTable('table', { column: enum1('column').default('value3'), }), }; const to = { enum1, enum2, table: pgTable('table', { column: enum2('column').default('value3'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(3); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum2" USING "column"::text::"public"."enum2";`, ); expect(sqlStatements[2]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value3';`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: "'value3'", columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: true, name: 'enum2', }, oldDataType: { isEnum: true, name: 'enum1', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: 'public', }); }); // + test('change data type from one enum to other. changed defaults', async () => { const enum1 = pgEnum('enum1', ['value1', 'value3']); const enum2 = pgEnum('enum2', ['value1', 'value3']); const from = { enum1, enum2, table: pgTable('table', { column: enum1('column').default('value3'), }), }; const to = { enum1, enum2, table: pgTable('table', { column: enum2('column').default('value1'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(3); expect(sqlStatements[0]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" DROP DEFAULT;`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum2" USING "column"::text::"public"."enum2";`, ); expect(sqlStatements[2]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value1';`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: "'value1'", columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: true, name: 'enum2', }, oldDataType: { isEnum: true, name: 'enum1', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: 'public', }); }); test('check filtering json statements. here we have recreate enum + set new type + alter default', async () => { const enum1 = pgEnum('enum1', ['value1', 'value3']); const from = { enum1, table: pgTable('table', { column: varchar('column').default('value3'), }), }; const enum2 = pgEnum('enum1', ['value3', 'value1', 'value2']); const to = { enum2, table: pgTable('table', { column: enum2('column').default('value2'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::text;`); expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum1";`); expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum1" AS ENUM('value3', 'value1', 'value2');`); expect(sqlStatements[4]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::"public"."enum1";`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum1" USING "column"::"public"."enum1";`, ); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', columnType: 'enum1', default: "'value2'", table: 'table', tableSchema: '', }, ], deletedValues: [ 'value3', ], enumSchema: 'public', name: 'enum1', newValues: [ 'value3', 'value1', 'value2', ], type: 'alter_type_drop_value', }); expect(statements[1]).toStrictEqual({ columnAutoIncrement: undefined, columnDefault: "'value2'", columnName: 'column', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: { isEnum: true, name: 'enum1', }, oldDataType: { isEnum: false, name: 'varchar', }, schema: '', tableName: 'table', type: 'pg_alter_table_alter_column_set_type', typeSchema: 'public', }); }); ================================================ FILE: drizzle-kit/tests/pg-generated.test.ts ================================================ // test cases import { SQL, sql } from 'drizzle-orm'; import { integer, pgTable, text } from 'drizzle-orm/pg-core'; import { expect, test } from 'vitest'; import { diffTestSchemas } from './schemaDiffer'; test('generated as callback: add column with generated constraint', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { column: { generated: { as: '"users"."name" || \'hello\'', type: 'stored', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ `ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, ]); }); test('generated as callback: add generated constraint to an exisiting column', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnAutoIncrement: undefined, columnDefault: undefined, columnGenerated: { as: '"users"."name" || \'to add\'', type: 'stored' }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" drop column "gen_name";', 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED NOT NULL;', ]); }); test('generated as callback: drop generated constraint', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name} || 'to delete'`, ), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnAutoIncrement: undefined, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ `ALTER TABLE \"users\" ALTER COLUMN \"gen_name\" DROP EXPRESSION;`, ]); }); test('generated as callback: change generated constraint', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, ), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnAutoIncrement: undefined, columnDefault: undefined, columnGenerated: { as: '"users"."name" || \'hello\'', type: 'stored' }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" drop column "gen_name";', 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', ]); }); // --- test('generated as sql: add column with generated constraint', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\"users\".\"name\" || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { column: { generated: { as: '"users"."name" || \'hello\'', type: 'stored', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ `ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, ]); }); test('generated as sql: add generated constraint to an exisiting column', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(sql`\"users\".\"name\" || 'to add'`), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnAutoIncrement: undefined, columnDefault: undefined, columnGenerated: { as: '"users"."name" || \'to add\'', type: 'stored' }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" drop column "gen_name";', 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED NOT NULL;', ]); }); test('generated as sql: drop generated constraint', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\"users\".\"name\" || 'to delete'`, ), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnAutoIncrement: undefined, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ `ALTER TABLE \"users\" ALTER COLUMN \"gen_name\" DROP EXPRESSION;`, ]); }); test('generated as sql: change generated constraint', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\"users\".\"name\"`, ), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\"users\".\"name\" || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnAutoIncrement: undefined, columnDefault: undefined, columnGenerated: { as: '"users"."name" || \'hello\'', type: 'stored' }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" drop column "gen_name";', 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', ]); }); // --- test('generated as string: add column with generated constraint', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\"users\".\"name\" || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { column: { generated: { as: '"users"."name" || \'hello\'', type: 'stored', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ `ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS (\"users\".\"name\" || 'hello') STORED;`, ]); }); test('generated as string: add generated constraint to an exisiting column', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(`\"users\".\"name\" || 'to add'`), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnAutoIncrement: undefined, columnDefault: undefined, columnGenerated: { as: '"users"."name" || \'to add\'', type: 'stored' }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" drop column "gen_name";', 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'to add\') STORED NOT NULL;', ]); }); test('generated as string: drop generated constraint', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\"users\".\"name\" || 'to delete'`, ), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnAutoIncrement: undefined, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ `ALTER TABLE \"users\" ALTER COLUMN \"gen_name\" DROP EXPRESSION;`, ]); }); test('generated as string: change generated constraint', async () => { const from = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, ), }), }; const to = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\"users\".\"name\" || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnAutoIncrement: undefined, columnDefault: undefined, columnGenerated: { as: '"users"."name" || \'hello\'', type: 'stored' }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" drop column "gen_name";', 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED;', ]); }); ================================================ FILE: drizzle-kit/tests/pg-identity.test.ts ================================================ import { integer, pgSequence, pgTable } from 'drizzle-orm/pg-core'; import { expect, test } from 'vitest'; import { diffTestSchemas } from './schemaDiffer'; // same table - no diff // 2. identity always/by default - no params + // 3. identity always/by default - with a few params + // 4. identity always/by default - with all params + // diff table with create statement // 2. identity always/by default - no params + // 3. identity always/by default - with a few params + // 4. identity always/by default - with all params + // diff for drop statement // 2. identity always/by default - no params, with params + // diff for alters // 2. identity always/by default - no params -> add param + // 3. identity always/by default - with a few params - remove/add/change params + // 4. identity always/by default - with all params - remove/add/change params + test('create table: identity always/by default - no params', async () => { const from = {}; const to = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columns: [ { identity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', name: 'id', notNull: true, primaryKey: false, type: 'integer', }, ], compositePKs: [], compositePkName: '', schema: '', policies: [], isRLSEnabled: false, tableName: 'users', type: 'create_table', uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1)\n);\n', ]); }); test('create table: identity always/by default - few params', async () => { const from = {}; const to = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_seq', increment: 4, }), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columns: [ { identity: 'custom_seq;byDefault;1;2147483647;4;1;1;false', name: 'id', notNull: true, primaryKey: false, type: 'integer', }, ], compositePKs: [], compositePkName: '', policies: [], schema: '', isRLSEnabled: false, tableName: 'users', type: 'create_table', uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_seq" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1)\n);\n', ]); }); test('create table: identity always/by default - all params', async () => { const from = {}; const to = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_seq', increment: 4, minValue: 3, maxValue: 1000, cache: 200, cycle: false, }), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columns: [ { identity: 'custom_seq;byDefault;3;1000;4;3;200;false', name: 'id', notNull: true, primaryKey: false, type: 'integer', }, ], compositePKs: [], compositePkName: '', policies: [], isRLSEnabled: false, schema: '', tableName: 'users', type: 'create_table', uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_seq" INCREMENT BY 4 MINVALUE 3 MAXVALUE 1000 START WITH 3 CACHE 200)\n);\n', ]); }); test('no diff: identity always/by default - no params', async () => { const from = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), }), }; const to = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('no diff: identity always/by default - few params', async () => { const from = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_seq', increment: 4, }), }), }; const to = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_seq', increment: 4, }), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('no diff: identity always/by default - all params', async () => { const from = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_seq', increment: 4, minValue: 3, maxValue: 1000, cache: 200, cycle: false, }), }), }; const to = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_seq', increment: 4, minValue: 3, maxValue: 1000, cache: 200, cycle: false, }), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('drop identity from a column - no params', async () => { const from = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), }), }; const to = { users: pgTable('users', { id: integer('id'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnName: 'id', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_identity', }, ]); expect(sqlStatements).toStrictEqual([ `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, ]); }); test('drop identity from a column - few params', async () => { const from = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 100, increment: 3, }), }), }; const to = { users: pgTable('users', { id: integer('id'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnName: 'id', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_identity', }, ]); expect(sqlStatements).toStrictEqual([ `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, ]); }); test('drop identity from a column - all params', async () => { const from = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 100, increment: 3, cache: 100, cycle: true, }), }), }; const to = { users: pgTable('users', { id: integer('id'), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnName: 'id', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_identity', }, ]); expect(sqlStatements).toStrictEqual([ `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, ]); }); test('alter identity from a column - no params', async () => { const from = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), }), }; const to = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnName: 'id', identity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', schema: '', tableName: 'users', type: 'alter_table_alter_column_change_identity', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', ]); }); test('alter identity from a column - few params', async () => { const from = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), }), }; const to = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 100, cache: 10, }), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnName: 'id', identity: 'users_id_seq;byDefault;1;2147483647;1;100;10;false', oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', schema: '', tableName: 'users', type: 'alter_table_alter_column_change_identity', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', ]); }); test('alter identity from a column - by default to always', async () => { const from = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), }), }; const to = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity({ startWith: 100, cache: 10, }), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnName: 'id', identity: 'users_id_seq;always;1;2147483647;1;100;10;false', oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', schema: '', tableName: 'users', type: 'alter_table_alter_column_change_identity', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED ALWAYS;', 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', ]); }); test('alter identity from a column - always to by default', async () => { const from = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity(), }), }; const to = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 100, cache: 10, }), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { columnName: 'id', identity: 'users_id_seq;byDefault;1;2147483647;1;100;10;false', oldIdentity: 'users_id_seq;always;1;2147483647;1;1;1;false', schema: '', tableName: 'users', type: 'alter_table_alter_column_change_identity', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED BY DEFAULT;', 'ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;', 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 10;', ]); }); ================================================ FILE: drizzle-kit/tests/pg-schemas.test.ts ================================================ import { pgSchema } from 'drizzle-orm/pg-core'; import { expect, test } from 'vitest'; import { diffTestSchemas } from './schemaDiffer'; test('add schema #1', async () => { const to = { devSchema: pgSchema('dev'), }; const { statements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_schema', name: 'dev', }); }); test('add schema #2', async () => { const from = { devSchema: pgSchema('dev'), }; const to = { devSchema: pgSchema('dev'), devSchema2: pgSchema('dev2'), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_schema', name: 'dev2', }); }); test('delete schema #1', async () => { const from = { devSchema: pgSchema('dev'), }; const { statements } = await diffTestSchemas(from, {}, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'drop_schema', name: 'dev', }); }); test('delete schema #2', async () => { const from = { devSchema: pgSchema('dev'), devSchema2: pgSchema('dev2'), }; const to = { devSchema: pgSchema('dev'), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'drop_schema', name: 'dev2', }); }); test('rename schema #1', async () => { const from = { devSchema: pgSchema('dev'), }; const to = { devSchema2: pgSchema('dev2'), }; const { statements } = await diffTestSchemas(from, to, ['dev->dev2']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_schema', from: 'dev', to: 'dev2', }); }); test('rename schema #2', async () => { const from = { devSchema: pgSchema('dev'), devSchema1: pgSchema('dev1'), }; const to = { devSchema: pgSchema('dev'), devSchema2: pgSchema('dev2'), }; const { statements } = await diffTestSchemas(from, to, ['dev1->dev2']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_schema', from: 'dev1', to: 'dev2', }); }); ================================================ FILE: drizzle-kit/tests/pg-sequences.test.ts ================================================ import { pgSchema, pgSequence } from 'drizzle-orm/pg-core'; import { expect, test } from 'vitest'; import { diffTestSchemas } from './schemaDiffer'; test('create sequence', async () => { const from = {}; const to = { seq: pgSequence('name', { startWith: 100 }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { name: 'name', schema: 'public', type: 'create_sequence', values: { cache: '1', cycle: false, increment: '1', maxValue: '9223372036854775807', minValue: '1', startWith: '100', }, }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE SEQUENCE "public"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 100 CACHE 1;', ]); }); test('create sequence: all fields', async () => { const from = {}; const to = { seq: pgSequence('name', { startWith: 100, maxValue: 10000, minValue: 100, cycle: true, cache: 10, increment: 2, }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { type: 'create_sequence', name: 'name', schema: 'public', values: { startWith: '100', maxValue: '10000', minValue: '100', cycle: true, cache: '10', increment: '2', }, }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE SEQUENCE "public"."name" INCREMENT BY 2 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', ]); }); test('create sequence: custom schema', async () => { const customSchema = pgSchema('custom'); const from = {}; const to = { seq: customSchema.sequence('name', { startWith: 100 }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { name: 'name', schema: 'custom', type: 'create_sequence', values: { cache: '1', cycle: false, increment: '1', maxValue: '9223372036854775807', minValue: '1', startWith: '100', }, }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE SEQUENCE "custom"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 100 CACHE 1;', ]); }); test('create sequence: custom schema + all fields', async () => { const customSchema = pgSchema('custom'); const from = {}; const to = { seq: customSchema.sequence('name', { startWith: 100, maxValue: 10000, minValue: 100, cycle: true, cache: 10, increment: 2, }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { type: 'create_sequence', name: 'name', schema: 'custom', values: { startWith: '100', maxValue: '10000', minValue: '100', cycle: true, cache: '10', increment: '2', }, }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE SEQUENCE "custom"."name" INCREMENT BY 2 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', ]); }); test('drop sequence', async () => { const from = { seq: pgSequence('name', { startWith: 100 }) }; const to = {}; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { type: 'drop_sequence', name: 'name', schema: 'public', }, ]); expect(sqlStatements).toStrictEqual(['DROP SEQUENCE "public"."name";']); }); test('drop sequence: custom schema', async () => { const customSchema = pgSchema('custom'); const from = { seq: customSchema.sequence('name', { startWith: 100 }) }; const to = {}; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { type: 'drop_sequence', name: 'name', schema: 'custom', }, ]); expect(sqlStatements).toStrictEqual(['DROP SEQUENCE "custom"."name";']); }); // rename sequence test('rename sequence', async () => { const from = { seq: pgSequence('name', { startWith: 100 }) }; const to = { seq: pgSequence('name_new', { startWith: 100 }) }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'public.name->public.name_new', ]); expect(statements).toStrictEqual([ { type: 'rename_sequence', nameFrom: 'name', nameTo: 'name_new', schema: 'public', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER SEQUENCE "public"."name" RENAME TO "name_new";', ]); }); test('rename sequence in custom schema', async () => { const customSchema = pgSchema('custom'); const from = { seq: customSchema.sequence('name', { startWith: 100 }) }; const to = { seq: customSchema.sequence('name_new', { startWith: 100 }) }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'custom.name->custom.name_new', ]); expect(statements).toStrictEqual([ { type: 'rename_sequence', nameFrom: 'name', nameTo: 'name_new', schema: 'custom', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER SEQUENCE "custom"."name" RENAME TO "name_new";', ]); }); test('move sequence between schemas #1', async () => { const customSchema = pgSchema('custom'); const from = { seq: pgSequence('name', { startWith: 100 }) }; const to = { seq: customSchema.sequence('name', { startWith: 100 }) }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'public.name->custom.name', ]); expect(statements).toStrictEqual([ { type: 'move_sequence', name: 'name', schemaFrom: 'public', schemaTo: 'custom', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER SEQUENCE "public"."name" SET SCHEMA "custom";', ]); }); test('move sequence between schemas #2', async () => { const customSchema = pgSchema('custom'); const from = { seq: customSchema.sequence('name', { startWith: 100 }) }; const to = { seq: pgSequence('name', { startWith: 100 }) }; const { statements, sqlStatements } = await diffTestSchemas(from, to, [ 'custom.name->public.name', ]); expect(statements).toStrictEqual([ { type: 'move_sequence', name: 'name', schemaFrom: 'custom', schemaTo: 'public', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER SEQUENCE "custom"."name" SET SCHEMA "public";', ]); }); // Add squasher for sequences to make alters work + // Run all tests + // Finish introspect for sequences + // Check push for sequences + // add tests for generated to postgresql + // add tests for generated to mysql + // add tests for generated to sqlite + // add tests for identity to postgresql // check introspect generated(all dialects) + // check push generated(all dialect) + // add introspect ts file logic for all the features // manually test everything // beta release test('alter sequence', async () => { const from = { seq: pgSequence('name', { startWith: 100 }) }; const to = { seq: pgSequence('name', { startWith: 105 }) }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements).toStrictEqual([ { name: 'name', schema: 'public', type: 'alter_sequence', values: { cache: '1', cycle: false, increment: '1', maxValue: '9223372036854775807', minValue: '1', startWith: '105', }, }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER SEQUENCE "public"."name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 105 CACHE 1;', ]); }); ================================================ FILE: drizzle-kit/tests/pg-tables.test.ts ================================================ import { sql } from 'drizzle-orm'; import { AnyPgColumn, foreignKey, geometry, index, integer, pgEnum, pgSchema, pgSequence, pgTable, pgTableCreator, primaryKey, serial, text, unique, uniqueIndex, vector, } from 'drizzle-orm/pg-core'; import { expect, test } from 'vitest'; import { diffTestSchemas } from './schemaDiffer'; test('add table #1', async () => { const to = { users: pgTable('users', {}), }; const { statements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [], compositePKs: [], policies: [], uniqueConstraints: [], checkConstraints: [], isRLSEnabled: false, compositePkName: '', }); }); test('add table #2', async () => { const to = { users: pgTable('users', { id: serial('id').primaryKey(), }), }; const { statements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [ { name: 'id', notNull: true, primaryKey: true, type: 'serial', }, ], compositePKs: [], isRLSEnabled: false, policies: [], uniqueConstraints: [], checkConstraints: [], compositePkName: '', }); }); test('add table #3', async () => { const to = { users: pgTable( 'users', { id: serial('id'), }, (t) => { return { pk: primaryKey({ name: 'users_pk', columns: [t.id], }), }; }, ), }; const { statements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [ { name: 'id', notNull: true, primaryKey: false, type: 'serial', }, ], compositePKs: ['id;users_pk'], policies: [], uniqueConstraints: [], isRLSEnabled: false, checkConstraints: [], compositePkName: 'users_pk', }); }); test('add table #4', async () => { const to = { users: pgTable('users', {}), posts: pgTable('posts', {}), }; const { statements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [], compositePKs: [], policies: [], uniqueConstraints: [], checkConstraints: [], isRLSEnabled: false, compositePkName: '', }); expect(statements[1]).toStrictEqual({ type: 'create_table', tableName: 'posts', policies: [], schema: '', columns: [], compositePKs: [], isRLSEnabled: false, uniqueConstraints: [], checkConstraints: [], compositePkName: '', }); }); test('add table #5', async () => { const schema = pgSchema('folder'); const from = { schema, }; const to = { schema, users: schema.table('users', {}), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: 'folder', columns: [], compositePKs: [], policies: [], uniqueConstraints: [], compositePkName: '', checkConstraints: [], isRLSEnabled: false, }); }); test('add table #6', async () => { const from = { users1: pgTable('users1', {}), }; const to = { users2: pgTable('users2', {}), }; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users2', schema: '', columns: [], compositePKs: [], uniqueConstraints: [], policies: [], compositePkName: '', checkConstraints: [], isRLSEnabled: false, }); expect(statements[1]).toStrictEqual({ type: 'drop_table', policies: [], tableName: 'users1', schema: '', }); }); test('add table #7', async () => { const from = { users1: pgTable('users1', {}), }; const to = { users: pgTable('users', {}), users2: pgTable('users2', {}), }; const { statements } = await diffTestSchemas(from, to, [ 'public.users1->public.users2', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [], compositePKs: [], policies: [], uniqueConstraints: [], compositePkName: '', isRLSEnabled: false, checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'rename_table', tableNameFrom: 'users1', tableNameTo: 'users2', fromSchema: '', toSchema: '', }); }); test('add table #8: geometry types', async () => { const from = {}; const to = { users: pgTable('users', { geom: geometry('geom', { type: 'point' }).notNull(), geom1: geometry('geom1').notNull(), }), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(sqlStatements).toStrictEqual([ `CREATE TABLE "users" (\n\t"geom" geometry(point) NOT NULL,\n\t"geom1" geometry(point) NOT NULL\n);\n`, ]); }); test('multiproject schema add table #1', async () => { const table = pgTableCreator((name) => `prefix_${name}`); const to = { users: table('users', { id: serial('id').primaryKey(), }), }; const { statements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'prefix_users', schema: '', columns: [ { name: 'id', notNull: true, primaryKey: true, type: 'serial', }, ], compositePKs: [], policies: [], compositePkName: '', isRLSEnabled: false, uniqueConstraints: [], checkConstraints: [], }); }); test('multiproject schema drop table #1', async () => { const table = pgTableCreator((name) => `prefix_${name}`); const from = { users: table('users', { id: serial('id').primaryKey(), }), }; const to = {}; const { statements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ schema: '', tableName: 'prefix_users', type: 'drop_table', policies: [], }); }); test('multiproject schema alter table name #1', async () => { const table = pgTableCreator((name) => `prefix_${name}`); const from = { users: table('users', { id: serial('id').primaryKey(), }), }; const to = { users1: table('users1', { id: serial('id').primaryKey(), }), }; const { statements } = await diffTestSchemas(from, to, [ 'public.prefix_users->public.prefix_users1', ]); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_table', fromSchema: '', toSchema: '', tableNameFrom: 'prefix_users', tableNameTo: 'prefix_users1', }); }); test('add table #8: column with pgvector', async () => { const from = {}; const to = { users2: pgTable('users2', { id: serial('id').primaryKey(), name: vector('name', { dimensions: 3 }), }), }; const { sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements[0]).toBe( `CREATE TABLE "users2" (\n\t"id" serial PRIMARY KEY NOT NULL,\n\t"name" vector(3)\n); `, ); }); test('add schema + table #1', async () => { const schema = pgSchema('folder'); const to = { schema, users: schema.table('users', {}), }; const { statements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_schema', name: 'folder', }); expect(statements[1]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: 'folder', policies: [], columns: [], compositePKs: [], isRLSEnabled: false, uniqueConstraints: [], compositePkName: '', checkConstraints: [], }); }); test('change schema with tables #1', async () => { const schema = pgSchema('folder'); const schema2 = pgSchema('folder2'); const from = { schema, users: schema.table('users', {}), }; const to = { schema2, users: schema2.table('users', {}), }; const { statements } = await diffTestSchemas(from, to, ['folder->folder2']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_schema', from: 'folder', to: 'folder2', }); }); test('change table schema #1', async () => { const schema = pgSchema('folder'); const from = { schema, users: pgTable('users', {}), }; const to = { schema, users: schema.table('users', {}), }; const { statements } = await diffTestSchemas(from, to, [ 'public.users->folder.users', ]); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_set_schema', tableName: 'users', schemaFrom: 'public', schemaTo: 'folder', }); }); test('change table schema #2', async () => { const schema = pgSchema('folder'); const from = { schema, users: schema.table('users', {}), }; const to = { schema, users: pgTable('users', {}), }; const { statements } = await diffTestSchemas(from, to, [ 'folder.users->public.users', ]); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_set_schema', tableName: 'users', schemaFrom: 'folder', schemaTo: 'public', }); }); test('change table schema #3', async () => { const schema1 = pgSchema('folder1'); const schema2 = pgSchema('folder2'); const from = { schema1, schema2, users: schema1.table('users', {}), }; const to = { schema1, schema2, users: schema2.table('users', {}), }; const { statements } = await diffTestSchemas(from, to, [ 'folder1.users->folder2.users', ]); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_set_schema', tableName: 'users', schemaFrom: 'folder1', schemaTo: 'folder2', }); }); test('change table schema #4', async () => { const schema1 = pgSchema('folder1'); const schema2 = pgSchema('folder2'); const from = { schema1, users: schema1.table('users', {}), }; const to = { schema1, schema2, // add schema users: schema2.table('users', {}), // move table }; const { statements } = await diffTestSchemas(from, to, [ 'folder1.users->folder2.users', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_schema', name: 'folder2', }); expect(statements[1]).toStrictEqual({ type: 'alter_table_set_schema', tableName: 'users', schemaFrom: 'folder1', schemaTo: 'folder2', }); }); test('change table schema #5', async () => { const schema1 = pgSchema('folder1'); const schema2 = pgSchema('folder2'); const from = { schema1, // remove schema users: schema1.table('users', {}), }; const to = { schema2, // add schema users: schema2.table('users', {}), // move table }; const { statements } = await diffTestSchemas(from, to, [ 'folder1.users->folder2.users', ]); expect(statements.length).toBe(3); expect(statements[0]).toStrictEqual({ type: 'create_schema', name: 'folder2', }); expect(statements[1]).toStrictEqual({ type: 'alter_table_set_schema', tableName: 'users', schemaFrom: 'folder1', schemaTo: 'folder2', }); expect(statements[2]).toStrictEqual({ type: 'drop_schema', name: 'folder1', }); }); test('change table schema #5', async () => { const schema1 = pgSchema('folder1'); const schema2 = pgSchema('folder2'); const from = { schema1, schema2, users: schema1.table('users', {}), }; const to = { schema1, schema2, users: schema2.table('users2', {}), // rename and move table }; const { statements } = await diffTestSchemas(from, to, [ 'folder1.users->folder2.users2', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'alter_table_set_schema', tableName: 'users', schemaFrom: 'folder1', schemaTo: 'folder2', }); expect(statements[1]).toStrictEqual({ type: 'rename_table', tableNameFrom: 'users', tableNameTo: 'users2', fromSchema: 'folder2', toSchema: 'folder2', }); }); test('change table schema #6', async () => { const schema1 = pgSchema('folder1'); const schema2 = pgSchema('folder2'); const from = { schema1, users: schema1.table('users', {}), }; const to = { schema2, // rename schema users: schema2.table('users2', {}), // rename table }; const { statements } = await diffTestSchemas(from, to, [ 'folder1->folder2', 'folder2.users->folder2.users2', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'rename_schema', from: 'folder1', to: 'folder2', }); expect(statements[1]).toStrictEqual({ type: 'rename_table', tableNameFrom: 'users', tableNameTo: 'users2', fromSchema: 'folder2', toSchema: 'folder2', }); }); test('drop table + rename schema #1', async () => { const schema1 = pgSchema('folder1'); const schema2 = pgSchema('folder2'); const from = { schema1, users: schema1.table('users', {}), }; const to = { schema2, // rename schema // drop table }; const { statements } = await diffTestSchemas(from, to, ['folder1->folder2']); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'rename_schema', from: 'folder1', to: 'folder2', }); expect(statements[1]).toStrictEqual({ type: 'drop_table', tableName: 'users', schema: 'folder2', policies: [], }); }); test('create table with tsvector', async () => { const from = {}; const to = { users: pgTable( 'posts', { id: serial('id').primaryKey(), title: text('title').notNull(), description: text('description').notNull(), }, (table) => ({ titleSearchIndex: index('title_search_index').using( 'gin', sql`to_tsvector('english', ${table.title})`, ), }), ), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "posts" (\n\t"id" serial PRIMARY KEY NOT NULL,\n\t"title" text NOT NULL,\n\t"description" text NOT NULL\n);\n', `CREATE INDEX "title_search_index" ON "posts" USING gin (to_tsvector('english', "title"));`, ]); }); test('composite primary key', async () => { const from = {}; const to = { table: pgTable('works_to_creators', { workId: integer('work_id').notNull(), creatorId: integer('creator_id').notNull(), classification: text('classification').notNull(), }, (t) => ({ pk: primaryKey({ columns: [t.workId, t.creatorId, t.classification], }), })), }; const { sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "works_to_creators" (\n\t"work_id" integer NOT NULL,\n\t"creator_id" integer NOT NULL,\n\t"classification" text NOT NULL,\n\tCONSTRAINT "works_to_creators_work_id_creator_id_classification_pk" PRIMARY KEY("work_id","creator_id","classification")\n);\n', ]); }); test('add column before creating unique constraint', async () => { const from = { table: pgTable('table', { id: serial('id').primaryKey(), }), }; const to = { table: pgTable('table', { id: serial('id').primaryKey(), name: text('name').notNull(), }, (t) => ({ uq: unique('uq').on(t.name), })), }; const { sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "table" ADD COLUMN "name" text NOT NULL;', 'ALTER TABLE "table" ADD CONSTRAINT "uq" UNIQUE("name");', ]); }); test('alter composite primary key', async () => { const from = { table: pgTable('table', { col1: integer('col1').notNull(), col2: integer('col2').notNull(), col3: text('col3').notNull(), }, (t) => ({ pk: primaryKey({ name: 'table_pk', columns: [t.col1, t.col2], }), })), }; const to = { table: pgTable('table', { col1: integer('col1').notNull(), col2: integer('col2').notNull(), col3: text('col3').notNull(), }, (t) => ({ pk: primaryKey({ name: 'table_pk', columns: [t.col2, t.col3], }), })), }; const { sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "table" DROP CONSTRAINT "table_pk";\n--> statement-breakpoint\nALTER TABLE "table" ADD CONSTRAINT "table_pk" PRIMARY KEY("col2","col3");', ]); }); test('add index with op', async () => { const from = { users: pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }), }; const to = { users: pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }, (t) => ({ nameIdx: index().using('gin', t.name.op('gin_trgm_ops')), })), }; const { sqlStatements } = await diffTestSchemas(from, to, []); expect(sqlStatements).toStrictEqual([ 'CREATE INDEX "users_name_index" ON "users" USING gin ("name" gin_trgm_ops);', ]); }); test('optional db aliases (snake case)', async () => { const from = {}; const t1 = pgTable( 't1', { t1Id1: integer().notNull().primaryKey(), t1Col2: integer().notNull(), t1Col3: integer().notNull(), t2Ref: integer().notNull().references(() => t2.t2Id), t1Uni: integer().notNull(), t1UniIdx: integer().notNull(), t1Idx: integer().notNull(), }, (table) => ({ uni: unique('t1_uni').on(table.t1Uni), uniIdx: uniqueIndex('t1_uni_idx').on(table.t1UniIdx), idx: index('t1_idx').on(table.t1Idx).where(sql`${table.t1Idx} > 0`), fk: foreignKey({ columns: [table.t1Col2, table.t1Col3], foreignColumns: [t3.t3Id1, t3.t3Id2], }), }), ); const t2 = pgTable( 't2', { t2Id: serial().primaryKey(), }, ); const t3 = pgTable( 't3', { t3Id1: integer(), t3Id2: integer(), }, (table) => ({ pk: primaryKey({ columns: [table.t3Id1, table.t3Id2], }), }), ); const to = { t1, t2, t3, }; const { sqlStatements } = await diffTestSchemas(from, to, [], false, 'snake_case'); const st1 = `CREATE TABLE "t1" ( "t1_id1" integer PRIMARY KEY NOT NULL, "t1_col2" integer NOT NULL, "t1_col3" integer NOT NULL, "t2_ref" integer NOT NULL, "t1_uni" integer NOT NULL, "t1_uni_idx" integer NOT NULL, "t1_idx" integer NOT NULL, CONSTRAINT "t1_uni" UNIQUE("t1_uni") ); `; const st2 = `CREATE TABLE "t2" ( "t2_id" serial PRIMARY KEY NOT NULL ); `; const st3 = `CREATE TABLE "t3" ( "t3_id1" integer, "t3_id2" integer, CONSTRAINT "t3_t3_id1_t3_id2_pk" PRIMARY KEY("t3_id1","t3_id2") ); `; const st4 = `ALTER TABLE "t1" ADD CONSTRAINT "t1_t2_ref_t2_t2_id_fk" FOREIGN KEY ("t2_ref") REFERENCES "public"."t2"("t2_id") ON DELETE no action ON UPDATE no action;`; const st5 = `ALTER TABLE "t1" ADD CONSTRAINT "t1_t1_col2_t1_col3_t3_t3_id1_t3_id2_fk" FOREIGN KEY ("t1_col2","t1_col3") REFERENCES "public"."t3"("t3_id1","t3_id2") ON DELETE no action ON UPDATE no action;`; const st6 = `CREATE UNIQUE INDEX "t1_uni_idx" ON "t1" USING btree ("t1_uni_idx");`; const st7 = `CREATE INDEX "t1_idx" ON "t1" USING btree ("t1_idx") WHERE "t1"."t1_idx" > 0;`; expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6, st7]); }); test('optional db aliases (camel case)', async () => { const from = {}; const t1 = pgTable( 't1', { t1_id1: integer().notNull().primaryKey(), t1_col2: integer().notNull(), t1_col3: integer().notNull(), t2_ref: integer().notNull().references(() => t2.t2_id), t1_uni: integer().notNull(), t1_uni_idx: integer().notNull(), t1_idx: integer().notNull(), }, (table) => ({ uni: unique('t1Uni').on(table.t1_uni), uni_idx: uniqueIndex('t1UniIdx').on(table.t1_uni_idx), idx: index('t1Idx').on(table.t1_idx).where(sql`${table.t1_idx} > 0`), fk: foreignKey({ columns: [table.t1_col2, table.t1_col3], foreignColumns: [t3.t3_id1, t3.t3_id2], }), }), ); const t2 = pgTable( 't2', { t2_id: serial().primaryKey(), }, ); const t3 = pgTable( 't3', { t3_id1: integer(), t3_id2: integer(), }, (table) => ({ pk: primaryKey({ columns: [table.t3_id1, table.t3_id2], }), }), ); const to = { t1, t2, t3, }; const { sqlStatements } = await diffTestSchemas(from, to, [], false, 'camelCase'); const st1 = `CREATE TABLE "t1" ( "t1Id1" integer PRIMARY KEY NOT NULL, "t1Col2" integer NOT NULL, "t1Col3" integer NOT NULL, "t2Ref" integer NOT NULL, "t1Uni" integer NOT NULL, "t1UniIdx" integer NOT NULL, "t1Idx" integer NOT NULL, CONSTRAINT "t1Uni" UNIQUE("t1Uni") ); `; const st2 = `CREATE TABLE "t2" ( "t2Id" serial PRIMARY KEY NOT NULL ); `; const st3 = `CREATE TABLE "t3" ( "t3Id1" integer, "t3Id2" integer, CONSTRAINT "t3_t3Id1_t3Id2_pk" PRIMARY KEY("t3Id1","t3Id2") ); `; const st4 = `ALTER TABLE "t1" ADD CONSTRAINT "t1_t2Ref_t2_t2Id_fk" FOREIGN KEY ("t2Ref") REFERENCES "public"."t2"("t2Id") ON DELETE no action ON UPDATE no action;`; const st5 = `ALTER TABLE "t1" ADD CONSTRAINT "t1_t1Col2_t1Col3_t3_t3Id1_t3Id2_fk" FOREIGN KEY ("t1Col2","t1Col3") REFERENCES "public"."t3"("t3Id1","t3Id2") ON DELETE no action ON UPDATE no action;`; const st6 = `CREATE UNIQUE INDEX "t1UniIdx" ON "t1" USING btree ("t1UniIdx");`; const st7 = `CREATE INDEX "t1Idx" ON "t1" USING btree ("t1Idx") WHERE "t1"."t1Idx" > 0;`; expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6, st7]); }); ================================================ FILE: drizzle-kit/tests/pg-views.test.ts ================================================ import { sql } from 'drizzle-orm'; import { integer, pgMaterializedView, pgSchema, pgTable, pgView } from 'drizzle-orm/pg-core'; import { expect, test } from 'vitest'; import { diffTestSchemas } from './schemaDiffer'; test('create table and view #1', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const to = { users: users, view: pgView('some_view').as((qb) => qb.select().from(users)), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [{ name: 'id', notNull: true, primaryKey: true, type: 'integer', }], compositePKs: [], uniqueConstraints: [], isRLSEnabled: false, compositePkName: '', checkConstraints: [], policies: [], }); expect(statements[1]).toStrictEqual({ type: 'create_view', name: 'some_view', definition: `select "id" from "users"`, schema: 'public', with: undefined, materialized: false, tablespace: undefined, using: undefined, withNoData: false, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( \t"id" integer PRIMARY KEY NOT NULL );\n`); expect(sqlStatements[1]).toBe(`CREATE VIEW "public"."some_view" AS (select "id" from "users");`); }); test('create table and view #2', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const to = { users: users, view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [{ name: 'id', notNull: true, primaryKey: true, type: 'integer', }], compositePKs: [], uniqueConstraints: [], isRLSEnabled: false, compositePkName: '', policies: [], checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'create_view', name: 'some_view', definition: `SELECT * FROM "users"`, schema: 'public', with: undefined, materialized: false, tablespace: undefined, using: undefined, withNoData: false, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( \t"id" integer PRIMARY KEY NOT NULL );\n`); expect(sqlStatements[1]).toBe(`CREATE VIEW "public"."some_view" AS (SELECT * FROM "users");`); }); test('create table and view #3', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const to = { users: users, view1: pgView('some_view1', { id: integer('id') }).with({ checkOption: 'local', securityBarrier: false, securityInvoker: true, }).as(sql`SELECT * FROM ${users}`), view2: pgView('some_view2').with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: false, }).as((qb) => qb.select().from(users)), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(3); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [{ name: 'id', notNull: true, primaryKey: true, type: 'integer', }], compositePKs: [], uniqueConstraints: [], compositePkName: '', checkConstraints: [], isRLSEnabled: false, policies: [], }); expect(statements[1]).toStrictEqual({ type: 'create_view', name: 'some_view1', definition: `SELECT * FROM "users"`, schema: 'public', with: { checkOption: 'local', securityBarrier: false, securityInvoker: true, }, materialized: false, tablespace: undefined, using: undefined, withNoData: false, }); expect(statements[2]).toStrictEqual({ type: 'create_view', name: 'some_view2', definition: `select "id" from "users"`, schema: 'public', with: { checkOption: 'cascaded', securityBarrier: true, securityInvoker: false, }, materialized: false, tablespace: undefined, using: undefined, withNoData: false, }); expect(sqlStatements.length).toBe(3); expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( \t"id" integer PRIMARY KEY NOT NULL );\n`); expect(sqlStatements[1]).toBe( `CREATE VIEW "public"."some_view1" WITH (check_option = local, security_barrier = false, security_invoker = true) AS (SELECT * FROM "users");`, ); expect(sqlStatements[2]).toBe( `CREATE VIEW "public"."some_view2" WITH (check_option = cascaded, security_barrier = true, security_invoker = false) AS (select "id" from "users");`, ); }); test('create table and view #4', async () => { const schema = pgSchema('new_schema'); const users = schema.table('users', { id: integer('id').primaryKey().notNull(), }); const to = { schema, users: users, view1: schema.view('some_view1', { id: integer('id') }).with({ checkOption: 'local', securityBarrier: false, securityInvoker: true, }).as(sql`SELECT * FROM ${users}`), view2: schema.view('some_view2').with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: false, }).as((qb) => qb.select().from(users)), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(4); expect(statements[0]).toStrictEqual({ type: 'create_schema', name: 'new_schema', }); expect(statements[1]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: 'new_schema', columns: [{ name: 'id', notNull: true, primaryKey: true, type: 'integer', }], compositePKs: [], uniqueConstraints: [], compositePkName: '', isRLSEnabled: false, policies: [], checkConstraints: [], }); expect(statements[2]).toStrictEqual({ type: 'create_view', name: 'some_view1', definition: `SELECT * FROM "new_schema"."users"`, schema: 'new_schema', with: { checkOption: 'local', securityBarrier: false, securityInvoker: true, }, materialized: false, tablespace: undefined, using: undefined, withNoData: false, }); expect(statements[3]).toStrictEqual({ type: 'create_view', name: 'some_view2', definition: `select "id" from "new_schema"."users"`, schema: 'new_schema', with: { checkOption: 'cascaded', securityBarrier: true, securityInvoker: false, }, materialized: false, tablespace: undefined, using: undefined, withNoData: false, }); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); expect(sqlStatements[1]).toBe(`CREATE TABLE "new_schema"."users" ( \t"id" integer PRIMARY KEY NOT NULL );\n`); expect(sqlStatements[2]).toBe( `CREATE VIEW "new_schema"."some_view1" WITH (check_option = local, security_barrier = false, security_invoker = true) AS (SELECT * FROM "new_schema"."users");`, ); expect(sqlStatements[3]).toBe( `CREATE VIEW "new_schema"."some_view2" WITH (check_option = cascaded, security_barrier = true, security_invoker = false) AS (select "id" from "new_schema"."users");`, ); }); test('create table and view #5', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const to = { users: users, view1: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), view2: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), }; await expect(diffTestSchemas({}, to, [])).rejects.toThrowError(); }); test('create table and view #6', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const to = { users: users, view1: pgView('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as(sql`SELECT * FROM ${users}`), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ columns: [ { name: 'id', notNull: true, primaryKey: true, type: 'integer', }, ], compositePKs: [], compositePkName: '', schema: '', tableName: 'users', type: 'create_table', uniqueConstraints: [], checkConstraints: [], isRLSEnabled: false, policies: [], }); expect(statements[1]).toStrictEqual({ definition: 'SELECT * FROM "users"', name: 'some_view', schema: 'public', type: 'create_view', with: { checkOption: 'cascaded', }, materialized: false, tablespace: undefined, using: undefined, withNoData: false, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( \t"id" integer PRIMARY KEY NOT NULL );\n`); expect(sqlStatements[1]).toBe( `CREATE VIEW "public"."some_view" WITH (check_option = cascaded) AS (SELECT * FROM "users");`, ); }); test('create view with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, }; const to = { users: users, view1: pgView('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('create table and materialized view #1', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const to = { users: users, view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [{ name: 'id', notNull: true, primaryKey: true, type: 'integer', }], compositePKs: [], uniqueConstraints: [], isRLSEnabled: false, policies: [], compositePkName: '', checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'create_view', name: 'some_view', definition: `select "id" from "users"`, schema: 'public', with: undefined, materialized: true, tablespace: undefined, using: undefined, withNoData: false, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( \t"id" integer PRIMARY KEY NOT NULL );\n`); expect(sqlStatements[1]).toBe(`CREATE MATERIALIZED VIEW "public"."some_view" AS (select "id" from "users");`); }); test('create table and materialized view #2', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const to = { users: users, view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [{ name: 'id', notNull: true, primaryKey: true, type: 'integer', }], compositePKs: [], uniqueConstraints: [], compositePkName: '', isRLSEnabled: false, policies: [], checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'create_view', name: 'some_view', definition: `SELECT * FROM "users"`, schema: 'public', with: undefined, materialized: true, tablespace: undefined, using: undefined, withNoData: false, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( \t"id" integer PRIMARY KEY NOT NULL );\n`); expect(sqlStatements[1]).toBe(`CREATE MATERIALIZED VIEW "public"."some_view" AS (SELECT * FROM "users");`); }); test('create table and materialized view #3', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const to = { users: users, view1: pgMaterializedView('some_view1', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), view2: pgMaterializedView('some_view2').tablespace('some_tablespace').using('heap').withNoData().with({ autovacuumEnabled: true, autovacuumFreezeMaxAge: 1, autovacuumFreezeMinAge: 1, autovacuumFreezeTableAge: 1, autovacuumMultixactFreezeMaxAge: 1, autovacuumMultixactFreezeMinAge: 1, autovacuumMultixactFreezeTableAge: 1, autovacuumVacuumCostDelay: 1, autovacuumVacuumCostLimit: 1, autovacuumVacuumScaleFactor: 1, autovacuumVacuumThreshold: 1, fillfactor: 1, logAutovacuumMinDuration: 1, parallelWorkers: 1, toastTupleTarget: 1, userCatalogTable: true, vacuumIndexCleanup: 'off', vacuumTruncate: false, }).as((qb) => qb.select().from(users)), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(3); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: '', columns: [{ name: 'id', notNull: true, primaryKey: true, type: 'integer', }], compositePKs: [], uniqueConstraints: [], isRLSEnabled: false, compositePkName: '', policies: [], checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'create_view', name: 'some_view1', definition: `SELECT * FROM "users"`, schema: 'public', with: undefined, materialized: true, withNoData: false, using: undefined, tablespace: undefined, }); expect(statements[2]).toStrictEqual({ type: 'create_view', name: 'some_view2', definition: `select "id" from "users"`, schema: 'public', with: { autovacuumEnabled: true, autovacuumFreezeMaxAge: 1, autovacuumFreezeMinAge: 1, autovacuumFreezeTableAge: 1, autovacuumMultixactFreezeMaxAge: 1, autovacuumMultixactFreezeMinAge: 1, autovacuumMultixactFreezeTableAge: 1, autovacuumVacuumCostDelay: 1, autovacuumVacuumCostLimit: 1, autovacuumVacuumScaleFactor: 1, autovacuumVacuumThreshold: 1, fillfactor: 1, logAutovacuumMinDuration: 1, parallelWorkers: 1, toastTupleTarget: 1, userCatalogTable: true, vacuumIndexCleanup: 'off', vacuumTruncate: false, }, materialized: true, tablespace: 'some_tablespace', using: 'heap', withNoData: true, }); expect(sqlStatements.length).toBe(3); expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( \t"id" integer PRIMARY KEY NOT NULL );\n`); expect(sqlStatements[1]).toBe( `CREATE MATERIALIZED VIEW "public"."some_view1" AS (SELECT * FROM "users");`, ); expect(sqlStatements[2]).toBe( `CREATE MATERIALIZED VIEW "public"."some_view2" USING "heap" WITH (autovacuum_enabled = true, autovacuum_freeze_max_age = 1, autovacuum_freeze_min_age = 1, autovacuum_freeze_table_age = 1, autovacuum_multixact_freeze_max_age = 1, autovacuum_multixact_freeze_min_age = 1, autovacuum_multixact_freeze_table_age = 1, autovacuum_vacuum_cost_delay = 1, autovacuum_vacuum_cost_limit = 1, autovacuum_vacuum_scale_factor = 1, autovacuum_vacuum_threshold = 1, fillfactor = 1, log_autovacuum_min_duration = 1, parallel_workers = 1, toast_tuple_target = 1, user_catalog_table = true, vacuum_index_cleanup = off, vacuum_truncate = false) TABLESPACE some_tablespace AS (select "id" from "users") WITH NO DATA;`, ); }); test('create table and materialized view #4', async () => { // same names const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const to = { users: users, view1: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), view2: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), }; await expect(diffTestSchemas({}, to, [])).rejects.toThrowError(); }); test('create table and materialized view #5', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const to = { users: users, view1: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumFreezeMinAge: 14 }).as( sql`SELECT * FROM ${users}`, ), }; const { statements, sqlStatements } = await diffTestSchemas({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ columns: [ { name: 'id', notNull: true, primaryKey: true, type: 'integer', }, ], compositePKs: [], compositePkName: '', schema: '', tableName: 'users', type: 'create_table', uniqueConstraints: [], isRLSEnabled: false, policies: [], checkConstraints: [], }); expect(statements[1]).toEqual({ definition: 'SELECT * FROM "users"', name: 'some_view', schema: 'public', type: 'create_view', with: { autovacuumFreezeMinAge: 14, }, materialized: true, tablespace: undefined, using: undefined, withNoData: false, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`CREATE TABLE "users" ( \t"id" integer PRIMARY KEY NOT NULL );\n`); expect(sqlStatements[1]).toBe( `CREATE MATERIALIZED VIEW "public"."some_view" WITH (autovacuum_freeze_min_age = 14) AS (SELECT * FROM "users");`, ); }); test('create materialized view with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, }; const to = { users: users, view1: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumEnabled: true }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop view #1', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), }; const to = { users: users, }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'drop_view', name: 'some_view', schema: 'public', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`DROP VIEW "public"."some_view";`); }); test('drop view with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view', { id: integer('id') }).existing(), }; const to = { users: users, }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop materialized view #1', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM ${users}`), }; const to = { users: users, }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'drop_view', name: 'some_view', schema: 'public', materialized: true, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."some_view";`); }); test('drop materialized view with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).existing(), }; const to = { users: users, }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('rename view #1', async () => { const from = { view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), }; const to = { view: pgView('new_some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_view', nameFrom: 'some_view', nameTo: 'new_some_view', schema: 'public', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER VIEW "public"."some_view" RENAME TO "new_some_view";`); }); test('rename view with existing flag', async () => { const from = { view: pgView('some_view', { id: integer('id') }).existing(), }; const to = { view: pgView('new_some_view', { id: integer('id') }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('rename materialized view #1', async () => { const from = { view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), }; const to = { view: pgMaterializedView('new_some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'rename_view', nameFrom: 'some_view', nameTo: 'new_some_view', schema: 'public', materialized: true, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`ALTER MATERIALIZED VIEW "public"."some_view" RENAME TO "new_some_view";`); }); test('rename materialized view with existing flag', async () => { const from = { view: pgMaterializedView('some_view', { id: integer('id') }).existing(), }; const to = { view: pgMaterializedView('new_some_view', { id: integer('id') }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('view alter schema', async () => { const schema = pgSchema('new_schema'); const from = { view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), }; const to = { schema, view: schema.view('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->new_schema.some_view']); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_schema', name: 'new_schema', }); expect(statements[1]).toStrictEqual({ type: 'alter_view_alter_schema', toSchema: 'new_schema', fromSchema: 'public', name: 'some_view', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); expect(sqlStatements[1]).toBe(`ALTER VIEW "public"."some_view" SET SCHEMA "new_schema";`); }); test('view alter schema with existing flag', async () => { const schema = pgSchema('new_schema'); const from = { view: pgView('some_view', { id: integer('id') }).existing(), }; const to = { schema, view: schema.view('some_view', { id: integer('id') }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->new_schema.some_view']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_schema', name: 'new_schema', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); }); test('view alter schema for materialized', async () => { const schema = pgSchema('new_schema'); const from = { view: pgMaterializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), }; const to = { schema, view: schema.materializedView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->new_schema.some_view']); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_schema', name: 'new_schema', }); expect(statements[1]).toStrictEqual({ type: 'alter_view_alter_schema', toSchema: 'new_schema', fromSchema: 'public', name: 'some_view', materialized: true, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); expect(sqlStatements[1]).toBe(`ALTER MATERIALIZED VIEW "public"."some_view" SET SCHEMA "new_schema";`); }); test('view alter schema for materialized with existing flag', async () => { const schema = pgSchema('new_schema'); const from = { view: pgMaterializedView('some_view', { id: integer('id') }).existing(), }; const to = { schema, view: schema.materializedView('some_view', { id: integer('id') }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->new_schema.some_view']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_schema', name: 'new_schema', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE SCHEMA "new_schema";\n`); }); test('add with option to view #1', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view').as((qb) => qb.select().from(users)), }; const to = { users, view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true }).as((qb) => qb.select().from(users) ), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'some_view', schema: 'public', type: 'alter_view_add_with_option', with: { checkOption: 'cascaded', securityBarrier: true, }, materialized: false, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER VIEW "public"."some_view" SET (check_option = cascaded, security_barrier = true);`, ); }); test('add with option to view with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view', {}).existing(), }; const to = { users, view: pgView('some_view', {}).with({ checkOption: 'cascaded', securityBarrier: true }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('add with option to materialized view #1', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), }; const to = { users, view: pgMaterializedView('some_view').with({ autovacuumMultixactFreezeMaxAge: 3 }).as((qb) => qb.select().from(users) ), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'some_view', schema: 'public', type: 'alter_view_add_with_option', with: { autovacuumMultixactFreezeMaxAge: 3, }, materialized: true, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."some_view" SET (autovacuum_multixact_freeze_max_age = 3);`, ); }); test('add with option to materialized view with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', {}).existing(), }; const to = { users, view: pgMaterializedView('some_view', {}).with({ autovacuumMultixactFreezeMaxAge: 3 }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop with option from view #1', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }).as((qb) => qb.select().from(users) ), }; const to = { users, view: pgView('some_view').as((qb) => qb.select().from(users)), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'some_view', schema: 'public', type: 'alter_view_drop_with_option', materialized: false, with: { checkOption: 'cascaded', securityBarrier: true, securityInvoker: true, }, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER VIEW "public"."some_view" RESET (check_option, security_barrier, security_invoker);`, ); }); test('drop with option from view with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view', {}).with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }) .existing(), }; const to = { users, view: pgView('some_view', {}).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop with option from materialized view #1', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, autovacuumFreezeMaxAge: 10 }).as((qb) => qb.select().from(users) ), }; const to = { users, view: pgMaterializedView('some_view').as((qb) => qb.select().from(users)), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'some_view', schema: 'public', type: 'alter_view_drop_with_option', materialized: true, with: { autovacuumEnabled: true, autovacuumFreezeMaxAge: 10, }, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."some_view" RESET (autovacuum_enabled, autovacuum_freeze_max_age);`, ); }); test('drop with option from materialized view with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true, autovacuumFreezeMaxAge: 10 }).existing(), }; const to = { users, view: pgMaterializedView('some_view', {}).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('alter with option in view #1', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view').with({ securityBarrier: true, securityInvoker: true }).as((qb) => qb.select().from(users) ), }; const to = { users, view: pgView('some_view').with({ securityBarrier: true }).as((qb) => qb.select().from(users)), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'some_view', schema: 'public', type: 'alter_view_drop_with_option', with: { securityInvoker: true, }, materialized: false, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER VIEW "public"."some_view" RESET (security_invoker);`, ); }); test('alter with option in view with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view', {}).with({ securityBarrier: true, securityInvoker: true }).existing(), }; const to = { users, view: pgView('some_view', {}).with({ securityBarrier: true }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('alter with option in materialized view #1', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, autovacuumVacuumScaleFactor: 1 }).as((qb) => qb.select().from(users) ), }; const to = { users, view: pgMaterializedView('some_view').with({ autovacuumEnabled: true }).as((qb) => qb.select().from(users)), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'some_view', schema: 'public', type: 'alter_view_drop_with_option', with: { autovacuumVacuumScaleFactor: 1, }, materialized: true, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."some_view" RESET (autovacuum_vacuum_scale_factor);`, ); }); test('alter with option in materialized view with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true, autovacuumVacuumScaleFactor: 1 }) .existing(), }; const to = { users, view: pgMaterializedView('some_view', {}).with({ autovacuumEnabled: true }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('alter with option in view #2', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view').with({ checkOption: 'local', securityBarrier: true, securityInvoker: true }).as((qb) => qb.selectDistinct().from(users) ), }; const to = { users, view: pgView('some_view').with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true }).as((qb) => qb.selectDistinct().from(users) ), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_view_add_with_option', name: 'some_view', schema: 'public', with: { checkOption: 'cascaded', }, materialized: false, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER VIEW "public"."some_view" SET (check_option = cascaded);`, ); }); test('alter with option in materialized view #2', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view').with({ autovacuumEnabled: true, fillfactor: 1 }).as((qb) => qb.select().from(users) ), }; const to = { users, view: pgMaterializedView('some_view').with({ autovacuumEnabled: false, fillfactor: 1 }).as((qb) => qb.select().from(users) ), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_view_add_with_option', name: 'some_view', schema: 'public', with: { autovacuumEnabled: false, }, materialized: true, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."some_view" SET (autovacuum_enabled = false);`, ); }); test('alter view ".as" value', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view', { id: integer('id') }).with({ checkOption: 'local', securityBarrier: true, securityInvoker: true, }).as(sql`SELECT '123'`), }; const to = { users, view: pgView('some_view', { id: integer('id') }).with({ checkOption: 'local', securityBarrier: true, securityInvoker: true, }).as(sql`SELECT '1234'`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual( { name: 'some_view', schema: 'public', type: 'drop_view', }, ); expect(statements[1]).toStrictEqual( { definition: "SELECT '1234'", name: 'some_view', schema: 'public', type: 'create_view', materialized: false, with: { checkOption: 'local', securityBarrier: true, securityInvoker: true, }, withNoData: false, tablespace: undefined, using: undefined, }, ); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe('DROP VIEW "public"."some_view";'); expect(sqlStatements[1]).toBe( `CREATE VIEW "public"."some_view" WITH (check_option = local, security_barrier = true, security_invoker = true) AS (SELECT '1234');`, ); }); test('alter view ".as" value with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view', { id: integer('id') }).with({ checkOption: 'local', securityBarrier: true, securityInvoker: true, }).existing(), }; const to = { users, view: pgView('some_view', { id: integer('id') }).with({ checkOption: 'local', securityBarrier: true, securityInvoker: true, }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('alter materialized view ".as" value', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT '123'`), }; const to = { users, view: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT '1234'`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual( { name: 'some_view', schema: 'public', type: 'drop_view', materialized: true, }, ); expect(statements[1]).toStrictEqual( { definition: "SELECT '1234'", name: 'some_view', schema: 'public', type: 'create_view', with: { autovacuumVacuumCostLimit: 1, }, materialized: true, withNoData: false, tablespace: undefined, using: undefined, }, ); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe('DROP MATERIALIZED VIEW "public"."some_view";'); expect(sqlStatements[1]).toBe( `CREATE MATERIALIZED VIEW "public"."some_view" WITH (autovacuum_vacuum_cost_limit = 1) AS (SELECT '1234');`, ); }); test('alter materialized view ".as" value with existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, }).existing(), }; const to = { users, view: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop existing flag', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, }).existing(), }; const to = { users, view: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toEqual({ type: 'drop_view', name: 'some_view', schema: 'public', materialized: true, }); expect(statements[1]).toEqual({ definition: "SELECT 'asd'", materialized: true, name: 'some_view', schema: 'public', tablespace: undefined, type: 'create_view', using: undefined, with: { autovacuumVacuumCostLimit: 1, }, withNoData: false, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."some_view";`); expect(sqlStatements[1]).toBe( `CREATE MATERIALIZED VIEW "public"."some_view" WITH (autovacuum_vacuum_cost_limit = 1) AS (SELECT 'asd');`, ); }); test('alter tablespace - materialize', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('some_tablespace').with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const to = { users, view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toEqual({ type: 'alter_view_alter_tablespace', name: 'some_view', schema: 'public', materialized: true, toTablespace: 'new_tablespace', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."some_view" SET TABLESPACE new_tablespace;`, ); }); test('set tablespace - materialize', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const to = { users, view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toEqual({ type: 'alter_view_alter_tablespace', name: 'some_view', schema: 'public', materialized: true, toTablespace: 'new_tablespace', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."some_view" SET TABLESPACE new_tablespace;`, ); }); test('drop tablespace - materialize', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const to = { users, view: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toEqual({ type: 'alter_view_alter_tablespace', name: 'some_view', schema: 'public', materialized: true, toTablespace: 'pg_default', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."some_view" SET TABLESPACE pg_default;`, ); }); test('set existing - materialized', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const to = { users, view: pgMaterializedView('new_some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, autovacuumFreezeMinAge: 1, }).withNoData().existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop existing - materialized', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('new_tablespace').with({ autovacuumVacuumCostLimit: 1, }).existing(), }; const to = { users, view: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, autovacuumFreezeMinAge: 1, }).withNoData().as(sql`SELECT 'asd'`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(2); expect(sqlStatements.length).toBe(2); }); test('set existing', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgView('some_view', { id: integer('id') }).with({ checkOption: 'cascaded', }).as(sql`SELECT 'asd'`), }; const to = { users, view: pgView('new_some_view', { id: integer('id') }).with({ checkOption: 'cascaded', securityBarrier: true, }).existing(), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('alter using - materialize', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('some_tablespace').using('some_using').with( { autovacuumVacuumCostLimit: 1, }, ).as(sql`SELECT 'asd'`), }; const to = { users, view: pgMaterializedView('some_view', { id: integer('id') }).tablespace('some_tablespace').using('new_using').with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toEqual({ type: 'alter_view_alter_using', name: 'some_view', schema: 'public', materialized: true, toUsing: 'new_using', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."some_view" SET ACCESS METHOD "new_using";`, ); }); test('set using - materialize', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const to = { users, view: pgMaterializedView('some_view', { id: integer('id') }).using('new_using').with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toEqual({ type: 'alter_view_alter_using', name: 'some_view', schema: 'public', materialized: true, toUsing: 'new_using', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."some_view" SET ACCESS METHOD "new_using";`, ); }); test('drop using - materialize', async () => { const users = pgTable('users', { id: integer('id').primaryKey().notNull(), }); const from = { users, view: pgMaterializedView('some_view', { id: integer('id') }).using('new_using').with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const to = { users, view: pgMaterializedView('some_view', { id: integer('id') }).with({ autovacuumVacuumCostLimit: 1, }).as(sql`SELECT 'asd'`), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toEqual({ type: 'alter_view_alter_using', name: 'some_view', schema: 'public', materialized: true, toUsing: 'heap', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."some_view" SET ACCESS METHOD "heap";`, ); }); test('rename view and alter view', async () => { const from = { view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), }; const to = { view: pgView('new_some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as( sql`SELECT * FROM "users"`, ), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->public.new_some_view']); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'rename_view', nameFrom: 'some_view', nameTo: 'new_some_view', schema: 'public', }); expect(statements[1]).toStrictEqual({ materialized: false, name: 'new_some_view', schema: 'public', type: 'alter_view_add_with_option', with: { checkOption: 'cascaded', }, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`ALTER VIEW "public"."some_view" RENAME TO "new_some_view";`); expect(sqlStatements[1]).toBe(`ALTER VIEW "public"."new_some_view" SET (check_option = cascaded);`); }); test('moved schema and alter view', async () => { const schema = pgSchema('my_schema'); const from = { schema, view: pgView('some_view', { id: integer('id') }).as(sql`SELECT * FROM "users"`), }; const to = { schema, view: schema.view('some_view', { id: integer('id') }).with({ checkOption: 'cascaded' }).as( sql`SELECT * FROM "users"`, ), }; const { statements, sqlStatements } = await diffTestSchemas(from, to, ['public.some_view->my_schema.some_view']); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ fromSchema: 'public', name: 'some_view', toSchema: 'my_schema', type: 'alter_view_alter_schema', }); expect(statements[1]).toStrictEqual({ name: 'some_view', schema: 'my_schema', type: 'alter_view_add_with_option', materialized: false, with: { checkOption: 'cascaded', }, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`ALTER VIEW "public"."some_view" SET SCHEMA "my_schema";`); expect(sqlStatements[1]).toBe(`ALTER VIEW "my_schema"."some_view" SET (check_option = cascaded);`); }); ================================================ FILE: drizzle-kit/tests/push/common.ts ================================================ import { afterAll, beforeAll, beforeEach, test } from 'vitest'; export interface DialectSuite { allTypes(context?: any): Promise; addBasicIndexes(context?: any): Promise; changeIndexFields(context?: any): Promise; dropIndex(context?: any): Promise; indexesToBeNotTriggered(context?: any): Promise; indexesTestCase1(context?: any): Promise; addNotNull(context?: any): Promise; addNotNullWithDataNoRollback(context?: any): Promise; addBasicSequences(context?: any): Promise; addGeneratedColumn(context?: any): Promise; addGeneratedToColumn(context?: any): Promise; dropGeneratedConstraint(context?: any): Promise; alterGeneratedConstraint(context?: any): Promise; createTableWithGeneratedConstraint(context?: any): Promise; createCompositePrimaryKey(context?: any): Promise; renameTableWithCompositePrimaryKey(context?: any): Promise; case1(): Promise; } export const run = ( suite: DialectSuite, beforeAllFn?: (context: any) => Promise, afterAllFn?: (context: any) => Promise, beforeEachFn?: (context: any) => Promise, ) => { let context: any = {}; beforeAll(beforeAllFn ? () => beforeAllFn(context) : () => {}); beforeEach(beforeEachFn ? () => beforeEachFn(context) : () => {}); test('No diffs for all database types', () => suite.allTypes(context)); test('Adding basic indexes', () => suite.addBasicIndexes(context)); test('Dropping basic index', () => suite.dropIndex(context)); test('Altering indexes', () => suite.changeIndexFields(context)); test('Indexes properties that should not trigger push changes', () => suite.indexesToBeNotTriggered(context)); test('Indexes test case #1', () => suite.indexesTestCase1(context)); test('Drop column', () => suite.case1()); test('Add not null to a column', () => suite.addNotNull()); test('Add not null to a column with null data. Should rollback', () => suite.addNotNullWithDataNoRollback()); test('Add basic sequences', () => suite.addBasicSequences()); test('Add generated column', () => suite.addGeneratedColumn(context)); test('Add generated constraint to an existing column', () => suite.addGeneratedToColumn(context)); test('Drop generated constraint from a column', () => suite.dropGeneratedConstraint(context)); // should ignore on push test('Alter generated constraint', () => suite.alterGeneratedConstraint(context)); test('Create table with generated column', () => suite.createTableWithGeneratedConstraint(context)); test('Rename table with composite primary key', () => suite.renameTableWithCompositePrimaryKey(context)); test('Create composite primary key', () => suite.createCompositePrimaryKey(context)); afterAll(afterAllFn ? () => afterAllFn(context) : () => {}); }; ================================================ FILE: drizzle-kit/tests/push/libsql.test.ts ================================================ import { createClient } from '@libsql/client'; import chalk from 'chalk'; import { sql } from 'drizzle-orm'; import { blob, check, foreignKey, getTableConfig, index, int, integer, numeric, real, sqliteTable, sqliteView, text, uniqueIndex, } from 'drizzle-orm/sqlite-core'; import { diffTestSchemasPushLibSQL } from 'tests/schemaDiffer'; import { expect, test } from 'vitest'; test('nothing changed in schema', async (t) => { const turso = createClient({ url: ':memory:', }); const users = sqliteTable('users', { id: integer('id').primaryKey().notNull(), name: text('name').notNull(), email: text('email'), textJson: text('text_json', { mode: 'json' }), blobJon: blob('blob_json', { mode: 'json' }), blobBigInt: blob('blob_bigint', { mode: 'bigint' }), numeric: numeric('numeric'), createdAt: integer('created_at', { mode: 'timestamp' }), createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }), real: real('real'), text: text('text', { length: 255 }), role: text('role', { enum: ['admin', 'user'] }).default('user'), isConfirmed: integer('is_confirmed', { mode: 'boolean', }), }); const schema1 = { users, customers: sqliteTable('customers', { id: integer('id').primaryKey(), address: text('address').notNull(), isConfirmed: integer('is_confirmed', { mode: 'boolean' }), registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) .notNull() .$defaultFn(() => new Date()), userId: integer('user_id') .references(() => users.id) .notNull(), }), posts: sqliteTable('posts', { id: integer('id').primaryKey(), content: text('content'), authorId: integer('author_id'), }), }; const { sqlStatements, statements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL(turso, schema1, schema1, [], false); expect(sqlStatements.length).toBe(0); expect(statements.length).toBe(0); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); expect(shouldAskForApprove).toBe(false); }); test('added, dropped index', async (t) => { const turso = createClient({ url: ':memory:', }); const users = sqliteTable('users', { id: integer('id').primaryKey().notNull(), name: text('name').notNull(), email: text('email'), textJson: text('text_json', { mode: 'json' }), blobJon: blob('blob_json', { mode: 'json' }), blobBigInt: blob('blob_bigint', { mode: 'bigint' }), numeric: numeric('numeric'), createdAt: integer('created_at', { mode: 'timestamp' }), createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }), real: real('real'), text: text('text', { length: 255 }), role: text('role', { enum: ['admin', 'user'] }).default('user'), isConfirmed: integer('is_confirmed', { mode: 'boolean', }), }); const schema1 = { users, customers: sqliteTable( 'customers', { id: integer('id').primaryKey(), address: text('address').notNull(), isConfirmed: integer('is_confirmed', { mode: 'boolean' }), registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) .notNull() .$defaultFn(() => new Date()), userId: integer('user_id').notNull(), }, (table) => ({ uniqueIndex: uniqueIndex('customers_address_unique').on(table.address), }), ), posts: sqliteTable('posts', { id: integer('id').primaryKey(), content: text('content'), authorId: integer('author_id'), }), }; const schema2 = { users, customers: sqliteTable( 'customers', { id: integer('id').primaryKey(), address: text('address').notNull(), isConfirmed: integer('is_confirmed', { mode: 'boolean' }), registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) .notNull() .$defaultFn(() => new Date()), userId: integer('user_id').notNull(), }, (table) => ({ uniqueIndex: uniqueIndex('customers_is_confirmed_unique').on( table.isConfirmed, ), }), ), posts: sqliteTable('posts', { id: integer('id').primaryKey(), content: text('content'), authorId: integer('author_id'), }), }; const { sqlStatements, statements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL(turso, schema1, schema2, [], false); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'drop_index', tableName: 'customers', data: 'customers_address_unique;address;true;', schema: '', }); expect(statements[1]).toStrictEqual({ type: 'create_index', tableName: 'customers', data: 'customers_is_confirmed_unique;is_confirmed;true;', schema: '', internal: { indexes: {} }, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `DROP INDEX \`customers_address_unique\`;`, ); expect(sqlStatements[1]).toBe( `CREATE UNIQUE INDEX \`customers_is_confirmed_unique\` ON \`customers\` (\`is_confirmed\`);`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('added column not null and without default to table with data', async (t) => { const turso = createClient({ url: ':memory:', }); const schema1 = { companies: sqliteTable('companies', { id: integer('id').primaryKey(), name: text('name').notNull(), }), }; const schema2 = { companies: sqliteTable('companies', { id: integer('id').primaryKey(), name: text('name').notNull(), age: integer('age').notNull(), }), }; const table = getTableConfig(schema1.companies); const seedStatements = [ `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('drizzle');`, `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('turso');`, ]; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], false, seedStatements, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'companies', column: { name: 'age', type: 'integer', primaryKey: false, notNull: true, autoincrement: false, }, referenceData: undefined, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`delete from companies;`); expect(sqlStatements[1]).toBe( `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe( `· You're about to add not-null ${ chalk.underline( 'age', ) } column without default value, which contains 2 items`, ); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(1); expect(tablesToTruncate![0]).toBe('companies'); }); test('added column not null and without default to table without data', async (t) => { const turso = createClient({ url: ':memory:', }); const schema1 = { companies: sqliteTable('companies', { id: integer('id').primaryKey(), name: text('name').notNull(), }), }; const schema2 = { companies: sqliteTable('companies', { id: integer('id').primaryKey(), name: text('name').notNull(), age: integer('age').notNull(), }), }; const { sqlStatements, statements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL(turso, schema1, schema2, [], false); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'companies', column: { name: 'age', type: 'integer', primaryKey: false, notNull: true, autoincrement: false, }, referenceData: undefined, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`, ); expect(infoToPrint!.length).toBe(0); expect(columnsToRemove!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('drop autoincrement. drop column with data', async (t) => { const turso = createClient({ url: ':memory:', }); const schema1 = { companies: sqliteTable('companies', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const schema2 = { companies: sqliteTable('companies', { id: integer('id').primaryKey({ autoIncrement: false }), }), }; const table = getTableConfig(schema1.companies); const seedStatements = [ `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (1, 'drizzle');`, `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (2, 'turso');`, ]; const { sqlStatements, statements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], false, seedStatements, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', tableName: 'companies', columns: [ { name: 'id', type: 'integer', autoincrement: false, notNull: true, primaryKey: true, generated: undefined, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `CREATE TABLE \`__new_companies\` ( \t\`id\` integer PRIMARY KEY NOT NULL );\n`, ); expect(sqlStatements[1]).toBe(`INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`); expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`); expect(sqlStatements[3]).toBe( `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`, ); expect(columnsToRemove!.length).toBe(1); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe( `· You're about to delete ${ chalk.underline( 'name', ) } column in companies table with 2 items`, ); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('change autoincrement. table is part of foreign key', async (t) => { const turso = createClient({ url: ':memory:', }); const companies1 = sqliteTable('companies', { id: integer('id').primaryKey({ autoIncrement: true }), }); const users1 = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').unique(), companyId: integer('company_id').references(() => companies1.id), }); const schema1 = { companies: companies1, users: users1, }; const companies2 = sqliteTable('companies', { id: integer('id').primaryKey({ autoIncrement: false }), }); const users2 = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').unique(), companyId: integer('company_id').references(() => companies2.id), }); const schema2 = { companies: companies2, users: users2, }; const { name: usersTableName } = getTableConfig(users1); const { name: companiesTableName } = getTableConfig(companies1); const seedStatements = [ `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('drizzle');`, `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('turso');`, `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES (1);`, `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES (2);`, ]; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], false, seedStatements, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', tableName: 'companies', columns: [ { name: 'id', type: 'integer', autoincrement: false, notNull: true, primaryKey: true, generated: undefined, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `CREATE TABLE \`__new_companies\` ( \t\`id\` integer PRIMARY KEY NOT NULL );\n`, ); expect(sqlStatements[1]).toBe( `INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`, ); expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`); expect(sqlStatements[3]).toBe( `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('drop not null, add not null', async (t) => { const turso = createClient({ url: ':memory:', }); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }), posts: sqliteTable( 'posts', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), userId: int('user_id'), }, ), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), posts: sqliteTable( 'posts', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), userId: int('user_id'), }, ), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], ); expect(statements!.length).toBe(2); expect(statements![0]).toStrictEqual({ columnAutoIncrement: false, columnDefault: undefined, columnName: 'name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_notnull', }); expect(statements![1]).toStrictEqual({ columnAutoIncrement: false, columnDefault: undefined, columnName: 'name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'posts', type: 'alter_table_alter_column_set_notnull', }); expect(sqlStatements!.length).toBe(2); expect(sqlStatements![0]).toBe(`ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`); expect(sqlStatements![1]).toBe(`ALTER TABLE \`posts\` ALTER COLUMN "name" TO "name" text NOT NULL;`); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('drop table with data', async (t) => { const turso = createClient({ url: ':memory:', }); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }), posts: sqliteTable( 'posts', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), userId: int('user_id'), }, ), }; const schema2 = { posts: sqliteTable( 'posts', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), userId: int('user_id'), }, ), }; const seedStatements = [ `INSERT INTO \`users\` ("name") VALUES ('drizzle')`, ]; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], false, seedStatements, ); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ policies: [], schema: undefined, tableName: 'users', type: 'drop_table', }); expect(sqlStatements!.length).toBe(1); expect(sqlStatements![0]).toBe(`DROP TABLE \`users\`;`); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe(`· You're about to delete ${chalk.underline('users')} table with 1 items`); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(1); expect(tablesToRemove![0]).toBe('users'); expect(tablesToTruncate!.length).toBe(0); }); test('recreate table with nested references', async (t) => { const turso = createClient({ url: ':memory:', }); let users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), age: integer('age'), }); let subscriptions = sqliteTable('subscriptions', { id: int('id').primaryKey({ autoIncrement: true }), userId: integer('user_id').references(() => users.id), customerId: text('customer_id'), }); const schema1 = { users: users, subscriptions: subscriptions, subscriptionMetadata: sqliteTable('subscriptions_metadata', { id: int('id').primaryKey({ autoIncrement: true }), subscriptionId: text('subscription_id').references( () => subscriptions.id, ), }), }; users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }); const schema2 = { users: users, subscriptions: subscriptions, subscriptionMetadata: sqliteTable('subscriptions_metadata', { id: int('id').primaryKey({ autoIncrement: true }), subscriptionId: text('subscription_id').references( () => subscriptions.id, ), }), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL(turso, schema1, schema2, []); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: false, name: 'id', notNull: true, generated: undefined, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, generated: undefined, primaryKey: false, type: 'text', }, { autoincrement: false, name: 'age', notNull: false, generated: undefined, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements!.length).toBe(4); expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`name\` text, \t\`age\` integer );\n`); expect(sqlStatements![1]).toBe( `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`, ); expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![3]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('recreate table with added column not null and without default', async (t) => { const turso = createClient({ url: ':memory:', }); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), age: integer('age'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), newColumn: text('new_column').notNull(), }), }; const seedStatements = [ `INSERT INTO \`users\` ("name", "age") VALUES ('drizzle', 12)`, `INSERT INTO \`users\` ("name", "age") VALUES ('turso', 12)`, ]; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], false, seedStatements, ); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: false, name: 'id', notNull: true, generated: undefined, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, generated: undefined, primaryKey: false, type: 'text', }, { autoincrement: false, name: 'age', notNull: false, generated: undefined, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'new_column', notNull: true, generated: undefined, primaryKey: false, type: 'text', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements!.length).toBe(4); expect(sqlStatements[0]).toBe('DELETE FROM \`users\`;'); expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`name\` text, \t\`age\` integer, \t\`new_column\` text NOT NULL );\n`); expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![3]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe( `· You're about to add not-null ${ chalk.underline('new_column') } column without default value to table, which contains 2 items`, ); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(1); expect(tablesToTruncate![0]).toBe('users'); }); test('set not null with index', async (t) => { const turso = createClient({ url: ':memory:', }); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }, (table) => ({ someIndex: index('users_name_index').on(table.name), })), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }, (table) => ({ someIndex: index('users_name_index').on(table.name), })), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], ); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ columnAutoIncrement: false, columnDefault: undefined, columnName: 'name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_notnull', }); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_alter_column_set_notnull', tableName: 'users', columnName: 'name', schema: '', newDataType: 'text', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: false, columnPk: false, }); expect(sqlStatements.length).toBe(3); expect(sqlStatements[0]).toBe( `DROP INDEX "users_name_index";`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text NOT NULL;`, ); expect(sqlStatements[2]).toBe( `CREATE INDEX \`users_name_index\` ON \`users\` (\`name\`);`, ); expect(columnsToRemove!.length).toBe(0), expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('drop not null with two indexes', async (t) => { const turso = createClient({ url: ':memory:', }); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), age: int('age').notNull(), }, (table) => ({ someUniqeIndex: uniqueIndex('users_name_unique').on(table.name), someIndex: index('users_age_index').on(table.age), })), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), age: int('age').notNull(), }, (table) => ({ someUniqeIndex: uniqueIndex('users_name_unique').on(table.name), someIndex: index('users_age_index').on(table.age), })), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_alter_column_drop_notnull', tableName: 'users', columnName: 'name', schema: '', newDataType: 'text', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, }); expect(sqlStatements.length).toBe(5); expect(sqlStatements[0]).toBe( `DROP INDEX "users_name_unique";`, ); expect(sqlStatements[1]).toBe( `DROP INDEX "users_age_index";`, ); expect(sqlStatements[2]).toBe( `ALTER TABLE \`users\` ALTER COLUMN "name" TO "name" text;`, ); expect(sqlStatements[3]).toBe( `CREATE UNIQUE INDEX \`users_name_unique\` ON \`users\` (\`name\`);`, ); expect(sqlStatements[4]).toBe( `CREATE INDEX \`users_age_index\` ON \`users\` (\`age\`);`, ); expect(columnsToRemove!.length).toBe(0), expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('add check constraint to table', async (t) => { const turso = createClient({ url: ':memory:', }); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }, (table) => ({ someCheck: check('some_check', sql`${table.age} > 21`), })), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], ); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: false, name: 'id', notNull: true, generated: undefined, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, generated: undefined, primaryKey: false, type: 'text', }, { autoincrement: false, name: 'age', notNull: false, generated: undefined, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: ['some_check;"users"."age" > 21'], }); expect(sqlStatements!.length).toBe(4); expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`name\` text, \t\`age\` integer, \tCONSTRAINT "some_check" CHECK("__new_users"."age" > 21) );\n`); expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', ); expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![3]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('drop check constraint', async (t) => { const turso = createClient({ url: ':memory:', }); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }, (table) => ({ someCheck: check('some_check', sql`${table.age} > 21`), })), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], ); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: false, name: 'id', notNull: true, generated: undefined, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, generated: undefined, primaryKey: false, type: 'text', }, { autoincrement: false, name: 'age', notNull: false, generated: undefined, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements!.length).toBe(4); expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`name\` text, \t\`age\` integer );\n`); expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', ); expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![3]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('db has checks. Push with same names', async () => { const turso = createClient({ url: ':memory:', }); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }, (table) => ({ someCheck: check('some_check', sql`${table.age} > 21`), })), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }, (table) => ({ someCheck: check('some_check', sql`some new value`), })), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], false, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('create view', async () => { const turso = createClient({ url: ':memory:', }); const table = sqliteTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, }; const schema2 = { test: table, view: sqliteView('view').as((qb) => qb.select().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], ); expect(statements).toStrictEqual([ { definition: 'select "id" from "test"', name: 'view', type: 'sqlite_create_view', }, ]); expect(sqlStatements).toStrictEqual([ `CREATE VIEW \`view\` AS select "id" from "test";`, ]); }); test('drop view', async () => { const turso = createClient({ url: ':memory:', }); const table = sqliteTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, view: sqliteView('view').as((qb) => qb.select().from(table)), }; const schema2 = { test: table, }; const { statements, sqlStatements } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], ); expect(statements).toStrictEqual([ { name: 'view', type: 'drop_view', }, ]); expect(sqlStatements).toStrictEqual([ 'DROP VIEW \`view\`;', ]); }); test('alter view ".as"', async () => { const turso = createClient({ url: ':memory:', }); const table = sqliteTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, view: sqliteView('view').as((qb) => qb.select().from(table).where(sql`${table.id} = 1`)), }; const schema2 = { test: table, view: sqliteView('view').as((qb) => qb.select().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPushLibSQL( turso, schema1, schema2, [], ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); ================================================ FILE: drizzle-kit/tests/push/mysql-push.test.ts ================================================ import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import { check, int, mysqlTable, mysqlView } from 'drizzle-orm/mysql-core'; import fs from 'fs'; import getPort from 'get-port'; import { Connection, createConnection } from 'mysql2/promise'; import { diffTestSchemasPushMysql } from 'tests/schemaDiffer'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, expect, test } from 'vitest'; let client: Connection; let mysqlContainer: Docker.Container; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => // eslint-disable-next-line @typescript-eslint/no-unsafe-argument docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } beforeAll(async () => { const connectionString = process.env.MYSQL_CONNECTION_STRING ?? await createDockerDB(); const sleep = 1000; let timeLeft = 20000; let connected = false; let lastError: unknown | undefined; do { try { client = await createConnection(connectionString); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); throw lastError; } }); afterAll(async () => { await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); }); if (!fs.existsSync('tests/push/mysql')) { fs.mkdirSync('tests/push/mysql'); } test('add check constraint to table', async () => { const schema1 = { test: mysqlTable('test', { id: int('id').primaryKey(), values: int('values'), }), }; const schema2 = { test: mysqlTable('test', { id: int('id').primaryKey(), values: int('values'), }, (table) => ({ checkConstraint1: check('some_check1', sql`${table.values} < 100`), checkConstraint2: check('some_check2', sql`'test' < 100`), })), }; const { statements, sqlStatements } = await diffTestSchemasPushMysql( client, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([ { type: 'create_check_constraint', tableName: 'test', schema: '', data: 'some_check1;\`test\`.\`values\` < 100', }, { data: "some_check2;'test' < 100", schema: '', tableName: 'test', type: 'create_check_constraint', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE \`test\` ADD CONSTRAINT \`some_check1\` CHECK (\`test\`.\`values\` < 100);', `ALTER TABLE \`test\` ADD CONSTRAINT \`some_check2\` CHECK ('test' < 100);`, ]); await client.query(`DROP TABLE \`test\`;`); }); test('drop check constraint to table', async () => { const schema1 = { test: mysqlTable('test', { id: int('id').primaryKey(), values: int('values'), }, (table) => ({ checkConstraint1: check('some_check1', sql`${table.values} < 100`), checkConstraint2: check('some_check2', sql`'test' < 100`), })), }; const schema2 = { test: mysqlTable('test', { id: int('id').primaryKey(), values: int('values'), }), }; const { statements, sqlStatements } = await diffTestSchemasPushMysql( client, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([ { type: 'delete_check_constraint', tableName: 'test', schema: '', constraintName: 'some_check1', }, { constraintName: 'some_check2', schema: '', tableName: 'test', type: 'delete_check_constraint', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE \`test\` DROP CONSTRAINT \`some_check1\`;', `ALTER TABLE \`test\` DROP CONSTRAINT \`some_check2\`;`, ]); await client.query(`DROP TABLE \`test\`;`); }); test('db has checks. Push with same names', async () => { const schema1 = { test: mysqlTable('test', { id: int('id').primaryKey(), values: int('values').default(1), }, (table) => ({ checkConstraint: check('some_check', sql`${table.values} < 100`), })), }; const schema2 = { test: mysqlTable('test', { id: int('id').primaryKey(), values: int('values').default(1), }, (table) => ({ checkConstraint: check('some_check', sql`some new value`), })), }; const { statements, sqlStatements } = await diffTestSchemasPushMysql( client, schema1, schema2, [], 'drizzle', ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); await client.query(`DROP TABLE \`test\`;`); }); test('create view', async () => { const table = mysqlTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, }; const schema2 = { test: table, view: mysqlView('view').as((qb) => qb.select().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPushMysql( client, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([ { definition: 'select \`id\` from \`test\`', name: 'view', type: 'mysql_create_view', replace: false, sqlSecurity: 'definer', withCheckOption: undefined, algorithm: 'undefined', }, ]); expect(sqlStatements).toStrictEqual([ `CREATE ALGORITHM = undefined SQL SECURITY definer VIEW \`view\` AS (select \`id\` from \`test\`);`, ]); await client.query(`DROP TABLE \`test\`;`); }); test('drop view', async () => { const table = mysqlTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, view: mysqlView('view').as((qb) => qb.select().from(table)), }; const schema2 = { test: table, }; const { statements, sqlStatements } = await diffTestSchemasPushMysql( client, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([ { name: 'view', type: 'drop_view', }, ]); expect(sqlStatements).toStrictEqual([ 'DROP VIEW \`view\`;', ]); await client.query(`DROP TABLE \`test\`;`); await client.query(`DROP VIEW \`view\`;`); }); test('alter view ".as"', async () => { const table = mysqlTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, view: mysqlView('view').as((qb) => qb.select().from(table).where(sql`${table.id} = 1`)), }; const schema2 = { test: table, view: mysqlView('view').as((qb) => qb.select().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPushMysql( client, schema1, schema2, [], 'drizzle', false, ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); await client.query(`DROP TABLE \`test\`;`); await client.query(`DROP VIEW \`view\`;`); }); test('alter meta options with distinct in definition', async () => { const table = mysqlTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, view: mysqlView('view').withCheckOption('cascaded').sqlSecurity('definer').algorithm('merge').as(( qb, ) => qb.selectDistinct().from(table).where(sql`${table.id} = 1`)), }; const schema2 = { test: table, view: mysqlView('view').withCheckOption('cascaded').sqlSecurity('definer').algorithm('undefined').as((qb) => qb.selectDistinct().from(table) ), }; await expect(diffTestSchemasPushMysql( client, schema1, schema2, [], 'drizzle', false, )).rejects.toThrowError(); await client.query(`DROP TABLE \`test\`;`); }); ================================================ FILE: drizzle-kit/tests/push/mysql.test.ts ================================================ import 'dotenv/config'; import Docker from 'dockerode'; import { SQL, sql } from 'drizzle-orm'; import { bigint, binary, char, date, datetime, decimal, double, float, int, json, mediumint, mysqlEnum, mysqlTable, primaryKey, serial, smallint, text, time, timestamp, tinyint, varbinary, varchar, year, } from 'drizzle-orm/mysql-core'; import getPort from 'get-port'; import { Connection, createConnection } from 'mysql2/promise'; import { diffTestSchemasMysql, diffTestSchemasPushMysql } from 'tests/schemaDiffer'; import { v4 as uuid } from 'uuid'; import { expect, test } from 'vitest'; import { DialectSuite, run } from './common'; async function createDockerDB(context: any): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => // eslint-disable-next-line @typescript-eslint/no-unsafe-argument docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); context.mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await context.mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } const mysqlSuite: DialectSuite = { allTypes: async function(context: any): Promise { const schema1 = { allBigInts: mysqlTable('all_big_ints', { simple: bigint('simple', { mode: 'number' }), columnNotNull: bigint('column_not_null', { mode: 'number' }).notNull(), columnDefault: bigint('column_default', { mode: 'number' }).default(12), columnDefaultSql: bigint('column_default_sql', { mode: 'number', }).default(12), }), allBools: mysqlTable('all_bools', { simple: tinyint('simple'), columnNotNull: tinyint('column_not_null').notNull(), columnDefault: tinyint('column_default').default(1), }), allChars: mysqlTable('all_chars', { simple: char('simple', { length: 1 }), columnNotNull: char('column_not_null', { length: 45 }).notNull(), // columnDefault: char("column_default", { length: 1 }).default("h"), columnDefaultSql: char('column_default_sql', { length: 1 }).default( 'h', ), }), allDateTimes: mysqlTable('all_date_times', { simple: datetime('simple', { mode: 'string', fsp: 1 }), columnNotNull: datetime('column_not_null', { mode: 'string', }).notNull(), columnDefault: datetime('column_default', { mode: 'string' }).default( '2023-03-01 14:05:29', ), }), allDates: mysqlTable('all_dates', { simple: date('simple', { mode: 'string' }), column_not_null: date('column_not_null', { mode: 'string' }).notNull(), column_default: date('column_default', { mode: 'string' }).default( '2023-03-01', ), }), allDecimals: mysqlTable('all_decimals', { simple: decimal('simple', { precision: 1, scale: 0 }), columnNotNull: decimal('column_not_null', { precision: 45, scale: 3, }).notNull(), columnDefault: decimal('column_default', { precision: 10, scale: 0, }).default('100'), columnDefaultSql: decimal('column_default_sql', { precision: 10, scale: 0, }).default('101'), }), allDoubles: mysqlTable('all_doubles', { simple: double('simple'), columnNotNull: double('column_not_null').notNull(), columnDefault: double('column_default').default(100), columnDefaultSql: double('column_default_sql').default(101), }), allEnums: mysqlTable('all_enums', { simple: mysqlEnum('simple', ['hi', 'hello']), }), allEnums1: mysqlTable('all_enums1', { simple: mysqlEnum('simple', ['hi', 'hello']).default('hi'), }), allFloats: mysqlTable('all_floats', { columnNotNull: float('column_not_null').notNull(), columnDefault: float('column_default').default(100), columnDefaultSql: float('column_default_sql').default(101), }), allInts: mysqlTable('all_ints', { simple: int('simple'), columnNotNull: int('column_not_null').notNull(), columnDefault: int('column_default').default(100), columnDefaultSql: int('column_default_sql').default(101), }), allIntsRef: mysqlTable('all_ints_ref', { simple: int('simple'), columnNotNull: int('column_not_null').notNull(), columnDefault: int('column_default').default(100), columnDefaultSql: int('column_default_sql').default(101), }), allJsons: mysqlTable('all_jsons', { columnDefaultObject: json('column_default_object') .default({ hello: 'world world' }) .notNull(), columnDefaultArray: json('column_default_array').default({ hello: { 'world world': ['foo', 'bar'] }, foo: 'bar', fe: 23, }), column: json('column'), }), allMInts: mysqlTable('all_m_ints', { simple: mediumint('simple'), columnNotNull: mediumint('column_not_null').notNull(), columnDefault: mediumint('column_default').default(100), columnDefaultSql: mediumint('column_default_sql').default(101), }), allReals: mysqlTable('all_reals', { simple: double('simple', { precision: 5, scale: 2 }), columnNotNull: double('column_not_null').notNull(), columnDefault: double('column_default').default(100), columnDefaultSql: double('column_default_sql').default(101), }), allSInts: mysqlTable('all_s_ints', { simple: smallint('simple'), columnNotNull: smallint('column_not_null').notNull(), columnDefault: smallint('column_default').default(100), columnDefaultSql: smallint('column_default_sql').default(101), }), allSmallSerials: mysqlTable('all_small_serials', { columnAll: serial('column_all').primaryKey().notNull(), }), allTInts: mysqlTable('all_t_ints', { simple: tinyint('simple'), columnNotNull: tinyint('column_not_null').notNull(), columnDefault: tinyint('column_default').default(10), columnDefaultSql: tinyint('column_default_sql').default(11), }), allTexts: mysqlTable('all_texts', { simple: text('simple'), columnNotNull: text('column_not_null').notNull(), columnDefault: text('column_default').default('hello'), columnDefaultSql: text('column_default_sql').default('hello'), }), allTimes: mysqlTable('all_times', { simple: time('simple', { fsp: 1 }), columnNotNull: time('column_not_null').notNull(), columnDefault: time('column_default').default('22:12:12'), }), allTimestamps: mysqlTable('all_timestamps', { columnDateNow: timestamp('column_date_now', { fsp: 1, mode: 'string', }).default(sql`(now())`), columnAll: timestamp('column_all', { mode: 'string' }) .default('2023-03-01 14:05:29') .notNull(), column: timestamp('column', { mode: 'string' }).default( '2023-02-28 16:18:31', ), }), allVarChars: mysqlTable('all_var_chars', { simple: varchar('simple', { length: 100 }), columnNotNull: varchar('column_not_null', { length: 45 }).notNull(), columnDefault: varchar('column_default', { length: 100 }).default( 'hello', ), columnDefaultSql: varchar('column_default_sql', { length: 100, }).default('hello'), }), allVarbinaries: mysqlTable('all_varbinaries', { simple: varbinary('simple', { length: 100 }), columnNotNull: varbinary('column_not_null', { length: 100 }).notNull(), columnDefault: varbinary('column_default', { length: 12 }).default( sql`(uuid_to_bin(uuid()))`, ), }), allYears: mysqlTable('all_years', { simple: year('simple'), columnNotNull: year('column_not_null').notNull(), columnDefault: year('column_default').default(2022), }), binafry: mysqlTable('binary', { simple: binary('simple', { length: 1 }), columnNotNull: binary('column_not_null', { length: 1 }).notNull(), columnDefault: binary('column_default', { length: 12 }).default( sql`(uuid_to_bin(uuid()))`, ), }), }; const { statements } = await diffTestSchemasPushMysql( context.client as Connection, schema1, schema1, [], 'drizzle', false, ); expect(statements.length).toBe(2); expect(statements).toEqual([ { type: 'delete_unique_constraint', tableName: 'all_small_serials', data: 'column_all;column_all', schema: '', }, { type: 'delete_unique_constraint', tableName: 'all_small_serials', data: 'column_all;column_all', schema: '', }, ]); const { sqlStatements: dropStatements } = await diffTestSchemasMysql( schema1, {}, [], false, ); for (const st of dropStatements) { await context.client.query(st); } }, addBasicIndexes: function(context?: any): Promise { return {} as any; }, changeIndexFields: function(context?: any): Promise { return {} as any; }, dropIndex: function(context?: any): Promise { return {} as any; }, indexesToBeNotTriggered: function(context?: any): Promise { return {} as any; }, indexesTestCase1: function(context?: any): Promise { return {} as any; }, async case1() { // TODO: implement if needed expect(true).toBe(true); }, addNotNull: function(context?: any): Promise { return {} as any; }, addNotNullWithDataNoRollback: function(context?: any): Promise { return {} as any; }, addBasicSequences: function(context?: any): Promise { return {} as any; }, addGeneratedColumn: async function(context: any): Promise { const schema1 = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const schema2 = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${schema2.users.name} || 'hello'`, { mode: 'stored' }, ), generatedName1: text('gen_name1').generatedAlwaysAs( (): SQL => sql`${schema2.users.name} || 'hello'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasPushMysql( context.client as Connection, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([ { column: { autoincrement: false, generated: { as: "`users`.`name` || 'hello'", type: 'stored', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, { column: { autoincrement: false, generated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, name: 'gen_name1', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", "ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); for (const st of sqlStatements) { await context.client.query(st); } const { sqlStatements: dropStatements } = await diffTestSchemasMysql( schema2, {}, [], false, ); for (const st of dropStatements) { await context.client.query(st); } }, addGeneratedToColumn: async function(context: any): Promise { const schema1 = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name'), generatedName1: text('gen_name1'), }), }; const schema2 = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${schema2.users.name} || 'hello'`, { mode: 'stored' }, ), generatedName1: text('gen_name1').generatedAlwaysAs( (): SQL => sql`${schema2.users.name} || 'hello'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasPushMysql( context.client as Connection, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'stored', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name1', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", 'ALTER TABLE `users` DROP COLUMN `gen_name1`;', "ALTER TABLE `users` ADD `gen_name1` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); for (const st of sqlStatements) { await context.client.query(st); } const { sqlStatements: dropStatements } = await diffTestSchemasMysql( schema2, {}, [], false, ); for (const st of dropStatements) { await context.client.query(st); } }, dropGeneratedConstraint: async function(context: any): Promise { const schema1 = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${schema2.users.name}`, { mode: 'stored' }, ), generatedName1: text('gen_name1').generatedAlwaysAs( (): SQL => sql`${schema2.users.name}`, { mode: 'virtual' }, ), }), }; const schema2 = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name'), generatedName1: text('gen_name1'), }), }; const { statements, sqlStatements } = await diffTestSchemasPushMysql( context.client as Connection, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', oldColumn: { autoincrement: false, default: undefined, generated: { as: '`name`', type: 'stored', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name1', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', oldColumn: { autoincrement: false, default: undefined, generated: { as: '`name`', type: 'virtual', }, name: 'gen_name1', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', 'ALTER TABLE `users` DROP COLUMN `gen_name1`;', 'ALTER TABLE `users` ADD `gen_name1` text;', ]); for (const st of sqlStatements) { await context.client.query(st); } const { sqlStatements: dropStatements } = await diffTestSchemasMysql( schema2, {}, [], false, ); for (const st of dropStatements) { await context.client.query(st); } }, alterGeneratedConstraint: async function(context: any): Promise { const schema1 = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${schema2.users.name}`, { mode: 'stored' }, ), generatedName1: text('gen_name1').generatedAlwaysAs( (): SQL => sql`${schema2.users.name}`, { mode: 'virtual' }, ), }), }; const schema2 = { users: mysqlTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${schema2.users.name} || 'hello'`, { mode: 'stored' }, ), generatedName1: text('gen_name1').generatedAlwaysAs( (): SQL => sql`${schema2.users.name} || 'hello'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasPushMysql( context.client as Connection, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); const { sqlStatements: dropStatements } = await diffTestSchemasMysql( schema2, {}, [], false, ); for (const st of dropStatements) { await context.client.query(st); } }, createTableWithGeneratedConstraint: function(context?: any): Promise { return {} as any; }, createCompositePrimaryKey: async function(context: any): Promise { const schema1 = {}; const schema2 = { table: mysqlTable('table', { col1: int('col1').notNull(), col2: int('col2').notNull(), }, (t) => ({ pk: primaryKey({ columns: [t.col1, t.col2], }), })), }; const { statements, sqlStatements } = await diffTestSchemasPushMysql( context.client as Connection, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([ { type: 'create_table', tableName: 'table', schema: undefined, internals: { indexes: {}, tables: {}, }, compositePKs: ['table_col1_col2_pk;col1,col2'], compositePkName: 'table_col1_col2_pk', uniqueConstraints: [], checkConstraints: [], columns: [ { name: 'col1', type: 'int', primaryKey: false, notNull: true, autoincrement: false }, { name: 'col2', type: 'int', primaryKey: false, notNull: true, autoincrement: false }, ], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `table` (\n\t`col1` int NOT NULL,\n\t`col2` int NOT NULL,\n\tCONSTRAINT `table_col1_col2_pk` PRIMARY KEY(`col1`,`col2`)\n);\n', ]); }, renameTableWithCompositePrimaryKey: async function(context?: any): Promise { const productsCategoriesTable = (tableName: string) => { return mysqlTable(tableName, { productId: varchar('product_id', { length: 10 }).notNull(), categoryId: varchar('category_id', { length: 10 }).notNull(), }, (t) => ({ pk: primaryKey({ columns: [t.productId, t.categoryId], }), })); }; const schema1 = { table: productsCategoriesTable('products_categories'), }; const schema2 = { test: productsCategoriesTable('products_to_categories'), }; const { sqlStatements } = await diffTestSchemasPushMysql( context.client as Connection, schema1, schema2, ['public.products_categories->public.products_to_categories'], 'drizzle', false, ); expect(sqlStatements).toStrictEqual([ 'RENAME TABLE `products_categories` TO `products_to_categories`;', 'ALTER TABLE `products_to_categories` DROP PRIMARY KEY;', 'ALTER TABLE `products_to_categories` ADD PRIMARY KEY(`product_id`,`category_id`);', ]); await context.client.query(`DROP TABLE \`products_categories\``); }, }; run( mysqlSuite, async (context: any) => { const connectionString = process.env.MYSQL_CONNECTION_STRING ?? await createDockerDB(context); const sleep = 1000; let timeLeft = 20000; let connected = false; let lastError: unknown | undefined; do { try { context.client = await createConnection(connectionString); await context.client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await context.client?.end().catch(console.error); await context.mysqlContainer?.stop().catch(console.error); throw lastError; } }, async (context: any) => { await context.client?.end().catch(console.error); await context.mysqlContainer?.stop().catch(console.error); }, async (context: any) => { await context.client?.query(`drop database if exists \`drizzle\`;`); await context.client?.query(`create database \`drizzle\`;`); await context.client?.query(`use \`drizzle\`;`); }, ); ================================================ FILE: drizzle-kit/tests/push/pg.test.ts ================================================ import { PGlite } from '@electric-sql/pglite'; import chalk from 'chalk'; import { bigint, bigserial, boolean, char, check, date, doublePrecision, index, integer, interval, json, jsonb, numeric, pgEnum, pgMaterializedView, pgPolicy, pgRole, pgSchema, pgSequence, pgTable, pgView, primaryKey, real, serial, smallint, text, time, timestamp, uniqueIndex, uuid, varchar, } from 'drizzle-orm/pg-core'; import { drizzle } from 'drizzle-orm/pglite'; import { eq, SQL, sql } from 'drizzle-orm/sql'; import { pgSuggestions } from 'src/cli/commands/pgPushUtils'; import { diffTestSchemas, diffTestSchemasPush } from 'tests/schemaDiffer'; import { expect, test } from 'vitest'; import { DialectSuite, run } from './common'; const pgSuite: DialectSuite = { async allTypes() { const client = new PGlite(); const customSchema = pgSchema('schemass'); const transactionStatusEnum = customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']); const enumname = pgEnum('enumname', ['three', 'two', 'one']); const schema1 = { test: pgEnum('test', ['ds']), testHello: pgEnum('test_hello', ['ds']), enumname: pgEnum('enumname', ['three', 'two', 'one']), customSchema: customSchema, transactionStatusEnum: customSchema.enum('TransactionStatusEnum', ['PENDING', 'FAILED', 'SUCCESS']), allSmallSerials: pgTable('schema_test', { columnAll: uuid('column_all').defaultRandom(), column: transactionStatusEnum('column').notNull(), }), allSmallInts: customSchema.table( 'schema_test2', { columnAll: smallint('column_all').default(124).notNull(), column: smallint('columns').array(), column1: smallint('column1').array().array(), column2: smallint('column2').array().array(), column3: smallint('column3').array(), }, (t) => ({ cd: uniqueIndex('testdfds').on(t.column), }), ), allEnums: customSchema.table( 'all_enums', { columnAll: enumname('column_all').default('three').notNull(), column: enumname('columns'), }, (t) => ({ d: index('ds').on(t.column), }), ), allTimestamps: customSchema.table('all_timestamps', { columnDateNow: timestamp('column_date_now', { precision: 1, withTimezone: true, mode: 'string', }).defaultNow(), columnAll: timestamp('column_all', { mode: 'string' }).default('2023-03-01 12:47:29.792'), column: timestamp('column', { mode: 'string' }).default(sql`'2023-02-28 16:18:31.18'`), column2: timestamp('column2', { mode: 'string', precision: 3 }).default(sql`'2023-02-28 16:18:31.18'`), }), allUuids: customSchema.table('all_uuids', { columnAll: uuid('column_all').defaultRandom().notNull(), column: uuid('column'), }), allDates: customSchema.table('all_dates', { column_date_now: date('column_date_now').defaultNow(), column_all: date('column_all', { mode: 'date' }).default(new Date()).notNull(), column: date('column'), }), allReals: customSchema.table('all_reals', { columnAll: real('column_all').default(32).notNull(), column: real('column'), columnPrimary: real('column_primary').primaryKey().notNull(), }), allBigints: pgTable('all_bigints', { columnAll: bigint('column_all', { mode: 'number' }).default(124).notNull(), column: bigint('column', { mode: 'number' }), }), allBigserials: customSchema.table('all_bigserials', { columnAll: bigserial('column_all', { mode: 'bigint' }).notNull(), column: bigserial('column', { mode: 'bigint' }).notNull(), }), allIntervals: customSchema.table('all_intervals', { columnAllConstrains: interval('column_all_constrains', { fields: 'month', }) .default('1 mon') .notNull(), columnMinToSec: interval('column_min_to_sec', { fields: 'minute to second', }), columnWithoutFields: interval('column_without_fields').default('00:00:01').notNull(), column: interval('column'), column5: interval('column5', { fields: 'minute to second', precision: 3, }), column6: interval('column6'), }), allSerials: customSchema.table('all_serials', { columnAll: serial('column_all').notNull(), column: serial('column').notNull(), }), allTexts: customSchema.table( 'all_texts', { columnAll: text('column_all').default('text').notNull(), column: text('columns').primaryKey(), }, (t) => ({ cd: index('test').on(t.column), }), ), allBools: customSchema.table('all_bools', { columnAll: boolean('column_all').default(true).notNull(), column: boolean('column'), }), allVarchars: customSchema.table('all_varchars', { columnAll: varchar('column_all').default('text').notNull(), column: varchar('column', { length: 200 }), }), allTimes: customSchema.table('all_times', { columnDateNow: time('column_date_now').defaultNow(), columnAll: time('column_all').default('22:12:12').notNull(), column: time('column'), }), allChars: customSchema.table('all_chars', { columnAll: char('column_all', { length: 1 }).default('text').notNull(), column: char('column', { length: 1 }), }), allDoublePrecision: customSchema.table('all_double_precision', { columnAll: doublePrecision('column_all').default(33.2).notNull(), column: doublePrecision('column'), }), allJsonb: customSchema.table('all_jsonb', { columnDefaultObject: jsonb('column_default_object').default({ hello: 'world world' }).notNull(), columnDefaultArray: jsonb('column_default_array').default({ hello: { 'world world': ['foo', 'bar'] }, }), column: jsonb('column'), }), allJson: customSchema.table('all_json', { columnDefaultObject: json('column_default_object').default({ hello: 'world world' }).notNull(), columnDefaultArray: json('column_default_array').default({ hello: { 'world world': ['foo', 'bar'] }, foo: 'bar', fe: 23, }), column: json('column'), }), allIntegers: customSchema.table('all_integers', { columnAll: integer('column_all').primaryKey(), column: integer('column'), columnPrimary: integer('column_primary'), }), allNumerics: customSchema.table('all_numerics', { columnAll: numeric('column_all', { precision: 1, scale: 1 }).default('32').notNull(), column: numeric('column'), columnPrimary: numeric('column_primary').primaryKey().notNull(), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema1, [], false, [ 'public', 'schemass', ]); expect(statements.length).toBe(0); }, async addBasicIndexes() { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: serial('id').primaryKey(), name: text('name'), }), }; const schema2 = { users: pgTable( 'users', { id: serial('id').primaryKey(), name: text('name'), }, (t) => ({ indx: index() .on(t.name.desc(), t.id.asc().nullsLast()) .with({ fillfactor: 70 }) .where(sql`select 1`), indx1: index('indx1') .using('hash', t.name.desc(), sql`${t.name}`) .with({ fillfactor: 70 }), }), ), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ schema: '', tableName: 'users', type: 'create_index_pg', data: { columns: [ { asc: false, expression: 'name', isExpression: false, nulls: 'last', opclass: undefined, }, { asc: true, expression: 'id', isExpression: false, nulls: 'last', opclass: undefined, }, ], concurrently: false, isUnique: false, method: 'btree', name: 'users_name_id_index', where: 'select 1', with: { fillfactor: 70, }, }, }); expect(statements[1]).toStrictEqual({ schema: '', tableName: 'users', type: 'create_index_pg', data: { columns: [ { asc: false, expression: 'name', isExpression: false, nulls: 'last', opclass: undefined, }, { asc: true, expression: '"name"', isExpression: true, nulls: 'last', }, ], concurrently: false, isUnique: false, method: 'hash', name: 'indx1', where: undefined, with: { fillfactor: 70, }, }, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `CREATE INDEX "users_name_id_index" ON "users" USING btree ("name" DESC NULLS LAST,"id") WITH (fillfactor=70) WHERE select 1;`, ); expect(sqlStatements[1]).toBe( `CREATE INDEX "indx1" ON "users" USING hash ("name" DESC NULLS LAST,"name") WITH (fillfactor=70);`, ); }, async addGeneratedColumn() { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), }), }; const schema2 = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name}`), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { column: { generated: { as: '"users"."name"', type: 'stored', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name") STORED;', ]); // for (const st of sqlStatements) { // await client.query(st); // } }, async addGeneratedToColumn() { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name'), }), }; const schema2 = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name}`), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columnAutoIncrement: undefined, columnDefault: undefined, columnGenerated: { as: '"users"."name"', type: 'stored', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" drop column "gen_name";', 'ALTER TABLE "users" ADD COLUMN "gen_name" text GENERATED ALWAYS AS ("users"."name") STORED;', ]); // for (const st of sqlStatements) { // await client.query(st); // } }, async dropGeneratedConstraint() { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema1.users.name}`), }), }; const schema2 = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columnAutoIncrement: undefined, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual(['ALTER TABLE "users" ALTER COLUMN "gen_name" DROP EXPRESSION;']); }, async alterGeneratedConstraint() { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema1.users.name}`), }), }; const schema2 = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name} || 'hello'`), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }, async createTableWithGeneratedConstraint() { const client = new PGlite(); const schema1 = {}; const schema2 = { users: pgTable('users', { id: integer('id'), id2: integer('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs((): SQL => sql`${schema2.users.name} || 'hello'`), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columns: [ { name: 'id', notNull: false, primaryKey: false, type: 'integer', }, { name: 'id2', notNull: false, primaryKey: false, type: 'integer', }, { name: 'name', notNull: false, primaryKey: false, type: 'text', }, { generated: { as: '"users"."name" || \'hello\'', type: 'stored', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, ], compositePKs: [], compositePkName: '', isRLSEnabled: false, schema: '', tableName: 'users', policies: [], type: 'create_table', uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "users" (\n\t"id" integer,\n\t"id2" integer,\n\t"name" text,\n\t"gen_name" text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED\n);\n', ]); }, async addBasicSequences() { const client = new PGlite(); const schema1 = { seq: pgSequence('my_seq', { startWith: 100 }), }; const schema2 = { seq: pgSequence('my_seq', { startWith: 100 }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(0); }, async changeIndexFields() { const client = new PGlite(); const schema1 = { users: pgTable( 'users', { id: serial('id').primaryKey(), name: text('name'), }, (t) => ({ removeColumn: index('removeColumn').on(t.name, t.id), addColumn: index('addColumn').on(t.name.desc()).with({ fillfactor: 70 }), removeExpression: index('removeExpression') .on(t.name.desc(), sql`name`) .concurrently(), addExpression: index('addExpression').on(t.id.desc()), changeExpression: index('changeExpression').on(t.id.desc(), sql`name`), changeName: index('changeName').on(t.name.desc(), t.id.asc().nullsLast()).with({ fillfactor: 70 }), changeWith: index('changeWith').on(t.name).with({ fillfactor: 70 }), changeUsing: index('changeUsing').on(t.name), }), ), }; const schema2 = { users: pgTable( 'users', { id: serial('id').primaryKey(), name: text('name'), }, (t) => ({ removeColumn: index('removeColumn').on(t.name), addColumn: index('addColumn').on(t.name.desc(), t.id.nullsLast()).with({ fillfactor: 70 }), removeExpression: index('removeExpression').on(t.name.desc()).concurrently(), addExpression: index('addExpression').on(t.id.desc()), changeExpression: index('changeExpression').on(t.id.desc(), sql`name desc`), changeName: index('newName') .on(t.name.desc(), sql`name`) .with({ fillfactor: 70 }), changeWith: index('changeWith').on(t.name).with({ fillfactor: 90 }), changeUsing: index('changeUsing').using('hash', t.name), }), ), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(sqlStatements).toStrictEqual([ 'DROP INDEX "changeName";', 'DROP INDEX "addColumn";', 'DROP INDEX "changeExpression";', 'DROP INDEX "changeUsing";', 'DROP INDEX "changeWith";', 'DROP INDEX "removeColumn";', 'DROP INDEX "removeExpression";', 'CREATE INDEX "newName" ON "users" USING btree ("name" DESC NULLS LAST,name) WITH (fillfactor=70);', 'CREATE INDEX "addColumn" ON "users" USING btree ("name" DESC NULLS LAST,"id") WITH (fillfactor=70);', 'CREATE INDEX "changeExpression" ON "users" USING btree ("id" DESC NULLS LAST,name desc);', 'CREATE INDEX "changeUsing" ON "users" USING hash ("name");', 'CREATE INDEX "changeWith" ON "users" USING btree ("name") WITH (fillfactor=90);', 'CREATE INDEX "removeColumn" ON "users" USING btree ("name");', 'CREATE INDEX CONCURRENTLY "removeExpression" ON "users" USING btree ("name" DESC NULLS LAST);', ]); }, async dropIndex() { const client = new PGlite(); const schema1 = { users: pgTable( 'users', { id: serial('id').primaryKey(), name: text('name'), }, (t) => ({ indx: index().on(t.name.desc(), t.id.asc().nullsLast()).with({ fillfactor: 70 }), }), ), }; const schema2 = { users: pgTable('users', { id: serial('id').primaryKey(), name: text('name'), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ schema: '', tableName: 'users', type: 'drop_index', data: 'users_name_id_index;name--false--last,,id--true--last;false;btree;{"fillfactor":"70"}', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`DROP INDEX "users_name_id_index";`); }, async indexesToBeNotTriggered() { const client = new PGlite(); const schema1 = { users: pgTable( 'users', { id: serial('id').primaryKey(), name: text('name'), }, (t) => ({ indx: index('indx').on(t.name.desc()).concurrently(), indx1: index('indx1') .on(t.name.desc()) .where(sql`true`), indx2: index('indx2') .on(t.name.op('text_ops')) .where(sql`true`), indx3: index('indx3') .on(sql`lower(name)`) .where(sql`true`), }), ), }; const schema2 = { users: pgTable( 'users', { id: serial('id').primaryKey(), name: text('name'), }, (t) => ({ indx: index('indx').on(t.name.desc()), indx1: index('indx1') .on(t.name.desc()) .where(sql`false`), indx2: index('indx2') .on(t.name.op('test')) .where(sql`true`), indx3: index('indx3') .on(sql`lower(id)`) .where(sql`true`), }), ), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(0); }, async indexesTestCase1() { const client = new PGlite(); const schema1 = { users: pgTable( 'users', { id: uuid('id').defaultRandom().primaryKey(), name: text('name').notNull(), description: text('description'), imageUrl: text('image_url'), inStock: boolean('in_stock').default(true), }, (t) => ({ indx: index().on(t.id.desc().nullsFirst()), indx1: index('indx1').on(t.id, t.imageUrl), indx2: index('indx4').on(t.id), }), ), }; const schema2 = { users: pgTable( 'users', { id: uuid('id').defaultRandom().primaryKey(), name: text('name').notNull(), description: text('description'), imageUrl: text('image_url'), inStock: boolean('in_stock').default(true), }, (t) => ({ indx: index().on(t.id.desc().nullsFirst()), indx1: index('indx1').on(t.id, t.imageUrl), indx2: index('indx4').on(t.id), }), ), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(0); }, async addNotNull() { const client = new PGlite(); const schema1 = { users: pgTable( 'User', { id: text('id').primaryKey().notNull(), name: text('name'), username: text('username'), gh_username: text('gh_username'), email: text('email'), emailVerified: timestamp('emailVerified', { precision: 3, mode: 'date', }), image: text('image'), createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) .default(sql`CURRENT_TIMESTAMP`) .notNull(), updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) .notNull() .$onUpdate(() => new Date()), }, (table) => { return { emailKey: uniqueIndex('User_email_key').on(table.email), }; }, ), }; const schema2 = { users: pgTable( 'User', { id: text('id').primaryKey().notNull(), name: text('name'), username: text('username'), gh_username: text('gh_username'), email: text('email').notNull(), emailVerified: timestamp('emailVerified', { precision: 3, mode: 'date', }), image: text('image'), createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) .default(sql`CURRENT_TIMESTAMP`) .notNull(), updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) .notNull() .$onUpdate(() => new Date()), }, (table) => { return { emailKey: uniqueIndex('User_email_key').on(table.email), }; }, ), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); const query = async (sql: string, params?: any[]) => { const result = await client.query(sql, params ?? []); return result.rows as any[]; }; const { statementsToExecute } = await pgSuggestions({ query }, statements); expect(statementsToExecute).toStrictEqual(['ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;']); }, async addNotNullWithDataNoRollback() { const client = new PGlite(); const db = drizzle(client); const schema1 = { users: pgTable( 'User', { id: text('id').primaryKey().notNull(), name: text('name'), username: text('username'), gh_username: text('gh_username'), email: text('email'), emailVerified: timestamp('emailVerified', { precision: 3, mode: 'date', }), image: text('image'), createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) .default(sql`CURRENT_TIMESTAMP`) .notNull(), updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) .notNull() .$onUpdate(() => new Date()), }, (table) => { return { emailKey: uniqueIndex('User_email_key').on(table.email), }; }, ), }; const schema2 = { users: pgTable( 'User', { id: text('id').primaryKey().notNull(), name: text('name'), username: text('username'), gh_username: text('gh_username'), email: text('email').notNull(), emailVerified: timestamp('emailVerified', { precision: 3, mode: 'date', }), image: text('image'), createdAt: timestamp('createdAt', { precision: 3, mode: 'date' }) .default(sql`CURRENT_TIMESTAMP`) .notNull(), updatedAt: timestamp('updatedAt', { precision: 3, mode: 'date' }) .notNull() .$onUpdate(() => new Date()), }, (table) => { return { emailKey: uniqueIndex('User_email_key').on(table.email), }; }, ), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); const query = async (sql: string, params?: any[]) => { const result = await client.query(sql, params ?? []); return result.rows as any[]; }; await db.insert(schema1.users).values({ id: 'str', email: 'email@gmail' }); const { statementsToExecute, shouldAskForApprove } = await pgSuggestions({ query }, statements); expect(statementsToExecute).toStrictEqual(['ALTER TABLE "User" ALTER COLUMN "email" SET NOT NULL;']); expect(shouldAskForApprove).toBeFalsy(); }, async createCompositePrimaryKey() { const client = new PGlite(); const schema1 = {}; const schema2 = { table: pgTable('table', { col1: integer('col1').notNull(), col2: integer('col2').notNull(), }, (t) => ({ pk: primaryKey({ columns: [t.col1, t.col2], }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(statements).toStrictEqual([ { type: 'create_table', tableName: 'table', schema: '', compositePKs: ['col1,col2;table_col1_col2_pk'], compositePkName: 'table_col1_col2_pk', isRLSEnabled: false, policies: [], uniqueConstraints: [], checkConstraints: [], columns: [ { name: 'col1', type: 'integer', primaryKey: false, notNull: true }, { name: 'col2', type: 'integer', primaryKey: false, notNull: true }, ], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "table" (\n\t"col1" integer NOT NULL,\n\t"col2" integer NOT NULL,\n\tCONSTRAINT "table_col1_col2_pk" PRIMARY KEY("col1","col2")\n);\n', ]); }, async renameTableWithCompositePrimaryKey() { const client = new PGlite(); const productsCategoriesTable = (tableName: string) => { return pgTable(tableName, { productId: text('product_id').notNull(), categoryId: text('category_id').notNull(), }, (t) => ({ pk: primaryKey({ columns: [t.productId, t.categoryId], }), })); }; const schema1 = { table: productsCategoriesTable('products_categories'), }; const schema2 = { test: productsCategoriesTable('products_to_categories'), }; const { sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, ['public.products_categories->public.products_to_categories'], false, ['public'], ); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "products_categories" RENAME TO "products_to_categories";', 'ALTER TABLE "products_to_categories" DROP CONSTRAINT "products_categories_product_id_category_id_pk";', 'ALTER TABLE "products_to_categories" ADD CONSTRAINT "products_to_categories_product_id_category_id_pk" PRIMARY KEY("product_id","category_id");', ]); }, // async addVectorIndexes() { // const client = new PGlite(); // const schema1 = { // users: pgTable("users", { // id: serial("id").primaryKey(), // name: vector("name", { dimensions: 3 }), // }), // }; // const schema2 = { // users: pgTable( // "users", // { // id: serial("id").primaryKey(), // embedding: vector("name", { dimensions: 3 }), // }, // (t) => ({ // indx2: index("vector_embedding_idx") // .using("hnsw", t.embedding.op("vector_ip_ops")) // .with({ m: 16, ef_construction: 64 }), // }) // ), // }; // const { statements, sqlStatements } = await diffTestSchemasPush( // client, // schema1, // schema2, // [], // false, // ["public"] // ); // expect(statements.length).toBe(1); // expect(statements[0]).toStrictEqual({ // schema: "", // tableName: "users", // type: "create_index", // data: 'vector_embedding_idx;name,true,last,vector_ip_ops;false;false;hnsw;undefined;{"m":16,"ef_construction":64}', // }); // expect(sqlStatements.length).toBe(1); // expect(sqlStatements[0]).toBe( // `CREATE INDEX IF NOT EXISTS "vector_embedding_idx" ON "users" USING hnsw (name vector_ip_ops) WITH (m=16,ef_construction=64);` // ); // }, async case1() { // TODO: implement if needed expect(true).toBe(true); }, }; run(pgSuite); test('full sequence: no changes', async () => { const client = new PGlite(); const schema1 = { seq: pgSequence('my_seq', { startWith: 100, maxValue: 10000, minValue: 100, cycle: true, cache: 10, increment: 2, }), }; const schema2 = { seq: pgSequence('my_seq', { startWith: 100, maxValue: 10000, minValue: 100, cycle: true, cache: 10, increment: 2, }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); for (const st of sqlStatements) { await client.query(st); } }); test('basic sequence: change fields', async () => { const client = new PGlite(); const schema1 = { seq: pgSequence('my_seq', { startWith: 100, maxValue: 10000, minValue: 100, cycle: true, cache: 10, increment: 2, }), }; const schema2 = { seq: pgSequence('my_seq', { startWith: 100, maxValue: 100000, minValue: 100, cycle: true, cache: 10, increment: 4, }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { type: 'alter_sequence', schema: 'public', name: 'my_seq', values: { minValue: '100', maxValue: '100000', increment: '4', startWith: '100', cache: '10', cycle: true, }, }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER SEQUENCE "public"."my_seq" INCREMENT BY 4 MINVALUE 100 MAXVALUE 100000 START WITH 100 CACHE 10 CYCLE;', ]); for (const st of sqlStatements) { await client.query(st); } }); test('basic sequence: change name', async () => { const client = new PGlite(); const schema1 = { seq: pgSequence('my_seq', { startWith: 100, maxValue: 10000, minValue: 100, cycle: true, cache: 10, increment: 2, }), }; const schema2 = { seq: pgSequence('my_seq2', { startWith: 100, maxValue: 10000, minValue: 100, cycle: true, cache: 10, increment: 2, }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, ['public.my_seq->public.my_seq2'], false, ['public'], ); expect(statements).toStrictEqual([ { nameFrom: 'my_seq', nameTo: 'my_seq2', schema: 'public', type: 'rename_sequence', }, ]); expect(sqlStatements).toStrictEqual(['ALTER SEQUENCE "public"."my_seq" RENAME TO "my_seq2";']); for (const st of sqlStatements) { await client.query(st); } }); test('basic sequence: change name and fields', async () => { const client = new PGlite(); const schema1 = { seq: pgSequence('my_seq', { startWith: 100, maxValue: 10000, minValue: 100, cycle: true, cache: 10, increment: 2, }), }; const schema2 = { seq: pgSequence('my_seq2', { startWith: 100, maxValue: 10000, minValue: 100, cycle: true, cache: 10, increment: 4, }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, ['public.my_seq->public.my_seq2'], false, ['public'], ); expect(statements).toStrictEqual([ { nameFrom: 'my_seq', nameTo: 'my_seq2', schema: 'public', type: 'rename_sequence', }, { name: 'my_seq2', schema: 'public', type: 'alter_sequence', values: { cache: '10', cycle: true, increment: '4', maxValue: '10000', minValue: '100', startWith: '100', }, }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER SEQUENCE "public"."my_seq" RENAME TO "my_seq2";', 'ALTER SEQUENCE "public"."my_seq2" INCREMENT BY 4 MINVALUE 100 MAXVALUE 10000 START WITH 100 CACHE 10 CYCLE;', ]); for (const st of sqlStatements) { await client.query(st); } }); // identity push tests test('create table: identity always/by default - no params', async () => { const client = new PGlite(); const schema1 = {}; const schema2 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity(), id2: smallint('id2').generatedByDefaultAsIdentity(), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columns: [ { identity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', name: 'id', notNull: true, primaryKey: false, type: 'integer', }, { identity: 'users_id1_seq;byDefault;1;9223372036854775807;1;1;1;false', name: 'id1', notNull: true, primaryKey: false, type: 'bigint', }, { identity: 'users_id2_seq;byDefault;1;32767;1;1;1;false', name: 'id2', notNull: true, primaryKey: false, type: 'smallint', }, ], compositePKs: [], compositePkName: '', schema: '', tableName: 'users', policies: [], type: 'create_table', uniqueConstraints: [], isRLSEnabled: false, checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 9223372036854775807 START WITH 1 CACHE 1),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1)\n);\n', ]); for (const st of sqlStatements) { await client.query(st); } }); test('create table: identity always/by default - few params', async () => { const client = new PGlite(); const schema1 = {}; const schema2 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ increment: 4 }), id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity({ startWith: 120, maxValue: 17000, }), id2: smallint('id2').generatedByDefaultAsIdentity({ cycle: true }), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columns: [ { identity: 'users_id_seq;byDefault;1;2147483647;4;1;1;false', name: 'id', notNull: true, primaryKey: false, type: 'integer', }, { identity: 'users_id1_seq;byDefault;1;17000;1;120;1;false', name: 'id1', notNull: true, primaryKey: false, type: 'bigint', }, { identity: 'users_id2_seq;byDefault;1;32767;1;1;1;true', name: 'id2', notNull: true, primaryKey: false, type: 'smallint', }, ], compositePKs: [], compositePkName: '', policies: [], schema: '', tableName: 'users', type: 'create_table', isRLSEnabled: false, uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 17000 START WITH 120 CACHE 1),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1 CYCLE)\n);\n', ]); for (const st of sqlStatements) { await client.query(st); } }); test('create table: identity always/by default - all params', async () => { const client = new PGlite(); const schema1 = {}; const schema2 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ increment: 4, minValue: 100, }), id1: bigint('id1', { mode: 'number' }).generatedByDefaultAsIdentity({ startWith: 120, maxValue: 17000, increment: 3, cycle: true, cache: 100, }), id2: smallint('id2').generatedByDefaultAsIdentity({ cycle: true }), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columns: [ { identity: 'users_id_seq;byDefault;100;2147483647;4;100;1;false', name: 'id', notNull: true, primaryKey: false, type: 'integer', }, { identity: 'users_id1_seq;byDefault;1;17000;3;120;100;true', name: 'id1', notNull: true, primaryKey: false, type: 'bigint', }, { identity: 'users_id2_seq;byDefault;1;32767;1;1;1;true', name: 'id2', notNull: true, primaryKey: false, type: 'smallint', }, ], compositePKs: [], compositePkName: '', schema: '', tableName: 'users', type: 'create_table', policies: [], isRLSEnabled: false, uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "users" (\n\t"id" integer GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id_seq" INCREMENT BY 4 MINVALUE 100 MAXVALUE 2147483647 START WITH 100 CACHE 1),\n\t"id1" bigint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id1_seq" INCREMENT BY 3 MINVALUE 1 MAXVALUE 17000 START WITH 120 CACHE 100 CYCLE),\n\t"id2" smallint GENERATED BY DEFAULT AS IDENTITY (sequence name "users_id2_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 32767 START WITH 1 CACHE 1 CYCLE)\n);\n', ]); for (const st of sqlStatements) { await client.query(st); } }); test('no diff: identity always/by default - no params', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), id2: integer('id2').generatedAlwaysAsIdentity(), }), }; const schema2 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), id2: integer('id2').generatedAlwaysAsIdentity(), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('no diff: identity always/by default - few params', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name', }), id2: integer('id2').generatedAlwaysAsIdentity({ increment: 1, startWith: 3, }), }), }; const schema2 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name', }), id2: integer('id2').generatedAlwaysAsIdentity({ increment: 1, startWith: 3, }), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('no diff: identity always/by default - all params', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name', startWith: 10, minValue: 10, maxValue: 1000, cycle: true, cache: 10, increment: 2, }), id2: integer('id2').generatedAlwaysAsIdentity({ startWith: 10, minValue: 10, maxValue: 1000, cycle: true, cache: 10, increment: 2, }), }), }; const schema2 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name', startWith: 10, minValue: 10, maxValue: 1000, cycle: true, cache: 10, increment: 2, }), id2: integer('id2').generatedAlwaysAsIdentity({ startWith: 10, minValue: 10, maxValue: 1000, cycle: true, cache: 10, increment: 2, }), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('drop identity from a column - no params', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), }), }; const schema2 = { users: pgTable('users', { id: integer('id'), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columnName: 'id', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_identity', }, ]); expect(sqlStatements).toStrictEqual([`ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`]); for (const st of sqlStatements) { await client.query(st); } }); test('drop identity from a column - few params', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name' }), id1: integer('id1').generatedByDefaultAsIdentity({ name: 'custom_name1', increment: 4, }), id2: integer('id2').generatedAlwaysAsIdentity({ name: 'custom_name2', increment: 4, }), }), }; const schema2 = { users: pgTable('users', { id: integer('id'), id1: integer('id1'), id2: integer('id2'), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columnName: 'id', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_identity', }, { columnName: 'id1', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_identity', }, { columnName: 'id2', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_identity', }, ]); expect(sqlStatements).toStrictEqual([ `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, 'ALTER TABLE "users" ALTER COLUMN "id1" DROP IDENTITY;', 'ALTER TABLE "users" ALTER COLUMN "id2" DROP IDENTITY;', ]); for (const st of sqlStatements) { await client.query(st); } }); test('drop identity from a column - all params', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), id1: integer('id1').generatedByDefaultAsIdentity({ name: 'custom_name1', startWith: 10, minValue: 10, maxValue: 1000, cycle: true, cache: 10, increment: 2, }), id2: integer('id2').generatedAlwaysAsIdentity({ name: 'custom_name2', startWith: 10, minValue: 10, maxValue: 1000, cycle: true, cache: 10, increment: 2, }), }), }; const schema2 = { users: pgTable('users', { id: integer('id'), id1: integer('id1'), id2: integer('id2'), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columnName: 'id', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_identity', }, { columnName: 'id1', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_identity', }, { columnName: 'id2', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_identity', }, ]); expect(sqlStatements).toStrictEqual([ `ALTER TABLE \"users\" ALTER COLUMN \"id\" DROP IDENTITY;`, 'ALTER TABLE "users" ALTER COLUMN "id1" DROP IDENTITY;', 'ALTER TABLE "users" ALTER COLUMN "id2" DROP IDENTITY;', ]); for (const st of sqlStatements) { await client.query(st); } }); test('alter identity from a column - no params', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity(), }), }; const schema2 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columnName: 'id', identity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;1;1;false', schema: '', tableName: 'users', type: 'alter_table_alter_column_change_identity', }, ]); expect(sqlStatements).toStrictEqual(['ALTER TABLE "users" ALTER COLUMN "id" SET START WITH 100;']); for (const st of sqlStatements) { await client.query(st); } }); test('alter identity from a column - few params', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), }), }; const schema2 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 100, increment: 4, maxValue: 10000, }), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columnName: 'id', identity: 'users_id_seq;byDefault;1;10000;4;100;1;false', oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', schema: '', tableName: 'users', type: 'alter_table_alter_column_change_identity', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ALTER COLUMN "id" SET MAXVALUE 10000;', 'ALTER TABLE "users" ALTER COLUMN "id" SET INCREMENT BY 4;', ]); for (const st of sqlStatements) { await client.query(st); } }); test('alter identity from a column - by default to always', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 100 }), }), }; const schema2 = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity({ startWith: 100, increment: 4, maxValue: 10000, }), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columnName: 'id', identity: 'users_id_seq;always;1;10000;4;100;1;false', oldIdentity: 'users_id_seq;byDefault;1;2147483647;1;100;1;false', schema: '', tableName: 'users', type: 'alter_table_alter_column_change_identity', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED ALWAYS;', 'ALTER TABLE "users" ALTER COLUMN "id" SET MAXVALUE 10000;', 'ALTER TABLE "users" ALTER COLUMN "id" SET INCREMENT BY 4;', ]); for (const st of sqlStatements) { await client.query(st); } }); test('alter identity from a column - always to by default', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').generatedAlwaysAsIdentity({ startWith: 100 }), }), }; const schema2 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ startWith: 100, increment: 4, maxValue: 10000, cycle: true, cache: 100, }), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columnName: 'id', identity: 'users_id_seq;byDefault;1;10000;4;100;100;true', oldIdentity: 'users_id_seq;always;1;2147483647;1;100;1;false', schema: '', tableName: 'users', type: 'alter_table_alter_column_change_identity', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ALTER COLUMN "id" SET GENERATED BY DEFAULT;', 'ALTER TABLE "users" ALTER COLUMN "id" SET MAXVALUE 10000;', 'ALTER TABLE "users" ALTER COLUMN "id" SET INCREMENT BY 4;', 'ALTER TABLE "users" ALTER COLUMN "id" SET CACHE 100;', 'ALTER TABLE "users" ALTER COLUMN "id" SET CYCLE;', ]); for (const st of sqlStatements) { await client.query(st); } }); test('add column with identity - few params', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { email: text('email'), }), }; const schema2 = { users: pgTable('users', { email: text('email'), id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name' }), id1: integer('id1').generatedAlwaysAsIdentity({ name: 'custom_name1', increment: 4, }), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { column: { identity: 'custom_name;byDefault;1;2147483647;1;1;1;false', name: 'id', notNull: true, primaryKey: false, type: 'integer', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, { column: { identity: 'custom_name1;always;1;2147483647;4;1;1;false', name: 'id1', notNull: true, primaryKey: false, type: 'integer', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ADD COLUMN "id" integer NOT NULL GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', 'ALTER TABLE "users" ADD COLUMN "id1" integer NOT NULL GENERATED ALWAYS AS IDENTITY (sequence name "custom_name1" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', ]); // for (const st of sqlStatements) { // await client.query(st); // } }); test('add identity to column - few params', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id'), id1: integer('id1'), }), }; const schema2 = { users: pgTable('users', { id: integer('id').generatedByDefaultAsIdentity({ name: 'custom_name' }), id1: integer('id1').generatedAlwaysAsIdentity({ name: 'custom_name1', increment: 4, }), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { columnName: 'id', identity: 'custom_name;byDefault;1;2147483647;1;1;1;false', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_identity', }, { columnName: 'id1', identity: 'custom_name1;always;1;2147483647;4;1;1;false', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_identity', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ALTER COLUMN "id" ADD GENERATED BY DEFAULT AS IDENTITY (sequence name "custom_name" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', 'ALTER TABLE "users" ALTER COLUMN "id1" ADD GENERATED ALWAYS AS IDENTITY (sequence name "custom_name1" INCREMENT BY 4 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1);', ]); // for (const st of sqlStatements) { // await client.query(st); // } }); test('add array column - empty array default', async () => { const client = new PGlite(); const schema1 = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const schema2 = { test: pgTable('test', { id: serial('id').primaryKey(), values: integer('values').array().default([]), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{}'" }, }, ]); expect(sqlStatements).toStrictEqual(['ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{}\';']); }); test('add array column - default', async () => { const client = new PGlite(); const schema1 = { test: pgTable('test', { id: serial('id').primaryKey(), }), }; const schema2 = { test: pgTable('test', { id: serial('id').primaryKey(), values: integer('values').array().default([1, 2, 3]), }), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { type: 'alter_table_add_column', tableName: 'test', schema: '', column: { name: 'values', type: 'integer[]', primaryKey: false, notNull: false, default: "'{1,2,3}'" }, }, ]); expect(sqlStatements).toStrictEqual(['ALTER TABLE "test" ADD COLUMN "values" integer[] DEFAULT \'{1,2,3}\';']); }); test('create view', async () => { const client = new PGlite(); const table = pgTable('test', { id: serial('id').primaryKey(), }); const schema1 = { test: table, }; const schema2 = { test: table, view: pgView('view').as((qb) => qb.selectDistinct().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { definition: 'select distinct "id" from "test"', name: 'view', schema: 'public', type: 'create_view', with: undefined, materialized: false, tablespace: undefined, using: undefined, withNoData: false, }, ]); expect(sqlStatements).toStrictEqual(['CREATE VIEW "public"."view" AS (select distinct "id" from "test");']); }); test('add check constraint to table', async () => { const client = new PGlite(); const schema1 = { test: pgTable('test', { id: serial('id').primaryKey(), values: integer('values').array().default([1, 2, 3]), }), }; const schema2 = { test: pgTable('test', { id: serial('id').primaryKey(), values: integer('values').array().default([1, 2, 3]), }, (table) => ({ checkConstraint1: check('some_check1', sql`${table.values} < 100`), checkConstraint2: check('some_check2', sql`'test' < 100`), })), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { type: 'create_check_constraint', tableName: 'test', schema: '', data: 'some_check1;"test"."values" < 100', }, { data: "some_check2;'test' < 100", schema: '', tableName: 'test', type: 'create_check_constraint', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "test" ADD CONSTRAINT "some_check1" CHECK ("test"."values" < 100);', `ALTER TABLE "test" ADD CONSTRAINT "some_check2" CHECK ('test' < 100);`, ]); }); test('create materialized view', async () => { const client = new PGlite(); const table = pgTable('test', { id: serial('id').primaryKey(), }); const schema1 = { test: table, }; const schema2 = { test: table, view: pgMaterializedView('view') .withNoData() .using('heap') .as((qb) => qb.selectDistinct().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { definition: 'select distinct "id" from "test"', name: 'view', schema: 'public', type: 'create_view', with: undefined, materialized: true, tablespace: undefined, using: 'heap', withNoData: true, }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE MATERIALIZED VIEW "public"."view" USING "heap" AS (select distinct "id" from "test") WITH NO DATA;', ]); }); test('drop check constraint', async () => { const client = new PGlite(); const schema1 = { test: pgTable('test', { id: serial('id').primaryKey(), values: integer('values').default(1), }, (table) => ({ checkConstraint: check('some_check', sql`${table.values} < 100`), })), }; const schema2 = { test: pgTable('test', { id: serial('id').primaryKey(), values: integer('values').default(1), }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(statements).toStrictEqual([ { type: 'delete_check_constraint', tableName: 'test', schema: '', constraintName: 'some_check', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "test" DROP CONSTRAINT "some_check";', ]); }); test('Column with same name as enum', async () => { const client = new PGlite(); const statusEnum = pgEnum('status', ['inactive', 'active', 'banned']); const schema1 = { statusEnum, table1: pgTable('table1', { id: serial('id').primaryKey(), }), }; const schema2 = { statusEnum, table1: pgTable('table1', { id: serial('id').primaryKey(), status: statusEnum('status').default('inactive'), }), table2: pgTable('table2', { id: serial('id').primaryKey(), status: statusEnum('status').default('inactive'), }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(statements).toStrictEqual([ { type: 'create_table', tableName: 'table2', schema: '', compositePKs: [], compositePkName: '', isRLSEnabled: false, policies: [], uniqueConstraints: [], checkConstraints: [], columns: [ { name: 'id', type: 'serial', primaryKey: true, notNull: true }, { name: 'status', type: 'status', typeSchema: 'public', primaryKey: false, notNull: false, default: "'inactive'", }, ], }, { type: 'alter_table_add_column', tableName: 'table1', schema: '', column: { name: 'status', type: 'status', typeSchema: 'public', primaryKey: false, notNull: false, default: "'inactive'", }, }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "table2" (\n\t"id" serial PRIMARY KEY NOT NULL,\n\t"status" "status" DEFAULT \'inactive\'\n);\n', 'ALTER TABLE "table1" ADD COLUMN "status" "status" DEFAULT \'inactive\';', ]); }); test('db has checks. Push with same names', async () => { const client = new PGlite(); const schema1 = { test: pgTable('test', { id: serial('id').primaryKey(), values: integer('values').default(1), }, (table) => ({ checkConstraint: check('some_check', sql`${table.values} < 100`), })), }; const schema2 = { test: pgTable('test', { id: serial('id').primaryKey(), values: integer('values').default(1), }, (table) => ({ checkConstraint: check('some_check', sql`some new value`), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('drop view', async () => { const client = new PGlite(); const table = pgTable('test', { id: serial('id').primaryKey(), }); const schema1 = { test: table, view: pgView('view').as((qb) => qb.selectDistinct().from(table)), }; const schema2 = { test: table, }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { name: 'view', schema: 'public', type: 'drop_view', }, ]); expect(sqlStatements).toStrictEqual(['DROP VIEW "public"."view";']); }); test('drop materialized view', async () => { const client = new PGlite(); const table = pgTable('test', { id: serial('id').primaryKey(), }); const schema1 = { test: table, view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), }; const schema2 = { test: table, }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([ { name: 'view', schema: 'public', type: 'drop_view', materialized: true, }, ]); expect(sqlStatements).toStrictEqual(['DROP MATERIALIZED VIEW "public"."view";']); }); test('push view with same name', async () => { const client = new PGlite(); const table = pgTable('test', { id: serial('id').primaryKey(), }); const schema1 = { test: table, view: pgView('view').as((qb) => qb.selectDistinct().from(table)), }; const schema2 = { test: table, view: pgView('view').as((qb) => qb.selectDistinct().from(table).where(eq(table.id, 1))), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('push materialized view with same name', async () => { const client = new PGlite(); const table = pgTable('test', { id: serial('id').primaryKey(), }); const schema1 = { test: table, view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), }; const schema2 = { test: table, view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table).where(eq(table.id, 1))), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('add with options for materialized view', async () => { const client = new PGlite(); const table = pgTable('test', { id: serial('id').primaryKey(), }); const schema1 = { test: table, view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), }; const schema2 = { test: table, view: pgMaterializedView('view') .with({ autovacuumFreezeTableAge: 1, autovacuumEnabled: false }) .as((qb) => qb.selectDistinct().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'view', schema: 'public', type: 'alter_view_add_with_option', with: { autovacuumFreezeTableAge: 1, autovacuumEnabled: false, }, materialized: true, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."view" SET (autovacuum_enabled = false, autovacuum_freeze_table_age = 1);`, ); }); test('add with options to materialized', async () => { const client = new PGlite(); const table = pgTable('test', { id: serial('id').primaryKey(), }); const schema1 = { test: table, view: pgMaterializedView('view').as((qb) => qb.selectDistinct().from(table)), }; const schema2 = { test: table, view: pgMaterializedView('view') .with({ autovacuumVacuumCostDelay: 100, vacuumTruncate: false }) .as((qb) => qb.selectDistinct().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'view', schema: 'public', type: 'alter_view_add_with_option', with: { autovacuumVacuumCostDelay: 100, vacuumTruncate: false, }, materialized: true, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER MATERIALIZED VIEW "public"."view" SET (vacuum_truncate = false, autovacuum_vacuum_cost_delay = 100);`, ); }); test('add with options to materialized with existing flag', async () => { const client = new PGlite(); const table = pgTable('test', { id: serial('id').primaryKey(), }); const schema1 = { test: table, view: pgMaterializedView('view', {}).as(sql`SELECT id FROM "test"`), }; const schema2 = { test: table, view: pgMaterializedView('view', {}).with({ autovacuumVacuumCostDelay: 100, vacuumTruncate: false }).existing(), }; const { statements, sqlStatements } = await diffTestSchemasPush(client, schema1, schema2, [], false, ['public']); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop mat view with data', async () => { const client = new PGlite(); const table = pgTable('table', { id: serial('id').primaryKey(), }); const schema1 = { test: table, view: pgMaterializedView('view', {}).as(sql`SELECT * FROM ${table}`), }; const schema2 = { test: table, }; const seedStatements = [`INSERT INTO "public"."table" ("id") VALUES (1), (2), (3)`]; const { statements, sqlStatements, columnsToRemove, infoToPrint, schemasToRemove, shouldAskForApprove, tablesToRemove, tablesToTruncate, matViewsToRemove, } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, undefined, { after: seedStatements }, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ materialized: true, name: 'view', schema: 'public', type: 'drop_view', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."view";`); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe(`· You're about to delete "${chalk.underline('view')}" materialized view with 3 items`); expect(columnsToRemove!.length).toBe(0); expect(schemasToRemove!.length).toBe(0); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(0); expect(matViewsToRemove!.length).toBe(1); }); test('drop mat view without data', async () => { const client = new PGlite(); const table = pgTable('table', { id: serial('id').primaryKey(), }); const schema1 = { test: table, view: pgMaterializedView('view', {}).as(sql`SELECT * FROM ${table}`), }; const schema2 = { test: table, }; const { statements, sqlStatements, columnsToRemove, infoToPrint, schemasToRemove, shouldAskForApprove, tablesToRemove, tablesToTruncate, matViewsToRemove, } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ materialized: true, name: 'view', schema: 'public', type: 'drop_view', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`DROP MATERIALIZED VIEW "public"."view";`); expect(infoToPrint!.length).toBe(0); expect(columnsToRemove!.length).toBe(0); expect(schemasToRemove!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(matViewsToRemove!.length).toBe(0); }); test('drop view with data', async () => { const client = new PGlite(); const table = pgTable('table', { id: serial('id').primaryKey(), }); const schema1 = { test: table, view: pgView('view', {}).as(sql`SELECT * FROM ${table}`), }; const schema2 = { test: table, }; const seedStatements = [`INSERT INTO "public"."table" ("id") VALUES (1), (2), (3)`]; const { statements, sqlStatements, columnsToRemove, infoToPrint, schemasToRemove, shouldAskForApprove, tablesToRemove, tablesToTruncate, matViewsToRemove, } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, undefined, { after: seedStatements }, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'view', schema: 'public', type: 'drop_view', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`DROP VIEW "public"."view";`); expect(infoToPrint!.length).toBe(0); expect(columnsToRemove!.length).toBe(0); expect(schemasToRemove!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(matViewsToRemove!.length).toBe(0); }); test('enums ordering', async () => { const enum1 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ 'custAll', 'custAdmin', 'custClerk', 'custInvoiceManager', 'custMgf', 'custApprover', 'custOrderWriter', 'custBuyer', ]); const schema1 = {}; const schema2 = { enum1, }; const { sqlStatements: createEnum } = await diffTestSchemas(schema1, schema2, []); const enum2 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ 'addedToTop', 'custAll', 'custAdmin', 'custClerk', 'custInvoiceManager', 'custMgf', 'custApprover', 'custOrderWriter', 'custBuyer', ]); const schema3 = { enum2, }; const { sqlStatements: addedValueSql } = await diffTestSchemas(schema2, schema3, []); const enum3 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ 'addedToTop', 'custAll', 'custAdmin', 'custClerk', 'custInvoiceManager', 'addedToMiddle', 'custMgf', 'custApprover', 'custOrderWriter', 'custBuyer', ]); const schema4 = { enum3, }; const client = new PGlite(); const { statements, sqlStatements } = await diffTestSchemasPush( client, schema3, schema4, [], false, ['public'], undefined, undefined, { before: [...createEnum, ...addedValueSql], runApply: false }, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ before: 'custMgf', name: 'enum_users_customer_and_ship_to_settings_roles', schema: 'public', type: 'alter_type_add_value', value: 'addedToMiddle', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TYPE "public"."enum_users_customer_and_ship_to_settings_roles" ADD VALUE 'addedToMiddle' BEFORE 'custMgf';`, ); }); test('drop enum values', async () => { const newSchema = pgSchema('mySchema'); const enum3 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ 'addedToTop', 'custAll', 'custAdmin', 'custClerk', 'custInvoiceManager', 'addedToMiddle', 'custMgf', 'custApprover', 'custOrderWriter', 'custBuyer', ]); const schema1 = { enum3, table: pgTable('enum_table', { id: enum3(), }), newSchema, table1: newSchema.table('enum_table', { id: enum3(), }), }; const enum4 = pgEnum('enum_users_customer_and_ship_to_settings_roles', [ 'addedToTop', 'custAll', 'custAdmin', 'custClerk', 'custInvoiceManager', 'custApprover', 'custOrderWriter', 'custBuyer', ]); const schema2 = { enum4, table: pgTable('enum_table', { id: enum4(), }), newSchema, table1: newSchema.table('enum_table', { id: enum4(), }), }; const client = new PGlite(); const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public', 'mySchema'], undefined, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'enum_users_customer_and_ship_to_settings_roles', enumSchema: 'public', type: 'alter_type_drop_value', newValues: [ 'addedToTop', 'custAll', 'custAdmin', 'custClerk', 'custInvoiceManager', 'custApprover', 'custOrderWriter', 'custBuyer', ], deletedValues: ['addedToMiddle', 'custMgf'], columnsWithEnum: [{ column: 'id', tableSchema: '', table: 'enum_table', columnType: 'enum_users_customer_and_ship_to_settings_roles', default: undefined, }, { column: 'id', tableSchema: 'mySchema', table: 'enum_table', columnType: 'enum_users_customer_and_ship_to_settings_roles', default: undefined, }], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe( `ALTER TABLE "enum_table" ALTER COLUMN "id" SET DATA TYPE text;`, ); expect(sqlStatements[1]).toBe( `ALTER TABLE "mySchema"."enum_table" ALTER COLUMN "id" SET DATA TYPE text;`, ); expect(sqlStatements[2]).toBe( `DROP TYPE "public"."enum_users_customer_and_ship_to_settings_roles";`, ); expect(sqlStatements[3]).toBe( `CREATE TYPE "public"."enum_users_customer_and_ship_to_settings_roles" AS ENUM('addedToTop', 'custAll', 'custAdmin', 'custClerk', 'custInvoiceManager', 'custApprover', 'custOrderWriter', 'custBuyer');`, ); expect(sqlStatements[4]).toBe( `ALTER TABLE "enum_table" ALTER COLUMN "id" SET DATA TYPE "public"."enum_users_customer_and_ship_to_settings_roles" USING "id"::"public"."enum_users_customer_and_ship_to_settings_roles";`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "mySchema"."enum_table" ALTER COLUMN "id" SET DATA TYPE "public"."enum_users_customer_and_ship_to_settings_roles" USING "id"::"public"."enum_users_customer_and_ship_to_settings_roles";`, ); }); test('column is enum type with default value. shuffle enum', async () => { const client = new PGlite(); const enum1 = pgEnum('enum', ['value1', 'value2', 'value3']); const from = { enum1, table: pgTable('table', { column: enum1('column').default('value2'), }), }; const enum2 = pgEnum('enum', ['value1', 'value3', 'value2']); const to = { enum2, table: pgTable('table', { column: enum2('column').default('value2'), }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, from, to, [], false, ['public'], undefined, ); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE text;`); expect(sqlStatements[1]).toBe(`ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::text;`); expect(sqlStatements[2]).toBe(`DROP TYPE "public"."enum";`); expect(sqlStatements[3]).toBe(`CREATE TYPE "public"."enum" AS ENUM('value1', 'value3', 'value2');`); expect(sqlStatements[4]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DEFAULT 'value2'::"public"."enum";`, ); expect(sqlStatements[5]).toBe( `ALTER TABLE "table" ALTER COLUMN "column" SET DATA TYPE "public"."enum" USING "column"::"public"."enum";`, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnsWithEnum: [ { column: 'column', tableSchema: '', table: 'table', default: "'value2'", columnType: 'enum', }, ], deletedValues: [ 'value3', ], name: 'enum', newValues: [ 'value1', 'value3', 'value2', ], enumSchema: 'public', type: 'alter_type_drop_value', }); }); // Policies and Roles push test test('full policy: no changes', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); for (const st of sqlStatements) { await client.query(st); } }); test('add policy', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(statements).toStrictEqual([ { type: 'enable_rls', tableName: 'users', schema: '' }, { type: 'create_policy', tableName: 'users', data: { name: 'test', as: 'PERMISSIVE', for: 'ALL', to: ['public'], on: undefined, }, schema: '', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', ]); for (const st of sqlStatements) { await client.query(st); } }); test('drop policy', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(statements).toStrictEqual([ { type: 'disable_rls', tableName: 'users', schema: '' }, { schema: '', tableName: 'users', type: 'disable_rls', }, { type: 'drop_policy', tableName: 'users', data: { name: 'test', as: 'PERMISSIVE', for: 'ALL', to: ['public'], on: undefined, }, schema: '', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', 'DROP POLICY "test" ON "users" CASCADE;', ]); for (const st of sqlStatements) { await client.query(st); } }); test('add policy without enable rls', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), newrls: pgPolicy('newRls'), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(statements).toStrictEqual([ { type: 'create_policy', tableName: 'users', data: { name: 'newRls', as: 'PERMISSIVE', for: 'ALL', to: ['public'], on: undefined, }, schema: '', }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE POLICY "newRls" ON "users" AS PERMISSIVE FOR ALL TO public;', ]); for (const st of sqlStatements) { await client.query(st); } }); test('drop policy without disable rls', async () => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), oldRls: pgPolicy('oldRls'), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(statements).toStrictEqual([ { type: 'drop_policy', tableName: 'users', data: { name: 'oldRls', as: 'PERMISSIVE', for: 'ALL', to: ['public'], on: undefined, }, schema: '', }, ]); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "oldRls" ON "users" CASCADE;', ]); for (const st of sqlStatements) { await client.query(st); } }); //// test('alter policy without recreation: changing roles', async (t) => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive', to: 'current_role' }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "users" TO current_role;', ]); expect(statements).toStrictEqual([ { newData: 'test--PERMISSIVE--ALL--current_role--undefined', oldData: 'test--PERMISSIVE--ALL--public--undefined', schema: '', tableName: 'users', type: 'alter_policy', }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('alter policy without recreation: changing using', async (t) => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive', using: sql`true` }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(sqlStatements).toStrictEqual([]); expect(statements).toStrictEqual([]); for (const st of sqlStatements) { await client.query(st); } }); test('alter policy without recreation: changing with check', async (t) => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive', withCheck: sql`true` }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(sqlStatements).toStrictEqual([]); expect(statements).toStrictEqual([]); for (const st of sqlStatements) { await client.query(st); } }); test('alter policy with recreation: changing as', async (t) => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'restrictive' }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users" CASCADE;', 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, { data: { as: 'RESTRICTIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('alter policy with recreation: changing for', async (t) => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive', for: 'delete' }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users" CASCADE;', 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, { data: { as: 'PERMISSIVE', for: 'DELETE', name: 'test', to: ['public'], on: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('alter policy with recreation: changing both "as" and "for"', async (t) => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'restrictive', for: 'insert' }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users" CASCADE;', 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR INSERT TO public;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, { data: { as: 'RESTRICTIVE', for: 'INSERT', name: 'test', to: ['public'], on: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('alter policy with recreation: changing all fields', async (t) => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive', for: 'select', using: sql`true` }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'restrictive', to: 'current_role', withCheck: sql`true` }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users" CASCADE;', 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO current_role;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'SELECT', name: 'test', to: ['public'], on: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, { data: { as: 'RESTRICTIVE', for: 'ALL', name: 'test', to: ['current_role'], on: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('rename policy', async (t) => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('newName', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, ['public.users.test->public.users.newName'], false, ['public'], ); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "users" RENAME TO "newName";', ]); expect(statements).toStrictEqual([ { newName: 'newName', oldName: 'test', schema: '', tableName: 'users', type: 'rename_policy', }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('rename policy in renamed table', async (t) => { const client = new PGlite(); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users2', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('newName', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [ 'public.users->public.users2', 'public.users2.test->public.users2.newName', ], false, ['public'], ); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" RENAME TO "users2";', 'ALTER POLICY "test" ON "users2" RENAME TO "newName";', ]); expect(statements).toStrictEqual([ { fromSchema: '', tableNameFrom: 'users', tableNameTo: 'users2', toSchema: '', type: 'rename_table', }, { newName: 'newName', oldName: 'test', schema: '', tableName: 'users2', type: 'rename_policy', }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('create table with a policy', async (t) => { const client = new PGlite(); const schema1 = {}; const schema2 = { users: pgTable('users2', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "users2" (\n\t"id" integer PRIMARY KEY NOT NULL\n);\n', 'ALTER TABLE "users2" ENABLE ROW LEVEL SECURITY;', 'CREATE POLICY "test" ON "users2" AS PERMISSIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { columns: [ { name: 'id', notNull: true, primaryKey: true, type: 'integer', }, ], checkConstraints: [], compositePKs: [], isRLSEnabled: false, compositePkName: '', policies: [ 'test--PERMISSIVE--ALL--public--undefined', ], schema: '', tableName: 'users2', type: 'create_table', uniqueConstraints: [], }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: [ 'public', ], on: undefined, }, schema: '', tableName: 'users2', type: 'create_policy', }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('drop table with a policy', async (t) => { const client = new PGlite(); const schema1 = { users: pgTable('users2', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = {}; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users2" CASCADE;', 'DROP TABLE "users2" CASCADE;', ]); expect(statements).toStrictEqual([ { policies: [ 'test--PERMISSIVE--ALL--public--undefined', ], schema: '', tableName: 'users2', type: 'drop_table', }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('add policy with multiple "to" roles', async (t) => { const client = new PGlite(); client.query(`CREATE ROLE manager;`); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const role = pgRole('manager').existing(); const schema2 = { role, users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { to: ['current_role', role] }), })), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], ); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO current_role, "manager";', ]); expect(statements).toStrictEqual([ { schema: '', tableName: 'users', type: 'enable_rls', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: undefined, to: ['current_role', 'manager'], }, schema: '', tableName: 'users', type: 'create_policy', }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('rename policy that is linked', async (t) => { const client = new PGlite(); const users = pgTable('users', { id: integer('id').primaryKey(), }); const { sqlStatements: createUsers } = await diffTestSchemas({}, { users }, []); const schema1 = { rls: pgPolicy('test', { as: 'permissive' }).link(users), }; const schema2 = { users, rls: pgPolicy('newName', { as: 'permissive' }).link(users), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, ['public.users.test->public.users.newName'], false, ['public'], undefined, undefined, { before: createUsers }, ); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "users" RENAME TO "newName";', ]); expect(statements).toStrictEqual([ { newName: 'newName', oldName: 'test', schema: '', tableName: 'users', type: 'rename_policy', }, ]); }); test('alter policy that is linked', async (t) => { const client = new PGlite(); const users = pgTable('users', { id: integer('id').primaryKey(), }); const { sqlStatements: createUsers } = await diffTestSchemas({}, { users }, []); const schema1 = { rls: pgPolicy('test', { as: 'permissive' }).link(users), }; const schema2 = { users, rls: pgPolicy('test', { as: 'permissive', to: 'current_role' }).link(users), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, undefined, { before: createUsers }, ); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "users" TO current_role;', ]); expect(statements).toStrictEqual([{ newData: 'test--PERMISSIVE--ALL--current_role--undefined', oldData: 'test--PERMISSIVE--ALL--public--undefined', schema: '', tableName: 'users', type: 'alter_policy', }]); }); test('alter policy that is linked: withCheck', async (t) => { const client = new PGlite(); const users = pgTable('users', { id: integer('id').primaryKey(), }); const { sqlStatements: createUsers } = await diffTestSchemas({}, { users }, []); const schema1 = { rls: pgPolicy('test', { as: 'permissive', withCheck: sql`true` }).link(users), }; const schema2 = { users, rls: pgPolicy('test', { as: 'permissive', withCheck: sql`false` }).link(users), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, undefined, { before: createUsers }, ); expect(sqlStatements).toStrictEqual([]); expect(statements).toStrictEqual([]); }); test('alter policy that is linked: using', async (t) => { const client = new PGlite(); const users = pgTable('users', { id: integer('id').primaryKey(), }); const { sqlStatements: createUsers } = await diffTestSchemas({}, { users }, []); const schema1 = { rls: pgPolicy('test', { as: 'permissive', using: sql`true` }).link(users), }; const schema2 = { users, rls: pgPolicy('test', { as: 'permissive', using: sql`false` }).link(users), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, undefined, { before: createUsers }, ); expect(sqlStatements).toStrictEqual([]); expect(statements).toStrictEqual([]); }); test('alter policy that is linked: using', async (t) => { const client = new PGlite(); const users = pgTable('users', { id: integer('id').primaryKey(), }); const { sqlStatements: createUsers } = await diffTestSchemas({}, { users }, []); const schema1 = { rls: pgPolicy('test', { for: 'insert' }).link(users), }; const schema2 = { users, rls: pgPolicy('test', { for: 'delete' }).link(users), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, undefined, { before: createUsers }, ); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users" CASCADE;', 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'INSERT', name: 'test', on: undefined, to: [ 'public', ], }, schema: '', tableName: 'users', type: 'drop_policy', }, { data: { as: 'PERMISSIVE', for: 'DELETE', name: 'test', on: undefined, to: [ 'public', ], }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); //// test('create role', async (t) => { const client = new PGlite(); const schema1 = {}; const schema2 = { manager: pgRole('manager'), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, { roles: { include: ['manager'] } }, ); expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager";']); expect(statements).toStrictEqual([ { name: 'manager', type: 'create_role', values: { createDb: false, createRole: false, inherit: true, }, }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('create role with properties', async (t) => { const client = new PGlite(); const schema1 = {}; const schema2 = { manager: pgRole('manager', { createDb: true, inherit: false, createRole: true }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, { roles: { include: ['manager'] } }, ); expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager" WITH CREATEDB CREATEROLE NOINHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'create_role', values: { createDb: true, createRole: true, inherit: false, }, }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('create role with some properties', async (t) => { const client = new PGlite(); const schema1 = {}; const schema2 = { manager: pgRole('manager', { createDb: true, inherit: false }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, { roles: { include: ['manager'] } }, ); expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager" WITH CREATEDB NOINHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'create_role', values: { createDb: true, createRole: false, inherit: false, }, }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('drop role', async (t) => { const client = new PGlite(); const schema1 = { manager: pgRole('manager') }; const schema2 = {}; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, { roles: { include: ['manager'] } }, ); expect(sqlStatements).toStrictEqual(['DROP ROLE "manager";']); expect(statements).toStrictEqual([ { name: 'manager', type: 'drop_role', }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('create and drop role', async (t) => { const client = new PGlite(); const schema1 = { manager: pgRole('manager'), }; const schema2 = { admin: pgRole('admin'), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, { roles: { include: ['manager', 'admin'] } }, ); expect(sqlStatements).toStrictEqual(['DROP ROLE "manager";', 'CREATE ROLE "admin";']); expect(statements).toStrictEqual([ { name: 'manager', type: 'drop_role', }, { name: 'admin', type: 'create_role', values: { createDb: false, createRole: false, inherit: true, }, }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('rename role', async (t) => { const client = new PGlite(); const schema1 = { manager: pgRole('manager'), }; const schema2 = { admin: pgRole('admin'), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, ['manager->admin'], false, ['public'], undefined, { roles: { include: ['manager', 'admin'] } }, ); expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" RENAME TO "admin";']); expect(statements).toStrictEqual([ { nameFrom: 'manager', nameTo: 'admin', type: 'rename_role' }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('alter all role field', async (t) => { const client = new PGlite(); const schema1 = { manager: pgRole('manager'), }; const schema2 = { manager: pgRole('manager', { createDb: true, createRole: true, inherit: false }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, { roles: { include: ['manager'] } }, ); expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH CREATEDB CREATEROLE NOINHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'alter_role', values: { createDb: true, createRole: true, inherit: false, }, }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('alter createdb in role', async (t) => { const client = new PGlite(); const schema1 = { manager: pgRole('manager'), }; const schema2 = { manager: pgRole('manager', { createDb: true }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, { roles: { include: ['manager'] } }, ); expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH CREATEDB NOCREATEROLE INHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'alter_role', values: { createDb: true, createRole: false, inherit: true, }, }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('alter createrole in role', async (t) => { const client = new PGlite(); const schema1 = { manager: pgRole('manager'), }; const schema2 = { manager: pgRole('manager', { createRole: true }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, { roles: { include: ['manager'] } }, ); expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH NOCREATEDB CREATEROLE INHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'alter_role', values: { createDb: false, createRole: true, inherit: true, }, }, ]); for (const st of sqlStatements) { await client.query(st); } }); test('alter inherit in role', async (t) => { const client = new PGlite(); const schema1 = { manager: pgRole('manager'), }; const schema2 = { manager: pgRole('manager', { inherit: false }), }; const { statements, sqlStatements } = await diffTestSchemasPush( client, schema1, schema2, [], false, ['public'], undefined, { roles: { include: ['manager'] } }, ); expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH NOCREATEDB NOCREATEROLE NOINHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'alter_role', values: { createDb: false, createRole: false, inherit: false, }, }, ]); for (const st of sqlStatements) { await client.query(st); } }); ================================================ FILE: drizzle-kit/tests/push/singlestore-push.test.ts ================================================ import chalk from 'chalk'; import Docker from 'dockerode'; import { getTableConfig, index, int, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import fs from 'fs'; import getPort from 'get-port'; import { Connection, createConnection } from 'mysql2/promise'; import { diffTestSchemasPushSingleStore } from 'tests/schemaDiffer'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, expect, test } from 'vitest'; let client: Connection; let singlestoreContainer: Docker.Container; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); singlestoreContainer = await docker.createContainer({ Image: image, Env: ['ROOT_PASSWORD=singlestore'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await singlestoreContainer.start(); await new Promise((resolve) => setTimeout(resolve, 4000)); return `singlestore://root:singlestore@localhost:${port}/`; } beforeAll(async () => { const connectionString = process.env.MYSQL_CONNECTION_STRING ?? (await createDockerDB()); const sleep = 1000; let timeLeft = 20000; let connected = false; let lastError: unknown | undefined; do { try { client = await createConnection(connectionString); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await client?.end().catch(console.error); await singlestoreContainer?.stop().catch(console.error); throw lastError; } await client.query('DROP DATABASE IF EXISTS drizzle;'); await client.query('CREATE DATABASE drizzle;'); await client.query('USE drizzle;'); }); afterAll(async () => { await client?.end().catch(console.error); await singlestoreContainer?.stop().catch(console.error); }); if (!fs.existsSync('tests/push/singlestore')) { fs.mkdirSync('tests/push/singlestore'); } test('db has checks. Push with same names', async () => { const schema1 = { test: singlestoreTable('test', { id: int('id').primaryKey(), values: int('values').default(1), }), }; const schema2 = { test: singlestoreTable('test', { id: int('id').primaryKey(), values: int('values').default(1), }), }; const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); await client.query(`DROP TABLE \`test\`;`); }); // TODO: Unskip this test when views are implemented /* test.skip.skip('create view', async () => { const table = singlestoreTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, }; const schema2 = { test: table, view: singlestoreView('view').as((qb) => qb.select().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([ { definition: 'select `id` from `test`', name: 'view', type: 'singlestore_create_view', replace: false, sqlSecurity: 'definer', withCheckOption: undefined, algorithm: 'undefined', }, ]); expect(sqlStatements).toStrictEqual([ `CREATE ALGORITHM = undefined SQL SECURITY definer VIEW \`view\` AS (select \`id\` from \`test\`);`, ]); await client.query(`DROP TABLE \`test\`;`); }); */ // TODO: Unskip this test when views are implemented /* test.skip('drop view', async () => { const table = singlestoreTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, view: singlestoreView('view').as((qb) => qb.select().from(table)), }; const schema2 = { test: table, }; const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([ { name: 'view', type: 'drop_view', }, ]); expect(sqlStatements).toStrictEqual(['DROP VIEW `view`;']); await client.query(`DROP TABLE \`test\`;`); await client.query(`DROP VIEW \`view\`;`); }); */ // TODO: Unskip this test when views are implemented /* test.skip('alter view ".as"', async () => { const table = singlestoreTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, view: singlestoreView('view').as((qb) => qb .select() .from(table) .where(sql`${table.id} = 1`) ), }; const schema2 = { test: table, view: singlestoreView('view').as((qb) => qb.select().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', false, ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); await client.query(`DROP TABLE \`test\`;`); await client.query(`DROP VIEW \`view\`;`); }); */ // TODO: Unskip this test when views are implemented /* test.skip('alter meta options with distinct in definition', async () => { const table = singlestoreTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, view: singlestoreView('view') .withCheckOption('cascaded') .sqlSecurity('definer') .algorithm('merge') .as((qb) => qb .selectDistinct() .from(table) .where(sql`${table.id} = 1`) ), }; const schema2 = { test: table, view: singlestoreView('view') .withCheckOption('cascaded') .sqlSecurity('definer') .algorithm('undefined') .as((qb) => qb.selectDistinct().from(table)), }; await expect( diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', false, ), ).rejects.toThrowError(); await client.query(`DROP TABLE \`test\`;`); }); */ test('added column not null and without default to table with data', async (t) => { const schema1 = { companies: singlestoreTable('companies', { id: int('id'), name: text('name'), }), }; const schema2 = { companies: singlestoreTable('companies', { id: int('id'), name: text('name'), age: int('age').notNull(), }), }; const table = getTableConfig(schema1.companies); const seedStatements = [ `INSERT INTO \`${table.name}\` (\`${schema1.companies.name.name}\`) VALUES ('drizzle');`, `INSERT INTO \`${table.name}\` (\`${schema1.companies.name.name}\`) VALUES ('turso');`, ]; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', false, undefined, { after: seedStatements, }, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'companies', column: { name: 'age', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, schema: '', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`truncate table companies;`); expect(sqlStatements[1]).toBe( `ALTER TABLE \`companies\` ADD \`age\` int NOT NULL;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe( `· You're about to add not-null ${ chalk.underline( 'age', ) } column without default value, which contains 2 items`, ); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(1); expect(tablesToTruncate![0]).toBe('companies'); await client.query(`DROP TABLE \`companies\`;`); }); test('added column not null and without default to table without data', async (t) => { const schema1 = { companies: singlestoreTable('companies', { id: int('id').primaryKey(), name: text('name').notNull(), }), }; const schema2 = { companies: singlestoreTable('companies', { id: int('id').primaryKey(), name: text('name').notNull(), age: int('age').notNull(), }), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', false, undefined, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'companies', column: { name: 'age', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, schema: '', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE \`companies\` ADD \`age\` int NOT NULL;`, ); expect(infoToPrint!.length).toBe(0); expect(columnsToRemove!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); await client.query(`DROP TABLE \`companies\`;`); }); test('drop not null, add not null', async (t) => { const schema1 = { users: singlestoreTable('users', { id: int('id').primaryKey(), name: text('name').notNull(), }), posts: singlestoreTable( 'posts', { id: int('id').primaryKey(), name: text('name'), userId: int('user_id'), }, ), }; const schema2 = { users: singlestoreTable('users', { id: int('id').primaryKey(), name: text('name'), }), posts: singlestoreTable( 'posts', { id: int('id').primaryKey(), name: text('name').notNull(), userId: int('user_id'), }, ), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', false, undefined, ); expect(statements!.length).toBe(2); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, onUpdate: undefined, primaryKey: false, type: 'int', }, { autoincrement: false, generated: undefined, name: 'name', notNull: true, onUpdate: undefined, primaryKey: false, type: 'text', }, { autoincrement: false, generated: undefined, name: 'user_id', notNull: false, onUpdate: undefined, primaryKey: false, type: 'int', }, ], compositePKs: [ 'posts_id;id', ], tableName: 'posts', type: 'singlestore_recreate_table', uniqueConstraints: [], }); expect(statements![1]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, onUpdate: undefined, primaryKey: false, type: 'int', }, { autoincrement: false, generated: undefined, name: 'name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, ], compositePKs: [ 'users_id;id', ], tableName: 'users', type: 'singlestore_recreate_table', uniqueConstraints: [], }); expect(sqlStatements!.length).toBe(8); expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_posts\` ( \t\`id\` int NOT NULL, \t\`name\` text NOT NULL, \t\`user_id\` int, \tCONSTRAINT \`posts_id\` PRIMARY KEY(\`id\`) );\n`); expect(sqlStatements![1]).toBe( `INSERT INTO \`__new_posts\`(\`id\`, \`name\`, \`user_id\`) SELECT \`id\`, \`name\`, \`user_id\` FROM \`posts\`;`, ); expect(sqlStatements![2]).toBe(`DROP TABLE \`posts\`;`); expect(sqlStatements![3]).toBe(`ALTER TABLE \`__new_posts\` RENAME TO \`posts\`;`); expect(sqlStatements![4]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` int NOT NULL, \t\`name\` text, \tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) );\n`); expect(sqlStatements![5]).toBe( `INSERT INTO \`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, ); expect(sqlStatements![6]).toBe( `DROP TABLE \`users\`;`, ); expect(sqlStatements![7]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); await client.query(`DROP TABLE \`users\`;`); await client.query(`DROP TABLE \`posts\`;`); }); test('drop table with data', async (t) => { const schema1 = { users: singlestoreTable('users', { id: int('id').primaryKey(), name: text('name').notNull(), }), posts: singlestoreTable( 'posts', { id: int('id').primaryKey(), name: text('name'), userId: int('user_id'), }, ), }; const schema2 = { posts: singlestoreTable( 'posts', { id: int('id').primaryKey(), name: text('name'), userId: int('user_id'), }, ), }; const seedStatements = [ `INSERT INTO \`users\` (\`id\`, \`name\`) VALUES (1, 'drizzle')`, ]; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', false, undefined, { after: seedStatements }, ); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ policies: [], schema: undefined, tableName: 'users', type: 'drop_table', }); expect(sqlStatements!.length).toBe(1); expect(sqlStatements![0]).toBe(`DROP TABLE \`users\`;`); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe(`· You're about to delete ${chalk.underline('users')} table with 1 items`); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(1); expect(tablesToRemove![0]).toBe('users'); expect(tablesToTruncate!.length).toBe(0); await client.query(`DROP TABLE \`users\`;`); await client.query(`DROP TABLE \`posts\`;`); }); test('change data type. db has indexes. table does not have values', async (t) => { const schema1 = { users: singlestoreTable('users', { id: int('id').primaryKey(), name: int('name').notNull(), }, (table) => [index('index').on(table.name)]), }; const schema2 = { users: singlestoreTable('users', { id: int('id').primaryKey(), name: text('name').notNull(), }, (table) => [index('index').on(table.name)]), }; const seedStatements = [`INSERT INTO users VALUES (1, 12)`]; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', false, undefined, ); expect(statements!.length).toBe(2); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, onUpdate: undefined, primaryKey: false, type: 'int', }, { autoincrement: false, generated: undefined, name: 'name', notNull: true, onUpdate: undefined, primaryKey: false, type: 'text', }, ], compositePKs: [ 'users_id;id', ], tableName: 'users', type: 'singlestore_recreate_table', uniqueConstraints: [], }); expect(statements![1]).toStrictEqual({ data: 'index;name;false;;;', internal: undefined, schema: '', tableName: 'users', type: 'create_index', }); expect(sqlStatements!.length).toBe(5); expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` int NOT NULL, \t\`name\` text NOT NULL, \tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) );\n`); expect(sqlStatements![1]).toBe( `INSERT INTO \`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, ); expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![3]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(sqlStatements![4]).toBe(`CREATE INDEX \`index\` ON \`users\` (\`name\`);`); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); await client.query(`DROP TABLE \`users\`;`); }); test('change data type. db has indexes. table has values', async (t) => { const schema1 = { users: singlestoreTable('users', { id: int('id').primaryKey(), name: int('name'), }, (table) => [index('index').on(table.name)]), }; const schema2 = { users: singlestoreTable('users', { id: int('id').primaryKey(), name: text('name'), }, (table) => [index('index').on(table.name)]), }; const seedStatements = [`INSERT INTO users VALUES (1, 12);`, `INSERT INTO users (id) VALUES (2);`]; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', false, undefined, { after: seedStatements }, ); expect(statements!.length).toBe(2); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, onUpdate: undefined, primaryKey: false, type: 'int', }, { autoincrement: false, generated: undefined, name: 'name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, ], compositePKs: [ 'users_id;id', ], tableName: 'users', type: 'singlestore_recreate_table', uniqueConstraints: [], }); expect(statements![1]).toStrictEqual({ data: 'index;name;false;;;', internal: undefined, schema: '', tableName: 'users', type: 'create_index', }); expect(sqlStatements!.length).toBe(6); expect(sqlStatements![0]).toBe(`TRUNCATE TABLE \`users\`;`); expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` int NOT NULL, \t\`name\` text, \tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) );\n`); expect(sqlStatements![2]).toBe( `INSERT INTO \`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, ); expect(sqlStatements![3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(sqlStatements![5]).toBe(`CREATE INDEX \`index\` ON \`users\` (\`name\`);`); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe( `· You're about recreate ${chalk.underline('users')} table with data type changing for ${ chalk.underline('name') } column, which contains 1 items`, ); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(1); expect(tablesToTruncate![0]).toBe(`users`); await client.query(`DROP TABLE \`users\`;`); }); test('add column. add default to column without not null', async (t) => { const schema1 = { users: singlestoreTable('users', { id: int('id').primaryKey(), name: text('name'), }), }; const schema2 = { users: singlestoreTable('users', { id: int('id').primaryKey(), name: text('name').default('drizzle'), age: int('age'), }), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSingleStore( client, schema1, schema2, [], 'drizzle', false, undefined, ); expect(statements!.length).toBe(2); expect(statements![0]).toStrictEqual({ columnAutoIncrement: false, columnName: 'name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', newDefaultValue: "'drizzle'", schema: '', tableName: 'users', type: 'alter_table_alter_column_set_default', }); expect(statements![1]).toStrictEqual({ type: 'alter_table_add_column', tableName: 'users', schema: '', column: { notNull: false, primaryKey: false, autoincrement: false, name: 'age', type: 'int', }, }); expect(sqlStatements!.length).toBe(2); expect(sqlStatements![0]).toBe(`ALTER TABLE \`users\` MODIFY COLUMN \`name\` text DEFAULT 'drizzle';`); expect(sqlStatements![1]).toBe(`ALTER TABLE \`users\` ADD \`age\` int;`); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); await client.query(`DROP TABLE \`users\`;`); }); ================================================ FILE: drizzle-kit/tests/push/singlestore.test.ts ================================================ import Docker from 'dockerode'; import { SQL, sql } from 'drizzle-orm'; import { bigint, binary, char, date, decimal, double, float, int, mediumint, primaryKey, singlestoreEnum, singlestoreTable, smallint, text, time, timestamp, tinyint, varbinary, varchar, vector, year, } from 'drizzle-orm/singlestore-core'; import getPort from 'get-port'; import { Connection, createConnection } from 'mysql2/promise'; import { diffTestSchemasPushSingleStore, diffTestSchemasSingleStore } from 'tests/schemaDiffer'; import { v4 as uuid } from 'uuid'; import { expect } from 'vitest'; import { DialectSuite, run } from './common'; async function createDockerDB(context: any): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); context.singlestoreContainer = await docker.createContainer({ Image: image, Env: ['ROOT_PASSWORD=singlestore'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await context.singlestoreContainer.start(); await new Promise((resolve) => setTimeout(resolve, 4000)); return `singlestore://root:singlestore@localhost:${port}/`; } const singlestoreSuite: DialectSuite = { allTypes: async function(context: any): Promise { const schema1 = { allBigInts: singlestoreTable('all_big_ints', { simple: bigint('simple', { mode: 'number' }), columnNotNull: bigint('column_not_null', { mode: 'number' }).notNull(), columnDefault: bigint('column_default', { mode: 'number' }).default(12), columnDefaultSql: bigint('column_default_sql', { mode: 'number', }).default(12), }), allBools: singlestoreTable('all_bools', { simple: tinyint('simple'), columnNotNull: tinyint('column_not_null').notNull(), columnDefault: tinyint('column_default').default(1), }), allChars: singlestoreTable('all_chars', { simple: char('simple', { length: 1 }), columnNotNull: char('column_not_null', { length: 45 }).notNull(), // columnDefault: char("column_default", { length: 1 }).default("h"), columnDefaultSql: char('column_default_sql', { length: 1 }).default( 'h', ), }), // allDateTimes: singlestoreTable("all_date_times", { // simple: datetime("simple", { mode: "string", fsp: 1 }), // columnNotNull: datetime("column_not_null", { // mode: "string", // }).notNull(), // columnDefault: datetime("column_default", { mode: "string" }).default( // "2023-03-01 14:05:29" // ), // }), allDates: singlestoreTable('all_dates', { simple: date('simple', { mode: 'string' }), column_not_null: date('column_not_null', { mode: 'string' }).notNull(), column_default: date('column_default', { mode: 'string' }).default( '2023-03-01', ), }), allDecimals: singlestoreTable('all_decimals', { simple: decimal('simple', { precision: 1, scale: 0 }), columnNotNull: decimal('column_not_null', { precision: 45, scale: 3, }).notNull(), columnDefault: decimal('column_default', { precision: 10, scale: 0, }).default('100'), columnDefaultSql: decimal('column_default_sql', { precision: 10, scale: 0, }).default('101'), }), allDoubles: singlestoreTable('all_doubles', { simple: double('simple'), columnNotNull: double('column_not_null').notNull(), columnDefault: double('column_default').default(100), columnDefaultSql: double('column_default_sql').default(101), }), allEnums: singlestoreTable('all_enums', { simple: singlestoreEnum('simple', ['hi', 'hello']), }), allEnums1: singlestoreTable('all_enums1', { simple: singlestoreEnum('simple', ['hi', 'hello']).default('hi'), }), allFloats: singlestoreTable('all_floats', { columnNotNull: float('column_not_null').notNull(), columnDefault: float('column_default').default(100), columnDefaultSql: float('column_default_sql').default(101), }), allInts: singlestoreTable('all_ints', { simple: int('simple'), columnNotNull: int('column_not_null').notNull(), columnDefault: int('column_default').default(100), columnDefaultSql: int('column_default_sql').default(101), }), allIntsRef: singlestoreTable('all_ints_ref', { simple: int('simple'), columnNotNull: int('column_not_null').notNull(), columnDefault: int('column_default').default(100), columnDefaultSql: int('column_default_sql').default(101), }), // allJsons: singlestoreTable("all_jsons", { // columnDefaultObject: json("column_default_object") // .default({ hello: "world world" }) // .notNull(), // columnDefaultArray: json("column_default_array").default({ // hello: { "world world": ["foo", "bar"] }, // foo: "bar", // fe: 23, // }), // column: json("column"), // }), allMInts: singlestoreTable('all_m_ints', { simple: mediumint('simple'), columnNotNull: mediumint('column_not_null').notNull(), columnDefault: mediumint('column_default').default(100), columnDefaultSql: mediumint('column_default_sql').default(101), }), allReals: singlestoreTable('all_reals', { simple: double('simple', { precision: 5, scale: 2 }), columnNotNull: double('column_not_null').notNull(), columnDefault: double('column_default').default(100), columnDefaultSql: double('column_default_sql').default(101), }), allSInts: singlestoreTable('all_s_ints', { simple: smallint('simple'), columnNotNull: smallint('column_not_null').notNull(), columnDefault: smallint('column_default').default(100), columnDefaultSql: smallint('column_default_sql').default(101), }), // allSmallSerials: singlestoreTable("all_small_serials", { // columnAll: serial("column_all").notNull(), // }), allTInts: singlestoreTable('all_t_ints', { simple: tinyint('simple'), columnNotNull: tinyint('column_not_null').notNull(), columnDefault: tinyint('column_default').default(10), columnDefaultSql: tinyint('column_default_sql').default(11), }), allTexts: singlestoreTable('all_texts', { simple: text('simple'), columnNotNull: text('column_not_null').notNull(), columnDefault: text('column_default').default('hello'), columnDefaultSql: text('column_default_sql').default('hello'), }), allTimes: singlestoreTable('all_times', { // simple: time("simple", { fsp: 1 }), columnNotNull: time('column_not_null').notNull(), columnDefault: time('column_default').default('22:12:12'), }), allTimestamps: singlestoreTable('all_timestamps', { // columnDateNow: timestamp("column_date_now", { // fsp: 1, // mode: "string", // }).default(sql`(now())`), columnAll: timestamp('column_all', { mode: 'string' }) .default('2023-03-01 14:05:29') .notNull(), column: timestamp('column', { mode: 'string' }).default( '2023-02-28 16:18:31', ), }), allVarChars: singlestoreTable('all_var_chars', { simple: varchar('simple', { length: 100 }), columnNotNull: varchar('column_not_null', { length: 45 }).notNull(), columnDefault: varchar('column_default', { length: 100 }).default( 'hello', ), columnDefaultSql: varchar('column_default_sql', { length: 100, }).default('hello'), }), allVarbinaries: singlestoreTable('all_varbinaries', { simple: varbinary('simple', { length: 100 }), columnNotNull: varbinary('column_not_null', { length: 100 }).notNull(), columnDefault: varbinary('column_default', { length: 12 }), }), allYears: singlestoreTable('all_years', { simple: year('simple'), columnNotNull: year('column_not_null').notNull(), columnDefault: year('column_default').default(2022), }), binafry: singlestoreTable('binary', { simple: binary('simple', { length: 1 }), columnNotNull: binary('column_not_null', { length: 1 }).notNull(), columnDefault: binary('column_default', { length: 12 }), }), allVectors: singlestoreTable('all_vectors', { vectorSimple: vector('vector_simple', { dimensions: 1 }), vectorElementType: vector('vector_element_type', { dimensions: 1, elementType: 'I8' }), vectorNotNull: vector('vector_not_null', { dimensions: 1 }).notNull(), vectorDefault: vector('vector_default', { dimensions: 1 }).default([1]), }), }; const { statements } = await diffTestSchemasPushSingleStore( context.client as Connection, schema1, schema1, [], 'drizzle', false, ); console.log(statements); expect(statements.length).toBe(0); expect(statements).toEqual([]); const { sqlStatements: dropStatements } = await diffTestSchemasSingleStore( schema1, {}, [], false, ); for (const st of dropStatements) { await context.client.query(st); } }, addBasicIndexes: function(context?: any): Promise { return {} as any; }, changeIndexFields: function(context?: any): Promise { return {} as any; }, dropIndex: function(context?: any): Promise { return {} as any; }, indexesToBeNotTriggered: function(context?: any): Promise { return {} as any; }, indexesTestCase1: function(context?: any): Promise { return {} as any; }, async case1() { // TODO: implement if needed expect(true).toBe(true); }, addNotNull: function(context?: any): Promise { return {} as any; }, addNotNullWithDataNoRollback: function(context?: any): Promise { return {} as any; }, addBasicSequences: function(context?: any): Promise { return {} as any; }, addGeneratedColumn: async function(context: any): Promise { return {} as any; }, addGeneratedToColumn: async function(context: any): Promise { return {} as any; }, dropGeneratedConstraint: async function(context: any): Promise { return {} as any; }, alterGeneratedConstraint: async function(context: any): Promise { return {} as any; }, createTableWithGeneratedConstraint: function(context?: any): Promise { return {} as any; }, createCompositePrimaryKey: async function(context: any): Promise { const schema1 = {}; const schema2 = { table: singlestoreTable('table', { col1: int('col1').notNull(), col2: int('col2').notNull(), }, (t) => ({ pk: primaryKey({ columns: [t.col1, t.col2], }), })), }; const { statements, sqlStatements } = await diffTestSchemasPushSingleStore( context.client as Connection, schema1, schema2, [], 'drizzle', false, ); expect(statements).toStrictEqual([ { type: 'create_table', tableName: 'table', schema: undefined, internals: { indexes: {}, tables: {}, }, compositePKs: ['table_col1_col2_pk;col1,col2'], compositePkName: 'table_col1_col2_pk', uniqueConstraints: [], columns: [ { name: 'col1', type: 'int', primaryKey: false, notNull: true, autoincrement: false }, { name: 'col2', type: 'int', primaryKey: false, notNull: true, autoincrement: false }, ], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `table` (\n\t`col1` int NOT NULL,\n\t`col2` int NOT NULL,\n\tCONSTRAINT `table_col1_col2_pk` PRIMARY KEY(`col1`,`col2`)\n);\n', ]); }, renameTableWithCompositePrimaryKey: async function(context?: any): Promise { const productsCategoriesTable = (tableName: string) => { return singlestoreTable(tableName, { productId: varchar('product_id', { length: 10 }).notNull(), categoryId: varchar('category_id', { length: 10 }).notNull(), }, (t) => ({ pk: primaryKey({ columns: [t.productId, t.categoryId], }), })); }; const schema1 = { table: productsCategoriesTable('products_categories'), }; const schema2 = { test: productsCategoriesTable('products_to_categories'), }; const { sqlStatements } = await diffTestSchemasPushSingleStore( context.client as Connection, schema1, schema2, ['public.products_categories->public.products_to_categories'], 'drizzle', false, ); // It's not possible to create/alter/drop primary keys in SingleStore expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `products_categories` RENAME TO `products_to_categories`;', ]); await context.client.query(`DROP TABLE \`products_categories\``); }, }; run( singlestoreSuite, async (context: any) => { const connectionString = process.env.SINGLESTORE_CONNECTION_STRING ?? (await createDockerDB(context)); const sleep = 1000; let timeLeft = 20000; let connected = false; let lastError: unknown | undefined; do { try { context.client = await createConnection(connectionString); await context.client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to SingleStore'); await context.client?.end().catch(console.error); await context.singlestoreContainer?.stop().catch(console.error); throw lastError; } await context.client.query(`DROP DATABASE IF EXISTS \`drizzle\`;`); await context.client.query('CREATE DATABASE drizzle;'); await context.client.query('USE drizzle;'); }, async (context: any) => { await context.client?.end().catch(console.error); await context.singlestoreContainer?.stop().catch(console.error); }, ); ================================================ FILE: drizzle-kit/tests/push/sqlite.test.ts ================================================ import Database from 'better-sqlite3'; import chalk from 'chalk'; import { sql } from 'drizzle-orm'; import { blob, check, foreignKey, getTableConfig, int, integer, numeric, primaryKey, real, sqliteTable, sqliteView, text, uniqueIndex, } from 'drizzle-orm/sqlite-core'; import { diffTestSchemasPushSqlite, introspectSQLiteToFile } from 'tests/schemaDiffer'; import { expect, test } from 'vitest'; test('nothing changed in schema', async (t) => { const client = new Database(':memory:'); const users = sqliteTable('users', { id: integer('id').primaryKey().notNull(), name: text('name').notNull(), email: text('email'), textJson: text('text_json', { mode: 'json' }), blobJon: blob('blob_json', { mode: 'json' }), blobBigInt: blob('blob_bigint', { mode: 'bigint' }), numeric: numeric('numeric'), createdAt: integer('created_at', { mode: 'timestamp' }), createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }), real: real('real'), text: text('text', { length: 255 }), role: text('role', { enum: ['admin', 'user'] }).default('user'), isConfirmed: integer('is_confirmed', { mode: 'boolean', }), }); const schema1 = { users, customers: sqliteTable('customers', { id: integer('id').primaryKey(), address: text('address').notNull(), isConfirmed: integer('is_confirmed', { mode: 'boolean' }), registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) .notNull() .$defaultFn(() => new Date()), userId: integer('user_id') .references(() => users.id) .notNull(), }), posts: sqliteTable('posts', { id: integer('id').primaryKey(), content: text('content'), authorId: integer('author_id'), }), }; const { sqlStatements, statements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite(client, schema1, schema1, [], false); expect(sqlStatements.length).toBe(0); expect(statements.length).toBe(0); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); expect(shouldAskForApprove).toBe(false); }); test('dropped, added unique index', async (t) => { const client = new Database(':memory:'); const users = sqliteTable('users', { id: integer('id').primaryKey().notNull(), name: text('name').notNull(), email: text('email'), textJson: text('text_json', { mode: 'json' }), blobJon: blob('blob_json', { mode: 'json' }), blobBigInt: blob('blob_bigint', { mode: 'bigint' }), numeric: numeric('numeric'), createdAt: integer('created_at', { mode: 'timestamp' }), createdAtMs: integer('created_at_ms', { mode: 'timestamp_ms' }), real: real('real'), text: text('text', { length: 255 }), role: text('role', { enum: ['admin', 'user'] }).default('user'), isConfirmed: integer('is_confirmed', { mode: 'boolean', }), }); const schema1 = { users, customers: sqliteTable( 'customers', { id: integer('id').primaryKey(), address: text('address').notNull().unique(), isConfirmed: integer('is_confirmed', { mode: 'boolean' }), registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) .notNull() .$defaultFn(() => new Date()), userId: integer('user_id').notNull(), }, (table) => ({ uniqueIndex: uniqueIndex('customers_address_unique').on(table.address), }), ), posts: sqliteTable('posts', { id: integer('id').primaryKey(), content: text('content'), authorId: integer('author_id'), }), }; const schema2 = { users, customers: sqliteTable( 'customers', { id: integer('id').primaryKey(), address: text('address').notNull(), isConfirmed: integer('is_confirmed', { mode: 'boolean' }), registrationDate: integer('registration_date', { mode: 'timestamp_ms' }) .notNull() .$defaultFn(() => new Date()), userId: integer('user_id').notNull(), }, (table) => ({ uniqueIndex: uniqueIndex('customers_is_confirmed_unique').on( table.isConfirmed, ), }), ), posts: sqliteTable('posts', { id: integer('id').primaryKey(), content: text('content'), authorId: integer('author_id'), }), }; const { sqlStatements, statements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite(client, schema1, schema2, [], false); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'drop_index', tableName: 'customers', data: 'customers_address_unique;address;true;', schema: '', }); expect(statements[1]).toStrictEqual({ type: 'create_index', tableName: 'customers', data: 'customers_is_confirmed_unique;is_confirmed;true;', schema: '', internal: { indexes: {}, }, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `DROP INDEX \`customers_address_unique\`;`, ); expect(sqlStatements[1]).toBe( `CREATE UNIQUE INDEX \`customers_is_confirmed_unique\` ON \`customers\` (\`is_confirmed\`);`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('added column not null and without default to table with data', async (t) => { const client = new Database(':memory:'); const schema1 = { companies: sqliteTable('companies', { id: integer('id').primaryKey(), name: text('name').notNull(), }), }; const schema2 = { companies: sqliteTable('companies', { id: integer('id').primaryKey(), name: text('name').notNull(), age: integer('age').notNull(), }), }; const table = getTableConfig(schema1.companies); const seedStatements = [ `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('drizzle');`, `INSERT INTO \`${table.name}\` ("${schema1.companies.name.name}") VALUES ('turso');`, ]; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], false, seedStatements, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'companies', column: { name: 'age', type: 'integer', primaryKey: false, notNull: true, autoincrement: false, }, referenceData: undefined, }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`delete from companies;`); expect(sqlStatements[1]).toBe( `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe( `· You're about to add not-null ${ chalk.underline( 'age', ) } column without default value, which contains 2 items`, ); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(1); expect(tablesToTruncate![0]).toBe('companies'); }); test('added column not null and without default to table without data', async (t) => { const turso = new Database(':memory:'); const schema1 = { companies: sqliteTable('companies', { id: integer('id').primaryKey(), name: text('name').notNull(), }), }; const schema2 = { companies: sqliteTable('companies', { id: integer('id').primaryKey(), name: text('name').notNull(), age: integer('age').notNull(), }), }; const { sqlStatements, statements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite(turso, schema1, schema2, [], false); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'companies', column: { name: 'age', type: 'integer', primaryKey: false, notNull: true, autoincrement: false, }, referenceData: undefined, }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `ALTER TABLE \`companies\` ADD \`age\` integer NOT NULL;`, ); expect(infoToPrint!.length).toBe(0); expect(columnsToRemove!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('drop autoincrement. drop column with data', async (t) => { const turso = new Database(':memory:'); const schema1 = { companies: sqliteTable('companies', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const schema2 = { companies: sqliteTable('companies', { id: integer('id').primaryKey({ autoIncrement: false }), }), }; const table = getTableConfig(schema1.companies); const seedStatements = [ `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (1, 'drizzle');`, `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (2, 'turso');`, ]; const { sqlStatements, statements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite( turso, schema1, schema2, [], false, seedStatements, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', tableName: 'companies', columns: [ { name: 'id', type: 'integer', autoincrement: false, notNull: true, primaryKey: true, generated: undefined, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `CREATE TABLE \`__new_companies\` ( \t\`id\` integer PRIMARY KEY NOT NULL );\n`, ); expect(sqlStatements[1]).toBe( `INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`, ); expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`); expect(sqlStatements[3]).toBe( `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`, ); expect(columnsToRemove!.length).toBe(1); expect(columnsToRemove![0]).toBe('name'); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe( `· You're about to delete ${ chalk.underline( 'name', ) } column in companies table with 2 items`, ); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('drop autoincrement. drop column with data with pragma off', async (t) => { const client = new Database(':memory:'); client.exec('PRAGMA foreign_keys=OFF;'); const users = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), }); const schema1 = { companies: sqliteTable('companies', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name'), user_id: integer('user_id').references(() => users.id), }), }; const schema2 = { companies: sqliteTable('companies', { id: integer('id').primaryKey({ autoIncrement: false }), user_id: integer('user_id').references(() => users.id), }), }; const table = getTableConfig(schema1.companies); const seedStatements = [ `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (1, 'drizzle');`, `INSERT INTO \`${table.name}\` ("${schema1.companies.id.name}", "${schema1.companies.name.name}") VALUES (2, 'turso');`, ]; const { sqlStatements, statements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], false, seedStatements, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', tableName: 'companies', columns: [ { name: 'id', type: 'integer', autoincrement: false, notNull: true, primaryKey: true, generated: undefined, }, { name: 'user_id', type: 'integer', autoincrement: false, notNull: false, primaryKey: false, generated: undefined, }, ], compositePKs: [], referenceData: [ { columnsFrom: [ 'user_id', ], columnsTo: [ 'id', ], name: '', onDelete: 'no action', onUpdate: 'no action', tableFrom: 'companies', tableTo: 'users', }, ], uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `CREATE TABLE \`__new_companies\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`user_id\` integer, \tFOREIGN KEY (\`user_id\`) REFERENCES \`users\`(\`id\`) ON UPDATE no action ON DELETE no action );\n`, ); expect(sqlStatements[1]).toBe( `INSERT INTO \`__new_companies\`("id", "user_id") SELECT "id", "user_id" FROM \`companies\`;`, ); expect(sqlStatements[2]).toBe(`DROP TABLE \`companies\`;`); expect(sqlStatements[3]).toBe( `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`, ); expect(columnsToRemove!.length).toBe(1); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe( `· You're about to delete ${ chalk.underline( 'name', ) } column in companies table with 2 items`, ); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('change autoincrement. other table references current', async (t) => { const client = new Database(':memory:'); const companies1 = sqliteTable('companies', { id: integer('id').primaryKey({ autoIncrement: true }), }); const users1 = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').unique(), companyId: text('company_id').references(() => companies1.id), }); const schema1 = { companies: companies1, users: users1, }; const companies2 = sqliteTable('companies', { id: integer('id').primaryKey({ autoIncrement: false }), }); const users2 = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').unique(), companyId: text('company_id').references(() => companies1.id), }); const schema2 = { companies: companies2, users: users2, }; const { name: usersTableName } = getTableConfig(users1); const { name: companiesTableName } = getTableConfig(companies1); const seedStatements = [ `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('drizzle');`, `INSERT INTO \`${usersTableName}\` ("${schema1.users.name.name}") VALUES ('turso');`, `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES ('1');`, `INSERT INTO \`${companiesTableName}\` ("${schema1.companies.id.name}") VALUES ('2');`, ]; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], false, seedStatements, ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', tableName: 'companies', columns: [ { name: 'id', type: 'integer', autoincrement: false, notNull: true, primaryKey: true, generated: undefined, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); expect(sqlStatements[1]).toBe( `CREATE TABLE \`__new_companies\` ( \t\`id\` integer PRIMARY KEY NOT NULL );\n`, ); expect(sqlStatements[2]).toBe( `INSERT INTO \`__new_companies\`("id") SELECT "id" FROM \`companies\`;`, ); expect(sqlStatements[3]).toBe(`DROP TABLE \`companies\`;`); expect(sqlStatements[4]).toBe( `ALTER TABLE \`__new_companies\` RENAME TO \`companies\`;`, ); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('create table with custom name references', async (t) => { const client = new Database(':memory:'); const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }); const schema1 = { users, posts: sqliteTable( 'posts', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), userId: int('user_id'), }, (t) => ({ fk: foreignKey({ columns: [t.id], foreignColumns: [users.id], name: 'custom_name_fk', }), }), ), }; const schema2 = { users, posts: sqliteTable( 'posts', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), userId: int('user_id'), }, (t) => ({ fk: foreignKey({ columns: [t.id], foreignColumns: [users.id], name: 'custom_name_fk', }), }), ), }; const { sqlStatements } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], ); expect(sqlStatements!.length).toBe(0); }); test('drop not null, add not null', async (t) => { const client = new Database(':memory:'); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }), posts: sqliteTable('posts', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), userId: int('user_id'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), posts: sqliteTable('posts', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), userId: int('user_id'), }), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite(client, schema1, schema2, []); expect(statements!.length).toBe(2); expect(statements![0]).toStrictEqual({ checkConstraints: [], columns: [ { autoincrement: true, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'name', notNull: false, primaryKey: false, type: 'text', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], }); expect(statements![1]).toStrictEqual({ checkConstraints: [], columns: [ { autoincrement: true, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'name', notNull: true, primaryKey: false, type: 'text', }, { autoincrement: false, generated: undefined, name: 'user_id', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'posts', type: 'recreate_table', uniqueConstraints: [], }); expect(sqlStatements.length).toBe(8); expect(sqlStatements[0]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \t\`name\` text );\n`); expect(sqlStatements[1]).toBe( `INSERT INTO \`__new_users\`("id", "name") SELECT "id", "name" FROM \`users\`;`, ); expect(sqlStatements[2]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[3]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(sqlStatements![4]).toBe(`CREATE TABLE \`__new_posts\` ( \t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \t\`name\` text NOT NULL, \t\`user_id\` integer );\n`); expect(sqlStatements![5]).toBe( `INSERT INTO \`__new_posts\`("id", "name", "user_id") SELECT "id", "name", "user_id" FROM \`posts\`;`, ); expect(sqlStatements![6]).toBe(`DROP TABLE \`posts\`;`); expect(sqlStatements![7]).toBe( `ALTER TABLE \`__new_posts\` RENAME TO \`posts\`;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('rename table and change data type', async (t) => { const client = new Database(':memory:'); const schema1 = { users: sqliteTable('old_users', { id: int('id').primaryKey({ autoIncrement: true }), age: text('age'), }), }; const schema2 = { users: sqliteTable('new_users', { id: int('id').primaryKey({ autoIncrement: true }), age: integer('age'), }), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite(client, schema1, schema2, [ 'public.old_users->public.new_users', ]); expect(statements!.length).toBe(2); expect(statements![0]).toStrictEqual({ fromSchema: undefined, tableNameFrom: 'old_users', tableNameTo: 'new_users', toSchema: undefined, type: 'rename_table', }); expect(statements![1]).toStrictEqual({ columns: [ { autoincrement: true, name: 'id', notNull: true, generated: undefined, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'age', notNull: false, generated: undefined, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'new_users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements!.length).toBe(5); expect(sqlStatements![0]).toBe( `ALTER TABLE \`old_users\` RENAME TO \`new_users\`;`, ); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_new_users\` ( \t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \t\`age\` integer );\n`); expect(sqlStatements![2]).toBe( `INSERT INTO \`__new_new_users\`("id", "age") SELECT "id", "age" FROM \`new_users\`;`, ); expect(sqlStatements![3]).toBe(`DROP TABLE \`new_users\`;`); expect(sqlStatements![4]).toBe( `ALTER TABLE \`__new_new_users\` RENAME TO \`new_users\`;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('rename column and change data type', async (t) => { const client = new Database(':memory:'); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), age: integer('age'), }), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite(client, schema1, schema2, [ 'public.users.name->public.users.age', ]); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: true, name: 'id', notNull: true, generated: undefined, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'age', notNull: false, generated: undefined, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements!.length).toBe(4); expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \t\`age\` integer );\n`); expect(sqlStatements![1]).toBe( `INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`, ); expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![3]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('recreate table with nested references', async (t) => { const client = new Database(':memory:'); let users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), age: integer('age'), }); let subscriptions = sqliteTable('subscriptions', { id: int('id').primaryKey({ autoIncrement: true }), userId: integer('user_id').references(() => users.id), customerId: text('customer_id'), }); const schema1 = { users: users, subscriptions: subscriptions, subscriptionMetadata: sqliteTable('subscriptions_metadata', { id: int('id').primaryKey({ autoIncrement: true }), subscriptionId: text('subscription_id').references( () => subscriptions.id, ), }), }; users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }); const schema2 = { users: users, subscriptions: subscriptions, subscriptionMetadata: sqliteTable('subscriptions_metadata', { id: int('id').primaryKey({ autoIncrement: true }), subscriptionId: text('subscription_id').references( () => subscriptions.id, ), }), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite(client, schema1, schema2, [ 'public.users.name->public.users.age', ]); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: false, name: 'id', notNull: true, generated: undefined, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, generated: undefined, primaryKey: false, type: 'text', }, { autoincrement: false, name: 'age', notNull: false, generated: undefined, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements!.length).toBe(6); expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`name\` text, \t\`age\` integer );\n`); expect(sqlStatements![2]).toBe( `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`, ); expect(sqlStatements![3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![4]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(sqlStatements[5]).toBe('PRAGMA foreign_keys=ON;'); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('recreate table with added column not null and without default with data', async (t) => { const client = new Database(':memory:'); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), age: integer('age'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), newColumn: text('new_column').notNull(), }), }; const seedStatements = [ `INSERT INTO \`users\` ("name", "age") VALUES ('drizzle', 12)`, `INSERT INTO \`users\` ("name", "age") VALUES ('turso', 12)`, ]; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], false, seedStatements, ); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: false, name: 'id', notNull: true, generated: undefined, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, generated: undefined, primaryKey: false, type: 'text', }, { autoincrement: false, name: 'age', notNull: false, generated: undefined, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'new_column', notNull: true, generated: undefined, primaryKey: false, type: 'text', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements!.length).toBe(4); expect(sqlStatements[0]).toBe('DELETE FROM \`users\`;'); expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`name\` text, \t\`age\` integer, \t\`new_column\` text NOT NULL );\n`); expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![3]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(1); expect(infoToPrint![0]).toBe( `· You're about to add not-null ${ chalk.underline('new_column') } column without default value to table, which contains 2 items`, ); expect(shouldAskForApprove).toBe(true); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(1); expect(tablesToTruncate![0]).toBe('users'); }); test('add check constraint to table', async (t) => { const client = new Database(':memory:'); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }, (table) => ({ someCheck: check('some_check', sql`${table.age} > 21`), })), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], ); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: false, name: 'id', notNull: true, generated: undefined, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, generated: undefined, primaryKey: false, type: 'text', }, { autoincrement: false, name: 'age', notNull: false, generated: undefined, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: ['some_check;"users"."age" > 21'], }); expect(sqlStatements!.length).toBe(4); expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`name\` text, \t\`age\` integer, \tCONSTRAINT "some_check" CHECK("__new_users"."age" > 21) );\n`); expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', ); expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![3]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('drop check constraint', async (t) => { const client = new Database(':memory:'); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }, (table) => ({ someCheck: check('some_check', sql`${table.age} > 21`), })), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], ); expect(statements!.length).toBe(1); expect(statements![0]).toStrictEqual({ columns: [ { autoincrement: false, name: 'id', notNull: true, generated: undefined, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, generated: undefined, primaryKey: false, type: 'text', }, { autoincrement: false, name: 'age', notNull: false, generated: undefined, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements!.length).toBe(4); expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`name\` text, \t\`age\` integer );\n`); expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_users`("id", "name", "age") SELECT "id", "name", "age" FROM `users`;', ); expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements![3]).toBe( `ALTER TABLE \`__new_users\` RENAME TO \`users\`;`, ); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('db has checks. Push with same names', async () => { const client = new Database(':memory:'); const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }, (table) => ({ someCheck: check('some_check', sql`${table.age} > 21`), })), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }, (table) => ({ someCheck: check('some_check', sql`some new value`), })), }; const { statements, sqlStatements, columnsToRemove, infoToPrint, schemasToRemove, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], false, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); expect(columnsToRemove!.length).toBe(0); expect(infoToPrint!.length).toBe(0); expect(shouldAskForApprove).toBe(false); expect(tablesToRemove!.length).toBe(0); expect(tablesToTruncate!.length).toBe(0); }); test('create view', async () => { const client = new Database(':memory:'); const table = sqliteTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, }; const schema2 = { test: table, view: sqliteView('view').as((qb) => qb.select().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], ); expect(statements).toStrictEqual([ { definition: 'select "id" from "test"', name: 'view', type: 'sqlite_create_view', }, ]); expect(sqlStatements).toStrictEqual([ `CREATE VIEW \`view\` AS select "id" from "test";`, ]); }); test('drop view', async () => { const client = new Database(':memory:'); const table = sqliteTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, view: sqliteView('view').as((qb) => qb.select().from(table)), }; const schema2 = { test: table, }; const { statements, sqlStatements } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], ); expect(statements).toStrictEqual([ { name: 'view', type: 'drop_view', }, ]); expect(sqlStatements).toStrictEqual([ 'DROP VIEW \`view\`;', ]); }); test('alter view ".as"', async () => { const client = new Database(':memory:'); const table = sqliteTable('test', { id: int('id').primaryKey(), }); const schema1 = { test: table, view: sqliteView('view').as((qb) => qb.select().from(table).where(sql`${table.id} = 1`)), }; const schema2 = { test: table, view: sqliteView('view').as((qb) => qb.select().from(table)), }; const { statements, sqlStatements } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], ); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('create composite primary key', async (t) => { const client = new Database(':memory:'); const schema1 = {}; const schema2 = { table: sqliteTable('table', { col1: integer('col1').notNull(), col2: integer('col2').notNull(), }, (t) => ({ pk: primaryKey({ columns: [t.col1, t.col2], }), })), }; const { statements, sqlStatements, } = await diffTestSchemasPushSqlite( client, schema1, schema2, [], ); expect(statements).toStrictEqual([{ type: 'sqlite_create_table', tableName: 'table', compositePKs: [['col1', 'col2']], uniqueConstraints: [], referenceData: [], checkConstraints: [], columns: [ { name: 'col1', type: 'integer', primaryKey: false, notNull: true, autoincrement: false }, { name: 'col2', type: 'integer', primaryKey: false, notNull: true, autoincrement: false }, ], }]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `table` (\n\t`col1` integer NOT NULL,\n\t`col2` integer NOT NULL,\n\tPRIMARY KEY(`col1`, `col2`)\n);\n', ]); }); test('rename table with composite primary key', async () => { const client = new Database(':memory:'); const productsCategoriesTable = (tableName: string) => { return sqliteTable(tableName, { productId: text('product_id').notNull(), categoryId: text('category_id').notNull(), }, (t) => ({ pk: primaryKey({ columns: [t.productId, t.categoryId], }), })); }; const schema1 = { table: productsCategoriesTable('products_categories'), }; const schema2 = { test: productsCategoriesTable('products_to_categories'), }; const { sqlStatements } = await diffTestSchemasPushSqlite( client, schema1, schema2, ['public.products_categories->public.products_to_categories'], false, ); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `products_categories` RENAME TO `products_to_categories`;', ]); }); ================================================ FILE: drizzle-kit/tests/rls/pg-policy.test.ts ================================================ import { sql } from 'drizzle-orm'; import { integer, pgPolicy, pgRole, pgSchema, pgTable } from 'drizzle-orm/pg-core'; import { diffTestSchemas } from 'tests/schemaDiffer'; import { expect, test } from 'vitest'; test('add policy + enable rls', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { schema: '', tableName: 'users', type: 'enable_rls', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); test('drop policy + disable rls', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', 'DROP POLICY "test" ON "users" CASCADE;', ]); expect(statements).toStrictEqual([ { schema: '', tableName: 'users', type: 'disable_rls', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, ]); }); test('add policy without enable rls', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), newrls: pgPolicy('newRls'), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'CREATE POLICY "newRls" ON "users" AS PERMISSIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'ALL', name: 'newRls', to: ['public'], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); test('drop policy without disable rls', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), oldRls: pgPolicy('oldRls'), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "oldRls" ON "users" CASCADE;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'ALL', name: 'oldRls', to: ['public'], using: undefined, on: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, ]); }); test('alter policy without recreation: changing roles', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive', to: 'current_role' }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "users" TO current_role;', ]); expect(statements).toStrictEqual([ { newData: 'test--PERMISSIVE--ALL--current_role--undefined--undefined--undefined', oldData: 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', schema: '', tableName: 'users', type: 'alter_policy', }, ]); }); test('alter policy without recreation: changing using', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive', using: sql`true` }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "users" TO public USING (true);', ]); expect(statements).toStrictEqual([ { newData: 'test--PERMISSIVE--ALL--public--true--undefined--undefined', oldData: 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', schema: '', tableName: 'users', type: 'alter_policy', }, ]); }); test('alter policy without recreation: changing with check', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive', withCheck: sql`true` }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "users" TO public WITH CHECK (true);', ]); expect(statements).toStrictEqual([ { newData: 'test--PERMISSIVE--ALL--public--undefined--true--undefined', oldData: 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', schema: '', tableName: 'users', type: 'alter_policy', }, ]); }); /// test('alter policy with recreation: changing as', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'restrictive' }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users" CASCADE;', 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], using: undefined, on: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, { data: { as: 'RESTRICTIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); test('alter policy with recreation: changing for', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive', for: 'delete' }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users" CASCADE;', 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, { data: { as: 'PERMISSIVE', for: 'DELETE', name: 'test', to: ['public'], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); test('alter policy with recreation: changing both "as" and "for"', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'restrictive', for: 'insert' }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users" CASCADE;', 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR INSERT TO public;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], using: undefined, on: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, { data: { as: 'RESTRICTIVE', for: 'INSERT', name: 'test', to: ['public'], using: undefined, on: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); test('alter policy with recreation: changing all fields', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive', for: 'select', using: sql`true` }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'restrictive', to: 'current_role', withCheck: sql`true` }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users" CASCADE;', 'CREATE POLICY "test" ON "users" AS RESTRICTIVE FOR ALL TO current_role WITH CHECK (true);', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'SELECT', name: 'test', to: ['public'], using: 'true', on: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, { data: { as: 'RESTRICTIVE', for: 'ALL', name: 'test', on: undefined, to: ['current_role'], using: undefined, withCheck: 'true', }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); test('rename policy', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('newName', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, [ 'public.users.test->public.users.newName', ]); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "users" RENAME TO "newName";', ]); expect(statements).toStrictEqual([ { newName: 'newName', oldName: 'test', schema: '', tableName: 'users', type: 'rename_policy', }, ]); }); test('rename policy in renamed table', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = { users: pgTable('users2', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('newName', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, [ 'public.users->public.users2', 'public.users2.test->public.users2.newName', ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" RENAME TO "users2";', 'ALTER POLICY "test" ON "users2" RENAME TO "newName";', ]); expect(statements).toStrictEqual([ { fromSchema: '', tableNameFrom: 'users', tableNameTo: 'users2', toSchema: '', type: 'rename_table', }, { newName: 'newName', oldName: 'test', schema: '', tableName: 'users2', type: 'rename_policy', }, ]); }); test('create table with a policy', async (t) => { const schema1 = {}; const schema2 = { users: pgTable('users2', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE "users2" (\n\t"id" integer PRIMARY KEY NOT NULL\n);\n', 'ALTER TABLE "users2" ENABLE ROW LEVEL SECURITY;', 'CREATE POLICY "test" ON "users2" AS PERMISSIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { columns: [ { name: 'id', notNull: true, primaryKey: true, type: 'integer', }, ], compositePKs: [], checkConstraints: [], compositePkName: '', policies: [ 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', ], schema: '', tableName: 'users2', isRLSEnabled: false, type: 'create_table', uniqueConstraints: [], }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: [ 'public', ], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users2', type: 'create_policy', }, ]); }); test('drop table with a policy', async (t) => { const schema1 = { users: pgTable('users2', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { as: 'permissive' }), })), }; const schema2 = {}; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users2" CASCADE;', 'DROP TABLE "users2" CASCADE;', ]); expect(statements).toStrictEqual([ { policies: [ 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', ], schema: '', tableName: 'users2', type: 'drop_table', }, ]); }); test('add policy with multiple "to" roles', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const role = pgRole('manager').existing(); const schema2 = { role, users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { to: ['current_role', role] }), })), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO current_role, "manager";', ]); expect(statements).toStrictEqual([ { schema: '', tableName: 'users', type: 'enable_rls', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: undefined, to: ['current_role', 'manager'], using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); test('create table with rls enabled', async (t) => { const schema1 = {}; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }).enableRLS(), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ `CREATE TABLE "users" (\n\t"id" integer PRIMARY KEY NOT NULL\n); `, 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', ]); }); test('enable rls force', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }).enableRLS(), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual(['ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;']); }); test('disable rls force', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }).enableRLS(), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual(['ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;']); }); test('drop policy with enabled rls', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { to: ['current_role', role] }), })).enableRLS(), }; const role = pgRole('manager').existing(); const schema2 = { role, users: pgTable('users', { id: integer('id').primaryKey(), }).enableRLS(), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users" CASCADE;', ]); }); test('add policy with enabled rls', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }).enableRLS(), }; const role = pgRole('manager').existing(); const schema2 = { role, users: pgTable('users', { id: integer('id').primaryKey(), }, () => ({ rls: pgPolicy('test', { to: ['current_role', role] }), })).enableRLS(), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO current_role, "manager";', ]); }); test('add policy + link table', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema2 = { users, rls: pgPolicy('test', { as: 'permissive' }).link(users), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { schema: '', tableName: 'users', type: 'enable_rls', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); test('link table', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }), rls: pgPolicy('test', { as: 'permissive' }), }; const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema2 = { users, rls: pgPolicy('test', { as: 'permissive' }).link(users), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { schema: '', tableName: 'users', type: 'enable_rls', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); test('unlink table', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = { users, rls: pgPolicy('test', { as: 'permissive' }).link(users), }; const schema2 = { users, rls: pgPolicy('test', { as: 'permissive' }), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', 'DROP POLICY "test" ON "users" CASCADE;', ]); expect(statements).toStrictEqual([ { schema: '', tableName: 'users', type: 'disable_rls', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, ]); }); test('drop policy with link', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = { users, rls: pgPolicy('test', { as: 'permissive' }).link(users), }; const schema2 = { users, }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" DISABLE ROW LEVEL SECURITY;', 'DROP POLICY "test" ON "users" CASCADE;', ]); expect(statements).toStrictEqual([ { schema: '', tableName: 'users', type: 'disable_rls', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, ]); }); test('add policy in table and with link table', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const users = pgTable('users', { id: integer('id').primaryKey(), }, () => [ pgPolicy('test1', { to: 'current_user' }), ]); const schema2 = { users, rls: pgPolicy('test', { as: 'permissive' }).link(users), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', 'CREATE POLICY "test1" ON "users" AS PERMISSIVE FOR ALL TO current_user;', 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { schema: '', tableName: 'users', type: 'enable_rls', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test1', to: ['current_user'], on: undefined, using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', to: ['public'], using: undefined, on: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); test('link non-schema table', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = {}; const schema2 = { rls: pgPolicy('test', { as: 'permissive' }).link(users), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'CREATE POLICY "test" ON "public"."users" AS PERMISSIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: '"public"."users"', to: [ 'public', ], using: undefined, withCheck: undefined, }, tableName: '"public"."users"', type: 'create_ind_policy', }, ]); }); test('unlink non-schema table', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = { rls: pgPolicy('test', { as: 'permissive' }).link(users), }; const schema2 = { rls: pgPolicy('test', { as: 'permissive' }), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "public"."users" CASCADE;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: '"public"."users"', to: [ 'public', ], using: undefined, withCheck: undefined, }, tableName: '"public"."users"', type: 'drop_ind_policy', }, ]); }); test('add policy + link non-schema table', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const cities = pgTable('cities', { id: integer('id').primaryKey(), }); const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, (t) => [ pgPolicy('test2'), ]), rls: pgPolicy('test', { as: 'permissive' }).link(cities), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', 'CREATE POLICY "test2" ON "users" AS PERMISSIVE FOR ALL TO public;', 'CREATE POLICY "test" ON "public"."cities" AS PERMISSIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { schema: '', tableName: 'users', type: 'enable_rls', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test2', on: undefined, to: [ 'public', ], using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: '"public"."cities"', to: [ 'public', ], using: undefined, withCheck: undefined, }, tableName: '"public"."cities"', type: 'create_ind_policy', }, ]); }); test('add policy + link non-schema table from auth schema', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }), }; const authSchema = pgSchema('auth'); const cities = authSchema.table('cities', { id: integer('id').primaryKey(), }); const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, (t) => [ pgPolicy('test2'), ]), rls: pgPolicy('test', { as: 'permissive' }).link(cities), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE "users" ENABLE ROW LEVEL SECURITY;', 'CREATE POLICY "test2" ON "users" AS PERMISSIVE FOR ALL TO public;', 'CREATE POLICY "test" ON "auth"."cities" AS PERMISSIVE FOR ALL TO public;', ]); expect(statements).toStrictEqual([ { schema: '', tableName: 'users', type: 'enable_rls', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test2', on: undefined, to: [ 'public', ], using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, { data: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: '"auth"."cities"', to: [ 'public', ], using: undefined, withCheck: undefined, }, tableName: '"auth"."cities"', type: 'create_ind_policy', }, ]); }); test('rename policy that is linked', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = { rls: pgPolicy('test', { as: 'permissive' }).link(users), }; const schema2 = { rls: pgPolicy('newName', { as: 'permissive' }).link(users), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, [ '"public"."users".test->"public"."users".newName', ]); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "public"."users" RENAME TO "newName";', ]); expect(statements).toStrictEqual([ { newName: 'newName', oldName: 'test', tableKey: '"public"."users"', type: 'rename_ind_policy', }, ]); }); test('alter policy that is linked', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = { rls: pgPolicy('test', { as: 'permissive' }).link(users), }; const schema2 = { rls: pgPolicy('test', { as: 'permissive', to: 'current_role' }).link(users), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "public"."users" TO current_role;', ]); expect(statements).toStrictEqual([ { newData: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: '"public"."users"', to: [ 'current_role', ], using: undefined, withCheck: undefined, }, oldData: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: '"public"."users"', to: [ 'public', ], using: undefined, withCheck: undefined, }, type: 'alter_ind_policy', }, ]); }); test('alter policy that is linked: withCheck', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = { rls: pgPolicy('test', { as: 'permissive', withCheck: sql`true` }).link(users), }; const schema2 = { rls: pgPolicy('test', { as: 'permissive', withCheck: sql`false` }).link(users), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "public"."users" TO public WITH CHECK (false);', ]); expect(statements).toStrictEqual([ { newData: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: '"public"."users"', to: [ 'public', ], using: undefined, withCheck: 'false', }, oldData: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: '"public"."users"', to: [ 'public', ], using: undefined, withCheck: 'true', }, type: 'alter_ind_policy', }, ]); }); test('alter policy that is linked: using', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = { rls: pgPolicy('test', { as: 'permissive', using: sql`true` }).link(users), }; const schema2 = { rls: pgPolicy('test', { as: 'permissive', using: sql`false` }).link(users), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "public"."users" TO public USING (false);', ]); expect(statements).toStrictEqual([ { newData: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: '"public"."users"', to: [ 'public', ], using: 'false', withCheck: undefined, }, oldData: { as: 'PERMISSIVE', for: 'ALL', name: 'test', on: '"public"."users"', to: [ 'public', ], using: 'true', withCheck: undefined, }, type: 'alter_ind_policy', }, ]); }); test('alter policy that is linked: using', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = { rls: pgPolicy('test', { for: 'insert' }).link(users), }; const schema2 = { rls: pgPolicy('test', { for: 'delete' }).link(users), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "public"."users" CASCADE;', 'CREATE POLICY "test" ON "public"."users" AS PERMISSIVE FOR DELETE TO public;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'INSERT', name: 'test', on: '"public"."users"', to: [ 'public', ], using: undefined, withCheck: undefined, }, tableName: '"public"."users"', type: 'drop_ind_policy', }, { data: { as: 'PERMISSIVE', for: 'DELETE', name: 'test', on: '"public"."users"', to: [ 'public', ], using: undefined, withCheck: undefined, }, tableName: '"public"."users"', type: 'create_ind_policy', }, ]); }); //// test('alter policy in the table', async (t) => { const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, (t) => [ pgPolicy('test', { as: 'permissive' }), ]), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, (t) => [ pgPolicy('test', { as: 'permissive', to: 'current_role' }), ]), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "users" TO current_role;', ]); expect(statements).toStrictEqual([ { newData: 'test--PERMISSIVE--ALL--current_role--undefined--undefined--undefined', oldData: 'test--PERMISSIVE--ALL--public--undefined--undefined--undefined', schema: '', tableName: 'users', type: 'alter_policy', }, ]); }); test('alter policy in the table: withCheck', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, (t) => [ pgPolicy('test', { as: 'permissive', withCheck: sql`true` }), ]), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, (t) => [ pgPolicy('test', { as: 'permissive', withCheck: sql`false` }), ]), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "users" TO public WITH CHECK (false);', ]); expect(statements).toStrictEqual([ { newData: 'test--PERMISSIVE--ALL--public--undefined--false--undefined', oldData: 'test--PERMISSIVE--ALL--public--undefined--true--undefined', schema: '', tableName: 'users', type: 'alter_policy', }, ]); }); test('alter policy in the table: using', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, (t) => [ pgPolicy('test', { as: 'permissive', using: sql`true` }), ]), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, (t) => [ pgPolicy('test', { as: 'permissive', using: sql`false` }), ]), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'ALTER POLICY "test" ON "users" TO public USING (false);', ]); expect(statements).toStrictEqual([ { newData: 'test--PERMISSIVE--ALL--public--false--undefined--undefined', oldData: 'test--PERMISSIVE--ALL--public--true--undefined--undefined', schema: '', tableName: 'users', type: 'alter_policy', }, ]); }); test('alter policy in the table: using', async (t) => { const users = pgTable('users', { id: integer('id').primaryKey(), }); const schema1 = { users: pgTable('users', { id: integer('id').primaryKey(), }, (t) => [ pgPolicy('test', { for: 'insert' }), ]), }; const schema2 = { users: pgTable('users', { id: integer('id').primaryKey(), }, (t) => [ pgPolicy('test', { for: 'delete' }), ]), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual([ 'DROP POLICY "test" ON "users" CASCADE;', 'CREATE POLICY "test" ON "users" AS PERMISSIVE FOR DELETE TO public;', ]); expect(statements).toStrictEqual([ { data: { as: 'PERMISSIVE', for: 'INSERT', name: 'test', on: undefined, to: [ 'public', ], using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'drop_policy', }, { data: { as: 'PERMISSIVE', for: 'DELETE', name: 'test', on: undefined, to: [ 'public', ], using: undefined, withCheck: undefined, }, schema: '', tableName: 'users', type: 'create_policy', }, ]); }); ================================================ FILE: drizzle-kit/tests/rls/pg-role.test.ts ================================================ import { pgRole } from 'drizzle-orm/pg-core'; import { diffTestSchemas } from 'tests/schemaDiffer'; import { expect, test } from 'vitest'; test('create role', async (t) => { const schema1 = {}; const schema2 = { manager: pgRole('manager'), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager";']); expect(statements).toStrictEqual([ { name: 'manager', type: 'create_role', values: { createDb: false, createRole: false, inherit: true, }, }, ]); }); test('create role with properties', async (t) => { const schema1 = {}; const schema2 = { manager: pgRole('manager', { createDb: true, inherit: false, createRole: true }), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager" WITH CREATEDB CREATEROLE NOINHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'create_role', values: { createDb: true, createRole: true, inherit: false, }, }, ]); }); test('create role with some properties', async (t) => { const schema1 = {}; const schema2 = { manager: pgRole('manager', { createDb: true, inherit: false }), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual(['CREATE ROLE "manager" WITH CREATEDB NOINHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'create_role', values: { createDb: true, createRole: false, inherit: false, }, }, ]); }); test('drop role', async (t) => { const schema1 = { manager: pgRole('manager') }; const schema2 = {}; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual(['DROP ROLE "manager";']); expect(statements).toStrictEqual([ { name: 'manager', type: 'drop_role', }, ]); }); test('create and drop role', async (t) => { const schema1 = { manager: pgRole('manager'), }; const schema2 = { admin: pgRole('admin'), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual(['DROP ROLE "manager";', 'CREATE ROLE "admin";']); expect(statements).toStrictEqual([ { name: 'manager', type: 'drop_role', }, { name: 'admin', type: 'create_role', values: { createDb: false, createRole: false, inherit: true, }, }, ]); }); test('rename role', async (t) => { const schema1 = { manager: pgRole('manager'), }; const schema2 = { admin: pgRole('admin'), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, ['manager->admin']); expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" RENAME TO "admin";']); expect(statements).toStrictEqual([ { nameFrom: 'manager', nameTo: 'admin', type: 'rename_role' }, ]); }); test('alter all role field', async (t) => { const schema1 = { manager: pgRole('manager'), }; const schema2 = { manager: pgRole('manager', { createDb: true, createRole: true, inherit: false }), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH CREATEDB CREATEROLE NOINHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'alter_role', values: { createDb: true, createRole: true, inherit: false, }, }, ]); }); test('alter createdb in role', async (t) => { const schema1 = { manager: pgRole('manager'), }; const schema2 = { manager: pgRole('manager', { createDb: true }), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH CREATEDB NOCREATEROLE INHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'alter_role', values: { createDb: true, createRole: false, inherit: true, }, }, ]); }); test('alter createrole in role', async (t) => { const schema1 = { manager: pgRole('manager'), }; const schema2 = { manager: pgRole('manager', { createRole: true }), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH NOCREATEDB CREATEROLE INHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'alter_role', values: { createDb: false, createRole: true, inherit: true, }, }, ]); }); test('alter inherit in role', async (t) => { const schema1 = { manager: pgRole('manager'), }; const schema2 = { manager: pgRole('manager', { inherit: false }), }; const { statements, sqlStatements } = await diffTestSchemas(schema1, schema2, []); expect(sqlStatements).toStrictEqual(['ALTER ROLE "manager" WITH NOCREATEDB NOCREATEROLE NOINHERIT;']); expect(statements).toStrictEqual([ { name: 'manager', type: 'alter_role', values: { createDb: false, createRole: false, inherit: false, }, }, ]); }); ================================================ FILE: drizzle-kit/tests/schemaDiffer.ts ================================================ import { PGlite } from '@electric-sql/pglite'; import { Client } from '@libsql/client/.'; import { Database } from 'better-sqlite3'; import { is } from 'drizzle-orm'; import { MySqlSchema, MySqlTable, MySqlView } from 'drizzle-orm/mysql-core'; import { getMaterializedViewConfig, isPgEnum, isPgMaterializedView, isPgSequence, isPgView, PgEnum, PgEnumObject, PgMaterializedView, PgPolicy, PgRole, PgSchema, PgSequence, PgTable, PgView, } from 'drizzle-orm/pg-core'; import { SingleStoreSchema, SingleStoreTable } from 'drizzle-orm/singlestore-core'; import { SQLiteTable, SQLiteView } from 'drizzle-orm/sqlite-core'; import * as fs from 'fs'; import { type Client as GelClient } from 'gel'; import { Connection } from 'mysql2/promise'; import { libSqlLogSuggestionsAndReturn } from 'src/cli/commands/libSqlPushUtils'; import { columnsResolver, enumsResolver, indPolicyResolver, mySqlViewsResolver, Named, policyResolver, roleResolver, schemasResolver, sequencesResolver, sqliteViewsResolver, tablesResolver, viewsResolver, } from 'src/cli/commands/migrate'; import { pgSuggestions } from 'src/cli/commands/pgPushUtils'; import { logSuggestionsAndReturn as singleStoreLogSuggestionsAndReturn } from 'src/cli/commands/singlestorePushUtils'; import { logSuggestionsAndReturn } from 'src/cli/commands/sqlitePushUtils'; import { Entities } from 'src/cli/validations/cli'; import { CasingType } from 'src/cli/validations/common'; import { schemaToTypeScript as schemaToTypeScriptGel } from 'src/introspect-gel'; import { schemaToTypeScript as schemaToTypeScriptMySQL } from 'src/introspect-mysql'; import { schemaToTypeScript } from 'src/introspect-pg'; import { schemaToTypeScript as schemaToTypeScriptSingleStore } from 'src/introspect-singlestore'; import { schemaToTypeScript as schemaToTypeScriptSQLite } from 'src/introspect-sqlite'; import { fromDatabase as fromGelDatabase } from 'src/serializer/gelSerializer'; import { prepareFromMySqlImports } from 'src/serializer/mysqlImports'; import { mysqlSchema, squashMysqlScheme, ViewSquashed } from 'src/serializer/mysqlSchema'; import { fromDatabase as fromMySqlDatabase, generateMySqlSnapshot } from 'src/serializer/mysqlSerializer'; import { prepareFromPgImports } from 'src/serializer/pgImports'; import { pgSchema, Policy, Role, squashPgScheme, View } from 'src/serializer/pgSchema'; import { fromDatabase, generatePgSnapshot } from 'src/serializer/pgSerializer'; import { prepareFromSingleStoreImports } from 'src/serializer/singlestoreImports'; import { singlestoreSchema, squashSingleStoreScheme } from 'src/serializer/singlestoreSchema'; import { fromDatabase as fromSingleStoreDatabase, generateSingleStoreSnapshot, } from 'src/serializer/singlestoreSerializer'; import { prepareFromSqliteImports } from 'src/serializer/sqliteImports'; import { sqliteSchema, squashSqliteScheme, View as SqliteView } from 'src/serializer/sqliteSchema'; import { fromDatabase as fromSqliteDatabase, generateSqliteSnapshot } from 'src/serializer/sqliteSerializer'; import { applyLibSQLSnapshotsDiff, applyMysqlSnapshotsDiff, applyPgSnapshotsDiff, applySingleStoreSnapshotsDiff, applySqliteSnapshotsDiff, Column, ColumnsResolverInput, ColumnsResolverOutput, Enum, PolicyResolverInput, PolicyResolverOutput, ResolverInput, ResolverOutput, ResolverOutputWithMoved, RolesResolverInput, RolesResolverOutput, Sequence, Table, TablePolicyResolverInput, TablePolicyResolverOutput, } from 'src/snapshotsDiffer'; export type PostgresSchema = Record< string, | PgTable | PgEnum | PgEnumObject | PgSchema | PgSequence | PgView | PgMaterializedView | PgRole | PgPolicy >; export type MysqlSchema = Record< string, MySqlTable | MySqlSchema | MySqlView >; export type SqliteSchema = Record | SQLiteView>; export type SinglestoreSchema = Record< string, SingleStoreTable | SingleStoreSchema /* | SingleStoreView */ >; export const testSchemasResolver = (renames: Set) => async (input: ResolverInput): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { created: input.created, renamed: [], deleted: input.deleted, }; } let createdSchemas = [...input.created]; let deletedSchemas = [...input.deleted]; const result: { created: Named[]; renamed: { from: Named; to: Named }[]; deleted: Named[]; } = { created: [], renamed: [], deleted: [] }; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedSchemas.findIndex((it) => { return it.name === from; }); if (idxFrom >= 0) { const idxTo = createdSchemas.findIndex((it) => { return it.name === to; }); result.renamed.push({ from: deletedSchemas[idxFrom], to: createdSchemas[idxTo], }); delete createdSchemas[idxTo]; delete deletedSchemas[idxFrom]; createdSchemas = createdSchemas.filter(Boolean); deletedSchemas = deletedSchemas.filter(Boolean); } } result.created = createdSchemas; result.deleted = deletedSchemas; return result; } catch (e) { console.error(e); throw e; } }; export const testSequencesResolver = (renames: Set) => async ( input: ResolverInput, ): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { created: input.created, moved: [], renamed: [], deleted: input.deleted, }; } let createdSequences = [...input.created]; let deletedSequences = [...input.deleted]; const result: { created: Sequence[]; moved: { name: string; schemaFrom: string; schemaTo: string }[]; renamed: { from: Sequence; to: Sequence }[]; deleted: Sequence[]; } = { created: [], renamed: [], deleted: [], moved: [] }; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedSequences.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === from; }); if (idxFrom >= 0) { const idxTo = createdSequences.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === to; }); const tableFrom = deletedSequences[idxFrom]; const tableTo = createdSequences[idxFrom]; if (tableFrom.schema !== tableTo.schema) { result.moved.push({ name: tableFrom.name, schemaFrom: tableFrom.schema, schemaTo: tableTo.schema, }); } if (tableFrom.name !== tableTo.name) { result.renamed.push({ from: deletedSequences[idxFrom], to: createdSequences[idxTo], }); } delete createdSequences[idxTo]; delete deletedSequences[idxFrom]; createdSequences = createdSequences.filter(Boolean); deletedSequences = deletedSequences.filter(Boolean); } } result.created = createdSequences; result.deleted = deletedSequences; return result; } catch (e) { console.error(e); throw e; } }; export const testEnumsResolver = (renames: Set) => async ( input: ResolverInput, ): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { created: input.created, moved: [], renamed: [], deleted: input.deleted, }; } let createdEnums = [...input.created]; let deletedEnums = [...input.deleted]; const result: { created: Enum[]; moved: { name: string; schemaFrom: string; schemaTo: string }[]; renamed: { from: Enum; to: Enum }[]; deleted: Enum[]; } = { created: [], renamed: [], deleted: [], moved: [] }; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedEnums.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === from; }); if (idxFrom >= 0) { const idxTo = createdEnums.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === to; }); const tableFrom = deletedEnums[idxFrom]; const tableTo = createdEnums[idxFrom]; if (tableFrom.schema !== tableTo.schema) { result.moved.push({ name: tableFrom.name, schemaFrom: tableFrom.schema, schemaTo: tableTo.schema, }); } if (tableFrom.name !== tableTo.name) { result.renamed.push({ from: deletedEnums[idxFrom], to: createdEnums[idxTo], }); } delete createdEnums[idxTo]; delete deletedEnums[idxFrom]; createdEnums = createdEnums.filter(Boolean); deletedEnums = deletedEnums.filter(Boolean); } } result.created = createdEnums; result.deleted = deletedEnums; return result; } catch (e) { console.error(e); throw e; } }; export const testTablesResolver = (renames: Set) => async ( input: ResolverInput
, ): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { created: input.created, moved: [], renamed: [], deleted: input.deleted, }; } let createdTables = [...input.created]; let deletedTables = [...input.deleted]; const result: { created: Table[]; moved: { name: string; schemaFrom: string; schemaTo: string }[]; renamed: { from: Table; to: Table }[]; deleted: Table[]; } = { created: [], renamed: [], deleted: [], moved: [] }; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedTables.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === from; }); if (idxFrom >= 0) { const idxTo = createdTables.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === to; }); const tableFrom = deletedTables[idxFrom]; const tableTo = createdTables[idxFrom]; if (tableFrom.schema !== tableTo.schema) { result.moved.push({ name: tableFrom.name, schemaFrom: tableFrom.schema, schemaTo: tableTo.schema, }); } if (tableFrom.name !== tableTo.name) { result.renamed.push({ from: deletedTables[idxFrom], to: createdTables[idxTo], }); } delete createdTables[idxTo]; delete deletedTables[idxFrom]; createdTables = createdTables.filter(Boolean); deletedTables = deletedTables.filter(Boolean); } } result.created = createdTables; result.deleted = deletedTables; return result; } catch (e) { console.error(e); throw e; } }; export const testColumnsResolver = (renames: Set) => async ( input: ColumnsResolverInput, ): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { tableName: input.tableName, schema: input.schema, created: input.created, renamed: [], deleted: input.deleted, }; } let createdColumns = [...input.created]; let deletedColumns = [...input.deleted]; const renamed: { from: Column; to: Column }[] = []; const schema = input.schema || 'public'; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedColumns.findIndex((it) => { return `${schema}.${input.tableName}.${it.name}` === from; }); if (idxFrom >= 0) { const idxTo = createdColumns.findIndex((it) => { return `${schema}.${input.tableName}.${it.name}` === to; }); renamed.push({ from: deletedColumns[idxFrom], to: createdColumns[idxTo], }); delete createdColumns[idxTo]; delete deletedColumns[idxFrom]; createdColumns = createdColumns.filter(Boolean); deletedColumns = deletedColumns.filter(Boolean); } } return { tableName: input.tableName, schema: input.schema, created: createdColumns, deleted: deletedColumns, renamed, }; } catch (e) { console.error(e); throw e; } }; export const testPolicyResolver = (renames: Set) => async ( input: TablePolicyResolverInput, ): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { tableName: input.tableName, schema: input.schema, created: input.created, renamed: [], deleted: input.deleted, }; } let createdPolicies = [...input.created]; let deletedPolicies = [...input.deleted]; const renamed: { from: Policy; to: Policy }[] = []; const schema = input.schema || 'public'; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedPolicies.findIndex((it) => { return `${schema}.${input.tableName}.${it.name}` === from; }); if (idxFrom >= 0) { const idxTo = createdPolicies.findIndex((it) => { return `${schema}.${input.tableName}.${it.name}` === to; }); renamed.push({ from: deletedPolicies[idxFrom], to: createdPolicies[idxTo], }); delete createdPolicies[idxTo]; delete deletedPolicies[idxFrom]; createdPolicies = createdPolicies.filter(Boolean); deletedPolicies = deletedPolicies.filter(Boolean); } } return { tableName: input.tableName, schema: input.schema, created: createdPolicies, deleted: deletedPolicies, renamed, }; } catch (e) { console.error(e); throw e; } }; export const testIndPolicyResolver = (renames: Set) => async ( input: PolicyResolverInput, ): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { created: input.created, renamed: [], deleted: input.deleted, }; } let createdPolicies = [...input.created]; let deletedPolicies = [...input.deleted]; const renamed: { from: Policy; to: Policy }[] = []; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedPolicies.findIndex((it) => { return `${it.on}.${it.name}` === from; }); if (idxFrom >= 0) { const idxTo = createdPolicies.findIndex((it) => { return `${it.on}.${it.name}` === to; }); renamed.push({ from: deletedPolicies[idxFrom], to: createdPolicies[idxTo], }); delete createdPolicies[idxTo]; delete deletedPolicies[idxFrom]; createdPolicies = createdPolicies.filter(Boolean); deletedPolicies = deletedPolicies.filter(Boolean); } } return { created: createdPolicies, deleted: deletedPolicies, renamed, }; } catch (e) { console.error(e); throw e; } }; export const testRolesResolver = (renames: Set) => async ( input: RolesResolverInput, ): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { created: input.created, renamed: [], deleted: input.deleted, }; } let createdPolicies = [...input.created]; let deletedPolicies = [...input.deleted]; const renamed: { from: Policy; to: Policy }[] = []; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedPolicies.findIndex((it) => { return `${it.name}` === from; }); if (idxFrom >= 0) { const idxTo = createdPolicies.findIndex((it) => { return `${it.name}` === to; }); renamed.push({ from: deletedPolicies[idxFrom], to: createdPolicies[idxTo], }); delete createdPolicies[idxTo]; delete deletedPolicies[idxFrom]; createdPolicies = createdPolicies.filter(Boolean); deletedPolicies = deletedPolicies.filter(Boolean); } } return { created: createdPolicies, deleted: deletedPolicies, renamed, }; } catch (e) { console.error(e); throw e; } }; export const testViewsResolver = (renames: Set) => async ( input: ResolverInput, ): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { created: input.created, moved: [], renamed: [], deleted: input.deleted, }; } let createdViews = [...input.created]; let deletedViews = [...input.deleted]; const result: { created: View[]; moved: { name: string; schemaFrom: string; schemaTo: string }[]; renamed: { from: View; to: View }[]; deleted: View[]; } = { created: [], renamed: [], deleted: [], moved: [] }; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedViews.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === from; }); if (idxFrom >= 0) { const idxTo = createdViews.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === to; }); const viewFrom = deletedViews[idxFrom]; const viewTo = createdViews[idxFrom]; if (viewFrom.schema !== viewTo.schema) { result.moved.push({ name: viewFrom.name, schemaFrom: viewFrom.schema, schemaTo: viewTo.schema, }); } if (viewFrom.name !== viewTo.name) { result.renamed.push({ from: deletedViews[idxFrom], to: createdViews[idxTo], }); } delete createdViews[idxTo]; delete deletedViews[idxFrom]; createdViews = createdViews.filter(Boolean); deletedViews = deletedViews.filter(Boolean); } } result.created = createdViews; result.deleted = deletedViews; return result; } catch (e) { console.error(e); throw e; } }; export const testViewsResolverMySql = (renames: Set) => async ( input: ResolverInput, ): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { created: input.created, moved: [], renamed: [], deleted: input.deleted, }; } let createdViews = [...input.created]; let deletedViews = [...input.deleted]; const result: { created: ViewSquashed[]; moved: { name: string; schemaFrom: string; schemaTo: string }[]; renamed: { from: ViewSquashed; to: ViewSquashed }[]; deleted: ViewSquashed[]; } = { created: [], renamed: [], deleted: [], moved: [] }; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedViews.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === from; }); if (idxFrom >= 0) { const idxTo = createdViews.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === to; }); const viewFrom = deletedViews[idxFrom]; const viewTo = createdViews[idxFrom]; if (viewFrom.schema !== viewTo.schema) { result.moved.push({ name: viewFrom.name, schemaFrom: viewFrom.schema, schemaTo: viewTo.schema, }); } if (viewFrom.name !== viewTo.name) { result.renamed.push({ from: deletedViews[idxFrom], to: createdViews[idxTo], }); } delete createdViews[idxTo]; delete deletedViews[idxFrom]; createdViews = createdViews.filter(Boolean); deletedViews = deletedViews.filter(Boolean); } } result.created = createdViews; result.deleted = deletedViews; return result; } catch (e) { console.error(e); throw e; } }; export const testViewsResolverSingleStore = (renames: Set) => async ( input: ResolverInput, ): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { created: input.created, moved: [], renamed: [], deleted: input.deleted, }; } let createdViews = [...input.created]; let deletedViews = [...input.deleted]; const result: { created: ViewSquashed[]; moved: { name: string; schemaFrom: string; schemaTo: string }[]; renamed: { from: ViewSquashed; to: ViewSquashed }[]; deleted: ViewSquashed[]; } = { created: [], renamed: [], deleted: [], moved: [] }; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedViews.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === from; }); if (idxFrom >= 0) { const idxTo = createdViews.findIndex((it) => { return `${it.schema || 'public'}.${it.name}` === to; }); const viewFrom = deletedViews[idxFrom]; const viewTo = createdViews[idxFrom]; if (viewFrom.schema !== viewTo.schema) { result.moved.push({ name: viewFrom.name, schemaFrom: viewFrom.schema, schemaTo: viewTo.schema, }); } if (viewFrom.name !== viewTo.name) { result.renamed.push({ from: deletedViews[idxFrom], to: createdViews[idxTo], }); } delete createdViews[idxTo]; delete deletedViews[idxFrom]; createdViews = createdViews.filter(Boolean); deletedViews = deletedViews.filter(Boolean); } } result.created = createdViews; result.deleted = deletedViews; return result; } catch (e) { console.error(e); throw e; } }; export const testViewsResolverSqlite = (renames: Set) => async ( input: ResolverInput, ): Promise> => { try { if ( input.created.length === 0 || input.deleted.length === 0 || renames.size === 0 ) { return { created: input.created, moved: [], renamed: [], deleted: input.deleted, }; } let createdViews = [...input.created]; let deletedViews = [...input.deleted]; const result: { created: SqliteView[]; moved: { name: string; schemaFrom: string; schemaTo: string }[]; renamed: { from: SqliteView; to: SqliteView }[]; deleted: SqliteView[]; } = { created: [], renamed: [], deleted: [], moved: [] }; for (let rename of renames) { const [from, to] = rename.split('->'); const idxFrom = deletedViews.findIndex((it) => { return it.name === from; }); if (idxFrom >= 0) { const idxTo = createdViews.findIndex((it) => { return it.name === to; }); const viewFrom = deletedViews[idxFrom]; const viewTo = createdViews[idxFrom]; if (viewFrom.name !== viewTo.name) { result.renamed.push({ from: deletedViews[idxFrom], to: createdViews[idxTo], }); } delete createdViews[idxTo]; delete deletedViews[idxFrom]; createdViews = createdViews.filter(Boolean); deletedViews = deletedViews.filter(Boolean); } } result.created = createdViews; result.deleted = deletedViews; return result; } catch (e) { console.error(e); throw e; } }; export const diffTestSchemasPush = async ( client: PGlite, left: PostgresSchema, right: PostgresSchema, renamesArr: string[], cli: boolean = false, schemas: string[] = ['public'], casing?: CasingType | undefined, entities?: Entities, sqlStatementsToRun: { before?: string[]; after?: string[]; runApply?: boolean; } = { before: [], after: [], runApply: true, }, ) => { const shouldRunApply = sqlStatementsToRun.runApply === undefined ? true : sqlStatementsToRun.runApply; for (const st of sqlStatementsToRun.before ?? []) { await client.query(st); } if (shouldRunApply) { const res = await applyPgDiffs(left, casing); for (const st of res.sqlStatements) { await client.query(st); } } for (const st of sqlStatementsToRun.after ?? []) { await client.query(st); } const materializedViewsForRefresh = Object.values(left).filter((it) => isPgMaterializedView(it) ) as PgMaterializedView[]; // refresh all mat views for (const view of materializedViewsForRefresh) { const viewConf = getMaterializedViewConfig(view); if (viewConf.isExisting) continue; await client.exec( `REFRESH MATERIALIZED VIEW "${viewConf.schema ?? 'public'}"."${viewConf.name}"${ viewConf.withNoData ? ' WITH NO DATA;' : ';' }`, ); } // do introspect into PgSchemaInternal const introspectedSchema = await fromDatabase( { query: async (query: string, values?: any[] | undefined) => { const res = await client.query(query, values); return res.rows as any[]; }, }, undefined, schemas, entities, ); const leftTables = Object.values(right).filter((it) => is(it, PgTable)) as PgTable[]; const leftSchemas = Object.values(right).filter((it) => is(it, PgSchema)) as PgSchema[]; const leftEnums = Object.values(right).filter((it) => isPgEnum(it)) as PgEnum[]; const leftSequences = Object.values(right).filter((it) => isPgSequence(it)) as PgSequence[]; const leftRoles = Object.values(right).filter((it) => is(it, PgRole)) as PgRole[]; const leftPolicies = Object.values(right).filter((it) => is(it, PgPolicy)) as PgPolicy[]; const leftViews = Object.values(right).filter((it) => isPgView(it)) as PgView[]; const leftMaterializedViews = Object.values(right).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; const serialized2 = generatePgSnapshot( leftTables, leftEnums, leftSchemas, leftSequences, leftRoles, leftPolicies, leftViews, leftMaterializedViews, casing, ); const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { version: '7', dialect: 'postgresql', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { version: '7', dialect: 'postgresql', id: '0', prevId: '0', ...rest2, } as const; const sn1 = squashPgScheme(sch1, 'push'); const sn2 = squashPgScheme(sch2, 'push'); const validatedPrev = pgSchema.parse(sch1); const validatedCur = pgSchema.parse(sch2); const renames = new Set(renamesArr); if (!cli) { const { sqlStatements, statements } = await applyPgSnapshotsDiff( sn1, sn2, testSchemasResolver(renames), testEnumsResolver(renames), testSequencesResolver(renames), testPolicyResolver(renames), testIndPolicyResolver(renames), testRolesResolver(renames), testTablesResolver(renames), testColumnsResolver(renames), testViewsResolver(renames), validatedPrev, validatedCur, 'push', ); const { shouldAskForApprove, statementsToExecute, columnsToRemove, tablesToRemove, tablesToTruncate, infoToPrint, schemasToRemove, matViewsToRemove, } = await pgSuggestions( { query: async (sql: string, params: any[] = []) => { return (await client.query(sql, params)).rows as T[]; }, }, statements, ); return { sqlStatements: statementsToExecute, statements, shouldAskForApprove, columnsToRemove, tablesToRemove, tablesToTruncate, infoToPrint, schemasToRemove, matViewsToRemove, }; } else { const { sqlStatements, statements } = await applyPgSnapshotsDiff( sn1, sn2, schemasResolver, enumsResolver, sequencesResolver, policyResolver, indPolicyResolver, roleResolver, tablesResolver, columnsResolver, viewsResolver, validatedPrev, validatedCur, 'push', ); return { sqlStatements, statements }; } }; export const applyPgDiffs = async ( sn: PostgresSchema, casing: CasingType | undefined, ) => { const dryRun = { version: '7', dialect: 'postgresql', id: '0', prevId: '0', tables: {}, enums: {}, views: {}, schemas: {}, sequences: {}, policies: {}, roles: {}, _meta: { schemas: {}, tables: {}, columns: {}, }, } as const; const tables = Object.values(sn).filter((it) => is(it, PgTable)) as PgTable[]; const schemas = Object.values(sn).filter((it) => is(it, PgSchema)) as PgSchema[]; const enums = Object.values(sn).filter((it) => isPgEnum(it)) as PgEnum[]; const sequences = Object.values(sn).filter((it) => isPgSequence(it)) as PgSequence[]; const roles = Object.values(sn).filter((it) => is(it, PgRole)) as PgRole[]; const views = Object.values(sn).filter((it) => isPgView(it)) as PgView[]; const policies = Object.values(sn).filter((it) => is(it, PgPolicy)) as PgPolicy[]; const materializedViews = Object.values(sn).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; const serialized1 = generatePgSnapshot( tables, enums, schemas, sequences, roles, policies, views, materializedViews, casing, ); const { version: v1, dialect: d1, ...rest1 } = serialized1; const sch1 = { version: '7', dialect: 'postgresql', id: '0', prevId: '0', ...rest1, } as const; const sn1 = squashPgScheme(sch1); const validatedPrev = pgSchema.parse(dryRun); const validatedCur = pgSchema.parse(sch1); const { sqlStatements, statements } = await applyPgSnapshotsDiff( dryRun, sn1, testSchemasResolver(new Set()), testEnumsResolver(new Set()), testSequencesResolver(new Set()), testPolicyResolver(new Set()), testIndPolicyResolver(new Set()), testRolesResolver(new Set()), testTablesResolver(new Set()), testColumnsResolver(new Set()), testViewsResolver(new Set()), validatedPrev, validatedCur, ); return { sqlStatements, statements }; }; export const diffTestSchemas = async ( left: PostgresSchema, right: PostgresSchema, renamesArr: string[], cli: boolean = false, casing?: CasingType | undefined, ) => { const leftTables = Object.values(left).filter((it) => is(it, PgTable)) as PgTable[]; const rightTables = Object.values(right).filter((it) => is(it, PgTable)) as PgTable[]; const leftSchemas = Object.values(left).filter((it) => is(it, PgSchema)) as PgSchema[]; const rightSchemas = Object.values(right).filter((it) => is(it, PgSchema)) as PgSchema[]; const leftEnums = Object.values(left).filter((it) => isPgEnum(it)) as PgEnum[]; const rightEnums = Object.values(right).filter((it) => isPgEnum(it)) as PgEnum[]; const leftSequences = Object.values(left).filter((it) => isPgSequence(it)) as PgSequence[]; const rightSequences = Object.values(right).filter((it) => isPgSequence(it)) as PgSequence[]; const leftRoles = Object.values(left).filter((it) => is(it, PgRole)) as PgRole[]; const rightRoles = Object.values(right).filter((it) => is(it, PgRole)) as PgRole[]; const leftPolicies = Object.values(left).filter((it) => is(it, PgPolicy)) as PgPolicy[]; const rightPolicies = Object.values(right).filter((it) => is(it, PgPolicy)) as PgPolicy[]; const leftViews = Object.values(left).filter((it) => isPgView(it)) as PgView[]; const rightViews = Object.values(right).filter((it) => isPgView(it)) as PgView[]; const leftMaterializedViews = Object.values(left).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; const rightMaterializedViews = Object.values(right).filter((it) => isPgMaterializedView(it)) as PgMaterializedView[]; const serialized1 = generatePgSnapshot( leftTables, leftEnums, leftSchemas, leftSequences, leftRoles, leftPolicies, leftViews, leftMaterializedViews, casing, ); const serialized2 = generatePgSnapshot( rightTables, rightEnums, rightSchemas, rightSequences, rightRoles, rightPolicies, rightViews, rightMaterializedViews, casing, ); const { version: v1, dialect: d1, ...rest1 } = serialized1; const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { version: '7', dialect: 'postgresql', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { version: '7', dialect: 'postgresql', id: '0', prevId: '0', ...rest2, } as const; const sn1 = squashPgScheme(sch1); const sn2 = squashPgScheme(sch2); const validatedPrev = pgSchema.parse(sch1); const validatedCur = pgSchema.parse(sch2); const renames = new Set(renamesArr); if (!cli) { const { sqlStatements, statements } = await applyPgSnapshotsDiff( sn1, sn2, testSchemasResolver(renames), testEnumsResolver(renames), testSequencesResolver(renames), testPolicyResolver(renames), testIndPolicyResolver(renames), testRolesResolver(renames), testTablesResolver(renames), testColumnsResolver(renames), testViewsResolver(renames), validatedPrev, validatedCur, ); return { sqlStatements, statements }; } else { const { sqlStatements, statements } = await applyPgSnapshotsDiff( sn1, sn2, schemasResolver, enumsResolver, sequencesResolver, policyResolver, indPolicyResolver, roleResolver, tablesResolver, columnsResolver, viewsResolver, validatedPrev, validatedCur, ); return { sqlStatements, statements }; } }; export const diffTestSchemasPushMysql = async ( client: Connection, left: MysqlSchema, right: MysqlSchema, renamesArr: string[], schema: string, cli: boolean = false, casing?: CasingType | undefined, ) => { const { sqlStatements } = await applyMySqlDiffs(left, casing); for (const st of sqlStatements) { await client.query(st); } // do introspect into PgSchemaInternal const introspectedSchema = await fromMySqlDatabase( { query: async (sql: string, params?: any[]) => { const res = await client.execute(sql, params); return res[0] as any; }, }, schema, ); const leftTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; const leftViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[]; const serialized2 = generateMySqlSnapshot(leftTables, leftViews, casing); const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { version: '5', dialect: 'mysql', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { version: '5', dialect: 'mysql', id: '0', prevId: '0', ...rest2, } as const; const sn1 = squashMysqlScheme(sch1); const sn2 = squashMysqlScheme(sch2); const validatedPrev = mysqlSchema.parse(sch1); const validatedCur = mysqlSchema.parse(sch2); const renames = new Set(renamesArr); if (!cli) { const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( sn1, sn2, testTablesResolver(renames), testColumnsResolver(renames), testViewsResolverMySql(renames), validatedPrev, validatedCur, 'push', ); return { sqlStatements, statements }; } else { const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( sn1, sn2, tablesResolver, columnsResolver, mySqlViewsResolver, validatedPrev, validatedCur, 'push', ); return { sqlStatements, statements }; } }; export const applyMySqlDiffs = async ( sn: MysqlSchema, casing: CasingType | undefined, ) => { const dryRun = { version: '5', dialect: 'mysql', id: '0', prevId: '0', views: {}, tables: {}, enums: {}, schemas: {}, _meta: { schemas: {}, tables: {}, columns: {}, }, } as const; const tables = Object.values(sn).filter((it) => is(it, MySqlTable)) as MySqlTable[]; const views = Object.values(sn).filter((it) => is(it, MySqlView)) as MySqlView[]; const serialized1 = generateMySqlSnapshot(tables, views, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; const sch1 = { version: '5', dialect: 'mysql', id: '0', prevId: '0', ...rest1, } as const; const sn1 = squashMysqlScheme(sch1); const validatedPrev = mysqlSchema.parse(dryRun); const validatedCur = mysqlSchema.parse(sch1); const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( dryRun, sn1, testTablesResolver(new Set()), testColumnsResolver(new Set()), testViewsResolverMySql(new Set()), validatedPrev, validatedCur, ); return { sqlStatements, statements }; }; export const diffTestSchemasMysql = async ( left: MysqlSchema, right: MysqlSchema, renamesArr: string[], cli: boolean = false, casing?: CasingType | undefined, ) => { const leftTables = Object.values(left).filter((it) => is(it, MySqlTable)) as MySqlTable[]; const leftViews = Object.values(left).filter((it) => is(it, MySqlView)) as MySqlView[]; const rightTables = Object.values(right).filter((it) => is(it, MySqlTable)) as MySqlTable[]; const rightViews = Object.values(right).filter((it) => is(it, MySqlView)) as MySqlView[]; const serialized1 = generateMySqlSnapshot(leftTables, leftViews, casing); const serialized2 = generateMySqlSnapshot(rightTables, rightViews, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { version: '5', dialect: 'mysql', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { version: '5', dialect: 'mysql', id: '0', prevId: '0', ...rest2, } as const; const sn1 = squashMysqlScheme(sch1); const sn2 = squashMysqlScheme(sch2); const validatedPrev = mysqlSchema.parse(sch1); const validatedCur = mysqlSchema.parse(sch2); const renames = new Set(renamesArr); if (!cli) { const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( sn1, sn2, testTablesResolver(renames), testColumnsResolver(renames), testViewsResolverMySql(renames), validatedPrev, validatedCur, ); return { sqlStatements, statements }; } const { sqlStatements, statements } = await applyMysqlSnapshotsDiff( sn1, sn2, tablesResolver, columnsResolver, mySqlViewsResolver, validatedPrev, validatedCur, ); return { sqlStatements, statements }; }; export const diffTestSchemasSingleStore = async ( left: SinglestoreSchema, right: SinglestoreSchema, renamesArr: string[], cli: boolean = false, casing?: CasingType | undefined, ) => { const leftTables = Object.values(left).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; /* const leftViews = Object.values(left).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ const rightTables = Object.values(right).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; /* const rightViews = Object.values(right).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ const serialized1 = generateSingleStoreSnapshot( leftTables, /* leftViews, */ casing, ); const serialized2 = generateSingleStoreSnapshot( rightTables, /* rightViews, */ casing, ); const { version: v1, dialect: d1, ...rest1 } = serialized1; const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { version: '1', dialect: 'singlestore', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { version: '1', dialect: 'singlestore', id: '0', prevId: '0', ...rest2, } as const; const sn1 = squashSingleStoreScheme(sch1); const sn2 = squashSingleStoreScheme(sch2); const validatedPrev = singlestoreSchema.parse(sch1); const validatedCur = singlestoreSchema.parse(sch2); const renames = new Set(renamesArr); if (!cli) { const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( sn1, sn2, testTablesResolver(renames), testColumnsResolver(renames), /* testViewsResolverSingleStore(renames), */ validatedPrev, validatedCur, ); return { sqlStatements, statements }; } const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( sn1, sn2, tablesResolver, columnsResolver, /* singleStoreViewsResolver, */ validatedPrev, validatedCur, ); return { sqlStatements, statements }; }; export const diffTestSchemasPushSingleStore = async ( client: Connection, left: SinglestoreSchema, right: SinglestoreSchema, renamesArr: string[], schema: string, cli: boolean = false, casing?: CasingType | undefined, sqlStatementsToRun: { before?: string[]; after?: string[]; runApply?: boolean; } = { before: [], after: [], runApply: true, }, ) => { const shouldRunApply = sqlStatementsToRun.runApply === undefined ? true : sqlStatementsToRun.runApply; for (const st of sqlStatementsToRun.before ?? []) { await client.query(st); } if (shouldRunApply) { const res = await applySingleStoreDiffs(left, casing); for (const st of res.sqlStatements) { await client.query(st); } } for (const st of sqlStatementsToRun.after ?? []) { await client.query(st); } // do introspect into PgSchemaInternal const introspectedSchema = await fromSingleStoreDatabase( { query: async (sql: string, params?: any[]) => { const res = await client.execute(sql, params); return res[0] as any; }, }, schema, ); const leftTables = Object.values(right).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; /* const leftViews = Object.values(right).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ const serialized2 = generateSingleStoreSnapshot( leftTables, /* leftViews, */ casing, ); const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { version: '1', dialect: 'singlestore', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { version: '1', dialect: 'singlestore', id: '0', prevId: '0', ...rest2, } as const; const sn1 = squashSingleStoreScheme(sch1); const sn2 = squashSingleStoreScheme(sch2); const validatedPrev = singlestoreSchema.parse(sch1); const validatedCur = singlestoreSchema.parse(sch2); const renames = new Set(renamesArr); if (!cli) { const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( sn1, sn2, testTablesResolver(renames), testColumnsResolver(renames), /* testViewsResolverSingleStore(renames), */ validatedPrev, validatedCur, 'push', ); const { statementsToExecute, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await singleStoreLogSuggestionsAndReturn( { query: async (sql: string, params?: any[]) => { const res = await client.execute(sql, params); return res[0] as T[]; }, }, statements, sn1, sn2, ); return { sqlStatements: statementsToExecute, statements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, }; } else { const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( sn1, sn2, tablesResolver, columnsResolver, /* singleStoreViewsResolver, */ validatedPrev, validatedCur, 'push', ); return { sqlStatements, statements }; } }; export const applySingleStoreDiffs = async ( sn: SinglestoreSchema, casing: CasingType | undefined, ) => { const dryRun = { version: '1', dialect: 'singlestore', id: '0', prevId: '0', tables: {}, views: {}, enums: {}, schemas: {}, _meta: { schemas: {}, tables: {}, columns: {}, }, } as const; const tables = Object.values(sn).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; /* const views = Object.values(sn).filter((it) => is(it, SingleStoreView)) as SingleStoreView[]; */ const serialized1 = generateSingleStoreSnapshot(tables, /* views, */ casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; const sch1 = { version: '1', dialect: 'singlestore', id: '0', prevId: '0', ...rest1, } as const; const sn1 = squashSingleStoreScheme(sch1); const validatedPrev = singlestoreSchema.parse(dryRun); const validatedCur = singlestoreSchema.parse(sch1); const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( dryRun, sn1, testTablesResolver(new Set()), testColumnsResolver(new Set()), /* testViewsResolverSingleStore(new Set()), */ validatedPrev, validatedCur, ); return { sqlStatements, statements }; }; export const diffTestSchemasPushSqlite = async ( client: Database, left: SqliteSchema, right: SqliteSchema, renamesArr: string[], cli: boolean = false, seedStatements: string[] = [], casing?: CasingType | undefined, ) => { const { sqlStatements } = await applySqliteDiffs(left, 'push'); for (const st of sqlStatements) { client.exec(st); } for (const st of seedStatements) { client.exec(st); } // do introspect into PgSchemaInternal const introspectedSchema = await fromSqliteDatabase( { query: async (sql: string, params: any[] = []) => { return client.prepare(sql).bind(params).all() as T[]; }, run: async (query: string) => { client.prepare(query).run(); }, }, undefined, ); const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; const rightViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; const serialized2 = generateSqliteSnapshot(rightTables, rightViews, casing); const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest2, } as const; const sn1 = squashSqliteScheme(sch1, 'push'); const sn2 = squashSqliteScheme(sch2, 'push'); const renames = new Set(renamesArr); if (!cli) { const { sqlStatements, statements, _meta } = await applySqliteSnapshotsDiff( sn1, sn2, testTablesResolver(renames), testColumnsResolver(renames), testViewsResolverSqlite(renames), sch1, sch2, 'push', ); const { statementsToExecute, columnsToRemove, infoToPrint, schemasToRemove, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await logSuggestionsAndReturn( { query: async (sql: string, params: any[] = []) => { return client.prepare(sql).bind(params).all() as T[]; }, run: async (query: string) => { client.prepare(query).run(); }, }, statements, sn1, sn2, _meta!, ); return { sqlStatements: statementsToExecute, statements, columnsToRemove, infoToPrint, schemasToRemove, shouldAskForApprove, tablesToRemove, tablesToTruncate, }; } else { const { sqlStatements, statements } = await applySqliteSnapshotsDiff( sn1, sn2, tablesResolver, columnsResolver, sqliteViewsResolver, sch1, sch2, 'push', ); return { sqlStatements, statements }; } }; export async function diffTestSchemasPushLibSQL( client: Client, left: SqliteSchema, right: SqliteSchema, renamesArr: string[], cli: boolean = false, seedStatements: string[] = [], casing?: CasingType | undefined, ) { const { sqlStatements } = await applyLibSQLDiffs(left, 'push'); for (const st of sqlStatements) { await client.execute(st); } for (const st of seedStatements) { await client.execute(st); } const introspectedSchema = await fromSqliteDatabase( { query: async (sql: string, params?: any[]) => { const res = await client.execute({ sql, args: params || [] }); return res.rows as T[]; }, run: async (query: string) => { await client.execute(query); }, }, undefined, ); const leftTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; const leftViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; const serialized2 = generateSqliteSnapshot(leftTables, leftViews, casing); const { version: v1, dialect: d1, ...rest1 } = introspectedSchema; const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest2, } as const; const sn1 = squashSqliteScheme(sch1, 'push'); const sn2 = squashSqliteScheme(sch2, 'push'); const renames = new Set(renamesArr); if (!cli) { const { sqlStatements, statements, _meta } = await applyLibSQLSnapshotsDiff( sn1, sn2, testTablesResolver(renames), testColumnsResolver(renames), testViewsResolverSqlite(renames), sch1, sch2, 'push', ); const { statementsToExecute, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, } = await libSqlLogSuggestionsAndReturn( { query: async (sql: string, params?: any[]) => { const res = await client.execute({ sql, args: params || [] }); return res.rows as T[]; }, run: async (query: string) => { await client.execute(query); }, }, statements, sn1, sn2, _meta!, ); return { sqlStatements: statementsToExecute, statements, columnsToRemove, infoToPrint, shouldAskForApprove, tablesToRemove, tablesToTruncate, }; } else { const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff( sn1, sn2, tablesResolver, columnsResolver, sqliteViewsResolver, sch1, sch2, 'push', ); return { sqlStatements, statements }; } } export const applySqliteDiffs = async ( sn: SqliteSchema, action?: 'push' | undefined, casing?: CasingType | undefined, ) => { const dryRun = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', tables: {}, enums: {}, views: {}, schemas: {}, _meta: { schemas: {}, tables: {}, columns: {}, }, } as const; const tables = Object.values(sn).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; const views = Object.values(sn).filter((it) => is(it, SQLiteView)) as SQLiteView[]; const serialized1 = generateSqliteSnapshot(tables, views, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; const sch1 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest1, } as const; const sn1 = squashSqliteScheme(sch1, action); const { sqlStatements, statements } = await applySqliteSnapshotsDiff( dryRun, sn1, testTablesResolver(new Set()), testColumnsResolver(new Set()), testViewsResolverSqlite(new Set()), dryRun, sch1, action, ); return { sqlStatements, statements }; }; export const applyLibSQLDiffs = async ( sn: SqliteSchema, action?: 'push' | undefined, casing?: CasingType | undefined, ) => { const dryRun = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', tables: {}, views: {}, enums: {}, schemas: {}, _meta: { schemas: {}, tables: {}, columns: {}, }, } as const; const tables = Object.values(sn).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; const views = Object.values(sn).filter((it) => is(it, SQLiteView)) as SQLiteView[]; const serialized1 = generateSqliteSnapshot(tables, views, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; const sch1 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest1, } as const; const sn1 = squashSqliteScheme(sch1, action); const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff( dryRun, sn1, testTablesResolver(new Set()), testColumnsResolver(new Set()), testViewsResolverSqlite(new Set()), dryRun, sch1, action, ); return { sqlStatements, statements }; }; export const diffTestSchemasSqlite = async ( left: SqliteSchema, right: SqliteSchema, renamesArr: string[], cli: boolean = false, casing?: CasingType | undefined, ) => { const leftTables = Object.values(left).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; const leftViews = Object.values(left).filter((it) => is(it, SQLiteView)) as SQLiteView[]; const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; const rightViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; const serialized1 = generateSqliteSnapshot(leftTables, leftViews, casing); const serialized2 = generateSqliteSnapshot(rightTables, rightViews, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest2, } as const; const sn1 = squashSqliteScheme(sch1); const sn2 = squashSqliteScheme(sch2); const renames = new Set(renamesArr); if (!cli) { const { sqlStatements, statements } = await applySqliteSnapshotsDiff( sn1, sn2, testTablesResolver(renames), testColumnsResolver(renames), testViewsResolverSqlite(renames), sch1, sch2, ); return { sqlStatements, statements }; } const { sqlStatements, statements } = await applySqliteSnapshotsDiff( sn1, sn2, tablesResolver, columnsResolver, sqliteViewsResolver, sch1, sch2, ); return { sqlStatements, statements }; }; export const diffTestSchemasLibSQL = async ( left: SqliteSchema, right: SqliteSchema, renamesArr: string[], cli: boolean = false, casing?: CasingType | undefined, ) => { const leftTables = Object.values(left).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; const leftViews = Object.values(left).filter((it) => is(it, SQLiteView)) as SQLiteView[]; const rightTables = Object.values(right).filter((it) => is(it, SQLiteTable)) as SQLiteTable[]; const rightViews = Object.values(right).filter((it) => is(it, SQLiteView)) as SQLiteView[]; const serialized1 = generateSqliteSnapshot(leftTables, leftViews, casing); const serialized2 = generateSqliteSnapshot(rightTables, rightViews, casing); const { version: v1, dialect: d1, ...rest1 } = serialized1; const { version: v2, dialect: d2, ...rest2 } = serialized2; const sch1 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest1, } as const; const sch2 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest2, } as const; const sn1 = squashSqliteScheme(sch1); const sn2 = squashSqliteScheme(sch2); const renames = new Set(renamesArr); if (!cli) { const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff( sn1, sn2, testTablesResolver(renames), testColumnsResolver(renames), testViewsResolverSqlite(renames), sch1, sch2, ); return { sqlStatements, statements }; } const { sqlStatements, statements } = await applyLibSQLSnapshotsDiff( sn1, sn2, tablesResolver, columnsResolver, sqliteViewsResolver, sch1, sch2, ); return { sqlStatements, statements }; }; // --- Introspect to file helpers --- export const introspectPgToFile = async ( client: PGlite, initSchema: PostgresSchema, testName: string, schemas: string[] = ['public'], entities?: Entities, casing?: CasingType | undefined, ) => { // put in db const { sqlStatements } = await applyPgDiffs(initSchema, casing); for (const st of sqlStatements) { await client.query(st); } // introspect to schema const introspectedSchema = await fromDatabase( { query: async (query: string, values?: any[] | undefined) => { const res = await client.query(query, values); return res.rows as any[]; }, }, undefined, schemas, entities, ); const { version: initV, dialect: initD, ...initRest } = introspectedSchema; const initSch = { version: '7', dialect: 'postgresql', id: '0', prevId: '0', ...initRest, } as const; const initSn = squashPgScheme(initSch); const validatedCur = pgSchema.parse(initSch); // write to ts file const file = schemaToTypeScript(introspectedSchema, 'camel'); fs.writeFileSync(`tests/introspect/postgres/${testName}.ts`, file.file); // generate snapshot from ts file const response = await prepareFromPgImports([ `tests/introspect/postgres/${testName}.ts`, ]); const afterFileImports = generatePgSnapshot( response.tables, response.enums, response.schemas, response.sequences, response.roles, response.policies, response.views, response.matViews, casing, ); const { version: v2, dialect: d2, ...rest2 } = afterFileImports; const sch2 = { version: '7', dialect: 'postgresql', id: '0', prevId: '0', ...rest2, } as const; const sn2AfterIm = squashPgScheme(sch2); const validatedCurAfterImport = pgSchema.parse(sch2); const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements, } = await applyPgSnapshotsDiff( initSn, sn2AfterIm, testSchemasResolver(new Set()), testEnumsResolver(new Set()), testSequencesResolver(new Set()), testPolicyResolver(new Set()), testIndPolicyResolver(new Set()), testRolesResolver(new Set()), testTablesResolver(new Set()), testColumnsResolver(new Set()), testViewsResolver(new Set()), validatedCur, validatedCurAfterImport, ); fs.rmSync(`tests/introspect/postgres/${testName}.ts`); return { sqlStatements: afterFileSqlStatements, statements: afterFileStatements, }; }; export const introspectGelToFile = async ( client: GelClient, testName: string, schemas: string[] = ['public'], entities?: Entities, casing?: CasingType | undefined, ) => { // introspect to schema const introspectedSchema = await fromGelDatabase( { query: async (query: string, values?: any[] | undefined) => { const res = await client.querySQL(query, values); return res as any[]; }, }, undefined, schemas, entities, ); // write to ts file const file = schemaToTypeScriptGel(introspectedSchema, 'camel'); const path = `tests/introspect/gel/${testName}.ts`; fs.writeFileSync(path, file.file); return path; }; export const introspectMySQLToFile = async ( client: Connection, initSchema: MysqlSchema, testName: string, schema: string, casing?: CasingType | undefined, ) => { // put in db const { sqlStatements } = await applyMySqlDiffs(initSchema, casing); for (const st of sqlStatements) { await client.query(st); } // introspect to schema const introspectedSchema = await fromMySqlDatabase( { query: async (sql: string, params?: any[] | undefined) => { const res = await client.execute(sql, params); return res[0] as any; }, }, schema, ); const { version: initV, dialect: initD, ...initRest } = introspectedSchema; const initSch = { version: '5', dialect: 'mysql', id: '0', prevId: '0', ...initRest, } as const; const initSn = squashMysqlScheme(initSch); const validatedCur = mysqlSchema.parse(initSch); const file = schemaToTypeScriptMySQL(introspectedSchema, 'camel'); fs.writeFileSync(`tests/introspect/mysql/${testName}.ts`, file.file); const response = await prepareFromMySqlImports([ `tests/introspect/mysql/${testName}.ts`, ]); const afterFileImports = generateMySqlSnapshot( response.tables, response.views, casing, ); const { version: v2, dialect: d2, ...rest2 } = afterFileImports; const sch2 = { version: '5', dialect: 'mysql', id: '0', prevId: '0', ...rest2, } as const; const sn2AfterIm = squashMysqlScheme(sch2); const validatedCurAfterImport = mysqlSchema.parse(sch2); const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements, } = await applyMysqlSnapshotsDiff( sn2AfterIm, initSn, testTablesResolver(new Set()), testColumnsResolver(new Set()), testViewsResolverMySql(new Set()), validatedCurAfterImport, validatedCur, ); fs.rmSync(`tests/introspect/mysql/${testName}.ts`); return { sqlStatements: afterFileSqlStatements, statements: afterFileStatements, }; }; export const introspectSingleStoreToFile = async ( client: Connection, initSchema: SinglestoreSchema, testName: string, schema: string, casing?: CasingType | undefined, ) => { // put in db const { sqlStatements } = await applySingleStoreDiffs(initSchema, casing); for (const st of sqlStatements) { await client.query(st); } // introspect to schema const introspectedSchema = await fromSingleStoreDatabase( { query: async (sql: string, params?: any[] | undefined) => { const res = await client.execute(sql, params); return res[0] as any; }, }, schema, ); const file = schemaToTypeScriptSingleStore(introspectedSchema, 'camel'); fs.writeFileSync(`tests/introspect/singlestore/${testName}.ts`, file.file); const response = await prepareFromSingleStoreImports([ `tests/introspect/singlestore/${testName}.ts`, ]); const afterFileImports = generateSingleStoreSnapshot( response.tables, /* response.views, */ casing, ); const { version: v2, dialect: d2, ...rest2 } = afterFileImports; const sch2 = { version: '1', dialect: 'singlestore', id: '0', prevId: '0', ...rest2, } as const; const sn2AfterIm = squashSingleStoreScheme(sch2); const validatedCurAfterImport = singlestoreSchema.parse(sch2); const leftTables = Object.values(initSchema).filter((it) => is(it, SingleStoreTable)) as SingleStoreTable[]; const initSnapshot = generateSingleStoreSnapshot( leftTables, /* response.views, */ casing, ); const { version: initV, dialect: initD, ...initRest } = initSnapshot; const initSch = { version: '1', dialect: 'singlestore', id: '0', prevId: '0', ...initRest, } as const; const initSn = squashSingleStoreScheme(initSch); const validatedCur = singlestoreSchema.parse(initSch); const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements, } = await applySingleStoreSnapshotsDiff( sn2AfterIm, initSn, testTablesResolver(new Set()), testColumnsResolver(new Set()), /* testViewsResolverSingleStore(new Set()), */ validatedCurAfterImport, validatedCur, ); fs.rmSync(`tests/introspect/singlestore/${testName}.ts`); return { sqlStatements: afterFileSqlStatements, statements: afterFileStatements, }; }; export const introspectSQLiteToFile = async ( client: Database, initSchema: SqliteSchema, testName: string, casing?: CasingType | undefined, ) => { // put in db const { sqlStatements } = await applySqliteDiffs(initSchema); for (const st of sqlStatements) { client.exec(st); } // introspect to schema const introspectedSchema = await fromSqliteDatabase( { query: async (sql: string, params: any[] = []) => { return client.prepare(sql).bind(params).all() as T[]; }, run: async (query: string) => { client.prepare(query).run(); }, }, undefined, ); const { version: initV, dialect: initD, ...initRest } = introspectedSchema; const initSch = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...initRest, } as const; const initSn = squashSqliteScheme(initSch); const validatedCur = sqliteSchema.parse(initSch); const file = schemaToTypeScriptSQLite(introspectedSchema, 'camel'); fs.writeFileSync(`tests/introspect/sqlite/${testName}.ts`, file.file); const response = await prepareFromSqliteImports([ `tests/introspect/sqlite/${testName}.ts`, ]); const afterFileImports = generateSqliteSnapshot( response.tables, response.views, casing, ); const { version: v2, dialect: d2, ...rest2 } = afterFileImports; const sch2 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest2, } as const; const sn2AfterIm = squashSqliteScheme(sch2); const validatedCurAfterImport = sqliteSchema.parse(sch2); const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements, } = await applySqliteSnapshotsDiff( sn2AfterIm, initSn, testTablesResolver(new Set()), testColumnsResolver(new Set()), testViewsResolverSqlite(new Set()), validatedCurAfterImport, validatedCur, ); fs.rmSync(`tests/introspect/sqlite/${testName}.ts`); return { sqlStatements: afterFileSqlStatements, statements: afterFileStatements, }; }; export const introspectLibSQLToFile = async ( client: Client, initSchema: SqliteSchema, testName: string, casing?: CasingType | undefined, ) => { // put in db const { sqlStatements } = await applyLibSQLDiffs(initSchema); for (const st of sqlStatements) { client.execute(st); } // introspect to schema const introspectedSchema = await fromSqliteDatabase( { query: async (sql: string, params: any[] = []) => { return (await client.execute({ sql, args: params })).rows as T[]; }, run: async (query: string) => { client.execute(query); }, }, undefined, ); const { version: initV, dialect: initD, ...initRest } = introspectedSchema; const initSch = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...initRest, } as const; const initSn = squashSqliteScheme(initSch); const validatedCur = sqliteSchema.parse(initSch); const file = schemaToTypeScriptSQLite(introspectedSchema, 'camel'); fs.writeFileSync(`tests/introspect/libsql/${testName}.ts`, file.file); const response = await prepareFromSqliteImports([ `tests/introspect/libsql/${testName}.ts`, ]); const afterFileImports = generateSqliteSnapshot( response.tables, response.views, casing, ); const { version: v2, dialect: d2, ...rest2 } = afterFileImports; const sch2 = { version: '6', dialect: 'sqlite', id: '0', prevId: '0', ...rest2, } as const; const sn2AfterIm = squashSqliteScheme(sch2); const validatedCurAfterImport = sqliteSchema.parse(sch2); const { sqlStatements: afterFileSqlStatements, statements: afterFileStatements, } = await applyLibSQLSnapshotsDiff( sn2AfterIm, initSn, testTablesResolver(new Set()), testColumnsResolver(new Set()), testViewsResolverSqlite(new Set()), validatedCurAfterImport, validatedCur, ); fs.rmSync(`tests/introspect/libsql/${testName}.ts`); return { sqlStatements: afterFileSqlStatements, statements: afterFileStatements, }; }; ================================================ FILE: drizzle-kit/tests/singlestore-generated.test.ts ================================================ import { SQL, sql } from 'drizzle-orm'; import { int, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import { expect, test } from 'vitest'; import { diffTestSchemasSingleStore } from './schemaDiffer'; test('generated as callback: add column with generated constraint', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { column: { generated: { as: "`users`.`name` || 'hello'", type: 'stored', }, autoincrement: false, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as callback: add generated constraint to an exisiting column as stored', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { mode: 'stored', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'stored', }, columnAutoIncrement: false, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", ]); }); test('generated as callback: add generated constraint to an exisiting column as virtual', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { mode: 'virtual', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", ]); }); test('generated as callback: drop generated constraint as stored', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name} || 'to delete'`, { mode: 'stored' }, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'stored', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', ]); }); test('generated as callback: drop generated constraint as virtual', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name} || 'to delete'`, { mode: 'virtual' }, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'virtual', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); test('generated as callback: change generated constraint type from virtual to stored', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, { mode: 'virtual' }, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'stored', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as callback: change generated constraint type from stored to virtual', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); test('generated as callback: change generated constraint', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); // --- test('generated as sql: add column with generated constraint', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { column: { generated: { as: "`users`.`name` || 'hello'", type: 'stored', }, autoincrement: false, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as sql: add generated constraint to an exisiting column as stored', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, { mode: 'stored', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'stored', }, columnAutoIncrement: false, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", ]); }); test('generated as sql: add generated constraint to an exisiting column as virtual', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(sql`\`users\`.\`name\` || 'to add'`, { mode: 'virtual', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", ]); }); test('generated as sql: drop generated constraint as stored', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'to delete'`, { mode: 'stored' }, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'stored', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', ]); }); test('generated as sql: drop generated constraint as virtual', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'to delete'`, { mode: 'virtual' }, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'virtual', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); test('generated as sql: change generated constraint type from virtual to stored', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\``, { mode: 'virtual' }, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'stored', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as sql: change generated constraint type from stored to virtual', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\``, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); test('generated as sql: change generated constraint', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\``, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`\`users\`.\`name\` || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); // --- test('generated as string: add column with generated constraint', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { column: { generated: { as: "`users`.`name` || 'hello'", type: 'stored', }, autoincrement: false, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, schema: '', tableName: 'users', type: 'alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as string: add generated constraint to an exisiting column as stored', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, { mode: 'stored', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'stored', }, columnAutoIncrement: false, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ "ALTER TABLE `users` MODIFY COLUMN `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') STORED;", ]); }); test('generated as string: add generated constraint to an exisiting column as virtual', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(`\`users\`.\`name\` || 'to add'`, { mode: 'virtual', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'to add'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text NOT NULL GENERATED ALWAYS AS (`users`.`name` || 'to add') VIRTUAL;", ]); }); test('generated as string: drop generated constraint as stored', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'to delete'`, { mode: 'stored' }, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'stored', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` MODIFY COLUMN `gen_name` text;', ]); }); test('generated as string: drop generated constraint as virtual', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'to delete'`, { mode: 'virtual' }, ), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', oldColumn: { autoincrement: false, generated: { as: "`users`.`name` || 'to delete'", type: 'virtual', }, name: 'gen_name', notNull: false, onUpdate: undefined, primaryKey: false, type: 'text', }, tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); test('generated as string: change generated constraint type from virtual to stored', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``, { mode: 'virtual', }), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'stored', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') STORED;", ]); }); test('generated as string: change generated constraint type from stored to virtual', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); test('generated as string: change generated constraint', async () => { const from = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(`\`users\`.\`name\``), }), }; const to = { users: singlestoreTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `\`users\`.\`name\` || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSingleStore( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: "`users`.`name` || 'hello'", type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` drop column `gen_name`;', "ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS (`users`.`name` || 'hello') VIRTUAL;", ]); }); ================================================ FILE: drizzle-kit/tests/singlestore-schemas.test.ts ================================================ import { singlestoreSchema, singlestoreTable } from 'drizzle-orm/singlestore-core'; import { expect, test } from 'vitest'; import { diffTestSchemasSingleStore } from './schemaDiffer'; // We don't manage databases(schemas) in MySQL with Drizzle Kit test('add schema #1', async () => { const to = { devSchema: singlestoreSchema('dev'), }; const { statements } = await diffTestSchemasSingleStore({}, to, []); expect(statements.length).toBe(0); }); test('add schema #2', async () => { const from = { devSchema: singlestoreSchema('dev'), }; const to = { devSchema: singlestoreSchema('dev'), devSchema2: singlestoreSchema('dev2'), }; const { statements } = await diffTestSchemasSingleStore(from, to, []); expect(statements.length).toBe(0); }); test('delete schema #1', async () => { const from = { devSchema: singlestoreSchema('dev'), }; const { statements } = await diffTestSchemasSingleStore(from, {}, []); expect(statements.length).toBe(0); }); test('delete schema #2', async () => { const from = { devSchema: singlestoreSchema('dev'), devSchema2: singlestoreSchema('dev2'), }; const to = { devSchema: singlestoreSchema('dev'), }; const { statements } = await diffTestSchemasSingleStore(from, to, []); expect(statements.length).toBe(0); }); test('rename schema #1', async () => { const from = { devSchema: singlestoreSchema('dev'), }; const to = { devSchema2: singlestoreSchema('dev2'), }; const { statements } = await diffTestSchemasSingleStore(from, to, ['dev->dev2']); expect(statements.length).toBe(0); }); test('rename schema #2', async () => { const from = { devSchema: singlestoreSchema('dev'), devSchema1: singlestoreSchema('dev1'), }; const to = { devSchema: singlestoreSchema('dev'), devSchema2: singlestoreSchema('dev2'), }; const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); test('add table to schema #1', async () => { const dev = singlestoreSchema('dev'); const from = {}; const to = { dev, users: dev.table('users', {}), }; const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); test('add table to schema #2', async () => { const dev = singlestoreSchema('dev'); const from = { dev }; const to = { dev, users: dev.table('users', {}), }; const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); test('add table to schema #3', async () => { const dev = singlestoreSchema('dev'); const from = { dev }; const to = { dev, usersInDev: dev.table('users', {}), users: singlestoreTable('users', {}), }; const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [], uniqueConstraints: [], internals: { tables: {}, indexes: {}, }, compositePkName: '', compositePKs: [], }); }); test('remove table from schema #1', async () => { const dev = singlestoreSchema('dev'); const from = { dev, users: dev.table('users', {}) }; const to = { dev, }; const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); test('remove table from schema #2', async () => { const dev = singlestoreSchema('dev'); const from = { dev, users: dev.table('users', {}) }; const to = {}; const { statements } = await diffTestSchemasSingleStore(from, to, ['dev1->dev2']); expect(statements.length).toBe(0); }); ================================================ FILE: drizzle-kit/tests/singlestore.test.ts ================================================ import { sql } from 'drizzle-orm'; import { index, int, json, primaryKey, serial, singlestoreSchema, singlestoreTable, text, uniqueIndex, } from 'drizzle-orm/singlestore-core'; import { expect, test } from 'vitest'; import { diffTestSchemasSingleStore } from './schemaDiffer'; test('add table #1', async () => { const to = { users: singlestoreTable('users', {}), }; const { statements } = await diffTestSchemasSingleStore({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [], compositePKs: [], internals: { tables: {}, indexes: {}, }, uniqueConstraints: [], compositePkName: '', }); }); test('add table #2', async () => { const to = { users: singlestoreTable('users', { id: serial('id').primaryKey(), }), }; const { statements } = await diffTestSchemasSingleStore({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [ { name: 'id', notNull: true, primaryKey: false, type: 'serial', autoincrement: true, }, ], compositePKs: ['users_id;id'], compositePkName: 'users_id', uniqueConstraints: [], internals: { tables: {}, indexes: {}, }, }); }); test('add table #3', async () => { const to = { users: singlestoreTable( 'users', { id: serial('id'), }, (t) => { return { pk: primaryKey({ name: 'users_pk', columns: [t.id], }), }; }, ), }; const { statements } = await diffTestSchemasSingleStore({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [ { name: 'id', notNull: true, primaryKey: false, type: 'serial', autoincrement: true, }, ], compositePKs: ['users_pk;id'], uniqueConstraints: [], compositePkName: 'users_pk', internals: { tables: {}, indexes: {}, }, }); }); test('add table #4', async () => { const to = { users: singlestoreTable('users', {}), posts: singlestoreTable('posts', {}), }; const { statements } = await diffTestSchemasSingleStore({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [], internals: { tables: {}, indexes: {}, }, compositePKs: [], uniqueConstraints: [], compositePkName: '', }); expect(statements[1]).toStrictEqual({ type: 'create_table', tableName: 'posts', schema: undefined, columns: [], compositePKs: [], internals: { tables: {}, indexes: {}, }, uniqueConstraints: [], compositePkName: '', }); }); test('add table #5', async () => { const schema = singlestoreSchema('folder'); const from = { schema, }; const to = { schema, users: schema.table('users', {}), }; const { statements } = await diffTestSchemasSingleStore(from, to, []); expect(statements.length).toBe(0); }); test('add table #6', async () => { const from = { users1: singlestoreTable('users1', {}), }; const to = { users2: singlestoreTable('users2', {}), }; const { statements } = await diffTestSchemasSingleStore(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users2', schema: undefined, columns: [], internals: { tables: {}, indexes: {}, }, compositePKs: [], uniqueConstraints: [], compositePkName: '', }); expect(statements[1]).toStrictEqual({ policies: [], type: 'drop_table', tableName: 'users1', schema: undefined, }); }); test('add table #7', async () => { const from = { users1: singlestoreTable('users1', {}), }; const to = { users: singlestoreTable('users', {}), users2: singlestoreTable('users2', {}), }; const { statements } = await diffTestSchemasSingleStore(from, to, [ 'public.users1->public.users2', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'rename_table', tableNameFrom: 'users1', tableNameTo: 'users2', fromSchema: undefined, toSchema: undefined, }); expect(statements[1]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [], compositePKs: [], uniqueConstraints: [], internals: { tables: {}, indexes: {}, }, compositePkName: '', }); }); test('add schema + table #1', async () => { const schema = singlestoreSchema('folder'); const to = { schema, users: schema.table('users', {}), }; const { statements } = await diffTestSchemasSingleStore({}, to, []); expect(statements.length).toBe(0); }); test('change schema with tables #1', async () => { const schema = singlestoreSchema('folder'); const schema2 = singlestoreSchema('folder2'); const from = { schema, users: schema.table('users', {}), }; const to = { schema2, users: schema2.table('users', {}), }; const { statements } = await diffTestSchemasSingleStore(from, to, [ 'folder->folder2', ]); expect(statements.length).toBe(0); }); test('change table schema #1', async () => { const schema = singlestoreSchema('folder'); const from = { schema, users: singlestoreTable('users', {}), }; const to = { schema, users: schema.table('users', {}), }; const { statements } = await diffTestSchemasSingleStore(from, to, [ 'public.users->folder.users', ]); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ policies: [], type: 'drop_table', tableName: 'users', schema: undefined, }); }); test('change table schema #2', async () => { const schema = singlestoreSchema('folder'); const from = { schema, users: schema.table('users', {}), }; const to = { schema, users: singlestoreTable('users', {}), }; const { statements } = await diffTestSchemasSingleStore(from, to, [ 'folder.users->public.users', ]); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, columns: [], uniqueConstraints: [], compositePkName: '', compositePKs: [], internals: { tables: {}, indexes: {}, }, }); }); test('change table schema #3', async () => { const schema1 = singlestoreSchema('folder1'); const schema2 = singlestoreSchema('folder2'); const from = { schema1, schema2, users: schema1.table('users', {}), }; const to = { schema1, schema2, users: schema2.table('users', {}), }; const { statements } = await diffTestSchemasSingleStore(from, to, [ 'folder1.users->folder2.users', ]); expect(statements.length).toBe(0); }); test('change table schema #4', async () => { const schema1 = singlestoreSchema('folder1'); const schema2 = singlestoreSchema('folder2'); const from = { schema1, users: schema1.table('users', {}), }; const to = { schema1, schema2, // add schema users: schema2.table('users', {}), // move table }; const { statements } = await diffTestSchemasSingleStore(from, to, [ 'folder1.users->folder2.users', ]); expect(statements.length).toBe(0); }); test('change table schema #5', async () => { const schema1 = singlestoreSchema('folder1'); const schema2 = singlestoreSchema('folder2'); const from = { schema1, // remove schema users: schema1.table('users', {}), }; const to = { schema2, // add schema users: schema2.table('users', {}), // move table }; const { statements } = await diffTestSchemasSingleStore(from, to, [ 'folder1.users->folder2.users', ]); expect(statements.length).toBe(0); }); test('change table schema #5', async () => { const schema1 = singlestoreSchema('folder1'); const schema2 = singlestoreSchema('folder2'); const from = { schema1, schema2, users: schema1.table('users', {}), }; const to = { schema1, schema2, users: schema2.table('users2', {}), // rename and move table }; const { statements } = await diffTestSchemasSingleStore(from, to, [ 'folder1.users->folder2.users2', ]); expect(statements.length).toBe(0); }); test('change table schema #6', async () => { const schema1 = singlestoreSchema('folder1'); const schema2 = singlestoreSchema('folder2'); const from = { schema1, users: schema1.table('users', {}), }; const to = { schema2, // rename schema users: schema2.table('users2', {}), // rename table }; const { statements } = await diffTestSchemasSingleStore(from, to, [ 'folder1->folder2', 'folder2.users->folder2.users2', ]); expect(statements.length).toBe(0); }); test('add table #10', async () => { const to = { users: singlestoreTable('table', { json: json('json').default({}), }), }; const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` json DEFAULT '{}'\n);\n", ); }); test('add table #11', async () => { const to = { users: singlestoreTable('table', { json: json('json').default([]), }), }; const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` json DEFAULT '[]'\n);\n", ); }); test('add table #12', async () => { const to = { users: singlestoreTable('table', { json: json('json').default([1, 2, 3]), }), }; const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` json DEFAULT '[1,2,3]'\n);\n", ); }); test('add table #13', async () => { const to = { users: singlestoreTable('table', { json: json('json').default({ key: 'value' }), }), }; const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( 'CREATE TABLE `table` (\n\t`json` json DEFAULT \'{"key":"value"}\'\n);\n', ); }); test('add table #14', async () => { const to = { users: singlestoreTable('table', { json: json('json').default({ key: 'value', arr: [1, 2, 3], }), }), }; const { sqlStatements } = await diffTestSchemasSingleStore({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( 'CREATE TABLE `table` (\n\t`json` json DEFAULT \'{"key":"value","arr":[1,2,3]}\'\n);\n', ); }); // TODO: add bson type tests // TODO: add blob type tests // TODO: add uuid type tests // TODO: add guid type tests // TODO: add vector type tests // TODO: add geopoint type tests test('drop index', async () => { const from = { users: singlestoreTable( 'table', { name: text('name'), }, (t) => { return { idx: index('name_idx').on(t.name), }; }, ), }; const to = { users: singlestoreTable('table', { name: text('name'), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe('DROP INDEX `name_idx` ON `table`;'); }); test('add table with indexes', async () => { const from = {}; const to = { users: singlestoreTable( 'users', { id: serial('id').primaryKey(), name: text('name'), email: text('email'), }, (t) => ({ uniqueExpr: uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), indexExpr: index('indexExpr').on(sql`(lower(${t.email}))`), indexExprMultiple: index('indexExprMultiple').on( sql`(lower(${t.email}))`, sql`(lower(${t.email}))`, ), uniqueCol: uniqueIndex('uniqueCol').on(t.email), indexCol: index('indexCol').on(t.email), indexColMultiple: index('indexColMultiple').on(t.email, t.email), indexColExpr: index('indexColExpr').on( sql`(lower(${t.email}))`, t.email, ), }), ), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); expect(sqlStatements.length).toBe(6); expect(sqlStatements).toStrictEqual([ `CREATE TABLE \`users\` (\n\t\`id\` serial AUTO_INCREMENT NOT NULL,\n\t\`name\` text,\n\t\`email\` text,\n\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`),\n\tCONSTRAINT \`uniqueExpr\` UNIQUE((lower(\`email\`))),\n\tCONSTRAINT \`uniqueCol\` UNIQUE(\`email\`) ); `, 'CREATE INDEX `indexExpr` ON `users` ((lower(`email`)));', 'CREATE INDEX `indexExprMultiple` ON `users` ((lower(`email`)),(lower(`email`)));', 'CREATE INDEX `indexCol` ON `users` (`email`);', 'CREATE INDEX `indexColMultiple` ON `users` (`email`,`email`);', 'CREATE INDEX `indexColExpr` ON `users` ((lower(`email`)),`email`);', ]); }); test('rename table', async () => { const from = { table: singlestoreTable('table', { json: json('json').default([]), }), }; const to = { table1: singlestoreTable('table1', { json1: json('json').default([]), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table->public.table1`]); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( 'ALTER TABLE `table` RENAME TO `table1`;', ); }); test('rename column', async () => { const from = { users: singlestoreTable('table', { json: json('json').default([]), }), }; const to = { users: singlestoreTable('table', { json1: json('json1').default([]), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table.json->public.table.json1`]); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( 'ALTER TABLE `table` CHANGE `json` `json1`;', ); }); test('change data type', async () => { const from = { table: singlestoreTable('table', { id: int(), age: text(), }), }; const to = { table: singlestoreTable('table', { id: int(), age: int(), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `CREATE TABLE \`__new_table\` ( \t\`id\` int, \t\`age\` int );\n`, ); expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', ); expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', ); expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', ); }); test('drop not null', async () => { const from = { table: singlestoreTable('table', { id: int().notNull(), age: int(), }), }; const to = { table: singlestoreTable('table', { id: int(), age: int(), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `CREATE TABLE \`__new_table\` ( \t\`id\` int, \t\`age\` int );\n`, ); expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', ); expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', ); expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', ); }); test('set not null', async () => { const from = { table: singlestoreTable('table', { id: int(), age: int(), }), }; const to = { table: singlestoreTable('table', { id: int().notNull(), age: int(), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `CREATE TABLE \`__new_table\` ( \t\`id\` int NOT NULL, \t\`age\` int );\n`, ); expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', ); expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', ); expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', ); }); test('set default with not null column', async () => { const from = { table: singlestoreTable('table', { id: int().notNull(), age: int(), }), }; const to = { table: singlestoreTable('table', { id: int().notNull().default(1), age: int(), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `CREATE TABLE \`__new_table\` ( \t\`id\` int NOT NULL DEFAULT 1, \t\`age\` int );\n`, ); expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', ); expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', ); expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', ); }); test('drop default with not null column', async () => { const from = { table: singlestoreTable('table', { id: int().notNull().default(1), age: int(), }), }; const to = { table: singlestoreTable('table', { id: int().notNull(), age: int(), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `CREATE TABLE \`__new_table\` ( \t\`id\` int NOT NULL, \t\`age\` int );\n`, ); expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', ); expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', ); expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', ); }); test('set default', async () => { const from = { table: singlestoreTable('table', { id: int(), age: int(), }), }; const to = { table: singlestoreTable('table', { id: int().default(1), age: int(), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( 'ALTER TABLE `table` MODIFY COLUMN `id` int DEFAULT 1;', ); }); test('drop default', async () => { const from = { table: singlestoreTable('table', { id: int().default(1), age: int(), }), }; const to = { table: singlestoreTable('table', { id: int(), age: int(), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( 'ALTER TABLE `table` MODIFY COLUMN `id` int;', ); }); test('set pk', async () => { const from = { table: singlestoreTable('table', { id: int(), age: int(), }), }; const to = { table: singlestoreTable('table', { id: int().primaryKey(), age: int(), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `CREATE TABLE \`__new_table\` ( \t\`id\` int NOT NULL, \t\`age\` int, \tCONSTRAINT \`table_id\` PRIMARY KEY(\`id\`) );\n`, ); expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', ); expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', ); expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', ); }); test('drop pk', async () => { const from = { table: singlestoreTable('table', { id: int().primaryKey(), age: int(), }), }; const to = { table: singlestoreTable('table', { id: int(), age: int(), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); expect(sqlStatements.length).toBe(4); expect(sqlStatements[0]).toBe( `CREATE TABLE \`__new_table\` ( \t\`id\` int, \t\`age\` int );\n`, ); expect(sqlStatements[1]).toBe( 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', ); expect(sqlStatements[2]).toBe( 'DROP TABLE `table`;', ); expect(sqlStatements[3]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', ); }); test('set not null + rename column on table with indexes', async () => { const from = { table: singlestoreTable('table', { id: int('id').default(1), age: int(), }), }; const to = { table: singlestoreTable('table', { id3: int('id3').notNull().default(1), age: int(), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table.id->public.table.id3`]); expect(sqlStatements.length).toBe(5); expect(sqlStatements[0]).toBe( 'ALTER TABLE \`table\` CHANGE `id` `id3`;', ); expect(sqlStatements[1]).toBe( `CREATE TABLE \`__new_table\` ( \t\`id3\` int NOT NULL DEFAULT 1, \t\`age\` int );\n`, ); expect(sqlStatements[2]).toBe( 'INSERT INTO `__new_table`(`id3`, `age`) SELECT `id3`, `age` FROM `table`;', ); expect(sqlStatements[3]).toBe( 'DROP TABLE `table`;', ); expect(sqlStatements[4]).toBe( 'ALTER TABLE `__new_table` RENAME TO `table`;', ); }); test('set not null + rename table on table with indexes', async () => { const from = { table: singlestoreTable('table', { id: int('id').default(1), age: int(), }), }; const to = { table1: singlestoreTable('table1', { id: int('id').notNull().default(1), age: int(), }), }; const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table->public.table1`]); expect(sqlStatements.length).toBe(5); expect(sqlStatements[0]).toBe( 'ALTER TABLE `table` RENAME TO `table1`;', ); expect(sqlStatements[1]).toBe( `CREATE TABLE \`__new_table1\` ( \t\`id\` int NOT NULL DEFAULT 1, \t\`age\` int );\n`, ); expect(sqlStatements[2]).toBe( 'INSERT INTO `__new_table1`(\`id\`, \`age\`) SELECT \`id\`, \`age\` FROM `table1`;', ); expect(sqlStatements[3]).toBe( 'DROP TABLE `table1`;', ); expect(sqlStatements[4]).toBe( 'ALTER TABLE `__new_table1` RENAME TO `table1`;', ); }); ================================================ FILE: drizzle-kit/tests/sqlite-checks.test.ts ================================================ import { sql } from 'drizzle-orm'; import { check, int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { expect, test } from 'vitest'; import { diffTestSchemasSqlite } from './schemaDiffer'; test('create table with check', async (t) => { const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const { sqlStatements, statements } = await diffTestSchemasSqlite({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [ { name: 'id', type: 'integer', notNull: true, primaryKey: true, autoincrement: false, }, { name: 'age', type: 'integer', notNull: false, primaryKey: false, autoincrement: false, }, ], compositePKs: [], checkConstraints: ['some_check_name;"users"."age" > 21'], referenceData: [], uniqueConstraints: [], }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`age\` integer, \tCONSTRAINT "some_check_name" CHECK("users"."age" > 21) );\n`); }); test('add check contraint to existing table', async (t) => { const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const from = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }), }; const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'age', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: ['some_check_name;"users"."age" > 21'], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`age\` integer, \tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 21) );\n`); expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('drop check contraint to existing table', async (t) => { const from = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }), }; const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'age', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`age\` integer );\n`); expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('rename check constraint', async (t) => { const from = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('new_some_check_name', sql`${table.age} > 21`), })), }; const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'age', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [`new_some_check_name;"users"."age" > 21`], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`age\` integer, \tCONSTRAINT "new_some_check_name" CHECK("__new_users"."age" > 21) );\n`); expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('rename check constraint', async (t) => { const from = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 21`), })), }; const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), }, (table) => ({ checkConstraint: check('some_check_name', sql`${table.age} > 10`), })), }; const { sqlStatements, statements } = await diffTestSchemasSqlite(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'age', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [`some_check_name;"users"."age" > 10`], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe('PRAGMA foreign_keys=OFF;'); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`age\` integer, \tCONSTRAINT "some_check_name" CHECK("__new_users"."age" > 10) );\n`); expect(sqlStatements[2]).toBe(`INSERT INTO \`__new_users\`("id", "age") SELECT "id", "age" FROM \`users\`;`); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('create checks with same names', async (t) => { const to = { users: sqliteTable('users', { id: int('id').primaryKey(), age: int('age'), name: text('name'), }, (table) => ({ checkConstraint1: check('some_check_name', sql`${table.age} > 21`), checkConstraint2: check('some_check_name', sql`${table.name} != 'Alex'`), })), }; await expect(diffTestSchemasSqlite({}, to, [])).rejects.toThrowError(); }); ================================================ FILE: drizzle-kit/tests/sqlite-columns.test.ts ================================================ import { AnySQLiteColumn, foreignKey, index, int, integer, primaryKey, sqliteTable, text, } from 'drizzle-orm/sqlite-core'; import { JsonCreateIndexStatement, JsonRecreateTableStatement } from 'src/jsonStatements'; import { expect, test } from 'vitest'; import { diffTestSchemasSqlite } from './schemaDiffer'; test('create table with id', async (t) => { const schema = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }), }; const { statements } = await diffTestSchemasSqlite({}, schema, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [ { name: 'id', type: 'integer', primaryKey: true, notNull: true, autoincrement: true, }, ], uniqueConstraints: [], referenceData: [], compositePKs: [], checkConstraints: [], }); }); test('add columns #1', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }), }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: undefined, column: { name: 'name', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, }); }); test('add columns #2', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), email: text('email'), }), }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: undefined, column: { name: 'name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, // TODO: add column has autoincrement??? }, }); expect(statements[1]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: undefined, column: { name: 'email', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, }); }); test('add columns #3', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name1: text('name1').default('name'), name2: text('name2').notNull(), name3: text('name3').default('name').notNull(), }), }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); expect(statements.length).toBe(3); expect(statements[0]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: undefined, column: { name: 'name1', type: 'text', primaryKey: false, notNull: false, autoincrement: false, // TODO: add column has autoincrement??? default: "'name'", }, }); expect(statements[1]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: undefined, column: { name: 'name2', type: 'text', primaryKey: false, notNull: true, autoincrement: false, // TODO: add column has autoincrement??? }, }); expect(statements[2]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: undefined, column: { name: 'name3', type: 'text', primaryKey: false, notNull: true, autoincrement: false, // TODO: add column has autoincrement??? default: "'name'", }, }); }); test('add columns #4', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name', { enum: ['one', 'two'] }), }), }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: undefined, column: { name: 'name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, }); }); test('add columns #5', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }), }; const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), }); const schema2 = { users, }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); // TODO: Fix here expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: 'users_report_to_users_id_fk;users;report_to;users;id;no action;no action', column: { name: 'report_to', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }); }); test('add columns #6', async (t) => { const schema1 = { users: sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name'), email: text('email').unique().notNull(), }), }; const schema2 = { users: sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name'), email: text('email').unique().notNull(), password: text('password').notNull(), }), }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: undefined, column: { name: 'password', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, }); }); test('add index #1', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), }), }; const users = sqliteTable( 'users', { id: int('id').primaryKey({ autoIncrement: true }), reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), }, (t) => { return { reporteeIdx: index('reportee_idx').on(t.reporteeId), }; }, ); const schema2 = { users, }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'create_index', tableName: 'users', internal: { indexes: {}, }, schema: '', data: 'reportee_idx;report_to;false;', }); }); test('add foreign key #1', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), reporteeId: int('report_to'), }), }; const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), reporteeId: int('report_to').references((): AnySQLiteColumn => users.id), }); const schema2 = { users, }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual( { type: 'recreate_table', columns: [{ autoincrement: true, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'report_to', notNull: false, primaryKey: false, type: 'integer', }], compositePKs: [], referenceData: [{ columnsFrom: ['report_to'], columnsTo: ['id'], name: 'users_report_to_users_id_fk', tableFrom: 'users', tableTo: 'users', onDelete: 'no action', onUpdate: 'no action', }], tableName: 'users', uniqueConstraints: [], checkConstraints: [], } as JsonRecreateTableStatement, ); }); test('add foreign key #2', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), reporteeId: int('report_to'), }), }; const schema2 = { users: sqliteTable( 'users', { id: int('id').primaryKey({ autoIncrement: true }), reporteeId: int('report_to'), }, (t) => { return { reporteeFk: foreignKey({ columns: [t.reporteeId], foreignColumns: [t.id], name: 'reportee_fk', }), }; }, ), }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', columns: [{ autoincrement: true, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'report_to', notNull: false, primaryKey: false, type: 'integer', }], compositePKs: [], referenceData: [{ columnsFrom: ['report_to'], columnsTo: ['id'], name: 'reportee_fk', tableFrom: 'users', tableTo: 'users', onDelete: 'no action', onUpdate: 'no action', }], tableName: 'users', uniqueConstraints: [], checkConstraints: [], } as JsonRecreateTableStatement); }); test('alter column change name #1', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name1'), }), }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, [ 'public.users.name->public.users.name1', ]); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'alter_table_rename_column', tableName: 'users', schema: '', oldColumnName: 'name', newColumnName: 'name1', }); }); test('alter column change name #2', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name1'), email: text('email'), }), }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, [ 'public.users.name->public.users.name1', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'alter_table_rename_column', tableName: 'users', schema: '', oldColumnName: 'name', newColumnName: 'name1', }); expect(statements[1]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: undefined, column: { name: 'email', notNull: false, primaryKey: false, type: 'text', autoincrement: false, }, }); }); test('alter column change name #3', async (t) => { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), email: text('email'), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name1'), }), }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, [ 'public.users.name->public.users.name1', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'alter_table_rename_column', tableName: 'users', schema: '', oldColumnName: 'name', newColumnName: 'name1', }); expect(statements[1]).toStrictEqual({ type: 'alter_table_drop_column', tableName: 'users', schema: '', columnName: 'email', }); }); test('alter table add composite pk', async (t) => { const schema1 = { table: sqliteTable('table', { id1: integer('id1'), id2: integer('id2'), }), }; const schema2 = { table: sqliteTable( 'table', { id1: integer('id1'), id2: integer('id2'), }, (t) => { return { pk: primaryKey({ columns: [t.id1, t.id2] }), }; }, ), }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', columns: [{ autoincrement: false, generated: undefined, name: 'id1', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'id2', notNull: false, primaryKey: false, type: 'integer', }], compositePKs: [['id1', 'id2']], referenceData: [], tableName: 'table', uniqueConstraints: [], checkConstraints: [], }); }); test('alter column drop not null', async (t) => { const from = { users: sqliteTable('table', { name: text('name').notNull(), }), }; const to = { users: sqliteTable('table', { name: text('name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', columns: [{ autoincrement: false, generated: undefined, name: 'name', notNull: false, primaryKey: false, type: 'text', }], compositePKs: [], referenceData: [], tableName: 'table', uniqueConstraints: [], checkConstraints: [], }); }); test('alter column add not null', async (t) => { const from = { users: sqliteTable('table', { name: text('name'), }), }; const to = { users: sqliteTable('table', { name: text('name').notNull(), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', columns: [{ autoincrement: false, generated: undefined, name: 'name', notNull: true, primaryKey: false, type: 'text', }], compositePKs: [], referenceData: [], tableName: 'table', uniqueConstraints: [], checkConstraints: [], }); }); test('alter column add default', async (t) => { const from = { users: sqliteTable('table', { name: text('name'), }), }; const to = { users: sqliteTable('table', { name: text('name').default('dan'), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', columns: [{ autoincrement: false, generated: undefined, name: 'name', notNull: false, primaryKey: false, type: 'text', default: "'dan'", }], compositePKs: [], referenceData: [], tableName: 'table', uniqueConstraints: [], checkConstraints: [], }); }); test('alter column drop default', async (t) => { const from = { users: sqliteTable('table', { name: text('name').default('dan'), }), }; const to = { users: sqliteTable('table', { name: text('name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', columns: [{ autoincrement: false, generated: undefined, name: 'name', notNull: false, primaryKey: false, type: 'text', }], compositePKs: [], referenceData: [], tableName: 'table', uniqueConstraints: [], checkConstraints: [], }); }); test('alter column add default not null', async (t) => { const from = { users: sqliteTable('table', { name: text('name'), }), }; const to = { users: sqliteTable('table', { name: text('name').notNull().default('dan'), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', columns: [{ autoincrement: false, generated: undefined, name: 'name', notNull: true, primaryKey: false, type: 'text', default: "'dan'", }], compositePKs: [], referenceData: [], tableName: 'table', uniqueConstraints: [], checkConstraints: [], }); }); test('alter column add default not null with indexes', async (t) => { const from = { users: sqliteTable('table', { name: text('name'), }, (table) => ({ someIndex: index('index_name').on(table.name), })), }; const to = { users: sqliteTable('table', { name: text('name').notNull().default('dan'), }, (table) => ({ someIndex: index('index_name').on(table.name), })), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'recreate_table', columns: [{ autoincrement: false, generated: undefined, name: 'name', notNull: true, primaryKey: false, type: 'text', default: "'dan'", }], compositePKs: [], referenceData: [], tableName: 'table', uniqueConstraints: [], checkConstraints: [], }); expect(statements[1]).toStrictEqual({ data: 'index_name;name;false;', schema: '', tableName: 'table', type: 'create_index', internal: undefined, }); expect(sqlStatements.length).toBe(7); expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_table\` ( \t\`name\` text DEFAULT 'dan' NOT NULL );\n`); expect(sqlStatements[2]).toBe( `INSERT INTO \`__new_table\`("name") SELECT "name" FROM \`table\`;`, ); expect(sqlStatements[3]).toBe(`DROP TABLE \`table\`;`); expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_table\` RENAME TO \`table\`;`); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); expect(sqlStatements[6]).toBe(`CREATE INDEX \`index_name\` ON \`table\` (\`name\`);`); }); test('alter column drop default not null', async (t) => { const from = { users: sqliteTable('table', { name: text('name').notNull().default('dan'), }), }; const to = { users: sqliteTable('table', { name: text('name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'recreate_table', columns: [{ autoincrement: false, generated: undefined, name: 'name', notNull: false, primaryKey: false, type: 'text', }], compositePKs: [], referenceData: [], tableName: 'table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_table\` ( \t\`name\` text );\n`); expect(sqlStatements[2]).toBe( `INSERT INTO \`__new_table\`("name") SELECT "name" FROM \`table\`;`, ); expect(sqlStatements[3]).toBe(`DROP TABLE \`table\`;`); expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_table\` RENAME TO \`table\`;`); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('alter column drop generated', async (t) => { const from = { users: sqliteTable('table', { id: int('id').primaryKey().notNull(), name: text('name').generatedAlwaysAs('drizzle is the best').notNull(), }), }; const to = { users: sqliteTable('table', { id: int('id').primaryKey().notNull(), name: text('name').notNull(), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'table', type: 'alter_table_alter_column_drop_generated', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`ALTER TABLE \`table\` DROP COLUMN \`name\`;`); expect(sqlStatements[1]).toBe(`ALTER TABLE \`table\` ADD \`name\` text NOT NULL;`); }); test('recreate table with nested references', async (t) => { let users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), age: integer('age'), }); let subscriptions = sqliteTable('subscriptions', { id: int('id').primaryKey({ autoIncrement: true }), userId: integer('user_id').references(() => users.id), customerId: text('customer_id'), }); const schema1 = { users: users, subscriptions: subscriptions, subscriptionMetadata: sqliteTable('subscriptions_metadata', { id: int('id').primaryKey({ autoIncrement: true }), subscriptionId: text('subscription_id').references(() => subscriptions.id), }), }; users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: false }), name: text('name'), age: integer('age'), }); const schema2 = { users: users, subscriptions: subscriptions, subscriptionMetadata: sqliteTable('subscriptions_metadata', { id: int('id').primaryKey({ autoIncrement: true }), subscriptionId: text('subscription_id').references(() => subscriptions.id), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( schema1, schema2, [], ); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ columns: [ { autoincrement: false, generated: undefined, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, generated: undefined, name: 'name', notNull: false, primaryKey: false, type: 'text', }, { autoincrement: false, generated: undefined, name: 'age', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }); expect(sqlStatements.length).toBe(6); expect(sqlStatements[0]).toBe(`PRAGMA foreign_keys=OFF;`); expect(sqlStatements[1]).toBe(`CREATE TABLE \`__new_users\` ( \t\`id\` integer PRIMARY KEY NOT NULL, \t\`name\` text, \t\`age\` integer );\n`); expect(sqlStatements[2]).toBe( `INSERT INTO \`__new_users\`("id", "name", "age") SELECT "id", "name", "age" FROM \`users\`;`, ); expect(sqlStatements[3]).toBe(`DROP TABLE \`users\`;`); expect(sqlStatements[4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); expect(sqlStatements[5]).toBe(`PRAGMA foreign_keys=ON;`); }); test('text default values escape single quotes', async (t) => { const schema1 = { table: sqliteTable('table', { id: integer('id').primaryKey(), }), }; const schem2 = { table: sqliteTable('table', { id: integer('id').primaryKey(), text: text('text').default("escape's quotes"), }), }; const { sqlStatements } = await diffTestSchemasSqlite(schema1, schem2, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toStrictEqual( "ALTER TABLE `table` ADD `text` text DEFAULT 'escape''s quotes';", ); }); ================================================ FILE: drizzle-kit/tests/sqlite-generated.test.ts ================================================ // 1. add stored column to existing table - not supported + // 2. add virtual column to existing table - supported + // 3. create table with stored/virtual columns(pg, mysql, sqlite) // 4. add stored generated to column -> not supported + // 5. add virtual generated to column -> supported with drop+add column + // 6. drop stored/virtual expression -> supported with drop+add column // 7. alter generated expession -> stored not supported, virtual supported import { SQL, sql } from 'drizzle-orm'; import { int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { expect, test } from 'vitest'; import { diffTestSchemasSqlite } from './schemaDiffer'; // should generate 0 statements + warning/error in console test('generated as callback: add column with stored generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as callback: add column with virtual generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { column: { generated: { as: '("name" || \'hello\')', type: 'virtual', }, autoincrement: false, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, referenceData: undefined, tableName: 'users', type: 'sqlite_alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', ]); }); test('generated as callback: add generated constraint to an exisiting column as stored', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { mode: 'stored', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as callback: add generated constraint to an exisiting column as virtual', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs((): SQL => sql`${from.users.name} || 'to add'`, { mode: 'virtual', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: '("name" || \'to add\')', type: 'virtual', }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'to add\') VIRTUAL NOT NULL;', ]); }); test('generated as callback: drop generated constraint as stored', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name} || 'to delete'`, { mode: 'stored' }, ), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); test('generated as callback: drop generated constraint as virtual', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name} || 'to delete'`, { mode: 'virtual' }, ), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); // no way to do it test('generated as callback: change generated constraint type from virtual to stored', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, { mode: 'virtual' }, ), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as callback: change generated constraint type from stored to virtual', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, { mode: 'stored' }, ), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: '("name" || \'hello\')', type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', ]); }); // not supported test('generated as callback: change stored generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, { mode: 'stored' }, ), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as callback: change virtual generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${from.users.name}`, ), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: '("name" || \'hello\')', type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL;', ]); }); test('generated as callback: add table with column with stored generated constraint', async () => { const from = {}; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columns: [ { autoincrement: false, name: 'id', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'id2', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, primaryKey: false, type: 'text', }, { autoincrement: false, generated: { as: '("name" || \'hello\')', type: 'stored', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'sqlite_create_table', uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') STORED\n);\n', ]); }); test('generated as callback: add table with column with virtual generated constraint', async () => { const from = {}; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( (): SQL => sql`${to.users.name} || 'hello'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columns: [ { autoincrement: false, name: 'id', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'id2', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, primaryKey: false, type: 'text', }, { autoincrement: false, generated: { as: '("name" || \'hello\')', type: 'virtual', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'sqlite_create_table', uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("name" || \'hello\') VIRTUAL\n);\n', ]); }); // --- test('generated as sql: add column with stored generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`"users"."name" || \'hello\' || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as sql: add column with virtual generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`"users"."name" || \'hello\'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { column: { generated: { as: '("users"."name" || \'hello\')', type: 'virtual', }, autoincrement: false, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, referenceData: undefined, tableName: 'users', type: 'sqlite_alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', ]); }); test('generated as sql: add generated constraint to an exisiting column as stored', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(sql`"users"."name" || 'to add'`, { mode: 'stored', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as sql: add generated constraint to an exisiting column as virtual', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(sql`"users"."name" || 'to add'`, { mode: 'virtual', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: '("users"."name" || \'to add\')', type: 'virtual', }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'to add\') VIRTUAL NOT NULL;', ]); }); test('generated as sql: drop generated constraint as stored', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`"users"."name" || 'to delete'`, { mode: 'stored' }, ), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); test('generated as sql: drop generated constraint as virtual', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`"users"."name" || 'to delete'`, { mode: 'virtual' }, ), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); // no way to do it test('generated as sql: change generated constraint type from virtual to stored', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`, { mode: 'virtual', }), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`"users"."name" || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as sql: change generated constraint type from stored to virtual', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`, { mode: 'stored', }), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`"users"."name" || 'hello'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: '("users"."name" || \'hello\')', type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', ]); }); // not supported test('generated as sql: change stored generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`, { mode: 'stored', }), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`"users"."name" || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as sql: change virtual generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(sql`"users"."name"`), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`"users"."name" || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: '("users"."name" || \'hello\')', type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', ]); }); test('generated as sql: add table with column with stored generated constraint', async () => { const from = {}; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`"users"."name" || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columns: [ { autoincrement: false, name: 'id', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'id2', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, primaryKey: false, type: 'text', }, { autoincrement: false, generated: { as: '("users"."name" || \'hello\')', type: 'stored', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'sqlite_create_table', uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED\n);\n', ]); }); test('generated as sql: add table with column with virtual generated constraint', async () => { const from = {}; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( sql`"users"."name" || 'hello'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columns: [ { autoincrement: false, name: 'id', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'id2', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, primaryKey: false, type: 'text', }, { autoincrement: false, generated: { as: '("users"."name" || \'hello\')', type: 'virtual', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'sqlite_create_table', uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL\n);\n', ]); }); // --- test('generated as string: add column with stored generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `"users"."name" || \'hello\'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as string: add column with virtual generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `"users"."name" || \'hello\'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { column: { generated: { as: '("users"."name" || \'hello\')', type: 'virtual', }, autoincrement: false, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, referenceData: undefined, tableName: 'users', type: 'sqlite_alter_table_add_column', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', ]); }); test('generated as string: add generated constraint to an exisiting column as stored', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(`"users"."name" || 'to add'`, { mode: 'stored', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as string: add generated constraint to an exisiting column as virtual', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').notNull(), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name') .notNull() .generatedAlwaysAs(`"users"."name" || 'to add'`, { mode: 'virtual', }), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: '("users"."name" || \'to add\')', type: 'virtual', }, columnName: 'gen_name', columnNotNull: true, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_set_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'to add\') VIRTUAL NOT NULL;', ]); }); test('generated as string: drop generated constraint as stored', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `"users"."name" || 'to delete'`, { mode: 'stored' }, ), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); test('generated as string: drop generated constraint as virtual', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `"users"."name" || 'to delete'`, { mode: 'virtual' }, ), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName1: text('gen_name'), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: undefined, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_drop_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text;', ]); }); // no way to do it test('generated as string: change generated constraint type from virtual to stored', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`, { mode: 'virtual', }), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `"users"."name" || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as string: change generated constraint type from stored to virtual', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`, { mode: 'stored', }), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `"users"."name" || 'hello'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: '("users"."name" || \'hello\')', type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', ]); }); // not supported test('generated as string: change stored generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`, { mode: 'stored', }), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `"users"."name" || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([]); expect(sqlStatements).toStrictEqual([]); }); test('generated as string: change virtual generated constraint', async () => { const from = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs(`"users"."name"`), }), }; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `"users"."name" || 'hello'`, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columnAutoIncrement: false, columnDefault: undefined, columnGenerated: { as: '("users"."name" || \'hello\')', type: 'virtual', }, columnName: 'gen_name', columnNotNull: false, columnOnUpdate: undefined, columnPk: false, newDataType: 'text', schema: '', tableName: 'users', type: 'alter_table_alter_column_alter_generated', }, ]); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `users` DROP COLUMN `gen_name`;', 'ALTER TABLE `users` ADD `gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL;', ]); }); test('generated as string: add table with column with stored generated constraint', async () => { const from = {}; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `"users"."name" || 'hello'`, { mode: 'stored' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columns: [ { autoincrement: false, name: 'id', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'id2', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, primaryKey: false, type: 'text', }, { autoincrement: false, generated: { as: '("users"."name" || \'hello\')', type: 'stored', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'sqlite_create_table', uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') STORED\n);\n', ]); }); test('generated as string: add table with column with virtual generated constraint', async () => { const from = {}; const to = { users: sqliteTable('users', { id: int('id'), id2: int('id2'), name: text('name'), generatedName: text('gen_name').generatedAlwaysAs( `"users"."name" || 'hello'`, { mode: 'virtual' }, ), }), }; const { statements, sqlStatements } = await diffTestSchemasSqlite( from, to, [], ); expect(statements).toStrictEqual([ { columns: [ { autoincrement: false, name: 'id', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'id2', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'name', notNull: false, primaryKey: false, type: 'text', }, { autoincrement: false, generated: { as: '("users"."name" || \'hello\')', type: 'virtual', }, name: 'gen_name', notNull: false, primaryKey: false, type: 'text', }, ], compositePKs: [], referenceData: [], tableName: 'users', type: 'sqlite_create_table', uniqueConstraints: [], checkConstraints: [], }, ]); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `users` (\n\t`id` integer,\n\t`id2` integer,\n\t`name` text,\n\t`gen_name` text GENERATED ALWAYS AS ("users"."name" || \'hello\') VIRTUAL\n);\n', ]); }); ================================================ FILE: drizzle-kit/tests/sqlite-tables.test.ts ================================================ import { sql } from 'drizzle-orm'; import { AnySQLiteColumn, foreignKey, index, int, primaryKey, sqliteTable, text, unique, uniqueIndex, } from 'drizzle-orm/sqlite-core'; import { expect, test } from 'vitest'; import { diffTestSchemasSqlite } from './schemaDiffer'; test('add table #1', async () => { const to = { users: sqliteTable('users', {}), }; const { statements } = await diffTestSchemasSqlite({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [], compositePKs: [], uniqueConstraints: [], referenceData: [], checkConstraints: [], }); }); test('add table #2', async () => { const to = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }), }; const { statements } = await diffTestSchemasSqlite({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [ { name: 'id', notNull: true, primaryKey: true, type: 'integer', autoincrement: true, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }); }); test('add table #3', async () => { const to = { users: sqliteTable( 'users', { id: int('id'), }, (t) => { return { pk: primaryKey({ name: 'users_pk', columns: [t.id], }), }; }, ), }; const { statements } = await diffTestSchemasSqlite({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [ { name: 'id', notNull: false, primaryKey: true, type: 'integer', autoincrement: false, }, ], compositePKs: [], uniqueConstraints: [], referenceData: [], checkConstraints: [], }); }); test('add table #4', async () => { const to = { users: sqliteTable('users', {}), posts: sqliteTable('posts', {}), }; const { statements } = await diffTestSchemasSqlite({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [], compositePKs: [], uniqueConstraints: [], referenceData: [], checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'posts', columns: [], compositePKs: [], uniqueConstraints: [], referenceData: [], checkConstraints: [], }); }); test('add table #5', async () => { // no schemas in sqlite }); test('add table #6', async () => { const from = { users1: sqliteTable('users1', {}), }; const to = { users2: sqliteTable('users2', {}), }; const { statements } = await diffTestSchemasSqlite(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users2', columns: [], compositePKs: [], uniqueConstraints: [], referenceData: [], checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'drop_table', tableName: 'users1', schema: undefined, policies: [], }); }); test('add table #7', async () => { const from = { users1: sqliteTable('users1', {}), }; const to = { users: sqliteTable('users', {}), users2: sqliteTable('users2', {}), }; const { statements } = await diffTestSchemasSqlite(from, to, [ 'public.users1->public.users2', ]); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'rename_table', tableNameFrom: 'users1', tableNameTo: 'users2', fromSchema: undefined, toSchema: undefined, }); expect(statements[1]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [], compositePKs: [], uniqueConstraints: [], referenceData: [], checkConstraints: [], }); }); test('add table #8', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), reporteeId: int('reportee_id').references((): AnySQLiteColumn => users.id), }); const to = { users, }; const { statements } = await diffTestSchemasSqlite({}, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [ { autoincrement: true, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'reportee_id', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], uniqueConstraints: [], checkConstraints: [], referenceData: [ { columnsFrom: ['reportee_id'], columnsTo: ['id'], name: 'users_reportee_id_users_id_fk', onDelete: 'no action', onUpdate: 'no action', tableFrom: 'users', tableTo: 'users', }, ], }); }); test('add table #9', async () => { const to = { users: sqliteTable( 'users', { id: int('id').primaryKey({ autoIncrement: true }), reporteeId: int('reportee_id'), }, (t) => { return { reporteeIdx: index('reportee_idx').on(t.reporteeId), }; }, ), }; const { statements } = await diffTestSchemasSqlite({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [ { autoincrement: true, name: 'id', notNull: true, primaryKey: true, type: 'integer', }, { autoincrement: false, name: 'reportee_id', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], uniqueConstraints: [], referenceData: [], checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'create_index', tableName: 'users', internal: { indexes: {}, }, schema: undefined, data: 'reportee_idx;reportee_id;false;', }); }); test('add table #10', async () => { const to = { users: sqliteTable('table', { json: text('json', { mode: 'json' }).default({}), }), }; const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` text DEFAULT '{}'\n);\n", ); }); test('add table #11', async () => { const to = { users: sqliteTable('table', { json: text('json', { mode: 'json' }).default([]), }), }; const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` text DEFAULT '[]'\n);\n", ); }); test('add table #12', async () => { const to = { users: sqliteTable('table', { json: text('json', { mode: 'json' }).default([1, 2, 3]), }), }; const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( "CREATE TABLE `table` (\n\t`json` text DEFAULT '[1,2,3]'\n);\n", ); }); test('add table #13', async () => { const to = { users: sqliteTable('table', { json: text('json', { mode: 'json' }).default({ key: 'value' }), }), }; const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( 'CREATE TABLE `table` (\n\t`json` text DEFAULT \'{"key":"value"}\'\n);\n', ); }); test('add table #14', async () => { const to = { users: sqliteTable('table', { json: text('json', { mode: 'json' }).default({ key: 'value', arr: [1, 2, 3], }), }), }; const { sqlStatements } = await diffTestSchemasSqlite({}, to, []); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( 'CREATE TABLE `table` (\n\t`json` text DEFAULT \'{"key":"value","arr":[1,2,3]}\'\n);\n', ); }); test('add table with indexes', async () => { const from = {}; const to = { users: sqliteTable( 'users', { id: int('id').primaryKey(), name: text('name'), email: text('email'), }, (t) => ({ uniqueExpr: uniqueIndex('uniqueExpr').on(sql`(lower(${t.email}))`), indexExpr: index('indexExpr').on(sql`(lower(${t.email}))`), indexExprMultiple: index('indexExprMultiple').on( sql`(lower(${t.email}))`, sql`(lower(${t.email}))`, ), uniqueCol: uniqueIndex('uniqueCol').on(t.email), indexCol: index('indexCol').on(t.email), indexColMultiple: index('indexColMultiple').on(t.email, t.email), indexColExpr: index('indexColExpr').on( sql`(lower(${t.email}))`, t.email, ), }), ), }; const { sqlStatements } = await diffTestSchemasSqlite(from, to, []); expect(sqlStatements.length).toBe(8); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `users` (\n\t`id` integer PRIMARY KEY NOT NULL,\n\t`name` text,\n\t`email` text\n);\n', 'CREATE UNIQUE INDEX `uniqueExpr` ON `users` ((lower("email")));', 'CREATE INDEX `indexExpr` ON `users` ((lower("email")));', 'CREATE INDEX `indexExprMultiple` ON `users` ((lower("email")),(lower("email")));', 'CREATE UNIQUE INDEX `uniqueCol` ON `users` (`email`);', 'CREATE INDEX `indexCol` ON `users` (`email`);', 'CREATE INDEX `indexColMultiple` ON `users` (`email`,`email`);', 'CREATE INDEX `indexColExpr` ON `users` ((lower("email")),`email`);', ]); }); test('composite primary key', async () => { const from = {}; const to = { table: sqliteTable('works_to_creators', { workId: int('work_id').notNull(), creatorId: int('creator_id').notNull(), classification: text('classification').notNull(), }, (t) => ({ pk: primaryKey({ columns: [t.workId, t.creatorId, t.classification], }), })), }; const { sqlStatements } = await diffTestSchemasSqlite(from, to, []); expect(sqlStatements).toStrictEqual([ 'CREATE TABLE `works_to_creators` (\n\t`work_id` integer NOT NULL,\n\t`creator_id` integer NOT NULL,\n\t`classification` text NOT NULL,\n\tPRIMARY KEY(`work_id`, `creator_id`, `classification`)\n);\n', ]); }); test('add column before creating unique constraint', async () => { const from = { table: sqliteTable('table', { id: int('id').primaryKey(), }), }; const to = { table: sqliteTable('table', { id: int('id').primaryKey(), name: text('name').notNull(), }, (t) => ({ uq: unique('uq').on(t.name), })), }; const { sqlStatements } = await diffTestSchemasSqlite(from, to, []); expect(sqlStatements).toStrictEqual([ 'ALTER TABLE `table` ADD `name` text NOT NULL;', 'CREATE UNIQUE INDEX `uq` ON `table` (`name`);', ]); }); test('optional db aliases (snake case)', async () => { const from = {}; const t1 = sqliteTable( 't1', { t1Id1: int().notNull().primaryKey(), t1Col2: int().notNull(), t1Col3: int().notNull(), t2Ref: int().notNull().references(() => t2.t2Id), t1Uni: int().notNull(), t1UniIdx: int().notNull(), t1Idx: int().notNull(), }, (table) => ({ uni: unique('t1_uni').on(table.t1Uni), uniIdx: uniqueIndex('t1_uni_idx').on(table.t1UniIdx), idx: index('t1_idx').on(table.t1Idx), fk: foreignKey({ columns: [table.t1Col2, table.t1Col3], foreignColumns: [t3.t3Id1, t3.t3Id2], }), }), ); const t2 = sqliteTable( 't2', { t2Id: int().primaryKey({ autoIncrement: true }), }, ); const t3 = sqliteTable( 't3', { t3Id1: int(), t3Id2: int(), }, (table) => ({ pk: primaryKey({ columns: [table.t3Id1, table.t3Id2], }), }), ); const to = { t1, t2, t3, }; const { sqlStatements } = await diffTestSchemasSqlite(from, to, [], false, 'snake_case'); const st1 = `CREATE TABLE \`t1\` ( \`t1_id1\` integer PRIMARY KEY NOT NULL, \`t1_col2\` integer NOT NULL, \`t1_col3\` integer NOT NULL, \`t2_ref\` integer NOT NULL, \`t1_uni\` integer NOT NULL, \`t1_uni_idx\` integer NOT NULL, \`t1_idx\` integer NOT NULL, FOREIGN KEY (\`t2_ref\`) REFERENCES \`t2\`(\`t2_id\`) ON UPDATE no action ON DELETE no action, FOREIGN KEY (\`t1_col2\`,\`t1_col3\`) REFERENCES \`t3\`(\`t3_id1\`,\`t3_id2\`) ON UPDATE no action ON DELETE no action ); `; const st2 = `CREATE UNIQUE INDEX \`t1_uni_idx\` ON \`t1\` (\`t1_uni_idx\`);`; const st3 = `CREATE INDEX \`t1_idx\` ON \`t1\` (\`t1_idx\`);`; const st4 = `CREATE UNIQUE INDEX \`t1_uni\` ON \`t1\` (\`t1_uni\`);`; const st5 = `CREATE TABLE \`t2\` ( \`t2_id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL ); `; const st6 = `CREATE TABLE \`t3\` ( \`t3_id1\` integer, \`t3_id2\` integer, PRIMARY KEY(\`t3_id1\`, \`t3_id2\`) ); `; expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]); }); test('optional db aliases (camel case)', async () => { const from = {}; const t1 = sqliteTable( 't1', { t1_id1: int().notNull().primaryKey(), t1_col2: int().notNull(), t1_col3: int().notNull(), t2_ref: int().notNull().references(() => t2.t2_id), t1_uni: int().notNull(), t1_uni_idx: int().notNull(), t1_idx: int().notNull(), }, (table) => ({ uni: unique('t1Uni').on(table.t1_uni), uni_idx: uniqueIndex('t1UniIdx').on(table.t1_uni_idx), idx: index('t1Idx').on(table.t1_idx), fk: foreignKey({ columns: [table.t1_col2, table.t1_col3], foreignColumns: [t3.t3_id1, t3.t3_id2], }), }), ); const t2 = sqliteTable( 't2', { t2_id: int().primaryKey({ autoIncrement: true }), }, ); const t3 = sqliteTable( 't3', { t3_id1: int(), t3_id2: int(), }, (table) => ({ pk: primaryKey({ columns: [table.t3_id1, table.t3_id2], }), }), ); const to = { t1, t2, t3, }; const { sqlStatements } = await diffTestSchemasSqlite(from, to, [], false, 'camelCase'); const st1 = `CREATE TABLE \`t1\` ( \`t1Id1\` integer PRIMARY KEY NOT NULL, \`t1Col2\` integer NOT NULL, \`t1Col3\` integer NOT NULL, \`t2Ref\` integer NOT NULL, \`t1Uni\` integer NOT NULL, \`t1UniIdx\` integer NOT NULL, \`t1Idx\` integer NOT NULL, FOREIGN KEY (\`t2Ref\`) REFERENCES \`t2\`(\`t2Id\`) ON UPDATE no action ON DELETE no action, FOREIGN KEY (\`t1Col2\`,\`t1Col3\`) REFERENCES \`t3\`(\`t3Id1\`,\`t3Id2\`) ON UPDATE no action ON DELETE no action ); `; const st2 = `CREATE UNIQUE INDEX \`t1UniIdx\` ON \`t1\` (\`t1UniIdx\`);`; const st3 = `CREATE INDEX \`t1Idx\` ON \`t1\` (\`t1Idx\`);`; const st4 = `CREATE UNIQUE INDEX \`t1Uni\` ON \`t1\` (\`t1Uni\`);`; const st5 = `CREATE TABLE \`t2\` ( \`t2Id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL ); `; const st6 = `CREATE TABLE \`t3\` ( \`t3Id1\` integer, \`t3Id2\` integer, PRIMARY KEY(\`t3Id1\`, \`t3Id2\`) ); `; expect(sqlStatements).toStrictEqual([st1, st2, st3, st4, st5, st6]); }); ================================================ FILE: drizzle-kit/tests/sqlite-views.test.ts ================================================ import { sql } from 'drizzle-orm'; import { int, sqliteTable, sqliteView } from 'drizzle-orm/sqlite-core'; import { expect, test } from 'vitest'; import { diffTestSchemasSqlite } from './schemaDiffer'; test('create view', async () => { const users = sqliteTable('users', { id: int('id').default(1) }); const view = sqliteView('view').as((qb) => qb.select().from(users)); const to = { users: users, testView: view, }; const { statements, sqlStatements } = await diffTestSchemasSqlite({}, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ type: 'sqlite_create_table', tableName: 'users', columns: [{ autoincrement: false, default: 1, name: 'id', type: 'integer', primaryKey: false, notNull: false, }], compositePKs: [], uniqueConstraints: [], referenceData: [], checkConstraints: [], }); expect(statements[1]).toStrictEqual({ type: 'sqlite_create_view', name: 'view', definition: 'select "id" from "users"', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`CREATE TABLE \`users\` ( \t\`id\` integer DEFAULT 1 );\n`); expect(sqlStatements[1]).toBe(`CREATE VIEW \`view\` AS select "id" from "users";`); }); test('drop view', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), }; const to = { users, }; const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ name: 'view', type: 'drop_view', }); expect(sqlStatements.length).toBe(1); expect(sqlStatements[0]).toBe( `DROP VIEW \`view\`;`, ); }); test('alter view', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), }; const to = { users, testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users WHERE users.id = 1`), }; const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, []); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ name: 'view', type: 'drop_view', }); expect(statements[1]).toStrictEqual({ name: 'view', type: 'sqlite_create_view', definition: 'SELECT * FROM users WHERE users.id = 1', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe( `DROP VIEW \`view\`;`, ); expect(sqlStatements[1]).toBe( `CREATE VIEW \`view\` AS SELECT * FROM users WHERE users.id = 1;`, ); }); test('create view with existing flag', async () => { const view = sqliteView('view', {}).existing(); const to = { testView: view, }; const { statements, sqlStatements } = await diffTestSchemasSqlite({}, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('drop view with existing flag', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).existing(), }; const to = { users, }; const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, []); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('rename view with existing flag', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).existing(), }; const to = { users, testView: sqliteView('new_view', { id: int('id') }).existing(), }; const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, ['view->new_view']); expect(statements.length).toBe(0); expect(sqlStatements.length).toBe(0); }); test('rename view and drop existing flag', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).existing(), }; const to = { users, testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users`), }; const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, ['view->new_view']); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ name: 'view', type: 'drop_view', }); expect(statements[1]).toStrictEqual({ type: 'sqlite_create_view', name: 'new_view', definition: 'SELECT * FROM users', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe(`DROP VIEW \`view\`;`); expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users;`); }); test('rename view and alter ".as"', async () => { const users = sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }); const from = { users: users, testView: sqliteView('view', { id: int('id') }).as(sql`SELECT * FROM users`), }; const to = { users, testView: sqliteView('new_view', { id: int('id') }).as(sql`SELECT * FROM users WHERE 1=1`), }; const { statements, sqlStatements } = await diffTestSchemasSqlite(from, to, ['view->new_view']); expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ name: 'view', type: 'drop_view', }); expect(statements[1]).toStrictEqual({ type: 'sqlite_create_view', name: 'new_view', definition: 'SELECT * FROM users WHERE 1=1', }); expect(sqlStatements.length).toBe(2); expect(sqlStatements[0]).toBe('DROP VIEW `view`;'); expect(sqlStatements[1]).toBe(`CREATE VIEW \`new_view\` AS SELECT * FROM users WHERE 1=1;`); }); ================================================ FILE: drizzle-kit/tests/statements-combiner/libsql-statements-combiner.test.ts ================================================ import { JsonAddColumnStatement, JsonSqliteAddColumnStatement, JsonStatement } from 'src/jsonStatements'; import { SQLiteSchemaSquashed } from 'src/serializer/sqliteSchema'; import { SQLiteAlterTableAddColumnConvertor } from 'src/sqlgenerator'; import { libSQLCombineStatements } from 'src/statementCombiner'; import { expect, test } from 'vitest'; /** * ! before: * * user: { * id INT; * first_name INT; * iq INT; * PRIMARY KEY (id, iq) * INDEXES: { * UNIQUE id; * } * } * * ! after: * * new_user: { * id INT; * first_name INT; * iq INT; * PRIMARY KEY (id, iq) * INDEXES: {} * } * * rename table and drop unique index * expect to get "rename_table" statement and then "recreate_table" */ test(`rename table and drop index`, async (t) => { const statements: JsonStatement[] = [ { type: 'rename_table', fromSchema: '', toSchema: '', tableNameFrom: 'user', tableNameTo: 'new_user', }, { type: 'drop_index', tableName: 'new_user', data: 'user_first_name_unique;first_name;true;', schema: '', }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { user: { name: 'user', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, first_name: { name: 'first_name', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, iq: { name: 'iq', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, }, indexes: { user_first_name_unique: 'user_first_name_unique;first_name;true;', }, foreignKeys: {}, compositePrimaryKeys: { user_id_iq_pk: 'id,iq', }, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { new_user: { name: 'new_user', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, first_name: { name: 'first_name', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, iq: { name: 'iq', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: { new_user_id_iq_pk: 'id,iq', }, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements = [ { type: 'rename_table', fromSchema: '', toSchema: '', tableNameFrom: 'user', tableNameTo: 'new_user', }, { type: 'drop_index', tableName: 'new_user', data: 'user_first_name_unique;first_name;true;', schema: '', }, ]; expect(libSQLCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); /** * ! before: * * autoincrement1: { * id INT PRIMARY KEY; * } * * autoincrement2: { * id INT PRIMARY KEY AUTOINCREMENT; * } * * dropNotNull: { * id INT NOT NULL; * } * * ! after: * * autoincrement1: { * id INT PRIMARY KEY AUTOINCREMENT; * } * * autoincrement2: { * id INT PRI { const statements: JsonStatement[] = [ { type: 'alter_table_alter_column_set_autoincrement', tableName: 'autoincrement1', columnName: 'id', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: true, columnPk: true, } as unknown as JsonStatement, { type: 'alter_table_alter_column_drop_autoincrement', tableName: 'autoincrement2', columnName: 'id', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: false, columnPk: true, } as unknown as JsonStatement, { type: 'alter_table_alter_column_drop_notnull', tableName: 'dropNotNull', columnName: 'id', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, } as unknown as JsonStatement, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { autoincrement1: { name: 'autoincrement1', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, autoincrement2: { name: 'autoincrement2', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: false, autoincrement: true, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, dropNotNull: { name: 'dropNotNull', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { autoincrement1: { name: 'autoincrement1', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: true, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, autoincrement2: { name: 'autoincrement2', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, dropNotNull: { name: 'dropNotNull', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements = [ { type: 'recreate_table', tableName: 'autoincrement1', columns: [ { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: true, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }, { type: 'recreate_table', tableName: 'autoincrement2', columns: [ { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }, { type: 'alter_table_alter_column_drop_notnull', tableName: 'dropNotNull', columnName: 'id', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, }, ]; expect(libSQLCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); /** * ! before: * * pk1: { * id INT; * } * * pk2: { * id INT PRIMARY KEY; * } * * ref_table: { * id INT; * } * * create_reference: { * id INT; * } * * ! after: * * pk1: { * id INT PRIMARY KEY; * } * * pk2: { * id INT; * } * * ref_table: { * id INT; * } * * create_reference: { * id INT -> ref_table INT; * } * * drop primary key for pk2 * set primary key for pk1 * "create_reference" reference on "ref_table" * * expect to: * - "recreate_table" statement for pk1 * - "recreate_table" statement for pk2 * - "create_reference" statement for create_reference */ test(`drop and set primary key. create reference`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_alter_column_set_pk', tableName: 'pk1', schema: '', columnName: 'id', }, { type: 'alter_table_alter_column_set_notnull', tableName: 'pk1', columnName: 'id', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: false, columnPk: true, } as unknown as JsonStatement, { type: 'alter_table_alter_column_drop_pk', tableName: 'pk2', columnName: 'id', schema: '', }, { type: 'alter_table_alter_column_drop_notnull', tableName: 'pk2', columnName: 'id', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, } as unknown as JsonStatement, { type: 'create_reference', tableName: 'create_reference', data: 'create_reference_id_ref_table_id_fk;create_reference;id;ref_table;id;no action;no action', schema: '', columnNotNull: false, columnDefault: undefined, columnType: 'int', }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { create_reference: { name: 'create_reference', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, pk1: { name: 'pk1', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, pk2: { name: 'pk2', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, ref_table: { name: 'ref_table', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { create_reference: { name: 'create_reference', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: { create_reference_id_ref_table_id_fk: 'create_reference_id_ref_table_id_fk;create_reference;id;ref_table;id;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, pk1: { name: 'pk1', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, pk2: { name: 'pk2', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, ref_table: { name: 'ref_table', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements = [ { type: 'recreate_table', tableName: 'pk1', columns: [ { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }, { type: 'recreate_table', tableName: 'pk2', columns: [ { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }, { type: 'create_reference', tableName: 'create_reference', data: 'create_reference_id_ref_table_id_fk;create_reference;id;ref_table;id;no action;no action', schema: '', columnNotNull: false, columnDefault: undefined, columnType: 'int', }, ]; expect(libSQLCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); /** * ! before: * * fk1: { * fk_id INT; * fk_id1 INT; * } * * fk2: { * fk2_id INT; -> composite reference on ref_table id INT * fk2_id1 INT; -> composite reference on ref_table id1 INT * } * * ref_table: { * id INT; * id1 INT; * } * * ! after: * * fk1: { * fk_id INT; -> composite reference on ref_table id INT * fk_id1 INT; -> composite reference on ref_table id1 INT * } * * fk2: { * fk2_id INT; * fk2_id1 INT; * } * * ref_table: { * id INT; * id1 INT; * } * * set multi column reference for fk1 * drop multi column reference for fk2 * * expect to: * - "recreate_table" statement for fk1 * - "recreate_table" statement for fk2 */ test(`set and drop multiple columns reference`, async (t) => { const statements: JsonStatement[] = [ { type: 'delete_reference', tableName: 'fk1', data: 'fk1_fk_id_fk_id1_ref_table_id_id1_fk;fk1;fk_id,fk_id1;ref_table;id,id1;no action;no action', schema: '', isMulticolumn: true, }, { type: 'create_reference', tableName: 'fk2', data: 'fk2_fk2_id_fk2_id1_ref_table_id_id1_fk;fk2;fk2_id,fk2_id1;ref_table;id,id1;no action;no action', schema: '', isMulticolumn: true, }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { fk1: { name: 'fk1', columns: { fk_id: { name: 'fk_id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, fk_id1: { name: 'fk_id1', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: { fk1_fk_id_fk_id1_ref_table_id_id1_fk: 'fk1_fk_id_fk_id1_ref_table_id_id1_fk;fk1;fk_id,fk_id1;ref_table;id,id1;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, fk2: { name: 'fk2', columns: { fk2_id: { name: 'fk2_id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, fk2_id1: { name: 'fk2_id1', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, ref_table: { name: 'ref_table', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, id1: { name: 'id1', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { fk1: { name: 'fk1', columns: { fk_id: { name: 'fk_id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, fk_id1: { name: 'fk_id1', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, fk2: { name: 'fk2', columns: { fk2_id: { name: 'fk2_id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, fk2_id1: { name: 'fk2_id1', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: { fk2_fk2_id_fk2_id1_ref_table_id_id1_fk: 'fk2_fk2_id_fk2_id1_ref_table_id_id1_fk;fk2;fk2_id,fk2_id1;ref_table;id,id1;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, ref_table: { name: 'ref_table', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, id1: { name: 'id1', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements = [ { type: 'recreate_table', tableName: 'fk1', columns: [ { name: 'fk_id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, { name: 'fk_id1', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }, { type: 'recreate_table', tableName: 'fk2', columns: [ { name: 'fk2_id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, { name: 'fk2_id1', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, ], compositePKs: [], referenceData: [ { name: 'fk2_fk2_id_fk2_id1_ref_table_id_id1_fk', tableFrom: 'fk2', tableTo: 'ref_table', columnsFrom: ['fk2_id', 'fk2_id1'], columnsTo: ['id', 'id1'], onDelete: 'no action', onUpdate: 'no action', }, ], uniqueConstraints: [], checkConstraints: [], }, ]; expect(libSQLCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); /** * ! before: * * pk: { * pk TEXT PRIMARY KEY; * } * * simple: { * simple TEXT; * } * * unique: { * unique INT UNIQUE; * } * * ! after: * * pk: { * pk INT PRIMARY KEY; * } * * simple: { * simple INT; * } * * unique: { * unique TEXT UNIQUE; * } * * set new type for primary key column * set new type for unique column * set new type for column without pk or unique * * expect to: * - "recreate_table" statement for pk * - "recreate_table" statement for unique * - "alter_table_alter_column_set_type" statement for simple * - "create_index" statement for unique */ test(`set new type for primary key, unique and normal column`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_alter_column_set_type', tableName: 'pk', columnName: 'pk', newDataType: 'int', oldDataType: 'text', schema: '', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: false, columnPk: true, } as unknown as JsonStatement, { type: 'alter_table_alter_column_set_type', tableName: 'simple', columnName: 'simple', newDataType: 'int', oldDataType: 'text', schema: '', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, } as unknown as JsonStatement, { type: 'alter_table_alter_column_set_type', tableName: 'unique', columnName: 'unique', newDataType: 'text', oldDataType: 'int', schema: '', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, } as unknown as JsonStatement, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { pk: { name: 'pk', columns: { pk: { name: 'pk', type: 'text', primaryKey: true, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, simple: { name: 'simple', columns: { simple: { name: 'simple', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, unique: { name: 'unique', columns: { unique: { name: 'unique', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: { unique_unique_unique: 'unique_unique_unique;unique;true;', }, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { pk: { name: 'pk', columns: { pk: { name: 'pk', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, simple: { name: 'simple', columns: { simple: { name: 'simple', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, unique: { name: 'unique', columns: { unique: { name: 'unique', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: { unique_unique_unique: 'unique_unique_unique;unique;true;', }, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements = [ { type: 'recreate_table', tableName: 'pk', columns: [ { name: 'pk', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }, { type: 'alter_table_alter_column_set_type', tableName: 'simple', columnName: 'simple', newDataType: 'int', oldDataType: 'text', schema: '', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, }, { type: 'alter_table_alter_column_set_type', tableName: 'unique', columnName: 'unique', newDataType: 'text', oldDataType: 'int', schema: '', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, }, ]; expect(libSQLCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`add columns. set fk`, async (t) => { const statements: JsonStatement[] = [ { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: undefined, }, { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: undefined, }, { type: 'create_reference', tableName: 'ref', data: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', schema: '', columnNotNull: false, columnDefault: undefined, columnType: 'integer', }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test1: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: { ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements = [ { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: undefined, }, { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: undefined, }, { type: 'create_reference', tableName: 'ref', data: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', schema: '', columnNotNull: false, columnDefault: undefined, columnType: 'integer', }, ]; expect(libSQLCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`add column and fk`, async (t) => { const statements: JsonStatement[] = [ { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', }, { type: 'create_reference', tableName: 'ref', data: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', schema: '', columnNotNull: false, columnDefault: undefined, columnType: 'integer', }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test1: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: { ref_test1_user_new_age_fk: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test1: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: { ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements = [ { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', }, ]; expect(libSQLCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`add column and fk`, async (t) => { const statements: JsonStatement[] = [ { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', }, { type: 'create_reference', tableName: 'ref', data: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', schema: '', columnNotNull: false, columnDefault: undefined, columnType: 'integer', }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test1: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: { ref_test1_user_new_age_fk: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test1: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: { ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements = [ { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', }, ]; expect(libSQLCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); ================================================ FILE: drizzle-kit/tests/statements-combiner/singlestore-statements-combiner.test.ts ================================================ import { JsonStatement } from 'src/jsonStatements'; import { SingleStoreSchemaSquashed } from 'src/serializer/singlestoreSchema'; import { singleStoreCombineStatements } from 'src/statementCombiner'; import { expect, test } from 'vitest'; test(`change column data type`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_rename_column', tableName: 'user', oldColumnName: 'lastName', newColumnName: 'lastName123', schema: '', }, { type: 'alter_table_alter_column_set_type', tableName: 'user', columnName: 'lastName123', newDataType: 'int', oldDataType: 'text', schema: '', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, columnIsUnique: false, } as unknown as JsonStatement, ]; const json1: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { user: { name: 'user', columns: { firstName: { name: 'firstName', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, lastName: { name: 'lastName', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, }, }, }; const json2: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { user: { name: 'user', columns: { firstName: { name: 'firstName', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, lastName: { name: 'lastName123', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, }, }, }; const newJsonStatements = [ { type: 'alter_table_rename_column', tableName: 'user', oldColumnName: 'lastName', newColumnName: 'lastName123', schema: '', }, { type: 'singlestore_recreate_table', tableName: 'user', columns: [ { name: 'firstName', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, { name: 'lastName123', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, { name: 'test', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, ], compositePKs: [], uniqueConstraints: [], }, ]; expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`set autoincrement`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_alter_column_set_autoincrement', tableName: 'users', columnName: 'id', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: true, columnPk: false, } as unknown as JsonStatement, ]; const json2: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { users: { name: 'users', columns: { new_id: { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: true, }, name: { name: 'name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, email: { name: 'email', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, }, }, }; const newJsonStatements = [ { type: 'singlestore_recreate_table', tableName: 'users', columns: [ { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: true, }, { name: 'name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, { name: 'email', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, ], compositePKs: [], uniqueConstraints: [], }, ]; expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`drop autoincrement`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_alter_column_drop_autoincrement', tableName: 'users', columnName: 'id', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: true, columnPk: false, } as unknown as JsonStatement, ]; const json2: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { users: { name: 'users', columns: { new_id: { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, name: { name: 'name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, email: { name: 'email', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, }, }, }; const newJsonStatements = [ { type: 'singlestore_recreate_table', tableName: 'users', columns: [ { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, { name: 'name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, { name: 'email', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, ], compositePKs: [], uniqueConstraints: [], }, ]; expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`drop autoincrement`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_alter_column_drop_autoincrement', tableName: 'users', columnName: 'id', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: true, columnPk: false, } as unknown as JsonStatement, ]; const json2: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { users: { name: 'users', columns: { new_id: { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, name: { name: 'name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, email: { name: 'email', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, }, }, }; const newJsonStatements = [ { type: 'singlestore_recreate_table', tableName: 'users', columns: [ { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, { name: 'name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, { name: 'email', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, ], compositePKs: [], uniqueConstraints: [], }, ]; expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`set not null`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_alter_column_set_notnull', tableName: 'users', columnName: 'name', schema: '', newDataType: 'text', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: false, columnPk: false, } as unknown as JsonStatement, ]; const json2: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { users: { name: 'users', columns: { new_id: { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, name: { name: 'name', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, email: { name: 'email', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, }, }, }; const newJsonStatements = [ { type: 'singlestore_recreate_table', tableName: 'users', columns: [ { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, { name: 'name', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, { name: 'email', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, ], compositePKs: [], uniqueConstraints: [], }, ]; expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`drop not null`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_alter_column_drop_notnull', tableName: 'users', columnName: 'name', schema: '', newDataType: 'text', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, } as unknown as JsonStatement, ]; const json2: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { users: { name: 'users', columns: { new_id: { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, name: { name: 'name', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, email: { name: 'email', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, }, }, }; const newJsonStatements = [ { type: 'singlestore_recreate_table', tableName: 'users', columns: [ { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, { name: 'name', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, { name: 'email', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, ], compositePKs: [], uniqueConstraints: [], }, ]; expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`renamed column and droped column "test"`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_rename_column', tableName: 'user', oldColumnName: 'lastName', newColumnName: 'lastName123', schema: '', }, { type: 'alter_table_drop_column', tableName: 'user', columnName: 'test', schema: '', }, ]; const json1: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { user: { name: 'user', columns: { firstName: { name: 'firstName', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, lastName: { name: 'lastName', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, }, }, }; const json2: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { user: { name: 'user', columns: { firstName: { name: 'firstName', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, lastName: { name: 'lastName123', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, }, }, }; const newJsonStatements: JsonStatement[] = [ { type: 'alter_table_rename_column', tableName: 'user', oldColumnName: 'lastName', newColumnName: 'lastName123', schema: '', }, { type: 'alter_table_drop_column', tableName: 'user', columnName: 'test', schema: '', }, ]; expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`droped column that is part of composite pk`, async (t) => { const statements: JsonStatement[] = [ { type: 'delete_composite_pk', tableName: 'user', data: 'id,iq' }, { type: 'alter_table_alter_column_set_pk', tableName: 'user', schema: '', columnName: 'id', }, { type: 'alter_table_drop_column', tableName: 'user', columnName: 'iq', schema: '', }, ]; const json1: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { user: { name: 'user', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, first_nam: { name: 'first_nam', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, iq: { name: 'iq', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: { user_id_iq_pk: 'id,iq', }, uniqueConstraints: {}, }, }, }; const json2: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { user: { name: 'user', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: false, autoincrement: false, }, first_nam: { name: 'first_name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, }, }, }; const newJsonStatements: JsonStatement[] = [ { type: 'singlestore_recreate_table', tableName: 'user', columns: [ { name: 'id', type: 'int', primaryKey: true, notNull: false, autoincrement: false, }, { name: 'first_name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, ], compositePKs: [], uniqueConstraints: [], }, ]; expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`add column with pk`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_add_column', tableName: 'table', column: { name: 'test', type: 'integer', primaryKey: true, notNull: false, autoincrement: false, }, schema: '', }, ]; const json2: SingleStoreSchemaSquashed = { version: '1', dialect: 'singlestore', tables: { table: { name: 'table', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, }, }, }; const newJsonStatements = [ { columns: [ { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, { name: 'test', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, ], compositePKs: [], tableName: 'table', type: 'singlestore_recreate_table', uniqueConstraints: [], }, ]; expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); ================================================ FILE: drizzle-kit/tests/statements-combiner/sqlite-statements-combiner.test.ts ================================================ import { JsonStatement } from 'src/jsonStatements'; import { SQLiteSchemaSquashed } from 'src/serializer/sqliteSchema'; import { sqliteCombineStatements } from 'src/statementCombiner'; import { expect, test } from 'vitest'; test(`renamed column and altered this column type`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_rename_column', tableName: 'user', oldColumnName: 'lastName', newColumnName: 'lastName123', schema: '', }, { type: 'alter_table_alter_column_set_type', tableName: 'user', columnName: 'lastName123', newDataType: 'int', oldDataType: 'text', schema: '', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, columnIsUnique: false, } as unknown as JsonStatement, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { user: { name: 'user', columns: { firstName: { name: 'firstName', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, lastName: { name: 'lastName', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { user: { name: 'user', columns: { firstName: { name: 'firstName', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, lastName: { name: 'lastName123', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements = [ { type: 'recreate_table', tableName: 'user', columns: [ { name: 'firstName', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, { name: 'lastName123', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, { name: 'test', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }, ]; expect(sqliteCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`renamed column and droped column "test"`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_rename_column', tableName: 'user', oldColumnName: 'lastName', newColumnName: 'lastName123', schema: '', }, { type: 'alter_table_drop_column', tableName: 'user', columnName: 'test', schema: '', }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { user: { name: 'user', columns: { firstName: { name: 'firstName', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, lastName: { name: 'lastName', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { user: { name: 'user', columns: { firstName: { name: 'firstName', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, lastName: { name: 'lastName123', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements: JsonStatement[] = [ { type: 'alter_table_rename_column', tableName: 'user', oldColumnName: 'lastName', newColumnName: 'lastName123', schema: '', }, { type: 'alter_table_drop_column', tableName: 'user', columnName: 'test', schema: '', }, ]; expect(sqliteCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`droped column that is part of composite pk`, async (t) => { const statements: JsonStatement[] = [ { type: 'delete_composite_pk', tableName: 'user', data: 'id,iq' }, { type: 'alter_table_alter_column_set_pk', tableName: 'user', schema: '', columnName: 'id', }, { type: 'alter_table_drop_column', tableName: 'user', columnName: 'iq', schema: '', }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { user: { name: 'user', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, first_nam: { name: 'first_nam', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, iq: { name: 'iq', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: { user_id_iq_pk: 'id,iq', }, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { user: { name: 'user', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: false, autoincrement: false, }, first_nam: { name: 'first_nam', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements: JsonStatement[] = [ { type: 'recreate_table', tableName: 'user', columns: [ { name: 'id', type: 'int', primaryKey: true, notNull: false, autoincrement: false, }, { name: 'first_nam', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }, ]; expect(sqliteCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`drop column "ref"."name", rename column "ref"."age". dropped primary key "user"."id". Set not null to "user"."iq"`, async (t) => { const statements: JsonStatement[] = [ { type: 'alter_table_rename_column', tableName: 'ref', oldColumnName: 'age', newColumnName: 'age1', schema: '', }, { type: 'alter_table_alter_column_drop_pk', tableName: 'user', columnName: 'id', schema: '', }, { type: 'alter_table_alter_column_drop_autoincrement', tableName: 'user', columnName: 'id', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, } as unknown as JsonStatement, { type: 'alter_table_alter_column_drop_notnull', tableName: 'user', columnName: 'id', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: false, columnAutoIncrement: false, columnPk: false, } as unknown as JsonStatement, { type: 'alter_table_alter_column_set_notnull', tableName: 'user', columnName: 'iq', schema: '', newDataType: 'int', columnDefault: undefined, columnOnUpdate: undefined, columnNotNull: true, columnAutoIncrement: false, columnPk: false, } as unknown as JsonStatement, { type: 'alter_table_drop_column', tableName: 'ref', columnName: 'text', schema: '', }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: true, }, user_iq: { name: 'user_iq', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, name: { name: 'name', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, age: { name: 'age', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: { ref_user_iq_user_iq_fk: 'ref_user_iq_user_iq_fk;ref;user_iq;user;iq;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: true, }, first_name: { name: 'first_name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, iq: { name: 'iq', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id: { name: 'id', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, user_iq: { name: 'user_iq', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, age1: { name: 'age1', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: { ref_user_iq_user_iq_fk: 'ref_user_iq_user_iq_fk;ref;user_iq;user;iq;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id: { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, first_name: { name: 'first_name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, iq: { name: 'iq', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements: JsonStatement[] = [ { type: 'alter_table_rename_column', tableName: 'ref', oldColumnName: 'age', newColumnName: 'age1', schema: '', }, { type: 'alter_table_drop_column', tableName: 'ref', columnName: 'text', schema: '', }, { type: 'recreate_table', tableName: 'user', columns: [ { name: 'id', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, { name: 'first_name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, { name: 'iq', type: 'int', primaryKey: false, notNull: true, autoincrement: false, }, ], compositePKs: [], referenceData: [], uniqueConstraints: [], checkConstraints: [], }, ]; expect(sqliteCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`create reference on exising column (table includes unique index). expect to recreate column and recreate index`, async (t) => { const statements: JsonStatement[] = [ { type: 'create_reference', tableName: 'unique', data: 'unique_ref_pk_pk_pk_fk;unique;ref_pk;pk;pk;no action;no action', schema: '', }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { pk: { name: 'pk', columns: { pk: { name: 'pk', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, unique: { name: 'unique', columns: { unique: { name: 'unique', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, ref_pk: { name: 'ref_pk', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: { unique_unique_unique: 'unique_unique_unique;unique;true;', }, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { pk: { name: 'pk', columns: { pk: { name: 'pk', type: 'int', primaryKey: true, notNull: true, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, unique: { name: 'unique', columns: { unique: { name: 'unique', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, ref_pk: { name: 'ref_pk', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: { unique_unique_unique: 'unique_unique_unique;unique;true;', }, foreignKeys: { unique_ref_pk_pk_pk_fk: 'unique_ref_pk_pk_pk_fk;unique;ref_pk;pk;pk;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements: JsonStatement[] = [ { type: 'recreate_table', tableName: 'unique', columns: [ { name: 'unique', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, { name: 'ref_pk', type: 'int', primaryKey: false, notNull: false, autoincrement: false, }, ], compositePKs: [], referenceData: [ { name: 'unique_ref_pk_pk_pk_fk', tableFrom: 'unique', tableTo: 'pk', columnsFrom: ['ref_pk'], columnsTo: ['pk'], onDelete: 'no action', onUpdate: 'no action', }, ], uniqueConstraints: [], checkConstraints: [], }, { data: 'unique_unique_unique;unique;true;', internal: undefined, schema: '', tableName: 'unique', type: 'create_index', }, ]; expect(sqliteCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`add columns. set fk`, async (t) => { const statements: JsonStatement[] = [ { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: undefined, }, { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: undefined, }, { type: 'create_reference', tableName: 'ref', data: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', schema: '', columnNotNull: false, columnDefault: undefined, columnType: 'integer', }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test1: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: { ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements = [ { columns: [ { autoincrement: false, name: 'id1', notNull: true, primaryKey: false, type: 'text', }, { autoincrement: false, name: 'new_age', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'test', notNull: false, primaryKey: false, type: 'integer', }, { autoincrement: false, name: 'test1', notNull: false, primaryKey: false, type: 'integer', }, ], compositePKs: [], referenceData: [ { columnsFrom: [ 'new_age', ], columnsTo: [ 'new_age', ], name: 'ref_new_age_user_new_age_fk', onDelete: 'no action', onUpdate: 'no action', tableFrom: 'ref', tableTo: 'user', }, ], tableName: 'ref', type: 'recreate_table', uniqueConstraints: [], checkConstraints: [], }, ]; expect(sqliteCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); test(`add column and fk`, async (t) => { const statements: JsonStatement[] = [ { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', }, { type: 'create_reference', tableName: 'ref', data: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', schema: '', columnNotNull: false, columnDefault: undefined, columnType: 'integer', }, ]; const json1: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test1: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: { ref_test1_user_new_age_fk: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const json2: SQLiteSchemaSquashed = { version: '6', dialect: 'sqlite', tables: { ref: { name: 'ref', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test: { name: 'test', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, test1: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: { ref_new_age_user_new_age_fk: 'ref_new_age_user_new_age_fk;ref;new_age;user;new_age;no action;no action', }, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, user: { name: 'user', columns: { id1: { name: 'id1', type: 'text', primaryKey: false, notNull: true, autoincrement: false, }, new_age: { name: 'new_age', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, }, indexes: {}, foreignKeys: {}, compositePrimaryKeys: {}, uniqueConstraints: {}, checkConstraints: {}, }, }, enums: {}, views: {}, }; const newJsonStatements = [ { type: 'sqlite_alter_table_add_column', tableName: 'ref', column: { name: 'test1', type: 'integer', primaryKey: false, notNull: false, autoincrement: false, }, referenceData: 'ref_test1_user_new_age_fk;ref;test1;user;new_age;no action;no action', }, ]; expect(sqliteCombineStatements(statements, json2)).toStrictEqual( newJsonStatements, ); }); ================================================ FILE: drizzle-kit/tests/test/sqlite.test.ts ================================================ import { int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { diffTestSchemasSqlite } from 'tests/schemaDiffer'; import { expect } from 'vitest'; import { DialectSuite, run } from '../common'; const sqliteSuite: DialectSuite = { async columns1() { const schema1 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), }), }; const schema2 = { users: sqliteTable('users', { id: int('id').primaryKey({ autoIncrement: true }), name: text('name'), }), }; const { statements } = await diffTestSchemasSqlite(schema1, schema2, []); expect(statements.length).toBe(1); expect(statements[0]).toStrictEqual({ type: 'sqlite_alter_table_add_column', tableName: 'users', referenceData: undefined, column: { name: 'name', type: 'text', primaryKey: false, notNull: false, autoincrement: false, }, }); }, }; run(sqliteSuite); ================================================ FILE: drizzle-kit/tests/testsinglestore.ts ================================================ import { index, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import { diffTestSchemasSingleStore } from './schemaDiffer'; const from = { users: singlestoreTable( 'table', { name: text('name'), }, (t) => { return { idx: index('name_idx').on(t.name), }; }, ), }; const to = { users: singlestoreTable('table', { name: text('name'), }), }; diffTestSchemasSingleStore(from, to, []).then((res) => { const { statements, sqlStatements } = res; console.log(statements); console.log(sqlStatements); }); ================================================ FILE: drizzle-kit/tests/validations.test.ts ================================================ import { mysqlCredentials } from 'src/cli/validations/mysql'; import { postgresCredentials } from 'src/cli/validations/postgres'; import { singlestoreCredentials } from 'src/cli/validations/singlestore'; import { sqliteCredentials } from 'src/cli/validations/sqlite'; import { expect, test } from 'vitest'; test('turso #1', () => { sqliteCredentials.parse({ dialect: 'sqlite', driver: 'turso', url: 'https://turso.tech', }); }); test('turso #2', () => { sqliteCredentials.parse({ dialect: 'sqlite', driver: 'turso', url: 'https://turso.tech', authToken: 'token', }); }); test('turso #3', () => { expect(() => sqliteCredentials.parse({ dialect: 'sqlite', driver: 'turso', url: 'https://turso.tech', authToken: '', }) ).toThrowError(); }); test('turso #4', () => { expect(() => { sqliteCredentials.parse({ dialect: 'sqlite', driver: 'turso', url: '', authToken: 'token', }); }).toThrowError(); }); test('turso #5', () => { expect(() => { sqliteCredentials.parse({ dialect: 'sqlite', driver: 'turso', url: '', authToken: '', }); }).toThrowError(); }); test('d1-http #1', () => { sqliteCredentials.parse({ dialect: 'sqlite', driver: 'd1-http', accountId: 'accountId', databaseId: 'databaseId', token: 'token', }); }); test('d1-http #2', () => { expect(() => sqliteCredentials.parse({ dialect: 'sqlite', driver: 'd1-http', accountId: 'accountId', databaseId: 'databaseId', // token: "token", }) ).toThrowError(); }); test('d1-http #3', () => { expect(() => sqliteCredentials.parse({ dialect: 'sqlite', driver: 'd1-http', accountId: 'accountId', databaseId: 'databaseId', token: '', }) ).toThrowError(); }); test('d1-http #4', () => { expect(() => sqliteCredentials.parse({ dialect: 'sqlite', driver: 'd1-http', accountId: 'accountId', // databaseId: "databaseId", token: 'token', }) ).toThrowError(); }); test('d1-http #5', () => { expect(() => sqliteCredentials.parse({ dialect: 'sqlite', driver: 'd1-http', accountId: 'accountId', databaseId: '', token: 'token', }) ).toThrowError(); }); test('d1-http #6', () => { expect(() => sqliteCredentials.parse({ dialect: 'sqlite', driver: 'd1-http', // accountId: "accountId", databaseId: 'databaseId', token: 'token', }) ).toThrowError(); }); test('d1-http #7', () => { expect(() => sqliteCredentials.parse({ dialect: 'sqlite', driver: 'd1-http', accountId: '', databaseId: 'databaseId', token: 'token', }) ).toThrowError(); }); // omit undefined driver test('sqlite #1', () => { expect( sqliteCredentials.parse({ dialect: 'sqlite', driver: undefined, url: 'https://turso.tech', }), ).toStrictEqual({ url: 'https://turso.tech', }); }); test('sqlite #2', () => { expect( sqliteCredentials.parse({ dialect: 'sqlite', url: 'https://turso.tech', }), ).toStrictEqual({ url: 'https://turso.tech', }); }); test('sqlite #3', () => { expect(() => sqliteCredentials.parse({ dialect: 'sqlite', url: '', }) ).toThrowError(); }); test('AWS Data API #1', () => { expect( postgresCredentials.parse({ dialect: 'postgres', url: 'https://turso.tech', }), ).toStrictEqual({ url: 'https://turso.tech', }); }); test('AWS Data API #1', () => { expect( postgresCredentials.parse({ dialect: 'postgres', driver: 'aws-data-api', database: 'database', secretArn: 'secretArn', resourceArn: 'resourceArn', }), ).toStrictEqual({ driver: 'aws-data-api', database: 'database', secretArn: 'secretArn', resourceArn: 'resourceArn', }); }); test('AWS Data API #2', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', driver: 'aws-data-api', database: 'database', secretArn: '', resourceArn: 'resourceArn', }); }).toThrowError(); }); test('AWS Data API #3', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', driver: 'aws-data-api', database: 'database', secretArn: 'secretArn', resourceArn: '', }); }).toThrowError(); }); test('AWS Data API #4', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', driver: 'aws-data-api', database: '', secretArn: 'secretArn', resourceArn: 'resourceArn', }); }).toThrowError(); }); test('AWS Data API #5', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', driver: 'aws-data-api', database: 'database', resourceArn: 'resourceArn', }); }).toThrowError(); }); test('AWS Data API #6', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', driver: 'aws-data-api', secretArn: 'secretArn', resourceArn: 'resourceArn', }); }).toThrowError(); }); test('AWS Data API #7', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', driver: 'aws-data-api', database: 'database', secretArn: 'secretArn', }); }).toThrowError(); }); test('AWS Data API #8', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', driver: 'aws-data-api', }); }).toThrowError(); }); test('PGlite #1', () => { expect( postgresCredentials.parse({ dialect: 'postgres', driver: 'pglite', url: './my.db', }), ).toStrictEqual({ driver: 'pglite', url: './my.db', }); }); test('PGlite #2', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', driver: 'pglite', url: '', }); }).toThrowError(); }); test('PGlite #3', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', driver: 'pglite', }); }).toThrowError(); }); test('postgres #1', () => { expect( postgresCredentials.parse({ dialect: 'postgres', url: 'https://turso.tech', }), ).toStrictEqual({ url: 'https://turso.tech', }); }); test('postgres #2', () => { expect( postgresCredentials.parse({ dialect: 'postgres', driver: undefined, url: 'https://turso.tech', }), ).toStrictEqual({ url: 'https://turso.tech', }); }); test('postgres #3', () => { expect( postgresCredentials.parse({ dialect: 'postgres', database: 'database', host: 'host', }), ).toStrictEqual({ database: 'database', host: 'host', }); }); test('postgres #4', () => { expect( postgresCredentials.parse({ dialect: 'postgres', database: 'database', host: 'host', }), ).toStrictEqual({ database: 'database', host: 'host', }); }); test('postgres #5', () => { expect( postgresCredentials.parse({ dialect: 'postgres', host: 'host', port: 1234, user: 'user', password: 'password', database: 'database', ssl: 'require', }), ).toStrictEqual({ host: 'host', port: 1234, user: 'user', password: 'password', database: 'database', ssl: 'require', }); }); test('postgres #6', () => { expect( postgresCredentials.parse({ dialect: 'postgres', host: 'host', database: 'database', ssl: true, }), ).toStrictEqual({ host: 'host', database: 'database', ssl: true, }); }); test('postgres #7', () => { expect( postgresCredentials.parse({ dialect: 'postgres', host: 'host', database: 'database', ssl: 'allow', }), ).toStrictEqual({ host: 'host', database: 'database', ssl: 'allow', }); }); test('postgres #8', () => { expect( postgresCredentials.parse({ dialect: 'postgres', host: 'host', database: 'database', ssl: { ca: 'ca', cert: 'cert', }, }), ).toStrictEqual({ host: 'host', database: 'database', ssl: { ca: 'ca', cert: 'cert', }, }); }); test('postgres #9', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', }); }).toThrowError(); }); test('postgres #10', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', url: undefined, }); }).toThrowError(); }); test('postgres #11', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', url: '', }); }).toThrowError(); }); test('postgres #12', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', host: '', database: '', }); }).toThrowError(); }); test('postgres #13', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', database: '', }); }).toThrowError(); }); test('postgres #14', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', host: '', }); }).toThrowError(); }); test('postgres #15', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', database: ' ', host: '', }); }).toThrowError(); }); test('postgres #16', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', database: '', host: ' ', }); }).toThrowError(); }); test('postgres #17', () => { expect(() => { postgresCredentials.parse({ dialect: 'postgres', database: ' ', host: ' ', port: '', }); }).toThrowError(); }); test('mysql #1', () => { expect( mysqlCredentials.parse({ dialect: 'mysql', url: 'https://turso.tech', }), ).toStrictEqual({ url: 'https://turso.tech', }); }); test('mysql #2', () => { expect( mysqlCredentials.parse({ dialect: 'mysql', driver: undefined, url: 'https://turso.tech', }), ).toStrictEqual({ url: 'https://turso.tech', }); }); test('mysql #3', () => { expect( mysqlCredentials.parse({ dialect: 'mysql', database: 'database', host: 'host', }), ).toStrictEqual({ database: 'database', host: 'host', }); }); test('mysql #4', () => { expect( mysqlCredentials.parse({ dialect: 'mysql', database: 'database', host: 'host', }), ).toStrictEqual({ database: 'database', host: 'host', }); }); test('mysql #5', () => { expect( mysqlCredentials.parse({ dialect: 'mysql', host: 'host', port: 1234, user: 'user', password: 'password', database: 'database', ssl: 'require', }), ).toStrictEqual({ host: 'host', port: 1234, user: 'user', password: 'password', database: 'database', ssl: 'require', }); }); test('mysql #7', () => { expect( mysqlCredentials.parse({ dialect: 'mysql', host: 'host', database: 'database', ssl: 'allow', }), ).toStrictEqual({ host: 'host', database: 'database', ssl: 'allow', }); }); test('mysql #8', () => { expect( mysqlCredentials.parse({ dialect: 'mysql', host: 'host', database: 'database', ssl: { ca: 'ca', cert: 'cert', }, }), ).toStrictEqual({ host: 'host', database: 'database', ssl: { ca: 'ca', cert: 'cert', }, }); }); test('mysql #9', () => { expect(() => { mysqlCredentials.parse({ dialect: 'mysql', }); }).toThrowError(); }); test('mysql #10', () => { expect(() => { mysqlCredentials.parse({ dialect: 'mysql', url: undefined, }); }).toThrowError(); }); test('mysql #11', () => { expect(() => { mysqlCredentials.parse({ dialect: 'mysql', url: '', }); }).toThrowError(); }); test('mysql #12', () => { expect(() => { mysqlCredentials.parse({ dialect: 'mysql', host: '', database: '', }); }).toThrowError(); }); test('mysql #13', () => { expect(() => { mysqlCredentials.parse({ dialect: 'mysql', database: '', }); }).toThrowError(); }); test('mysql #14', () => { expect(() => { mysqlCredentials.parse({ dialect: 'mysql', host: '', }); }).toThrowError(); }); test('mysql #15', () => { expect(() => { mysqlCredentials.parse({ dialect: 'mysql', database: ' ', host: '', }); }).toThrowError(); }); test('mysql #16', () => { expect(() => { mysqlCredentials.parse({ dialect: 'mysql', database: '', host: ' ', }); }).toThrowError(); }); test('mysql #17', () => { expect(() => { mysqlCredentials.parse({ dialect: 'mysql', database: ' ', host: ' ', port: '', }); }).toThrowError(); }); test('singlestore #1', () => { expect( singlestoreCredentials.parse({ dialect: 'singlestore', database: 'database', host: 'host', }), ).toStrictEqual({ database: 'database', host: 'host', }); }); test('singlestore #2', () => { expect( singlestoreCredentials.parse({ dialect: 'singlestore', database: 'database', host: 'host', }), ).toStrictEqual({ database: 'database', host: 'host', }); }); test('singlestore #3', () => { expect( singlestoreCredentials.parse({ dialect: 'singlestore', host: 'host', port: 1234, user: 'user', password: 'password', database: 'database', ssl: 'require', }), ).toStrictEqual({ host: 'host', port: 1234, user: 'user', password: 'password', database: 'database', ssl: 'require', }); }); test('singlestore #4', () => { expect( singlestoreCredentials.parse({ dialect: 'singlestore', host: 'host', database: 'database', ssl: 'allow', }), ).toStrictEqual({ host: 'host', database: 'database', ssl: 'allow', }); }); test('singlestore #5', () => { expect( singlestoreCredentials.parse({ dialect: 'singlestore', host: 'host', database: 'database', ssl: { ca: 'ca', cert: 'cert', }, }), ).toStrictEqual({ host: 'host', database: 'database', ssl: { ca: 'ca', cert: 'cert', }, }); }); test('singlestore #6', () => { expect(() => { singlestoreCredentials.parse({ dialect: 'singlestore', }); }).toThrowError(); }); test('singlestore #7', () => { expect(() => { singlestoreCredentials.parse({ dialect: 'singlestore', url: undefined, }); }).toThrowError(); }); test('singlestore #8', () => { expect(() => { singlestoreCredentials.parse({ dialect: 'singlestore', url: '', }); }).toThrowError(); }); test('singlestore #9', () => { expect(() => { singlestoreCredentials.parse({ dialect: 'singlestore', host: '', database: '', }); }).toThrowError(); }); test('singlestore #10', () => { expect(() => { singlestoreCredentials.parse({ dialect: 'singlestore', database: '', }); }).toThrowError(); }); test('singlestore #11', () => { expect(() => { singlestoreCredentials.parse({ dialect: 'singlestore', host: '', }); }).toThrowError(); }); test('singlestore #12', () => { expect(() => { singlestoreCredentials.parse({ dialect: 'singlestore', database: ' ', host: '', }); }).toThrowError(); }); test('singlestore #13', () => { expect(() => { singlestoreCredentials.parse({ dialect: 'singlestore', database: '', host: ' ', }); }).toThrowError(); }); test('singlestore #14', () => { expect(() => { singlestoreCredentials.parse({ dialect: 'singlestore', database: ' ', host: ' ', port: '', }); }).toThrowError(); }); ================================================ FILE: drizzle-kit/tests/wrap-param.test.ts ================================================ import chalk from 'chalk'; import { assert, expect, test } from 'vitest'; import { wrapParam } from '../src/cli/validations/common'; test('wrapParam', () => { expect(wrapParam('password', 'password123', false, 'secret')).toBe(` [${chalk.green('✓')}] password: '*****'`); expect(wrapParam('url', 'mysql://user:password@localhost:3306/database', false, 'url')).toBe( ` [${chalk.green('✓')}] url: 'mysql://user:****@localhost:3306/database'`, ); expect(wrapParam('url', 'singlestore://user:password@localhost:3306/database', false, 'url')).toBe( ` [${chalk.green('✓')}] url: 'singlestore://user:****@localhost:3306/database'`, ); expect(wrapParam('url', 'postgresql://user:password@localhost:5432/database', false, 'url')).toBe( ` [${chalk.green('✓')}] url: 'postgresql://user:****@localhost:5432/database'`, ); }); ================================================ FILE: drizzle-kit/tsconfig.build.json ================================================ { "extends": "./tsconfig.json", "include": ["src"] } ================================================ FILE: drizzle-kit/tsconfig.cli-types.json ================================================ { "extends": "./tsconfig.json", "compilerOptions": { "declaration": true, "emitDeclarationOnly": true, "noEmit": false }, "include": ["src/index.ts", "src/utils.ts", "src/utils-studio.ts", "src/api.ts"] } ================================================ FILE: drizzle-kit/tsconfig.json ================================================ { "compilerOptions": { "target": "es2021", "lib": ["es2021"], "types": ["node"], "strictNullChecks": true, "strictFunctionTypes": false, "allowJs": true, "skipLibCheck": true, "esModuleInterop": true, "allowSyntheticDefaultImports": true, "strict": true, "noImplicitOverride": true, "forceConsistentCasingInFileNames": true, "module": "CommonJS", "moduleResolution": "node", "resolveJsonModule": true, "noErrorTruncation": true, "isolatedModules": true, "sourceMap": true, "baseUrl": ".", "outDir": "dist", "noEmit": true, "typeRoots": ["node_modules/@types", "src/@types"] }, "include": ["src", "dev", "tests", "drizzle.config.ts", "test.ts"], "exclude": ["node_modules"] } ================================================ FILE: drizzle-kit/vitest.config.ts ================================================ import tsconfigPaths from 'vite-tsconfig-paths'; import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { include: [ 'tests/**/*.test.ts', // Need to test it first before pushing changes // 'tests/singlestore-schemas.test.ts', // 'tests/singlestore-views.test.ts', // 'tests/push/singlestore-push.test.ts', // 'tests/push/singlestore.test.ts', ], // This one was excluded because we need to modify an API for SingleStore-generated columns. // It’s in the backlog. exclude: ['tests/**/singlestore-generated.test.ts'], typecheck: { tsconfig: 'tsconfig.json', }, testTimeout: 100000, hookTimeout: 100000, isolate: true, poolOptions: { threads: { singleThread: true, }, }, maxWorkers: 1, fileParallelism: false, }, plugins: [tsconfigPaths()], }); ================================================ FILE: drizzle-orm/.madgerc ================================================ { "detectiveOptions": { "ts": { "skipTypeImports": true } } } ================================================ FILE: drizzle-orm/package.json ================================================ { "name": "drizzle-orm", "version": "0.45.1", "description": "Drizzle ORM package for SQL databases", "type": "module", "scripts": { "p": "prisma generate --schema src/prisma/schema.prisma", "build": "pnpm p && scripts/build.ts", "b": "pnpm build", "test:types": "cd type-tests && tsc", "test": "vitest run", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", "publish": "npm publish package.tgz" }, "main": "./index.cjs", "module": "./index.js", "types": "./index.d.ts", "sideEffects": false, "publishConfig": { "provenance": true }, "repository": { "type": "git", "url": "git+https://github.com/drizzle-team/drizzle-orm.git" }, "homepage": "https://orm.drizzle.team", "keywords": [ "drizzle", "orm", "pg", "mysql", "singlestore", "postgresql", "postgres", "sqlite", "database", "sql", "typescript", "ts", "drizzle-orm" ], "author": "Drizzle Team", "license": "Apache-2.0", "bugs": { "url": "https://github.com/drizzle-team/drizzle-orm/issues" }, "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", "@prisma/client": "*", "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/sql.js": "*", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=7", "bun-types": "*", "expo-sqlite": ">=14.0.0", "knex": "*", "kysely": "*", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5", "gel": ">=2", "@upstash/redis": ">=1.34.7" }, "peerDependenciesMeta": { "mysql2": { "optional": true }, "@vercel/postgres": { "optional": true }, "@xata.io/client": { "optional": true }, "better-sqlite3": { "optional": true }, "@types/better-sqlite3": { "optional": true }, "sqlite3": { "optional": true }, "sql.js": { "optional": true }, "@types/sql.js": { "optional": true }, "@cloudflare/workers-types": { "optional": true }, "pg": { "optional": true }, "@types/pg": { "optional": true }, "postgres": { "optional": true }, "@neondatabase/serverless": { "optional": true }, "bun-types": { "optional": true }, "@aws-sdk/client-rds-data": { "optional": true }, "@planetscale/database": { "optional": true }, "knex": { "optional": true }, "kysely": { "optional": true }, "@libsql/client": { "optional": true }, "@libsql/client-wasm": { "optional": true }, "@opentelemetry/api": { "optional": true }, "expo-sqlite": { "optional": true }, "gel": { "optional": true }, "@op-engineering/op-sqlite": { "optional": true }, "@electric-sql/pglite": { "optional": true }, "@tidbcloud/serverless": { "optional": true }, "prisma": { "optional": true }, "@prisma/client": { "optional": true }, "@upstash/redis": { "optional": true } }, "devDependencies": { "@aws-sdk/client-rds-data": "^3.549.0", "@cloudflare/workers-types": "^4.20241112.0", "@electric-sql/pglite": "^0.2.12", "@libsql/client": "^0.10.0", "@libsql/client-wasm": "^0.10.0", "@miniflare/d1": "^2.14.4", "@neondatabase/serverless": "^0.10.0", "@op-engineering/op-sqlite": "^2.0.16", "@opentelemetry/api": "^1.4.1", "@originjs/vite-plugin-commonjs": "^1.0.3", "@planetscale/database": "^1.16.0", "@prisma/client": "5.14.0", "@tidbcloud/serverless": "^0.1.1", "@types/better-sqlite3": "^7.6.12", "@types/node": "^20.2.5", "@types/pg": "^8.10.1", "@types/react": "^18.2.45", "@types/sql.js": "^1.4.4", "@upstash/redis": "^1.34.3", "@vercel/postgres": "^0.8.0", "@xata.io/client": "^0.29.3", "better-sqlite3": "^11.9.1", "bun-types": "^1.2.0", "cpy": "^10.1.0", "expo-sqlite": "^14.0.0", "gel": "^2.0.0", "glob": "^11.0.1", "knex": "^2.4.2", "kysely": "^0.25.0", "mysql2": "^3.14.1", "pg": "^8.11.0", "postgres": "^3.3.5", "prisma": "5.14.0", "react": "^18.2.0", "sql.js": "^1.8.0", "sqlite3": "^5.1.2", "ts-morph": "^25.0.1", "tslib": "^2.5.2", "tsx": "^3.12.7", "vite-tsconfig-paths": "^4.3.2", "vitest": "^3.1.3", "zod": "^3.20.2", "zx": "^7.2.2" } } ================================================ FILE: drizzle-orm/scripts/build.ts ================================================ #!/usr/bin/env -S pnpm tsx import 'zx/globals'; import cpy from 'cpy'; async function updateAndCopyPackageJson() { const pkg = await fs.readJSON('package.json'); const entries = await glob('src/**/*.ts'); pkg.exports = entries.reduce< Record >( (acc, rawEntry) => { const entry = rawEntry.match(/src\/(.*)\.ts/)![1]!; const exportsEntry = entry === 'index' ? '.' : './' + entry.replace(/\/index$/, ''); const importEntry = `./${entry}.js`; const requireEntry = `./${entry}.cjs`; acc[exportsEntry] = { import: { types: `./${entry}.d.ts`, default: importEntry, }, require: { types: `./${entry}.d.cts`, default: requireEntry, }, types: `./${entry}.d.ts`, default: importEntry, }; return acc; }, {}, ); await fs.writeJSON('dist.new/package.json', pkg, { spaces: 2 }); } await fs.remove('dist.new'); await Promise.all([ (async () => { await $`tsup`.stdio('pipe', 'pipe', 'pipe'); })(), (async () => { await $`tsc -p tsconfig.dts.json`.stdio('pipe', 'pipe', 'pipe'); await cpy('dist-dts/**/*.d.ts', 'dist.new', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.cts'), }); await cpy('dist-dts/**/*.d.ts', 'dist.new', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.ts'), }); })(), ]); await Promise.all([ $`tsup src/version.ts --no-config --dts --format esm --outDir dist.new`.stdio('pipe', 'pipe', 'pipe'), $`tsup src/version.ts --no-config --dts --format cjs --outDir dist.new`.stdio('pipe', 'pipe', 'pipe'), ]); await $`scripts/fix-imports.ts`; await fs.copy('../README.md', 'dist.new/README.md'); await updateAndCopyPackageJson(); await fs.remove('dist'); await fs.rename('dist.new', 'dist'); ================================================ FILE: drizzle-orm/scripts/fix-imports.ts ================================================ #!/usr/bin/env -S pnpm tsx import 'zx/globals'; import path from 'node:path'; import { parse, print, visit } from 'recast'; import parser from 'recast/parsers/typescript'; function resolvePathAlias(importPath: string, file: string) { if (importPath.startsWith('~/')) { const relativePath = path.relative(path.dirname(file), path.resolve('dist.new', importPath.slice(2))); importPath = relativePath.startsWith('.') ? relativePath : './' + relativePath; } return importPath; } function fixImportPath(importPath: string, file: string, ext: string) { importPath = resolvePathAlias(importPath, file); if (!/\..*\.(js|ts)$/.test(importPath)) { return importPath; } return importPath.replace(/\.(js|ts)$/, ext); } const cjsFiles = await glob('dist.new/**/*.{cjs,d.cts}'); await Promise.all(cjsFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); } this.traverse(path); }, visitCallExpression(path) { if (path.value.callee.type === 'Identifier' && path.value.callee.name === 'require') { path.value.arguments[0].value = fixImportPath(path.value.arguments[0].value, file, '.cjs'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = resolvePathAlias(path.value.argument.value, file); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.cjs'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); const esmFiles = await glob('dist.new/**/*.{js,d.ts}'); await Promise.all(esmFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = fixImportPath(path.value.argument.value, file, '.js'); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.js'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); ================================================ FILE: drizzle-orm/src/alias.ts ================================================ import type { AnyColumn } from './column.ts'; import { Column } from './column.ts'; import { entityKind, is } from './entity.ts'; import type { Relation } from './relations.ts'; import type { View } from './sql/sql.ts'; import { SQL, sql } from './sql/sql.ts'; import { Table } from './table.ts'; import { ViewBaseConfig } from './view-common.ts'; export class ColumnAliasProxyHandler implements ProxyHandler { static readonly [entityKind]: string = 'ColumnAliasProxyHandler'; constructor(private table: Table | View) {} get(columnObj: TColumn, prop: string | symbol): any { if (prop === 'table') { return this.table; } return columnObj[prop as keyof TColumn]; } } export class TableAliasProxyHandler implements ProxyHandler { static readonly [entityKind]: string = 'TableAliasProxyHandler'; constructor(private alias: string, private replaceOriginalName: boolean) {} get(target: T, prop: string | symbol): any { if (prop === Table.Symbol.IsAlias) { return true; } if (prop === Table.Symbol.Name) { return this.alias; } if (this.replaceOriginalName && prop === Table.Symbol.OriginalName) { return this.alias; } if (prop === ViewBaseConfig) { return { ...target[ViewBaseConfig as keyof typeof target], name: this.alias, isAlias: true, }; } if (prop === Table.Symbol.Columns) { const columns = (target as Table)[Table.Symbol.Columns]; if (!columns) { return columns; } const proxiedColumns: { [key: string]: any } = {}; Object.keys(columns).map((key) => { proxiedColumns[key] = new Proxy( columns[key]!, new ColumnAliasProxyHandler(new Proxy(target, this)), ); }); return proxiedColumns; } const value = target[prop as keyof typeof target]; if (is(value, Column)) { return new Proxy(value as AnyColumn, new ColumnAliasProxyHandler(new Proxy(target, this))); } return value; } } export class RelationTableAliasProxyHandler implements ProxyHandler { static readonly [entityKind]: string = 'RelationTableAliasProxyHandler'; constructor(private alias: string) {} get(target: T, prop: string | symbol): any { if (prop === 'sourceTable') { return aliasedTable(target.sourceTable, this.alias); } return target[prop as keyof typeof target]; } } export function aliasedTable( table: T, tableAlias: string, ): T { return new Proxy(table, new TableAliasProxyHandler(tableAlias, false)) as any; } export function aliasedRelation(relation: T, tableAlias: string): T { return new Proxy(relation, new RelationTableAliasProxyHandler(tableAlias)); } export function aliasedTableColumn(column: T, tableAlias: string): T { return new Proxy( column, new ColumnAliasProxyHandler(new Proxy(column.table, new TableAliasProxyHandler(tableAlias, false))), ); } export function mapColumnsInAliasedSQLToAlias(query: SQL.Aliased, alias: string): SQL.Aliased { return new SQL.Aliased(mapColumnsInSQLToAlias(query.sql, alias), query.fieldAlias); } export function mapColumnsInSQLToAlias(query: SQL, alias: string): SQL { return sql.join(query.queryChunks.map((c) => { if (is(c, Column)) { return aliasedTableColumn(c, alias); } if (is(c, SQL)) { return mapColumnsInSQLToAlias(c, alias); } if (is(c, SQL.Aliased)) { return mapColumnsInAliasedSQLToAlias(c, alias); } return c; })); } ================================================ FILE: drizzle-orm/src/aws-data-api/common/index.ts ================================================ import type { Field } from '@aws-sdk/client-rds-data'; import { TypeHint } from '@aws-sdk/client-rds-data'; import type { QueryTypingsValue } from '~/sql/sql.ts'; export function getValueFromDataApi(field: Field) { if (field.stringValue !== undefined) { return field.stringValue; } else if (field.booleanValue !== undefined) { return field.booleanValue; } else if (field.doubleValue !== undefined) { return field.doubleValue; } else if (field.isNull !== undefined) { return null; } else if (field.longValue !== undefined) { return field.longValue; } else if (field.blobValue !== undefined) { return field.blobValue; // eslint-disable-next-line unicorn/no-negated-condition } else if (field.arrayValue !== undefined) { if (field.arrayValue.stringValues !== undefined) { return field.arrayValue.stringValues; } if (field.arrayValue.longValues !== undefined) { return field.arrayValue.longValues; } if (field.arrayValue.doubleValues !== undefined) { return field.arrayValue.doubleValues; } if (field.arrayValue.booleanValues !== undefined) { return field.arrayValue.booleanValues; } if (field.arrayValue.arrayValues !== undefined) { return field.arrayValue.arrayValues; } throw new Error('Unknown array type'); } else { throw new Error('Unknown type'); } } export function typingsToAwsTypeHint(typings?: QueryTypingsValue): TypeHint | undefined { if (typings === 'date') { return TypeHint.DATE; } else if (typings === 'decimal') { return TypeHint.DECIMAL; } else if (typings === 'json') { return TypeHint.JSON; } else if (typings === 'time') { return TypeHint.TIME; } else if (typings === 'timestamp') { return TypeHint.TIMESTAMP; } else if (typings === 'uuid') { return TypeHint.UUID; } else { return undefined; } } export function toValueParam(value: any, typings?: QueryTypingsValue): { value: Field; typeHint?: TypeHint } { const response: { value: Field; typeHint?: TypeHint } = { value: {} as any, typeHint: typingsToAwsTypeHint(typings), }; if (value === null) { response.value = { isNull: true }; } else if (typeof value === 'string') { switch (response.typeHint) { case TypeHint.DATE: { response.value = { stringValue: value.split('T')[0]! }; break; } case TypeHint.TIMESTAMP: { response.value = { stringValue: value.replace('T', ' ').replace('Z', '') }; break; } default: { response.value = { stringValue: value }; break; } } } else if (typeof value === 'number' && Number.isInteger(value)) { response.value = { longValue: value }; } else if (typeof value === 'number' && !Number.isInteger(value)) { response.value = { doubleValue: value }; } else if (typeof value === 'boolean') { response.value = { booleanValue: value }; } else if (value instanceof Date) { // eslint-disable-line no-instanceof/no-instanceof // TODO: check if this clause is needed? Seems like date value always comes as string response.value = { stringValue: value.toISOString().replace('T', ' ').replace('Z', '') }; } else { throw new Error(`Unknown type for ${value}`); } return response; } ================================================ FILE: drizzle-orm/src/aws-data-api/pg/driver.ts ================================================ import { RDSDataClient, type RDSDataClientConfig } from '@aws-sdk/client-rds-data'; import { entityKind, is } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import type { PgColumn, PgInsertConfig, PgTable, TableConfig } from '~/pg-core/index.ts'; import { PgArray } from '~/pg-core/index.ts'; import type { PgRaw } from '~/pg-core/query-builders/raw.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { Param, type SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import { Table } from '~/table.ts'; import type { DrizzleConfig, UpdateSet } from '~/utils.ts'; import type { AwsDataApiClient, AwsDataApiPgQueryResult, AwsDataApiPgQueryResultHKT } from './session.ts'; import { AwsDataApiSession } from './session.ts'; export interface PgDriverOptions { logger?: Logger; cache?: Cache; database: string; resourceArn: string; secretArn: string; } export interface DrizzleAwsDataApiPgConfig< TSchema extends Record = Record, > extends DrizzleConfig { database: string; resourceArn: string; secretArn: string; } export class AwsDataApiPgDatabase< TSchema extends Record = Record, > extends PgDatabase { static override readonly [entityKind]: string = 'AwsDataApiPgDatabase'; override execute< TRow extends Record = Record, >(query: SQLWrapper | string): PgRaw> { return super.execute(query); } } export class AwsPgDialect extends PgDialect { static override readonly [entityKind]: string = 'AwsPgDialect'; override escapeParam(num: number): string { return `:${num + 1}`; } override buildInsertQuery( { table, values, onConflict, returning, select, withList }: PgInsertConfig>, ): SQL { const columns: Record = table[Table.Symbol.Columns]; if (!select) { for (const value of (values as Record[])) { for (const fieldName of Object.keys(columns)) { const colValue = value[fieldName]; if ( is(colValue, Param) && colValue.value !== undefined && is(colValue.encoder, PgArray) && Array.isArray(colValue.value) ) { value[fieldName] = sql`cast(${colValue} as ${sql.raw(colValue.encoder.getSQLType())})`; } } } } return super.buildInsertQuery({ table, values, onConflict, returning, withList }); } override buildUpdateSet(table: PgTable, set: UpdateSet): SQL { const columns: Record = table[Table.Symbol.Columns]; for (const [colName, colValue] of Object.entries(set)) { const currentColumn = columns[colName]; if ( currentColumn && is(colValue, Param) && colValue.value !== undefined && is(colValue.encoder, PgArray) && Array.isArray(colValue.value) ) { set[colName] = sql`cast(${colValue} as ${sql.raw(colValue.encoder.getSQLType())})`; } } return super.buildUpdateSet(table, set); } } function construct = Record>( client: AwsDataApiClient, config: DrizzleAwsDataApiPgConfig, ): AwsDataApiPgDatabase & { $client: AwsDataApiClient; } { const dialect = new AwsPgDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new AwsDataApiSession(client, dialect, schema, { ...config, logger, cache: config.cache }, undefined); const db = new AwsDataApiPgDatabase(dialect, session, schema as any); ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends AwsDataApiClient = RDSDataClient, >( ...params: [ TClient, DrizzleAwsDataApiPgConfig, ] | [ ( | ( & DrizzleConfig & { connection: RDSDataClientConfig & Omit; } ) | ( & DrizzleAwsDataApiPgConfig & { client: TClient; } ) ), ] ): AwsDataApiPgDatabase & { $client: TClient; } { // eslint-disable-next-line no-instanceof/no-instanceof if (params[0] instanceof RDSDataClient || params[0].constructor.name !== 'Object') { return construct(params[0] as TClient, params[1] as DrizzleAwsDataApiPgConfig) as any; } if ((params[0] as { client?: TClient }).client) { const { client, ...drizzleConfig } = params[0] as { client: TClient; } & DrizzleAwsDataApiPgConfig; return construct(client, drizzleConfig) as any; } const { connection, ...drizzleConfig } = params[0] as { connection: RDSDataClientConfig & Omit; } & DrizzleConfig; const { resourceArn, database, secretArn, ...rdsConfig } = connection; const instance = new RDSDataClient(rdsConfig); return construct(instance, { resourceArn, database, secretArn, ...drizzleConfig }) as any; } export namespace drizzle { export function mock = Record>( config: DrizzleAwsDataApiPgConfig, ): AwsDataApiPgDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/aws-data-api/pg/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/aws-data-api/pg/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { AwsDataApiPgDatabase } from './driver.ts'; export async function migrate>( db: AwsDataApiPgDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/aws-data-api/pg/session.ts ================================================ import type { ColumnMetadata, ExecuteStatementCommandOutput, Field, RDSDataClient } from '@aws-sdk/client-rds-data'; import { BeginTransactionCommand, CommitTransactionCommand, ExecuteStatementCommand, RollbackTransactionCommand, } from '@aws-sdk/client-rds-data'; import type { Cache } from '~/cache/core/cache.ts'; import { NoopCache } from '~/cache/core/cache.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { type PgDialect, PgPreparedQuery, type PgQueryResultHKT, PgSession, PgTransaction, type PgTransactionConfig, type PreparedQueryConfig, } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type QueryTypingsValue, type QueryWithTypings, type SQL, sql } from '~/sql/sql.ts'; import { mapResultRow } from '~/utils.ts'; import { getValueFromDataApi, toValueParam } from '../common/index.ts'; export type AwsDataApiClient = RDSDataClient; export class AwsDataApiPreparedQuery< T extends PreparedQueryConfig & { values: AwsDataApiPgQueryResult }, > extends PgPreparedQuery { static override readonly [entityKind]: string = 'AwsDataApiPreparedQuery'; private rawQuery: ExecuteStatementCommand; constructor( private client: AwsDataApiClient, private queryString: string, private params: unknown[], private typings: QueryTypingsValue[], private options: AwsDataApiSessionOptions, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, /** @internal */ readonly transactionId: string | undefined, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => T['execute'], ) { super({ sql: queryString, params }, cache, queryMetadata, cacheConfig); this.rawQuery = new ExecuteStatementCommand({ sql: queryString, parameters: [], secretArn: options.secretArn, resourceArn: options.resourceArn, database: options.database, transactionId, includeResultMetadata: !fields && !customResultMapper, }); } async execute(placeholderValues: Record | undefined = {}): Promise { const { fields, joinsNotNullableMap, customResultMapper } = this; const result = await this.values(placeholderValues); if (!fields && !customResultMapper) { const { columnMetadata, rows } = result; if (!columnMetadata) { return result; } const mappedRows = rows.map((sourceRow) => { const row: Record = {}; for (const [index, value] of sourceRow.entries()) { const metadata = columnMetadata[index]; if (!metadata) { throw new Error( `Unexpected state: no column metadata found for index ${index}. Please report this issue on GitHub: https://github.com/drizzle-team/drizzle-orm/issues/new/choose`, ); } if (!metadata.name) { throw new Error( `Unexpected state: no column name for index ${index} found in the column metadata. Please report this issue on GitHub: https://github.com/drizzle-team/drizzle-orm/issues/new/choose`, ); } row[metadata.name] = value; } return row; }); return Object.assign(result, { rows: mappedRows }); } return customResultMapper ? customResultMapper(result.rows!) : result.rows!.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } async all(placeholderValues?: Record | undefined): Promise { const result = await this.execute(placeholderValues); if (!this.fields && !this.customResultMapper) { return (result as AwsDataApiPgQueryResult).rows; } return result; } async values(placeholderValues: Record = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues ?? {}); this.rawQuery.input.parameters = params.map((param, index) => ({ name: `${index + 1}`, ...toValueParam(param, this.typings[index]), })); this.options.logger?.logQuery(this.rawQuery.input.sql!, this.rawQuery.input.parameters); const result = await this.queryWithCache(this.queryString, params, async () => { return await this.client.send(this.rawQuery); }); const rows = result.records?.map((row) => { return row.map((field) => getValueFromDataApi(field)); }) ?? []; return { ...result, rows, }; } /** @internal */ mapResultRows(records: Field[][], columnMetadata: ColumnMetadata[]) { return records.map((record) => { const row: Record = {}; for (const [index, field] of record.entries()) { const { name } = columnMetadata[index]!; row[name ?? index] = getValueFromDataApi(field); // not what to default if name is undefined } return row; }); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } export interface AwsDataApiSessionOptions { logger?: Logger; cache?: Cache; database: string; resourceArn: string; secretArn: string; } interface AwsDataApiQueryBase { resourceArn: string; secretArn: string; database: string; } export class AwsDataApiSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { static override readonly [entityKind]: string = 'AwsDataApiSession'; /** @internal */ readonly rawQuery: AwsDataApiQueryBase; private cache: Cache; constructor( /** @internal */ readonly client: AwsDataApiClient, dialect: PgDialect, private schema: RelationalSchemaConfig | undefined, private options: AwsDataApiSessionOptions, /** @internal */ readonly transactionId: string | undefined, ) { super(dialect); this.rawQuery = { secretArn: options.secretArn, resourceArn: options.resourceArn, database: options.database, }; this.cache = options.cache ?? new NoopCache(); } prepareQuery< T extends PreparedQueryConfig & { values: AwsDataApiPgQueryResult; } = PreparedQueryConfig & { values: AwsDataApiPgQueryResult; }, >( query: QueryWithTypings, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[] }, cacheConfig?: WithCacheConfig, transactionId?: string, ): AwsDataApiPreparedQuery { return new AwsDataApiPreparedQuery( this.client, query.sql, query.params, query.typings ?? [], this.options, this.cache, queryMetadata, cacheConfig, fields, transactionId ?? this.transactionId, isResponseInArrayMode, customResultMapper, ); } override execute(query: SQL): Promise { return this.prepareQuery }>( this.dialect.sqlToQuery(query), undefined, undefined, false, undefined, undefined, undefined, this.transactionId, ).execute(); } override async transaction( transaction: (tx: AwsDataApiTransaction) => Promise, config?: PgTransactionConfig | undefined, ): Promise { const { transactionId } = await this.client.send(new BeginTransactionCommand(this.rawQuery)); const session = new AwsDataApiSession(this.client, this.dialect, this.schema, this.options, transactionId); const tx = new AwsDataApiTransaction(this.dialect, session, this.schema); if (config) { await tx.setTransaction(config); } try { const result = await transaction(tx); await this.client.send(new CommitTransactionCommand({ ...this.rawQuery, transactionId })); return result; } catch (e) { await this.client.send(new RollbackTransactionCommand({ ...this.rawQuery, transactionId })); throw e; } } } export class AwsDataApiTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { static override readonly [entityKind]: string = 'AwsDataApiTransaction'; override async transaction( transaction: (tx: AwsDataApiTransaction) => Promise, ): Promise { const savepointName = `sp${this.nestedIndex + 1}`; const tx = new AwsDataApiTransaction( this.dialect, this.session, this.schema, this.nestedIndex + 1, ); await this.session.execute(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await this.session.execute(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (e) { await this.session.execute(sql.raw(`rollback to savepoint ${savepointName}`)); throw e; } } } export type AwsDataApiPgQueryResult = ExecuteStatementCommandOutput & { rows: T[] }; export interface AwsDataApiPgQueryResultHKT extends PgQueryResultHKT { type: AwsDataApiPgQueryResult; } ================================================ FILE: drizzle-orm/src/batch.ts ================================================ import type { Dialect } from './column-builder.ts'; import type { RunnableQuery } from './runnable-query.ts'; export type BatchItem = RunnableQuery; export type BatchResponse = { [K in keyof T]: T[K]['_']['result']; }; ================================================ FILE: drizzle-orm/src/better-sqlite3/driver.ts ================================================ import Client, { type Database, type Options, type RunResult } from 'better-sqlite3'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { BetterSQLiteSession } from './session.ts'; export type DrizzleBetterSQLite3DatabaseConfig = | ({ source?: | string | Buffer; } & Options) | string | undefined; export class BetterSQLite3Database = Record> extends BaseSQLiteDatabase<'sync', RunResult, TSchema> { static override readonly [entityKind]: string = 'BetterSQLite3Database'; } function construct = Record>( client: Database, config: Omit, 'cache'> = {}, ): BetterSQLite3Database & { $client: Database; } { const dialect = new SQLiteSyncDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new BetterSQLiteSession(client, dialect, schema, { logger }); const db = new BetterSQLite3Database('sync', dialect, session, schema); ( db).$client = client; // ( db).$cache = config.cache; // if (( db).$cache) { // ( db).$cache['invalidate'] = config.cache?.onMutate; // } return db as any; } export function drizzle< TSchema extends Record = Record, >( ...params: | [] | [ Database | string, ] | [ Database | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection?: DrizzleBetterSQLite3DatabaseConfig; } | { client: Database; }) ), ] ): BetterSQLite3Database & { $client: Database; } { if (params[0] === undefined || typeof params[0] === 'string') { const instance = params[0] === undefined ? new Client() : new Client(params[0]); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: DrizzleBetterSQLite3DatabaseConfig; client?: Database; } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; if (typeof connection === 'object') { const { source, ...options } = connection; const instance = new Client(source, options); return construct(instance, drizzleConfig) as any; } const instance = new Client(connection); return construct(instance, drizzleConfig) as any; } return construct(params[0] as Database, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): BetterSQLite3Database & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/better-sqlite3/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/better-sqlite3/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { BetterSQLite3Database } from './driver.ts'; export function migrate>( db: BetterSQLite3Database, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/better-sqlite3/session.ts ================================================ import type { Database, RunResult, Statement } from 'better-sqlite3'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; import type { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import { SQLiteTransaction } from '~/sqlite-core/index.ts'; import type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts'; import { type PreparedQueryConfig as PreparedQueryConfigBase, type SQLiteExecuteMethod, SQLitePreparedQuery as PreparedQueryBase, SQLiteSession, type SQLiteTransactionConfig, } from '~/sqlite-core/session.ts'; import { mapResultRow } from '~/utils.ts'; export interface BetterSQLiteSessionOptions { logger?: Logger; cache?: Cache; } type PreparedQueryConfig = Omit; export class BetterSQLiteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'sync', RunResult, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'BetterSQLiteSession'; private logger: Logger; private cache: Cache; constructor( private client: Database, dialect: SQLiteSyncDialect, private schema: RelationalSchemaConfig | undefined, options: BetterSQLiteSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery>( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => unknown, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PreparedQuery { const stmt = this.client.prepare(query.sql); return new PreparedQuery( stmt, query, this.logger, this.cache, queryMetadata, cacheConfig, fields, executeMethod, isResponseInArrayMode, customResultMapper, ); } override transaction( transaction: (tx: BetterSQLiteTransaction) => T, config: SQLiteTransactionConfig = {}, ): T { const tx = new BetterSQLiteTransaction('sync', this.dialect, this, this.schema); const nativeTx = this.client.transaction(transaction); return nativeTx[config.behavior ?? 'deferred'](tx); } } export class BetterSQLiteTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'sync', RunResult, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'BetterSQLiteTransaction'; override transaction(transaction: (tx: BetterSQLiteTransaction) => T): T { const savepointName = `sp${this.nestedIndex}`; const tx = new BetterSQLiteTransaction('sync', this.dialect, this.session, this.schema, this.nestedIndex + 1); this.session.run(sql.raw(`savepoint ${savepointName}`)); try { const result = transaction(tx); this.session.run(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { this.session.run(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export class PreparedQuery extends PreparedQueryBase< { type: 'sync'; run: RunResult; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { static override readonly [entityKind]: string = 'BetterSQLitePreparedQuery'; constructor( private stmt: Statement, query: Query, private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => unknown, ) { super('sync', executeMethod, query, cache, queryMetadata, cacheConfig); } run(placeholderValues?: Record): RunResult { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return this.stmt.run(...params); } all(placeholderValues?: Record): T['all'] { const { fields, joinsNotNullableMap, query, logger, stmt, customResultMapper } = this; if (!fields && !customResultMapper) { const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); return stmt.all(...params); } const rows = this.values(placeholderValues) as unknown[][]; if (customResultMapper) { return customResultMapper(rows) as T['all']; } return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } get(placeholderValues?: Record): T['get'] { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); const { fields, stmt, joinsNotNullableMap, customResultMapper } = this; if (!fields && !customResultMapper) { return stmt.get(...params); } const row = stmt.raw().get(...params) as unknown[]; if (!row) { return undefined; } if (customResultMapper) { return customResultMapper([row]) as T['get']; } return mapResultRow(fields!, row, joinsNotNullableMap); } values(placeholderValues?: Record): T['values'] { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return this.stmt.raw().all(...params) as T['values']; } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } ================================================ FILE: drizzle-orm/src/bun-sql/driver.ts ================================================ /// import type { SQLOptions } from 'bun'; import { SQL } from 'bun'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import type { BunSQLQueryResultHKT } from './session.ts'; import { BunSQLSession } from './session.ts'; export class BunSQLDatabase< TSchema extends Record = Record, > extends PgDatabase { static override readonly [entityKind]: string = 'BunSQLDatabase'; } function construct = Record>( client: SQL, config: DrizzleConfig = {}, ): BunSQLDatabase & { $client: SQL; } { const dialect = new PgDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new BunSQLSession(client, dialect, schema, { logger, cache: config.cache }); const db = new BunSQLDatabase(dialect, session, schema as any) as BunSQLDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends SQL = SQL, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | ({ url?: string } & SQLOptions); } | { client: TClient; }) ), ] ): BunSQLDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = new SQL(params[0]); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as { connection?: { url?: string } & SQLOptions; client?: TClient; } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; if (typeof connection === 'object' && connection.url !== undefined) { const { url, ...config } = connection; const instance = new SQL({ url, ...config }); return construct(instance, drizzleConfig) as any; } const instance = new SQL(connection); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): BunSQLDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({ options: { parsers: {}, serializers: {}, }, } as any, config) as any; } } ================================================ FILE: drizzle-orm/src/bun-sql/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/bun-sql/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { BunSQLDatabase } from './driver.ts'; export async function migrate>( db: BunSQLDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/bun-sql/session.ts ================================================ /// import type { SavepointSQL, SQL, TransactionSQL } from 'bun'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query } from '~/sql/sql.ts'; import { tracer } from '~/tracing.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; export class BunSQLPreparedQuery extends PgPreparedQuery { static override readonly [entityKind]: string = 'BunSQLPreparedQuery'; constructor( private client: SQL, private queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => T['execute'], ) { super({ sql: queryString, params }, cache, queryMetadata, cacheConfig); } async execute(placeholderValues: Record | undefined = {}): Promise { return tracer.startActiveSpan('drizzle.execute', async (span) => { const params = fillPlaceholders(this.params, placeholderValues); span?.setAttributes({ 'drizzle.query.text': this.queryString, 'drizzle.query.params': JSON.stringify(params), }); this.logger.logQuery(this.queryString, params); const { fields, queryString: query, client, joinsNotNullableMap, customResultMapper } = this; if (!fields && !customResultMapper) { return tracer.startActiveSpan('drizzle.driver.execute', async () => { return await this.queryWithCache(query, params, async () => { return await client.unsafe(query, params as any[]); }); }); } const rows: any[] = await tracer.startActiveSpan('drizzle.driver.execute', async () => { span?.setAttributes({ 'drizzle.query.text': query, 'drizzle.query.params': JSON.stringify(params), }); return await this.queryWithCache(query, params, async () => { return client.unsafe(query, params as any[]).values(); }); }); return tracer.startActiveSpan('drizzle.mapResponse', () => { return customResultMapper ? customResultMapper(rows) : rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); }); }); } all(placeholderValues: Record | undefined = {}): Promise { return tracer.startActiveSpan('drizzle.execute', async (span) => { const params = fillPlaceholders(this.params, placeholderValues); span?.setAttributes({ 'drizzle.query.text': this.queryString, 'drizzle.query.params': JSON.stringify(params), }); this.logger.logQuery(this.queryString, params); return tracer.startActiveSpan('drizzle.driver.execute', async () => { span?.setAttributes({ 'drizzle.query.text': this.queryString, 'drizzle.query.params': JSON.stringify(params), }); return await this.queryWithCache(this.queryString, params, async () => { return await this.client.unsafe(this.queryString, params as any[]); }); }); }); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } export interface BunSQLSessionOptions { logger?: Logger; cache?: Cache; } export class BunSQLSession< TSQL extends SQL, TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { static override readonly [entityKind]: string = 'BunSQLSession'; logger: Logger; private cache: Cache; constructor( public client: TSQL, dialect: PgDialect, private schema: RelationalSchemaConfig | undefined, /** @internal */ readonly options: BunSQLSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PgPreparedQuery { return new BunSQLPreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, isResponseInArrayMode, customResultMapper, ); } query(query: string, params: unknown[]): Promise { this.logger.logQuery(query, params); return this.client.unsafe(query, params as any[]).values(); } queryObjects( query: string, params: unknown[], ): Promise { return this.client.unsafe(query, params as any[]); } override transaction( transaction: (tx: BunSQLTransaction) => Promise, config?: PgTransactionConfig, ): Promise { return this.client.begin(async (client) => { const session = new BunSQLSession( client, this.dialect, this.schema, this.options, ); const tx = new BunSQLTransaction(this.dialect, session, this.schema); if (config) { await tx.setTransaction(config); } return transaction(tx); }) as Promise; } } export class BunSQLTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { static override readonly [entityKind]: string = 'BunSQLTransaction'; constructor( dialect: PgDialect, /** @internal */ override readonly session: BunSQLSession, schema: RelationalSchemaConfig | undefined, nestedIndex = 0, ) { super(dialect, session, schema, nestedIndex); } override transaction( transaction: (tx: BunSQLTransaction) => Promise, ): Promise { return (this.session.client as TransactionSQL).savepoint((client: SQL) => { const session = new BunSQLSession( client, this.dialect, this.schema, this.session.options, ); const tx = new BunSQLTransaction(this.dialect, session, this.schema); return transaction(tx); }) as Promise; } } export interface BunSQLQueryResultHKT extends PgQueryResultHKT { type: Assume[]>; } ================================================ FILE: drizzle-orm/src/bun-sqlite/driver.ts ================================================ /// import { Database } from 'bun:sqlite'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { SQLiteBunSession } from './session.ts'; export class BunSQLiteDatabase< TSchema extends Record = Record, > extends BaseSQLiteDatabase<'sync', void, TSchema> { static override readonly [entityKind]: string = 'BunSQLiteDatabase'; } type DrizzleBunSqliteDatabaseOptions = { /** * Open the database as read-only (no write operations, no create). * * Equivalent to {@link constants.SQLITE_OPEN_READONLY} */ readonly?: boolean; /** * Allow creating a new database * * Equivalent to {@link constants.SQLITE_OPEN_CREATE} */ create?: boolean; /** * Open the database as read-write * * Equivalent to {@link constants.SQLITE_OPEN_READWRITE} */ readwrite?: boolean; }; export type DrizzleBunSqliteDatabaseConfig = | ({ source?: string; } & DrizzleBunSqliteDatabaseOptions) | string | undefined; function construct = Record>( client: Database, config: DrizzleConfig = {}, ): BunSQLiteDatabase & { $client: Database; } { const dialect = new SQLiteSyncDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new SQLiteBunSession(client, dialect, schema, { logger }); const db = new BunSQLiteDatabase('sync', dialect, session, schema) as BunSQLiteDatabase; ( db).$client = client; return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends Database = Database, >( ...params: | [] | [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection?: DrizzleBunSqliteDatabaseConfig; } | { client: TClient; }) ), ] ): BunSQLiteDatabase & { $client: TClient; } { if (params[0] === undefined || typeof params[0] === 'string') { const instance = params[0] === undefined ? new Database() : new Database(params[0]); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & ({ connection?: DrizzleBunSqliteDatabaseConfig | string; client?: TClient; }) & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; if (typeof connection === 'object') { const { source, ...opts } = connection; const options = Object.values(opts).filter((v) => v !== undefined).length ? opts : undefined; const instance = new Database(source, options); return construct(instance, drizzleConfig) as any; } const instance = new Database(connection); return construct(instance, drizzleConfig) as any; } return construct(params[0] as Database, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): BunSQLiteDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/bun-sqlite/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/bun-sqlite/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { BunSQLiteDatabase } from './driver.ts'; export function migrate>( db: BunSQLiteDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/bun-sqlite/session.ts ================================================ /// import type { Database, Statement as BunStatement } from 'bun:sqlite'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; import type { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import { SQLiteTransaction } from '~/sqlite-core/index.ts'; import type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts'; import type { PreparedQueryConfig as PreparedQueryConfigBase, SQLiteExecuteMethod, SQLiteTransactionConfig, } from '~/sqlite-core/session.ts'; import { SQLitePreparedQuery as PreparedQueryBase, SQLiteSession } from '~/sqlite-core/session.ts'; import { mapResultRow } from '~/utils.ts'; export interface SQLiteBunSessionOptions { logger?: Logger; } type PreparedQueryConfig = Omit; type Statement = BunStatement; export class SQLiteBunSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'sync', void, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'SQLiteBunSession'; private logger: Logger; constructor( private client: Database, dialect: SQLiteSyncDialect, private schema: RelationalSchemaConfig | undefined, options: SQLiteBunSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); } exec(query: string): void { this.client.exec(query); } prepareQuery>( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => unknown, ): PreparedQuery { const stmt = this.client.prepare(query.sql); return new PreparedQuery( stmt, query, this.logger, fields, executeMethod, isResponseInArrayMode, customResultMapper, ); } override transaction( transaction: (tx: SQLiteBunTransaction) => T, config: SQLiteTransactionConfig = {}, ): T { const tx = new SQLiteBunTransaction('sync', this.dialect, this, this.schema); let result: T | undefined; const nativeTx = this.client.transaction(() => { result = transaction(tx); }); nativeTx[config.behavior ?? 'deferred'](); return result!; } } export class SQLiteBunTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'sync', void, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'SQLiteBunTransaction'; override transaction(transaction: (tx: SQLiteBunTransaction) => T): T { const savepointName = `sp${this.nestedIndex}`; const tx = new SQLiteBunTransaction('sync', this.dialect, this.session, this.schema, this.nestedIndex + 1); this.session.run(sql.raw(`savepoint ${savepointName}`)); try { const result = transaction(tx); this.session.run(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { this.session.run(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export class PreparedQuery extends PreparedQueryBase< { type: 'sync'; run: void; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { static override readonly [entityKind]: string = 'SQLiteBunPreparedQuery'; constructor( private stmt: Statement, query: Query, private logger: Logger, private fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => unknown, ) { super('sync', executeMethod, query); } run(placeholderValues?: Record) { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return this.stmt.run(...params); } all(placeholderValues?: Record): T['all'] { const { fields, query, logger, joinsNotNullableMap, stmt, customResultMapper } = this; if (!fields && !customResultMapper) { const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); return stmt.all(...params); } const rows = this.values(placeholderValues) as unknown[][]; if (customResultMapper) { return customResultMapper(rows) as T['all']; } return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } get(placeholderValues?: Record): T['get'] { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); const row = this.stmt.values(...params)[0]; if (!row) { return undefined; } const { fields, joinsNotNullableMap, customResultMapper } = this; if (!fields && !customResultMapper) { return row; } if (customResultMapper) { return customResultMapper([row]) as T['get']; } return mapResultRow(fields!, row, joinsNotNullableMap); } values(placeholderValues?: Record): T['values'] { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return this.stmt.values(...params); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } ================================================ FILE: drizzle-orm/src/cache/core/cache.ts ================================================ import { entityKind } from '~/entity.ts'; import type { Table } from '~/index.ts'; import type { CacheConfig } from './types.ts'; export abstract class Cache { static readonly [entityKind]: string = 'Cache'; abstract strategy(): 'explicit' | 'all'; /** * Invoked if we should check cache for cached response * @param sql * @param tables */ abstract get( key: string, tables: string[], isTag: boolean, isAutoInvalidate?: boolean, ): Promise; /** * Invoked if new query should be inserted to cache * @param sql * @param tables */ abstract put( hashedQuery: string, response: any, tables: string[], isTag: boolean, config?: CacheConfig, ): Promise; /** * Invoked if insert, update, delete was invoked * @param tables */ abstract onMutate( params: MutationOption, ): Promise; } export class NoopCache extends Cache { override strategy() { return 'all' as const; } static override readonly [entityKind]: string = 'NoopCache'; override async get(_key: string): Promise { return undefined; } override async put( _hashedQuery: string, _response: any, _tables: string[], _config?: any, ): Promise { // noop } override async onMutate(_params: MutationOption): Promise { // noop } } export type MutationOption = { tags?: string | string[]; tables?: Table | Table[] | string | string[] }; export async function hashQuery(sql: string, params?: any[]) { const dataToHash = `${sql}-${JSON.stringify(params)}`; const encoder = new TextEncoder(); const data = encoder.encode(dataToHash); const hashBuffer = await crypto.subtle.digest('SHA-256', data); const hashArray = [...new Uint8Array(hashBuffer)]; const hashHex = hashArray.map((b) => b.toString(16).padStart(2, '0')).join(''); return hashHex; } ================================================ FILE: drizzle-orm/src/cache/core/index.ts ================================================ export * from './cache.ts'; ================================================ FILE: drizzle-orm/src/cache/core/types.ts ================================================ export type CacheConfig = { /** * expire time, in seconds (a positive integer) */ ex?: number; /** * expire time, in milliseconds (a positive integer). */ px?: number; /** * Unix time at which the key will expire, in seconds (a positive integer). */ exat?: number; /** * Unix time at which the key will expire, in milliseconds (a positive integer) */ pxat?: number; /** * Retain the time to live associated with the key. */ keepTtl?: boolean; /** * Set an expiration (TTL or time to live) on one or more fields of a given hash key. * Used for HEXPIRE command */ hexOptions?: 'NX' | 'nx' | 'XX' | 'xx' | 'GT' | 'gt' | 'LT' | 'lt'; }; export type WithCacheConfig = { enable: boolean; config?: CacheConfig; tag?: string; autoInvalidate?: boolean }; ================================================ FILE: drizzle-orm/src/cache/readme.md ================================================ ## Caching with Drizzle By default, Drizzle does not perform any implicit actions with your queries and mapping. There is no cache under the hood—each query is sent directly to your database, and you can actually see it. However, there are cases when you might want to implement a simple caching logic for specific queries or even for all queries. With Drizzle's cache option, you can define how and when the cache is used, how you store and retrieve data, and what actions to take when write statements are executed on the database. It's basically similar to `beforeQuery` hooks, that will be invoked before actual query will be executed. Additionally, Drizzle provides predefined logic for caching. Let's take a look at it. To make cache work you would need to define cache callbacks in drizzle instance or use a predefined ones we have in Drizzle, like a `upstashCache()` that was built together with Upstash team ### Cache overview **Using upstash cache with drizzle** ```ts const db = drizzle(process.env.DB_URL!, { cache: upstashCache() }) ``` You can also define custom logic for your cache behavior. This is an example of our NodeKV implementation for the Drizzle cache test suites ```ts const db = drizzle(process.env.DB_URL!, { cache: new TestGlobalCache() }) ``` ```ts import Keyv from 'keyv'; export class TestGlobalCache extends Cache { private globalTtl: number = 1000; // This object will be used to store which query keys were used // for a specific table, so we can later use it for invalidation. private usedTablesPerKey: Record = {}; constructor(private kv: Keyv = new Keyv()) { super(); } // For the strategy, we have two options: // - 'explicit': The cache is used only when .$withCache() is added to a query. // - 'all': All queries are cached globally. // The default behavior is 'explicit'. override strategy(): 'explicit' | 'all' { return 'all'; } // This function accepts query and parameters that cached into key param, // allowing you to retrieve response values for this query from the cache. override async get(key: string): Promise { const res = await this.kv.get(key) ?? undefined; return res; } // This function accepts several options to define how cached data will be stored: // - 'key': A hashed query and parameters. // - 'response': An array of values returned by Drizzle from the database. // - 'tables': An array of tables involved in the select queries. This information is needed for cache invalidation. // // For example, if a query uses the "users" and "posts" tables, you can store this information. Later, when the app executes // any mutation statements on these tables, you can remove the corresponding key from the cache. // If you're okay with eventual consistency for your queries, you can skip this option. override async put(key: string, response: any, tables: string[], config?: CacheConfig): Promise { await this.kv.set(key, response, config ? config.ex : this.globalTtl); for (const table of tables) { const keys = this.usedTablesPerKey[table]; if (keys === undefined) { this.usedTablesPerKey[table] = [key]; } else { keys.push(key); } } } // This function is called when insert, update, or delete statements are executed. // You can either skip this step or invalidate queries that used the affected tables. // // The function receives an object with two keys: // - 'tags': Used for queries labeled with a specific tag, allowing you to invalidate by that tag. // - 'tables': The actual tables affected by the insert, update, or delete statements, // helping you track which tables have changed since the last cache update. override async onMutate(params: { tags: string | string[], tables: string | string[] | Table | Table[]}): Promise { const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; const keysToDelete = new Set(); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; const keys = this.usedTablesPerKey[tableName] ?? []; for (const key of keys) keysToDelete.add(key); } if (keysToDelete.size > 0 || tagsArray.length > 0) { for (const tag of tagsArray) { await this.kv.delete(tag); } for (const key of keysToDelete) { await this.kv.delete(key); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; this.usedTablesPerKey[tableName] = []; } } } } } ``` ### Cache definition **Define cache credentials, but no cache will be used globally for all queries** ```ts const db = drizzle(process.env.DB_URL!, { cache: upstashCache({ url: '', token: '' }) }) ``` **Define cache credentials, and the cache will be used globally for all queries** ```ts const db = drizzle(process.env.DB_URL!, { cache: upstashCache({ url: '', token: '', global: true }) }) ``` **Define cache credentials with custom config values to be used for all queries, unless overridden** ```ts const db = drizzle(process.env.DB_URL!, { cache: upstashCache({ url: '', token: '', global: true, config: {} }) }) ``` These are all the possible config values that Drizzle supports with the cache layer ```ts export type CacheConfig = { /** * expire time, in seconds (a positive integer) */ ex?: number; /** * expire time, in milliseconds (a positive integer). */ px?: number; /** * Unix time at which the key will expire, in seconds (a positive integer). */ exat?: number; /** * Unix time at which the key will expire, in milliseconds (a positive integer) */ pxat?: number; /** * Retain the time to live associated with the key. */ keepTtl?: boolean; }; ``` ### Cache usage Once you've provided all the necessary instructions to the Drizzle database instance, you can now use the cache with Drizzle **Case 1: Drizzle with global: false option** ```ts const db = drizzle(process.env.DB_URL!, { cache: upstashCache({ url: '', token: '' }) }) ``` In this case, the current query won't use the cache ```ts const res = await db.select().from(users) // However, any mutate operation will trigger the onMutate function in the cache // and attempt to invalidate queries that used the tables involved in this mutation query. await db.insert(users).value({ email: 'cacheman@upstash.com' }) ``` If you want the query to actually use the cache, you need to call `.$withCache()` ```ts const res = await db.select().from(users).$withCache() ``` `.$withCache` has a set of options you can use to manage and config this specific query strategy ```ts // rewrite the global config options for this specific query .$withCache({ config: {} }) // give a query custom cache key instead of hashing query+params under the hood .$withCache({ tag: 'custom_key' }) // disable autoinvalidation for this query, if you are fine with eventual consstnecy for this specific query .$withCache({ autoInvalidate: false }) ``` **Case 2: Drizzle with global: true option** ```ts const db = drizzle(process.env.DB_URL!, { cache: upstashCache({ url: '', token: '', global: true }) }) ``` In this case, the current query will use the cache ```ts const res = await db.select().from(users) ``` If you want the query to disable cache for some specific query, you need to call `.$withCache(false)` ```ts // cache is disabled for this query const res = await db.select().from(users).$withCache(false) ``` You can also use cache instance from a `db` to force invalidate specific tables or tags you've defined previously ```ts // Invalidate all queries that use the `users` table. You can do this with the Drizzle instance. await db.$cache?.invalidate({ tables: users }); // or await db.$cache?.invalidate({ tables: [users, posts] }); // Invalidate all queries that use the `usersTable`. You can do this by using just the table name. await db.$cache?.invalidate({ tables: 'usersTable' }); // or await db.$cache?.invalidate({ tables: ['usersTable' , 'postsTable' ] }); // You can also invalidate custom tags defined in any previously executed select queries. await db.$cache?.invalidate({ tags: 'custom_key' }); // or await db.$cache?.invalidate({ tags: ['custom_key', 'custom_key1'] }); ``` ================================================ FILE: drizzle-orm/src/cache/upstash/cache.ts ================================================ import { Redis } from '@upstash/redis'; import type { MutationOption } from '~/cache/core/index.ts'; import { Cache } from '~/cache/core/index.ts'; import { entityKind, is } from '~/entity.ts'; import { OriginalName, Table } from '~/index.ts'; import type { CacheConfig } from '../core/types.ts'; const getByTagScript = ` local tagsMapKey = KEYS[1] -- tags map key local tag = ARGV[1] -- tag local compositeTableName = redis.call('HGET', tagsMapKey, tag) if not compositeTableName then return nil end local value = redis.call('HGET', compositeTableName, tag) return value `; const onMutateScript = ` local tagsMapKey = KEYS[1] -- tags map key local tables = {} -- initialize tables array local tags = ARGV -- tags array for i = 2, #KEYS do tables[#tables + 1] = KEYS[i] -- add all keys except the first one to tables end if #tags > 0 then for _, tag in ipairs(tags) do if tag ~= nil and tag ~= '' then local compositeTableName = redis.call('HGET', tagsMapKey, tag) if compositeTableName then redis.call('HDEL', compositeTableName, tag) end end end redis.call('HDEL', tagsMapKey, unpack(tags)) end local keysToDelete = {} if #tables > 0 then local compositeTableNames = redis.call('SUNION', unpack(tables)) for _, compositeTableName in ipairs(compositeTableNames) do keysToDelete[#keysToDelete + 1] = compositeTableName end for _, table in ipairs(tables) do keysToDelete[#keysToDelete + 1] = table end redis.call('DEL', unpack(keysToDelete)) end `; type Script = ReturnType; type ExpireOptions = 'NX' | 'nx' | 'XX' | 'xx' | 'GT' | 'gt' | 'LT' | 'lt'; export class UpstashCache extends Cache { static override readonly [entityKind]: string = 'UpstashCache'; /** * Prefix for sets which denote the composite table names for each unique table * * Example: In the composite table set of "table1", you may find * `${compositeTablePrefix}table1,table2` and `${compositeTablePrefix}table1,table3` */ private static compositeTableSetPrefix = '__CTS__'; /** * Prefix for hashes which map hash or tags to cache values */ private static compositeTablePrefix = '__CT__'; /** * Key which holds the mapping of tags to composite table names * * Using this tagsMapKey, you can find the composite table name for a given tag * and get the cache value for that tag: * * ```ts * const compositeTable = redis.hget(tagsMapKey, 'tag1') * console.log(compositeTable) // `${compositeTablePrefix}table1,table2` * * const cachevalue = redis.hget(compositeTable, 'tag1') */ private static tagsMapKey = '__tagsMap__'; /** * Queries whose auto invalidation is false aren't stored in their respective * composite table hashes because those hashes are deleted when a mutation * occurs on related tables. * * Instead, they are stored in a separate hash with the prefix * `__nonAutoInvalidate__` to prevent them from being deleted when a mutation */ private static nonAutoInvalidateTablePrefix = '__nonAutoInvalidate__'; private luaScripts: { getByTagScript: Script; onMutateScript: Script; }; private internalConfig: { seconds: number; hexOptions?: ExpireOptions }; constructor(public redis: Redis, config?: CacheConfig, protected useGlobally?: boolean) { super(); this.internalConfig = this.toInternalConfig(config); this.luaScripts = { getByTagScript: this.redis.createScript(getByTagScript, { readonly: true }), onMutateScript: this.redis.createScript(onMutateScript), }; } public strategy() { return this.useGlobally ? 'all' : 'explicit'; } private toInternalConfig(config?: CacheConfig): { seconds: number; hexOptions?: ExpireOptions } { return config ? { seconds: config.ex!, hexOptions: config.hexOptions, } : { seconds: 1, }; } override async get( key: string, tables: string[], isTag: boolean = false, isAutoInvalidate?: boolean, ): Promise { if (!isAutoInvalidate) { const result = await this.redis.hget(UpstashCache.nonAutoInvalidateTablePrefix, key); return result === null ? undefined : result as any[]; } if (isTag) { const result = await this.luaScripts.getByTagScript.exec([UpstashCache.tagsMapKey], [key]); return result === null ? undefined : result as any[]; } // Normal cache lookup for the composite key const compositeKey = this.getCompositeKey(tables); const result = await this.redis.hget(compositeKey, key) ?? undefined; // Retrieve result for normal query return result === null ? undefined : result as any[]; } override async put( key: string, response: any, tables: string[], isTag: boolean = false, config?: CacheConfig, ): Promise { const isAutoInvalidate = tables.length !== 0; const pipeline = this.redis.pipeline(); const ttlSeconds = config && config.ex ? config.ex : this.internalConfig.seconds; const hexOptions = config && config.hexOptions ? config.hexOptions : this.internalConfig?.hexOptions; if (!isAutoInvalidate) { if (isTag) { pipeline.hset(UpstashCache.tagsMapKey, { [key]: UpstashCache.nonAutoInvalidateTablePrefix }); pipeline.hexpire(UpstashCache.tagsMapKey, key, ttlSeconds, hexOptions); } pipeline.hset(UpstashCache.nonAutoInvalidateTablePrefix, { [key]: response }); pipeline.hexpire(UpstashCache.nonAutoInvalidateTablePrefix, key, ttlSeconds, hexOptions); await pipeline.exec(); return; } const compositeKey = this.getCompositeKey(tables); pipeline.hset(compositeKey, { [key]: response }); // Store the result with the tag under the composite key pipeline.hexpire(compositeKey, key, ttlSeconds, hexOptions); // Set expiration for the composite key if (isTag) { pipeline.hset(UpstashCache.tagsMapKey, { [key]: compositeKey }); // Store the tag and its composite key in the map pipeline.hexpire(UpstashCache.tagsMapKey, key, ttlSeconds, hexOptions); // Set expiration for the tag } for (const table of tables) { pipeline.sadd(this.addTablePrefix(table), compositeKey); } await pipeline.exec(); } override async onMutate(params: MutationOption) { const tags = Array.isArray(params.tags) ? params.tags : params.tags ? [params.tags] : []; const tables = Array.isArray(params.tables) ? params.tables : params.tables ? [params.tables] : []; const tableNames: string[] = tables.map((table) => is(table, Table) ? table[OriginalName] : table as string); const compositeTableSets = tableNames.map((table) => this.addTablePrefix(table)); await this.luaScripts.onMutateScript.exec([UpstashCache.tagsMapKey, ...compositeTableSets], tags); } private addTablePrefix = (table: string) => `${UpstashCache.compositeTableSetPrefix}${table}`; private getCompositeKey = (tables: string[]) => `${UpstashCache.compositeTablePrefix}${tables.sort().join(',')}`; } export function upstashCache( { url, token, config, global = false }: { url: string; token: string; config?: CacheConfig; global?: boolean }, ): UpstashCache { const redis = new Redis({ url, token, }); return new UpstashCache(redis, config, global); } ================================================ FILE: drizzle-orm/src/cache/upstash/index.ts ================================================ export * from './cache.ts'; ================================================ FILE: drizzle-orm/src/casing.ts ================================================ import type { Column } from '~/column.ts'; import { entityKind } from './entity.ts'; import { Table } from './table.ts'; import type { Casing } from './utils.ts'; export function toSnakeCase(input: string) { const words = input .replace(/['\u2019]/g, '') .match(/[\da-z]+|[A-Z]+(?![a-z])|[A-Z][\da-z]+/g) ?? []; return words.map((word) => word.toLowerCase()).join('_'); } export function toCamelCase(input: string) { const words = input .replace(/['\u2019]/g, '') .match(/[\da-z]+|[A-Z]+(?![a-z])|[A-Z][\da-z]+/g) ?? []; return words.reduce((acc, word, i) => { const formattedWord = i === 0 ? word.toLowerCase() : `${word[0]!.toUpperCase()}${word.slice(1)}`; return acc + formattedWord; }, ''); } function noopCase(input: string) { return input; } export class CasingCache { static readonly [entityKind]: string = 'CasingCache'; /** @internal */ cache: Record = {}; private cachedTables: Record = {}; private convert: (input: string) => string; constructor(casing?: Casing) { this.convert = casing === 'snake_case' ? toSnakeCase : casing === 'camelCase' ? toCamelCase : noopCase; } getColumnCasing(column: Column): string { if (!column.keyAsName) return column.name; const schema = column.table[Table.Symbol.Schema] ?? 'public'; const tableName = column.table[Table.Symbol.OriginalName]; const key = `${schema}.${tableName}.${column.name}`; if (!this.cache[key]) { this.cacheTable(column.table); } return this.cache[key]!; } private cacheTable(table: Table) { const schema = table[Table.Symbol.Schema] ?? 'public'; const tableName = table[Table.Symbol.OriginalName]; const tableKey = `${schema}.${tableName}`; if (!this.cachedTables[tableKey]) { for (const column of Object.values(table[Table.Symbol.Columns])) { const columnKey = `${tableKey}.${column.name}`; this.cache[columnKey] = this.convert(column.name); } this.cachedTables[tableKey] = true; } } clearCache() { this.cache = {}; this.cachedTables = {}; } } ================================================ FILE: drizzle-orm/src/column-builder.ts ================================================ import { entityKind } from '~/entity.ts'; import type { Column } from './column.ts'; import type { GelColumn, GelExtraConfigColumn } from './gel-core/index.ts'; import type { MySqlColumn } from './mysql-core/index.ts'; import type { ExtraConfigColumn, PgColumn, PgSequenceOptions } from './pg-core/index.ts'; import type { SingleStoreColumn } from './singlestore-core/index.ts'; import type { SQL } from './sql/sql.ts'; import type { SQLiteColumn } from './sqlite-core/index.ts'; import type { Assume, Simplify } from './utils.ts'; export type ColumnDataType = | 'string' | 'number' | 'boolean' | 'array' | 'json' | 'date' | 'bigint' | 'custom' | 'buffer' | 'dateDuration' | 'duration' | 'relDuration' | 'localTime' | 'localDate' | 'localDateTime'; export type Dialect = 'pg' | 'mysql' | 'sqlite' | 'singlestore' | 'common' | 'gel'; export type GeneratedStorageMode = 'virtual' | 'stored'; export type GeneratedType = 'always' | 'byDefault'; export type GeneratedColumnConfig = { as: TDataType | SQL | (() => SQL); type?: GeneratedType; mode?: GeneratedStorageMode; }; export type GeneratedIdentityConfig = { sequenceName?: string; sequenceOptions?: PgSequenceOptions; type: 'always' | 'byDefault'; }; export interface ColumnBuilderBaseConfig { name: string; dataType: TDataType; columnType: TColumnType; data: unknown; driverParam: unknown; enumValues: string[] | undefined; } export type MakeColumnConfig< T extends ColumnBuilderBaseConfig, TTableName extends string, TData = T extends { $type: infer U } ? U : T['data'], > = { name: T['name']; tableName: TTableName; dataType: T['dataType']; columnType: T['columnType']; data: TData; driverParam: T['driverParam']; notNull: T extends { notNull: true } ? true : false; hasDefault: T extends { hasDefault: true } ? true : false; isPrimaryKey: T extends { isPrimaryKey: true } ? true : false; isAutoincrement: T extends { isAutoincrement: true } ? true : false; hasRuntimeDefault: T extends { hasRuntimeDefault: true } ? true : false; enumValues: T['enumValues']; baseColumn: T extends { baseBuilder: infer U extends ColumnBuilderBase } ? BuildColumn : never; identity: T extends { identity: 'always' } ? 'always' : T extends { identity: 'byDefault' } ? 'byDefault' : undefined; generated: T extends { generated: infer G } ? unknown extends G ? undefined : G extends undefined ? undefined : G : undefined; } & {}; export type ColumnBuilderTypeConfig< // eslint-disable-next-line @typescript-eslint/no-unused-vars T extends ColumnBuilderBaseConfig, TTypeConfig extends object = object, > = Simplify< & { brand: 'ColumnBuilder'; name: T['name']; dataType: T['dataType']; columnType: T['columnType']; data: T['data']; driverParam: T['driverParam']; notNull: T extends { notNull: infer U } ? U : boolean; hasDefault: T extends { hasDefault: infer U } ? U : boolean; enumValues: T['enumValues']; identity: T extends { identity: infer U } ? U : unknown; generated: T extends { generated: infer G } ? G extends undefined ? unknown : G : unknown; } & TTypeConfig >; export type ColumnBuilderRuntimeConfig = { name: string; keyAsName: boolean; notNull: boolean; default: TData | SQL | undefined; defaultFn: (() => TData | SQL) | undefined; onUpdateFn: (() => TData | SQL) | undefined; hasDefault: boolean; primaryKey: boolean; isUnique: boolean; uniqueName: string | undefined; uniqueType: string | undefined; dataType: string; columnType: string; generated: GeneratedColumnConfig | undefined; generatedIdentity: GeneratedIdentityConfig | undefined; } & TRuntimeConfig; export interface ColumnBuilderExtraConfig { primaryKeyHasDefault?: boolean; } export type NotNull = T & { _: { notNull: true; }; }; export type HasDefault = T & { _: { hasDefault: true; }; }; export type IsPrimaryKey = T & { _: { isPrimaryKey: true; }; }; export type IsAutoincrement = T & { _: { isAutoincrement: true; }; }; export type HasRuntimeDefault = T & { _: { hasRuntimeDefault: true; }; }; export type $Type = T & { _: { $type: TType; }; }; export type HasGenerated = T & { _: { hasDefault: true; generated: TGenerated; }; }; export type IsIdentity< T extends ColumnBuilderBase, TType extends 'always' | 'byDefault', > = T & { _: { notNull: true; hasDefault: true; identity: TType; }; }; export interface ColumnBuilderBase< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TTypeConfig extends object = object, > { _: ColumnBuilderTypeConfig; } // To understand how to use `ColumnBuilder` and `AnyColumnBuilder`, see `Column` and `AnyColumn` documentation. export abstract class ColumnBuilder< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, TTypeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, > implements ColumnBuilderBase { static readonly [entityKind]: string = 'ColumnBuilder'; declare _: ColumnBuilderTypeConfig; protected config: ColumnBuilderRuntimeConfig; constructor(name: T['name'], dataType: T['dataType'], columnType: T['columnType']) { this.config = { name, keyAsName: name === '', notNull: false, default: undefined, hasDefault: false, primaryKey: false, isUnique: false, uniqueName: undefined, uniqueType: undefined, dataType, columnType, generated: undefined, } as ColumnBuilderRuntimeConfig; } /** * Changes the data type of the column. Commonly used with `json` columns. Also, useful for branded types. * * @example * ```ts * const users = pgTable('users', { * id: integer('id').$type().primaryKey(), * details: json('details').$type().notNull(), * }); * ``` */ $type(): $Type { return this as $Type; } /** * Adds a `not null` clause to the column definition. * * Affects the `select` model of the table - columns *without* `not null` will be nullable on select. */ notNull(): NotNull { this.config.notNull = true; return this as NotNull; } /** * Adds a `default ` clause to the column definition. * * Affects the `insert` model of the table - columns *with* `default` are optional on insert. * * If you need to set a dynamic default value, use {@link $defaultFn} instead. */ default(value: (this['_'] extends { $type: infer U } ? U : this['_']['data']) | SQL): HasDefault { this.config.default = value; this.config.hasDefault = true; return this as HasDefault; } /** * Adds a dynamic default value to the column. * The function will be called when the row is inserted, and the returned value will be used as the column value. * * **Note:** This value does not affect the `drizzle-kit` behavior, it is only used at runtime in `drizzle-orm`. */ $defaultFn( fn: () => (this['_'] extends { $type: infer U } ? U : this['_']['data']) | SQL, ): HasRuntimeDefault> { this.config.defaultFn = fn; this.config.hasDefault = true; return this as HasRuntimeDefault>; } /** * Alias for {@link $defaultFn}. */ $default = this.$defaultFn; /** * Adds a dynamic update value to the column. * The function will be called when the row is updated, and the returned value will be used as the column value if none is provided. * If no `default` (or `$defaultFn`) value is provided, the function will be called when the row is inserted as well, and the returned value will be used as the column value. * * **Note:** This value does not affect the `drizzle-kit` behavior, it is only used at runtime in `drizzle-orm`. */ $onUpdateFn( fn: () => (this['_'] extends { $type: infer U } ? U : this['_']['data']) | SQL, ): HasDefault { this.config.onUpdateFn = fn; this.config.hasDefault = true; return this as HasDefault; } /** * Alias for {@link $onUpdateFn}. */ $onUpdate = this.$onUpdateFn; /** * Adds a `primary key` clause to the column definition. This implicitly makes the column `not null`. * * In SQLite, `integer primary key` implicitly makes the column auto-incrementing. */ primaryKey(): TExtraConfig['primaryKeyHasDefault'] extends true ? IsPrimaryKey>> : IsPrimaryKey> { this.config.primaryKey = true; this.config.notNull = true; return this as TExtraConfig['primaryKeyHasDefault'] extends true ? IsPrimaryKey>> : IsPrimaryKey>; } abstract generatedAlwaysAs( as: SQL | T['data'] | (() => SQL), config?: Partial>, ): HasGenerated; /** @internal Sets the name of the column to the key within the table definition if a name was not given. */ setName(name: string) { if (this.config.name !== '') return; this.config.name = name; } } export type BuildColumn< TTableName extends string, TBuilder extends ColumnBuilderBase, TDialect extends Dialect, > = TDialect extends 'pg' ? PgColumn< MakeColumnConfig, {}, Simplify | 'brand' | 'dialect'>> > : TDialect extends 'mysql' ? MySqlColumn< MakeColumnConfig, {}, Simplify< Omit< TBuilder['_'], | keyof MakeColumnConfig | 'brand' | 'dialect' | 'primaryKeyHasDefault' | 'mysqlColumnBuilderBrand' > > > : TDialect extends 'sqlite' ? SQLiteColumn< MakeColumnConfig, {}, Simplify | 'brand' | 'dialect'>> > : TDialect extends 'common' ? Column< MakeColumnConfig, {}, Simplify | 'brand' | 'dialect'>> > : TDialect extends 'singlestore' ? SingleStoreColumn< MakeColumnConfig, {}, Simplify< Omit< TBuilder['_'], | keyof MakeColumnConfig | 'brand' | 'dialect' | 'primaryKeyHasDefault' | 'singlestoreColumnBuilderBrand' > > > : TDialect extends 'gel' ? GelColumn< MakeColumnConfig, {}, Simplify | 'brand' | 'dialect'>> > : never; export type BuildIndexColumn< TDialect extends Dialect, > = TDialect extends 'pg' ? ExtraConfigColumn : TDialect extends 'gel' ? GelExtraConfigColumn : never; // TODO // try to make sql as well + indexRaw // optional after everything will be working as expected // also try to leave only needed methods for extraConfig // make an error if I pass .asc() to fk and so on export type BuildColumns< TTableName extends string, TConfigMap extends Record, TDialect extends Dialect, > = & { [Key in keyof TConfigMap]: BuildColumn & { name: TConfigMap[Key]['_']['name'] extends '' ? Assume : TConfigMap[Key]['_']['name'] }; }, TDialect>; } & {}; export type BuildExtraConfigColumns< _TTableName extends string, TConfigMap extends Record, TDialect extends Dialect, > = & { [Key in keyof TConfigMap]: BuildIndexColumn; } & {}; export type ChangeColumnTableName = TDialect extends 'pg' ? PgColumn> : TDialect extends 'mysql' ? MySqlColumn> : TDialect extends 'singlestore' ? SingleStoreColumn> : TDialect extends 'sqlite' ? SQLiteColumn> : TDialect extends 'gel' ? GelColumn> : never; ================================================ FILE: drizzle-orm/src/column.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, ColumnDataType, GeneratedColumnConfig, GeneratedIdentityConfig, } from './column-builder.ts'; import { entityKind } from './entity.ts'; import type { DriverValueMapper, SQL, SQLWrapper } from './sql/sql.ts'; import type { Table } from './table.ts'; import type { Update } from './utils.ts'; export interface ColumnBaseConfig< TDataType extends ColumnDataType, TColumnType extends string, > extends ColumnBuilderBaseConfig { tableName: string; notNull: boolean; hasDefault: boolean; isPrimaryKey: boolean; isAutoincrement: boolean; hasRuntimeDefault: boolean; } export type ColumnTypeConfig, TTypeConfig extends object> = T & { brand: 'Column'; tableName: T['tableName']; name: T['name']; dataType: T['dataType']; columnType: T['columnType']; data: T['data']; driverParam: T['driverParam']; notNull: T['notNull']; hasDefault: T['hasDefault']; isPrimaryKey: T['isPrimaryKey']; isAutoincrement: T['isAutoincrement']; hasRuntimeDefault: T['hasRuntimeDefault']; enumValues: T['enumValues']; baseColumn: T extends { baseColumn: infer U } ? U : unknown; generated: GeneratedColumnConfig | undefined; identity: undefined | 'always' | 'byDefault'; } & TTypeConfig; export type ColumnRuntimeConfig = ColumnBuilderRuntimeConfig< TData, TRuntimeConfig >; export interface Column< T extends ColumnBaseConfig = ColumnBaseConfig, // eslint-disable-next-line @typescript-eslint/no-unused-vars TRuntimeConfig extends object = object, // eslint-disable-next-line @typescript-eslint/no-unused-vars TTypeConfig extends object = object, > extends DriverValueMapper, SQLWrapper { // SQLWrapper runtime implementation is defined in 'sql/sql.ts' } /* `Column` only accepts a full `ColumnConfig` as its generic. To infer parts of the config, use `AnyColumn` that accepts a partial config. See `GetColumnData` for example usage of inferring. */ export abstract class Column< T extends ColumnBaseConfig = ColumnBaseConfig, TRuntimeConfig extends object = object, TTypeConfig extends object = object, > implements DriverValueMapper, SQLWrapper { static readonly [entityKind]: string = 'Column'; declare readonly _: ColumnTypeConfig; readonly name: string; readonly keyAsName: boolean; readonly primary: boolean; readonly notNull: boolean; readonly default: T['data'] | SQL | undefined; readonly defaultFn: (() => T['data'] | SQL) | undefined; readonly onUpdateFn: (() => T['data'] | SQL) | undefined; readonly hasDefault: boolean; readonly isUnique: boolean; readonly uniqueName: string | undefined; readonly uniqueType: string | undefined; readonly dataType: T['dataType']; readonly columnType: T['columnType']; readonly enumValues: T['enumValues'] = undefined; readonly generated: GeneratedColumnConfig | undefined = undefined; readonly generatedIdentity: GeneratedIdentityConfig | undefined = undefined; protected config: ColumnRuntimeConfig; constructor( readonly table: Table, config: ColumnRuntimeConfig, ) { this.config = config; this.name = config.name; this.keyAsName = config.keyAsName; this.notNull = config.notNull; this.default = config.default; this.defaultFn = config.defaultFn; this.onUpdateFn = config.onUpdateFn; this.hasDefault = config.hasDefault; this.primary = config.primaryKey; this.isUnique = config.isUnique; this.uniqueName = config.uniqueName; this.uniqueType = config.uniqueType; this.dataType = config.dataType as T['dataType']; this.columnType = config.columnType; this.generated = config.generated; this.generatedIdentity = config.generatedIdentity; } abstract getSQLType(): string; mapFromDriverValue(value: unknown): unknown { return value; } mapToDriverValue(value: unknown): unknown { return value; } // ** @internal */ shouldDisableInsert(): boolean { return this.config.generated !== undefined && this.config.generated.type !== 'byDefault'; } } export type UpdateColConfig< T extends ColumnBaseConfig, TUpdate extends Partial>, > = Update; export type AnyColumn> = {}> = Column< Required, TPartial>> >; export type GetColumnData = // dprint-ignore TInferMode extends 'raw' // Raw mode ? TColumn['_']['data'] // Just return the underlying type : TColumn['_']['notNull'] extends true // Query mode ? TColumn['_']['data'] // Query mode, not null : TColumn['_']['data'] | null; // Query mode, nullable export type InferColumnsDataTypes> = { [Key in keyof TColumns]: GetColumnData; }; ================================================ FILE: drizzle-orm/src/d1/driver.ts ================================================ /// import type { D1Database as MiniflareD1Database } from '@miniflare/d1'; import type { BatchItem, BatchResponse } from '~/batch.ts'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type ExtractTablesWithRelations, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts'; import type { DrizzleConfig, IfNotImported } from '~/utils.ts'; import { SQLiteD1Session } from './session.ts'; export type AnyD1Database = IfNotImported< D1Database, MiniflareD1Database, D1Database | IfNotImported >; export class DrizzleD1Database< TSchema extends Record = Record, > extends BaseSQLiteDatabase<'async', D1Result, TSchema> { static override readonly [entityKind]: string = 'D1Database'; /** @internal */ declare readonly session: SQLiteD1Session>; async batch, T extends Readonly<[U, ...U[]]>>( batch: T, ): Promise> { return this.session.batch(batch) as Promise>; } } export function drizzle< TSchema extends Record = Record, TClient extends AnyD1Database = AnyD1Database, >( client: TClient, config: DrizzleConfig = {}, ): DrizzleD1Database & { $client: TClient; } { const dialect = new SQLiteAsyncDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new SQLiteD1Session(client as D1Database, dialect, schema, { logger, cache: config.cache }); const db = new DrizzleD1Database('async', dialect, session, schema) as DrizzleD1Database; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } ================================================ FILE: drizzle-orm/src/d1/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/d1/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; import type { DrizzleD1Database } from './driver.ts'; export async function migrate>( db: DrizzleD1Database, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; const migrationTableCreate = sql` CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} ( id SERIAL PRIMARY KEY, hash text NOT NULL, created_at numeric ) `; await db.session.run(migrationTableCreate); const dbMigrations = await db.values<[number, string, string]>( sql`SELECT id, hash, created_at FROM ${sql.identifier(migrationsTable)} ORDER BY created_at DESC LIMIT 1`, ); const lastDbMigration = dbMigrations[0] ?? undefined; const statementToBatch = []; for (const migration of migrations) { if (!lastDbMigration || Number(lastDbMigration[2])! < migration.folderMillis) { for (const stmt of migration.sql) { statementToBatch.push(db.run(sql.raw(stmt))); } statementToBatch.push( db.run( sql`INSERT INTO ${sql.identifier(migrationsTable)} ("hash", "created_at") VALUES(${ sql.raw(`'${migration.hash}'`) }, ${sql.raw(`${migration.folderMillis}`)})`, ), ); } } if (statementToBatch.length > 0) { await db.session.batch(statementToBatch); } } ================================================ FILE: drizzle-orm/src/d1/session.ts ================================================ /// import type { BatchItem } from '~/batch.ts'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import type { PreparedQuery } from '~/session.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; import type { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts'; import { SQLiteTransaction } from '~/sqlite-core/index.ts'; import type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts'; import type { PreparedQueryConfig as PreparedQueryConfigBase, SQLiteExecuteMethod, SQLiteTransactionConfig, } from '~/sqlite-core/session.ts'; import { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts'; import { mapResultRow } from '~/utils.ts'; export interface SQLiteD1SessionOptions { logger?: Logger; cache?: Cache; } type PreparedQueryConfig = Omit; export class SQLiteD1Session< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'async', D1Result, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'SQLiteD1Session'; private logger: Logger; private cache: Cache; constructor( private client: D1Database, dialect: SQLiteAsyncDialect, private schema: RelationalSchemaConfig | undefined, private options: SQLiteD1SessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => unknown, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): D1PreparedQuery { const stmt = this.client.prepare(query.sql); return new D1PreparedQuery( stmt, query, this.logger, this.cache, queryMetadata, cacheConfig, fields, executeMethod, isResponseInArrayMode, customResultMapper, ); } async batch[] | readonly BatchItem<'sqlite'>[]>(queries: T) { const preparedQueries: PreparedQuery[] = []; const builtQueries: D1PreparedStatement[] = []; for (const query of queries) { const preparedQuery = query._prepare(); const builtQuery = preparedQuery.getQuery(); preparedQueries.push(preparedQuery); if (builtQuery.params.length > 0) { builtQueries.push((preparedQuery as D1PreparedQuery).stmt.bind(...builtQuery.params)); } else { const builtQuery = preparedQuery.getQuery(); builtQueries.push( this.client.prepare(builtQuery.sql).bind(...builtQuery.params), ); } } const batchResults = await this.client.batch(builtQueries); return batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true)); } override extractRawAllValueFromBatchResult(result: unknown): unknown { return (result as D1Result).results; } override extractRawGetValueFromBatchResult(result: unknown): unknown { return (result as D1Result).results[0]; } override extractRawValuesValueFromBatchResult(result: unknown): unknown { return d1ToRawMapping((result as D1Result).results); } override async transaction( transaction: (tx: D1Transaction) => T | Promise, config?: SQLiteTransactionConfig, ): Promise { const tx = new D1Transaction('async', this.dialect, this, this.schema); await this.run(sql.raw(`begin${config?.behavior ? ' ' + config.behavior : ''}`)); try { const result = await transaction(tx); await this.run(sql`commit`); return result; } catch (err) { await this.run(sql`rollback`); throw err; } } } export class D1Transaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'async', D1Result, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'D1Transaction'; override async transaction(transaction: (tx: D1Transaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex}`; const tx = new D1Transaction('async', this.dialect, this.session, this.schema, this.nestedIndex + 1); await this.session.run(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await this.session.run(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { await this.session.run(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } /** * This function was taken from the D1 implementation: https://github.com/cloudflare/workerd/blob/4aae9f4c7ae30a59a88ca868c4aff88bda85c956/src/cloudflare/internal/d1-api.ts#L287 * It may cause issues with duplicated column names in join queries, which should be fixed on the D1 side. * @param results * @returns */ function d1ToRawMapping(results: any) { const rows: unknown[][] = []; for (const row of results) { const entry = Object.keys(row).map((k) => row[k]); rows.push(entry); } return rows; } export class D1PreparedQuery extends SQLitePreparedQuery< { type: 'async'; run: D1Response; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { static override readonly [entityKind]: string = 'D1PreparedQuery'; /** @internal */ customResultMapper?: (rows: unknown[][], mapColumnValue?: (value: unknown) => unknown) => unknown; /** @internal */ fields?: SelectedFieldsOrdered; /** @internal */ stmt: D1PreparedStatement; constructor( stmt: D1PreparedStatement, query: Query, private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, private _isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => unknown, ) { super('async', executeMethod, query, cache, queryMetadata, cacheConfig); this.customResultMapper = customResultMapper; this.fields = fields; this.stmt = stmt; } async run(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return await this.queryWithCache(this.query.sql, params, async () => { return this.stmt.bind(...params).run(); }); } async all(placeholderValues?: Record): Promise { const { fields, query, logger, stmt, customResultMapper } = this; if (!fields && !customResultMapper) { const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); return await this.queryWithCache(query.sql, params, async () => { return stmt.bind(...params).all().then(({ results }) => this.mapAllResult(results!)); }); } const rows = await this.values(placeholderValues); return this.mapAllResult(rows); } override mapAllResult(rows: unknown, isFromBatch?: boolean): unknown { if (isFromBatch) { rows = d1ToRawMapping((rows as D1Result).results); } if (!this.fields && !this.customResultMapper) { return rows; } if (this.customResultMapper) { return this.customResultMapper(rows as unknown[][]); } return (rows as unknown[][]).map((row) => mapResultRow(this.fields!, row, this.joinsNotNullableMap)); } async get(placeholderValues?: Record): Promise { const { fields, joinsNotNullableMap, query, logger, stmt, customResultMapper } = this; if (!fields && !customResultMapper) { const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); return await this.queryWithCache(query.sql, params, async () => { return stmt.bind(...params).all().then(({ results }) => results![0]); }); } const rows = await this.values(placeholderValues); if (!rows[0]) { return undefined; } if (customResultMapper) { return customResultMapper(rows) as T['all']; } return mapResultRow(fields!, rows[0], joinsNotNullableMap); } override mapGetResult(result: unknown, isFromBatch?: boolean): unknown { if (isFromBatch) { result = d1ToRawMapping((result as D1Result).results)[0]; } if (!this.fields && !this.customResultMapper) { return result; } if (this.customResultMapper) { return this.customResultMapper([result as unknown[]]) as T['all']; } return mapResultRow(this.fields!, result as unknown[], this.joinsNotNullableMap); } async values(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return await this.queryWithCache(this.query.sql, params, async () => { return this.stmt.bind(...params).raw(); }); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } ================================================ FILE: drizzle-orm/src/durable-sqlite/driver.ts ================================================ /// import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type ExtractTablesWithRelations, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import type { DrizzleConfig } from '~/utils.ts'; import { SQLiteDOSession } from './session.ts'; export class DrizzleSqliteDODatabase< TSchema extends Record = Record, > extends BaseSQLiteDatabase<'sync', SqlStorageCursor>, TSchema> { static override readonly [entityKind]: string = 'DrizzleSqliteDODatabase'; /** @internal */ declare readonly session: SQLiteDOSession>; } export function drizzle< TSchema extends Record = Record, TClient extends DurableObjectStorage = DurableObjectStorage, >( client: TClient, config: DrizzleConfig = {}, ): DrizzleSqliteDODatabase & { $client: TClient; } { const dialect = new SQLiteSyncDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new SQLiteDOSession(client as DurableObjectStorage, dialect, schema, { logger }); const db = new DrizzleSqliteDODatabase('sync', dialect, session, schema) as DrizzleSqliteDODatabase; ( db).$client = client; return db as any; } ================================================ FILE: drizzle-orm/src/durable-sqlite/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/durable-sqlite/migrator.ts ================================================ import type { MigrationMeta } from '~/migrator.ts'; import { sql } from '~/sql/index.ts'; import type { DrizzleSqliteDODatabase } from './driver.ts'; interface MigrationConfig { journal: { entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; }; migrations: Record; } function readMigrationFiles({ journal, migrations }: MigrationConfig): MigrationMeta[] { const migrationQueries: MigrationMeta[] = []; for (const journalEntry of journal.entries) { const query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`]; if (!query) { throw new Error(`Missing migration: ${journalEntry.tag}`); } try { const result = query.split('--> statement-breakpoint').map((it) => { return it; }); migrationQueries.push({ sql: result, bps: journalEntry.breakpoints, folderMillis: journalEntry.when, hash: '', }); } catch { throw new Error(`Failed to parse migration: ${journalEntry.tag}`); } } return migrationQueries; } export async function migrate< TSchema extends Record, >( db: DrizzleSqliteDODatabase, config: MigrationConfig, ): Promise { const migrations = readMigrationFiles(config); db.transaction((tx) => { try { const migrationsTable = '__drizzle_migrations'; const migrationTableCreate = sql` CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} ( id SERIAL PRIMARY KEY, hash text NOT NULL, created_at numeric ) `; db.run(migrationTableCreate); const dbMigrations = db.values<[number, string, string]>( sql`SELECT id, hash, created_at FROM ${sql.identifier(migrationsTable)} ORDER BY created_at DESC LIMIT 1`, ); const lastDbMigration = dbMigrations[0] ?? undefined; for (const migration of migrations) { if (!lastDbMigration || Number(lastDbMigration[2])! < migration.folderMillis) { for (const stmt of migration.sql) { db.run(sql.raw(stmt)); } db.run( sql`INSERT INTO ${ sql.identifier(migrationsTable) } ("hash", "created_at") VALUES(${migration.hash}, ${migration.folderMillis})`, ); } } } catch (error: any) { tx.rollback(); throw error; } }); } ================================================ FILE: drizzle-orm/src/durable-sqlite/session.ts ================================================ import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query } from '~/sql/sql.ts'; import { type SQLiteSyncDialect, SQLiteTransaction } from '~/sqlite-core/index.ts'; import type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts'; import { type PreparedQueryConfig as PreparedQueryConfigBase, type SQLiteExecuteMethod, SQLiteSession, type SQLiteTransactionConfig, } from '~/sqlite-core/session.ts'; import { SQLitePreparedQuery as PreparedQueryBase } from '~/sqlite-core/session.ts'; import { mapResultRow } from '~/utils.ts'; export interface SQLiteDOSessionOptions { logger?: Logger; } type PreparedQueryConfig = Omit; export class SQLiteDOSession, TSchema extends TablesRelationalConfig> extends SQLiteSession< 'sync', SqlStorageCursor>, TFullSchema, TSchema > { static override readonly [entityKind]: string = 'SQLiteDOSession'; private logger: Logger; constructor( private client: DurableObjectStorage, dialect: SQLiteSyncDialect, private schema: RelationalSchemaConfig | undefined, options: SQLiteDOSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); } prepareQuery>( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => unknown, ): SQLiteDOPreparedQuery { return new SQLiteDOPreparedQuery( this.client, query, this.logger, fields, executeMethod, isResponseInArrayMode, customResultMapper, ); } override transaction( transaction: ( tx: SQLiteTransaction<'sync', SqlStorageCursor>, TFullSchema, TSchema>, ) => T, _config?: SQLiteTransactionConfig, ): T { const tx = new SQLiteDOTransaction('sync', this.dialect, this, this.schema); return this.client.transactionSync(() => transaction(tx)); } } export class SQLiteDOTransaction, TSchema extends TablesRelationalConfig> extends SQLiteTransaction< 'sync', SqlStorageCursor>, TFullSchema, TSchema > { static override readonly [entityKind]: string = 'SQLiteDOTransaction'; override transaction(transaction: (tx: SQLiteDOTransaction) => T): T { const tx = new SQLiteDOTransaction('sync', this.dialect, this.session, this.schema, this.nestedIndex + 1); return this.session.transaction(() => transaction(tx)); } } export class SQLiteDOPreparedQuery extends PreparedQueryBase<{ type: 'sync'; run: void; all: T['all']; get: T['get']; values: T['values']; execute: T['execute']; }> { static override readonly [entityKind]: string = 'SQLiteDOPreparedQuery'; constructor( private client: DurableObjectStorage, query: Query, private logger: Logger, private fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => unknown, ) { // 3-6 params are for cache. As long as we don't support sync cache - it will be skipped here super('sync', executeMethod, query, undefined, undefined, undefined); } run(placeholderValues?: Record): void { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); params.length > 0 ? this.client.sql.exec(this.query.sql, ...params) : this.client.sql.exec(this.query.sql); } all(placeholderValues?: Record): T['all'] { const { fields, joinsNotNullableMap, query, logger, client, customResultMapper } = this; if (!fields && !customResultMapper) { const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); return params.length > 0 ? client.sql.exec(query.sql, ...params).toArray() : client.sql.exec(query.sql).toArray(); } const rows = this.values(placeholderValues) as unknown[][]; if (customResultMapper) { return customResultMapper(rows) as T['all']; } return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } get(placeholderValues?: Record): T['get'] { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); const { fields, client, joinsNotNullableMap, customResultMapper, query } = this; if (!fields && !customResultMapper) { return (params.length > 0 ? client.sql.exec(query.sql, ...params) : client.sql.exec(query.sql)).next().value; } const rows = this.values(placeholderValues) as unknown[][]; const row = rows[0]; if (!row) { return undefined; } if (customResultMapper) { return customResultMapper(rows) as T['get']; } return mapResultRow(fields!, row, joinsNotNullableMap); } values(placeholderValues?: Record): T['values'] { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); const res = params.length > 0 ? this.client.sql.exec(this.query.sql, ...params) : this.client.sql.exec(this.query.sql); // @ts-ignore .raw().toArray() exists return res.raw().toArray(); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } ================================================ FILE: drizzle-orm/src/entity.ts ================================================ export const entityKind = Symbol.for('drizzle:entityKind'); export const hasOwnEntityKind = Symbol.for('drizzle:hasOwnEntityKind'); export interface DrizzleEntity { [entityKind]: string; } export type DrizzleEntityClass = & ((abstract new(...args: any[]) => T) | (new(...args: any[]) => T)) & DrizzleEntity; export function is>(value: any, type: T): value is InstanceType { if (!value || typeof value !== 'object') { return false; } if (value instanceof type) { // eslint-disable-line no-instanceof/no-instanceof return true; } if (!Object.prototype.hasOwnProperty.call(type, entityKind)) { throw new Error( `Class "${ type.name ?? '' }" doesn't look like a Drizzle entity. If this is incorrect and the class is provided by Drizzle, please report this as a bug.`, ); } let cls = Object.getPrototypeOf(value).constructor; if (cls) { // Traverse the prototype chain to find the entityKind while (cls) { if (entityKind in cls && cls[entityKind] === type[entityKind]) { return true; } cls = Object.getPrototypeOf(cls); } } return false; } ================================================ FILE: drizzle-orm/src/errors.ts ================================================ import { entityKind } from '~/entity.ts'; export class DrizzleError extends Error { static readonly [entityKind]: string = 'DrizzleError'; constructor({ message, cause }: { message?: string; cause?: unknown }) { super(message); this.name = 'DrizzleError'; this.cause = cause; } } export class DrizzleQueryError extends Error { constructor( public query: string, public params: any[], public override cause?: Error, ) { super(`Failed query: ${query}\nparams: ${params}`); Error.captureStackTrace(this, DrizzleQueryError); // ES2022+: preserves original error on `.cause` if (cause) (this as any).cause = cause; } } export class TransactionRollbackError extends DrizzleError { static override readonly [entityKind]: string = 'TransactionRollbackError'; constructor() { super({ message: 'Rollback' }); } } ================================================ FILE: drizzle-orm/src/expo-sqlite/driver.ts ================================================ import type { SQLiteDatabase, SQLiteRunResult } from 'expo-sqlite'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import type { DrizzleConfig } from '~/utils.ts'; import { ExpoSQLiteSession } from './session.ts'; export class ExpoSQLiteDatabase = Record> extends BaseSQLiteDatabase<'sync', SQLiteRunResult, TSchema> { static override readonly [entityKind]: string = 'ExpoSQLiteDatabase'; } export function drizzle = Record>( client: SQLiteDatabase, config: DrizzleConfig = {}, ): ExpoSQLiteDatabase & { $client: SQLiteDatabase; } { const dialect = new SQLiteSyncDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new ExpoSQLiteSession(client, dialect, schema, { logger }); const db = new ExpoSQLiteDatabase('sync', dialect, session, schema) as ExpoSQLiteDatabase; ( db).$client = client; return db as any; } ================================================ FILE: drizzle-orm/src/expo-sqlite/index.ts ================================================ export * from './driver.ts'; export * from './query.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/expo-sqlite/migrator.ts ================================================ import { useEffect, useReducer } from 'react'; import type { MigrationMeta } from '~/migrator.ts'; import type { ExpoSQLiteDatabase } from './driver.ts'; interface MigrationConfig { journal: { entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; }; migrations: Record; } async function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise { const migrationQueries: MigrationMeta[] = []; for await (const journalEntry of journal.entries) { const query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`]; if (!query) { throw new Error(`Missing migration: ${journalEntry.tag}`); } try { const result = query.split('--> statement-breakpoint').map((it) => { return it; }); migrationQueries.push({ sql: result, bps: journalEntry.breakpoints, folderMillis: journalEntry.when, hash: '', }); } catch { throw new Error(`Failed to parse migration: ${journalEntry.tag}`); } } return migrationQueries; } export async function migrate>( db: ExpoSQLiteDatabase, config: MigrationConfig, ) { const migrations = await readMigrationFiles(config); return db.dialect.migrate(migrations, db.session); } interface State { success: boolean; error?: Error; } type Action = | { type: 'migrating' } | { type: 'migrated'; payload: true } | { type: 'error'; payload: Error }; export const useMigrations = (db: ExpoSQLiteDatabase, migrations: { journal: { entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; }; migrations: Record; }): State => { const initialState: State = { success: false, error: undefined, }; const fetchReducer = (state: State, action: Action): State => { switch (action.type) { case 'migrating': { return { ...initialState }; } case 'migrated': { return { ...initialState, success: action.payload }; } case 'error': { return { ...initialState, error: action.payload }; } default: { return state; } } }; const [state, dispatch] = useReducer(fetchReducer, initialState); useEffect(() => { dispatch({ type: 'migrating' }); migrate(db, migrations as any).then(() => { dispatch({ type: 'migrated', payload: true }); }).catch((error) => { dispatch({ type: 'error', payload: error as Error }); }); }, []); return state; }; ================================================ FILE: drizzle-orm/src/expo-sqlite/query.ts ================================================ import { addDatabaseChangeListener } from 'expo-sqlite'; import { useEffect, useState } from 'react'; import { is } from '~/entity.ts'; import { SQL } from '~/sql/sql.ts'; import type { AnySQLiteSelect } from '~/sqlite-core/index.ts'; import { getTableConfig, getViewConfig, SQLiteTable, SQLiteView } from '~/sqlite-core/index.ts'; import { SQLiteRelationalQuery } from '~/sqlite-core/query-builders/query.ts'; import { Subquery } from '~/subquery.ts'; export const useLiveQuery = | SQLiteRelationalQuery<'sync', unknown>>( query: T, deps: unknown[] = [], ) => { const [data, setData] = useState>( (is(query, SQLiteRelationalQuery) && query.mode === 'first' ? undefined : []) as Awaited, ); const [error, setError] = useState(); const [updatedAt, setUpdatedAt] = useState(); useEffect(() => { const entity = is(query, SQLiteRelationalQuery) ? query.table : (query as AnySQLiteSelect).config.table; if (is(entity, Subquery) || is(entity, SQL)) { setError(new Error('Selecting from subqueries and SQL are not supported in useLiveQuery')); return; } let listener: ReturnType | undefined; const handleData = (data: any) => { setData(data); setUpdatedAt(new Date()); }; query.then(handleData).catch(setError); if (is(entity, SQLiteTable) || is(entity, SQLiteView)) { const config = is(entity, SQLiteTable) ? getTableConfig(entity) : getViewConfig(entity); listener = addDatabaseChangeListener(({ tableName }) => { if (config.name === tableName) { query.then(handleData).catch(setError); } }); } return () => { listener?.remove(); }; }, deps); return { data, error, updatedAt, } as const; }; ================================================ FILE: drizzle-orm/src/expo-sqlite/session.ts ================================================ import type { SQLiteDatabase, SQLiteRunResult, SQLiteStatement } from 'expo-sqlite'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; import type { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import { SQLiteTransaction } from '~/sqlite-core/index.ts'; import type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts'; import { type PreparedQueryConfig as PreparedQueryConfigBase, type SQLiteExecuteMethod, SQLitePreparedQuery, SQLiteSession, type SQLiteTransactionConfig, } from '~/sqlite-core/session.ts'; import { mapResultRow } from '~/utils.ts'; export interface ExpoSQLiteSessionOptions { logger?: Logger; } type PreparedQueryConfig = Omit; export class ExpoSQLiteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'sync', SQLiteRunResult, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'ExpoSQLiteSession'; private logger: Logger; constructor( private client: SQLiteDatabase, dialect: SQLiteSyncDialect, private schema: RelationalSchemaConfig | undefined, options: ExpoSQLiteSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); } prepareQuery>( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => unknown, ): ExpoSQLitePreparedQuery { const stmt = this.client.prepareSync(query.sql); return new ExpoSQLitePreparedQuery( stmt, query, this.logger, fields, executeMethod, isResponseInArrayMode, customResultMapper, ); } override transaction( transaction: (tx: ExpoSQLiteTransaction) => T, config: SQLiteTransactionConfig = {}, ): T { const tx = new ExpoSQLiteTransaction('sync', this.dialect, this, this.schema); this.run(sql.raw(`begin${config?.behavior ? ' ' + config.behavior : ''}`)); try { const result = transaction(tx); this.run(sql`commit`); return result; } catch (err) { this.run(sql`rollback`); throw err; } } } export class ExpoSQLiteTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'sync', SQLiteRunResult, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'ExpoSQLiteTransaction'; override transaction(transaction: (tx: ExpoSQLiteTransaction) => T): T { const savepointName = `sp${this.nestedIndex}`; const tx = new ExpoSQLiteTransaction('sync', this.dialect, this.session, this.schema, this.nestedIndex + 1); this.session.run(sql.raw(`savepoint ${savepointName}`)); try { const result = transaction(tx); this.session.run(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { this.session.run(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export class ExpoSQLitePreparedQuery extends SQLitePreparedQuery< { type: 'sync'; run: SQLiteRunResult; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { static override readonly [entityKind]: string = 'ExpoSQLitePreparedQuery'; constructor( private stmt: SQLiteStatement, query: Query, private logger: Logger, private fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => unknown, ) { super('sync', executeMethod, query); } run(placeholderValues?: Record): SQLiteRunResult { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); const { changes, lastInsertRowId } = this.stmt.executeSync(params as any[]); return { changes, lastInsertRowId, }; } all(placeholderValues?: Record): T['all'] { const { fields, joinsNotNullableMap, query, logger, stmt, customResultMapper } = this; if (!fields && !customResultMapper) { const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); return stmt.executeSync(params as any[]).getAllSync(); } const rows = this.values(placeholderValues) as unknown[][]; if (customResultMapper) { return customResultMapper(rows) as T['all']; } return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } get(placeholderValues?: Record): T['get'] { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); const { fields, stmt, joinsNotNullableMap, customResultMapper } = this; if (!fields && !customResultMapper) { return stmt.executeSync(params as any[]).getFirstSync(); } const rows = this.values(placeholderValues) as unknown[][]; const row = rows[0]; if (!row) { return undefined; } if (customResultMapper) { return customResultMapper(rows) as T['get']; } return mapResultRow(fields!, row, joinsNotNullableMap); } values(placeholderValues?: Record): T['values'] { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return this.stmt.executeForRawResultSync(params as any[]).getAllSync(); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } ================================================ FILE: drizzle-orm/src/gel/driver.ts ================================================ import { type Client, type ConnectOptions, createClient } from 'gel'; import type { Cache } from '~/cache/core/index.ts'; import { entityKind } from '~/entity.ts'; import { GelDatabase } from '~/gel-core/db.ts'; import { GelDialect } from '~/gel-core/dialect.ts'; import type { GelQueryResultHKT } from '~/gel-core/session.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import type { GelClient } from './session.ts'; import { GelDbSession } from './session.ts'; export interface GelDriverOptions { logger?: Logger; cache?: Cache; } export class GelDriver { static readonly [entityKind]: string = 'GelDriver'; constructor( private client: GelClient, private dialect: GelDialect, private options: GelDriverOptions = {}, ) {} createSession( schema: RelationalSchemaConfig | undefined, ): GelDbSession, TablesRelationalConfig> { return new GelDbSession(this.client, this.dialect, schema, { logger: this.options.logger, cache: this.options.cache, }); } } export class GelJsDatabase = Record> extends GelDatabase { static override readonly [entityKind]: string = 'GelJsDatabase'; } function construct< TSchema extends Record = Record, TClient extends GelClient = GelClient, >( client: TClient, config: DrizzleConfig = {}, ): GelJsDatabase & { $client: GelClient extends TClient ? Client : TClient; } { const dialect = new GelDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig(config.schema, createTableRelationsHelpers); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const driver = new GelDriver(client, dialect, { logger, cache: config.cache }); const session = driver.createSession(schema); const db = new GelJsDatabase(dialect, session, schema as any) as GelJsDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends GelClient = Client, >( ...params: | [TClient | string] | [TClient | string, DrizzleConfig] | [ & DrizzleConfig & ( | { connection: string | ConnectOptions; } | { client: TClient; } ), ] ): GelJsDatabase & { $client: GelClient extends TClient ? Client : TClient; } { if (typeof params[0] === 'string') { const instance = createClient({ dsn: params[0] }); return construct(instance, params[1] as DrizzleConfig | undefined) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as ( & ({ connection?: ConnectOptions | string; client?: TClient }) & DrizzleConfig ); if (client) return construct(client, drizzleConfig); const instance = createClient(connection); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): GelJsDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/gel/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/gel/migrator.ts ================================================ // import type { MigrationConfig } from '~/migrator.ts'; // import { readMigrationFiles } from '~/migrator.ts'; // import type { GelJsDatabase } from './driver.ts'; // not supported // eslint-disable-next-line @typescript-eslint/no-unused-vars async function migrate>( // db: GelJsDatabase, // config: MigrationConfig, ) { return {}; // const migrations = readMigrationFiles(config); // await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/gel/session.ts ================================================ import type { Client } from 'gel'; import type { Transaction } from 'gel/dist/transaction'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { GelDialect } from '~/gel-core/dialect.ts'; import type { SelectedFieldsOrdered } from '~/gel-core/query-builders/select.types.ts'; import { GelPreparedQuery, GelSession, GelTransaction, type PreparedQueryConfig } from '~/gel-core/session.ts'; import { type Logger, NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, type SQL } from '~/sql/sql.ts'; import { tracer } from '~/tracing.ts'; import { mapResultRow } from '~/utils.ts'; export type GelClient = Client | Transaction; export class GelDbPreparedQuery extends GelPreparedQuery { static override readonly [entityKind]: string = 'GelPreparedQuery'; constructor( private client: GelClient, private queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => T['execute'], private transaction: boolean = false, ) { super({ sql: queryString, params }, cache, queryMetadata, cacheConfig); } async execute(placeholderValues: Record | undefined = {}): Promise { return tracer.startActiveSpan('drizzle.execute', async () => { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.queryString, params); const { fields, queryString: query, client, joinsNotNullableMap, customResultMapper } = this; if (!fields && !customResultMapper) { return tracer.startActiveSpan('drizzle.driver.execute', async (span) => { span?.setAttributes({ 'drizzle.query.text': query, 'drizzle.query.params': JSON.stringify(params), }); return await this.queryWithCache(query, params, async () => { return await client.querySQL(query, params.length ? params : undefined); }); }); } const result = (await tracer.startActiveSpan('drizzle.driver.execute', async (span) => { span?.setAttributes({ 'drizzle.query.text': query, 'drizzle.query.params': JSON.stringify(params), }); return await this.queryWithCache(query, params, async () => { return await client.withSQLRowMode('array').querySQL(query, params.length ? params : undefined); }); })) as unknown[][]; return tracer.startActiveSpan('drizzle.mapResponse', () => { return customResultMapper ? customResultMapper(result) : result.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); }); }); } async all(placeholderValues: Record | undefined = {}): Promise { return await tracer.startActiveSpan('drizzle.execute', async () => { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.queryString, params); return await tracer.startActiveSpan('drizzle.driver.execute', async (span) => { span?.setAttributes({ 'drizzle.query.text': this.queryString, 'drizzle.query.params': JSON.stringify(params), }); return await this.queryWithCache(this.queryString, params, async () => { return await this.client.withSQLRowMode('array').querySQL( this.queryString, params.length ? params : undefined, ).then(( result, ) => result); }); }); }); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } export interface GelSessionOptions { logger?: Logger; cache?: Cache; } export class GelDbSession, TSchema extends TablesRelationalConfig> extends GelSession { static override readonly [entityKind]: string = 'GelDbSession'; private logger: Logger; private cache: Cache; constructor( private client: GelClient, dialect: GelDialect, private schema: RelationalSchemaConfig | undefined, private options: GelSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): GelDbPreparedQuery { return new GelDbPreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, isResponseInArrayMode, customResultMapper, ); } override async transaction( transaction: (tx: GelTransaction) => Promise, ): Promise { return await (this.client as Client).transaction(async (clientTx) => { const session = new GelDbSession(clientTx, this.dialect, this.schema, this.options); const tx = new GelDbTransaction(this.dialect, session, this.schema); return await transaction(tx); }); } override async count(sql: SQL): Promise { const res = await this.execute<[{ count: string }]>(sql); return Number(res[0]['count']); } } export class GelDbTransaction, TSchema extends TablesRelationalConfig> extends GelTransaction { static override readonly [entityKind]: string = 'GelDbTransaction'; override async transaction(transaction: (tx: GelDbTransaction) => Promise): Promise { const tx = new GelDbTransaction( this.dialect, this.session, this.schema, ); return await transaction(tx); } } // TODO fix this export interface GelQueryResultHKT { readonly $brand: 'GelQueryResultHKT'; readonly row: unknown; readonly type: unknown; } ================================================ FILE: drizzle-orm/src/gel-core/alias.ts ================================================ import { TableAliasProxyHandler } from '~/alias.ts'; import type { BuildAliasTable } from './query-builders/select.types.ts'; import type { GelTable } from './table.ts'; import type { GelViewBase } from './view-base.ts'; export function alias( table: TTable, alias: TAlias, ): BuildAliasTable { return new Proxy(table, new TableAliasProxyHandler(alias, false)) as any; } ================================================ FILE: drizzle-orm/src/gel-core/checks.ts ================================================ import { entityKind } from '~/entity.ts'; import type { SQL } from '~/sql/index.ts'; import type { GelTable } from './table.ts'; export class CheckBuilder { static readonly [entityKind]: string = 'GelCheckBuilder'; protected brand!: 'GelConstraintBuilder'; constructor(public name: string, public value: SQL) {} /** @internal */ build(table: GelTable): Check { return new Check(table, this); } } export class Check { static readonly [entityKind]: string = 'GelCheck'; readonly name: string; readonly value: SQL; constructor(public table: GelTable, builder: CheckBuilder) { this.name = builder.name; this.value = builder.value; } } export function check(name: string, value: SQL): CheckBuilder { return new CheckBuilder(name, value); } ================================================ FILE: drizzle-orm/src/gel-core/columns/all.ts ================================================ import { bigint } from './bigint.ts'; import { bigintT } from './bigintT.ts'; import { boolean } from './boolean.ts'; import { bytes } from './bytes.ts'; import { customType } from './custom.ts'; import { dateDuration } from './date-duration.ts'; import { decimal } from './decimal.ts'; import { doublePrecision } from './double-precision.ts'; import { duration } from './duration.ts'; import { integer } from './integer.ts'; import { json } from './json.ts'; import { localDate } from './localdate.ts'; import { localTime } from './localtime.ts'; import { real } from './real.ts'; import { relDuration } from './relative-duration.ts'; import { smallint } from './smallint.ts'; import { text } from './text.ts'; import { timestamp } from './timestamp.ts'; import { timestamptz } from './timestamptz.ts'; import { uuid } from './uuid.ts'; // TODO add export function getGelColumnBuilders() { return { localDate, localTime, decimal, dateDuration, bigintT, duration, relDuration, bytes, customType, bigint, boolean, doublePrecision, integer, json, real, smallint, text, timestamptz, uuid, timestamp, }; } export type GelColumnsBuilders = ReturnType; ================================================ FILE: drizzle-orm/src/gel-core/columns/bigint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn } from './common.ts'; import { GelIntColumnBaseBuilder } from './int.common.ts'; export type GelInt53BuilderInitial = GelInt53Builder<{ name: TName; dataType: 'number'; columnType: 'GelInt53'; data: number; driverParam: number; enumValues: undefined; }>; export class GelInt53Builder> extends GelIntColumnBaseBuilder { static override readonly [entityKind]: string = 'GelInt53Builder'; constructor(name: T['name']) { super(name, 'number', 'GelInt53'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelInt53> { return new GelInt53>(table, this.config as ColumnBuilderRuntimeConfig); } } export class GelInt53> extends GelColumn { static override readonly [entityKind]: string = 'GelInt53'; getSQLType(): string { return 'bigint'; } } export function bigint(): GelInt53BuilderInitial<''>; export function bigint(name: TName): GelInt53BuilderInitial; export function bigint(name?: string) { return new GelInt53Builder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/bigintT.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn } from './common.ts'; import { GelIntColumnBaseBuilder } from './int.common.ts'; export type GelBigInt64BuilderInitial = GelBigInt64Builder<{ name: TName; dataType: 'bigint'; columnType: 'GelBigInt64'; data: bigint; driverParam: bigint; enumValues: undefined; }>; export class GelBigInt64Builder> extends GelIntColumnBaseBuilder { static override readonly [entityKind]: string = 'GelBigInt64Builder'; constructor(name: T['name']) { super(name, 'bigint', 'GelBigInt64'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelBigInt64> { return new GelBigInt64>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class GelBigInt64> extends GelColumn { static override readonly [entityKind]: string = 'GelBigInt64'; getSQLType(): string { return 'edgedbt.bigint_t'; } override mapFromDriverValue(value: string): bigint { return BigInt(value as string); // TODO ts error if remove 'as string' } } export function bigintT(): GelBigInt64BuilderInitial<''>; export function bigintT(name: TName): GelBigInt64BuilderInitial; export function bigintT(name?: string) { return new GelBigInt64Builder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/boolean.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export type GelBooleanBuilderInitial = GelBooleanBuilder<{ name: TName; dataType: 'boolean'; columnType: 'GelBoolean'; data: boolean; driverParam: boolean; enumValues: undefined; }>; export class GelBooleanBuilder> extends GelColumnBuilder { static override readonly [entityKind]: string = 'GelBooleanBuilder'; constructor(name: T['name']) { super(name, 'boolean', 'GelBoolean'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelBoolean> { return new GelBoolean>(table, this.config as ColumnBuilderRuntimeConfig); } } export class GelBoolean> extends GelColumn { static override readonly [entityKind]: string = 'GelBoolean'; getSQLType(): string { return 'boolean'; } } export function boolean(): GelBooleanBuilderInitial<''>; export function boolean(name: TName): GelBooleanBuilderInitial; export function boolean(name?: string) { return new GelBooleanBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/bytes.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export type GelBytesBuilderInitial = GelBytesBuilder<{ name: TName; dataType: 'buffer'; columnType: 'GelBytes'; data: Uint8Array; driverParam: Uint8Array | Buffer; enumValues: undefined; }>; export class GelBytesBuilder> extends GelColumnBuilder { static override readonly [entityKind]: string = 'GelBytesBuilder'; constructor(name: T['name']) { super(name, 'buffer', 'GelBytes'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelBytes> { return new GelBytes>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class GelBytes> extends GelColumn { static override readonly [entityKind]: string = 'GelBytes'; getSQLType(): string { return 'bytea'; } } export function bytes(): GelBytesBuilderInitial<''>; export function bytes(name: TName): GelBytesBuilderInitial; export function bytes(name?: string) { return new GelBytesBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/common.ts ================================================ import type { ColumnBuilderBase, ColumnBuilderBaseConfig, ColumnBuilderExtraConfig, ColumnBuilderRuntimeConfig, ColumnDataType, HasGenerated, MakeColumnConfig, } from '~/column-builder.ts'; import { ColumnBuilder } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { Column } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { Simplify, Update } from '~/utils.ts'; import type { ForeignKey, UpdateDeleteAction } from '~/gel-core/foreign-keys.ts'; import { ForeignKeyBuilder } from '~/gel-core/foreign-keys.ts'; import type { AnyGelTable, GelTable } from '~/gel-core/table.ts'; import type { SQL } from '~/sql/sql.ts'; import { iife } from '~/tracing-utils.ts'; import type { GelIndexOpClass } from '../indexes.ts'; import { uniqueKeyName } from '../unique-constraint.ts'; export interface ReferenceConfig { ref: () => GelColumn; actions: { onUpdate?: UpdateDeleteAction; onDelete?: UpdateDeleteAction; }; } export interface GelColumnBuilderBase< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TTypeConfig extends object = object, > extends ColumnBuilderBase {} export abstract class GelColumnBuilder< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, TTypeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, > extends ColumnBuilder implements GelColumnBuilderBase { private foreignKeyConfigs: ReferenceConfig[] = []; static override readonly [entityKind]: string = 'GelColumnBuilder'; array(size?: TSize): GelArrayBuilder< & { name: T['name']; dataType: 'array'; columnType: 'GelArray'; data: T['data'][]; driverParam: T['driverParam'][] | string; enumValues: T['enumValues']; size: TSize; baseBuilder: T; } & (T extends { notNull: true } ? { notNull: true } : {}) & (T extends { hasDefault: true } ? { hasDefault: true } : {}), T > { return new GelArrayBuilder(this.config.name, this as GelColumnBuilder, size as any); } references( ref: ReferenceConfig['ref'], actions: ReferenceConfig['actions'] = {}, ): this { this.foreignKeyConfigs.push({ ref, actions }); return this; } unique( name?: string, config?: { nulls: 'distinct' | 'not distinct' }, ): this { this.config.isUnique = true; this.config.uniqueName = name; this.config.uniqueType = config?.nulls; return this; } generatedAlwaysAs(as: SQL | T['data'] | (() => SQL)): HasGenerated { this.config.generated = { as, type: 'always', mode: 'stored', }; return this as HasGenerated; } /** @internal */ buildForeignKeys(column: GelColumn, table: GelTable): ForeignKey[] { return this.foreignKeyConfigs.map(({ ref, actions }) => { return iife( (ref, actions) => { const builder = new ForeignKeyBuilder(() => { const foreignColumn = ref(); return { columns: [column], foreignColumns: [foreignColumn] }; }); if (actions.onUpdate) { builder.onUpdate(actions.onUpdate); } if (actions.onDelete) { builder.onDelete(actions.onDelete); } return builder.build(table); }, ref, actions, ); }); } /** @internal */ abstract build( table: AnyGelTable<{ name: TTableName }>, ): GelColumn>; /** @internal */ buildExtraConfigColumn( table: AnyGelTable<{ name: TTableName }>, ): GelExtraConfigColumn { return new GelExtraConfigColumn(table, this.config); } } // To understand how to use `GelColumn` and `GelColumn`, see `Column` and `AnyColumn` documentation. export abstract class GelColumn< T extends ColumnBaseConfig = ColumnBaseConfig, TRuntimeConfig extends object = {}, TTypeConfig extends object = {}, > extends Column { static override readonly [entityKind]: string = 'GelColumn'; constructor( override readonly table: GelTable, config: ColumnBuilderRuntimeConfig, ) { if (!config.uniqueName) { config.uniqueName = uniqueKeyName(table, [config.name]); } super(table, config); } } export type IndexedExtraConfigType = { order?: 'asc' | 'desc'; nulls?: 'first' | 'last'; opClass?: string }; export class GelExtraConfigColumn< T extends ColumnBaseConfig = ColumnBaseConfig, > extends GelColumn { static override readonly [entityKind]: string = 'GelExtraConfigColumn'; override getSQLType(): string { return this.getSQLType(); } indexConfig: IndexedExtraConfigType = { order: this.config.order ?? 'asc', nulls: this.config.nulls ?? 'last', opClass: this.config.opClass, }; defaultConfig: IndexedExtraConfigType = { order: 'asc', nulls: 'last', opClass: undefined, }; asc(): Omit { this.indexConfig.order = 'asc'; return this; } desc(): Omit { this.indexConfig.order = 'desc'; return this; } nullsFirst(): Omit { this.indexConfig.nulls = 'first'; return this; } nullsLast(): Omit { this.indexConfig.nulls = 'last'; return this; } /** * ### PostgreSQL documentation quote * * > An operator class with optional parameters can be specified for each column of an index. * The operator class identifies the operators to be used by the index for that column. * For example, a B-tree index on four-byte integers would use the int4_ops class; * this operator class includes comparison functions for four-byte integers. * In practice the default operator class for the column's data type is usually sufficient. * The main point of having operator classes is that for some data types, there could be more than one meaningful ordering. * For example, we might want to sort a complex-number data type either by absolute value or by real part. * We could do this by defining two operator classes for the data type and then selecting the proper class when creating an index. * More information about operator classes check: * * ### Useful links * https://www.postgresql.org/docs/current/sql-createindex.html * * https://www.postgresql.org/docs/current/indexes-opclass.html * * https://www.postgresql.org/docs/current/xindex.html * * ### Additional types * If you have the `Gel_vector` extension installed in your database, you can use the * `vector_l2_ops`, `vector_ip_ops`, `vector_cosine_ops`, `vector_l1_ops`, `bit_hamming_ops`, `bit_jaccard_ops`, `halfvec_l2_ops`, `sparsevec_l2_ops` options, which are predefined types. * * **You can always specify any string you want in the operator class, in case Drizzle doesn't have it natively in its types** * * @param opClass * @returns */ op(opClass: GelIndexOpClass): Omit { this.indexConfig.opClass = opClass; return this; } } export class IndexedColumn { static readonly [entityKind]: string = 'IndexedColumn'; constructor( name: string | undefined, keyAsName: boolean, type: string, indexConfig: IndexedExtraConfigType, ) { this.name = name; this.keyAsName = keyAsName; this.type = type; this.indexConfig = indexConfig; } name: string | undefined; keyAsName: boolean; type: string; indexConfig: IndexedExtraConfigType; } export type AnyGelColumn> = {}> = GelColumn< Required, TPartial>> >; export type GelArrayColumnBuilderBaseConfig = ColumnBuilderBaseConfig<'array', 'GelArray'> & { size: number | undefined; baseBuilder: ColumnBuilderBaseConfig; }; export class GelArrayBuilder< T extends GelArrayColumnBuilderBaseConfig, TBase extends ColumnBuilderBaseConfig | GelArrayColumnBuilderBaseConfig, > extends GelColumnBuilder< T, { baseBuilder: TBase extends GelArrayColumnBuilderBaseConfig ? GelArrayBuilder< TBase, TBase extends { baseBuilder: infer TBaseBuilder extends ColumnBuilderBaseConfig } ? TBaseBuilder : never > : GelColumnBuilder>>>; size: T['size']; }, { baseBuilder: TBase extends GelArrayColumnBuilderBaseConfig ? GelArrayBuilder< TBase, TBase extends { baseBuilder: infer TBaseBuilder extends ColumnBuilderBaseConfig } ? TBaseBuilder : never > : GelColumnBuilder>>>; size: T['size']; } > { static override readonly [entityKind] = 'GelArrayBuilder'; constructor( name: string, baseBuilder: GelArrayBuilder['config']['baseBuilder'], size: T['size'], ) { super(name, 'array', 'GelArray'); this.config.baseBuilder = baseBuilder; this.config.size = size; } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelArray & { size: T['size']; baseBuilder: T['baseBuilder'] }, TBase> { const baseColumn = this.config.baseBuilder.build(table); return new GelArray & { size: T['size']; baseBuilder: T['baseBuilder'] }, TBase>( table as AnyGelTable<{ name: MakeColumnConfig['tableName'] }>, this.config as ColumnBuilderRuntimeConfig, baseColumn, ); } } export class GelArray< T extends ColumnBaseConfig<'array', 'GelArray'> & { size: number | undefined; baseBuilder: ColumnBuilderBaseConfig; }, TBase extends ColumnBuilderBaseConfig, > extends GelColumn { readonly size: T['size']; static override readonly [entityKind]: string = 'GelArray'; constructor( table: AnyGelTable<{ name: T['tableName'] }>, config: GelArrayBuilder['config'], readonly baseColumn: GelColumn, readonly range?: [number | undefined, number | undefined], ) { super(table, config); this.size = config.size; } getSQLType(): string { return `${this.baseColumn.getSQLType()}[${typeof this.size === 'number' ? this.size : ''}]`; } } ================================================ FILE: drizzle-orm/src/gel-core/columns/custom.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import type { SQL } from '~/sql/sql.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export type ConvertCustomConfig> = & { name: TName; dataType: 'custom'; columnType: 'GelCustomColumn'; data: T['data']; driverParam: T['driverData']; enumValues: undefined; } & (T['notNull'] extends true ? { notNull: true } : {}) & (T['default'] extends true ? { hasDefault: true } : {}); export interface GelCustomColumnInnerConfig { customTypeValues: CustomTypeValues; } export class GelCustomColumnBuilder> extends GelColumnBuilder< T, { fieldConfig: CustomTypeValues['config']; customTypeParams: CustomTypeParams; }, { gelColumnBuilderBrand: 'GelCustomColumnBuilderBrand'; } > { static override readonly [entityKind]: string = 'GelCustomColumnBuilder'; constructor( name: T['name'], fieldConfig: CustomTypeValues['config'], customTypeParams: CustomTypeParams, ) { super(name, 'custom', 'GelCustomColumn'); this.config.fieldConfig = fieldConfig; this.config.customTypeParams = customTypeParams; } /** @internal */ build( table: AnyGelTable<{ name: TTableName }>, ): GelCustomColumn> { return new GelCustomColumn>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class GelCustomColumn> extends GelColumn { static override readonly [entityKind]: string = 'GelCustomColumn'; private sqlName: string; private mapTo?: (value: T['data']) => T['driverParam']; private mapFrom?: (value: T['driverParam']) => T['data']; constructor( table: AnyGelTable<{ name: T['tableName'] }>, config: GelCustomColumnBuilder['config'], ) { super(table, config); this.sqlName = config.customTypeParams.dataType(config.fieldConfig); this.mapTo = config.customTypeParams.toDriver; this.mapFrom = config.customTypeParams.fromDriver; } getSQLType(): string { return this.sqlName; } override mapFromDriverValue(value: T['driverParam']): T['data'] { return typeof this.mapFrom === 'function' ? this.mapFrom(value) : value as T['data']; } override mapToDriverValue(value: T['data']): T['driverParam'] { return typeof this.mapTo === 'function' ? this.mapTo(value) : value as T['data']; } } export type CustomTypeValues = { /** * Required type for custom column, that will infer proper type model * * Examples: * * If you want your column to be `string` type after selecting/or on inserting - use `data: string`. Like `text`, `varchar` * * If you want your column to be `number` type after selecting/or on inserting - use `data: number`. Like `integer` */ data: unknown; /** * Type helper, that represents what type database driver is accepting for specific database data type */ driverData?: unknown; /** * What config type should be used for {@link CustomTypeParams} `dataType` generation */ config?: Record; /** * Whether the config argument should be required or not * @default false */ configRequired?: boolean; /** * If your custom data type should be notNull by default you can use `notNull: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ notNull?: boolean; /** * If your custom data type has default you can use `default: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ default?: boolean; }; export interface CustomTypeParams { /** * Database data type string representation, that is used for migrations * @example * ``` * `jsonb`, `text` * ``` * * If database data type needs additional params you can use them from `config` param * @example * ``` * `varchar(256)`, `numeric(2,3)` * ``` * * To make `config` be of specific type please use config generic in {@link CustomTypeValues} * * @example * Usage example * ``` * dataType() { * return 'boolean'; * }, * ``` * Or * ``` * dataType(config) { * return typeof config.length !== 'undefined' ? `varchar(${config.length})` : `varchar`; * } * ``` */ dataType: (config: T['config'] | (Equal extends true ? never : undefined)) => string; /** * Optional mapping function, between user input and driver * @example * For example, when using jsonb we need to map JS/TS object to string before writing to database * ``` * toDriver(value: TData): string { * return JSON.stringify(value); * } * ``` */ toDriver?: (value: T['data']) => T['driverData'] | SQL; /** * Optional mapping function, that is responsible for data mapping from database to JS/TS code * @example * For example, when using timestamp we need to map string Date representation to JS Date * ``` * fromDriver(value: string): Date { * return new Date(value); * }, * ``` */ fromDriver?: (value: T['driverData']) => T['data']; } /** * Custom gel database data type generator */ export function customType( customTypeParams: CustomTypeParams, ): Equal extends true ? { & T['config']>( fieldConfig: TConfig, ): GelCustomColumnBuilder>; ( dbName: TName, fieldConfig: T['config'], ): GelCustomColumnBuilder>; } : { (): GelCustomColumnBuilder>; & T['config']>( fieldConfig?: TConfig, ): GelCustomColumnBuilder>; ( dbName: TName, fieldConfig?: T['config'], ): GelCustomColumnBuilder>; } { return ( a?: TName | T['config'], b?: T['config'], ): GelCustomColumnBuilder> => { const { name, config } = getColumnNameAndConfig(a, b); return new GelCustomColumnBuilder(name as ConvertCustomConfig['name'], config, customTypeParams); }; } ================================================ FILE: drizzle-orm/src/gel-core/columns/date-duration.ts ================================================ import type { DateDuration } from 'gel'; import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export type GelDateDurationBuilderInitial = GelDateDurationBuilder<{ name: TName; dataType: 'dateDuration'; columnType: 'GelDateDuration'; data: DateDuration; driverParam: DateDuration; enumValues: undefined; }>; export class GelDateDurationBuilder> extends GelColumnBuilder { static override readonly [entityKind]: string = 'GelDateDurationBuilder'; constructor( name: T['name'], ) { super(name, 'dateDuration', 'GelDateDuration'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelDateDuration> { return new GelDateDuration>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class GelDateDuration> extends GelColumn { static override readonly [entityKind]: string = 'GelDateDuration'; getSQLType(): string { return `dateDuration`; } } export function dateDuration(): GelDateDurationBuilderInitial<''>; export function dateDuration(name: TName): GelDateDurationBuilderInitial; export function dateDuration(name?: string) { return new GelDateDurationBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/date.common.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnDataType } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import { sql } from '~/sql/sql.ts'; import { GelColumnBuilder } from './common.ts'; export abstract class GelLocalDateColumnBaseBuilder< T extends ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, > extends GelColumnBuilder { static override readonly [entityKind]: string = 'GelLocalDateColumnBaseBuilder'; defaultNow() { return this.default(sql`now()`); } } ================================================ FILE: drizzle-orm/src/gel-core/columns/decimal.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export type GelDecimalBuilderInitial = GelDecimalBuilder<{ name: TName; dataType: 'string'; columnType: 'GelDecimal'; data: string; driverParam: string; enumValues: undefined; }>; export class GelDecimalBuilder> extends GelColumnBuilder< T > { static override readonly [entityKind]: string = 'GelDecimalBuilder'; constructor(name: T['name']) { super(name, 'string', 'GelDecimal'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelDecimal> { return new GelDecimal>(table, this.config as ColumnBuilderRuntimeConfig); } } export class GelDecimal> extends GelColumn { static override readonly [entityKind]: string = 'GelDecimal'; constructor(table: AnyGelTable<{ name: T['tableName'] }>, config: GelDecimalBuilder['config']) { super(table, config); } getSQLType(): string { return 'numeric'; } } export function decimal(): GelDecimalBuilderInitial<''>; export function decimal(name: TName): GelDecimalBuilderInitial; export function decimal(name?: string) { return new GelDecimalBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/double-precision.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export type GelDoublePrecisionBuilderInitial = GelDoublePrecisionBuilder<{ name: TName; dataType: 'number'; columnType: 'GelDoublePrecision'; data: number; driverParam: number; enumValues: undefined; }>; export class GelDoublePrecisionBuilder> extends GelColumnBuilder { static override readonly [entityKind]: string = 'GelDoublePrecisionBuilder'; constructor(name: T['name']) { super(name, 'number', 'GelDoublePrecision'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelDoublePrecision> { return new GelDoublePrecision>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class GelDoublePrecision> extends GelColumn { static override readonly [entityKind]: string = 'GelDoublePrecision'; getSQLType(): string { return 'double precision'; } override mapFromDriverValue(value: string | number): number { if (typeof value === 'string') { return Number.parseFloat(value); } return value; } } export function doublePrecision(): GelDoublePrecisionBuilderInitial<''>; export function doublePrecision(name: TName): GelDoublePrecisionBuilderInitial; export function doublePrecision(name?: string) { return new GelDoublePrecisionBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/duration.ts ================================================ import type { Duration } from 'gel'; import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export type GelDurationBuilderInitial = GelDurationBuilder<{ name: TName; dataType: 'duration'; columnType: 'GelDuration'; data: Duration; driverParam: Duration; enumValues: undefined; }>; export class GelDurationBuilder> extends GelColumnBuilder { static override readonly [entityKind]: string = 'GelDurationBuilder'; constructor( name: T['name'], ) { super(name, 'duration', 'GelDuration'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelDuration> { return new GelDuration>(table, this.config as ColumnBuilderRuntimeConfig); } } export class GelDuration> extends GelColumn { static override readonly [entityKind]: string = 'GelDuration'; getSQLType(): string { return `duration`; } } export function duration(): GelDurationBuilderInitial<''>; export function duration(name: TName): GelDurationBuilderInitial; export function duration(name?: string) { return new GelDurationBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/index.ts ================================================ export * from './bigint.ts'; export * from './bigintT.ts'; export * from './boolean.ts'; export * from './bytes.ts'; export * from './common.ts'; export * from './custom.ts'; export * from './date-duration.ts'; export * from './decimal.ts'; export * from './double-precision.ts'; export * from './duration.ts'; export * from './int.common.ts'; export * from './integer.ts'; export * from './json.ts'; export * from './localdate.ts'; export * from './localtime.ts'; export * from './real.ts'; export * from './relative-duration.ts'; export * from './smallint.ts'; export * from './text.ts'; export * from './timestamp.ts'; export * from './timestamptz.ts'; export * from './uuid.ts'; ================================================ FILE: drizzle-orm/src/gel-core/columns/int.common.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnDataType, GeneratedIdentityConfig, IsIdentity } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import type { GelSequenceOptions } from '../sequence.ts'; import { GelColumnBuilder } from './common.ts'; export abstract class GelIntColumnBaseBuilder< T extends ColumnBuilderBaseConfig, > extends GelColumnBuilder< T, { generatedIdentity: GeneratedIdentityConfig } > { static override readonly [entityKind]: string = 'GelIntColumnBaseBuilder'; generatedAlwaysAsIdentity( sequence?: GelSequenceOptions & { name?: string }, ): IsIdentity { if (sequence) { const { name, ...options } = sequence; this.config.generatedIdentity = { type: 'always', sequenceName: name, sequenceOptions: options, }; } else { this.config.generatedIdentity = { type: 'always', }; } this.config.hasDefault = true; this.config.notNull = true; return this as IsIdentity; } generatedByDefaultAsIdentity( sequence?: GelSequenceOptions & { name?: string }, ): IsIdentity { if (sequence) { const { name, ...options } = sequence; this.config.generatedIdentity = { type: 'byDefault', sequenceName: name, sequenceOptions: options, }; } else { this.config.generatedIdentity = { type: 'byDefault', }; } this.config.hasDefault = true; this.config.notNull = true; return this as IsIdentity; } } ================================================ FILE: drizzle-orm/src/gel-core/columns/integer.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '../table.ts'; import { GelColumn } from './common.ts'; import { GelIntColumnBaseBuilder } from './int.common.ts'; export type GelIntegerBuilderInitial = GelIntegerBuilder<{ name: TName; dataType: 'number'; columnType: 'GelInteger'; data: number; driverParam: number; enumValues: undefined; }>; export class GelIntegerBuilder> extends GelIntColumnBaseBuilder { static override readonly [entityKind]: string = 'GelIntegerBuilder'; constructor(name: T['name']) { super(name, 'number', 'GelInteger'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelInteger> { return new GelInteger>(table, this.config as ColumnBuilderRuntimeConfig); } } export class GelInteger> extends GelColumn { static override readonly [entityKind]: string = 'GelInteger'; getSQLType(): string { return 'integer'; } } export function integer(): GelIntegerBuilderInitial<''>; export function integer(name: TName): GelIntegerBuilderInitial; export function integer(name?: string) { return new GelIntegerBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/json.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export type GelJsonBuilderInitial = GelJsonBuilder<{ name: TName; dataType: 'json'; columnType: 'GelJson'; data: unknown; driverParam: unknown; enumValues: undefined; }>; export class GelJsonBuilder> extends GelColumnBuilder< T > { static override readonly [entityKind]: string = 'GelJsonBuilder'; constructor(name: T['name']) { super(name, 'json', 'GelJson'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelJson> { return new GelJson>(table, this.config as ColumnBuilderRuntimeConfig); } } export class GelJson> extends GelColumn { static override readonly [entityKind]: string = 'GelJson'; constructor(table: AnyGelTable<{ name: T['tableName'] }>, config: GelJsonBuilder['config']) { super(table, config); } getSQLType(): string { return 'json'; } } export function json(): GelJsonBuilderInitial<''>; export function json(name: TName): GelJsonBuilderInitial; export function json(name?: string) { return new GelJsonBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/localdate.ts ================================================ import type { LocalDate } from 'gel'; import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn } from './common.ts'; import { GelLocalDateColumnBaseBuilder } from './date.common.ts'; export type GelLocalDateStringBuilderInitial = GelLocalDateStringBuilder<{ name: TName; dataType: 'localDate'; columnType: 'GelLocalDateString'; data: LocalDate; driverParam: LocalDate; enumValues: undefined; }>; export class GelLocalDateStringBuilder> extends GelLocalDateColumnBaseBuilder { static override readonly [entityKind]: string = 'GelLocalDateStringBuilder'; constructor(name: T['name']) { super(name, 'localDate', 'GelLocalDateString'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelLocalDateString> { return new GelLocalDateString>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class GelLocalDateString> extends GelColumn { static override readonly [entityKind]: string = 'GelLocalDateString'; getSQLType(): string { return 'cal::local_date'; } } export function localDate(): GelLocalDateStringBuilderInitial<''>; export function localDate(name: TName): GelLocalDateStringBuilderInitial; export function localDate(name?: string) { return new GelLocalDateStringBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/localtime.ts ================================================ import type { LocalTime } from 'gel'; import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn } from './common.ts'; import { GelLocalDateColumnBaseBuilder } from './date.common.ts'; export type GelLocalTimeBuilderInitial = GelLocalTimeBuilder<{ name: TName; dataType: 'localTime'; columnType: 'GelLocalTime'; data: LocalTime; driverParam: LocalTime; enumValues: undefined; }>; export class GelLocalTimeBuilder> extends GelLocalDateColumnBaseBuilder { static override readonly [entityKind]: string = 'GelLocalTimeBuilder'; constructor(name: T['name']) { super(name, 'localTime', 'GelLocalTime'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelLocalTime> { return new GelLocalTime>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class GelLocalTime> extends GelColumn { static override readonly [entityKind]: string = 'GelLocalTime'; getSQLType(): string { return 'cal::local_time'; } } export function localTime(): GelLocalTimeBuilderInitial<''>; export function localTime(name: TName): GelLocalTimeBuilderInitial; export function localTime(name?: string) { return new GelLocalTimeBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/real.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export type GelRealBuilderInitial = GelRealBuilder<{ name: TName; dataType: 'number'; columnType: 'GelReal'; data: number; driverParam: number; enumValues: undefined; }>; export class GelRealBuilder> extends GelColumnBuilder< T, { length: number | undefined } > { static override readonly [entityKind]: string = 'GelRealBuilder'; constructor(name: T['name'], length?: number) { super(name, 'number', 'GelReal'); this.config.length = length; } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelReal> { return new GelReal>(table, this.config as ColumnBuilderRuntimeConfig); } } export class GelReal> extends GelColumn { static override readonly [entityKind]: string = 'GelReal'; constructor(table: AnyGelTable<{ name: T['tableName'] }>, config: GelRealBuilder['config']) { super(table, config); } getSQLType(): string { return 'real'; } } export function real(): GelRealBuilderInitial<''>; export function real(name: TName): GelRealBuilderInitial; export function real(name?: string) { return new GelRealBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/relative-duration.ts ================================================ import type { RelativeDuration } from 'gel'; import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export type GelRelDurationBuilderInitial = GelRelDurationBuilder<{ name: TName; dataType: 'relDuration'; columnType: 'GelRelDuration'; data: RelativeDuration; driverParam: RelativeDuration; enumValues: undefined; }>; export class GelRelDurationBuilder> extends GelColumnBuilder { static override readonly [entityKind]: string = 'GelRelDurationBuilder'; constructor( name: T['name'], ) { super(name, 'relDuration', 'GelRelDuration'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelRelDuration> { return new GelRelDuration>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class GelRelDuration> extends GelColumn { static override readonly [entityKind]: string = 'GelRelDuration'; getSQLType(): string { return `edgedbt.relative_duration_t`; } } export function relDuration(): GelRelDurationBuilderInitial<''>; export function relDuration(name: TName): GelRelDurationBuilderInitial; export function relDuration(name?: string) { return new GelRelDurationBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/smallint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn } from './common.ts'; import { GelIntColumnBaseBuilder } from './int.common.ts'; export type GelSmallIntBuilderInitial = GelSmallIntBuilder<{ name: TName; dataType: 'number'; columnType: 'GelSmallInt'; data: number; driverParam: number; enumValues: undefined; }>; export class GelSmallIntBuilder> extends GelIntColumnBaseBuilder { static override readonly [entityKind]: string = 'GelSmallIntBuilder'; constructor(name: T['name']) { super(name, 'number', 'GelSmallInt'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelSmallInt> { return new GelSmallInt>(table, this.config as ColumnBuilderRuntimeConfig); } } export class GelSmallInt> extends GelColumn { static override readonly [entityKind]: string = 'GelSmallInt'; getSQLType(): string { return 'smallint'; } } export function smallint(): GelSmallIntBuilderInitial<''>; export function smallint(name: TName): GelSmallIntBuilderInitial; export function smallint(name?: string) { return new GelSmallIntBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/text.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; type GelTextBuilderInitial = GelTextBuilder<{ name: TName; dataType: 'string'; columnType: 'GelText'; data: string; driverParam: string; enumValues: undefined; }>; export class GelTextBuilder< T extends ColumnBuilderBaseConfig<'string', 'GelText'>, > extends GelColumnBuilder { static override readonly [entityKind]: string = 'GelTextBuilder'; constructor( name: T['name'], ) { super(name, 'string', 'GelText'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelText> { return new GelText>(table, this.config as ColumnBuilderRuntimeConfig); } } export class GelText> extends GelColumn { static override readonly [entityKind]: string = 'GelText'; override readonly enumValues = this.config.enumValues; getSQLType(): string { return 'text'; } } export function text(): GelTextBuilderInitial<''>; export function text(name: TName): GelTextBuilderInitial; export function text(name?: string): any { return new GelTextBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/timestamp.ts ================================================ import type { LocalDateTime } from 'gel'; import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn } from './common.ts'; import { GelLocalDateColumnBaseBuilder } from './date.common.ts'; export type GelTimestampBuilderInitial = GelTimestampBuilder<{ name: TName; dataType: 'localDateTime'; columnType: 'GelTimestamp'; data: LocalDateTime; driverParam: LocalDateTime; enumValues: undefined; }>; export class GelTimestampBuilder> extends GelLocalDateColumnBaseBuilder< T > { static override readonly [entityKind]: string = 'GelTimestampBuilder'; constructor( name: T['name'], ) { super(name, 'localDateTime', 'GelTimestamp'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelTimestamp> { return new GelTimestamp>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class GelTimestamp> extends GelColumn { static override readonly [entityKind]: string = 'GelTimestamp'; constructor(table: AnyGelTable<{ name: T['tableName'] }>, config: GelTimestampBuilder['config']) { super(table, config); } getSQLType(): string { return 'cal::local_datetime'; } } export function timestamp(): GelTimestampBuilderInitial<''>; export function timestamp( name: TName, ): GelTimestampBuilderInitial; export function timestamp(name?: string) { return new GelTimestampBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/timestamptz.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn } from './common.ts'; import { GelLocalDateColumnBaseBuilder } from './date.common.ts'; export type GelTimestampTzBuilderInitial = GelTimestampTzBuilder<{ name: TName; dataType: 'date'; columnType: 'GelTimestampTz'; data: Date; driverParam: Date; enumValues: undefined; }>; export class GelTimestampTzBuilder> extends GelLocalDateColumnBaseBuilder< T > { static override readonly [entityKind]: string = 'GelTimestampTzBuilder'; constructor( name: T['name'], ) { super(name, 'date', 'GelTimestampTz'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelTimestampTz> { return new GelTimestampTz>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class GelTimestampTz> extends GelColumn { static override readonly [entityKind]: string = 'GelTimestampTz'; constructor(table: AnyGelTable<{ name: T['tableName'] }>, config: GelTimestampTzBuilder['config']) { super(table, config); } getSQLType(): string { return 'datetime'; } } export function timestamptz(): GelTimestampTzBuilderInitial<''>; export function timestamptz( name: TName, ): GelTimestampTzBuilderInitial; export function timestamptz(name?: string) { return new GelTimestampTzBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/columns/uuid.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyGelTable } from '~/gel-core/table.ts'; import { GelColumn, GelColumnBuilder } from './common.ts'; export type GelUUIDBuilderInitial = GelUUIDBuilder<{ name: TName; dataType: 'string'; columnType: 'GelUUID'; data: string; driverParam: string; enumValues: undefined; }>; export class GelUUIDBuilder> extends GelColumnBuilder { static override readonly [entityKind]: string = 'GelUUIDBuilder'; constructor(name: T['name']) { super(name, 'string', 'GelUUID'); } /** @internal */ override build( table: AnyGelTable<{ name: TTableName }>, ): GelUUID> { return new GelUUID>(table, this.config as ColumnBuilderRuntimeConfig); } } export class GelUUID> extends GelColumn { static override readonly [entityKind]: string = 'GelUUID'; getSQLType(): string { return 'uuid'; } } export function uuid(): GelUUIDBuilderInitial<''>; export function uuid(name: TName): GelUUIDBuilderInitial; export function uuid(name?: string) { return new GelUUIDBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/gel-core/db.ts ================================================ import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { GelDialect } from '~/gel-core/dialect.ts'; import { GelDeleteBase, GelInsertBuilder, GelSelectBuilder, GelUpdateBuilder, QueryBuilder, } from '~/gel-core/query-builders/index.ts'; import type { GelQueryResultHKT, GelSession, GelTransaction, PreparedQueryConfig } from '~/gel-core/session.ts'; import type { GelTable } from '~/gel-core/table.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import { type ColumnsSelection, type SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import { WithSubquery } from '~/subquery.ts'; import type { DrizzleTypeError } from '~/utils.ts'; import type { GelColumn } from './columns/index.ts'; import { GelCountBuilder } from './query-builders/count.ts'; import { RelationalQueryBuilder } from './query-builders/query.ts'; import { GelRaw } from './query-builders/raw.ts'; import type { SelectedFields } from './query-builders/select.types.ts'; import type { WithSubqueryWithSelection } from './subquery.ts'; import type { GelViewBase } from './view-base.ts'; export class GelDatabase< TQueryResult extends GelQueryResultHKT, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations, > { static readonly [entityKind]: string = 'GelDatabase'; declare readonly _: { readonly schema: TSchema | undefined; readonly fullSchema: TFullSchema; readonly tableNamesMap: Record; readonly session: GelSession; }; query: TFullSchema extends Record ? DrizzleTypeError<'Seems like the schema generic is missing - did you forget to add it to your DB type?'> : { [K in keyof TSchema]: RelationalQueryBuilder; }; constructor( /** @internal */ readonly dialect: GelDialect, /** @internal */ readonly session: GelSession, schema: RelationalSchemaConfig | undefined, ) { this._ = schema ? { schema: schema.schema, fullSchema: schema.fullSchema as TFullSchema, tableNamesMap: schema.tableNamesMap, session, } : { schema: undefined, fullSchema: {} as TFullSchema, tableNamesMap: {}, session, }; this.query = {} as typeof this['query']; if (this._.schema) { for (const [tableName, columns] of Object.entries(this._.schema)) { (this.query as GelDatabase>['query'])[tableName] = new RelationalQueryBuilder( schema!.fullSchema, this._.schema, this._.tableNamesMap, schema!.fullSchema[tableName] as GelTable, columns, dialect, session, ); } } this.$cache = { invalidate: async (_params: any) => {} }; } /** * Creates a subquery that defines a temporary named result set as a CTE. * * It is useful for breaking down complex queries into simpler parts and for reusing the result set in subsequent parts of the query. * * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} * * @param alias The alias for the subquery. * * Failure to provide an alias will result in a DrizzleTypeError, preventing the subquery from being referenced in other queries. * * @example * * ```ts * // Create a subquery with alias 'sq' and use it in the select query * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); * * const result = await db.with(sq).select().from(sq); * ``` * * To select arbitrary SQL values as fields in a CTE and reference them in other CTEs or in the main query, you need to add aliases to them: * * ```ts * // Select an arbitrary SQL value as a field in a CTE and reference it in the main query * const sq = db.$with('sq').as(db.select({ * name: sql`upper(${users.name})`.as('name'), * }) * .from(users)); * * const result = await db.with(sq).select({ name: sq.name }).from(sq); * ``` */ $with(alias: TAlias) { const self = this; return { as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): WithSubqueryWithSelection { if (typeof qb === 'function') { qb = qb(new QueryBuilder(self.dialect)); } return new Proxy( new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as WithSubqueryWithSelection; }, }; } $count( source: GelTable | GelViewBase | SQL | SQLWrapper, filters?: SQL, ) { return new GelCountBuilder({ source, filters, session: this.session }); } /** * Incorporates a previously defined CTE (using `$with`) into the main query. * * This method allows the main query to reference a temporary named result set. * * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} * * @param queries The CTEs to incorporate into the main query. * * @example * * ```ts * // Define a subquery 'sq' as a CTE using $with * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); * * // Incorporate the CTE 'sq' into the main query and select from it * const result = await db.with(sq).select().from(sq); * ``` */ with(...queries: WithSubquery[]) { const self = this; /** * Creates a select query. * * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select} * * @param fields The selection object. * * @example * * ```ts * // Select all columns and all rows from the 'cars' table * const allCars: Car[] = await db.select().from(cars); * * // Select specific columns and all rows from the 'cars' table * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ * id: cars.id, * brand: cars.brand * }) * .from(cars); * ``` * * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: * * ```ts * // Select specific columns along with expression and all rows from the 'cars' table * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ * id: cars.id, * lowerBrand: sql`lower(${cars.brand})`, * }) * .from(cars); * ``` */ function select(): GelSelectBuilder; function select(fields: TSelection): GelSelectBuilder; function select(fields?: SelectedFields): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, }); } /** * Adds `distinct` expression to the select query. * * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param fields The selection object. * * @example * ```ts * // Select all unique rows from the 'cars' table * await db.selectDistinct() * .from(cars) * .orderBy(cars.id, cars.brand, cars.color); * * // Select all unique brands from the 'cars' table * await db.selectDistinct({ brand: cars.brand }) * .from(cars) * .orderBy(cars.brand); * ``` */ function selectDistinct(): GelSelectBuilder; function selectDistinct(fields: TSelection): GelSelectBuilder; function selectDistinct(fields?: SelectedFields): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, distinct: true, }); } /** * Adds `distinct on` expression to the select query. * * Calling this method will specify how the unique rows are determined. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param on The expression defining uniqueness. * @param fields The selection object. * * @example * ```ts * // Select the first row for each unique brand from the 'cars' table * await db.selectDistinctOn([cars.brand]) * .from(cars) * .orderBy(cars.brand); * * // Selects the first occurrence of each unique car brand along with its color from the 'cars' table * await db.selectDistinctOn([cars.brand], { brand: cars.brand, color: cars.color }) * .from(cars) * .orderBy(cars.brand, cars.color); * ``` */ function selectDistinctOn(on: (GelColumn | SQLWrapper)[]): GelSelectBuilder; function selectDistinctOn( on: (GelColumn | SQLWrapper)[], fields: TSelection, ): GelSelectBuilder; function selectDistinctOn( on: (GelColumn | SQLWrapper)[], fields?: SelectedFields, ): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, distinct: { on }, }); } /** * Creates an update query. * * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. * * Use `.set()` method to specify which values to update. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param table The table to update. * * @example * * ```ts * // Update all rows in the 'cars' table * await db.update(cars).set({ color: 'red' }); * * // Update rows with filters and conditions * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); * * // Update with returning clause * const updatedCar: Car[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.id, 1)) * .returning(); * ``` */ function update(table: TTable): GelUpdateBuilder { return new GelUpdateBuilder(table, self.session, self.dialect, queries); } /** * Creates an insert query. * * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. * * See docs: {@link https://orm.drizzle.team/docs/insert} * * @param table The table to insert into. * * @example * * ```ts * // Insert one row * await db.insert(cars).values({ brand: 'BMW' }); * * // Insert multiple rows * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); * * // Insert with returning clause * const insertedCar: Car[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning(); * ``` */ function insert(table: TTable): GelInsertBuilder { return new GelInsertBuilder(table, self.session, self.dialect, queries); } /** * Creates a delete query. * * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param table The table to delete from. * * @example * * ```ts * // Delete all rows in the 'cars' table * await db.delete(cars); * * // Delete rows with filters and conditions * await db.delete(cars).where(eq(cars.color, 'green')); * * // Delete with returning clause * const deletedCar: Car[] = await db.delete(cars) * .where(eq(cars.id, 1)) * .returning(); * ``` */ function delete_(table: TTable): GelDeleteBase { return new GelDeleteBase(table, self.session, self.dialect, queries); } return { select, selectDistinct, selectDistinctOn, update, insert, delete: delete_ }; } /** * Creates a select query. * * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select} * * @param fields The selection object. * * @example * * ```ts * // Select all columns and all rows from the 'cars' table * const allCars: Car[] = await db.select().from(cars); * * // Select specific columns and all rows from the 'cars' table * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ * id: cars.id, * brand: cars.brand * }) * .from(cars); * ``` * * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: * * ```ts * // Select specific columns along with expression and all rows from the 'cars' table * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ * id: cars.id, * lowerBrand: sql`lower(${cars.brand})`, * }) * .from(cars); * ``` */ select(): GelSelectBuilder; select(fields: TSelection): GelSelectBuilder; select(fields?: SelectedFields): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect, }); } /** * Adds `distinct` expression to the select query. * * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param fields The selection object. * * @example * ```ts * // Select all unique rows from the 'cars' table * await db.selectDistinct() * .from(cars) * .orderBy(cars.id, cars.brand, cars.color); * * // Select all unique brands from the 'cars' table * await db.selectDistinct({ brand: cars.brand }) * .from(cars) * .orderBy(cars.brand); * ``` */ selectDistinct(): GelSelectBuilder; selectDistinct(fields: TSelection): GelSelectBuilder; selectDistinct(fields?: SelectedFields): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect, distinct: true, }); } /** * Adds `distinct on` expression to the select query. * * Calling this method will specify how the unique rows are determined. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param on The expression defining uniqueness. * @param fields The selection object. * * @example * ```ts * // Select the first row for each unique brand from the 'cars' table * await db.selectDistinctOn([cars.brand]) * .from(cars) * .orderBy(cars.brand); * * // Selects the first occurrence of each unique car brand along with its color from the 'cars' table * await db.selectDistinctOn([cars.brand], { brand: cars.brand, color: cars.color }) * .from(cars) * .orderBy(cars.brand, cars.color); * ``` */ selectDistinctOn(on: (GelColumn | SQLWrapper)[]): GelSelectBuilder; selectDistinctOn( on: (GelColumn | SQLWrapper)[], fields: TSelection, ): GelSelectBuilder; selectDistinctOn( on: (GelColumn | SQLWrapper)[], fields?: SelectedFields, ): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect, distinct: { on }, }); } $cache: { invalidate: Cache['onMutate'] }; /** * Creates an update query. * * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. * * Use `.set()` method to specify which values to update. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param table The table to update. * * @example * * ```ts * // Update all rows in the 'cars' table * await db.update(cars).set({ color: 'red' }); * * // Update rows with filters and conditions * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); * * // Update with returning clause * const updatedCar: Car[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.id, 1)) * .returning(); * ``` */ update(table: TTable): GelUpdateBuilder { return new GelUpdateBuilder(table, this.session, this.dialect); } /** * Creates an insert query. * * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. * * See docs: {@link https://orm.drizzle.team/docs/insert} * * @param table The table to insert into. * * @example * * ```ts * // Insert one row * await db.insert(cars).values({ brand: 'BMW' }); * * // Insert multiple rows * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); * * // Insert with returning clause * const insertedCar: Car[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning(); * ``` */ insert(table: TTable): GelInsertBuilder { return new GelInsertBuilder(table, this.session, this.dialect); } /** * Creates a delete query. * * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param table The table to delete from. * * @example * * ```ts * // Delete all rows in the 'cars' table * await db.delete(cars); * * // Delete rows with filters and conditions * await db.delete(cars).where(eq(cars.color, 'green')); * * // Delete with returning clause * const deletedCar: Car[] = await db.delete(cars) * .where(eq(cars.id, 1)) * .returning(); * ``` */ delete(table: TTable): GelDeleteBase { return new GelDeleteBase(table, this.session, this.dialect); } // TODO views are not implemented // refreshMaterializedView(view: TView): GelRefreshMaterializedView { // return new GelRefreshMaterializedView(view, this.session, this.dialect); // } execute = Record>( query: SQLWrapper | string, ): GelRaw { const sequel = typeof query === 'string' ? sql.raw(query) : query.getSQL(); const builtQuery = this.dialect.sqlToQuery(sequel); const prepared = this.session.prepareQuery< PreparedQueryConfig & { execute: TRow[] } >( builtQuery, undefined, undefined, false, ); return new GelRaw( () => prepared.execute(undefined), sequel, builtQuery, (result) => prepared.mapResult(result, true), ); } transaction( transaction: (tx: GelTransaction) => Promise, ): Promise { return this.session.transaction(transaction); } } export type GelWithReplicas = Q & { $primary: Q; $replicas: Q[] }; export const withReplicas = < HKT extends GelQueryResultHKT, TFullSchema extends Record, TSchema extends TablesRelationalConfig, Q extends GelDatabase< HKT, TFullSchema, TSchema extends Record ? ExtractTablesWithRelations : TSchema >, >( primary: Q, replicas: [Q, ...Q[]], getReplica: (replicas: Q[]) => Q = () => replicas[Math.floor(Math.random() * replicas.length)]!, ): GelWithReplicas => { const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); const selectDistinctOn: Q['selectDistinctOn'] = (...args: [any]) => getReplica(replicas).selectDistinctOn(...args); const _with: Q['with'] = (...args: any) => getReplica(replicas).with(...args); const $with: Q['$with'] = (arg: any) => getReplica(replicas).$with(arg); const update: Q['update'] = (...args: [any]) => primary.update(...args); const insert: Q['insert'] = (...args: [any]) => primary.insert(...args); const $delete: Q['delete'] = (...args: [any]) => primary.delete(...args); const execute: Q['execute'] = (...args: [any]) => primary.execute(...args); const transaction: Q['transaction'] = (...args: [any]) => primary.transaction(...args); // const refreshMaterializedView: Q['refreshMaterializedView'] = (...args: [any]) => // primary.refreshMaterializedView(...args); return { ...primary, update, insert, delete: $delete, execute, transaction, // refreshMaterializedView, $primary: primary, $replicas: replicas, select, selectDistinct, selectDistinctOn, $with, with: _with, get query() { return getReplica(replicas).query; }, }; }; ================================================ FILE: drizzle-orm/src/gel-core/dialect.ts ================================================ import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; import { CasingCache } from '~/casing.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import { DrizzleError } from '~/errors.ts'; import { GelColumn, GelDecimal, GelJson, GelUUID } from '~/gel-core/columns/index.ts'; import type { AnyGelSelectQueryBuilder, GelDeleteConfig, GelInsertConfig, GelSelectJoinConfig, GelUpdateConfig, } from '~/gel-core/query-builders/index.ts'; import type { GelSelectConfig, SelectedFieldsOrdered } from '~/gel-core/query-builders/select.types.ts'; import { GelTable } from '~/gel-core/table.ts'; import { type BuildRelationalQueryResult, type DBQueryConfig, getOperators, getOrderByOperators, Many, normalizeRelation, One, type Relation, type TableRelationalConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { and, eq, View } from '~/sql/index.ts'; import { type DriverValueEncoder, type Name, Param, type QueryTypingsValue, type QueryWithTypings, SQL, sql, type SQLChunk, } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { getTableName, getTableUniqueName, Table } from '~/table.ts'; import { type Casing, orderSelectedFields, type UpdateSet } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import { GelTimestamp } from './columns/timestamp.ts'; import { GelViewBase } from './view-base.ts'; import type { GelMaterializedView } from './view.ts'; export interface GelDialectConfig { casing?: Casing; } export class GelDialect { static readonly [entityKind]: string = 'GelDialect'; /** @internal */ readonly casing: CasingCache; constructor(config?: GelDialectConfig) { this.casing = new CasingCache(config?.casing); } // TODO can not migrate gel with drizzle // async migrate(migrations: MigrationMeta[], session: GelSession, config: string | MigrationConfig): Promise { // const migrationsTable = typeof config === 'string' // ? '__drizzle_migrations' // : config.migrationsTable ?? '__drizzle_migrations'; // const migrationsSchema = typeof config === 'string' ? 'drizzle' : config.migrationsSchema ?? 'drizzle'; // const migrationTableCreate = sql` // CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)} ( // id SERIAL PRIMARY KEY, // hash text NOT NULL, // created_at bigint // ) // `; // await session.execute(sql`CREATE SCHEMA IF NOT EXISTS ${sql.identifier(migrationsSchema)}`); // await session.execute(migrationTableCreate); // const dbMigrations = await session.all<{ id: number; hash: string; created_at: string }>( // sql`select id, hash, created_at from ${sql.identifier(migrationsSchema)}.${ // sql.identifier(migrationsTable) // } order by created_at desc limit 1`, // ); // const lastDbMigration = dbMigrations[0]; // await session.transaction(async (tx) => { // for await (const migration of migrations) { // if ( // !lastDbMigration // || Number(lastDbMigration.created_at) < migration.folderMillis // ) { // for (const stmt of migration.sql) { // await tx.execute(sql.raw(stmt)); // } // await tx.execute( // sql`insert into ${sql.identifier(migrationsSchema)}.${ // sql.identifier(migrationsTable) // } ("hash", "created_at") values(${migration.hash}, ${migration.folderMillis})`, // ); // } // } // }); // } escapeName(name: string): string { return `"${name}"`; } escapeParam(num: number): string { return `$${num + 1}`; } escapeString(str: string): string { return `'${str.replace(/'/g, "''")}'`; } private buildWithCTE(queries: Subquery[] | undefined): SQL | undefined { if (!queries?.length) return undefined; const withSqlChunks = [sql`with `]; for (const [i, w] of queries.entries()) { withSqlChunks.push(sql`${sql.identifier(w._.alias)} as (${w._.sql})`); if (i < queries.length - 1) { withSqlChunks.push(sql`, `); } } withSqlChunks.push(sql` `); return sql.join(withSqlChunks); } buildDeleteQuery({ table, where, returning, withList }: GelDeleteConfig): SQL { const withSql = this.buildWithCTE(withList); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` : undefined; const whereSql = where ? sql` where ${where}` : undefined; return sql`${withSql}delete from ${table}${whereSql}${returningSql}`; } buildUpdateSet(table: GelTable, set: UpdateSet): SQL { const tableColumns = table[Table.Symbol.Columns]; const columnNames = Object.keys(tableColumns).filter((colName) => set[colName] !== undefined || tableColumns[colName]?.onUpdateFn !== undefined ); const setSize = columnNames.length; return sql.join(columnNames.flatMap((colName, i) => { const col = tableColumns[colName]!; const onUpdateFnResult = col.onUpdateFn?.(); const value = set[colName] ?? (is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col)); const res = sql`${sql.identifier(this.casing.getColumnCasing(col))} = ${value}`; if (i < setSize - 1) { return [res, sql.raw(', ')]; } return [res]; })); } buildUpdateQuery({ table, set, where, returning, withList, from, joins }: GelUpdateConfig): SQL { const withSql = this.buildWithCTE(withList); const tableName = table[GelTable.Symbol.Name]; const tableSchema = table[GelTable.Symbol.Schema]; const origTableName = table[GelTable.Symbol.OriginalName]; const alias = tableName === origTableName ? undefined : tableName; const tableSql = sql`${tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined}${ sql.identifier(origTableName) }${alias && sql` ${sql.identifier(alias)}`}`; const setSql = this.buildUpdateSet(table, set); const fromSql = from && sql.join([sql.raw(' from '), this.buildFromTable(from)]); const joinsSql = this.buildJoins(joins); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: !from })}` : undefined; const whereSql = where ? sql` where ${where}` : undefined; return sql`${withSql}update ${tableSql} set ${setSql}${fromSql}${joinsSql}${whereSql}${returningSql}`; } /** * Builds selection SQL with provided fields/expressions * * Examples: * * `select from` * * `insert ... returning ` * * If `isSingleTable` is true, then columns won't be prefixed with table name * ^ Temporarily disabled behaviour, see comments within method for a reasoning */ private buildSelection( fields: SelectedFieldsOrdered, // eslint-disable-next-line @typescript-eslint/no-unused-vars { isSingleTable = false }: { isSingleTable?: boolean } = {}, ): SQL { const columnsLen = fields.length; const chunks = fields .flatMap(({ field }, i) => { const chunk: SQLChunk[] = []; if (is(field, SQL.Aliased) && field.isSelectionField) { chunk.push(sql.identifier(field.fieldAlias)); } else if (is(field, SQL.Aliased) || is(field, SQL)) { const query = is(field, SQL.Aliased) ? field.sql : field; // Gel throws an error when more than one similarly named columns exist within context instead of preferring the closest one // thus forcing us to be explicit about column's source // if (isSingleTable) { // chunk.push( // new SQL( // query.queryChunks.map((c) => { // if (is(c, GelColumn)) { // return sql.identifier(this.casing.getColumnCasing(c)); // } // return c; // }), // ), // ); // } else { chunk.push(query); // } if (is(field, SQL.Aliased)) { chunk.push(sql` as ${sql.identifier(field.fieldAlias)}`); } } else if (is(field, Column)) { // Gel throws an error when more than one similarly named columns exist within context instead of preferring the closest one // thus forcing us to be explicit about column's source // if (isSingleTable) { // chunk.push(sql.identifier(this.casing.getColumnCasing(field))); // } else { chunk.push(field); // } } else if (is(field, Subquery)) { const entries = Object.entries(field._.selectedFields) as [string, SQL.Aliased | Column | SQL][]; if (entries.length === 1) { const entry = entries[0]![1]; const fieldDecoder = is(entry, SQL) ? entry.decoder : is(entry, Column) ? { mapFromDriverValue: (v: any) => entry.mapFromDriverValue(v) } : entry.sql.decoder; if (fieldDecoder) { field._.sql.decoder = fieldDecoder; } } chunk.push(field); } if (i < columnsLen - 1) { chunk.push(sql`, `); } return chunk; }); return sql.join(chunks); } private buildJoins(joins: GelSelectJoinConfig[] | undefined): SQL | undefined { if (!joins || joins.length === 0) { return undefined; } const joinsArray: SQL[] = []; for (const [index, joinMeta] of joins.entries()) { if (index === 0) { joinsArray.push(sql` `); } const table = joinMeta.table; const lateralSql = joinMeta.lateral ? sql` lateral` : undefined; const onSql = joinMeta.on ? sql` on ${joinMeta.on}` : undefined; if (is(table, GelTable)) { const tableName = table[GelTable.Symbol.Name]; const tableSchema = table[GelTable.Symbol.Schema]; const origTableName = table[GelTable.Symbol.OriginalName]; const alias = tableName === origTableName ? undefined : joinMeta.alias; joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined }${sql.identifier(origTableName)}${alias && sql` ${sql.identifier(alias)}`}${onSql}`, ); } else if (is(table, View)) { const viewName = table[ViewBaseConfig].name; const viewSchema = table[ViewBaseConfig].schema; const origViewName = table[ViewBaseConfig].originalName; const alias = viewName === origViewName ? undefined : joinMeta.alias; joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ viewSchema ? sql`${sql.identifier(viewSchema)}.` : undefined }${sql.identifier(origViewName)}${alias && sql` ${sql.identifier(alias)}`}${onSql}`, ); } else { joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${table}${onSql}`, ); } if (index < joins.length - 1) { joinsArray.push(sql` `); } } return sql.join(joinsArray); } private buildFromTable( table: SQL | Subquery | GelViewBase | GelTable | undefined, ): SQL | Subquery | GelViewBase | GelTable | undefined { if (is(table, Table) && table[Table.Symbol.OriginalName] !== table[Table.Symbol.Name]) { let fullName = sql`${sql.identifier(table[Table.Symbol.OriginalName])}`; if (table[Table.Symbol.Schema]) { fullName = sql`${sql.identifier(table[Table.Symbol.Schema]!)}.${fullName}`; } return sql`${fullName} ${sql.identifier(table[Table.Symbol.Name])}`; } return table; } buildSelectQuery( { withList, fields, fieldsFlat, where, having, table, joins, orderBy, groupBy, limit, offset, lockingClause, distinct, setOperators, }: GelSelectConfig, ): SQL { const fieldsList = fieldsFlat ?? orderSelectedFields(fields); for (const f of fieldsList) { if ( is(f.field, Column) && getTableName(f.field.table) !== (is(table, Subquery) ? table._.alias : is(table, GelViewBase) ? table[ViewBaseConfig].name : is(table, SQL) ? undefined : getTableName(table)) && !((table) => joins?.some(({ alias }) => alias === (table[Table.Symbol.IsAlias] ? getTableName(table) : table[Table.Symbol.BaseName]) ))(f.field.table) ) { const tableName = getTableName(f.field.table); throw new Error( `Your "${ f.path.join('->') }" field references a column "${tableName}"."${f.field.name}", but the table "${tableName}" is not part of the query! Did you forget to join it?`, ); } } const isSingleTable = !joins || joins.length === 0; const withSql = this.buildWithCTE(withList); let distinctSql: SQL | undefined; if (distinct) { distinctSql = distinct === true ? sql` distinct` : sql` distinct on (${sql.join(distinct.on, sql`, `)})`; } const selection = this.buildSelection(fieldsList, { isSingleTable }); const tableSql = this.buildFromTable(table); const joinsSql = this.buildJoins(joins); const whereSql = where ? sql` where ${where}` : undefined; const havingSql = having ? sql` having ${having}` : undefined; let orderBySql; if (orderBy && orderBy.length > 0) { orderBySql = sql` order by ${sql.join(orderBy, sql`, `)}`; } let groupBySql; if (groupBy && groupBy.length > 0) { groupBySql = sql` group by ${sql.join(groupBy, sql`, `)}`; } const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) ? sql` limit ${limit}` : undefined; const offsetSql = offset ? sql` offset ${offset}` : undefined; const lockingClauseSql = sql.empty(); if (lockingClause) { const clauseSql = sql` for ${sql.raw(lockingClause.strength)}`; if (lockingClause.config.of) { clauseSql.append( sql` of ${ sql.join( Array.isArray(lockingClause.config.of) ? lockingClause.config.of : [lockingClause.config.of], sql`, `, ) }`, ); } if (lockingClause.config.noWait) { clauseSql.append(sql` nowait`); } else if (lockingClause.config.skipLocked) { clauseSql.append(sql` skip locked`); } lockingClauseSql.append(clauseSql); } const finalQuery = sql`${withSql}select${distinctSql} ${selection} from ${tableSql}${joinsSql}${whereSql}${groupBySql}${havingSql}${orderBySql}${limitSql}${offsetSql}${lockingClauseSql}`; if (setOperators.length > 0) { return this.buildSetOperations(finalQuery, setOperators); } return finalQuery; } buildSetOperations(leftSelect: SQL, setOperators: GelSelectConfig['setOperators']): SQL { const [setOperator, ...rest] = setOperators; if (!setOperator) { throw new Error('Cannot pass undefined values to any set operator'); } if (rest.length === 0) { return this.buildSetOperationQuery({ leftSelect, setOperator }); } // Some recursive magic here return this.buildSetOperations( this.buildSetOperationQuery({ leftSelect, setOperator }), rest, ); } buildSetOperationQuery({ leftSelect, setOperator: { type, isAll, rightSelect, limit, orderBy, offset }, }: { leftSelect: SQL; setOperator: GelSelectConfig['setOperators'][number] }): SQL { const leftChunk = sql`(${leftSelect.getSQL()}) `; const rightChunk = sql`(${rightSelect.getSQL()})`; let orderBySql; if (orderBy && orderBy.length > 0) { const orderByValues: (SQL | Name)[] = []; // The next bit is necessary because the sql operator replaces ${table.column} with `table`.`column` // which is invalid Sql syntax, Table from one of the SELECTs cannot be used in global ORDER clause for (const singleOrderBy of orderBy) { if (is(singleOrderBy, GelColumn)) { orderByValues.push(sql.identifier(singleOrderBy.name)); } else if (is(singleOrderBy, SQL)) { for (let i = 0; i < singleOrderBy.queryChunks.length; i++) { const chunk = singleOrderBy.queryChunks[i]; if (is(chunk, GelColumn)) { singleOrderBy.queryChunks[i] = sql.identifier(chunk.name); } } orderByValues.push(sql`${singleOrderBy}`); } else { orderByValues.push(sql`${singleOrderBy}`); } } orderBySql = sql` order by ${sql.join(orderByValues, sql`, `)} `; } const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) ? sql` limit ${limit}` : undefined; const operatorChunk = sql.raw(`${type} ${isAll ? 'all ' : ''}`); const offsetSql = offset ? sql` offset ${offset}` : undefined; return sql`${leftChunk}${operatorChunk}${rightChunk}${orderBySql}${limitSql}${offsetSql}`; } buildInsertQuery( { table, values: valuesOrSelect, onConflict, returning, withList, select, overridingSystemValue_ }: GelInsertConfig, ): SQL { const valuesSqlList: ((SQLChunk | SQL)[] | SQL)[] = []; const columns: Record = table[Table.Symbol.Columns]; const colEntries: [string, GelColumn][] = Object.entries(columns).filter(([_, col]) => !col.shouldDisableInsert()); const insertOrder = colEntries.map( ([, column]) => sql.identifier(this.casing.getColumnCasing(column)), ); if (select) { const select = valuesOrSelect as AnyGelSelectQueryBuilder | SQL; if (is(select, SQL)) { valuesSqlList.push(select); } else { valuesSqlList.push(select.getSQL()); } } else { const values = valuesOrSelect as Record[]; valuesSqlList.push(sql.raw('values ')); for (const [valueIndex, value] of values.entries()) { const valueList: (SQLChunk | SQL)[] = []; for (const [fieldName, col] of colEntries) { const colValue = value[fieldName]; if (colValue === undefined || (is(colValue, Param) && colValue.value === undefined)) { // eslint-disable-next-line unicorn/no-negated-condition if (col.defaultFn !== undefined) { const defaultFnResult = col.defaultFn(); const defaultValue = is(defaultFnResult, SQL) ? defaultFnResult : sql.param(defaultFnResult, col); valueList.push(defaultValue); // eslint-disable-next-line unicorn/no-negated-condition } else if (!col.default && col.onUpdateFn !== undefined) { const onUpdateFnResult = col.onUpdateFn(); const newValue = is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col); valueList.push(newValue); } else { valueList.push(sql`default`); } } else { valueList.push(colValue); } } valuesSqlList.push(valueList); if (valueIndex < values.length - 1) { valuesSqlList.push(sql`, `); } } } const withSql = this.buildWithCTE(withList); const valuesSql = sql.join(valuesSqlList); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` : undefined; const onConflictSql = onConflict ? sql` on conflict ${onConflict}` : undefined; const overridingSql = overridingSystemValue_ === true ? sql`overriding system value ` : undefined; return sql`${withSql}insert into ${table} ${insertOrder} ${overridingSql}${valuesSql}${onConflictSql}${returningSql}`; } buildRefreshMaterializedViewQuery( { view, concurrently, withNoData }: { view: GelMaterializedView; concurrently?: boolean; withNoData?: boolean }, ): SQL { const concurrentlySql = concurrently ? sql` concurrently` : undefined; const withNoDataSql = withNoData ? sql` with no data` : undefined; return sql`refresh materialized view${concurrentlySql} ${view}${withNoDataSql}`; } prepareTyping(encoder: DriverValueEncoder): QueryTypingsValue { if (is(encoder, GelJson)) { return 'json'; } else if (is(encoder, GelDecimal)) { return 'decimal'; } else if (is(encoder, GelTimestamp)) { return 'timestamp'; } else if (is(encoder, GelUUID)) { return 'uuid'; } else { return 'none'; } } sqlToQuery(sql: SQL, invokeSource?: 'indexes' | undefined): QueryWithTypings { return sql.toQuery({ casing: this.casing, escapeName: this.escapeName, escapeParam: this.escapeParam, escapeString: this.escapeString, prepareTyping: this.prepareTyping, invokeSource, }); } // buildRelationalQueryWithPK({ // fullSchema, // schema, // tableNamesMap, // table, // tableConfig, // queryConfig: config, // tableAlias, // isRoot = false, // joinOn, // }: { // fullSchema: Record; // schema: TablesRelationalConfig; // tableNamesMap: Record; // table: GelTable; // tableConfig: TableRelationalConfig; // queryConfig: true | DBQueryConfig<'many', true>; // tableAlias: string; // isRoot?: boolean; // joinOn?: SQL; // }): BuildRelationalQueryResult { // // For { "": true }, return a table with selection of all columns // if (config === true) { // const selectionEntries = Object.entries(tableConfig.columns); // const selection: BuildRelationalQueryResult['selection'] = selectionEntries.map(( // [key, value], // ) => ({ // dbKey: value.name, // tsKey: key, // field: value as GelColumn, // relationTableTsKey: undefined, // isJson: false, // selection: [], // })); // return { // tableTsKey: tableConfig.tsName, // sql: table, // selection, // }; // } // // let selection: BuildRelationalQueryResult['selection'] = []; // // let selectionForBuild = selection; // const aliasedColumns = Object.fromEntries( // Object.entries(tableConfig.columns).map(([key, value]) => [key, aliasedTableColumn(value, tableAlias)]), // ); // const aliasedRelations = Object.fromEntries( // Object.entries(tableConfig.relations).map(([key, value]) => [key, aliasedRelation(value, tableAlias)]), // ); // const aliasedFields = Object.assign({}, aliasedColumns, aliasedRelations); // let where, hasUserDefinedWhere; // if (config.where) { // const whereSql = typeof config.where === 'function' ? config.where(aliasedFields, operators) : config.where; // where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); // hasUserDefinedWhere = !!where; // } // where = and(joinOn, where); // // const fieldsSelection: { tsKey: string; value: GelColumn | SQL.Aliased; isExtra?: boolean }[] = []; // let joins: Join[] = []; // let selectedColumns: string[] = []; // // Figure out which columns to select // if (config.columns) { // let isIncludeMode = false; // for (const [field, value] of Object.entries(config.columns)) { // if (value === undefined) { // continue; // } // if (field in tableConfig.columns) { // if (!isIncludeMode && value === true) { // isIncludeMode = true; // } // selectedColumns.push(field); // } // } // if (selectedColumns.length > 0) { // selectedColumns = isIncludeMode // ? selectedColumns.filter((c) => config.columns?.[c] === true) // : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); // } // } else { // // Select all columns if selection is not specified // selectedColumns = Object.keys(tableConfig.columns); // } // // for (const field of selectedColumns) { // // const column = tableConfig.columns[field]! as GelColumn; // // fieldsSelection.push({ tsKey: field, value: column }); // // } // let initiallySelectedRelations: { // tsKey: string; // queryConfig: true | DBQueryConfig<'many', false>; // relation: Relation; // }[] = []; // // let selectedRelations: BuildRelationalQueryResult['selection'] = []; // // Figure out which relations to select // if (config.with) { // initiallySelectedRelations = Object.entries(config.with) // .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) // .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); // } // const manyRelations = initiallySelectedRelations.filter((r) => // is(r.relation, Many) // && (schema[tableNamesMap[r.relation.referencedTable[Table.Symbol.Name]]!]?.primaryKey.length ?? 0) > 0 // ); // // If this is the last Many relation (or there are no Many relations), we are on the innermost subquery level // const isInnermostQuery = manyRelations.length < 2; // const selectedExtras: { // tsKey: string; // value: SQL.Aliased; // }[] = []; // // Figure out which extras to select // if (isInnermostQuery && config.extras) { // const extras = typeof config.extras === 'function' // ? config.extras(aliasedFields, { sql }) // : config.extras; // for (const [tsKey, value] of Object.entries(extras)) { // selectedExtras.push({ // tsKey, // value: mapColumnsInAliasedSQLToAlias(value, tableAlias), // }); // } // } // // Transform `fieldsSelection` into `selection` // // `fieldsSelection` shouldn't be used after this point // // for (const { tsKey, value, isExtra } of fieldsSelection) { // // selection.push({ // // dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, // // tsKey, // // field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, // // relationTableTsKey: undefined, // // isJson: false, // // isExtra, // // selection: [], // // }); // // } // let orderByOrig = typeof config.orderBy === 'function' // ? config.orderBy(aliasedFields, orderByOperators) // : config.orderBy ?? []; // if (!Array.isArray(orderByOrig)) { // orderByOrig = [orderByOrig]; // } // const orderBy = orderByOrig.map((orderByValue) => { // if (is(orderByValue, Column)) { // return aliasedTableColumn(orderByValue, tableAlias) as GelColumn; // } // return mapColumnsInSQLToAlias(orderByValue, tableAlias); // }); // const limit = isInnermostQuery ? config.limit : undefined; // const offset = isInnermostQuery ? config.offset : undefined; // // For non-root queries without additional config except columns, return a table with selection // if ( // !isRoot // && initiallySelectedRelations.length === 0 // && selectedExtras.length === 0 // && !where // && orderBy.length === 0 // && limit === undefined // && offset === undefined // ) { // return { // tableTsKey: tableConfig.tsName, // sql: table, // selection: selectedColumns.map((key) => ({ // dbKey: tableConfig.columns[key]!.name, // tsKey: key, // field: tableConfig.columns[key] as GelColumn, // relationTableTsKey: undefined, // isJson: false, // selection: [], // })), // }; // } // const selectedRelationsWithoutPK: // // Process all relations without primary keys, because they need to be joined differently and will all be on the same query level // for ( // const { // tsKey: selectedRelationTsKey, // queryConfig: selectedRelationConfigValue, // relation, // } of initiallySelectedRelations // ) { // const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); // const relationTableName = relation.referencedTable[Table.Symbol.Name]; // const relationTableTsName = tableNamesMap[relationTableName]!; // const relationTable = schema[relationTableTsName]!; // if (relationTable.primaryKey.length > 0) { // continue; // } // const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; // const joinOn = and( // ...normalizedRelation.fields.map((field, i) => // eq( // aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), // aliasedTableColumn(field, tableAlias), // ) // ), // ); // const builtRelation = this.buildRelationalQueryWithoutPK({ // fullSchema, // schema, // tableNamesMap, // table: fullSchema[relationTableTsName] as GelTable, // tableConfig: schema[relationTableTsName]!, // queryConfig: selectedRelationConfigValue, // tableAlias: relationTableAlias, // joinOn, // nestedQueryRelation: relation, // }); // const field = sql`${sql.identifier(relationTableAlias)}.${sql.identifier('data')}`.as(selectedRelationTsKey); // joins.push({ // on: sql`true`, // table: new Subquery(builtRelation.sql as SQL, {}, relationTableAlias), // alias: relationTableAlias, // joinType: 'left', // lateral: true, // }); // selectedRelations.push({ // dbKey: selectedRelationTsKey, // tsKey: selectedRelationTsKey, // field, // relationTableTsKey: relationTableTsName, // isJson: true, // selection: builtRelation.selection, // }); // } // const oneRelations = initiallySelectedRelations.filter((r): r is typeof r & { relation: One } => // is(r.relation, One) // ); // // Process all One relations with PKs, because they can all be joined on the same level // for ( // const { // tsKey: selectedRelationTsKey, // queryConfig: selectedRelationConfigValue, // relation, // } of oneRelations // ) { // const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); // const relationTableName = relation.referencedTable[Table.Symbol.Name]; // const relationTableTsName = tableNamesMap[relationTableName]!; // const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; // const relationTable = schema[relationTableTsName]!; // if (relationTable.primaryKey.length === 0) { // continue; // } // const joinOn = and( // ...normalizedRelation.fields.map((field, i) => // eq( // aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), // aliasedTableColumn(field, tableAlias), // ) // ), // ); // const builtRelation = this.buildRelationalQueryWithPK({ // fullSchema, // schema, // tableNamesMap, // table: fullSchema[relationTableTsName] as GelTable, // tableConfig: schema[relationTableTsName]!, // queryConfig: selectedRelationConfigValue, // tableAlias: relationTableAlias, // joinOn, // }); // const field = sql`case when ${sql.identifier(relationTableAlias)} is null then null else json_build_array(${ // sql.join( // builtRelation.selection.map(({ field }) => // is(field, SQL.Aliased) // ? sql`${sql.identifier(relationTableAlias)}.${sql.identifier(field.fieldAlias)}` // : is(field, Column) // ? aliasedTableColumn(field, relationTableAlias) // : field // ), // sql`, `, // ) // }) end`.as(selectedRelationTsKey); // const isLateralJoin = is(builtRelation.sql, SQL); // joins.push({ // on: isLateralJoin ? sql`true` : joinOn, // table: is(builtRelation.sql, SQL) // ? new Subquery(builtRelation.sql, {}, relationTableAlias) // : aliasedTable(builtRelation.sql, relationTableAlias), // alias: relationTableAlias, // joinType: 'left', // lateral: is(builtRelation.sql, SQL), // }); // selectedRelations.push({ // dbKey: selectedRelationTsKey, // tsKey: selectedRelationTsKey, // field, // relationTableTsKey: relationTableTsName, // isJson: true, // selection: builtRelation.selection, // }); // } // let distinct: GelSelectConfig['distinct']; // let tableFrom: GelTable | Subquery = table; // // Process first Many relation - each one requires a nested subquery // const manyRelation = manyRelations[0]; // if (manyRelation) { // const { // tsKey: selectedRelationTsKey, // queryConfig: selectedRelationQueryConfig, // relation, // } = manyRelation; // distinct = { // on: tableConfig.primaryKey.map((c) => aliasedTableColumn(c as GelColumn, tableAlias)), // }; // const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); // const relationTableName = relation.referencedTable[Table.Symbol.Name]; // const relationTableTsName = tableNamesMap[relationTableName]!; // const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; // const joinOn = and( // ...normalizedRelation.fields.map((field, i) => // eq( // aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), // aliasedTableColumn(field, tableAlias), // ) // ), // ); // const builtRelationJoin = this.buildRelationalQueryWithPK({ // fullSchema, // schema, // tableNamesMap, // table: fullSchema[relationTableTsName] as GelTable, // tableConfig: schema[relationTableTsName]!, // queryConfig: selectedRelationQueryConfig, // tableAlias: relationTableAlias, // joinOn, // }); // const builtRelationSelectionField = sql`case when ${ // sql.identifier(relationTableAlias) // } is null then '[]' else json_agg(json_build_array(${ // sql.join( // builtRelationJoin.selection.map(({ field }) => // is(field, SQL.Aliased) // ? sql`${sql.identifier(relationTableAlias)}.${sql.identifier(field.fieldAlias)}` // : is(field, Column) // ? aliasedTableColumn(field, relationTableAlias) // : field // ), // sql`, `, // ) // })) over (partition by ${sql.join(distinct.on, sql`, `)}) end`.as(selectedRelationTsKey); // const isLateralJoin = is(builtRelationJoin.sql, SQL); // joins.push({ // on: isLateralJoin ? sql`true` : joinOn, // table: isLateralJoin // ? new Subquery(builtRelationJoin.sql as SQL, {}, relationTableAlias) // : aliasedTable(builtRelationJoin.sql as GelTable, relationTableAlias), // alias: relationTableAlias, // joinType: 'left', // lateral: isLateralJoin, // }); // // Build the "from" subquery with the remaining Many relations // const builtTableFrom = this.buildRelationalQueryWithPK({ // fullSchema, // schema, // tableNamesMap, // table, // tableConfig, // queryConfig: { // ...config, // where: undefined, // orderBy: undefined, // limit: undefined, // offset: undefined, // with: manyRelations.slice(1).reduce>( // (result, { tsKey, queryConfig: configValue }) => { // result[tsKey] = configValue; // return result; // }, // {}, // ), // }, // tableAlias, // }); // selectedRelations.push({ // dbKey: selectedRelationTsKey, // tsKey: selectedRelationTsKey, // field: builtRelationSelectionField, // relationTableTsKey: relationTableTsName, // isJson: true, // selection: builtRelationJoin.selection, // }); // // selection = builtTableFrom.selection.map((item) => // // is(item.field, SQL.Aliased) // // ? { ...item, field: sql`${sql.identifier(tableAlias)}.${sql.identifier(item.field.fieldAlias)}` } // // : item // // ); // // selectionForBuild = [{ // // dbKey: '*', // // tsKey: '*', // // field: sql`${sql.identifier(tableAlias)}.*`, // // selection: [], // // isJson: false, // // relationTableTsKey: undefined, // // }]; // // const newSelectionItem: (typeof selection)[number] = { // // dbKey: selectedRelationTsKey, // // tsKey: selectedRelationTsKey, // // field, // // relationTableTsKey: relationTableTsName, // // isJson: true, // // selection: builtRelationJoin.selection, // // }; // // selection.push(newSelectionItem); // // selectionForBuild.push(newSelectionItem); // tableFrom = is(builtTableFrom.sql, GelTable) // ? builtTableFrom.sql // : new Subquery(builtTableFrom.sql, {}, tableAlias); // } // if (selectedColumns.length === 0 && selectedRelations.length === 0 && selectedExtras.length === 0) { // throw new DrizzleError(`No fields selected for table "${tableConfig.tsName}" ("${tableAlias}")`); // } // let selection: BuildRelationalQueryResult['selection']; // function prepareSelectedColumns() { // return selectedColumns.map((key) => ({ // dbKey: tableConfig.columns[key]!.name, // tsKey: key, // field: tableConfig.columns[key] as GelColumn, // relationTableTsKey: undefined, // isJson: false, // selection: [], // })); // } // function prepareSelectedExtras() { // return selectedExtras.map((item) => ({ // dbKey: item.value.fieldAlias, // tsKey: item.tsKey, // field: item.value, // relationTableTsKey: undefined, // isJson: false, // selection: [], // })); // } // if (isRoot) { // selection = [ // ...prepareSelectedColumns(), // ...prepareSelectedExtras(), // ]; // } // if (hasUserDefinedWhere || orderBy.length > 0) { // tableFrom = new Subquery( // this.buildSelectQuery({ // table: is(tableFrom, GelTable) ? aliasedTable(tableFrom, tableAlias) : tableFrom, // fields: {}, // fieldsFlat: selectionForBuild.map(({ field }) => ({ // path: [], // field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, // })), // joins, // distinct, // }), // {}, // tableAlias, // ); // selectionForBuild = selection.map((item) => // is(item.field, SQL.Aliased) // ? { ...item, field: sql`${sql.identifier(tableAlias)}.${sql.identifier(item.field.fieldAlias)}` } // : item // ); // joins = []; // distinct = undefined; // } // const result = this.buildSelectQuery({ // table: is(tableFrom, GelTable) ? aliasedTable(tableFrom, tableAlias) : tableFrom, // fields: {}, // fieldsFlat: selectionForBuild.map(({ field }) => ({ // path: [], // field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, // })), // where, // limit, // offset, // joins, // orderBy, // distinct, // }); // return { // tableTsKey: tableConfig.tsName, // sql: result, // selection, // }; // } buildRelationalQueryWithoutPK({ fullSchema, schema, tableNamesMap, table, tableConfig, queryConfig: config, tableAlias, nestedQueryRelation, joinOn, }: { fullSchema: Record; schema: TablesRelationalConfig; tableNamesMap: Record; table: GelTable; tableConfig: TableRelationalConfig; queryConfig: true | DBQueryConfig<'many', true>; tableAlias: string; nestedQueryRelation?: Relation; joinOn?: SQL; }): BuildRelationalQueryResult { let selection: BuildRelationalQueryResult['selection'] = []; let limit, offset, orderBy: NonNullable = [], where; const joins: GelSelectJoinConfig[] = []; if (config === true) { const selectionEntries = Object.entries(tableConfig.columns); selection = selectionEntries.map(( [key, value], ) => ({ dbKey: value.name, tsKey: key, field: aliasedTableColumn(value as GelColumn, tableAlias), relationTableTsKey: undefined, isJson: false, selection: [], })); } else { const aliasedColumns = Object.fromEntries( Object.entries(tableConfig.columns).map(( [key, value], ) => [key, aliasedTableColumn(value, tableAlias)]), ); if (config.where) { const whereSql = typeof config.where === 'function' ? config.where(aliasedColumns, getOperators()) : config.where; where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); } const fieldsSelection: { tsKey: string; value: GelColumn | SQL.Aliased }[] = []; let selectedColumns: string[] = []; // Figure out which columns to select if (config.columns) { let isIncludeMode = false; for (const [field, value] of Object.entries(config.columns)) { if (value === undefined) { continue; } if (field in tableConfig.columns) { if (!isIncludeMode && value === true) { isIncludeMode = true; } selectedColumns.push(field); } } if (selectedColumns.length > 0) { selectedColumns = isIncludeMode ? selectedColumns.filter((c) => config.columns?.[c] === true) : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); } } else { // Select all columns if selection is not specified selectedColumns = Object.keys(tableConfig.columns); } for (const field of selectedColumns) { const column = tableConfig.columns[field]! as GelColumn; fieldsSelection.push({ tsKey: field, value: column }); } let selectedRelations: { tsKey: string; queryConfig: true | DBQueryConfig<'many', false>; relation: Relation; }[] = []; // Figure out which relations to select if (config.with) { selectedRelations = Object.entries(config.with) .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); } let extras; // Figure out which extras to select if (config.extras) { extras = typeof config.extras === 'function' ? config.extras(aliasedColumns, { sql }) : config.extras; for (const [tsKey, value] of Object.entries(extras)) { fieldsSelection.push({ tsKey, value: mapColumnsInAliasedSQLToAlias(value, tableAlias), }); } } // Transform `fieldsSelection` into `selection` // `fieldsSelection` shouldn't be used after this point for (const { tsKey, value } of fieldsSelection) { selection.push({ dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, tsKey, field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, relationTableTsKey: undefined, isJson: false, selection: [], }); } let orderByOrig = typeof config.orderBy === 'function' ? config.orderBy(aliasedColumns, getOrderByOperators()) : config.orderBy ?? []; if (!Array.isArray(orderByOrig)) { orderByOrig = [orderByOrig]; } orderBy = orderByOrig.map((orderByValue) => { if (is(orderByValue, Column)) { return aliasedTableColumn(orderByValue, tableAlias) as GelColumn; } return mapColumnsInSQLToAlias(orderByValue, tableAlias); }); limit = config.limit; offset = config.offset; // Process all relations for ( const { tsKey: selectedRelationTsKey, queryConfig: selectedRelationConfigValue, relation, } of selectedRelations ) { const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); const relationTableName = getTableUniqueName(relation.referencedTable); const relationTableTsName = tableNamesMap[relationTableName]!; const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; const joinOn = and( ...normalizedRelation.fields.map((field, i) => eq( aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), aliasedTableColumn(field, tableAlias), ) ), ); const builtRelation = this.buildRelationalQueryWithoutPK({ fullSchema, schema, tableNamesMap, table: fullSchema[relationTableTsName] as GelTable, tableConfig: schema[relationTableTsName]!, queryConfig: is(relation, One) ? (selectedRelationConfigValue === true ? { limit: 1 } : { ...selectedRelationConfigValue, limit: 1 }) : selectedRelationConfigValue, tableAlias: relationTableAlias, joinOn, nestedQueryRelation: relation, }); const field = sql`${sql.identifier(relationTableAlias)}.${sql.identifier('data')}`.as(selectedRelationTsKey); joins.push({ on: sql`true`, table: new Subquery(builtRelation.sql as SQL, {}, relationTableAlias), alias: relationTableAlias, joinType: 'left', lateral: true, }); selection.push({ dbKey: selectedRelationTsKey, tsKey: selectedRelationTsKey, field, relationTableTsKey: relationTableTsName, isJson: true, selection: builtRelation.selection, }); } } if (selection.length === 0) { throw new DrizzleError({ message: `No fields selected for table "${tableConfig.tsName}" ("${tableAlias}")` }); } let result; where = and(joinOn, where); if (nestedQueryRelation) { let field = sql`json_build_array(${ sql.join( selection.map(({ field, tsKey, isJson }) => isJson ? sql`${sql.identifier(`${tableAlias}_${tsKey}`)}.${sql.identifier('data')}` : is(field, SQL.Aliased) ? field.sql : field ), sql`, `, ) })`; if (is(nestedQueryRelation, Many)) { field = sql`coalesce(json_agg(${field}${ orderBy.length > 0 ? sql` order by ${sql.join(orderBy, sql`, `)}` : undefined }), '[]'::json)`; // orderBy = []; } const nestedSelection = [{ dbKey: 'data', tsKey: 'data', field: field.as('data'), isJson: true, relationTableTsKey: tableConfig.tsName, selection, }]; const needsSubquery = limit !== undefined || offset !== undefined || orderBy.length > 0; if (needsSubquery) { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: [{ path: [], field: sql.raw('*'), }], where, limit, offset, orderBy, setOperators: [], }); where = undefined; limit = undefined; offset = undefined; orderBy = []; } else { result = aliasedTable(table, tableAlias); } result = this.buildSelectQuery({ table: is(result, GelTable) ? result : new Subquery(result, {}, tableAlias), fields: {}, fieldsFlat: nestedSelection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), joins, where, limit, offset, orderBy, setOperators: [], }); } else { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: selection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), joins, where, limit, offset, orderBy, setOperators: [], }); } return { tableTsKey: tableConfig.tsName, sql: result, selection, }; } } ================================================ FILE: drizzle-orm/src/gel-core/expressions.ts ================================================ import type { GelColumn } from '~/gel-core/columns/index.ts'; import { bindIfParam } from '~/sql/expressions/index.ts'; import type { Placeholder, SQL, SQLChunk, SQLWrapper } from '~/sql/sql.ts'; import { sql } from '~/sql/sql.ts'; export * from '~/sql/expressions/index.ts'; export function concat(column: GelColumn | SQL.Aliased, value: string | Placeholder | SQLWrapper): SQL { return sql`${column} || ${bindIfParam(value, column)}`; } export function substring( column: GelColumn | SQL.Aliased, { from, for: _for }: { from?: number | Placeholder | SQLWrapper; for?: number | Placeholder | SQLWrapper }, ): SQL { const chunks: SQLChunk[] = [sql`substring(`, column]; if (from !== undefined) { chunks.push(sql` from `, bindIfParam(from, column)); } if (_for !== undefined) { chunks.push(sql` for `, bindIfParam(_for, column)); } chunks.push(sql`)`); return sql.join(chunks); } ================================================ FILE: drizzle-orm/src/gel-core/foreign-keys.ts ================================================ import { entityKind } from '~/entity.ts'; import { TableName } from '~/table.utils.ts'; import type { AnyGelColumn, GelColumn } from './columns/index.ts'; import type { GelTable } from './table.ts'; export type UpdateDeleteAction = 'cascade' | 'restrict' | 'no action' | 'set null' | 'set default'; export type Reference = () => { readonly name?: string; readonly columns: GelColumn[]; readonly foreignTable: GelTable; readonly foreignColumns: GelColumn[]; }; export class ForeignKeyBuilder { static readonly [entityKind]: string = 'GelForeignKeyBuilder'; /** @internal */ reference: Reference; /** @internal */ _onUpdate: UpdateDeleteAction | undefined = 'no action'; /** @internal */ _onDelete: UpdateDeleteAction | undefined = 'no action'; constructor( config: () => { name?: string; columns: GelColumn[]; foreignColumns: GelColumn[]; }, actions?: { onUpdate?: UpdateDeleteAction; onDelete?: UpdateDeleteAction; } | undefined, ) { this.reference = () => { const { name, columns, foreignColumns } = config(); return { name, columns, foreignTable: foreignColumns[0]!.table as GelTable, foreignColumns }; }; if (actions) { this._onUpdate = actions.onUpdate; this._onDelete = actions.onDelete; } } onUpdate(action: UpdateDeleteAction): this { this._onUpdate = action === undefined ? 'no action' : action; return this; } onDelete(action: UpdateDeleteAction): this { this._onDelete = action === undefined ? 'no action' : action; return this; } /** @internal */ build(table: GelTable): ForeignKey { return new ForeignKey(table, this); } } export type AnyForeignKeyBuilder = ForeignKeyBuilder; export class ForeignKey { static readonly [entityKind]: string = 'GelForeignKey'; readonly reference: Reference; readonly onUpdate: UpdateDeleteAction | undefined; readonly onDelete: UpdateDeleteAction | undefined; constructor(readonly table: GelTable, builder: ForeignKeyBuilder) { this.reference = builder.reference; this.onUpdate = builder._onUpdate; this.onDelete = builder._onDelete; } getName(): string { const { name, columns, foreignColumns } = this.reference(); const columnNames = columns.map((column) => column.name); const foreignColumnNames = foreignColumns.map((column) => column.name); const chunks = [ this.table[TableName], ...columnNames, foreignColumns[0]!.table[TableName], ...foreignColumnNames, ]; return name ?? `${chunks.join('_')}_fk`; } } type ColumnsWithTable< TTableName extends string, TColumns extends GelColumn[], > = { [Key in keyof TColumns]: AnyGelColumn<{ tableName: TTableName }> }; export function foreignKey< TTableName extends string, TForeignTableName extends string, TColumns extends [AnyGelColumn<{ tableName: TTableName }>, ...AnyGelColumn<{ tableName: TTableName }>[]], >( config: { name?: string; columns: TColumns; foreignColumns: ColumnsWithTable; }, ): ForeignKeyBuilder { function mappedConfig() { const { name, columns, foreignColumns } = config; return { name, columns, foreignColumns, }; } return new ForeignKeyBuilder(mappedConfig); } ================================================ FILE: drizzle-orm/src/gel-core/index.ts ================================================ export * from './alias.ts'; export * from './checks.ts'; export * from './columns/index.ts'; export * from './db.ts'; export * from './dialect.ts'; export * from './foreign-keys.ts'; export * from './indexes.ts'; export * from './policies.ts'; export * from './primary-keys.ts'; export * from './query-builders/index.ts'; export * from './roles.ts'; export * from './schema.ts'; export * from './sequence.ts'; export * from './session.ts'; export * from './subquery.ts'; export * from './table.ts'; export * from './unique-constraint.ts'; export * from './utils.ts'; export * from './view-common.ts'; export * from './view.ts'; ================================================ FILE: drizzle-orm/src/gel-core/indexes.ts ================================================ import { SQL } from '~/sql/sql.ts'; import { entityKind, is } from '~/entity.ts'; import type { GelColumn, GelExtraConfigColumn } from './columns/index.ts'; import { IndexedColumn } from './columns/index.ts'; import type { GelTable } from './table.ts'; interface IndexConfig { name?: string; columns: Partial[]; /** * If true, the index will be created as `create unique index` instead of `create index`. */ unique: boolean; /** * If true, the index will be created as `create index concurrently` instead of `create index`. */ concurrently?: boolean; /** * If true, the index will be created as `create index ... on only
` instead of `create index ... on
`. */ only: boolean; /** * Condition for partial index. */ where?: SQL; /** * The optional WITH clause specifies storage parameters for the index */ with?: Record; /** * The optional WITH clause method for the index */ method?: 'btree' | string; } export type IndexColumn = GelColumn; export type GelIndexMethod = | 'btree' | 'hash' | 'gist' | 'sGelist' | 'gin' | 'brin' | 'hnsw' | 'ivfflat' | (string & {}); export type GelIndexOpClass = | 'abstime_ops' | 'access_method' | 'anyarray_eq' | 'anyarray_ge' | 'anyarray_gt' | 'anyarray_le' | 'anyarray_lt' | 'anyarray_ne' | 'bigint_ops' | 'bit_ops' | 'bool_ops' | 'box_ops' | 'bpchar_ops' | 'char_ops' | 'cidr_ops' | 'cstring_ops' | 'date_ops' | 'float_ops' | 'int2_ops' | 'int4_ops' | 'int8_ops' | 'interval_ops' | 'jsonb_ops' | 'macaddr_ops' | 'name_ops' | 'numeric_ops' | 'oid_ops' | 'oidint4_ops' | 'oidint8_ops' | 'oidname_ops' | 'oidvector_ops' | 'point_ops' | 'polygon_ops' | 'range_ops' | 'record_eq' | 'record_ge' | 'record_gt' | 'record_le' | 'record_lt' | 'record_ne' | 'text_ops' | 'time_ops' | 'timestamp_ops' | 'timestamptz_ops' | 'timetz_ops' | 'uuid_ops' | 'varbit_ops' | 'varchar_ops' | 'xml_ops' | 'vector_l2_ops' | 'vector_ip_ops' | 'vector_cosine_ops' | 'vector_l1_ops' | 'bit_hamming_ops' | 'bit_jaccard_ops' | 'halfvec_l2_ops' | 'sparsevec_l2_op' | (string & {}); export class IndexBuilderOn { static readonly [entityKind]: string = 'GelIndexBuilderOn'; constructor(private unique: boolean, private name?: string) {} on(...columns: [Partial | SQL, ...Partial[]]): IndexBuilder { return new IndexBuilder( columns.map((it) => { if (is(it, SQL)) { return it; } it = it as GelExtraConfigColumn; const clonedIndexedColumn = new IndexedColumn(it.name, !!it.keyAsName, it.columnType!, it.indexConfig!); it.indexConfig = JSON.parse(JSON.stringify(it.defaultConfig)); return clonedIndexedColumn; }), this.unique, false, this.name, ); } onOnly(...columns: [Partial, ...Partial[]]): IndexBuilder { return new IndexBuilder( columns.map((it) => { if (is(it, SQL)) { return it; } it = it as GelExtraConfigColumn; const clonedIndexedColumn = new IndexedColumn(it.name, !!it.keyAsName, it.columnType!, it.indexConfig!); it.indexConfig = it.defaultConfig; return clonedIndexedColumn; }), this.unique, true, this.name, ); } /** * Specify what index method to use. Choices are `btree`, `hash`, `gist`, `sGelist`, `gin`, `brin`, or user-installed access methods like `bloom`. The default method is `btree. * * If you have the `Gel_vector` extension installed in your database, you can use the `hnsw` and `ivfflat` options, which are predefined types. * * **You can always specify any string you want in the method, in case Drizzle doesn't have it natively in its types** * * @param method The name of the index method to be used * @param columns * @returns */ using( method: GelIndexMethod, ...columns: [Partial, ...Partial[]] ): IndexBuilder { return new IndexBuilder( columns.map((it) => { if (is(it, SQL)) { return it; } it = it as GelExtraConfigColumn; const clonedIndexedColumn = new IndexedColumn(it.name, !!it.keyAsName, it.columnType!, it.indexConfig!); it.indexConfig = JSON.parse(JSON.stringify(it.defaultConfig)); return clonedIndexedColumn; }), this.unique, true, this.name, method, ); } } export interface AnyIndexBuilder { build(table: GelTable): Index; } // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface IndexBuilder extends AnyIndexBuilder {} export class IndexBuilder implements AnyIndexBuilder { static readonly [entityKind]: string = 'GelIndexBuilder'; /** @internal */ config: IndexConfig; constructor( columns: Partial[], unique: boolean, only: boolean, name?: string, method: string = 'btree', ) { this.config = { name, columns, unique, only, method, }; } concurrently(): this { this.config.concurrently = true; return this; } with(obj: Record): this { this.config.with = obj; return this; } where(condition: SQL): this { this.config.where = condition; return this; } /** @internal */ build(table: GelTable): Index { return new Index(this.config, table); } } export class Index { static readonly [entityKind]: string = 'GelIndex'; readonly config: IndexConfig & { table: GelTable }; constructor(config: IndexConfig, table: GelTable) { this.config = { ...config, table }; } } export type GetColumnsTableName = TColumns extends GelColumn ? TColumns['_']['name'] : TColumns extends GelColumn[] ? TColumns[number]['_']['name'] : never; export function index(name?: string): IndexBuilderOn { return new IndexBuilderOn(false, name); } export function uniqueIndex(name?: string): IndexBuilderOn { return new IndexBuilderOn(true, name); } ================================================ FILE: drizzle-orm/src/gel-core/policies.ts ================================================ import { entityKind } from '~/entity.ts'; import type { SQL } from '~/sql/sql.ts'; import type { GelRole } from './roles.ts'; import type { GelTable } from './table.ts'; export type GelPolicyToOption = | 'public' | 'current_role' | 'current_user' | 'session_user' | (string & {}) | GelPolicyToOption[] | GelRole; export interface GelPolicyConfig { as?: 'permissive' | 'restrictive'; for?: 'all' | 'select' | 'insert' | 'update' | 'delete'; to?: GelPolicyToOption; using?: SQL; withCheck?: SQL; } export class GelPolicy implements GelPolicyConfig { static readonly [entityKind]: string = 'GelPolicy'; readonly as: GelPolicyConfig['as']; readonly for: GelPolicyConfig['for']; readonly to: GelPolicyConfig['to']; readonly using: GelPolicyConfig['using']; readonly withCheck: GelPolicyConfig['withCheck']; /** @internal */ _linkedTable?: GelTable; constructor( readonly name: string, config?: GelPolicyConfig, ) { if (config) { this.as = config.as; this.for = config.for; this.to = config.to; this.using = config.using; this.withCheck = config.withCheck; } } link(table: GelTable): this { this._linkedTable = table; return this; } } export function gelPolicy(name: string, config?: GelPolicyConfig) { return new GelPolicy(name, config); } ================================================ FILE: drizzle-orm/src/gel-core/primary-keys.ts ================================================ import { entityKind } from '~/entity.ts'; import type { AnyGelColumn, GelColumn } from './columns/index.ts'; import { GelTable } from './table.ts'; export function primaryKey< TTableName extends string, TColumn extends AnyGelColumn<{ tableName: TTableName }>, TColumns extends AnyGelColumn<{ tableName: TTableName }>[], >(config: { name?: string; columns: [TColumn, ...TColumns] }): PrimaryKeyBuilder; /** * @deprecated: Please use primaryKey({ columns: [] }) instead of this function * @param columns */ export function primaryKey< TTableName extends string, TColumns extends AnyGelColumn<{ tableName: TTableName }>[], >(...columns: TColumns): PrimaryKeyBuilder; export function primaryKey(...config: any) { if (config[0].columns) { return new PrimaryKeyBuilder(config[0].columns, config[0].name); } return new PrimaryKeyBuilder(config); } export class PrimaryKeyBuilder { static readonly [entityKind]: string = 'GelPrimaryKeyBuilder'; /** @internal */ columns: GelColumn[]; /** @internal */ name?: string; constructor( columns: GelColumn[], name?: string, ) { this.columns = columns; this.name = name; } /** @internal */ build(table: GelTable): PrimaryKey { return new PrimaryKey(table, this.columns, this.name); } } export class PrimaryKey { static readonly [entityKind]: string = 'GelPrimaryKey'; readonly columns: AnyGelColumn<{}>[]; readonly name?: string; constructor(readonly table: GelTable, columns: AnyGelColumn<{}>[], name?: string) { this.columns = columns; this.name = name; } getName(): string { return this.name ?? `${this.table[GelTable.Symbol.Name]}_${this.columns.map((column) => column.name).join('_')}_pk`; } } ================================================ FILE: drizzle-orm/src/gel-core/query-builders/count.ts ================================================ import { entityKind } from '~/entity.ts'; import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { GelSession } from '../session.ts'; import type { GelTable } from '../table.ts'; export class GelCountBuilder< TSession extends GelSession, > extends SQL implements Promise, SQLWrapper { private sql: SQL; static override readonly [entityKind] = 'GelCountBuilder'; [Symbol.toStringTag] = 'GelCountBuilder'; private session: TSession; private static buildEmbeddedCount( source: GelTable | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`(select count(*) from ${source}${sql.raw(' where ').if(filters)}${filters})`; } private static buildCount( source: GelTable | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`select count(*) as count from ${source}${sql.raw(' where ').if(filters)}${filters};`; } constructor( readonly params: { source: GelTable | SQL | SQLWrapper; filters?: SQL; session: TSession; }, ) { super(GelCountBuilder.buildEmbeddedCount(params.source, params.filters).queryChunks); this.mapWith(Number); this.session = params.session; this.sql = GelCountBuilder.buildCount( params.source, params.filters, ); } then( onfulfilled?: ((value: number) => TResult1 | PromiseLike) | null | undefined, onrejected?: ((reason: any) => TResult2 | PromiseLike) | null | undefined, ): Promise { return Promise.resolve(this.session.count(this.sql)) .then( onfulfilled, onrejected, ); } catch( onRejected?: ((reason: any) => any) | null | undefined, ): Promise { return this.then(undefined, onRejected); } finally(onFinally?: (() => void) | null | undefined): Promise { return this.then( (value) => { onFinally?.(); return value; }, (reason) => { onFinally?.(); throw reason; }, ); } } ================================================ FILE: drizzle-orm/src/gel-core/query-builders/delete.ts ================================================ import { entityKind } from '~/entity.ts'; import type { GelDialect } from '~/gel-core/dialect.ts'; import type { GelPreparedQuery, GelQueryResultHKT, GelQueryResultKind, GelSession, PreparedQueryConfig, } from '~/gel-core/session.ts'; import type { GelTable } from '~/gel-core/table.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { tracer } from '~/tracing.ts'; import { orderSelectedFields } from '~/utils.ts'; import type { GelColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import type { SelectedFieldsFlat, SelectedFieldsOrdered } from './select.types.ts'; export type GelDeleteWithout< T extends AnyGelDeleteBase, TDynamic extends boolean, K extends keyof T & string, > = TDynamic extends true ? T : Omit< GelDeleteBase< T['_']['table'], T['_']['queryResult'], T['_']['returning'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type GelDelete< TTable extends GelTable = GelTable, TQueryResult extends GelQueryResultHKT = GelQueryResultHKT, TReturning extends Record | undefined = Record | undefined, > = GelDeleteBase; export interface GelDeleteConfig { where?: SQL | undefined; table: GelTable; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } export type GelDeleteReturningAll< T extends AnyGelDeleteBase, TDynamic extends boolean, > = GelDeleteWithout< GelDeleteBase< T['_']['table'], T['_']['queryResult'], T['_']['table']['$inferSelect'], TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type GelDeleteReturning< T extends AnyGelDeleteBase, TDynamic extends boolean, TSelectedFields extends SelectedFieldsFlat, > = GelDeleteWithout< GelDeleteBase< T['_']['table'], T['_']['queryResult'], SelectResultFields, TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type GelDeletePrepare = GelPreparedQuery< PreparedQueryConfig & { execute: T['_']['returning'] extends undefined ? GelQueryResultKind : T['_']['returning'][]; } >; export type GelDeleteDynamic = GelDelete< T['_']['table'], T['_']['queryResult'], T['_']['returning'] >; export type AnyGelDeleteBase = GelDeleteBase; export interface GelDeleteBase< TTable extends GelTable, TQueryResult extends GelQueryResultHKT, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]>, RunnableQuery : TReturning[], 'gel'>, SQLWrapper { readonly _: { dialect: 'gel'; readonly table: TTable; readonly queryResult: TQueryResult; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TReturning extends undefined ? GelQueryResultKind : TReturning[]; }; } export class GelDeleteBase< TTable extends GelTable, TQueryResult extends GelQueryResultHKT, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]> implements RunnableQuery : TReturning[], 'gel'>, SQLWrapper { static override readonly [entityKind]: string = 'GelDelete'; private config: GelDeleteConfig; constructor( table: TTable, private session: GelSession, private dialect: GelDialect, withList?: Subquery[], ) { super(); this.config = { table, withList }; } /** * Adds a `where` clause to the query. * * Calling this method will delete only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param where the `where` clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be deleted. * * ```ts * // Delete all cars with green color * await db.delete(cars).where(eq(cars.color, 'green')); * // or * await db.delete(cars).where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Delete all BMW cars with a green color * await db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Delete all cars with the green or blue color * await db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where(where: SQL | undefined): GelDeleteWithout { this.config.where = where; return this as any; } /** * Adds a `returning` clause to the query. * * Calling this method will return the specified fields of the deleted rows. If no fields are specified, all fields will be returned. * * See docs: {@link https://orm.drizzle.team/docs/delete#delete-with-return} * * @example * ```ts * // Delete all cars with the green color and return all fields * const deletedCars: Car[] = await db.delete(cars) * .where(eq(cars.color, 'green')) * .returning(); * * // Delete all cars with the green color and return only their id and brand fields * const deletedCarsIdsAndBrands: { id: number, brand: string }[] = await db.delete(cars) * .where(eq(cars.color, 'green')) * .returning({ id: cars.id, brand: cars.brand }); * ``` */ returning(): GelDeleteReturningAll; returning( fields: TSelectedFields, ): GelDeleteReturning; returning( fields: SelectedFieldsFlat = this.config.table[Table.Symbol.Columns], ): GelDeleteReturning { this.config.returning = orderSelectedFields(fields); return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildDeleteQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } /** @internal */ _prepare(name?: string): GelDeletePrepare { return tracer.startActiveSpan('drizzle.prepareQuery', () => { return this.session.prepareQuery< PreparedQueryConfig & { execute: TReturning extends undefined ? GelQueryResultKind : TReturning[]; } >(this.dialect.sqlToQuery(this.getSQL()), this.config.returning, name, true, undefined, { type: 'delete', tables: extractUsedTable(this.config.table), }); }); } prepare(name: string): GelDeletePrepare { return this._prepare(name); } override execute: ReturnType['execute'] = (placeholderValues) => { return tracer.startActiveSpan('drizzle.operation', () => { return this._prepare().execute(placeholderValues); }); }; $dynamic(): GelDeleteDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/gel-core/query-builders/index.ts ================================================ export * from './delete.ts'; export * from './insert.ts'; export * from './query-builder.ts'; export * from './refresh-materialized-view.ts'; export * from './select.ts'; export * from './select.types.ts'; export * from './update.ts'; ================================================ FILE: drizzle-orm/src/gel-core/query-builders/insert.ts ================================================ import { entityKind, is } from '~/entity.ts'; import type { GelDialect } from '~/gel-core/dialect.ts'; import type { IndexColumn } from '~/gel-core/indexes.ts'; import type { GelPreparedQuery, GelQueryResultHKT, GelQueryResultKind, GelSession, PreparedQueryConfig, } from '~/gel-core/session.ts'; import type { GelTable, TableConfig } from '~/gel-core/table.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; import { Param, SQL } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import type { InferInsertModel } from '~/table.ts'; import { Columns, Table } from '~/table.ts'; import { tracer } from '~/tracing.ts'; import { haveSameKeys, type NeonAuthToken, orderSelectedFields } from '~/utils.ts'; import type { AnyGelColumn, GelColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import { QueryBuilder } from './query-builder.ts'; import type { SelectedFieldsFlat, SelectedFieldsOrdered } from './select.types.ts'; import type { GelUpdateSetSource } from './update.ts'; export interface GelInsertConfig { table: TTable; values: Record[] | GelInsertSelectQueryBuilder | SQL; withList?: Subquery[]; onConflict?: SQL; returning?: SelectedFieldsOrdered; select?: boolean; overridingSystemValue_?: boolean; } export type GelInsertValue, OverrideT extends boolean = false> = & { [Key in keyof InferInsertModel]: | InferInsertModel[Key] | SQL | Placeholder; } & {}; export type GelInsertSelectQueryBuilder = TypedQueryBuilder< { [K in keyof TTable['$inferInsert']]: AnyGelColumn | SQL | SQL.Aliased | TTable['$inferInsert'][K] } >; export class GelInsertBuilder< TTable extends GelTable, TQueryResult extends GelQueryResultHKT, OverrideT extends boolean = false, > { static readonly [entityKind]: string = 'GelInsertBuilder'; constructor( private table: TTable, private session: GelSession, private dialect: GelDialect, private withList?: Subquery[], private overridingSystemValue_?: boolean, ) {} private authToken?: NeonAuthToken; /** @internal */ setToken(token?: NeonAuthToken) { this.authToken = token; return this; } overridingSystemValue(): Omit, 'overridingSystemValue'> { this.overridingSystemValue_ = true; return this as any; } values(value: GelInsertValue): GelInsertBase; values(values: GelInsertValue[]): GelInsertBase; values( values: GelInsertValue | GelInsertValue[], ): GelInsertBase { values = Array.isArray(values) ? values : [values]; if (values.length === 0) { throw new Error('values() must be called with at least one value'); } const mappedValues = values.map((entry) => { const result: Record = {}; const cols = this.table[Table.Symbol.Columns]; for (const colKey of Object.keys(entry)) { const colValue = entry[colKey as keyof typeof entry]; result[colKey] = is(colValue, SQL) ? colValue : new Param(colValue, cols[colKey]); } return result; }); return new GelInsertBase( this.table, mappedValues, this.session, this.dialect, this.withList, false, this.overridingSystemValue_, ); } select(selectQuery: (qb: QueryBuilder) => GelInsertSelectQueryBuilder): GelInsertBase; select(selectQuery: (qb: QueryBuilder) => SQL): GelInsertBase; select(selectQuery: SQL): GelInsertBase; select(selectQuery: GelInsertSelectQueryBuilder): GelInsertBase; select( selectQuery: | SQL | GelInsertSelectQueryBuilder | ((qb: QueryBuilder) => GelInsertSelectQueryBuilder | SQL), ): GelInsertBase { const select = typeof selectQuery === 'function' ? selectQuery(new QueryBuilder()) : selectQuery; if ( !is(select, SQL) && !haveSameKeys(this.table[Columns], select._.selectedFields) ) { throw new Error( 'Insert select error: selected fields are not the same or are in a different order compared to the table definition', ); } return new GelInsertBase(this.table, select, this.session, this.dialect, this.withList, true); } } export type GelInsertWithout = TDynamic extends true ? T : Omit< GelInsertBase< T['_']['table'], T['_']['queryResult'], T['_']['returning'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type GelInsertReturning< T extends AnyGelInsert, TDynamic extends boolean, TSelectedFields extends SelectedFieldsFlat, > = GelInsertBase< T['_']['table'], T['_']['queryResult'], SelectResultFields, TDynamic, T['_']['excludedMethods'] >; export type GelInsertReturningAll = GelInsertBase< T['_']['table'], T['_']['queryResult'], T['_']['table']['$inferSelect'], TDynamic, T['_']['excludedMethods'] >; export interface GelInsertOnConflictDoUpdateConfig { target: IndexColumn | IndexColumn[]; /** @deprecated use either `targetWhere` or `setWhere` */ where?: SQL; // TODO: add tests for targetWhere and setWhere targetWhere?: SQL; setWhere?: SQL; set: GelUpdateSetSource; } export type GelInsertPrepare = GelPreparedQuery< PreparedQueryConfig & { execute: T['_']['returning'] extends undefined ? GelQueryResultKind : T['_']['returning'][]; } >; export type GelInsertDynamic = GelInsert< T['_']['table'], T['_']['queryResult'], T['_']['returning'] >; export type AnyGelInsert = GelInsertBase; export type GelInsert< TTable extends GelTable = GelTable, TQueryResult extends GelQueryResultHKT = GelQueryResultHKT, TReturning extends Record | undefined = Record | undefined, > = GelInsertBase; export interface GelInsertBase< TTable extends GelTable, TQueryResult extends GelQueryResultHKT, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]>, RunnableQuery : TReturning[], 'gel'>, SQLWrapper { readonly _: { readonly dialect: 'gel'; readonly table: TTable; readonly queryResult: TQueryResult; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TReturning extends undefined ? GelQueryResultKind : TReturning[]; }; } export class GelInsertBase< TTable extends GelTable, TQueryResult extends GelQueryResultHKT, TReturning extends Record | undefined = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]> implements RunnableQuery : TReturning[], 'gel'>, SQLWrapper { static override readonly [entityKind]: string = 'GelInsert'; private config: GelInsertConfig; constructor( table: TTable, values: GelInsertConfig['values'], private session: GelSession, private dialect: GelDialect, withList?: Subquery[], select?: boolean, overridingSystemValue_?: boolean, ) { super(); this.config = { table, values: values as any, withList, select, overridingSystemValue_ }; } /** * Adds a `returning` clause to the query. * * Calling this method will return the specified fields of the inserted rows. If no fields are specified, all fields will be returned. * * See docs: {@link https://orm.drizzle.team/docs/insert#insert-returning} * * @example * ```ts * // Insert one row and return all fields * const insertedCar: Car[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning(); * * // Insert one row and return only the id * const insertedCarId: { id: number }[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning({ id: cars.id }); * ``` */ returning(): GelInsertWithout, TDynamic, 'returning'>; returning( fields: TSelectedFields, ): GelInsertWithout, TDynamic, 'returning'>; returning( fields: SelectedFieldsFlat = this.config.table[Table.Symbol.Columns], ): GelInsertWithout { this.config.returning = orderSelectedFields(fields); return this as any; } /** * Adds an `on conflict do nothing` clause to the query. * * Calling this method simply avoids inserting a row as its alternative action. * * See docs: {@link https://orm.drizzle.team/docs/insert#on-conflict-do-nothing} * * @param config The `target` and `where` clauses. * * @example * ```ts * // Insert one row and cancel the insert if there's a conflict * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoNothing(); * * // Explicitly specify conflict target * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoNothing({ target: cars.id }); * ``` */ // TODO not supported // onConflictDoNothing( // config: { target?: IndexColumn | IndexColumn[]; where?: SQL } = {}, // ): GelInsertWithout { // if (config.target === undefined) { // this.config.onConflict = sql`do nothing`; // } else { // let targetColumn = ''; // targetColumn = Array.isArray(config.target) // ? config.target.map((it) => this.dialect.escapeName(this.dialect.casing.getColumnCasing(it))).join(',') // : this.dialect.escapeName(this.dialect.casing.getColumnCasing(config.target)); // const whereSql = config.where ? sql` where ${config.where}` : undefined; // this.config.onConflict = sql`(${sql.raw(targetColumn)})${whereSql} do nothing`; // } // return this as any; // } /** * Adds an `on conflict do update` clause to the query. * * Calling this method will update the existing row that conflicts with the row proposed for insertion as its alternative action. * * See docs: {@link https://orm.drizzle.team/docs/insert#upserts-and-conflicts} * * @param config The `target`, `set` and `where` clauses. * * @example * ```ts * // Update the row if there's a conflict * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoUpdate({ * target: cars.id, * set: { brand: 'Porsche' } * }); * * // Upsert with 'where' clause * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoUpdate({ * target: cars.id, * set: { brand: 'newBMW' }, * targetWhere: sql`${cars.createdAt} > '2023-01-01'::date`, * }); * ``` */ // TODO not supported // onConflictDoUpdate( // config: GelInsertOnConflictDoUpdateConfig, // ): GelInsertWithout { // if (config.where && (config.targetWhere || config.setWhere)) { // throw new Error( // 'You cannot use both "where" and "targetWhere"/"setWhere" at the same time - "where" is deprecated, use "targetWhere" or "setWhere" instead.', // ); // } // const whereSql = config.where ? sql` where ${config.where}` : undefined; // const targetWhereSql = config.targetWhere ? sql` where ${config.targetWhere}` : undefined; // const setWhereSql = config.setWhere ? sql` where ${config.setWhere}` : undefined; // const setSql = this.dialect.buildUpdateSet(this.config.table, mapUpdateSet(this.config.table, config.set)); // let targetColumn = ''; // targetColumn = Array.isArray(config.target) // ? config.target.map((it) => this.dialect.escapeName(this.dialect.casing.getColumnCasing(it))).join(',') // : this.dialect.escapeName(this.dialect.casing.getColumnCasing(config.target)); // this.config.onConflict = sql`(${ // sql.raw(targetColumn) // })${targetWhereSql} do update set ${setSql}${whereSql}${setWhereSql}`; // return this as any; // } /** @internal */ getSQL(): SQL { return this.dialect.buildInsertQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } /** @internal */ _prepare(name?: string): GelInsertPrepare { return tracer.startActiveSpan('drizzle.prepareQuery', () => { return this.session.prepareQuery< PreparedQueryConfig & { execute: TReturning extends undefined ? GelQueryResultKind : TReturning[]; } >(this.dialect.sqlToQuery(this.getSQL()), this.config.returning, name, true, undefined, { type: 'insert', tables: extractUsedTable(this.config.table), }); }); } prepare(name: string): GelInsertPrepare { return this._prepare(name); } override execute: ReturnType['execute'] = (placeholderValues) => { return tracer.startActiveSpan('drizzle.operation', () => { return this._prepare().execute(placeholderValues); }); }; $dynamic(): GelInsertDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/gel-core/query-builders/query-builder.ts ================================================ import { entityKind, is } from '~/entity.ts'; import type { GelDialectConfig } from '~/gel-core/dialect.ts'; import { GelDialect } from '~/gel-core/dialect.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, SQLWrapper } from '~/sql/sql.ts'; import { WithSubquery } from '~/subquery.ts'; import type { GelColumn } from '../columns/index.ts'; import type { WithSubqueryWithSelection } from '../subquery.ts'; import { GelSelectBuilder } from './select.ts'; import type { SelectedFields } from './select.types.ts'; export class QueryBuilder { static readonly [entityKind]: string = 'GelQueryBuilder'; private dialect: GelDialect | undefined; private dialectConfig: GelDialectConfig | undefined; constructor(dialect?: GelDialect | GelDialectConfig) { this.dialect = is(dialect, GelDialect) ? dialect : undefined; this.dialectConfig = is(dialect, GelDialect) ? undefined : dialect; } $with(alias: TAlias) { const queryBuilder = this; return { as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): WithSubqueryWithSelection { if (typeof qb === 'function') { qb = qb(queryBuilder); } return new Proxy( new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as WithSubqueryWithSelection; }, }; } with(...queries: WithSubquery[]) { const self = this; function select(): GelSelectBuilder; function select(fields: TSelection): GelSelectBuilder; function select( fields?: TSelection, ): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), withList: queries, }); } function selectDistinct(): GelSelectBuilder; function selectDistinct(fields: TSelection): GelSelectBuilder; function selectDistinct(fields?: SelectedFields): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), distinct: true, }); } function selectDistinctOn(on: (GelColumn | SQLWrapper)[]): GelSelectBuilder; function selectDistinctOn( on: (GelColumn | SQLWrapper)[], fields: TSelection, ): GelSelectBuilder; function selectDistinctOn( on: (GelColumn | SQLWrapper)[], fields?: SelectedFields, ): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), distinct: { on }, }); } return { select, selectDistinct, selectDistinctOn }; } select(): GelSelectBuilder; select(fields: TSelection): GelSelectBuilder; select(fields?: TSelection): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect(), }); } selectDistinct(): GelSelectBuilder; selectDistinct(fields: TSelection): GelSelectBuilder; selectDistinct(fields?: SelectedFields): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect(), distinct: true, }); } selectDistinctOn(on: (GelColumn | SQLWrapper)[]): GelSelectBuilder; selectDistinctOn( on: (GelColumn | SQLWrapper)[], fields: TSelection, ): GelSelectBuilder; selectDistinctOn( on: (GelColumn | SQLWrapper)[], fields?: SelectedFields, ): GelSelectBuilder { return new GelSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect(), distinct: { on }, }); } // Lazy load dialect to avoid circular dependency private getDialect() { if (!this.dialect) { this.dialect = new GelDialect(this.dialectConfig); } return this.dialect; } } ================================================ FILE: drizzle-orm/src/gel-core/query-builders/query.ts ================================================ import { entityKind } from '~/entity.ts'; import { QueryPromise } from '~/query-promise.ts'; import { type BuildQueryResult, type BuildRelationalQueryResult, type DBQueryConfig, mapRelationalRow, type TableRelationalConfig, type TablesRelationalConfig, } from '~/relations.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { Query, QueryWithTypings, SQL, SQLWrapper } from '~/sql/sql.ts'; import { tracer } from '~/tracing.ts'; import type { KnownKeysOnly } from '~/utils.ts'; import type { GelDialect } from '../dialect.ts'; import type { GelPreparedQuery, GelSession, PreparedQueryConfig } from '../session.ts'; import type { GelTable } from '../table.ts'; export class RelationalQueryBuilder { static readonly [entityKind]: string = 'GelRelationalQueryBuilder'; constructor( private fullSchema: Record, private schema: TSchema, private tableNamesMap: Record, private table: GelTable, private tableConfig: TableRelationalConfig, private dialect: GelDialect, private session: GelSession, ) {} findMany>( config?: KnownKeysOnly>, ): GelRelationalQuery[]> { return new GelRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? (config as DBQueryConfig<'many', true>) : {}, 'many', ); } findFirst, 'limit'>>( config?: KnownKeysOnly, 'limit'>>, ): GelRelationalQuery | undefined> { return new GelRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? { ...(config as DBQueryConfig<'many', true> | undefined), limit: 1 } : { limit: 1 }, 'first', ); } } export class GelRelationalQuery extends QueryPromise implements RunnableQuery, SQLWrapper { static override readonly [entityKind]: string = 'GelRelationalQuery'; declare readonly _: { readonly dialect: 'gel'; readonly result: TResult; }; constructor( private fullSchema: Record, private schema: TablesRelationalConfig, private tableNamesMap: Record, private table: GelTable, private tableConfig: TableRelationalConfig, private dialect: GelDialect, private session: GelSession, private config: DBQueryConfig<'many', true> | true, private mode: 'many' | 'first', ) { super(); } /** @internal */ _prepare(name?: string): GelPreparedQuery { return tracer.startActiveSpan('drizzle.prepareQuery', () => { const { query, builtQuery } = this._toSQL(); return this.session.prepareQuery( builtQuery, undefined, name, true, (rawRows, mapColumnValue) => { const rows = rawRows.map((row) => mapRelationalRow(this.schema, this.tableConfig, row, query.selection, mapColumnValue) ); if (this.mode === 'first') { return rows[0] as TResult; } return rows as TResult; }, ); }); } prepare(name: string): GelPreparedQuery { return this._prepare(name); } private _getQuery() { return this.dialect.buildRelationalQueryWithoutPK({ fullSchema: this.fullSchema, schema: this.schema, tableNamesMap: this.tableNamesMap, table: this.table, tableConfig: this.tableConfig, queryConfig: this.config, tableAlias: this.tableConfig.tsName, }); } /** @internal */ getSQL(): SQL { return this._getQuery().sql as SQL; } private _toSQL(): { query: BuildRelationalQueryResult; builtQuery: QueryWithTypings } { const query = this._getQuery(); const builtQuery = this.dialect.sqlToQuery(query.sql as SQL); return { query, builtQuery }; } toSQL(): Query { return this._toSQL().builtQuery; } override execute(): Promise { return tracer.startActiveSpan('drizzle.operation', () => { return this._prepare().execute(undefined); }); } } ================================================ FILE: drizzle-orm/src/gel-core/query-builders/raw.ts ================================================ import { entityKind } from '~/entity.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { PreparedQuery } from '~/session.ts'; import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; export interface GelRaw extends QueryPromise, RunnableQuery, SQLWrapper {} export class GelRaw extends QueryPromise implements RunnableQuery, SQLWrapper, PreparedQuery { static override readonly [entityKind]: string = 'GelRaw'; declare readonly _: { readonly dialect: 'gel'; readonly result: TResult; }; constructor( public execute: () => Promise, private sql: SQL, private query: Query, private mapBatchResult: (result: unknown) => unknown, ) { super(); } /** @internal */ getSQL() { return this.sql; } getQuery() { return this.query; } mapResult(result: unknown, isFromBatch?: boolean) { return isFromBatch ? this.mapBatchResult(result) : result; } _prepare(): PreparedQuery { return this; } /** @internal */ isResponseInArrayMode() { return false; } } ================================================ FILE: drizzle-orm/src/gel-core/query-builders/refresh-materialized-view.ts ================================================ import { entityKind } from '~/entity.ts'; import type { GelDialect } from '~/gel-core/dialect.ts'; import type { GelPreparedQuery, GelQueryResultHKT, GelQueryResultKind, GelSession, PreparedQueryConfig, } from '~/gel-core/session.ts'; import type { GelMaterializedView } from '~/gel-core/view.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import { tracer } from '~/tracing.ts'; // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface GelRefreshMaterializedView extends QueryPromise>, RunnableQuery, 'gel'>, SQLWrapper { readonly _: { readonly dialect: 'gel'; readonly result: GelQueryResultKind; }; } export class GelRefreshMaterializedView extends QueryPromise> implements RunnableQuery, 'gel'>, SQLWrapper { static override readonly [entityKind]: string = 'GelRefreshMaterializedView'; private config: { view: GelMaterializedView; concurrently?: boolean; withNoData?: boolean; }; constructor( view: GelMaterializedView, private session: GelSession, private dialect: GelDialect, ) { super(); this.config = { view }; } concurrently(): this { if (this.config.withNoData !== undefined) { throw new Error('Cannot use concurrently and withNoData together'); } this.config.concurrently = true; return this; } withNoData(): this { if (this.config.concurrently !== undefined) { throw new Error('Cannot use concurrently and withNoData together'); } this.config.withNoData = true; return this; } /** @internal */ getSQL(): SQL { return this.dialect.buildRefreshMaterializedViewQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } /** @internal */ _prepare(name?: string): GelPreparedQuery< PreparedQueryConfig & { execute: GelQueryResultKind; } > { return tracer.startActiveSpan('drizzle.prepareQuery', () => { return this.session.prepareQuery(this.dialect.sqlToQuery(this.getSQL()), undefined, name, true); }); } prepare(name: string): GelPreparedQuery< PreparedQueryConfig & { execute: GelQueryResultKind; } > { return this._prepare(name); } execute: ReturnType['execute'] = (placeholderValues) => { return tracer.startActiveSpan('drizzle.operation', () => { return this._prepare().execute(placeholderValues); }); }; } ================================================ FILE: drizzle-orm/src/gel-core/query-builders/select.ts ================================================ import type { CacheConfig, WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import type { GelColumn } from '~/gel-core/columns/index.ts'; import type { GelDialect } from '~/gel-core/dialect.ts'; import type { GelSession, PreparedQueryConfig } from '~/gel-core/session.ts'; import type { SubqueryWithSelection } from '~/gel-core/subquery.ts'; import type { GelTable } from '~/gel-core/table.ts'; import { GelViewBase } from '~/gel-core/view-base.ts'; import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { BuildSubquerySelection, GetSelectTableName, GetSelectTableSelection, JoinNullability, JoinType, SelectMode, SelectResult, SetOperator, } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import { SQL, View } from '~/sql/sql.ts'; import type { ColumnsSelection, Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { tracer } from '~/tracing.ts'; import { applyMixins, getTableColumns, getTableLikeName, haveSameKeys, type NeonAuthToken, type ValueOrArray, } from '~/utils.ts'; import { orderSelectedFields } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import { extractUsedTable } from '../utils.ts'; import type { AnyGelSelect, CreateGelSelectFromBuilderMode, GelCreateSetOperatorFn, GelSelectConfig, GelSelectCrossJoinFn, GelSelectDynamic, GelSelectHKT, GelSelectHKTBase, GelSelectJoinFn, GelSelectPrepare, GelSelectWithout, GelSetOperatorExcludedMethods, GelSetOperatorWithResult, GetGelSetOperators, LockConfig, LockStrength, SelectedFields, SetOperatorRightSelect, } from './select.types.ts'; export class GelSelectBuilder< TSelection extends SelectedFields | undefined, TBuilderMode extends 'db' | 'qb' = 'db', > { static readonly [entityKind]: string = 'GelSelectBuilder'; private fields: TSelection; private session: GelSession | undefined; private dialect: GelDialect; private withList: Subquery[] = []; private distinct: boolean | { on: (GelColumn | SQLWrapper)[]; } | undefined; constructor( config: { fields: TSelection; session: GelSession | undefined; dialect: GelDialect; withList?: Subquery[]; distinct?: boolean | { on: (GelColumn | SQLWrapper)[]; }; }, ) { this.fields = config.fields; this.session = config.session; this.dialect = config.dialect; if (config.withList) { this.withList = config.withList; } this.distinct = config.distinct; } private authToken?: NeonAuthToken; /** @internal */ setToken(token?: NeonAuthToken) { this.authToken = token; return this; } /** * Specify the table, subquery, or other target that you're * building a select query against. * * {@link https://www.postgresql.org/docs/current/sql-select.html#SQL-FROM | Postgres from documentation} */ from( source: TFrom, ): CreateGelSelectFromBuilderMode< TBuilderMode, GetSelectTableName, TSelection extends undefined ? GetSelectTableSelection : TSelection, TSelection extends undefined ? 'single' : 'partial' > { const isPartialSelect = !!this.fields; let fields: SelectedFields; if (this.fields) { fields = this.fields; } else if (is(source, Subquery)) { // This is required to use the proxy handler to get the correct field values from the subquery fields = Object.fromEntries( Object.keys(source._.selectedFields).map(( key, ) => [key, source[key as unknown as keyof typeof source] as unknown as SelectedFields[string]]), ); } else if (is(source, GelViewBase)) { fields = source[ViewBaseConfig].selectedFields as SelectedFields; } else if (is(source, SQL)) { fields = {}; } else { fields = getTableColumns(source); } return new GelSelectBase({ table: source, fields, isPartialSelect, session: this.session, dialect: this.dialect, withList: this.withList, distinct: this.distinct, }) as any; } } export abstract class GelSelectQueryBuilderBase< THKT extends GelSelectHKTBase, TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends TypedQueryBuilder { static override readonly [entityKind]: string = 'GelSelectQueryBuilder'; override readonly _: { readonly dialect: 'gel'; readonly hkt: THKT; readonly tableName: TTableName; readonly selection: TSelection; readonly selectMode: TSelectMode; readonly nullabilityMap: TNullabilityMap; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TResult; readonly selectedFields: TSelectedFields; readonly config: GelSelectConfig; }; protected config: GelSelectConfig; protected joinsNotNullableMap: Record; private tableName: string | undefined; private isPartialSelect: boolean; protected session: GelSession | undefined; protected dialect: GelDialect; protected cacheConfig?: WithCacheConfig = undefined; protected usedTables: Set = new Set(); constructor( { table, fields, isPartialSelect, session, dialect, withList, distinct }: { table: GelSelectConfig['table']; fields: GelSelectConfig['fields']; isPartialSelect: boolean; session: GelSession | undefined; dialect: GelDialect; withList: Subquery[]; distinct: boolean | { on: (GelColumn | SQLWrapper)[]; } | undefined; }, ) { super(); this.config = { withList, table, fields: { ...fields }, distinct, setOperators: [], }; this.isPartialSelect = isPartialSelect; this.session = session; this.dialect = dialect; this._ = { selectedFields: fields as TSelectedFields, config: this.config, } as this['_']; this.tableName = getTableLikeName(table); this.joinsNotNullableMap = typeof this.tableName === 'string' ? { [this.tableName]: true } : {}; for (const item of extractUsedTable(table)) this.usedTables.add(item); } /** @internal */ getUsedTables() { return [...this.usedTables]; } private createJoin< TJoinType extends JoinType, TIsLateral extends (TJoinType extends 'full' | 'right' ? false : boolean), >( joinType: TJoinType, lateral: TIsLateral, ): 'cross' extends TJoinType ? GelSelectCrossJoinFn : GelSelectJoinFn { return (( table: GelTable | Subquery | GelViewBase | SQL, on?: ((aliases: TSelection) => SQL | undefined) | SQL | undefined, ) => { const baseTableName = this.tableName; const tableName = getTableLikeName(table); if (typeof tableName === 'string' && this.config.joins?.some((join) => join.alias === tableName)) { throw new Error(`Alias "${tableName}" is already used in this query`); } // store all tables used in a query for (const item of extractUsedTable(table)) this.usedTables.add(item); if (!this.isPartialSelect) { // If this is the first join and this is not a partial select and we're not selecting from raw SQL, "move" the fields from the main table to the nested object if (Object.keys(this.joinsNotNullableMap).length === 1 && typeof baseTableName === 'string') { this.config.fields = { [baseTableName]: this.config.fields, }; } if (typeof tableName === 'string' && !is(table, SQL)) { const selection = is(table, Subquery) ? table._.selectedFields : is(table, View) ? table[ViewBaseConfig].selectedFields : table[Table.Symbol.Columns]; this.config.fields[tableName] = selection; } } if (typeof on === 'function') { on = on( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } if (!this.config.joins) { this.config.joins = []; } this.config.joins.push({ on, table, joinType, alias: tableName, lateral }); if (typeof tableName === 'string') { switch (joinType) { case 'left': { this.joinsNotNullableMap[tableName] = false; break; } case 'right': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = true; break; } case 'cross': case 'inner': { this.joinsNotNullableMap[tableName] = true; break; } case 'full': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = false; break; } } } return this as any; }) as any; } /** * Executes a `left join` operation by adding another table to the current query. * * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#left-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User; pets: Pet | null; }[] = await db.select() * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId)) * ``` */ leftJoin = this.createJoin('left', false); /** * Executes a `left join lateral` operation by adding subquery to the current query. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#left-join-lateral} * * @param table the subquery to join. * @param on the `on` clause. */ leftJoinLateral = this.createJoin('left', true); /** * Executes a `right join` operation by adding another table to the current query. * * Calling this method associates each row of the joined table with the corresponding row from the main table, if a match is found. If no matching row exists, it sets all columns of the main table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#right-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User | null; pets: Pet; }[] = await db.select() * .from(users) * .rightJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number | null; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .rightJoin(pets, eq(users.id, pets.ownerId)) * ``` */ rightJoin = this.createJoin('right', false); /** * Executes an `inner join` operation, creating a new table by combining rows from two tables that have matching values. * * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. * * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() * .from(users) * .innerJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .innerJoin(pets, eq(users.id, pets.ownerId)) * ``` */ innerJoin = this.createJoin('inner', false); /** * Executes an `inner join lateral` operation, creating a new table by combining rows from two queries that have matching values. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. * * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join-lateral} * * @param table the subquery to join. * @param on the `on` clause. */ innerJoinLateral = this.createJoin('inner', true); /** * Executes a `full join` operation by combining rows from two tables into a new table. * * Calling this method retrieves all rows from both main and joined tables, merging rows with matching values and filling in `null` for non-matching columns. * * See docs: {@link https://orm.drizzle.team/docs/joins#full-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User | null; pets: Pet | null; }[] = await db.select() * .from(users) * .fullJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number | null; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .fullJoin(pets, eq(users.id, pets.ownerId)) * ``` */ fullJoin = this.createJoin('full', false); /** * Executes a `cross join` operation by combining rows from two tables into a new table. * * Calling this method retrieves all rows from both main and joined tables, merging all rows from each table. * * See docs: {@link https://orm.drizzle.team/docs/joins#cross-join} * * @param table the table to join. * * @example * * ```ts * // Select all users, each user with every pet * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() * .from(users) * .crossJoin(pets) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .crossJoin(pets) * ``` */ crossJoin = this.createJoin('cross', false); /** * Executes a `cross join lateral` operation by combining rows from two queries into a new table. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method retrieves all rows from both main and joined queries, merging all rows from each query. * * See docs: {@link https://orm.drizzle.team/docs/joins#cross-join-lateral} * * @param table the query to join. */ crossJoinLateral = this.createJoin('cross', true); private createSetOperator( type: SetOperator, isAll: boolean, ): >( rightSelection: | ((setOperators: GetGelSetOperators) => SetOperatorRightSelect) | SetOperatorRightSelect, ) => GelSelectWithout< this, TDynamic, GelSetOperatorExcludedMethods, true > { return (rightSelection) => { const rightSelect = (typeof rightSelection === 'function' ? rightSelection(getGelSetOperators()) : rightSelection) as TypedQueryBuilder< any, TResult >; if (!haveSameKeys(this.getSelectedFields(), rightSelect.getSelectedFields())) { throw new Error( 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', ); } this.config.setOperators.push({ type, isAll, rightSelect }); return this as any; }; } /** * Adds `union` set operator to the query. * * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} * * @example * * ```ts * // Select all unique names from customers and users tables * await db.select({ name: users.name }) * .from(users) * .union( * db.select({ name: customers.name }).from(customers) * ); * // or * import { union } from 'drizzle-orm/gel-core' * * await union( * db.select({ name: users.name }).from(users), * db.select({ name: customers.name }).from(customers) * ); * ``` */ union = this.createSetOperator('union', false); /** * Adds `union all` set operator to the query. * * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} * * @example * * ```ts * // Select all transaction ids from both online and in-store sales * await db.select({ transaction: onlineSales.transactionId }) * .from(onlineSales) * .unionAll( * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * // or * import { unionAll } from 'drizzle-orm/gel-core' * * await unionAll( * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * ``` */ unionAll = this.createSetOperator('union', true); /** * Adds `intersect` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} * * @example * * ```ts * // Select course names that are offered in both departments A and B * await db.select({ courseName: depA.courseName }) * .from(depA) * .intersect( * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * import { intersect } from 'drizzle-orm/gel-core' * * await intersect( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ intersect = this.createSetOperator('intersect', false); /** * Adds `intersect all` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets including all duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect-all} * * @example * * ```ts * // Select all products and quantities that are ordered by both regular and VIP customers * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders) * .intersectAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * // or * import { intersectAll } from 'drizzle-orm/gel-core' * * await intersectAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * ``` */ intersectAll = this.createSetOperator('intersect', true); /** * Adds `except` set operator to the query. * * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} * * @example * * ```ts * // Select all courses offered in department A but not in department B * await db.select({ courseName: depA.courseName }) * .from(depA) * .except( * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * import { except } from 'drizzle-orm/gel-core' * * await except( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ except = this.createSetOperator('except', false); /** * Adds `except all` set operator to the query. * * Calling this method will retrieve all rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except-all} * * @example * * ```ts * // Select all products that are ordered by regular customers but not by VIP customers * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered, * }) * .from(regularCustomerOrders) * .exceptAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered, * }) * .from(vipCustomerOrders) * ); * // or * import { exceptAll } from 'drizzle-orm/gel-core' * * await exceptAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * ``` */ exceptAll = this.createSetOperator('except', true); /** @internal */ addSetOperators(setOperators: GelSelectConfig['setOperators']): GelSelectWithout< this, TDynamic, GelSetOperatorExcludedMethods, true > { this.config.setOperators.push(...setOperators); return this as any; } /** * Adds a `where` clause to the query. * * Calling this method will select only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/select#filtering} * * @param where the `where` clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be selected. * * ```ts * // Select all cars with green color * await db.select().from(cars).where(eq(cars.color, 'green')); * // or * await db.select().from(cars).where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Select all BMW cars with a green color * await db.select().from(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Select all cars with the green or blue color * await db.select().from(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where( where: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, ): GelSelectWithout { if (typeof where === 'function') { where = where( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } this.config.where = where; return this as any; } /** * Adds a `having` clause to the query. * * Calling this method will select only those rows that fulfill a specified condition. It is typically used with aggregate functions to filter the aggregated data based on a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} * * @param having the `having` clause. * * @example * * ```ts * // Select all brands with more than one car * await db.select({ * brand: cars.brand, * count: sql`cast(count(${cars.id}) as int)`, * }) * .from(cars) * .groupBy(cars.brand) * .having(({ count }) => gt(count, 1)); * ``` */ having( having: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, ): GelSelectWithout { if (typeof having === 'function') { having = having( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } this.config.having = having; return this as any; } /** * Adds a `group by` clause to the query. * * Calling this method will group rows that have the same values into summary rows, often used for aggregation purposes. * * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} * * @example * * ```ts * // Group and count people by their last names * await db.select({ * lastName: people.lastName, * count: sql`cast(count(*) as int)` * }) * .from(people) * .groupBy(people.lastName); * ``` */ groupBy( builder: (aliases: this['_']['selection']) => ValueOrArray, ): GelSelectWithout; groupBy(...columns: (GelColumn | SQL | SQL.Aliased)[]): GelSelectWithout; groupBy( ...columns: | [(aliases: this['_']['selection']) => ValueOrArray] | (GelColumn | SQL | SQL.Aliased)[] ): GelSelectWithout { if (typeof columns[0] === 'function') { const groupBy = columns[0]( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as TSelection, ); this.config.groupBy = Array.isArray(groupBy) ? groupBy : [groupBy]; } else { this.config.groupBy = columns as (GelColumn | SQL | SQL.Aliased)[]; } return this as any; } /** * Adds an `order by` clause to the query. * * Calling this method will sort the result-set in ascending or descending order. By default, the sort order is ascending. * * See docs: {@link https://orm.drizzle.team/docs/select#order-by} * * @example * * ``` * // Select cars ordered by year * await db.select().from(cars).orderBy(cars.year); * ``` * * You can specify whether results are in ascending or descending order with the `asc()` and `desc()` operators. * * ```ts * // Select cars ordered by year in descending order * await db.select().from(cars).orderBy(desc(cars.year)); * * // Select cars ordered by year and price * await db.select().from(cars).orderBy(asc(cars.year), desc(cars.price)); * ``` */ orderBy( builder: (aliases: this['_']['selection']) => ValueOrArray, ): GelSelectWithout; orderBy(...columns: (GelColumn | SQL | SQL.Aliased)[]): GelSelectWithout; orderBy( ...columns: | [(aliases: this['_']['selection']) => ValueOrArray] | (GelColumn | SQL | SQL.Aliased)[] ): GelSelectWithout { if (typeof columns[0] === 'function') { const orderBy = columns[0]( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as TSelection, ); const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.orderBy = orderByArray; } else { this.config.orderBy = orderByArray; } } else { const orderByArray = columns as (GelColumn | SQL | SQL.Aliased)[]; if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.orderBy = orderByArray; } else { this.config.orderBy = orderByArray; } } return this as any; } /** * Adds a `limit` clause to the query. * * Calling this method will set the maximum number of rows that will be returned by this query. * * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} * * @param limit the `limit` clause. * * @example * * ```ts * // Get the first 10 people from this query. * await db.select().from(people).limit(10); * ``` */ limit(limit: number | Placeholder): GelSelectWithout { if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.limit = limit; } else { this.config.limit = limit; } return this as any; } /** * Adds an `offset` clause to the query. * * Calling this method will skip a number of rows when returning results from this query. * * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} * * @param offset the `offset` clause. * * @example * * ```ts * // Get the 10th-20th people from this query. * await db.select().from(people).offset(10).limit(10); * ``` */ offset(offset: number | Placeholder): GelSelectWithout { if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.offset = offset; } else { this.config.offset = offset; } return this as any; } /** * Adds a `for` clause to the query. * * Calling this method will specify a lock strength for this query that controls how strictly it acquires exclusive access to the rows being queried. * * See docs: {@link https://www.postgresql.org/docs/current/sql-select.html#SQL-FOR-UPDATE-SHARE} * * @param strength the lock strength. * @param config the lock configuration. */ for(strength: LockStrength, config: LockConfig = {}): GelSelectWithout { this.config.lockingClause = { strength, config }; return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildSelectQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } as( alias: TAlias, ): SubqueryWithSelection { const usedTables: string[] = []; usedTables.push(...extractUsedTable(this.config.table)); if (this.config.joins) { for (const it of this.config.joins) usedTables.push(...extractUsedTable(it.table)); } return new Proxy( new Subquery(this.getSQL(), this.config.fields, alias, false, [...new Set(usedTables)]), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as SubqueryWithSelection; } /** @internal */ override getSelectedFields(): this['_']['selectedFields'] { return new Proxy( this.config.fields, new SelectionProxyHandler({ alias: this.tableName, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as this['_']['selectedFields']; } $dynamic(): GelSelectDynamic { return this; } } export interface GelSelectBase< TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends GelSelectQueryBuilderBase< GelSelectHKT, TTableName, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, QueryPromise, SQLWrapper {} export class GelSelectBase< TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult = SelectResult[], TSelectedFields = BuildSubquerySelection, > extends GelSelectQueryBuilderBase< GelSelectHKT, TTableName, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields > implements RunnableQuery, SQLWrapper { static override readonly [entityKind]: string = 'GelSelect'; /** @internal */ _prepare(name?: string): GelSelectPrepare { const { session, config, dialect, joinsNotNullableMap, cacheConfig, usedTables } = this; if (!session) { throw new Error('Cannot execute a query on a query builder. Please use a database instance instead.'); } return tracer.startActiveSpan('drizzle.prepareQuery', () => { const fieldsList = orderSelectedFields(config.fields); const query = session.prepareQuery< PreparedQueryConfig & { execute: TResult } >(dialect.sqlToQuery(this.getSQL()), fieldsList, name, true, undefined, { type: 'select', tables: [...usedTables], }, cacheConfig); query.joinsNotNullableMap = joinsNotNullableMap; return query; }); } $withCache(config?: { config?: CacheConfig; tag?: string; autoInvalidate?: boolean } | false) { this.cacheConfig = config === undefined ? { config: {}, enable: true, autoInvalidate: true } : config === false ? { enable: false } : { enable: true, autoInvalidate: true, ...config }; return this; } /** * Create a prepared statement for this query. This allows * the database to remember this query for the given session * and call it by name, rather than specifying the full query. * * {@link https://www.postgresql.org/docs/current/sql-prepare.html | Postgres prepare documentation} */ prepare(name: string): GelSelectPrepare { return this._prepare(name); } execute: ReturnType['execute'] = (placeholderValues) => { return tracer.startActiveSpan('drizzle.operation', () => { return this._prepare().execute(placeholderValues); }); }; } applyMixins(GelSelectBase, [QueryPromise]); function createSetOperator(type: SetOperator, isAll: boolean): GelCreateSetOperatorFn { return (leftSelect, rightSelect, ...restSelects) => { const setOperators = [rightSelect, ...restSelects].map((select) => ({ type, isAll, rightSelect: select as AnyGelSelect, })); for (const setOperator of setOperators) { if (!haveSameKeys((leftSelect as any).getSelectedFields(), setOperator.rightSelect.getSelectedFields())) { throw new Error( 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', ); } } return (leftSelect as AnyGelSelect).addSetOperators(setOperators) as any; }; } const getGelSetOperators = () => ({ union, unionAll, intersect, intersectAll, except, exceptAll, }); /** * Adds `union` set operator to the query. * * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} * * @example * * ```ts * // Select all unique names from customers and users tables * import { union } from 'drizzle-orm/Gel-core' * * await union( * db.select({ name: users.name }).from(users), * db.select({ name: customers.name }).from(customers) * ); * // or * await db.select({ name: users.name }) * .from(users) * .union( * db.select({ name: customers.name }).from(customers) * ); * ``` */ export const union = createSetOperator('union', false); /** * Adds `union all` set operator to the query. * * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} * * @example * * ```ts * // Select all transaction ids from both online and in-store sales * import { unionAll } from 'drizzle-orm/Gel-core' * * await unionAll( * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * // or * await db.select({ transaction: onlineSales.transactionId }) * .from(onlineSales) * .unionAll( * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * ``` */ export const unionAll = createSetOperator('union', true); /** * Adds `intersect` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} * * @example * * ```ts * // Select course names that are offered in both departments A and B * import { intersect } from 'drizzle-orm/Gel-core' * * await intersect( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * await db.select({ courseName: depA.courseName }) * .from(depA) * .intersect( * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ export const intersect = createSetOperator('intersect', false); /** * Adds `intersect all` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets including all duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect-all} * * @example * * ```ts * // Select all products and quantities that are ordered by both regular and VIP customers * import { intersectAll } from 'drizzle-orm/Gel-core' * * await intersectAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * // or * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders) * .intersectAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * ``` */ export const intersectAll = createSetOperator('intersect', true); /** * Adds `except` set operator to the query. * * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} * * @example * * ```ts * // Select all courses offered in department A but not in department B * import { except } from 'drizzle-orm/Gel-core' * * await except( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * await db.select({ courseName: depA.courseName }) * .from(depA) * .except( * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ export const except = createSetOperator('except', false); /** * Adds `except all` set operator to the query. * * Calling this method will retrieve all rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except-all} * * @example * * ```ts * // Select all products that are ordered by regular customers but not by VIP customers * import { exceptAll } from 'drizzle-orm/Gel-core' * * await exceptAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * // or * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered, * }) * .from(regularCustomerOrders) * .exceptAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered, * }) * .from(vipCustomerOrders) * ); * ``` */ export const exceptAll = createSetOperator('except', true); ================================================ FILE: drizzle-orm/src/gel-core/query-builders/select.types.ts ================================================ import type { GelColumn } from '~/gel-core/columns/index.ts'; import type { GelTable, GelTableWithColumns } from '~/gel-core/table.ts'; import type { GelViewBase } from '~/gel-core/view-base.ts'; import type { GelViewWithSelection } from '~/gel-core/view.ts'; import type { SelectedFields as SelectedFieldsBase, SelectedFieldsFlat as SelectedFieldsFlatBase, SelectedFieldsOrdered as SelectedFieldsOrderedBase, } from '~/operations.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AppendToNullabilityMap, AppendToResult, BuildSubquerySelection, GetSelectTableName, JoinNullability, JoinType, MapColumnsToTableAlias, SelectMode, SelectResult, SetOperator, } from '~/query-builders/select.types.ts'; import type { ColumnsSelection, Placeholder, SQL, SQLWrapper, View } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import type { Table, UpdateTableConfig } from '~/table.ts'; import type { Assume, ValidateShape, ValueOrArray } from '~/utils.ts'; import type { GelPreparedQuery, PreparedQueryConfig } from '../session.ts'; import type { GelSelectBase, GelSelectQueryBuilderBase } from './select.ts'; export interface GelSelectJoinConfig { on: SQL | undefined; table: GelTable | Subquery | GelViewBase | SQL; alias: string | undefined; joinType: JoinType; lateral?: boolean; } export type BuildAliasTable = TTable extends Table ? GelTableWithColumns< UpdateTableConfig; }> > : TTable extends View ? GelViewWithSelection< TAlias, TTable['_']['existing'], MapColumnsToTableAlias > : never; export interface GelSelectConfig { withList?: Subquery[]; // Either fields or fieldsFlat must be defined fields: Record; fieldsFlat?: SelectedFieldsOrdered; where?: SQL; having?: SQL; table: GelTable | Subquery | GelViewBase | SQL; limit?: number | Placeholder; offset?: number | Placeholder; joins?: GelSelectJoinConfig[]; orderBy?: (GelColumn | SQL | SQL.Aliased)[]; groupBy?: (GelColumn | SQL | SQL.Aliased)[]; lockingClause?: { strength: LockStrength; config: LockConfig; }; distinct?: boolean | { on: (GelColumn | SQLWrapper)[]; }; setOperators: { rightSelect: TypedQueryBuilder; type: SetOperator; isAll: boolean; orderBy?: (GelColumn | SQL | SQL.Aliased)[]; limit?: number | Placeholder; offset?: number | Placeholder; }[]; } export type GelSelectJoin< T extends AnyGelSelectQueryBuilder, TDynamic extends boolean, TJoinType extends JoinType, TJoinedTable extends GelTable | Subquery | GelViewBase | SQL, TJoinedName extends GetSelectTableName = GetSelectTableName, > = T extends any ? GelSelectWithout< GelSelectKind< T['_']['hkt'], T['_']['tableName'], AppendToResult< T['_']['tableName'], T['_']['selection'], TJoinedName, TJoinedTable extends Table ? TJoinedTable['_']['columns'] : TJoinedTable extends Subquery ? Assume : never, T['_']['selectMode'] >, T['_']['selectMode'] extends 'partial' ? T['_']['selectMode'] : 'multiple', AppendToNullabilityMap, T['_']['dynamic'], T['_']['excludedMethods'] >, TDynamic, T['_']['excludedMethods'] > : never; export type GelSelectJoinFn< T extends AnyGelSelectQueryBuilder, TDynamic extends boolean, TJoinType extends JoinType, TIsLateral extends boolean, > = < TJoinedTable extends (TIsLateral extends true ? Subquery | SQL : GelTable | Subquery | GelViewBase | SQL), TJoinedName extends GetSelectTableName = GetSelectTableName, >( table: TJoinedTable, on: ((aliases: T['_']['selection']) => SQL | undefined) | SQL | undefined, ) => GelSelectJoin; export type GelSelectCrossJoinFn< T extends AnyGelSelectQueryBuilder, TDynamic extends boolean, TIsLateral extends boolean, > = < TJoinedTable extends (TIsLateral extends true ? Subquery | SQL : GelTable | Subquery | GelViewBase | SQL), TJoinedName extends GetSelectTableName = GetSelectTableName, >(table: TJoinedTable) => GelSelectJoin; export type SelectedFieldsFlat = SelectedFieldsFlatBase; export type SelectedFields = SelectedFieldsBase; export type SelectedFieldsOrdered = SelectedFieldsOrderedBase; export type LockStrength = 'update' | 'no key update' | 'share' | 'key share'; export type LockConfig = & { of?: ValueOrArray; } & ({ noWait: true; skipLocked?: undefined; } | { noWait?: undefined; skipLocked: true; } | { noWait?: undefined; skipLocked?: undefined; }); export interface GelSelectHKTBase { tableName: string | undefined; selection: unknown; selectMode: SelectMode; nullabilityMap: unknown; dynamic: boolean; excludedMethods: string; result: unknown; selectedFields: unknown; _type: unknown; } export type GelSelectKind< T extends GelSelectHKTBase, TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record, TDynamic extends boolean, TExcludedMethods extends string, TResult = SelectResult[], TSelectedFields = BuildSubquerySelection, > = (T & { tableName: TTableName; selection: TSelection; selectMode: TSelectMode; nullabilityMap: TNullabilityMap; dynamic: TDynamic; excludedMethods: TExcludedMethods; result: TResult; selectedFields: TSelectedFields; })['_type']; export interface GelSelectQueryBuilderHKT extends GelSelectHKTBase { _type: GelSelectQueryBuilderBase< GelSelectQueryBuilderHKT, this['tableName'], Assume, this['selectMode'], Assume>, this['dynamic'], this['excludedMethods'], Assume, Assume >; } export interface GelSelectHKT extends GelSelectHKTBase { _type: GelSelectBase< this['tableName'], Assume, this['selectMode'], Assume>, this['dynamic'], this['excludedMethods'], Assume, Assume >; } export type CreateGelSelectFromBuilderMode< TBuilderMode extends 'db' | 'qb', TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, > = TBuilderMode extends 'db' ? GelSelectBase : GelSelectQueryBuilderBase; export type GelSetOperatorExcludedMethods = | 'leftJoin' | 'rightJoin' | 'innerJoin' | 'fullJoin' | 'where' | 'having' | 'groupBy' | 'for'; export type GelSelectWithout< T extends AnyGelSelectQueryBuilder, TDynamic extends boolean, K extends keyof T & string, TResetExcluded extends boolean = false, > = TDynamic extends true ? T : Omit< GelSelectKind< T['_']['hkt'], T['_']['tableName'], T['_']['selection'], T['_']['selectMode'], T['_']['nullabilityMap'], TDynamic, TResetExcluded extends true ? K : T['_']['excludedMethods'] | K, T['_']['result'], T['_']['selectedFields'] >, TResetExcluded extends true ? K : T['_']['excludedMethods'] | K >; export type GelSelectPrepare = GelPreparedQuery< PreparedQueryConfig & { execute: T['_']['result']; } >; export type GelSelectDynamic = GelSelectKind< T['_']['hkt'], T['_']['tableName'], T['_']['selection'], T['_']['selectMode'], T['_']['nullabilityMap'], true, never, T['_']['result'], T['_']['selectedFields'] >; export type GelSelectQueryBuilder< THKT extends GelSelectHKTBase = GelSelectQueryBuilderHKT, TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = ColumnsSelection, TSelectMode extends SelectMode = SelectMode, TNullabilityMap extends Record = Record, TResult extends any[] = unknown[], TSelectedFields extends ColumnsSelection = ColumnsSelection, > = GelSelectQueryBuilderBase< THKT, TTableName, TSelection, TSelectMode, TNullabilityMap, true, never, TResult, TSelectedFields >; export type AnyGelSelectQueryBuilder = GelSelectQueryBuilderBase; export type AnyGelSetOperatorInterface = GelSetOperatorInterface; export interface GelSetOperatorInterface< TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > { _: { readonly hkt: GelSelectHKT; readonly tableName: TTableName; readonly selection: TSelection; readonly selectMode: TSelectMode; readonly nullabilityMap: TNullabilityMap; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TResult; readonly selectedFields: TSelectedFields; }; } export type GelSetOperatorWithResult = GelSetOperatorInterface< any, any, any, any, any, any, TResult, any >; export type GelSelect< TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = Record, TSelectMode extends SelectMode = SelectMode, TNullabilityMap extends Record = Record, > = GelSelectBase; export type AnyGelSelect = GelSelectBase; export type GelSetOperator< TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = Record, TSelectMode extends SelectMode = SelectMode, TNullabilityMap extends Record = Record, > = GelSelectBase< TTableName, TSelection, TSelectMode, TNullabilityMap, true, GelSetOperatorExcludedMethods >; export type SetOperatorRightSelect< TValue extends GelSetOperatorWithResult, TResult extends any[], > = TValue extends GelSetOperatorInterface ? ValidateShape< TValueResult[number], TResult[number], TypedQueryBuilder > : TValue; export type SetOperatorRestSelect< TValue extends readonly GelSetOperatorWithResult[], TResult extends any[], > = TValue extends [infer First, ...infer Rest] ? First extends GelSetOperatorInterface ? Rest extends AnyGelSetOperatorInterface[] ? [ ValidateShape>, ...SetOperatorRestSelect, ] : ValidateShape[]> : never : TValue; export type GelCreateSetOperatorFn = < TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TValue extends GelSetOperatorWithResult, TRest extends GelSetOperatorWithResult[], TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, >( leftSelect: GelSetOperatorInterface< TTableName, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, rightSelect: SetOperatorRightSelect, ...restSelects: SetOperatorRestSelect ) => GelSelectWithout< GelSelectBase< TTableName, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, false, GelSetOperatorExcludedMethods, true >; export type GetGelSetOperators = { union: GelCreateSetOperatorFn; intersect: GelCreateSetOperatorFn; except: GelCreateSetOperatorFn; unionAll: GelCreateSetOperatorFn; intersectAll: GelCreateSetOperatorFn; exceptAll: GelCreateSetOperatorFn; }; ================================================ FILE: drizzle-orm/src/gel-core/query-builders/update.ts ================================================ import type { GetColumnData } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import type { GelDialect } from '~/gel-core/dialect.ts'; import type { GelPreparedQuery, GelQueryResultHKT, GelQueryResultKind, GelSession, PreparedQueryConfig, } from '~/gel-core/session.ts'; import { GelTable } from '~/gel-core/table.ts'; import type { AppendToNullabilityMap, AppendToResult, GetSelectTableName, GetSelectTableSelection, JoinNullability, JoinType, SelectMode, SelectResult, } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import { type ColumnsSelection, type Query, SQL, type SQLWrapper } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { type Assume, getTableLikeName, mapUpdateSet, type NeonAuthToken, orderSelectedFields, type UpdateSet, } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { GelColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import type { GelViewBase } from '../view-base.ts'; import type { GelSelectJoinConfig, SelectedFields, SelectedFieldsOrdered } from './select.types.ts'; export interface GelUpdateConfig { where?: SQL | undefined; set: UpdateSet; table: GelTable; from?: GelTable | Subquery | GelViewBase | SQL; joins: GelSelectJoinConfig[]; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } export type GelUpdateSetSource = & { [Key in keyof TTable['$inferInsert']]?: | GetColumnData | SQL | GelColumn; } & {}; export class GelUpdateBuilder { static readonly [entityKind]: string = 'GelUpdateBuilder'; declare readonly _: { readonly table: TTable; }; constructor( private table: TTable, private session: GelSession, private dialect: GelDialect, private withList?: Subquery[], ) {} private authToken?: NeonAuthToken; setToken(token: NeonAuthToken) { this.authToken = token; return this; } set( values: GelUpdateSetSource, ): GelUpdateWithout, false, 'leftJoin' | 'rightJoin' | 'innerJoin' | 'fullJoin'> { return new GelUpdateBase( this.table, mapUpdateSet(this.table, values), this.session, this.dialect, this.withList, ); } } export type GelUpdateWithout< T extends AnyGelUpdate, TDynamic extends boolean, K extends keyof T & string, > = TDynamic extends true ? T : Omit< GelUpdateBase< T['_']['table'], T['_']['queryResult'], T['_']['from'], T['_']['returning'], T['_']['nullabilityMap'], T['_']['joins'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type GelUpdateWithJoins< T extends AnyGelUpdate, TDynamic extends boolean, TFrom extends GelTable | Subquery | GelViewBase | SQL, > = TDynamic extends true ? T : Omit< GelUpdateBase< T['_']['table'], T['_']['queryResult'], TFrom, T['_']['returning'], AppendToNullabilityMap, 'inner'>, [...T['_']['joins'], { name: GetSelectTableName; joinType: 'inner'; table: TFrom; }], TDynamic, Exclude >, Exclude >; export type GelUpdateJoinFn< T extends AnyGelUpdate, TDynamic extends boolean, TJoinType extends JoinType, > = < TJoinedTable extends GelTable | Subquery | GelViewBase | SQL, >( table: TJoinedTable, on: | ( ( updateTable: T['_']['table']['_']['columns'], from: T['_']['from'] extends GelTable ? T['_']['from']['_']['columns'] : T['_']['from'] extends Subquery | GelViewBase ? T['_']['from']['_']['selectedFields'] : never, ) => SQL | undefined ) | SQL | undefined, ) => GelUpdateJoin; export type GelUpdateJoin< T extends AnyGelUpdate, TDynamic extends boolean, TJoinType extends JoinType, TJoinedTable extends GelTable | Subquery | GelViewBase | SQL, > = TDynamic extends true ? T : GelUpdateBase< T['_']['table'], T['_']['queryResult'], T['_']['from'], T['_']['returning'], AppendToNullabilityMap, TJoinType>, [...T['_']['joins'], { name: GetSelectTableName; joinType: TJoinType; table: TJoinedTable; }], TDynamic, T['_']['excludedMethods'] >; type Join = { name: string | undefined; joinType: JoinType; table: GelTable | Subquery | GelViewBase | SQL; }; type AccumulateToResult< T extends AnyGelUpdate, TSelectMode extends SelectMode, TJoins extends Join[], TSelectedFields extends ColumnsSelection, > = TJoins extends [infer TJoin extends Join, ...infer TRest extends Join[]] ? AccumulateToResult< T, TSelectMode extends 'partial' ? TSelectMode : 'multiple', TRest, AppendToResult< T['_']['table']['_']['name'], TSelectedFields, TJoin['name'], TJoin['table'] extends Table ? TJoin['table']['_']['columns'] : TJoin['table'] extends Subquery ? Assume : never, TSelectMode extends 'partial' ? TSelectMode : 'multiple' > > : TSelectedFields; export type GelUpdateReturningAll = GelUpdateWithout< GelUpdateBase< T['_']['table'], T['_']['queryResult'], T['_']['from'], SelectResult< AccumulateToResult< T, 'single', T['_']['joins'], GetSelectTableSelection >, 'partial', T['_']['nullabilityMap'] >, T['_']['nullabilityMap'], T['_']['joins'], TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type GelUpdateReturning< T extends AnyGelUpdate, TDynamic extends boolean, TSelectedFields extends SelectedFields, > = GelUpdateWithout< GelUpdateBase< T['_']['table'], T['_']['queryResult'], T['_']['from'], SelectResult< AccumulateToResult< T, 'partial', T['_']['joins'], TSelectedFields >, 'partial', T['_']['nullabilityMap'] >, T['_']['nullabilityMap'], T['_']['joins'], TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type GelUpdatePrepare = GelPreparedQuery< PreparedQueryConfig & { execute: T['_']['returning'] extends undefined ? GelQueryResultKind : T['_']['returning'][]; } >; export type GelUpdateDynamic = GelUpdate< T['_']['table'], T['_']['queryResult'], T['_']['from'], T['_']['returning'], T['_']['nullabilityMap'] >; export type GelUpdate< TTable extends GelTable = GelTable, TQueryResult extends GelQueryResultHKT = GelQueryResultHKT, TFrom extends GelTable | Subquery | GelViewBase | SQL | undefined = undefined, TReturning extends Record | undefined = Record | undefined, TNullabilityMap extends Record = Record, TJoins extends Join[] = [], > = GelUpdateBase; export type AnyGelUpdate = GelUpdateBase; export interface GelUpdateBase< TTable extends GelTable, TQueryResult extends GelQueryResultHKT, TFrom extends GelTable | Subquery | GelViewBase | SQL | undefined = undefined, TReturning extends Record | undefined = undefined, TNullabilityMap extends Record = Record, TJoins extends Join[] = [], TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]>, RunnableQuery : TReturning[], 'gel'>, SQLWrapper { readonly _: { readonly dialect: 'gel'; readonly table: TTable; readonly joins: TJoins; readonly nullabilityMap: TNullabilityMap; readonly queryResult: TQueryResult; readonly from: TFrom; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TReturning extends undefined ? GelQueryResultKind : TReturning[]; }; } export class GelUpdateBase< TTable extends GelTable, TQueryResult extends GelQueryResultHKT, TFrom extends GelTable | Subquery | GelViewBase | SQL | undefined = undefined, TReturning extends Record | undefined = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TNullabilityMap extends Record = Record, // eslint-disable-next-line @typescript-eslint/no-unused-vars TJoins extends Join[] = [], // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]> implements RunnableQuery : TReturning[], 'gel'>, SQLWrapper { static override readonly [entityKind]: string = 'GelUpdate'; private config: GelUpdateConfig; private tableName: string | undefined; private joinsNotNullableMap: Record; constructor( table: TTable, set: UpdateSet, private session: GelSession, private dialect: GelDialect, withList?: Subquery[], ) { super(); this.config = { set, table, withList, joins: [] }; this.tableName = getTableLikeName(table); this.joinsNotNullableMap = typeof this.tableName === 'string' ? { [this.tableName]: true } : {}; } from( source: TFrom, ): GelUpdateWithJoins { const tableName = getTableLikeName(source); if (typeof tableName === 'string') { this.joinsNotNullableMap[tableName] = true; } this.config.from = source; return this as any; } private getTableLikeFields(table: GelTable | Subquery | GelViewBase): Record { if (is(table, GelTable)) { return table[Table.Symbol.Columns]; } else if (is(table, Subquery)) { return table._.selectedFields; } return table[ViewBaseConfig].selectedFields; } private createJoin( joinType: TJoinType, ): GelUpdateJoinFn { return (( table: GelTable | Subquery | GelViewBase | SQL, on: ((updateTable: TTable, from: TFrom) => SQL | undefined) | SQL | undefined, ) => { const tableName = getTableLikeName(table); if (typeof tableName === 'string' && this.config.joins.some((join) => join.alias === tableName)) { throw new Error(`Alias "${tableName}" is already used in this query`); } if (typeof on === 'function') { const from = this.config.from && !is(this.config.from, SQL) ? this.getTableLikeFields(this.config.from) : undefined; on = on( new Proxy( this.config.table[Table.Symbol.Columns], new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as any, from && new Proxy( from, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as any, ); } this.config.joins.push({ on, table, joinType, alias: tableName }); if (typeof tableName === 'string') { switch (joinType) { case 'left': { this.joinsNotNullableMap[tableName] = false; break; } case 'right': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = true; break; } case 'inner': { this.joinsNotNullableMap[tableName] = true; break; } case 'full': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = false; break; } } } return this as any; }) as any; } leftJoin = this.createJoin('left'); rightJoin = this.createJoin('right'); innerJoin = this.createJoin('inner'); fullJoin = this.createJoin('full'); /** * Adds a 'where' clause to the query. * * Calling this method will update only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param where the 'where' clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be updated. * * ```ts * // Update all cars with green color * await db.update(cars).set({ color: 'red' }) * .where(eq(cars.color, 'green')); * // or * await db.update(cars).set({ color: 'red' }) * .where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Update all BMW cars with a green color * await db.update(cars).set({ color: 'red' }) * .where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Update all cars with the green or blue color * await db.update(cars).set({ color: 'red' }) * .where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where(where: SQL | undefined): GelUpdateWithout { this.config.where = where; return this as any; } /** * Adds a `returning` clause to the query. * * Calling this method will return the specified fields of the updated rows. If no fields are specified, all fields will be returned. * * See docs: {@link https://orm.drizzle.team/docs/update#update-with-returning} * * @example * ```ts * // Update all cars with the green color and return all fields * const updatedCars: Car[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.color, 'green')) * .returning(); * * // Update all cars with the green color and return only their id and brand fields * const updatedCarsIdsAndBrands: { id: number, brand: string }[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.color, 'green')) * .returning({ id: cars.id, brand: cars.brand }); * ``` */ returning(): GelUpdateReturningAll; returning( fields: TSelectedFields, ): GelUpdateReturning; returning( fields?: SelectedFields, ): GelUpdateWithout { if (!fields) { fields = Object.assign({}, this.config.table[Table.Symbol.Columns]); if (this.config.from) { const tableName = getTableLikeName(this.config.from); if (typeof tableName === 'string' && this.config.from && !is(this.config.from, SQL)) { const fromFields = this.getTableLikeFields(this.config.from); fields[tableName] = fromFields as any; } for (const join of this.config.joins) { const tableName = getTableLikeName(join.table); if (typeof tableName === 'string' && !is(join.table, SQL)) { const fromFields = this.getTableLikeFields(join.table); fields[tableName] = fromFields as any; } } } } this.config.returning = orderSelectedFields(fields); return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildUpdateQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } /** @internal */ _prepare(name?: string): GelUpdatePrepare { const query = this.session.prepareQuery< PreparedQueryConfig & { execute: TReturning[] } >(this.dialect.sqlToQuery(this.getSQL()), this.config.returning, name, true, undefined, { type: 'update', tables: extractUsedTable(this.config.table), }); query.joinsNotNullableMap = this.joinsNotNullableMap; return query; } prepare(name: string): GelUpdatePrepare { return this._prepare(name); } override execute: ReturnType['execute'] = (placeholderValues) => { return this._prepare().execute(placeholderValues); }; $dynamic(): GelUpdateDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/gel-core/roles.ts ================================================ import { entityKind } from '~/entity.ts'; export interface GelRoleConfig { createDb?: boolean; createRole?: boolean; inherit?: boolean; } export class GelRole implements GelRoleConfig { static readonly [entityKind]: string = 'GelRole'; /** @internal */ _existing?: boolean; /** @internal */ readonly createDb: GelRoleConfig['createDb']; /** @internal */ readonly createRole: GelRoleConfig['createRole']; /** @internal */ readonly inherit: GelRoleConfig['inherit']; constructor( readonly name: string, config?: GelRoleConfig, ) { if (config) { this.createDb = config.createDb; this.createRole = config.createRole; this.inherit = config.inherit; } } existing(): this { this._existing = true; return this; } } export function gelRole(name: string, config?: GelRoleConfig) { return new GelRole(name, config); } ================================================ FILE: drizzle-orm/src/gel-core/schema.ts ================================================ import { entityKind, is } from '~/entity.ts'; import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { gelSequence } from './sequence.ts'; import { gelSequenceWithSchema } from './sequence.ts'; import { type GelTableFn, gelTableWithSchema } from './table.ts'; // import type { gelMaterializedView, gelView } from './view.ts'; // import { gelMaterializedViewWithSchema, gelViewWithSchema } from './view.ts'; export class GelSchema implements SQLWrapper { static readonly [entityKind]: string = 'GelSchema'; constructor( public readonly schemaName: TName, ) {} table: GelTableFn = ((name, columns, extraConfig) => { return gelTableWithSchema(name, columns, extraConfig, this.schemaName); }); // view = ((name, columns) => { // return gelViewWithSchema(name, columns, this.schemaName); // }) as typeof gelView; // materializedView = ((name, columns) => { // return gelMaterializedViewWithSchema(name, columns, this.schemaName); // }) as typeof gelMaterializedView; // enum: typeof gelEnum = ((name, values) => { // return gelEnumWithSchema(name, values, this.schemaName); // }); sequence: typeof gelSequence = ((name, options) => { return gelSequenceWithSchema(name, options, this.schemaName); }); getSQL(): SQL { return new SQL([sql.identifier(this.schemaName)]); } shouldOmitSQLParens(): boolean { return true; } } export function isGelSchema(obj: unknown): obj is GelSchema { return is(obj, GelSchema); } export function gelSchema(name: T) { if (name === 'public') { throw new Error( `You can't specify 'public' as schema name. Postgres is using public schema by default. If you want to use 'public' schema, just use GelTable() instead of creating a schema`, ); } return new GelSchema(name); } ================================================ FILE: drizzle-orm/src/gel-core/sequence.ts ================================================ import { entityKind, is } from '~/entity.ts'; export type GelSequenceOptions = { increment?: number | string; minValue?: number | string; maxValue?: number | string; startWith?: number | string; cache?: number | string; cycle?: boolean; }; export class GelSequence { static readonly [entityKind]: string = 'GelSequence'; constructor( public readonly seqName: string | undefined, public readonly seqOptions: GelSequenceOptions | undefined, public readonly schema: string | undefined, ) { } } export function gelSequence( name: string, options?: GelSequenceOptions, ): GelSequence { return gelSequenceWithSchema(name, options, undefined); } /** @internal */ export function gelSequenceWithSchema( name: string, options?: GelSequenceOptions, schema?: string, ): GelSequence { return new GelSequence(name, options, schema); } export function isGelSequence(obj: unknown): obj is GelSequence { return is(obj, GelSequence); } ================================================ FILE: drizzle-orm/src/gel-core/session.ts ================================================ import { type Cache, hashQuery, NoopCache } from '~/cache/core/cache.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import { DrizzleQueryError, TransactionRollbackError } from '~/errors.ts'; import type { TablesRelationalConfig } from '~/relations.ts'; import type { PreparedQuery } from '~/session.ts'; import type { Query, SQL } from '~/sql/index.ts'; import { tracer } from '~/tracing.ts'; import type { NeonAuthToken } from '~/utils.ts'; import { GelDatabase } from './db.ts'; import type { GelDialect } from './dialect.ts'; import type { SelectedFieldsOrdered } from './query-builders/select.types.ts'; export interface PreparedQueryConfig { execute: unknown; all: unknown; values: unknown; } export abstract class GelPreparedQuery implements PreparedQuery { constructor( protected query: Query, private cache?: Cache, // per query related metadata private queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, // config that was passed through $withCache private cacheConfig?: WithCacheConfig, ) { // it means that no $withCache options were passed and it should be just enabled if (cache && cache.strategy() === 'all' && cacheConfig === undefined) { this.cacheConfig = { enable: true, autoInvalidate: true }; } if (!this.cacheConfig?.enable) { this.cacheConfig = undefined; } } /** @internal */ protected async queryWithCache( queryString: string, params: any[], query: () => Promise, ): Promise { if (this.cache === undefined || is(this.cache, NoopCache) || this.queryMetadata === undefined) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // don't do any mutations, if globally is false if (this.cacheConfig && !this.cacheConfig.enable) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // For mutate queries, we should query the database, wait for a response, and then perform invalidation if ( ( this.queryMetadata.type === 'insert' || this.queryMetadata.type === 'update' || this.queryMetadata.type === 'delete' ) && this.queryMetadata.tables.length > 0 ) { try { const [res] = await Promise.all([ query(), this.cache.onMutate({ tables: this.queryMetadata.tables }), ]); return res; } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // don't do any reads if globally disabled if (!this.cacheConfig) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } if (this.queryMetadata.type === 'select') { const fromCache = await this.cache.get( this.cacheConfig.tag ?? await hashQuery(queryString, params), this.queryMetadata.tables, this.cacheConfig.tag !== undefined, this.cacheConfig.autoInvalidate, ); if (fromCache === undefined) { let result; try { result = await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } // put actual key await this.cache.put( this.cacheConfig.tag ?? await hashQuery(queryString, params), result, // make sure we send tables that were used in a query only if user wants to invalidate it on each write this.cacheConfig.autoInvalidate ? this.queryMetadata.tables : [], this.cacheConfig.tag !== undefined, this.cacheConfig.config, ); // put flag if we should invalidate or not return result; } return fromCache as unknown as T; } try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } protected authToken?: NeonAuthToken; getQuery(): Query { return this.query; } mapResult(response: unknown, _isFromBatch?: boolean): unknown { return response; } static readonly [entityKind]: string = 'GelPreparedQuery'; /** @internal */ joinsNotNullableMap?: Record; abstract execute(placeholderValues?: Record): Promise; /** @internal */ abstract all(placeholderValues?: Record): Promise; /** @internal */ abstract isResponseInArrayMode(): boolean; } export abstract class GelSession< TQueryResult extends GelQueryResultHKT = any, // TO TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > { static readonly [entityKind]: string = 'GelSession'; constructor(protected dialect: GelDialect) {} abstract prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][], mapColumnValue?: (value: unknown) => unknown) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): GelPreparedQuery; execute(query: SQL): Promise { return tracer.startActiveSpan('drizzle.operation', () => { const prepared = tracer.startActiveSpan('drizzle.prepareQuery', () => { return this.prepareQuery( this.dialect.sqlToQuery(query), undefined, undefined, false, ); }); return prepared.execute(undefined); }); } all(query: SQL): Promise { return this.prepareQuery( this.dialect.sqlToQuery(query), undefined, undefined, false, ).all(); } async count(sql: SQL): Promise { const res = await this.execute<[{ count: string }]>(sql); return Number( res[0]['count'], ); } abstract transaction( transaction: (tx: GelTransaction) => Promise, ): Promise; } export abstract class GelTransaction< TQueryResult extends GelQueryResultHKT, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > extends GelDatabase { static override readonly [entityKind]: string = 'GelTransaction'; constructor( dialect: GelDialect, session: GelSession, protected schema: { fullSchema: Record; schema: TSchema; tableNamesMap: Record; } | undefined, ) { super(dialect, session, schema); } rollback(): never { throw new TransactionRollbackError(); } abstract override transaction( transaction: (tx: GelTransaction) => Promise, ): Promise; } export interface GelQueryResultHKT { readonly $brand: 'GelQueryResultHKT'; readonly row: unknown; readonly type: unknown; } export type GelQueryResultKind = (TKind & { readonly row: TRow; })['type']; ================================================ FILE: drizzle-orm/src/gel-core/subquery.ts ================================================ import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; import type { ColumnsSelection } from '~/sql/sql.ts'; import type { Subquery, WithSubquery } from '~/subquery.ts'; export type SubqueryWithSelection = & Subquery> & AddAliasToSelection; export type WithSubqueryWithSelection = & WithSubquery> & AddAliasToSelection; ================================================ FILE: drizzle-orm/src/gel-core/table.ts ================================================ import type { BuildColumns, BuildExtraConfigColumns } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import { Table, type TableConfig as TableConfigBase, type UpdateTableConfig } from '~/table.ts'; import type { CheckBuilder } from './checks.ts'; import { type GelColumnsBuilders, getGelColumnBuilders } from './columns/all.ts'; import type { GelColumn, GelColumnBuilder, GelColumnBuilderBase, GelExtraConfigColumn } from './columns/common.ts'; import type { ForeignKey, ForeignKeyBuilder } from './foreign-keys.ts'; import type { AnyIndexBuilder } from './indexes.ts'; import type { GelPolicy } from './policies.ts'; import type { PrimaryKeyBuilder } from './primary-keys.ts'; import type { UniqueConstraintBuilder } from './unique-constraint.ts'; export type GelTableExtraConfigValue = | AnyIndexBuilder | CheckBuilder | ForeignKeyBuilder | PrimaryKeyBuilder | UniqueConstraintBuilder | GelPolicy; export type GelTableExtraConfig = Record< string, GelTableExtraConfigValue >; export type TableConfig = TableConfigBase; /** @internal */ export const InlineForeignKeys = Symbol.for('drizzle:GelInlineForeignKeys'); /** @internal */ export const EnableRLS = Symbol.for('drizzle:EnableRLS'); export class GelTable extends Table { static override readonly [entityKind]: string = 'GelTable'; /** @internal */ static override readonly Symbol = Object.assign({}, Table.Symbol, { InlineForeignKeys: InlineForeignKeys as typeof InlineForeignKeys, EnableRLS: EnableRLS as typeof EnableRLS, }); /**@internal */ [InlineForeignKeys]: ForeignKey[] = []; /** @internal */ [EnableRLS]: boolean = false; /** @internal */ override [Table.Symbol.ExtraConfigBuilder]: ((self: Record) => GelTableExtraConfig) | undefined = undefined; /** @internal */ override [Table.Symbol.ExtraConfigColumns]: Record = {}; } export type AnyGelTable = {}> = GelTable< UpdateTableConfig >; export type GelTableWithColumns = & GelTable & { [Key in keyof T['columns']]: T['columns'][Key]; } & { enableRLS: () => Omit< GelTableWithColumns, 'enableRLS' >; }; /** @internal */ export function gelTableWithSchema< TTableName extends string, TSchemaName extends string | undefined, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap | ((columnTypes: GelColumnsBuilders) => TColumnsMap), extraConfig: | (( self: BuildExtraConfigColumns, ) => GelTableExtraConfig | GelTableExtraConfigValue[]) | undefined, schema: TSchemaName, baseName = name, ): GelTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'gel'; }> { const rawTable = new GelTable<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'gel'; }>(name, schema, baseName); const parsedColumns: TColumnsMap = typeof columns === 'function' ? columns(getGelColumnBuilders()) : columns; const builtColumns = Object.fromEntries( Object.entries(parsedColumns).map(([name, colBuilderBase]) => { const colBuilder = colBuilderBase as GelColumnBuilder; colBuilder.setName(name); const column = colBuilder.build(rawTable); rawTable[InlineForeignKeys].push(...colBuilder.buildForeignKeys(column, rawTable)); return [name, column]; }), ) as unknown as BuildColumns; const builtColumnsForExtraConfig = Object.fromEntries( Object.entries(parsedColumns).map(([name, colBuilderBase]) => { const colBuilder = colBuilderBase as GelColumnBuilder; colBuilder.setName(name); const column = colBuilder.buildExtraConfigColumn(rawTable); return [name, column]; }), ) as unknown as BuildExtraConfigColumns; const table = Object.assign(rawTable, builtColumns); table[Table.Symbol.Columns] = builtColumns; table[Table.Symbol.ExtraConfigColumns] = builtColumnsForExtraConfig; if (extraConfig) { table[GelTable.Symbol.ExtraConfigBuilder] = extraConfig as any; } return Object.assign(table, { enableRLS: () => { table[GelTable.Symbol.EnableRLS] = true; return table as GelTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'gel'; }>; }, }); } export interface GelTableFn { /** * @deprecated The third parameter of GelTable is changing and will only accept an array instead of an object * * @example * Deprecated version: * ```ts * export const users = gelTable("users", { * id: integer(), * }, (t) => ({ * idx: index('custom_name').on(t.id) * })); * ``` * * New API: * ```ts * export const users = gelTable("users", { * id: integer(), * }, (t) => [ * index('custom_name').on(t.id) * ]); * ``` */ < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap, extraConfig: ( self: BuildExtraConfigColumns, ) => GelTableExtraConfig, ): GelTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'gel'; }>; /** * @deprecated The third parameter of gelTable is changing and will only accept an array instead of an object * * @example * Deprecated version: * ```ts * export const users = gelTable("users", { * id: integer(), * }, (t) => ({ * idx: index('custom_name').on(t.id) * })); * ``` * * New API: * ```ts * export const users = gelTable("users", { * id: integer(), * }, (t) => [ * index('custom_name').on(t.id) * ]); * ``` */ < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: (columnTypes: GelColumnsBuilders) => TColumnsMap, extraConfig: (self: BuildExtraConfigColumns) => GelTableExtraConfig, ): GelTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'gel'; }>; < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap, extraConfig?: ( self: BuildExtraConfigColumns, ) => GelTableExtraConfigValue[], ): GelTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'gel'; }>; < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: (columnTypes: GelColumnsBuilders) => TColumnsMap, extraConfig?: (self: BuildExtraConfigColumns) => GelTableExtraConfigValue[], ): GelTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'gel'; }>; } export const gelTable: GelTableFn = (name, columns, extraConfig) => { return gelTableWithSchema(name, columns, extraConfig, undefined); }; export function gelTableCreator(customizeTableName: (name: string) => string): GelTableFn { return (name, columns, extraConfig) => { return gelTableWithSchema(customizeTableName(name) as typeof name, columns, extraConfig, undefined, name); }; } ================================================ FILE: drizzle-orm/src/gel-core/unique-constraint.ts ================================================ import { entityKind } from '~/entity.ts'; import { TableName } from '~/table.utils.ts'; import type { GelColumn } from './columns/index.ts'; import type { GelTable } from './table.ts'; export function unique(name?: string): UniqueOnConstraintBuilder { return new UniqueOnConstraintBuilder(name); } export function uniqueKeyName(table: GelTable, columns: string[]) { return `${table[TableName]}_${columns.join('_')}_unique`; } export class UniqueConstraintBuilder { static readonly [entityKind]: string = 'GelUniqueConstraintBuilder'; /** @internal */ columns: GelColumn[]; /** @internal */ nullsNotDistinctConfig = false; constructor( columns: GelColumn[], private name?: string, ) { this.columns = columns; } nullsNotDistinct() { this.nullsNotDistinctConfig = true; return this; } /** @internal */ build(table: GelTable): UniqueConstraint { return new UniqueConstraint(table, this.columns, this.nullsNotDistinctConfig, this.name); } } export class UniqueOnConstraintBuilder { static readonly [entityKind]: string = 'GelUniqueOnConstraintBuilder'; /** @internal */ name?: string; constructor( name?: string, ) { this.name = name; } on(...columns: [GelColumn, ...GelColumn[]]) { return new UniqueConstraintBuilder(columns, this.name); } } export class UniqueConstraint { static readonly [entityKind]: string = 'GelUniqueConstraint'; readonly columns: GelColumn[]; readonly name?: string; readonly nullsNotDistinct: boolean = false; constructor(readonly table: GelTable, columns: GelColumn[], nullsNotDistinct: boolean, name?: string) { this.columns = columns; this.name = name ?? uniqueKeyName(this.table, this.columns.map((column) => column.name)); this.nullsNotDistinct = nullsNotDistinct; } getName() { return this.name; } } ================================================ FILE: drizzle-orm/src/gel-core/utils.ts ================================================ import { is } from '~/entity.ts'; import { SQL } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import { type Check, CheckBuilder } from './checks.ts'; import type { AnyGelColumn } from './columns/index.ts'; import { type ForeignKey, ForeignKeyBuilder } from './foreign-keys.ts'; import type { Index } from './indexes.ts'; import { IndexBuilder } from './indexes.ts'; import { GelPolicy } from './policies.ts'; import { type PrimaryKey, PrimaryKeyBuilder } from './primary-keys.ts'; import { GelTable } from './table.ts'; import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; import type { GelViewBase } from './view-base.ts'; import { GelViewConfig } from './view-common.ts'; import { type GelMaterializedView, GelMaterializedViewConfig, type GelView } from './view.ts'; export function getTableConfig(table: TTable) { const columns = Object.values(table[Table.Symbol.Columns]); const indexes: Index[] = []; const checks: Check[] = []; const primaryKeys: PrimaryKey[] = []; const foreignKeys: ForeignKey[] = Object.values(table[GelTable.Symbol.InlineForeignKeys]); const uniqueConstraints: UniqueConstraint[] = []; const name = table[Table.Symbol.Name]; const schema = table[Table.Symbol.Schema]; const policies: GelPolicy[] = []; const enableRLS: boolean = table[GelTable.Symbol.EnableRLS]; const extraConfigBuilder = table[GelTable.Symbol.ExtraConfigBuilder]; if (extraConfigBuilder !== undefined) { const extraConfig = extraConfigBuilder(table[Table.Symbol.ExtraConfigColumns]); const extraValues = Array.isArray(extraConfig) ? extraConfig.flat(1) as any[] : Object.values(extraConfig); for (const builder of extraValues) { if (is(builder, IndexBuilder)) { indexes.push(builder.build(table)); } else if (is(builder, CheckBuilder)) { checks.push(builder.build(table)); } else if (is(builder, UniqueConstraintBuilder)) { uniqueConstraints.push(builder.build(table)); } else if (is(builder, PrimaryKeyBuilder)) { primaryKeys.push(builder.build(table)); } else if (is(builder, ForeignKeyBuilder)) { foreignKeys.push(builder.build(table)); } else if (is(builder, GelPolicy)) { policies.push(builder); } } } return { columns, indexes, foreignKeys, checks, primaryKeys, uniqueConstraints, name, schema, policies, enableRLS, }; } export function extractUsedTable(table: GelTable | Subquery | GelViewBase | SQL): string[] { if (is(table, GelTable)) { return [`${table[Table.Symbol.BaseName]}`]; } if (is(table, Subquery)) { return table._.usedTables ?? []; } if (is(table, SQL)) { return table.usedTables ?? []; } return []; } export function getViewConfig< TName extends string = string, TExisting extends boolean = boolean, >(view: GelView) { return { ...view[ViewBaseConfig], ...view[GelViewConfig], }; } export function getMaterializedViewConfig< TName extends string = string, TExisting extends boolean = boolean, >(view: GelMaterializedView) { return { ...view[ViewBaseConfig], ...view[GelMaterializedViewConfig], }; } export type ColumnsWithTable< TTableName extends string, TForeignTableName extends string, TColumns extends AnyGelColumn<{ tableName: TTableName }>[], > = { [Key in keyof TColumns]: AnyGelColumn<{ tableName: TForeignTableName }> }; ================================================ FILE: drizzle-orm/src/gel-core/view-base.ts ================================================ import { entityKind } from '~/entity.ts'; import { type ColumnsSelection, View } from '~/sql/sql.ts'; export abstract class GelViewBase< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends View { static override readonly [entityKind]: string = 'GelViewBase'; declare readonly _: View['_'] & { readonly viewBrand: 'GelViewBase'; }; } ================================================ FILE: drizzle-orm/src/gel-core/view-common.ts ================================================ export const GelViewConfig = Symbol.for('drizzle:GelViewConfig'); ================================================ FILE: drizzle-orm/src/gel-core/view.ts ================================================ import type { BuildColumns } from '~/column-builder.ts'; import { entityKind, is } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import { getTableColumns } from '~/utils.ts'; import type { RequireAtLeastOne } from '~/utils.ts'; import type { GelColumn, GelColumnBuilderBase } from './columns/common.ts'; import { QueryBuilder } from './query-builders/query-builder.ts'; import { gelTable } from './table.ts'; import { GelViewBase } from './view-base.ts'; import { GelViewConfig } from './view-common.ts'; export type ViewWithConfig = RequireAtLeastOne<{ checkOption: 'local' | 'cascaded'; securityBarrier: boolean; securityInvoker: boolean; }>; export class DefaultViewBuilderCore { static readonly [entityKind]: string = 'GelDefaultViewBuilderCore'; declare readonly _: { readonly name: TConfig['name']; readonly columns: TConfig['columns']; }; constructor( protected name: TConfig['name'], protected schema: string | undefined, ) {} protected config: { with?: ViewWithConfig; } = {}; with(config: ViewWithConfig): this { this.config.with = config; return this; } } export class ViewBuilder extends DefaultViewBuilderCore<{ name: TName }> { static override readonly [entityKind]: string = 'GelViewBuilder'; as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): GelViewWithSelection> { if (typeof qb === 'function') { qb = qb(new QueryBuilder()); } const selectionProxy = new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }); const aliasedSelection = new Proxy(qb.getSelectedFields(), selectionProxy); return new Proxy( new GelView({ GelConfig: this.config, config: { name: this.name, schema: this.schema, selectedFields: aliasedSelection, query: qb.getSQL().inlineParams(), }, }), selectionProxy as any, ) as GelViewWithSelection>; } } export class ManualViewBuilder< TName extends string = string, TColumns extends Record = Record, > extends DefaultViewBuilderCore<{ name: TName; columns: TColumns }> { static override readonly [entityKind]: string = 'GelManualViewBuilder'; private columns: Record; constructor( name: TName, columns: TColumns, schema: string | undefined, ) { super(name, schema); this.columns = getTableColumns(gelTable(name, columns)); } existing(): GelViewWithSelection> { return new Proxy( new GelView({ GelConfig: undefined, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: undefined, }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as GelViewWithSelection>; } as(query: SQL): GelViewWithSelection> { return new Proxy( new GelView({ GelConfig: this.config, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: query.inlineParams(), }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as GelViewWithSelection>; } } export type GelMaterializedViewWithConfig = RequireAtLeastOne<{ fillfactor: number; toastTupleTarget: number; parallelWorkers: number; autovacuumEnabled: boolean; vacuumIndexCleanup: 'auto' | 'off' | 'on'; vacuumTruncate: boolean; autovacuumVacuumThreshold: number; autovacuumVacuumScaleFactor: number; autovacuumVacuumCostDelay: number; autovacuumVacuumCostLimit: number; autovacuumFreezeMinAge: number; autovacuumFreezeMaxAge: number; autovacuumFreezeTableAge: number; autovacuumMultixactFreezeMinAge: number; autovacuumMultixactFreezeMaxAge: number; autovacuumMultixactFreezeTableAge: number; logAutovacuumMinDuration: number; userCatalogTable: boolean; }>; export class MaterializedViewBuilderCore { static readonly [entityKind]: string = 'GelMaterializedViewBuilderCore'; declare _: { readonly name: TConfig['name']; readonly columns: TConfig['columns']; }; constructor( protected name: TConfig['name'], protected schema: string | undefined, ) {} protected config: { with?: GelMaterializedViewWithConfig; using?: string; tablespace?: string; withNoData?: boolean; } = {}; using(using: string): this { this.config.using = using; return this; } with(config: GelMaterializedViewWithConfig): this { this.config.with = config; return this; } tablespace(tablespace: string): this { this.config.tablespace = tablespace; return this; } withNoData(): this { this.config.withNoData = true; return this; } } export class MaterializedViewBuilder extends MaterializedViewBuilderCore<{ name: TName }> { static override readonly [entityKind]: string = 'GelMaterializedViewBuilder'; as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): GelMaterializedViewWithSelection> { if (typeof qb === 'function') { qb = qb(new QueryBuilder()); } const selectionProxy = new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }); const aliasedSelection = new Proxy(qb.getSelectedFields(), selectionProxy); return new Proxy( new GelMaterializedView({ GelConfig: { with: this.config.with, using: this.config.using, tablespace: this.config.tablespace, withNoData: this.config.withNoData, }, config: { name: this.name, schema: this.schema, selectedFields: aliasedSelection, query: qb.getSQL().inlineParams(), }, }), selectionProxy as any, ) as GelMaterializedViewWithSelection>; } } export class ManualMaterializedViewBuilder< TName extends string = string, TColumns extends Record = Record, > extends MaterializedViewBuilderCore<{ name: TName; columns: TColumns }> { static override readonly [entityKind]: string = 'GelManualMaterializedViewBuilder'; private columns: Record; constructor( name: TName, columns: TColumns, schema: string | undefined, ) { super(name, schema); this.columns = getTableColumns(gelTable(name, columns)); } existing(): GelMaterializedViewWithSelection> { return new Proxy( new GelMaterializedView({ GelConfig: { tablespace: this.config.tablespace, using: this.config.using, with: this.config.with, withNoData: this.config.withNoData, }, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: undefined, }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as GelMaterializedViewWithSelection>; } as(query: SQL): GelMaterializedViewWithSelection> { return new Proxy( new GelMaterializedView({ GelConfig: { tablespace: this.config.tablespace, using: this.config.using, with: this.config.with, withNoData: this.config.withNoData, }, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: query.inlineParams(), }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as GelMaterializedViewWithSelection>; } } export class GelView< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends GelViewBase { static override readonly [entityKind]: string = 'GelView'; [GelViewConfig]: { with?: ViewWithConfig; } | undefined; constructor({ GelConfig, config }: { GelConfig: { with?: ViewWithConfig; } | undefined; config: { name: TName; schema: string | undefined; selectedFields: ColumnsSelection; query: SQL | undefined; }; }) { super(config); if (GelConfig) { this[GelViewConfig] = { with: GelConfig.with, }; } } } export type GelViewWithSelection< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > = GelView & TSelectedFields; export const GelMaterializedViewConfig = Symbol.for('drizzle:GelMaterializedViewConfig'); export class GelMaterializedView< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends GelViewBase { static override readonly [entityKind]: string = 'GelMaterializedView'; readonly [GelMaterializedViewConfig]: { readonly with?: GelMaterializedViewWithConfig; readonly using?: string; readonly tablespace?: string; readonly withNoData?: boolean; } | undefined; constructor({ GelConfig, config }: { GelConfig: { with: GelMaterializedViewWithConfig | undefined; using: string | undefined; tablespace: string | undefined; withNoData: boolean | undefined; } | undefined; config: { name: TName; schema: string | undefined; selectedFields: ColumnsSelection; query: SQL | undefined; }; }) { super(config); this[GelMaterializedViewConfig] = { with: GelConfig?.with, using: GelConfig?.using, tablespace: GelConfig?.tablespace, withNoData: GelConfig?.withNoData, }; } } export type GelMaterializedViewWithSelection< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > = GelMaterializedView & TSelectedFields; /** @internal */ export function gelViewWithSchema( name: string, selection: Record | undefined, schema: string | undefined, ): ViewBuilder | ManualViewBuilder { if (selection) { return new ManualViewBuilder(name, selection, schema); } return new ViewBuilder(name, schema); } /** @internal */ export function gelMaterializedViewWithSchema( name: string, selection: Record | undefined, schema: string | undefined, ): MaterializedViewBuilder | ManualMaterializedViewBuilder { if (selection) { return new ManualMaterializedViewBuilder(name, selection, schema); } return new MaterializedViewBuilder(name, schema); } // TODO not implemented // eslint-disable-next-line @typescript-eslint/no-unused-vars function gelView(name: TName): ViewBuilder; function gelView>( name: TName, columns: TColumns, ): ManualViewBuilder; function gelView(name: string, columns?: Record): ViewBuilder | ManualViewBuilder { return gelViewWithSchema(name, columns, undefined); } // TODO not implemented // eslint-disable-next-line @typescript-eslint/no-unused-vars function gelMaterializedView(name: TName): MaterializedViewBuilder; function gelMaterializedView>( name: TName, columns: TColumns, ): ManualMaterializedViewBuilder; function gelMaterializedView( name: string, columns?: Record, ): MaterializedViewBuilder | ManualMaterializedViewBuilder { return gelMaterializedViewWithSchema(name, columns, undefined); } // eslint-disable-next-line @typescript-eslint/no-unused-vars function isGelView(obj: unknown): obj is GelView { return is(obj, GelView); } // eslint-disable-next-line @typescript-eslint/no-unused-vars function isGelMaterializedView(obj: unknown): obj is GelMaterializedView { return is(obj, GelMaterializedView); } ================================================ FILE: drizzle-orm/src/index.ts ================================================ export * from './alias.ts'; export * from './column-builder.ts'; export * from './column.ts'; export * from './entity.ts'; export * from './errors.ts'; export * from './logger.ts'; export * from './operations.ts'; export * from './query-promise.ts'; export * from './relations.ts'; export * from './sql/index.ts'; export * from './subquery.ts'; export * from './table.ts'; export * from './utils.ts'; export * from './view-common.ts'; ================================================ FILE: drizzle-orm/src/knex/README.md ================================================ # Drizzle ORM + Knex.js This is a toolchain for integrating Drizzle with [Knex.js](https://knexjs.org/). ## Using Knex as a query builder You can define you DB schema using Drizzle and use Knex as the query builder. With this approach you benefit from schema definition and automated migrations provided by Drizzle, and you can use Knex to build your queries. This might be helpful if you have an existing Knex.js project and you want to add Drizzle features to it. This integration is based on [official Knex TypeScript guide](https://knexjs.org/guide/#typescript). ```ts import Knex from 'knex'; import { pgTable, serial, text } from 'drizzle-orm/pg-core'; // This line is important - it allows you to use the Knexify type import 'drizzle-orm/knex'; const test = pgTable('test', { id: serial('id').primaryKey(), name: text('name').notNull(), }); declare module 'knex/types/tables' { interface Tables { test: Knexify; } } const db = Knex({}); const result/*: { id: number, name: string }[] */ = db('test').select(); ``` ## Wrapping Knex connection with Drizzle Coming soon! ================================================ FILE: drizzle-orm/src/knex/index.ts ================================================ import type { Knex as KnexType } from 'knex'; import type { InferInsertModel, InferSelectModel, Table } from '~/table.ts'; declare module 'knex/types/tables.ts' { export type Knexify = & KnexType.CompositeTableType< InferSelectModel, InferInsertModel > & {}; } ================================================ FILE: drizzle-orm/src/kysely/README.md ================================================ # Drizzle ORM + Kysely This is a toolchain for integrating Drizzle with [Kysely](https://kysely-org.github.io/kysely/). ## Using Kysely as a query builder You can define you DB schema using Drizzle and use Kysely as the query builder. With this approach you benefit from schema definition and automated migrations provided by Drizzle, and you can use Kysely to build your queries. This might be helpful if you have an existing Kysely project and you want to add Drizzle features to it. ```ts import { Kysely, PostgresDialect } from 'kysely'; import { Pool } from 'pg'; import { Kyselify } from 'drizzle-orm/kysely'; import { pgTable, serial, text } from 'drizzle-orm/pg-core'; const test = pgTable('test', { id: serial('id').primaryKey(), name: text('name').notNull(), }); interface Database { test: Kyselify; } const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool(), }), }); const result/*: { id: number, name: string }[] */ = db.selectFrom('test').selectAll().execute(); ``` ================================================ FILE: drizzle-orm/src/kysely/index.ts ================================================ import type { ColumnType } from 'kysely'; import type { InferInsertModel, InferSelectModel, MapColumnName, Table } from '~/table.ts'; import type { Simplify } from '~/utils.ts'; export type Kyselify = Simplify< { [Key in keyof T['_']['columns'] & string as MapColumnName]: ColumnType< // select InferSelectModel[MapColumnName], // insert MapColumnName extends keyof InferInsertModel< T, { dbColumnNames: true } > ? InferInsertModel[MapColumnName] : never, // update MapColumnName extends keyof InferInsertModel< T, { dbColumnNames: true } > ? InferInsertModel[MapColumnName] : never >; } >; ================================================ FILE: drizzle-orm/src/libsql/driver-core.ts ================================================ import type { Client, ResultSet } from '@libsql/client'; import type { BatchItem, BatchResponse } from '~/batch.ts'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type ExtractTablesWithRelations, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts'; import type { DrizzleConfig } from '~/utils.ts'; import { LibSQLSession } from './session.ts'; export class LibSQLDatabase< TSchema extends Record = Record, > extends BaseSQLiteDatabase<'async', ResultSet, TSchema> { static override readonly [entityKind]: string = 'LibSQLDatabase'; /** @internal */ declare readonly session: LibSQLSession>; async batch, T extends Readonly<[U, ...U[]]>>( batch: T, ): Promise> { return this.session.batch(batch) as Promise>; } } /** @internal */ export function construct< TSchema extends Record = Record, >(client: Client, config: DrizzleConfig = {}): LibSQLDatabase & { $client: Client; } { const dialect = new SQLiteAsyncDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new LibSQLSession(client, dialect, schema, { logger, cache: config.cache }, undefined); const db = new LibSQLDatabase('async', dialect, session, schema) as LibSQLDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } ================================================ FILE: drizzle-orm/src/libsql/driver.ts ================================================ import { type Client, type Config, createClient } from '@libsql/client'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { construct as construct, type LibSQLDatabase } from './driver-core.ts'; export { LibSQLDatabase } from './driver-core.ts'; export function drizzle< TSchema extends Record = Record, TClient extends Client = Client, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | Config; } | { client: TClient; }) ), ] ): LibSQLDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = createClient({ url: params[0], }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: Config; client?: TClient } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): LibSQLDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/libsql/http/index.ts ================================================ import { type Client, type Config, createClient } from '@libsql/client/http'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< TSchema extends Record = Record, TClient extends Client = Client, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | Config; } | { client: TClient; }) ), ] ): LibSQLDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = createClient({ url: params[0], }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: Config; client?: TClient } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): LibSQLDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/libsql/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/libsql/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; import type { LibSQLDatabase } from './driver.ts'; export async function migrate>( db: LibSQLDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; const migrationTableCreate = sql` CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} ( id SERIAL PRIMARY KEY, hash text NOT NULL, created_at numeric ) `; await db.session.run(migrationTableCreate); const dbMigrations = await db.values<[number, string, string]>( sql`SELECT id, hash, created_at FROM ${sql.identifier(migrationsTable)} ORDER BY created_at DESC LIMIT 1`, ); const lastDbMigration = dbMigrations[0] ?? undefined; const statementToBatch = []; for (const migration of migrations) { if (!lastDbMigration || Number(lastDbMigration[2])! < migration.folderMillis) { for (const stmt of migration.sql) { statementToBatch.push(db.run(sql.raw(stmt))); } statementToBatch.push( db.run( sql`INSERT INTO ${ sql.identifier(migrationsTable) } ("hash", "created_at") VALUES(${migration.hash}, ${migration.folderMillis})`, ), ); } } await db.session.migrate(statementToBatch); } ================================================ FILE: drizzle-orm/src/libsql/node/index.ts ================================================ import { type Client, type Config, createClient } from '@libsql/client/node'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< TSchema extends Record = Record, TClient extends Client = Client, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | Config; } | { client: TClient; }) ), ] ): LibSQLDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = createClient({ url: params[0], }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: Config; client?: TClient } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): LibSQLDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/libsql/session.ts ================================================ import type { Client, InArgs, InStatement, ResultSet, Transaction } from '@libsql/client'; import type { BatchItem as BatchItem } from '~/batch.ts'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import type { PreparedQuery } from '~/session.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; import type { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts'; import { SQLiteTransaction } from '~/sqlite-core/index.ts'; import type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts'; import type { PreparedQueryConfig as PreparedQueryConfigBase, SQLiteExecuteMethod, SQLiteTransactionConfig, } from '~/sqlite-core/session.ts'; import { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts'; import { mapResultRow } from '~/utils.ts'; export interface LibSQLSessionOptions { logger?: Logger; cache?: Cache; } type PreparedQueryConfig = Omit; export class LibSQLSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'async', ResultSet, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'LibSQLSession'; private logger: Logger; private cache: Cache; constructor( private client: Client, dialect: SQLiteAsyncDialect, private schema: RelationalSchemaConfig | undefined, private options: LibSQLSessionOptions, private tx: Transaction | undefined, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery>( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => unknown, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): LibSQLPreparedQuery { return new LibSQLPreparedQuery( this.client, query, this.logger, this.cache, queryMetadata, cacheConfig, fields, this.tx, executeMethod, isResponseInArrayMode, customResultMapper, ); } async batch[] | readonly BatchItem<'sqlite'>[]>(queries: T) { const preparedQueries: PreparedQuery[] = []; const builtQueries: InStatement[] = []; for (const query of queries) { const preparedQuery = query._prepare(); const builtQuery = preparedQuery.getQuery(); preparedQueries.push(preparedQuery); builtQueries.push({ sql: builtQuery.sql, args: builtQuery.params as InArgs }); } const batchResults = await this.client.batch(builtQueries); return batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true)); } async migrate[] | readonly BatchItem<'sqlite'>[]>(queries: T) { const preparedQueries: PreparedQuery[] = []; const builtQueries: InStatement[] = []; for (const query of queries) { const preparedQuery = query._prepare(); const builtQuery = preparedQuery.getQuery(); preparedQueries.push(preparedQuery); builtQueries.push({ sql: builtQuery.sql, args: builtQuery.params as InArgs }); } const batchResults = await this.client.migrate(builtQueries); return batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true)); } override async transaction( transaction: (db: LibSQLTransaction) => T | Promise, _config?: SQLiteTransactionConfig, ): Promise { // TODO: support transaction behavior const libsqlTx = await this.client.transaction(); const session = new LibSQLSession( this.client, this.dialect, this.schema, this.options, libsqlTx, ); const tx = new LibSQLTransaction('async', this.dialect, session, this.schema); try { const result = await transaction(tx); await libsqlTx.commit(); return result; } catch (err) { await libsqlTx.rollback(); throw err; } } override extractRawAllValueFromBatchResult(result: unknown): unknown { return (result as ResultSet).rows; } override extractRawGetValueFromBatchResult(result: unknown): unknown { return (result as ResultSet).rows[0]; } override extractRawValuesValueFromBatchResult(result: unknown): unknown { return (result as ResultSet).rows; } } export class LibSQLTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'async', ResultSet, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'LibSQLTransaction'; override async transaction(transaction: (tx: LibSQLTransaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex}`; const tx = new LibSQLTransaction('async', this.dialect, this.session, this.schema, this.nestedIndex + 1); await this.session.run(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await this.session.run(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { await this.session.run(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export class LibSQLPreparedQuery extends SQLitePreparedQuery< { type: 'async'; run: ResultSet; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { static override readonly [entityKind]: string = 'LibSQLPreparedQuery'; constructor( private client: Client, query: Query, private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, /** @internal */ public fields: SelectedFieldsOrdered | undefined, private tx: Transaction | undefined, executeMethod: SQLiteExecuteMethod, private _isResponseInArrayMode: boolean, /** @internal */ public customResultMapper?: ( rows: unknown[][], mapColumnValue?: (value: unknown) => unknown, ) => unknown, ) { super('async', executeMethod, query, cache, queryMetadata, cacheConfig); this.customResultMapper = customResultMapper; this.fields = fields; } async run(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return await this.queryWithCache(this.query.sql, params, async () => { const stmt: InStatement = { sql: this.query.sql, args: params as InArgs }; return this.tx ? this.tx.execute(stmt) : this.client.execute(stmt); }); } async all(placeholderValues?: Record): Promise { const { fields, logger, query, tx, client, customResultMapper } = this; if (!fields && !customResultMapper) { const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); return await this.queryWithCache(query.sql, params, async () => { const stmt: InStatement = { sql: query.sql, args: params as InArgs }; return (tx ? tx.execute(stmt) : client.execute(stmt)).then(({ rows }) => this.mapAllResult(rows)); }); } const rows = await this.values(placeholderValues) as unknown[][]; return this.mapAllResult(rows); } override mapAllResult(rows: unknown, isFromBatch?: boolean): unknown { if (isFromBatch) { rows = (rows as ResultSet).rows; } if (!this.fields && !this.customResultMapper) { return (rows as unknown[]).map((row) => normalizeRow(row)); } if (this.customResultMapper) { return this.customResultMapper(rows as unknown[][], normalizeFieldValue) as T['all']; } return (rows as unknown[]).map((row) => { return mapResultRow( this.fields!, Array.prototype.slice.call(row).map((v) => normalizeFieldValue(v)), this.joinsNotNullableMap, ); }); } async get(placeholderValues?: Record): Promise { const { fields, logger, query, tx, client, customResultMapper } = this; if (!fields && !customResultMapper) { const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); return await this.queryWithCache(query.sql, params, async () => { const stmt: InStatement = { sql: query.sql, args: params as InArgs }; return (tx ? tx.execute(stmt) : client.execute(stmt)).then(({ rows }) => this.mapGetResult(rows)); }); } const rows = await this.values(placeholderValues) as unknown[][]; return this.mapGetResult(rows); } override mapGetResult(rows: unknown, isFromBatch?: boolean): unknown { if (isFromBatch) { rows = (rows as ResultSet).rows; } const row = (rows as unknown[])[0]; if (!this.fields && !this.customResultMapper) { return normalizeRow(row); } if (!row) { return undefined; } if (this.customResultMapper) { return this.customResultMapper(rows as unknown[][], normalizeFieldValue) as T['get']; } return mapResultRow( this.fields!, Array.prototype.slice.call(row).map((v) => normalizeFieldValue(v)), this.joinsNotNullableMap, ); } async values(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return await this.queryWithCache(this.query.sql, params, async () => { const stmt: InStatement = { sql: this.query.sql, args: params as InArgs }; return (this.tx ? this.tx.execute(stmt) : this.client.execute(stmt)).then(({ rows }) => rows) as Promise< T['values'] >; }); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } function normalizeRow(obj: any) { // The libSQL node-sqlite3 compatibility wrapper returns rows // that can be accessed both as objects and arrays. Let's // turn them into objects what's what other SQLite drivers // do. return Object.keys(obj).reduce((acc: Record, key) => { if (Object.prototype.propertyIsEnumerable.call(obj, key)) { acc[key] = obj[key]; } return acc; }, {}); } function normalizeFieldValue(value: unknown) { if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) { // eslint-disable-line no-instanceof/no-instanceof if (typeof Buffer !== 'undefined') { if (!(value instanceof Buffer)) { // eslint-disable-line no-instanceof/no-instanceof return Buffer.from(value); } return value; } if (typeof TextDecoder !== 'undefined') { return new TextDecoder().decode(value); } throw new Error('TextDecoder is not available. Please provide either Buffer or TextDecoder polyfill.'); } return value; } ================================================ FILE: drizzle-orm/src/libsql/sqlite3/index.ts ================================================ import { type Client, type Config, createClient } from '@libsql/client/sqlite3'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< TSchema extends Record = Record, TClient extends Client = Client, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | Config; } | { client: TClient; }) ), ] ): LibSQLDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = createClient({ url: params[0], }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: Config; client?: TClient } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): LibSQLDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/libsql/wasm/index.ts ================================================ import { type Client, type Config, createClient } from '@libsql/client-wasm'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< TSchema extends Record = Record, TClient extends Client = Client, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | Config; } | { client: TClient; }) ), ] ): LibSQLDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = createClient({ url: params[0], }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: Config; client?: TClient } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): LibSQLDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/libsql/web/index.ts ================================================ import { type Client, type Config, createClient } from '@libsql/client/web'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< TSchema extends Record = Record, TClient extends Client = Client, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | Config; } | { client: TClient; }) ), ] ): LibSQLDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = createClient({ url: params[0], }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: Config; client?: TClient } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): LibSQLDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/libsql/ws/index.ts ================================================ import { type Client, type Config, createClient } from '@libsql/client/ws'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { construct, type LibSQLDatabase } from '../driver-core.ts'; export function drizzle< TSchema extends Record = Record, TClient extends Client = Client, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | Config; } | { client: TClient; }) ), ] ): LibSQLDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = createClient({ url: params[0], }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: Config; client?: TClient } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; const instance = typeof connection === 'string' ? createClient({ url: connection }) : createClient(connection!); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): LibSQLDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/logger.ts ================================================ import { entityKind } from '~/entity.ts'; export interface Logger { logQuery(query: string, params: unknown[]): void; } export interface LogWriter { write(message: string): void; } export class ConsoleLogWriter implements LogWriter { static readonly [entityKind]: string = 'ConsoleLogWriter'; write(message: string) { console.log(message); } } export class DefaultLogger implements Logger { static readonly [entityKind]: string = 'DefaultLogger'; readonly writer: LogWriter; constructor(config?: { writer: LogWriter }) { this.writer = config?.writer ?? new ConsoleLogWriter(); } logQuery(query: string, params: unknown[]): void { const stringifiedParams = params.map((p) => { try { return JSON.stringify(p); } catch { return String(p); } }); const paramsStr = stringifiedParams.length ? ` -- params: [${stringifiedParams.join(', ')}]` : ''; this.writer.write(`Query: ${query}${paramsStr}`); } } export class NoopLogger implements Logger { static readonly [entityKind]: string = 'NoopLogger'; logQuery(): void { // noop } } ================================================ FILE: drizzle-orm/src/migrator.ts ================================================ import crypto from 'node:crypto'; import fs from 'node:fs'; export interface KitConfig { out: string; schema: string; } export interface MigrationConfig { migrationsFolder: string; migrationsTable?: string; migrationsSchema?: string; } export interface MigrationMeta { sql: string[]; folderMillis: number; hash: string; bps: boolean; } export function readMigrationFiles(config: MigrationConfig): MigrationMeta[] { const migrationFolderTo = config.migrationsFolder; const migrationQueries: MigrationMeta[] = []; const journalPath = `${migrationFolderTo}/meta/_journal.json`; if (!fs.existsSync(journalPath)) { throw new Error(`Can't find meta/_journal.json file`); } const journalAsString = fs.readFileSync(`${migrationFolderTo}/meta/_journal.json`).toString(); const journal = JSON.parse(journalAsString) as { entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; }; for (const journalEntry of journal.entries) { const migrationPath = `${migrationFolderTo}/${journalEntry.tag}.sql`; try { const query = fs.readFileSync(`${migrationFolderTo}/${journalEntry.tag}.sql`).toString(); const result = query.split('--> statement-breakpoint').map((it) => { return it; }); migrationQueries.push({ sql: result, bps: journalEntry.breakpoints, folderMillis: journalEntry.when, hash: crypto.createHash('sha256').update(query).digest('hex'), }); } catch { throw new Error(`No file ${migrationPath} found in ${migrationFolderTo} folder`); } } return migrationQueries; } ================================================ FILE: drizzle-orm/src/mysql-core/alias.ts ================================================ import { TableAliasProxyHandler } from '~/alias.ts'; import type { BuildAliasTable } from './query-builders/select.types.ts'; import type { MySqlTable } from './table.ts'; import type { MySqlViewBase } from './view-base.ts'; export function alias( table: TTable, alias: TAlias, ): BuildAliasTable { return new Proxy(table, new TableAliasProxyHandler(alias, false)) as any; } ================================================ FILE: drizzle-orm/src/mysql-core/checks.ts ================================================ import { entityKind } from '~/entity.ts'; import type { SQL } from '~/sql/sql.ts'; import type { MySqlTable } from './table.ts'; export class CheckBuilder { static readonly [entityKind]: string = 'MySqlCheckBuilder'; protected brand!: 'MySqlConstraintBuilder'; constructor(public name: string, public value: SQL) {} /** @internal */ build(table: MySqlTable): Check { return new Check(table, this); } } export class Check { static readonly [entityKind]: string = 'MySqlCheck'; readonly name: string; readonly value: SQL; constructor(public table: MySqlTable, builder: CheckBuilder) { this.name = builder.name; this.value = builder.value; } } export function check(name: string, value: SQL): CheckBuilder { return new CheckBuilder(name, value); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/all.ts ================================================ import { bigint } from './bigint.ts'; import { binary } from './binary.ts'; import { boolean } from './boolean.ts'; import { char } from './char.ts'; import { customType } from './custom.ts'; import { date } from './date.ts'; import { datetime } from './datetime.ts'; import { decimal } from './decimal.ts'; import { double } from './double.ts'; import { mysqlEnum } from './enum.ts'; import { float } from './float.ts'; import { int } from './int.ts'; import { json } from './json.ts'; import { mediumint } from './mediumint.ts'; import { real } from './real.ts'; import { serial } from './serial.ts'; import { smallint } from './smallint.ts'; import { longtext, mediumtext, text, tinytext } from './text.ts'; import { time } from './time.ts'; import { timestamp } from './timestamp.ts'; import { tinyint } from './tinyint.ts'; import { varbinary } from './varbinary.ts'; import { varchar } from './varchar.ts'; import { year } from './year.ts'; export function getMySqlColumnBuilders() { return { bigint, binary, boolean, char, customType, date, datetime, decimal, double, mysqlEnum, float, int, json, mediumint, real, serial, smallint, text, time, timestamp, tinyint, varbinary, varchar, year, longtext, mediumtext, tinytext, }; } export type MySqlColumnBuilders = ReturnType; ================================================ FILE: drizzle-orm/src/mysql-core/columns/bigint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export type MySqlBigInt53BuilderInitial = MySqlBigInt53Builder<{ name: TName; dataType: 'number'; columnType: 'MySqlBigInt53'; data: number; driverParam: number | string; enumValues: undefined; }>; export class MySqlBigInt53Builder> extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlBigInt53Builder'; constructor(name: T['name'], unsigned: boolean = false) { super(name, 'number', 'MySqlBigInt53'); this.config.unsigned = unsigned; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlBigInt53> { return new MySqlBigInt53>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlBigInt53> extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlBigInt53'; getSQLType(): string { return `bigint${this.config.unsigned ? ' unsigned' : ''}`; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'number') { return value; } return Number(value); } } export type MySqlBigInt64BuilderInitial = MySqlBigInt64Builder<{ name: TName; dataType: 'bigint'; columnType: 'MySqlBigInt64'; data: bigint; driverParam: string; enumValues: undefined; }>; export class MySqlBigInt64Builder> extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlBigInt64Builder'; constructor(name: T['name'], unsigned: boolean = false) { super(name, 'bigint', 'MySqlBigInt64'); this.config.unsigned = unsigned; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlBigInt64> { return new MySqlBigInt64>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlBigInt64> extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlBigInt64'; getSQLType(): string { return `bigint${this.config.unsigned ? ' unsigned' : ''}`; } // eslint-disable-next-line unicorn/prefer-native-coercion-functions override mapFromDriverValue(value: string): bigint { return BigInt(value); } } export interface MySqlBigIntConfig { mode: T; unsigned?: boolean; } export function bigint( config: MySqlBigIntConfig, ): TMode extends 'number' ? MySqlBigInt53BuilderInitial<''> : MySqlBigInt64BuilderInitial<''>; export function bigint( name: TName, config: MySqlBigIntConfig, ): TMode extends 'number' ? MySqlBigInt53BuilderInitial : MySqlBigInt64BuilderInitial; export function bigint(a?: string | MySqlBigIntConfig, b?: MySqlBigIntConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (config.mode === 'number') { return new MySqlBigInt53Builder(name, config.unsigned); } return new MySqlBigInt64Builder(name, config.unsigned); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/binary.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type MySqlBinaryBuilderInitial = MySqlBinaryBuilder<{ name: TName; dataType: 'string'; columnType: 'MySqlBinary'; data: string; driverParam: string; enumValues: undefined; }>; export class MySqlBinaryBuilder> extends MySqlColumnBuilder< T, MySqlBinaryConfig > { static override readonly [entityKind]: string = 'MySqlBinaryBuilder'; constructor(name: T['name'], length: number | undefined) { super(name, 'string', 'MySqlBinary'); this.config.length = length; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlBinary> { return new MySqlBinary>(table, this.config as ColumnBuilderRuntimeConfig); } } export class MySqlBinary> extends MySqlColumn< T, MySqlBinaryConfig > { static override readonly [entityKind]: string = 'MySqlBinary'; length: number | undefined = this.config.length; override mapFromDriverValue(value: string | Buffer | Uint8Array): string { if (typeof value === 'string') return value; if (Buffer.isBuffer(value)) return value.toString(); const str: string[] = []; for (const v of value) { str.push(v === 49 ? '1' : '0'); } return str.join(''); } getSQLType(): string { return this.length === undefined ? `binary` : `binary(${this.length})`; } } export interface MySqlBinaryConfig { length?: number; } export function binary(): MySqlBinaryBuilderInitial<''>; export function binary( config?: MySqlBinaryConfig, ): MySqlBinaryBuilderInitial<''>; export function binary( name: TName, config?: MySqlBinaryConfig, ): MySqlBinaryBuilderInitial; export function binary(a?: string | MySqlBinaryConfig, b: MySqlBinaryConfig = {}) { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlBinaryBuilder(name, config.length); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/boolean.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type MySqlBooleanBuilderInitial = MySqlBooleanBuilder<{ name: TName; dataType: 'boolean'; columnType: 'MySqlBoolean'; data: boolean; driverParam: number | boolean; enumValues: undefined; }>; export class MySqlBooleanBuilder> extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlBooleanBuilder'; constructor(name: T['name']) { super(name, 'boolean', 'MySqlBoolean'); } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlBoolean> { return new MySqlBoolean>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlBoolean> extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlBoolean'; getSQLType(): string { return 'boolean'; } override mapFromDriverValue(value: number | boolean): boolean { if (typeof value === 'boolean') { return value; } return value === 1; } } export function boolean(): MySqlBooleanBuilderInitial<''>; export function boolean(name: TName): MySqlBooleanBuilderInitial; export function boolean(name?: string) { return new MySqlBooleanBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/char.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type MySqlCharBuilderInitial< TName extends string, TEnum extends [string, ...string[]], TLength extends number | undefined, > = MySqlCharBuilder<{ name: TName; dataType: 'string'; columnType: 'MySqlChar'; data: TEnum[number]; driverParam: number | string; enumValues: TEnum; length: TLength; }>; export class MySqlCharBuilder< T extends ColumnBuilderBaseConfig<'string', 'MySqlChar'> & { length?: number | undefined }, > extends MySqlColumnBuilder< T, MySqlCharConfig, { length: T['length'] } > { static override readonly [entityKind]: string = 'MySqlCharBuilder'; constructor(name: T['name'], config: MySqlCharConfig) { super(name, 'string', 'MySqlChar'); this.config.length = config.length; this.config.enum = config.enum; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlChar & { length: T['length']; enumValues: T['enumValues'] }> { return new MySqlChar & { length: T['length']; enumValues: T['enumValues'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlChar & { length?: number | undefined }> extends MySqlColumn, { length: T['length'] }> { static override readonly [entityKind]: string = 'MySqlChar'; readonly length: T['length'] = this.config.length; override readonly enumValues = this.config.enum; getSQLType(): string { return this.length === undefined ? `char` : `char(${this.length})`; } } export interface MySqlCharConfig< TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, TLength extends number | undefined = number | undefined, > { enum?: TEnum; length?: TLength; } export function char(): MySqlCharBuilderInitial<'', [string, ...string[]], undefined>; export function char, L extends number | undefined>( config?: MySqlCharConfig, L>, ): MySqlCharBuilderInitial<'', Writable, L>; export function char< TName extends string, U extends string, T extends Readonly<[U, ...U[]]>, L extends number | undefined, >( name: TName, config?: MySqlCharConfig, L>, ): MySqlCharBuilderInitial, L>; export function char(a?: string | MySqlCharConfig, b: MySqlCharConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlCharBuilder(name, config as any); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/common.ts ================================================ import { ColumnBuilder } from '~/column-builder.ts'; import type { ColumnBuilderBase, ColumnBuilderBaseConfig, ColumnBuilderExtraConfig, ColumnBuilderRuntimeConfig, ColumnDataType, HasDefault, HasGenerated, IsAutoincrement, MakeColumnConfig, } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { Column } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { ForeignKey, UpdateDeleteAction } from '~/mysql-core/foreign-keys.ts'; import { ForeignKeyBuilder } from '~/mysql-core/foreign-keys.ts'; import type { AnyMySqlTable, MySqlTable } from '~/mysql-core/table.ts'; import type { SQL } from '~/sql/sql.ts'; import type { Update } from '~/utils.ts'; import { uniqueKeyName } from '../unique-constraint.ts'; export interface ReferenceConfig { ref: () => MySqlColumn; actions: { onUpdate?: UpdateDeleteAction; onDelete?: UpdateDeleteAction; }; } export interface MySqlColumnBuilderBase< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TTypeConfig extends object = object, > extends ColumnBuilderBase {} export interface MySqlGeneratedColumnConfig { mode?: 'virtual' | 'stored'; } export abstract class MySqlColumnBuilder< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig & { data: any; }, TRuntimeConfig extends object = object, TTypeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, > extends ColumnBuilder implements MySqlColumnBuilderBase { static override readonly [entityKind]: string = 'MySqlColumnBuilder'; private foreignKeyConfigs: ReferenceConfig[] = []; references(ref: ReferenceConfig['ref'], actions: ReferenceConfig['actions'] = {}): this { this.foreignKeyConfigs.push({ ref, actions }); return this; } unique(name?: string): this { this.config.isUnique = true; this.config.uniqueName = name; return this; } generatedAlwaysAs(as: SQL | T['data'] | (() => SQL), config?: MySqlGeneratedColumnConfig): HasGenerated { this.config.generated = { as, type: 'always', mode: config?.mode ?? 'virtual', }; return this as any; } /** @internal */ buildForeignKeys(column: MySqlColumn, table: MySqlTable): ForeignKey[] { return this.foreignKeyConfigs.map(({ ref, actions }) => { return ((ref, actions) => { const builder = new ForeignKeyBuilder(() => { const foreignColumn = ref(); return { columns: [column], foreignColumns: [foreignColumn] }; }); if (actions.onUpdate) { builder.onUpdate(actions.onUpdate); } if (actions.onDelete) { builder.onDelete(actions.onDelete); } return builder.build(table); })(ref, actions); }); } /** @internal */ abstract build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlColumn>; } // To understand how to use `MySqlColumn` and `AnyMySqlColumn`, see `Column` and `AnyColumn` documentation. export abstract class MySqlColumn< T extends ColumnBaseConfig = ColumnBaseConfig, TRuntimeConfig extends object = {}, TTypeConfig extends object = {}, > extends Column { static override readonly [entityKind]: string = 'MySqlColumn'; constructor( override readonly table: MySqlTable, config: ColumnBuilderRuntimeConfig, ) { if (!config.uniqueName) { config.uniqueName = uniqueKeyName(table, [config.name]); } super(table, config); } } export type AnyMySqlColumn> = {}> = MySqlColumn< Required, TPartial>> >; export interface MySqlColumnWithAutoIncrementConfig { autoIncrement: boolean; } export abstract class MySqlColumnBuilderWithAutoIncrement< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, > extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlColumnBuilderWithAutoIncrement'; constructor(name: NonNullable, dataType: T['dataType'], columnType: T['columnType']) { super(name, dataType, columnType); this.config.autoIncrement = false; } autoincrement(): IsAutoincrement> { this.config.autoIncrement = true; this.config.hasDefault = true; return this as IsAutoincrement>; } } export abstract class MySqlColumnWithAutoIncrement< T extends ColumnBaseConfig = ColumnBaseConfig, TRuntimeConfig extends object = object, > extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlColumnWithAutoIncrement'; readonly autoIncrement: boolean = this.config.autoIncrement; } ================================================ FILE: drizzle-orm/src/mysql-core/columns/custom.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import type { SQL } from '~/sql/sql.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type ConvertCustomConfig> = & { name: TName; dataType: 'custom'; columnType: 'MySqlCustomColumn'; data: T['data']; driverParam: T['driverData']; enumValues: undefined; } & (T['notNull'] extends true ? { notNull: true } : {}) & (T['default'] extends true ? { hasDefault: true } : {}); export interface MySqlCustomColumnInnerConfig { customTypeValues: CustomTypeValues; } export class MySqlCustomColumnBuilder> extends MySqlColumnBuilder< T, { fieldConfig: CustomTypeValues['config']; customTypeParams: CustomTypeParams; }, { mysqlColumnBuilderBrand: 'MySqlCustomColumnBuilderBrand'; } > { static override readonly [entityKind]: string = 'MySqlCustomColumnBuilder'; constructor( name: T['name'], fieldConfig: CustomTypeValues['config'], customTypeParams: CustomTypeParams, ) { super(name, 'custom', 'MySqlCustomColumn'); this.config.fieldConfig = fieldConfig; this.config.customTypeParams = customTypeParams; } /** @internal */ build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlCustomColumn> { return new MySqlCustomColumn>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlCustomColumn> extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlCustomColumn'; private sqlName: string; private mapTo?: (value: T['data']) => T['driverParam']; private mapFrom?: (value: T['driverParam']) => T['data']; constructor( table: AnyMySqlTable<{ name: T['tableName'] }>, config: MySqlCustomColumnBuilder['config'], ) { super(table, config); this.sqlName = config.customTypeParams.dataType(config.fieldConfig); this.mapTo = config.customTypeParams.toDriver; this.mapFrom = config.customTypeParams.fromDriver; } getSQLType(): string { return this.sqlName; } override mapFromDriverValue(value: T['driverParam']): T['data'] { return typeof this.mapFrom === 'function' ? this.mapFrom(value) : value as T['data']; } override mapToDriverValue(value: T['data']): T['driverParam'] { return typeof this.mapTo === 'function' ? this.mapTo(value) : value as T['data']; } } export type CustomTypeValues = { /** * Required type for custom column, that will infer proper type model * * Examples: * * If you want your column to be `string` type after selecting/or on inserting - use `data: string`. Like `text`, `varchar` * * If you want your column to be `number` type after selecting/or on inserting - use `data: number`. Like `integer` */ data: unknown; /** * Type helper, that represents what type database driver is accepting for specific database data type */ driverData?: unknown; /** * What config type should be used for {@link CustomTypeParams} `dataType` generation */ config?: Record; /** * Whether the config argument should be required or not * @default false */ configRequired?: boolean; /** * If your custom data type should be notNull by default you can use `notNull: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ notNull?: boolean; /** * If your custom data type has default you can use `default: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ default?: boolean; }; export interface CustomTypeParams { /** * Database data type string representation, that is used for migrations * @example * ``` * `jsonb`, `text` * ``` * * If database data type needs additional params you can use them from `config` param * @example * ``` * `varchar(256)`, `numeric(2,3)` * ``` * * To make `config` be of specific type please use config generic in {@link CustomTypeValues} * * @example * Usage example * ``` * dataType() { * return 'boolean'; * }, * ``` * Or * ``` * dataType(config) { * return typeof config.length !== 'undefined' ? `varchar(${config.length})` : `varchar`; * } * ``` */ dataType: (config: T['config'] | (Equal extends true ? never : undefined)) => string; /** * Optional mapping function, between user input and driver * @example * For example, when using jsonb we need to map JS/TS object to string before writing to database * ``` * toDriver(value: TData): string { * return JSON.stringify(value); * } * ``` */ toDriver?: (value: T['data']) => T['driverData'] | SQL; /** * Optional mapping function, that is responsible for data mapping from database to JS/TS code * @example * For example, when using timestamp we need to map string Date representation to JS Date * ``` * fromDriver(value: string): Date { * return new Date(value); * }, * ``` */ fromDriver?: (value: T['driverData']) => T['data']; } /** * Custom mysql database data type generator */ export function customType( customTypeParams: CustomTypeParams, ): Equal extends true ? { & T['config']>( fieldConfig: TConfig, ): MySqlCustomColumnBuilder>; ( dbName: TName, fieldConfig: T['config'], ): MySqlCustomColumnBuilder>; } : { (): MySqlCustomColumnBuilder>; & T['config']>( fieldConfig?: TConfig, ): MySqlCustomColumnBuilder>; ( dbName: TName, fieldConfig?: T['config'], ): MySqlCustomColumnBuilder>; } { return ( a?: TName | T['config'], b?: T['config'], ): MySqlCustomColumnBuilder> => { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlCustomColumnBuilder(name as ConvertCustomConfig['name'], config, customTypeParams); }; } ================================================ FILE: drizzle-orm/src/mysql-core/columns/date.common.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderExtraConfig, ColumnDataType, HasDefault, } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import { sql } from '~/sql/sql.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export interface MySqlDateColumnBaseConfig { hasOnUpdateNow: boolean; } export abstract class MySqlDateColumnBaseBuilder< T extends ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, > extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlDateColumnBuilder'; defaultNow() { return this.default(sql`(now())`); } // "on update now" also adds an implicit default value to the column - https://dev.mysql.com/doc/refman/8.0/en/timestamp-initialization.html onUpdateNow(): HasDefault { this.config.hasOnUpdateNow = true; this.config.hasDefault = true; return this as HasDefault; } } export abstract class MySqlDateBaseColumn< T extends ColumnBaseConfig, TRuntimeConfig extends object = object, > extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlDateColumn'; readonly hasOnUpdateNow: boolean = this.config.hasOnUpdateNow; } ================================================ FILE: drizzle-orm/src/mysql-core/columns/date.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type MySqlDateBuilderInitial = MySqlDateBuilder<{ name: TName; dataType: 'date'; columnType: 'MySqlDate'; data: Date; driverParam: string | number; enumValues: undefined; }>; export class MySqlDateBuilder> extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlDateBuilder'; constructor(name: T['name']) { super(name, 'date', 'MySqlDate'); } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlDate> { return new MySqlDate>(table, this.config as ColumnBuilderRuntimeConfig); } } export class MySqlDate> extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlDate'; constructor( table: AnyMySqlTable<{ name: T['tableName'] }>, config: MySqlDateBuilder['config'], ) { super(table, config); } getSQLType(): string { return `date`; } override mapFromDriverValue(value: string): Date { return new Date(value); } } export type MySqlDateStringBuilderInitial = MySqlDateStringBuilder<{ name: TName; dataType: 'string'; columnType: 'MySqlDateString'; data: string; driverParam: string | number; enumValues: undefined; }>; export class MySqlDateStringBuilder> extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlDateStringBuilder'; constructor(name: T['name']) { super(name, 'string', 'MySqlDateString'); } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlDateString> { return new MySqlDateString>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlDateString> extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlDateString'; constructor( table: AnyMySqlTable<{ name: T['tableName'] }>, config: MySqlDateStringBuilder['config'], ) { super(table, config); } getSQLType(): string { return `date`; } } export interface MySqlDateConfig { mode?: TMode; } export function date(): MySqlDateBuilderInitial<''>; export function date( config?: MySqlDateConfig, ): Equal extends true ? MySqlDateStringBuilderInitial<''> : MySqlDateBuilderInitial<''>; export function date( name: TName, config?: MySqlDateConfig, ): Equal extends true ? MySqlDateStringBuilderInitial : MySqlDateBuilderInitial; export function date(a?: string | MySqlDateConfig, b?: MySqlDateConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (config?.mode === 'string') { return new MySqlDateStringBuilder(name); } return new MySqlDateBuilder(name); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/datetime.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type MySqlDateTimeBuilderInitial = MySqlDateTimeBuilder<{ name: TName; dataType: 'date'; columnType: 'MySqlDateTime'; data: Date; driverParam: string | number; enumValues: undefined; }>; export class MySqlDateTimeBuilder> extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlDateTimeBuilder'; constructor(name: T['name'], config: MySqlDatetimeConfig | undefined) { super(name, 'date', 'MySqlDateTime'); this.config.fsp = config?.fsp; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlDateTime> { return new MySqlDateTime>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlDateTime> extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlDateTime'; readonly fsp: number | undefined; constructor( table: AnyMySqlTable<{ name: T['tableName'] }>, config: MySqlDateTimeBuilder['config'], ) { super(table, config); this.fsp = config.fsp; } getSQLType(): string { const precision = this.fsp === undefined ? '' : `(${this.fsp})`; return `datetime${precision}`; } override mapToDriverValue(value: Date): unknown { return value.toISOString().replace('T', ' ').replace('Z', ''); } override mapFromDriverValue(value: string): Date { return new Date(value.replace(' ', 'T') + 'Z'); } } export type MySqlDateTimeStringBuilderInitial = MySqlDateTimeStringBuilder<{ name: TName; dataType: 'string'; columnType: 'MySqlDateTimeString'; data: string; driverParam: string | number; enumValues: undefined; }>; export class MySqlDateTimeStringBuilder> extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlDateTimeStringBuilder'; constructor(name: T['name'], config: MySqlDatetimeConfig | undefined) { super(name, 'string', 'MySqlDateTimeString'); this.config.fsp = config?.fsp; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlDateTimeString> { return new MySqlDateTimeString>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlDateTimeString> extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlDateTimeString'; readonly fsp: number | undefined; constructor( table: AnyMySqlTable<{ name: T['tableName'] }>, config: MySqlDateTimeStringBuilder['config'], ) { super(table, config); this.fsp = config.fsp; } getSQLType(): string { const precision = this.fsp === undefined ? '' : `(${this.fsp})`; return `datetime${precision}`; } } export type DatetimeFsp = 0 | 1 | 2 | 3 | 4 | 5 | 6; export interface MySqlDatetimeConfig { mode?: TMode; fsp?: DatetimeFsp; } export function datetime(): MySqlDateTimeBuilderInitial<''>; export function datetime( config?: MySqlDatetimeConfig, ): Equal extends true ? MySqlDateTimeStringBuilderInitial<''> : MySqlDateTimeBuilderInitial<''>; export function datetime( name: TName, config?: MySqlDatetimeConfig, ): Equal extends true ? MySqlDateTimeStringBuilderInitial : MySqlDateTimeBuilderInitial; export function datetime(a?: string | MySqlDatetimeConfig, b?: MySqlDatetimeConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (config?.mode === 'string') { return new MySqlDateTimeStringBuilder(name, config); } return new MySqlDateTimeBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/decimal.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export type MySqlDecimalBuilderInitial = MySqlDecimalBuilder<{ name: TName; dataType: 'string'; columnType: 'MySqlDecimal'; data: string; driverParam: string; enumValues: undefined; }>; export class MySqlDecimalBuilder< T extends ColumnBuilderBaseConfig<'string', 'MySqlDecimal'>, > extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlDecimalBuilder'; constructor(name: T['name'], config: MySqlDecimalConfig | undefined) { super(name, 'string', 'MySqlDecimal'); this.config.precision = config?.precision; this.config.scale = config?.scale; this.config.unsigned = config?.unsigned; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlDecimal> { return new MySqlDecimal>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlDecimal> extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlDecimal'; readonly precision: number | undefined = this.config.precision; readonly scale: number | undefined = this.config.scale; readonly unsigned: boolean | undefined = this.config.unsigned; override mapFromDriverValue(value: unknown): string { if (typeof value === 'string') return value; return String(value); } getSQLType(): string { let type = ''; if (this.precision !== undefined && this.scale !== undefined) { type += `decimal(${this.precision},${this.scale})`; } else if (this.precision === undefined) { type += 'decimal'; } else { type += `decimal(${this.precision})`; } type = type === 'decimal(10,0)' || type === 'decimal(10)' ? 'decimal' : type; return this.unsigned ? `${type} unsigned` : type; } } export type MySqlDecimalNumberBuilderInitial = MySqlDecimalNumberBuilder<{ name: TName; dataType: 'number'; columnType: 'MySqlDecimalNumber'; data: number; driverParam: string; enumValues: undefined; }>; export class MySqlDecimalNumberBuilder< T extends ColumnBuilderBaseConfig<'number', 'MySqlDecimalNumber'>, > extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlDecimalNumberBuilder'; constructor(name: T['name'], config: MySqlDecimalConfig | undefined) { super(name, 'number', 'MySqlDecimalNumber'); this.config.precision = config?.precision; this.config.scale = config?.scale; this.config.unsigned = config?.unsigned; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlDecimalNumber> { return new MySqlDecimalNumber>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlDecimalNumber> extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlDecimalNumber'; readonly precision: number | undefined = this.config.precision; readonly scale: number | undefined = this.config.scale; readonly unsigned: boolean | undefined = this.config.unsigned; override mapFromDriverValue(value: unknown): number { if (typeof value === 'number') return value; return Number(value); } override mapToDriverValue = String; getSQLType(): string { let type = ''; if (this.precision !== undefined && this.scale !== undefined) { type += `decimal(${this.precision},${this.scale})`; } else if (this.precision === undefined) { type += 'decimal'; } else { type += `decimal(${this.precision})`; } type = type === 'decimal(10,0)' || type === 'decimal(10)' ? 'decimal' : type; return this.unsigned ? `${type} unsigned` : type; } } export type MySqlDecimalBigIntBuilderInitial = MySqlDecimalBigIntBuilder<{ name: TName; dataType: 'bigint'; columnType: 'MySqlDecimalBigInt'; data: bigint; driverParam: string; enumValues: undefined; }>; export class MySqlDecimalBigIntBuilder< T extends ColumnBuilderBaseConfig<'bigint', 'MySqlDecimalBigInt'>, > extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlDecimalBigIntBuilder'; constructor(name: T['name'], config: MySqlDecimalConfig | undefined) { super(name, 'bigint', 'MySqlDecimalBigInt'); this.config.precision = config?.precision; this.config.scale = config?.scale; this.config.unsigned = config?.unsigned; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlDecimalBigInt> { return new MySqlDecimalBigInt>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlDecimalBigInt> extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlDecimalBigInt'; readonly precision: number | undefined = this.config.precision; readonly scale: number | undefined = this.config.scale; readonly unsigned: boolean | undefined = this.config.unsigned; override mapFromDriverValue = BigInt; override mapToDriverValue = String; getSQLType(): string { let type = ''; if (this.precision !== undefined && this.scale !== undefined) { type += `decimal(${this.precision},${this.scale})`; } else if (this.precision === undefined) { type += 'decimal'; } else { type += `decimal(${this.precision})`; } type = type === 'decimal(10,0)' || type === 'decimal(10)' ? 'decimal' : type; return this.unsigned ? `${type} unsigned` : type; } } export interface MySqlDecimalConfig { precision?: number; scale?: number; unsigned?: boolean; mode?: T; } export function decimal(): MySqlDecimalBuilderInitial<''>; export function decimal( config: MySqlDecimalConfig, ): Equal extends true ? MySqlDecimalNumberBuilderInitial<''> : Equal extends true ? MySqlDecimalBigIntBuilderInitial<''> : MySqlDecimalBuilderInitial<''>; export function decimal( name: TName, config?: MySqlDecimalConfig, ): Equal extends true ? MySqlDecimalNumberBuilderInitial : Equal extends true ? MySqlDecimalBigIntBuilderInitial : MySqlDecimalBuilderInitial; export function decimal(a?: string | MySqlDecimalConfig, b: MySqlDecimalConfig = {}) { const { name, config } = getColumnNameAndConfig(a, b); const mode = config?.mode; return mode === 'number' ? new MySqlDecimalNumberBuilder(name, config) : mode === 'bigint' ? new MySqlDecimalBigIntBuilder(name, config) : new MySqlDecimalBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/double.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export type MySqlDoubleBuilderInitial = MySqlDoubleBuilder<{ name: TName; dataType: 'number'; columnType: 'MySqlDouble'; data: number; driverParam: number | string; enumValues: undefined; }>; export class MySqlDoubleBuilder> extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlDoubleBuilder'; constructor(name: T['name'], config: MySqlDoubleConfig | undefined) { super(name, 'number', 'MySqlDouble'); this.config.precision = config?.precision; this.config.scale = config?.scale; this.config.unsigned = config?.unsigned; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlDouble> { return new MySqlDouble>(table, this.config as ColumnBuilderRuntimeConfig); } } export class MySqlDouble> extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlDouble'; readonly precision: number | undefined = this.config.precision; readonly scale: number | undefined = this.config.scale; readonly unsigned: boolean | undefined = this.config.unsigned; getSQLType(): string { let type = ''; if (this.precision !== undefined && this.scale !== undefined) { type += `double(${this.precision},${this.scale})`; } else if (this.precision === undefined) { type += 'double'; } else { type += `double(${this.precision})`; } return this.unsigned ? `${type} unsigned` : type; } } export interface MySqlDoubleConfig { precision?: number; scale?: number; unsigned?: boolean; } export function double(): MySqlDoubleBuilderInitial<''>; export function double( config?: MySqlDoubleConfig, ): MySqlDoubleBuilderInitial<''>; export function double( name: TName, config?: MySqlDoubleConfig, ): MySqlDoubleBuilderInitial; export function double(a?: string | MySqlDoubleConfig, b?: MySqlDoubleConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlDoubleBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/enum.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import type { NonArray, Writable } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; // enum as string union export type MySqlEnumColumnBuilderInitial = MySqlEnumColumnBuilder<{ name: TName; dataType: 'string'; columnType: 'MySqlEnumColumn'; data: TEnum[number]; driverParam: string; enumValues: TEnum; }>; export class MySqlEnumColumnBuilder> extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlEnumColumnBuilder'; constructor(name: T['name'], values: T['enumValues']) { super(name, 'string', 'MySqlEnumColumn'); this.config.enumValues = values; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlEnumColumn & { enumValues: T['enumValues'] }> { return new MySqlEnumColumn & { enumValues: T['enumValues'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlEnumColumn> extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlEnumColumn'; override readonly enumValues = this.config.enumValues; getSQLType(): string { return `enum(${this.enumValues!.map((value) => `'${value}'`).join(',')})`; } } // enum as ts enum export type MySqlEnumObjectColumnBuilderInitial = MySqlEnumObjectColumnBuilder<{ name: TName; dataType: 'string'; columnType: 'MySqlEnumObjectColumn'; data: TEnum[keyof TEnum]; driverParam: string; enumValues: string[]; }>; export class MySqlEnumObjectColumnBuilder> extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlEnumObjectColumnBuilder'; constructor(name: T['name'], values: T['enumValues']) { super(name, 'string', 'MySqlEnumObjectColumn'); this.config.enumValues = values; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlEnumObjectColumn & { enumValues: T['enumValues'] }> { return new MySqlEnumObjectColumn & { enumValues: T['enumValues'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlEnumObjectColumn> extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlEnumObjectColumn'; override readonly enumValues = this.config.enumValues; getSQLType(): string { return `enum(${this.enumValues!.map((value) => `'${value}'`).join(',')})`; } } export function mysqlEnum>( values: T | Writable, ): MySqlEnumColumnBuilderInitial<'', Writable>; export function mysqlEnum>( name: TName, values: T | Writable, ): MySqlEnumColumnBuilderInitial>; export function mysqlEnum>( enumObj: NonArray, ): MySqlEnumObjectColumnBuilderInitial<'', E>; export function mysqlEnum>( name: TName, values: NonArray, ): MySqlEnumObjectColumnBuilderInitial; export function mysqlEnum( a?: string | readonly [string, ...string[]] | [string, ...string[]] | Record, b?: readonly [string, ...string[]] | [string, ...string[]] | Record, ): any { // if name + array or just array - it means we have string union passed if (typeof a === 'string' && Array.isArray(b) || Array.isArray(a)) { const name = typeof a === 'string' && a.length > 0 ? a : ''; const values = (typeof a === 'string' ? b : a) ?? []; if (values.length === 0) { throw new Error(`You have an empty array for "${name}" enum values`); } return new MySqlEnumColumnBuilder(name, values as any); } if (typeof a === 'string' && typeof b === 'object' || typeof a === 'object') { const name = typeof a === 'object' ? '' : a; const values = typeof a === 'object' ? Object.values(a) : typeof b === 'object' ? Object.values(b) : []; if (values.length === 0) { throw new Error(`You have an empty array for "${name}" enum values`); } return new MySqlEnumObjectColumnBuilder(name, values as any); } } ================================================ FILE: drizzle-orm/src/mysql-core/columns/float.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export type MySqlFloatBuilderInitial = MySqlFloatBuilder<{ name: TName; dataType: 'number'; columnType: 'MySqlFloat'; data: number; driverParam: number | string; enumValues: undefined; }>; export class MySqlFloatBuilder> extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlFloatBuilder'; constructor(name: T['name'], config: MySqlFloatConfig | undefined) { super(name, 'number', 'MySqlFloat'); this.config.precision = config?.precision; this.config.scale = config?.scale; this.config.unsigned = config?.unsigned; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlFloat> { return new MySqlFloat>(table, this.config as ColumnBuilderRuntimeConfig); } } export class MySqlFloat> extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlFloat'; readonly precision: number | undefined = this.config.precision; readonly scale: number | undefined = this.config.scale; readonly unsigned: boolean | undefined = this.config.unsigned; getSQLType(): string { let type = ''; if (this.precision !== undefined && this.scale !== undefined) { type += `float(${this.precision},${this.scale})`; } else if (this.precision === undefined) { type += 'float'; } else { type += `float(${this.precision})`; } return this.unsigned ? `${type} unsigned` : type; } } export interface MySqlFloatConfig { precision?: number; scale?: number; unsigned?: boolean; } export function float(): MySqlFloatBuilderInitial<''>; export function float( config?: MySqlFloatConfig, ): MySqlFloatBuilderInitial<''>; export function float( name: TName, config?: MySqlFloatConfig, ): MySqlFloatBuilderInitial; export function float(a?: string | MySqlFloatConfig, b?: MySqlFloatConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlFloatBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/index.ts ================================================ export * from './bigint.ts'; export * from './binary.ts'; export * from './boolean.ts'; export * from './char.ts'; export * from './common.ts'; export * from './custom.ts'; export * from './date.ts'; export * from './datetime.ts'; export * from './decimal.ts'; export * from './double.ts'; export * from './enum.ts'; export * from './float.ts'; export * from './int.ts'; export * from './json.ts'; export * from './mediumint.ts'; export * from './real.ts'; export * from './serial.ts'; export * from './smallint.ts'; export * from './text.ts'; export * from './time.ts'; export * from './timestamp.ts'; export * from './tinyint.ts'; export * from './varbinary.ts'; export * from './varchar.ts'; export * from './year.ts'; ================================================ FILE: drizzle-orm/src/mysql-core/columns/int.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export type MySqlIntBuilderInitial = MySqlIntBuilder<{ name: TName; dataType: 'number'; columnType: 'MySqlInt'; data: number; driverParam: number | string; enumValues: undefined; }>; export class MySqlIntBuilder> extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlIntBuilder'; constructor(name: T['name'], config?: MySqlIntConfig) { super(name, 'number', 'MySqlInt'); this.config.unsigned = config ? config.unsigned : false; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlInt> { return new MySqlInt>(table, this.config as ColumnBuilderRuntimeConfig); } } export class MySqlInt> extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlInt'; getSQLType(): string { return `int${this.config.unsigned ? ' unsigned' : ''}`; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return Number(value); } return value; } } export interface MySqlIntConfig { unsigned?: boolean; } export function int(): MySqlIntBuilderInitial<''>; export function int( config?: MySqlIntConfig, ): MySqlIntBuilderInitial<''>; export function int( name: TName, config?: MySqlIntConfig, ): MySqlIntBuilderInitial; export function int(a?: string | MySqlIntConfig, b?: MySqlIntConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlIntBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/json.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type MySqlJsonBuilderInitial = MySqlJsonBuilder<{ name: TName; dataType: 'json'; columnType: 'MySqlJson'; data: unknown; driverParam: string; enumValues: undefined; }>; export class MySqlJsonBuilder> extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlJsonBuilder'; constructor(name: T['name']) { super(name, 'json', 'MySqlJson'); } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlJson> { return new MySqlJson>(table, this.config as ColumnBuilderRuntimeConfig); } } export class MySqlJson> extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlJson'; getSQLType(): string { return 'json'; } override mapToDriverValue(value: T['data']): string { return JSON.stringify(value); } } export function json(): MySqlJsonBuilderInitial<''>; export function json(name: TName): MySqlJsonBuilderInitial; export function json(name?: string) { return new MySqlJsonBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/mediumint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; import type { MySqlIntConfig } from './int.ts'; export type MySqlMediumIntBuilderInitial = MySqlMediumIntBuilder<{ name: TName; dataType: 'number'; columnType: 'MySqlMediumInt'; data: number; driverParam: number | string; enumValues: undefined; }>; export class MySqlMediumIntBuilder> extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlMediumIntBuilder'; constructor(name: T['name'], config?: MySqlIntConfig) { super(name, 'number', 'MySqlMediumInt'); this.config.unsigned = config ? config.unsigned : false; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlMediumInt> { return new MySqlMediumInt>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlMediumInt> extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlMediumInt'; getSQLType(): string { return `mediumint${this.config.unsigned ? ' unsigned' : ''}`; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return Number(value); } return value; } } export function mediumint(): MySqlMediumIntBuilderInitial<''>; export function mediumint( config?: MySqlIntConfig, ): MySqlMediumIntBuilderInitial<''>; export function mediumint( name: TName, config?: MySqlIntConfig, ): MySqlMediumIntBuilderInitial; export function mediumint(a?: string | MySqlIntConfig, b?: MySqlIntConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlMediumIntBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/real.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export type MySqlRealBuilderInitial = MySqlRealBuilder<{ name: TName; dataType: 'number'; columnType: 'MySqlReal'; data: number; driverParam: number | string; enumValues: undefined; }>; export class MySqlRealBuilder> extends MySqlColumnBuilderWithAutoIncrement< T, MySqlRealConfig > { static override readonly [entityKind]: string = 'MySqlRealBuilder'; constructor(name: T['name'], config: MySqlRealConfig | undefined) { super(name, 'number', 'MySqlReal'); this.config.precision = config?.precision; this.config.scale = config?.scale; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlReal> { return new MySqlReal>(table, this.config as ColumnBuilderRuntimeConfig); } } export class MySqlReal> extends MySqlColumnWithAutoIncrement< T, MySqlRealConfig > { static override readonly [entityKind]: string = 'MySqlReal'; precision: number | undefined = this.config.precision; scale: number | undefined = this.config.scale; getSQLType(): string { if (this.precision !== undefined && this.scale !== undefined) { return `real(${this.precision}, ${this.scale})`; } else if (this.precision === undefined) { return 'real'; } else { return `real(${this.precision})`; } } } export interface MySqlRealConfig { precision?: number; scale?: number; } export function real(): MySqlRealBuilderInitial<''>; export function real( config?: MySqlRealConfig, ): MySqlRealBuilderInitial<''>; export function real( name: TName, config?: MySqlRealConfig, ): MySqlRealBuilderInitial; export function real(a?: string | MySqlRealConfig, b: MySqlRealConfig = {}) { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlRealBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/serial.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, HasDefault, IsAutoincrement, IsPrimaryKey, MakeColumnConfig, NotNull, } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; export type MySqlSerialBuilderInitial = IsAutoincrement< IsPrimaryKey< NotNull< HasDefault< MySqlSerialBuilder<{ name: TName; dataType: 'number'; columnType: 'MySqlSerial'; data: number; driverParam: number; enumValues: undefined; }> > > > >; export class MySqlSerialBuilder> extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlSerialBuilder'; constructor(name: T['name']) { super(name, 'number', 'MySqlSerial'); this.config.hasDefault = true; this.config.autoIncrement = true; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlSerial> { return new MySqlSerial>(table, this.config as ColumnBuilderRuntimeConfig); } } export class MySqlSerial< T extends ColumnBaseConfig<'number', 'MySqlSerial'>, > extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlSerial'; getSQLType(): string { return 'serial'; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return Number(value); } return value; } } export function serial(): MySqlSerialBuilderInitial<''>; export function serial(name: TName): MySqlSerialBuilderInitial; export function serial(name?: string) { return new MySqlSerialBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/smallint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; import type { MySqlIntConfig } from './int.ts'; export type MySqlSmallIntBuilderInitial = MySqlSmallIntBuilder<{ name: TName; dataType: 'number'; columnType: 'MySqlSmallInt'; data: number; driverParam: number | string; enumValues: undefined; }>; export class MySqlSmallIntBuilder> extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlSmallIntBuilder'; constructor(name: T['name'], config?: MySqlIntConfig) { super(name, 'number', 'MySqlSmallInt'); this.config.unsigned = config ? config.unsigned : false; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlSmallInt> { return new MySqlSmallInt>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlSmallInt> extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlSmallInt'; getSQLType(): string { return `smallint${this.config.unsigned ? ' unsigned' : ''}`; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return Number(value); } return value; } } export function smallint(): MySqlSmallIntBuilderInitial<''>; export function smallint( config?: MySqlIntConfig, ): MySqlSmallIntBuilderInitial<''>; export function smallint( name: TName, config?: MySqlIntConfig, ): MySqlSmallIntBuilderInitial; export function smallint(a?: string | MySqlIntConfig, b?: MySqlIntConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlSmallIntBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/text.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type MySqlTextColumnType = 'tinytext' | 'text' | 'mediumtext' | 'longtext'; export type MySqlTextBuilderInitial = MySqlTextBuilder<{ name: TName; dataType: 'string'; columnType: 'MySqlText'; data: TEnum[number]; driverParam: string; enumValues: TEnum; }>; export class MySqlTextBuilder> extends MySqlColumnBuilder< T, { textType: MySqlTextColumnType; enumValues: T['enumValues'] } > { static override readonly [entityKind]: string = 'MySqlTextBuilder'; constructor(name: T['name'], textType: MySqlTextColumnType, config: MySqlTextConfig) { super(name, 'string', 'MySqlText'); this.config.textType = textType; this.config.enumValues = config.enum; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlText> { return new MySqlText>(table, this.config as ColumnBuilderRuntimeConfig); } } export class MySqlText> extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlText'; readonly textType: MySqlTextColumnType = this.config.textType; override readonly enumValues = this.config.enumValues; getSQLType(): string { return this.textType; } } export interface MySqlTextConfig< TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, > { enum?: TEnum; } export function text(): MySqlTextBuilderInitial<'', [string, ...string[]]>; export function text>( config?: MySqlTextConfig>, ): MySqlTextBuilderInitial<'', Writable>; export function text>( name: TName, config?: MySqlTextConfig>, ): MySqlTextBuilderInitial>; export function text(a?: string | MySqlTextConfig, b: MySqlTextConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlTextBuilder(name, 'text', config as any); } export function tinytext(): MySqlTextBuilderInitial<'', [string, ...string[]]>; export function tinytext>( config?: MySqlTextConfig>, ): MySqlTextBuilderInitial<'', Writable>; export function tinytext>( name: TName, config?: MySqlTextConfig>, ): MySqlTextBuilderInitial>; export function tinytext(a?: string | MySqlTextConfig, b: MySqlTextConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlTextBuilder(name, 'tinytext', config as any); } export function mediumtext(): MySqlTextBuilderInitial<'', [string, ...string[]]>; export function mediumtext>( config?: MySqlTextConfig>, ): MySqlTextBuilderInitial<'', Writable>; export function mediumtext>( name: TName, config?: MySqlTextConfig>, ): MySqlTextBuilderInitial>; export function mediumtext(a?: string | MySqlTextConfig, b: MySqlTextConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlTextBuilder(name, 'mediumtext', config as any); } export function longtext(): MySqlTextBuilderInitial<'', [string, ...string[]]>; export function longtext>( config?: MySqlTextConfig>, ): MySqlTextBuilderInitial<'', Writable>; export function longtext>( name: TName, config?: MySqlTextConfig>, ): MySqlTextBuilderInitial>; export function longtext(a?: string | MySqlTextConfig, b: MySqlTextConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlTextBuilder(name, 'longtext', config as any); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/time.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type MySqlTimeBuilderInitial = MySqlTimeBuilder<{ name: TName; dataType: 'string'; columnType: 'MySqlTime'; data: string; driverParam: string | number; enumValues: undefined; }>; export class MySqlTimeBuilder> extends MySqlColumnBuilder< T, TimeConfig > { static override readonly [entityKind]: string = 'MySqlTimeBuilder'; constructor( name: T['name'], config: TimeConfig | undefined, ) { super(name, 'string', 'MySqlTime'); this.config.fsp = config?.fsp; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlTime> { return new MySqlTime>(table, this.config as ColumnBuilderRuntimeConfig); } } export class MySqlTime< T extends ColumnBaseConfig<'string', 'MySqlTime'>, > extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlTime'; readonly fsp: number | undefined = this.config.fsp; getSQLType(): string { const precision = this.fsp === undefined ? '' : `(${this.fsp})`; return `time${precision}`; } } export type TimeConfig = { fsp?: 0 | 1 | 2 | 3 | 4 | 5 | 6; }; export function time(): MySqlTimeBuilderInitial<''>; export function time( config?: TimeConfig, ): MySqlTimeBuilderInitial<''>; export function time( name: TName, config?: TimeConfig, ): MySqlTimeBuilderInitial; export function time(a?: string | TimeConfig, b?: TimeConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlTimeBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/timestamp.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { MySqlDateBaseColumn, MySqlDateColumnBaseBuilder } from './date.common.ts'; export type MySqlTimestampBuilderInitial = MySqlTimestampBuilder<{ name: TName; dataType: 'date'; columnType: 'MySqlTimestamp'; data: Date; driverParam: string | number; enumValues: undefined; }>; export class MySqlTimestampBuilder> extends MySqlDateColumnBaseBuilder { static override readonly [entityKind]: string = 'MySqlTimestampBuilder'; constructor(name: T['name'], config: MySqlTimestampConfig | undefined) { super(name, 'date', 'MySqlTimestamp'); this.config.fsp = config?.fsp; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlTimestamp> { return new MySqlTimestamp>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlTimestamp> extends MySqlDateBaseColumn { static override readonly [entityKind]: string = 'MySqlTimestamp'; readonly fsp: number | undefined = this.config.fsp; getSQLType(): string { const precision = this.fsp === undefined ? '' : `(${this.fsp})`; return `timestamp${precision}`; } override mapFromDriverValue(value: string): Date { return new Date(value + '+0000'); } override mapToDriverValue(value: Date): string { return value.toISOString().slice(0, -1).replace('T', ' '); } } export type MySqlTimestampStringBuilderInitial = MySqlTimestampStringBuilder<{ name: TName; dataType: 'string'; columnType: 'MySqlTimestampString'; data: string; driverParam: string | number; enumValues: undefined; }>; export class MySqlTimestampStringBuilder> extends MySqlDateColumnBaseBuilder { static override readonly [entityKind]: string = 'MySqlTimestampStringBuilder'; constructor(name: T['name'], config: MySqlTimestampConfig | undefined) { super(name, 'string', 'MySqlTimestampString'); this.config.fsp = config?.fsp; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlTimestampString> { return new MySqlTimestampString>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlTimestampString> extends MySqlDateBaseColumn { static override readonly [entityKind]: string = 'MySqlTimestampString'; readonly fsp: number | undefined = this.config.fsp; getSQLType(): string { const precision = this.fsp === undefined ? '' : `(${this.fsp})`; return `timestamp${precision}`; } } export type TimestampFsp = 0 | 1 | 2 | 3 | 4 | 5 | 6; export interface MySqlTimestampConfig { mode?: TMode; fsp?: TimestampFsp; } export function timestamp(): MySqlTimestampBuilderInitial<''>; export function timestamp( config?: MySqlTimestampConfig, ): Equal extends true ? MySqlTimestampStringBuilderInitial<''> : MySqlTimestampBuilderInitial<''>; export function timestamp( name: TName, config?: MySqlTimestampConfig, ): Equal extends true ? MySqlTimestampStringBuilderInitial : MySqlTimestampBuilderInitial; export function timestamp(a?: string | MySqlTimestampConfig, b: MySqlTimestampConfig = {}) { const { name, config } = getColumnNameAndConfig(a, b); if (config?.mode === 'string') { return new MySqlTimestampStringBuilder(name, config); } return new MySqlTimestampBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/tinyint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumnBuilderWithAutoIncrement, MySqlColumnWithAutoIncrement } from './common.ts'; import type { MySqlIntConfig } from './int.ts'; export type MySqlTinyIntBuilderInitial = MySqlTinyIntBuilder<{ name: TName; dataType: 'number'; columnType: 'MySqlTinyInt'; data: number; driverParam: number | string; enumValues: undefined; }>; export class MySqlTinyIntBuilder> extends MySqlColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlTinyIntBuilder'; constructor(name: T['name'], config?: MySqlIntConfig) { super(name, 'number', 'MySqlTinyInt'); this.config.unsigned = config ? config.unsigned : false; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlTinyInt> { return new MySqlTinyInt>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlTinyInt> extends MySqlColumnWithAutoIncrement { static override readonly [entityKind]: string = 'MySqlTinyInt'; getSQLType(): string { return `tinyint${this.config.unsigned ? ' unsigned' : ''}`; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return Number(value); } return value; } } export function tinyint(): MySqlTinyIntBuilderInitial<''>; export function tinyint( config?: MySqlIntConfig, ): MySqlTinyIntBuilderInitial<''>; export function tinyint( name: TName, config?: MySqlIntConfig, ): MySqlTinyIntBuilderInitial; export function tinyint(a?: string | MySqlIntConfig, b?: MySqlIntConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlTinyIntBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/varbinary.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type MySqlVarBinaryBuilderInitial = MySqlVarBinaryBuilder<{ name: TName; dataType: 'string'; columnType: 'MySqlVarBinary'; data: string; driverParam: string; enumValues: undefined; }>; export class MySqlVarBinaryBuilder> extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlVarBinaryBuilder'; /** @internal */ constructor(name: T['name'], config: MySqlVarbinaryOptions) { super(name, 'string', 'MySqlVarBinary'); this.config.length = config?.length; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlVarBinary> { return new MySqlVarBinary>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlVarBinary< T extends ColumnBaseConfig<'string', 'MySqlVarBinary'>, > extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlVarBinary'; length: number | undefined = this.config.length; override mapFromDriverValue(value: string | Buffer | Uint8Array): string { if (typeof value === 'string') return value; if (Buffer.isBuffer(value)) return value.toString(); const str: string[] = []; for (const v of value) { str.push(v === 49 ? '1' : '0'); } return str.join(''); } getSQLType(): string { return this.length === undefined ? `varbinary` : `varbinary(${this.length})`; } } export interface MySqlVarbinaryOptions { length: number; } export function varbinary( config: MySqlVarbinaryOptions, ): MySqlVarBinaryBuilderInitial<''>; export function varbinary( name: TName, config: MySqlVarbinaryOptions, ): MySqlVarBinaryBuilderInitial; export function varbinary(a?: string | MySqlVarbinaryOptions, b?: MySqlVarbinaryOptions) { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlVarBinaryBuilder(name, config); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/varchar.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type MySqlVarCharBuilderInitial< TName extends string, TEnum extends [string, ...string[]], TLength extends number | undefined, > = MySqlVarCharBuilder< { name: TName; dataType: 'string'; columnType: 'MySqlVarChar'; data: TEnum[number]; driverParam: number | string; enumValues: TEnum; length: TLength; } >; export class MySqlVarCharBuilder< T extends ColumnBuilderBaseConfig<'string', 'MySqlVarChar'> & { length?: number | undefined }, > extends MySqlColumnBuilder> { static override readonly [entityKind]: string = 'MySqlVarCharBuilder'; /** @internal */ constructor(name: T['name'], config: MySqlVarCharConfig) { super(name, 'string', 'MySqlVarChar'); this.config.length = config.length; this.config.enum = config.enum; } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlVarChar & { length: T['length']; enumValues: T['enumValues'] }> { return new MySqlVarChar & { length: T['length']; enumValues: T['enumValues'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class MySqlVarChar & { length?: number | undefined }> extends MySqlColumn, { length: T['length'] }> { static override readonly [entityKind]: string = 'MySqlVarChar'; readonly length: number | undefined = this.config.length; override readonly enumValues = this.config.enum; getSQLType(): string { return this.length === undefined ? `varchar` : `varchar(${this.length})`; } } export interface MySqlVarCharConfig< TEnum extends string[] | readonly string[] | undefined = string[] | readonly string[] | undefined, TLength extends number | undefined = number | undefined, > { enum?: TEnum; length: TLength; } export function varchar, L extends number | undefined>( config: MySqlVarCharConfig, L>, ): MySqlVarCharBuilderInitial<'', Writable, L>; export function varchar< TName extends string, U extends string, T extends Readonly<[U, ...U[]]>, L extends number | undefined, >( name: TName, config: MySqlVarCharConfig, L>, ): MySqlVarCharBuilderInitial, L>; export function varchar(a?: string | MySqlVarCharConfig, b?: MySqlVarCharConfig): any { const { name, config } = getColumnNameAndConfig(a, b); return new MySqlVarCharBuilder(name, config as any); } ================================================ FILE: drizzle-orm/src/mysql-core/columns/year.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyMySqlTable } from '~/mysql-core/table.ts'; import { MySqlColumn, MySqlColumnBuilder } from './common.ts'; export type MySqlYearBuilderInitial = MySqlYearBuilder<{ name: TName; dataType: 'number'; columnType: 'MySqlYear'; data: number; driverParam: number; enumValues: undefined; }>; export class MySqlYearBuilder> extends MySqlColumnBuilder { static override readonly [entityKind]: string = 'MySqlYearBuilder'; constructor(name: T['name']) { super(name, 'number', 'MySqlYear'); } /** @internal */ override build( table: AnyMySqlTable<{ name: TTableName }>, ): MySqlYear> { return new MySqlYear>(table, this.config as ColumnBuilderRuntimeConfig); } } export class MySqlYear< T extends ColumnBaseConfig<'number', 'MySqlYear'>, > extends MySqlColumn { static override readonly [entityKind]: string = 'MySqlYear'; getSQLType(): string { return `year`; } } export function year(): MySqlYearBuilderInitial<''>; export function year(name: TName): MySqlYearBuilderInitial; export function year(name?: string) { return new MySqlYearBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/mysql-core/db.ts ================================================ import type { ResultSetHeader } from 'mysql2/promise'; import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import { type ColumnsSelection, type SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import { WithSubquery } from '~/subquery.ts'; import type { DrizzleTypeError } from '~/utils.ts'; import type { MySqlDialect } from './dialect.ts'; import { MySqlCountBuilder } from './query-builders/count.ts'; import { MySqlDeleteBase, MySqlInsertBuilder, MySqlSelectBuilder, MySqlUpdateBuilder, QueryBuilder, } from './query-builders/index.ts'; import { RelationalQueryBuilder } from './query-builders/query.ts'; import type { SelectedFields } from './query-builders/select.types.ts'; import type { Mode, MySqlQueryResultHKT, MySqlQueryResultKind, MySqlSession, MySqlTransaction, MySqlTransactionConfig, PreparedQueryHKTBase, } from './session.ts'; import type { WithBuilder } from './subquery.ts'; import type { MySqlTable } from './table.ts'; import type { MySqlViewBase } from './view-base.ts'; export class MySqlDatabase< TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TFullSchema extends Record = {}, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations, > { static readonly [entityKind]: string = 'MySqlDatabase'; declare readonly _: { readonly schema: TSchema | undefined; readonly fullSchema: TFullSchema; readonly tableNamesMap: Record; }; query: TFullSchema extends Record ? DrizzleTypeError<'Seems like the schema generic is missing - did you forget to add it to your DB type?'> : { [K in keyof TSchema]: RelationalQueryBuilder; }; constructor( /** @internal */ readonly dialect: MySqlDialect, /** @internal */ readonly session: MySqlSession, schema: RelationalSchemaConfig | undefined, protected readonly mode: Mode, ) { this._ = schema ? { schema: schema.schema, fullSchema: schema.fullSchema as TFullSchema, tableNamesMap: schema.tableNamesMap, } : { schema: undefined, fullSchema: {} as TFullSchema, tableNamesMap: {}, }; this.query = {} as typeof this['query']; if (this._.schema) { for (const [tableName, columns] of Object.entries(this._.schema)) { (this.query as MySqlDatabase>['query'])[tableName] = new RelationalQueryBuilder( schema!.fullSchema, this._.schema, this._.tableNamesMap, schema!.fullSchema[tableName] as MySqlTable, columns, dialect, session, this.mode, ); } } this.$cache = { invalidate: async (_params: any) => {} }; } /** * Creates a subquery that defines a temporary named result set as a CTE. * * It is useful for breaking down complex queries into simpler parts and for reusing the result set in subsequent parts of the query. * * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} * * @param alias The alias for the subquery. * * Failure to provide an alias will result in a DrizzleTypeError, preventing the subquery from being referenced in other queries. * * @example * * ```ts * // Create a subquery with alias 'sq' and use it in the select query * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); * * const result = await db.with(sq).select().from(sq); * ``` * * To select arbitrary SQL values as fields in a CTE and reference them in other CTEs or in the main query, you need to add aliases to them: * * ```ts * // Select an arbitrary SQL value as a field in a CTE and reference it in the main query * const sq = db.$with('sq').as(db.select({ * name: sql`upper(${users.name})`.as('name'), * }) * .from(users)); * * const result = await db.with(sq).select({ name: sq.name }).from(sq); * ``` */ $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const self = this; const as = ( qb: | TypedQueryBuilder | SQL | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), ) => { if (typeof qb === 'function') { qb = qb(new QueryBuilder(self.dialect)); } return new Proxy( new WithSubquery( qb.getSQL(), selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, alias, true, ), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ); }; return { as }; }; $count( source: MySqlTable | MySqlViewBase | SQL | SQLWrapper, filters?: SQL, ) { return new MySqlCountBuilder({ source, filters, session: this.session }); } $cache: { invalidate: Cache['onMutate'] }; /** * Incorporates a previously defined CTE (using `$with`) into the main query. * * This method allows the main query to reference a temporary named result set. * * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} * * @param queries The CTEs to incorporate into the main query. * * @example * * ```ts * // Define a subquery 'sq' as a CTE using $with * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); * * // Incorporate the CTE 'sq' into the main query and select from it * const result = await db.with(sq).select().from(sq); * ``` */ with(...queries: WithSubquery[]) { const self = this; /** * Creates a select query. * * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select} * * @param fields The selection object. * * @example * * ```ts * // Select all columns and all rows from the 'cars' table * const allCars: Car[] = await db.select().from(cars); * * // Select specific columns and all rows from the 'cars' table * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ * id: cars.id, * brand: cars.brand * }) * .from(cars); * ``` * * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: * * ```ts * // Select specific columns along with expression and all rows from the 'cars' table * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ * id: cars.id, * lowerBrand: sql`lower(${cars.brand})`, * }) * .from(cars); * ``` */ function select(): MySqlSelectBuilder; function select( fields: TSelection, ): MySqlSelectBuilder; function select(fields?: SelectedFields): MySqlSelectBuilder { return new MySqlSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, }); } /** * Adds `distinct` expression to the select query. * * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param fields The selection object. * * @example * ```ts * // Select all unique rows from the 'cars' table * await db.selectDistinct() * .from(cars) * .orderBy(cars.id, cars.brand, cars.color); * * // Select all unique brands from the 'cars' table * await db.selectDistinct({ brand: cars.brand }) * .from(cars) * .orderBy(cars.brand); * ``` */ function selectDistinct(): MySqlSelectBuilder; function selectDistinct( fields: TSelection, ): MySqlSelectBuilder; function selectDistinct( fields?: SelectedFields, ): MySqlSelectBuilder { return new MySqlSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, distinct: true, }); } /** * Creates an update query. * * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. * * Use `.set()` method to specify which values to update. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param table The table to update. * * @example * * ```ts * // Update all rows in the 'cars' table * await db.update(cars).set({ color: 'red' }); * * // Update rows with filters and conditions * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); * ``` */ function update( table: TTable, ): MySqlUpdateBuilder { return new MySqlUpdateBuilder(table, self.session, self.dialect, queries); } /** * Creates a delete query. * * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param table The table to delete from. * * @example * * ```ts * // Delete all rows in the 'cars' table * await db.delete(cars); * * // Delete rows with filters and conditions * await db.delete(cars).where(eq(cars.color, 'green')); * ``` */ function delete_( table: TTable, ): MySqlDeleteBase { return new MySqlDeleteBase(table, self.session, self.dialect, queries); } return { select, selectDistinct, update, delete: delete_ }; } /** * Creates a select query. * * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select} * * @param fields The selection object. * * @example * * ```ts * // Select all columns and all rows from the 'cars' table * const allCars: Car[] = await db.select().from(cars); * * // Select specific columns and all rows from the 'cars' table * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ * id: cars.id, * brand: cars.brand * }) * .from(cars); * ``` * * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: * * ```ts * // Select specific columns along with expression and all rows from the 'cars' table * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ * id: cars.id, * lowerBrand: sql`lower(${cars.brand})`, * }) * .from(cars); * ``` */ select(): MySqlSelectBuilder; select(fields: TSelection): MySqlSelectBuilder; select(fields?: SelectedFields): MySqlSelectBuilder { return new MySqlSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect }); } /** * Adds `distinct` expression to the select query. * * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param fields The selection object. * * @example * ```ts * // Select all unique rows from the 'cars' table * await db.selectDistinct() * .from(cars) * .orderBy(cars.id, cars.brand, cars.color); * * // Select all unique brands from the 'cars' table * await db.selectDistinct({ brand: cars.brand }) * .from(cars) * .orderBy(cars.brand); * ``` */ selectDistinct(): MySqlSelectBuilder; selectDistinct( fields: TSelection, ): MySqlSelectBuilder; selectDistinct(fields?: SelectedFields): MySqlSelectBuilder { return new MySqlSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect, distinct: true, }); } /** * Creates an update query. * * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. * * Use `.set()` method to specify which values to update. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param table The table to update. * * @example * * ```ts * // Update all rows in the 'cars' table * await db.update(cars).set({ color: 'red' }); * * // Update rows with filters and conditions * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); * ``` */ update(table: TTable): MySqlUpdateBuilder { return new MySqlUpdateBuilder(table, this.session, this.dialect); } /** * Creates an insert query. * * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. * * See docs: {@link https://orm.drizzle.team/docs/insert} * * @param table The table to insert into. * * @example * * ```ts * // Insert one row * await db.insert(cars).values({ brand: 'BMW' }); * * // Insert multiple rows * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); * ``` */ insert(table: TTable): MySqlInsertBuilder { return new MySqlInsertBuilder(table, this.session, this.dialect); } /** * Creates a delete query. * * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param table The table to delete from. * * @example * * ```ts * // Delete all rows in the 'cars' table * await db.delete(cars); * * // Delete rows with filters and conditions * await db.delete(cars).where(eq(cars.color, 'green')); * ``` */ delete(table: TTable): MySqlDeleteBase { return new MySqlDeleteBase(table, this.session, this.dialect); } execute( query: SQLWrapper | string, ): Promise> { return this.session.execute(typeof query === 'string' ? sql.raw(query) : query.getSQL()); } transaction( transaction: ( tx: MySqlTransaction, config?: MySqlTransactionConfig, ) => Promise, config?: MySqlTransactionConfig, ): Promise { return this.session.transaction(transaction, config); } } export type MySQLWithReplicas = Q & { $primary: Q; $replicas: Q[] }; export const withReplicas = < HKT extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TFullSchema extends Record, TSchema extends TablesRelationalConfig, Q extends MySqlDatabase< HKT, TPreparedQueryHKT, TFullSchema, TSchema extends Record ? ExtractTablesWithRelations : TSchema >, >( primary: Q, replicas: [Q, ...Q[]], getReplica: (replicas: Q[]) => Q = () => replicas[Math.floor(Math.random() * replicas.length)]!, ): MySQLWithReplicas => { const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); const $count: Q['$count'] = (...args: [any]) => getReplica(replicas).$count(...args); const $with: Q['with'] = (...args: []) => getReplica(replicas).with(...args); const update: Q['update'] = (...args: [any]) => primary.update(...args); const insert: Q['insert'] = (...args: [any]) => primary.insert(...args); const $delete: Q['delete'] = (...args: [any]) => primary.delete(...args); const execute: Q['execute'] = (...args: [any]) => primary.execute(...args); const transaction: Q['transaction'] = (...args: [any, any]) => primary.transaction(...args); return { ...primary, update, insert, delete: $delete, execute, transaction, $primary: primary, $replicas: replicas, select, selectDistinct, $count, with: $with, get query() { return getReplica(replicas).query; }, }; }; ================================================ FILE: drizzle-orm/src/mysql-core/dialect.ts ================================================ import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; import { CasingCache } from '~/casing.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import { DrizzleError } from '~/errors.ts'; import type { MigrationConfig, MigrationMeta } from '~/migrator.ts'; import { type BuildRelationalQueryResult, type DBQueryConfig, getOperators, getOrderByOperators, Many, normalizeRelation, One, type Relation, type TableRelationalConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { and, eq } from '~/sql/expressions/index.ts'; import { Param, SQL, sql, View } from '~/sql/sql.ts'; import type { Name, Placeholder, QueryWithTypings, SQLChunk } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { getTableName, getTableUniqueName, Table } from '~/table.ts'; import { type Casing, orderSelectedFields, type UpdateSet } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import { MySqlColumn } from './columns/common.ts'; import type { MySqlDeleteConfig } from './query-builders/delete.ts'; import type { MySqlInsertConfig } from './query-builders/insert.ts'; import type { AnyMySqlSelectQueryBuilder, MySqlSelectConfig, MySqlSelectJoinConfig, SelectedFieldsOrdered, } from './query-builders/select.types.ts'; import type { MySqlUpdateConfig } from './query-builders/update.ts'; import type { MySqlSession } from './session.ts'; import { MySqlTable } from './table.ts'; import { MySqlViewBase } from './view-base.ts'; export interface MySqlDialectConfig { casing?: Casing; } export class MySqlDialect { static readonly [entityKind]: string = 'MySqlDialect'; /** @internal */ readonly casing: CasingCache; constructor(config?: MySqlDialectConfig) { this.casing = new CasingCache(config?.casing); } async migrate( migrations: MigrationMeta[], session: MySqlSession, config: Omit, ): Promise { const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; const migrationTableCreate = sql` create table if not exists ${sql.identifier(migrationsTable)} ( id serial primary key, hash text not null, created_at bigint ) `; await session.execute(migrationTableCreate); const dbMigrations = await session.all<{ id: number; hash: string; created_at: string }>( sql`select id, hash, created_at from ${sql.identifier(migrationsTable)} order by created_at desc limit 1`, ); const lastDbMigration = dbMigrations[0]; await session.transaction(async (tx) => { for (const migration of migrations) { if ( !lastDbMigration || Number(lastDbMigration.created_at) < migration.folderMillis ) { for (const stmt of migration.sql) { await tx.execute(sql.raw(stmt)); } await tx.execute( sql`insert into ${ sql.identifier(migrationsTable) } (\`hash\`, \`created_at\`) values(${migration.hash}, ${migration.folderMillis})`, ); } } }); } escapeName(name: string): string { return `\`${name}\``; } escapeParam(_num: number): string { return `?`; } escapeString(str: string): string { return `'${str.replace(/'/g, "''")}'`; } private buildWithCTE(queries: Subquery[] | undefined): SQL | undefined { if (!queries?.length) return undefined; const withSqlChunks = [sql`with `]; for (const [i, w] of queries.entries()) { withSqlChunks.push(sql`${sql.identifier(w._.alias)} as (${w._.sql})`); if (i < queries.length - 1) { withSqlChunks.push(sql`, `); } } withSqlChunks.push(sql` `); return sql.join(withSqlChunks); } buildDeleteQuery({ table, where, returning, withList, limit, orderBy }: MySqlDeleteConfig): SQL { const withSql = this.buildWithCTE(withList); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` : undefined; const whereSql = where ? sql` where ${where}` : undefined; const orderBySql = this.buildOrderBy(orderBy); const limitSql = this.buildLimit(limit); return sql`${withSql}delete from ${table}${whereSql}${orderBySql}${limitSql}${returningSql}`; } buildUpdateSet(table: MySqlTable, set: UpdateSet): SQL { const tableColumns = table[Table.Symbol.Columns]; const columnNames = Object.keys(tableColumns).filter((colName) => set[colName] !== undefined || tableColumns[colName]?.onUpdateFn !== undefined ); const setSize = columnNames.length; return sql.join(columnNames.flatMap((colName, i) => { const col = tableColumns[colName]!; const onUpdateFnResult = col.onUpdateFn?.(); const value = set[colName] ?? (is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col)); const res = sql`${sql.identifier(this.casing.getColumnCasing(col))} = ${value}`; if (i < setSize - 1) { return [res, sql.raw(', ')]; } return [res]; })); } buildUpdateQuery({ table, set, where, returning, withList, limit, orderBy }: MySqlUpdateConfig): SQL { const withSql = this.buildWithCTE(withList); const setSql = this.buildUpdateSet(table, set); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` : undefined; const whereSql = where ? sql` where ${where}` : undefined; const orderBySql = this.buildOrderBy(orderBy); const limitSql = this.buildLimit(limit); return sql`${withSql}update ${table} set ${setSql}${whereSql}${orderBySql}${limitSql}${returningSql}`; } /** * Builds selection SQL with provided fields/expressions * * Examples: * * `select from` * * `insert ... returning ` * * If `isSingleTable` is true, then columns won't be prefixed with table name */ private buildSelection( fields: SelectedFieldsOrdered, { isSingleTable = false }: { isSingleTable?: boolean } = {}, ): SQL { const columnsLen = fields.length; const chunks = fields .flatMap(({ field }, i) => { const chunk: SQLChunk[] = []; if (is(field, SQL.Aliased) && field.isSelectionField) { chunk.push(sql.identifier(field.fieldAlias)); } else if (is(field, SQL.Aliased) || is(field, SQL)) { const query = is(field, SQL.Aliased) ? field.sql : field; if (isSingleTable) { chunk.push( new SQL( query.queryChunks.map((c) => { if (is(c, MySqlColumn)) { return sql.identifier(this.casing.getColumnCasing(c)); } return c; }), ), ); } else { chunk.push(query); } if (is(field, SQL.Aliased)) { chunk.push(sql` as ${sql.identifier(field.fieldAlias)}`); } } else if (is(field, Column)) { if (isSingleTable) { chunk.push(sql.identifier(this.casing.getColumnCasing(field))); } else { chunk.push(field); } } else if (is(field, Subquery)) { const entries = Object.entries(field._.selectedFields) as [string, SQL.Aliased | Column | SQL][]; if (entries.length === 1) { const entry = entries[0]![1]; const fieldDecoder = is(entry, SQL) ? entry.decoder : is(entry, Column) ? { mapFromDriverValue: (v: any) => entry.mapFromDriverValue(v) } : entry.sql.decoder; if (fieldDecoder) { field._.sql.decoder = fieldDecoder; } } chunk.push(field); } if (i < columnsLen - 1) { chunk.push(sql`, `); } return chunk; }); return sql.join(chunks); } private buildLimit(limit: number | Placeholder | undefined): SQL | undefined { return typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) ? sql` limit ${limit}` : undefined; } private buildOrderBy(orderBy: (MySqlColumn | SQL | SQL.Aliased)[] | undefined): SQL | undefined { return orderBy && orderBy.length > 0 ? sql` order by ${sql.join(orderBy, sql`, `)}` : undefined; } private buildIndex({ indexes, indexFor, }: { indexes: string[] | undefined; indexFor: 'USE' | 'FORCE' | 'IGNORE'; }): SQL | undefined { return indexes && indexes.length > 0 ? sql` ${sql.raw(indexFor)} INDEX (${sql.raw(indexes.join(`, `))})` : undefined; } buildSelectQuery( { withList, fields, fieldsFlat, where, having, table, joins, orderBy, groupBy, limit, offset, lockingClause, distinct, setOperators, useIndex, forceIndex, ignoreIndex, }: MySqlSelectConfig, ): SQL { const fieldsList = fieldsFlat ?? orderSelectedFields(fields); for (const f of fieldsList) { if ( is(f.field, Column) && getTableName(f.field.table) !== (is(table, Subquery) ? table._.alias : is(table, MySqlViewBase) ? table[ViewBaseConfig].name : is(table, SQL) ? undefined : getTableName(table)) && !((table) => joins?.some(({ alias }) => alias === (table[Table.Symbol.IsAlias] ? getTableName(table) : table[Table.Symbol.BaseName]) ))(f.field.table) ) { const tableName = getTableName(f.field.table); throw new Error( `Your "${ f.path.join('->') }" field references a column "${tableName}"."${f.field.name}", but the table "${tableName}" is not part of the query! Did you forget to join it?`, ); } } const isSingleTable = !joins || joins.length === 0; const withSql = this.buildWithCTE(withList); const distinctSql = distinct ? sql` distinct` : undefined; const selection = this.buildSelection(fieldsList, { isSingleTable }); const tableSql = (() => { if (is(table, Table) && table[Table.Symbol.IsAlias]) { return sql`${sql`${sql.identifier(table[Table.Symbol.Schema] ?? '')}.`.if(table[Table.Symbol.Schema])}${ sql.identifier(table[Table.Symbol.OriginalName]) } ${sql.identifier(table[Table.Symbol.Name])}`; } return table; })(); const joinsArray: SQL[] = []; if (joins) { for (const [index, joinMeta] of joins.entries()) { if (index === 0) { joinsArray.push(sql` `); } const table = joinMeta.table; const lateralSql = joinMeta.lateral ? sql` lateral` : undefined; const onSql = joinMeta.on ? sql` on ${joinMeta.on}` : undefined; if (is(table, MySqlTable)) { const tableName = table[MySqlTable.Symbol.Name]; const tableSchema = table[MySqlTable.Symbol.Schema]; const origTableName = table[MySqlTable.Symbol.OriginalName]; const alias = tableName === origTableName ? undefined : joinMeta.alias; const useIndexSql = this.buildIndex({ indexes: joinMeta.useIndex, indexFor: 'USE' }); const forceIndexSql = this.buildIndex({ indexes: joinMeta.forceIndex, indexFor: 'FORCE' }); const ignoreIndexSql = this.buildIndex({ indexes: joinMeta.ignoreIndex, indexFor: 'IGNORE' }); joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined }${sql.identifier(origTableName)}${useIndexSql}${forceIndexSql}${ignoreIndexSql}${ alias && sql` ${sql.identifier(alias)}` }${onSql}`, ); } else if (is(table, View)) { const viewName = table[ViewBaseConfig].name; const viewSchema = table[ViewBaseConfig].schema; const origViewName = table[ViewBaseConfig].originalName; const alias = viewName === origViewName ? undefined : joinMeta.alias; joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ viewSchema ? sql`${sql.identifier(viewSchema)}.` : undefined }${sql.identifier(origViewName)}${alias && sql` ${sql.identifier(alias)}`}${onSql}`, ); } else { joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${table}${onSql}`, ); } if (index < joins.length - 1) { joinsArray.push(sql` `); } } } const joinsSql = sql.join(joinsArray); const whereSql = where ? sql` where ${where}` : undefined; const havingSql = having ? sql` having ${having}` : undefined; const orderBySql = this.buildOrderBy(orderBy); const groupBySql = groupBy && groupBy.length > 0 ? sql` group by ${sql.join(groupBy, sql`, `)}` : undefined; const limitSql = this.buildLimit(limit); const offsetSql = offset ? sql` offset ${offset}` : undefined; const useIndexSql = this.buildIndex({ indexes: useIndex, indexFor: 'USE' }); const forceIndexSql = this.buildIndex({ indexes: forceIndex, indexFor: 'FORCE' }); const ignoreIndexSql = this.buildIndex({ indexes: ignoreIndex, indexFor: 'IGNORE' }); let lockingClausesSql; if (lockingClause) { const { config, strength } = lockingClause; lockingClausesSql = sql` for ${sql.raw(strength)}`; if (config.noWait) { lockingClausesSql.append(sql` nowait`); } else if (config.skipLocked) { lockingClausesSql.append(sql` skip locked`); } } const finalQuery = sql`${withSql}select${distinctSql} ${selection} from ${tableSql}${useIndexSql}${forceIndexSql}${ignoreIndexSql}${joinsSql}${whereSql}${groupBySql}${havingSql}${orderBySql}${limitSql}${offsetSql}${lockingClausesSql}`; if (setOperators.length > 0) { return this.buildSetOperations(finalQuery, setOperators); } return finalQuery; } buildSetOperations(leftSelect: SQL, setOperators: MySqlSelectConfig['setOperators']): SQL { const [setOperator, ...rest] = setOperators; if (!setOperator) { throw new Error('Cannot pass undefined values to any set operator'); } if (rest.length === 0) { return this.buildSetOperationQuery({ leftSelect, setOperator }); } // Some recursive magic here return this.buildSetOperations( this.buildSetOperationQuery({ leftSelect, setOperator }), rest, ); } buildSetOperationQuery({ leftSelect, setOperator: { type, isAll, rightSelect, limit, orderBy, offset }, }: { leftSelect: SQL; setOperator: MySqlSelectConfig['setOperators'][number] }): SQL { const leftChunk = sql`(${leftSelect.getSQL()}) `; const rightChunk = sql`(${rightSelect.getSQL()})`; let orderBySql; if (orderBy && orderBy.length > 0) { const orderByValues: (SQL | Name)[] = []; // The next bit is necessary because the sql operator replaces ${table.column} with `table`.`column` // which is invalid MySql syntax, Table from one of the SELECTs cannot be used in global ORDER clause for (const orderByUnit of orderBy) { if (is(orderByUnit, MySqlColumn)) { orderByValues.push(sql.identifier(this.casing.getColumnCasing(orderByUnit))); } else if (is(orderByUnit, SQL)) { for (let i = 0; i < orderByUnit.queryChunks.length; i++) { const chunk = orderByUnit.queryChunks[i]; if (is(chunk, MySqlColumn)) { orderByUnit.queryChunks[i] = sql.identifier(this.casing.getColumnCasing(chunk)); } } orderByValues.push(sql`${orderByUnit}`); } else { orderByValues.push(sql`${orderByUnit}`); } } orderBySql = sql` order by ${sql.join(orderByValues, sql`, `)} `; } const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) ? sql` limit ${limit}` : undefined; const operatorChunk = sql.raw(`${type} ${isAll ? 'all ' : ''}`); const offsetSql = offset ? sql` offset ${offset}` : undefined; return sql`${leftChunk}${operatorChunk}${rightChunk}${orderBySql}${limitSql}${offsetSql}`; } buildInsertQuery( { table, values: valuesOrSelect, ignore, onConflict, select }: MySqlInsertConfig, ): { sql: SQL; generatedIds: Record[] } { // const isSingleValue = values.length === 1; const valuesSqlList: ((SQLChunk | SQL)[] | SQL)[] = []; const columns: Record = table[Table.Symbol.Columns]; const colEntries: [string, MySqlColumn][] = Object.entries(columns).filter(([_, col]) => !col.shouldDisableInsert() ); const insertOrder = colEntries.map(([, column]) => sql.identifier(this.casing.getColumnCasing(column))); const generatedIdsResponse: Record[] = []; if (select) { const select = valuesOrSelect as AnyMySqlSelectQueryBuilder | SQL; if (is(select, SQL)) { valuesSqlList.push(select); } else { valuesSqlList.push(select.getSQL()); } } else { const values = valuesOrSelect as Record[]; valuesSqlList.push(sql.raw('values ')); for (const [valueIndex, value] of values.entries()) { const generatedIds: Record = {}; const valueList: (SQLChunk | SQL)[] = []; for (const [fieldName, col] of colEntries) { const colValue = value[fieldName]; if (colValue === undefined || (is(colValue, Param) && colValue.value === undefined)) { // eslint-disable-next-line unicorn/no-negated-condition if (col.defaultFn !== undefined) { const defaultFnResult = col.defaultFn(); generatedIds[fieldName] = defaultFnResult; const defaultValue = is(defaultFnResult, SQL) ? defaultFnResult : sql.param(defaultFnResult, col); valueList.push(defaultValue); // eslint-disable-next-line unicorn/no-negated-condition } else if (!col.default && col.onUpdateFn !== undefined) { const onUpdateFnResult = col.onUpdateFn(); const newValue = is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col); valueList.push(newValue); } else { valueList.push(sql`default`); } } else { if (col.defaultFn && is(colValue, Param)) { generatedIds[fieldName] = colValue.value; } valueList.push(colValue); } } generatedIdsResponse.push(generatedIds); valuesSqlList.push(valueList); if (valueIndex < values.length - 1) { valuesSqlList.push(sql`, `); } } } const valuesSql = sql.join(valuesSqlList); const ignoreSql = ignore ? sql` ignore` : undefined; const onConflictSql = onConflict ? sql` on duplicate key ${onConflict}` : undefined; return { sql: sql`insert${ignoreSql} into ${table} ${insertOrder} ${valuesSql}${onConflictSql}`, generatedIds: generatedIdsResponse, }; } sqlToQuery(sql: SQL, invokeSource?: 'indexes' | undefined): QueryWithTypings { return sql.toQuery({ casing: this.casing, escapeName: this.escapeName, escapeParam: this.escapeParam, escapeString: this.escapeString, invokeSource, }); } buildRelationalQuery({ fullSchema, schema, tableNamesMap, table, tableConfig, queryConfig: config, tableAlias, nestedQueryRelation, joinOn, }: { fullSchema: Record; schema: TablesRelationalConfig; tableNamesMap: Record; table: MySqlTable; tableConfig: TableRelationalConfig; queryConfig: true | DBQueryConfig<'many', true>; tableAlias: string; nestedQueryRelation?: Relation; joinOn?: SQL; }): BuildRelationalQueryResult { let selection: BuildRelationalQueryResult['selection'] = []; let limit, offset, orderBy: MySqlSelectConfig['orderBy'], where; const joins: MySqlSelectJoinConfig[] = []; if (config === true) { const selectionEntries = Object.entries(tableConfig.columns); selection = selectionEntries.map(( [key, value], ) => ({ dbKey: value.name, tsKey: key, field: aliasedTableColumn(value as MySqlColumn, tableAlias), relationTableTsKey: undefined, isJson: false, selection: [], })); } else { const aliasedColumns = Object.fromEntries( Object.entries(tableConfig.columns).map(([key, value]) => [key, aliasedTableColumn(value, tableAlias)]), ); if (config.where) { const whereSql = typeof config.where === 'function' ? config.where(aliasedColumns, getOperators()) : config.where; where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); } const fieldsSelection: { tsKey: string; value: MySqlColumn | SQL.Aliased }[] = []; let selectedColumns: string[] = []; // Figure out which columns to select if (config.columns) { let isIncludeMode = false; for (const [field, value] of Object.entries(config.columns)) { if (value === undefined) { continue; } if (field in tableConfig.columns) { if (!isIncludeMode && value === true) { isIncludeMode = true; } selectedColumns.push(field); } } if (selectedColumns.length > 0) { selectedColumns = isIncludeMode ? selectedColumns.filter((c) => config.columns?.[c] === true) : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); } } else { // Select all columns if selection is not specified selectedColumns = Object.keys(tableConfig.columns); } for (const field of selectedColumns) { const column = tableConfig.columns[field]! as MySqlColumn; fieldsSelection.push({ tsKey: field, value: column }); } let selectedRelations: { tsKey: string; queryConfig: true | DBQueryConfig<'many', false>; relation: Relation; }[] = []; // Figure out which relations to select if (config.with) { selectedRelations = Object.entries(config.with) .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); } let extras; // Figure out which extras to select if (config.extras) { extras = typeof config.extras === 'function' ? config.extras(aliasedColumns, { sql }) : config.extras; for (const [tsKey, value] of Object.entries(extras)) { fieldsSelection.push({ tsKey, value: mapColumnsInAliasedSQLToAlias(value, tableAlias), }); } } // Transform `fieldsSelection` into `selection` // `fieldsSelection` shouldn't be used after this point for (const { tsKey, value } of fieldsSelection) { selection.push({ dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, tsKey, field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, relationTableTsKey: undefined, isJson: false, selection: [], }); } let orderByOrig = typeof config.orderBy === 'function' ? config.orderBy(aliasedColumns, getOrderByOperators()) : config.orderBy ?? []; if (!Array.isArray(orderByOrig)) { orderByOrig = [orderByOrig]; } orderBy = orderByOrig.map((orderByValue) => { if (is(orderByValue, Column)) { return aliasedTableColumn(orderByValue, tableAlias) as MySqlColumn; } return mapColumnsInSQLToAlias(orderByValue, tableAlias); }); limit = config.limit; offset = config.offset; // Process all relations for ( const { tsKey: selectedRelationTsKey, queryConfig: selectedRelationConfigValue, relation, } of selectedRelations ) { const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); const relationTableName = getTableUniqueName(relation.referencedTable); const relationTableTsName = tableNamesMap[relationTableName]!; const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; const joinOn = and( ...normalizedRelation.fields.map((field, i) => eq( aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), aliasedTableColumn(field, tableAlias), ) ), ); const builtRelation = this.buildRelationalQuery({ fullSchema, schema, tableNamesMap, table: fullSchema[relationTableTsName] as MySqlTable, tableConfig: schema[relationTableTsName]!, queryConfig: is(relation, One) ? (selectedRelationConfigValue === true ? { limit: 1 } : { ...selectedRelationConfigValue, limit: 1 }) : selectedRelationConfigValue, tableAlias: relationTableAlias, joinOn, nestedQueryRelation: relation, }); const field = sql`${sql.identifier(relationTableAlias)}.${sql.identifier('data')}`.as(selectedRelationTsKey); joins.push({ on: sql`true`, table: new Subquery(builtRelation.sql as SQL, {}, relationTableAlias), alias: relationTableAlias, joinType: 'left', lateral: true, }); selection.push({ dbKey: selectedRelationTsKey, tsKey: selectedRelationTsKey, field, relationTableTsKey: relationTableTsName, isJson: true, selection: builtRelation.selection, }); } } if (selection.length === 0) { throw new DrizzleError({ message: `No fields selected for table "${tableConfig.tsName}" ("${tableAlias}")` }); } let result; where = and(joinOn, where); if (nestedQueryRelation) { let field = sql`json_array(${ sql.join( selection.map(({ field, tsKey, isJson }) => isJson ? sql`${sql.identifier(`${tableAlias}_${tsKey}`)}.${sql.identifier('data')}` : is(field, SQL.Aliased) ? field.sql : field ), sql`, `, ) })`; if (is(nestedQueryRelation, Many)) { field = sql`coalesce(json_arrayagg(${field}), json_array())`; } const nestedSelection = [{ dbKey: 'data', tsKey: 'data', field: field.as('data'), isJson: true, relationTableTsKey: tableConfig.tsName, selection, }]; const needsSubquery = limit !== undefined || offset !== undefined || (orderBy?.length ?? 0) > 0; if (needsSubquery) { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: [ { path: [], field: sql.raw('*'), }, ...(((orderBy?.length ?? 0) > 0) ? [{ path: [], field: sql`row_number() over (order by ${sql.join(orderBy!, sql`, `)})`, }] : []), ], where, limit, offset, setOperators: [], }); where = undefined; limit = undefined; offset = undefined; orderBy = undefined; } else { result = aliasedTable(table, tableAlias); } result = this.buildSelectQuery({ table: is(result, MySqlTable) ? result : new Subquery(result, {}, tableAlias), fields: {}, fieldsFlat: nestedSelection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), joins, where, limit, offset, orderBy, setOperators: [], }); } else { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: selection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), joins, where, limit, offset, orderBy, setOperators: [], }); } return { tableTsKey: tableConfig.tsName, sql: result, selection, }; } buildRelationalQueryWithoutLateralSubqueries({ fullSchema, schema, tableNamesMap, table, tableConfig, queryConfig: config, tableAlias, nestedQueryRelation, joinOn, }: { fullSchema: Record; schema: TablesRelationalConfig; tableNamesMap: Record; table: MySqlTable; tableConfig: TableRelationalConfig; queryConfig: true | DBQueryConfig<'many', true>; tableAlias: string; nestedQueryRelation?: Relation; joinOn?: SQL; }): BuildRelationalQueryResult { let selection: BuildRelationalQueryResult['selection'] = []; let limit, offset, orderBy: MySqlSelectConfig['orderBy'] = [], where; if (config === true) { const selectionEntries = Object.entries(tableConfig.columns); selection = selectionEntries.map(( [key, value], ) => ({ dbKey: value.name, tsKey: key, field: aliasedTableColumn(value as MySqlColumn, tableAlias), relationTableTsKey: undefined, isJson: false, selection: [], })); } else { const aliasedColumns = Object.fromEntries( Object.entries(tableConfig.columns).map(([key, value]) => [key, aliasedTableColumn(value, tableAlias)]), ); if (config.where) { const whereSql = typeof config.where === 'function' ? config.where(aliasedColumns, getOperators()) : config.where; where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); } const fieldsSelection: { tsKey: string; value: MySqlColumn | SQL.Aliased }[] = []; let selectedColumns: string[] = []; // Figure out which columns to select if (config.columns) { let isIncludeMode = false; for (const [field, value] of Object.entries(config.columns)) { if (value === undefined) { continue; } if (field in tableConfig.columns) { if (!isIncludeMode && value === true) { isIncludeMode = true; } selectedColumns.push(field); } } if (selectedColumns.length > 0) { selectedColumns = isIncludeMode ? selectedColumns.filter((c) => config.columns?.[c] === true) : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); } } else { // Select all columns if selection is not specified selectedColumns = Object.keys(tableConfig.columns); } for (const field of selectedColumns) { const column = tableConfig.columns[field]! as MySqlColumn; fieldsSelection.push({ tsKey: field, value: column }); } let selectedRelations: { tsKey: string; queryConfig: true | DBQueryConfig<'many', false>; relation: Relation; }[] = []; // Figure out which relations to select if (config.with) { selectedRelations = Object.entries(config.with) .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); } let extras; // Figure out which extras to select if (config.extras) { extras = typeof config.extras === 'function' ? config.extras(aliasedColumns, { sql }) : config.extras; for (const [tsKey, value] of Object.entries(extras)) { fieldsSelection.push({ tsKey, value: mapColumnsInAliasedSQLToAlias(value, tableAlias), }); } } // Transform `fieldsSelection` into `selection` // `fieldsSelection` shouldn't be used after this point for (const { tsKey, value } of fieldsSelection) { selection.push({ dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, tsKey, field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, relationTableTsKey: undefined, isJson: false, selection: [], }); } let orderByOrig = typeof config.orderBy === 'function' ? config.orderBy(aliasedColumns, getOrderByOperators()) : config.orderBy ?? []; if (!Array.isArray(orderByOrig)) { orderByOrig = [orderByOrig]; } orderBy = orderByOrig.map((orderByValue) => { if (is(orderByValue, Column)) { return aliasedTableColumn(orderByValue, tableAlias) as MySqlColumn; } return mapColumnsInSQLToAlias(orderByValue, tableAlias); }); limit = config.limit; offset = config.offset; // Process all relations for ( const { tsKey: selectedRelationTsKey, queryConfig: selectedRelationConfigValue, relation, } of selectedRelations ) { const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); const relationTableName = getTableUniqueName(relation.referencedTable); const relationTableTsName = tableNamesMap[relationTableName]!; const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; const joinOn = and( ...normalizedRelation.fields.map((field, i) => eq( aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), aliasedTableColumn(field, tableAlias), ) ), ); const builtRelation = this.buildRelationalQueryWithoutLateralSubqueries({ fullSchema, schema, tableNamesMap, table: fullSchema[relationTableTsName] as MySqlTable, tableConfig: schema[relationTableTsName]!, queryConfig: is(relation, One) ? (selectedRelationConfigValue === true ? { limit: 1 } : { ...selectedRelationConfigValue, limit: 1 }) : selectedRelationConfigValue, tableAlias: relationTableAlias, joinOn, nestedQueryRelation: relation, }); let fieldSql = sql`(${builtRelation.sql})`; if (is(relation, Many)) { fieldSql = sql`coalesce(${fieldSql}, json_array())`; } const field = fieldSql.as(selectedRelationTsKey); selection.push({ dbKey: selectedRelationTsKey, tsKey: selectedRelationTsKey, field, relationTableTsKey: relationTableTsName, isJson: true, selection: builtRelation.selection, }); } } if (selection.length === 0) { throw new DrizzleError({ message: `No fields selected for table "${tableConfig.tsName}" ("${tableAlias}"). You need to have at least one item in "columns", "with" or "extras". If you need to select all columns, omit the "columns" key or set it to undefined.`, }); } let result; where = and(joinOn, where); if (nestedQueryRelation) { let field = sql`json_array(${ sql.join( selection.map(({ field }) => is(field, MySqlColumn) ? sql.identifier(this.casing.getColumnCasing(field)) : is(field, SQL.Aliased) ? field.sql : field ), sql`, `, ) })`; if (is(nestedQueryRelation, Many)) { field = sql`json_arrayagg(${field})`; } const nestedSelection = [{ dbKey: 'data', tsKey: 'data', field, isJson: true, relationTableTsKey: tableConfig.tsName, selection, }]; const needsSubquery = limit !== undefined || offset !== undefined || orderBy.length > 0; if (needsSubquery) { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: [ { path: [], field: sql.raw('*'), }, ...(orderBy.length > 0) ? [{ path: [], field: sql`row_number() over (order by ${sql.join(orderBy, sql`, `)})`, }] : [], ], where, limit, offset, setOperators: [], }); where = undefined; limit = undefined; offset = undefined; orderBy = undefined; } else { result = aliasedTable(table, tableAlias); } result = this.buildSelectQuery({ table: is(result, MySqlTable) ? result : new Subquery(result, {}, tableAlias), fields: {}, fieldsFlat: nestedSelection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), where, limit, offset, orderBy, setOperators: [], }); } else { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: selection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), where, limit, offset, orderBy, setOperators: [], }); } return { tableTsKey: tableConfig.tsName, sql: result, selection, }; } } ================================================ FILE: drizzle-orm/src/mysql-core/expressions.ts ================================================ import { bindIfParam } from '~/sql/expressions/index.ts'; import type { Placeholder, SQL, SQLChunk, SQLWrapper } from '~/sql/sql.ts'; import { sql } from '~/sql/sql.ts'; import type { MySqlColumn } from './columns/index.ts'; export * from '~/sql/expressions/index.ts'; export function concat(column: MySqlColumn | SQL.Aliased, value: string | Placeholder | SQLWrapper): SQL { return sql`${column} || ${bindIfParam(value, column)}`; } export function substring( column: MySqlColumn | SQL.Aliased, { from, for: _for }: { from?: number | Placeholder | SQLWrapper; for?: number | Placeholder | SQLWrapper }, ): SQL { const chunks: SQLChunk[] = [sql`substring(`, column]; if (from !== undefined) { chunks.push(sql` from `, bindIfParam(from, column)); } if (_for !== undefined) { chunks.push(sql` for `, bindIfParam(_for, column)); } chunks.push(sql`)`); return sql.join(chunks); } ================================================ FILE: drizzle-orm/src/mysql-core/foreign-keys.ts ================================================ import { entityKind } from '~/entity.ts'; import { TableName } from '~/table.utils.ts'; import type { AnyMySqlColumn, MySqlColumn } from './columns/index.ts'; import type { MySqlTable } from './table.ts'; export type UpdateDeleteAction = 'cascade' | 'restrict' | 'no action' | 'set null' | 'set default'; export type Reference = () => { readonly name?: string; readonly columns: MySqlColumn[]; readonly foreignTable: MySqlTable; readonly foreignColumns: MySqlColumn[]; }; export class ForeignKeyBuilder { static readonly [entityKind]: string = 'MySqlForeignKeyBuilder'; /** @internal */ reference: Reference; /** @internal */ _onUpdate: UpdateDeleteAction | undefined; /** @internal */ _onDelete: UpdateDeleteAction | undefined; constructor( config: () => { name?: string; columns: MySqlColumn[]; foreignColumns: MySqlColumn[]; }, actions?: { onUpdate?: UpdateDeleteAction; onDelete?: UpdateDeleteAction; } | undefined, ) { this.reference = () => { const { name, columns, foreignColumns } = config(); return { name, columns, foreignTable: foreignColumns[0]!.table as MySqlTable, foreignColumns }; }; if (actions) { this._onUpdate = actions.onUpdate; this._onDelete = actions.onDelete; } } onUpdate(action: UpdateDeleteAction): this { this._onUpdate = action; return this; } onDelete(action: UpdateDeleteAction): this { this._onDelete = action; return this; } /** @internal */ build(table: MySqlTable): ForeignKey { return new ForeignKey(table, this); } } export type AnyForeignKeyBuilder = ForeignKeyBuilder; export class ForeignKey { static readonly [entityKind]: string = 'MySqlForeignKey'; readonly reference: Reference; readonly onUpdate: UpdateDeleteAction | undefined; readonly onDelete: UpdateDeleteAction | undefined; constructor(readonly table: MySqlTable, builder: ForeignKeyBuilder) { this.reference = builder.reference; this.onUpdate = builder._onUpdate; this.onDelete = builder._onDelete; } getName(): string { const { name, columns, foreignColumns } = this.reference(); const columnNames = columns.map((column) => column.name); const foreignColumnNames = foreignColumns.map((column) => column.name); const chunks = [ this.table[TableName], ...columnNames, foreignColumns[0]!.table[TableName], ...foreignColumnNames, ]; return name ?? `${chunks.join('_')}_fk`; } } type ColumnsWithTable< TTableName extends string, TColumns extends MySqlColumn[], > = { [Key in keyof TColumns]: AnyMySqlColumn<{ tableName: TTableName }> }; export type GetColumnsTable = ( TColumns extends MySqlColumn ? TColumns : TColumns extends MySqlColumn[] ? TColumns[number] : never ) extends AnyMySqlColumn<{ tableName: infer TTableName extends string }> ? TTableName : never; export function foreignKey< TTableName extends string, TForeignTableName extends string, TColumns extends [AnyMySqlColumn<{ tableName: TTableName }>, ...AnyMySqlColumn<{ tableName: TTableName }>[]], >( config: { name?: string; columns: TColumns; foreignColumns: ColumnsWithTable; }, ): ForeignKeyBuilder { function mappedConfig() { const { name, columns, foreignColumns } = config; return { name, columns, foreignColumns, }; } return new ForeignKeyBuilder(mappedConfig); } ================================================ FILE: drizzle-orm/src/mysql-core/index.ts ================================================ export * from './alias.ts'; export * from './checks.ts'; export * from './columns/index.ts'; export * from './db.ts'; export * from './dialect.ts'; export * from './foreign-keys.ts'; export * from './indexes.ts'; export * from './primary-keys.ts'; export * from './query-builders/index.ts'; export * from './schema.ts'; export * from './session.ts'; export * from './subquery.ts'; export * from './table.ts'; export * from './unique-constraint.ts'; export * from './utils.ts'; export * from './view-common.ts'; export * from './view.ts'; ================================================ FILE: drizzle-orm/src/mysql-core/indexes.ts ================================================ import { entityKind } from '~/entity.ts'; import type { SQL } from '~/sql/sql.ts'; import type { AnyMySqlColumn, MySqlColumn } from './columns/index.ts'; import type { MySqlTable } from './table.ts'; interface IndexConfig { name: string; columns: IndexColumn[]; /** * If true, the index will be created as `create unique index` instead of `create index`. */ unique?: boolean; /** * If set, the index will be created as `create index ... using { 'btree' | 'hash' }`. */ using?: 'btree' | 'hash'; /** * If set, the index will be created as `create index ... algorithm { 'default' | 'inplace' | 'copy' }`. */ algorithm?: 'default' | 'inplace' | 'copy'; /** * If set, adds locks to the index creation. */ lock?: 'default' | 'none' | 'shared' | 'exclusive'; } export type IndexColumn = MySqlColumn | SQL; export class IndexBuilderOn { static readonly [entityKind]: string = 'MySqlIndexBuilderOn'; constructor(private name: string, private unique: boolean) {} on(...columns: [IndexColumn, ...IndexColumn[]]): IndexBuilder { return new IndexBuilder(this.name, columns, this.unique); } } export interface AnyIndexBuilder { build(table: MySqlTable): Index; } // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface IndexBuilder extends AnyIndexBuilder {} export class IndexBuilder implements AnyIndexBuilder { static readonly [entityKind]: string = 'MySqlIndexBuilder'; /** @internal */ config: IndexConfig; constructor(name: string, columns: IndexColumn[], unique: boolean) { this.config = { name, columns, unique, }; } using(using: IndexConfig['using']): this { this.config.using = using; return this; } algorithm(algorithm: IndexConfig['algorithm']): this { this.config.algorithm = algorithm; return this; } lock(lock: IndexConfig['lock']): this { this.config.lock = lock; return this; } /** @internal */ build(table: MySqlTable): Index { return new Index(this.config, table); } } export class Index { static readonly [entityKind]: string = 'MySqlIndex'; readonly config: IndexConfig & { table: MySqlTable }; constructor(config: IndexConfig, table: MySqlTable) { this.config = { ...config, table }; } } export type GetColumnsTableName = TColumns extends AnyMySqlColumn<{ tableName: infer TTableName extends string }> | AnyMySqlColumn< { tableName: infer TTableName extends string } >[] ? TTableName : never; export function index(name: string): IndexBuilderOn { return new IndexBuilderOn(name, false); } export function uniqueIndex(name: string): IndexBuilderOn { return new IndexBuilderOn(name, true); } ================================================ FILE: drizzle-orm/src/mysql-core/primary-keys.ts ================================================ import { entityKind } from '~/entity.ts'; import type { AnyMySqlColumn, MySqlColumn } from './columns/index.ts'; import { MySqlTable } from './table.ts'; export function primaryKey< TTableName extends string, TColumn extends AnyMySqlColumn<{ tableName: TTableName }>, TColumns extends AnyMySqlColumn<{ tableName: TTableName }>[], >(config: { name?: string; columns: [TColumn, ...TColumns] }): PrimaryKeyBuilder; /** * @deprecated: Please use primaryKey({ columns: [] }) instead of this function * @param columns */ export function primaryKey< TTableName extends string, TColumns extends AnyMySqlColumn<{ tableName: TTableName }>[], >(...columns: TColumns): PrimaryKeyBuilder; export function primaryKey(...config: any) { if (config[0].columns) { return new PrimaryKeyBuilder(config[0].columns, config[0].name); } return new PrimaryKeyBuilder(config); } export class PrimaryKeyBuilder { static readonly [entityKind]: string = 'MySqlPrimaryKeyBuilder'; /** @internal */ columns: MySqlColumn[]; /** @internal */ name?: string; constructor( columns: MySqlColumn[], name?: string, ) { this.columns = columns; this.name = name; } /** @internal */ build(table: MySqlTable): PrimaryKey { return new PrimaryKey(table, this.columns, this.name); } } export class PrimaryKey { static readonly [entityKind]: string = 'MySqlPrimaryKey'; readonly columns: MySqlColumn[]; readonly name?: string; constructor(readonly table: MySqlTable, columns: MySqlColumn[], name?: string) { this.columns = columns; this.name = name; } getName(): string { return this.name ?? `${this.table[MySqlTable.Symbol.Name]}_${this.columns.map((column) => column.name).join('_')}_pk`; } } ================================================ FILE: drizzle-orm/src/mysql-core/query-builders/count.ts ================================================ import { entityKind } from '~/entity.ts'; import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { MySqlSession } from '../session.ts'; import type { MySqlTable } from '../table.ts'; import type { MySqlViewBase } from '../view-base.ts'; export class MySqlCountBuilder< TSession extends MySqlSession, > extends SQL implements Promise, SQLWrapper { private sql: SQL; static override readonly [entityKind] = 'MySqlCountBuilder'; [Symbol.toStringTag] = 'MySqlCountBuilder'; private session: TSession; private static buildEmbeddedCount( source: MySqlTable | MySqlViewBase | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`(select count(*) from ${source}${sql.raw(' where ').if(filters)}${filters})`; } private static buildCount( source: MySqlTable | MySqlViewBase | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`select count(*) as count from ${source}${sql.raw(' where ').if(filters)}${filters}`; } constructor( readonly params: { source: MySqlTable | MySqlViewBase | SQL | SQLWrapper; filters?: SQL; session: TSession; }, ) { super(MySqlCountBuilder.buildEmbeddedCount(params.source, params.filters).queryChunks); this.mapWith(Number); this.session = params.session; this.sql = MySqlCountBuilder.buildCount( params.source, params.filters, ); } then( onfulfilled?: ((value: number) => TResult1 | PromiseLike) | null | undefined, onrejected?: ((reason: any) => TResult2 | PromiseLike) | null | undefined, ): Promise { return Promise.resolve(this.session.count(this.sql)) .then( onfulfilled, onrejected, ); } catch( onRejected?: ((reason: any) => never | PromiseLike) | null | undefined, ): Promise { return this.then(undefined, onRejected); } finally(onFinally?: (() => void) | null | undefined): Promise { return this.then( (value) => { onFinally?.(); return value; }, (reason) => { onFinally?.(); throw reason; }, ); } } ================================================ FILE: drizzle-orm/src/mysql-core/query-builders/delete.ts ================================================ import { entityKind } from '~/entity.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { AnyMySqlQueryResultHKT, MySqlPreparedQueryConfig, MySqlQueryResultHKT, MySqlQueryResultKind, MySqlSession, PreparedQueryHKTBase, PreparedQueryKind, } from '~/mysql-core/session.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import { QueryPromise } from '~/query-promise.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import type { ValueOrArray } from '~/utils.ts'; import type { MySqlColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import type { SelectedFieldsOrdered } from './select.types.ts'; export type MySqlDeleteWithout< T extends AnyMySqlDeleteBase, TDynamic extends boolean, K extends keyof T & string, > = TDynamic extends true ? T : Omit< MySqlDeleteBase< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type MySqlDelete< TTable extends MySqlTable = MySqlTable, TQueryResult extends MySqlQueryResultHKT = AnyMySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, > = MySqlDeleteBase; export interface MySqlDeleteConfig { where?: SQL | undefined; limit?: number | Placeholder; orderBy?: (MySqlColumn | SQL | SQL.Aliased)[]; table: MySqlTable; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } export type MySqlDeletePrepare = PreparedQueryKind< T['_']['preparedQueryHKT'], MySqlPreparedQueryConfig & { execute: MySqlQueryResultKind; iterator: never; }, true >; type MySqlDeleteDynamic = MySqlDelete< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'] >; type AnyMySqlDeleteBase = MySqlDeleteBase; export interface MySqlDeleteBase< TTable extends MySqlTable, TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends QueryPromise> { readonly _: { readonly table: TTable; readonly queryResult: TQueryResult; readonly preparedQueryHKT: TPreparedQueryHKT; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; }; } export class MySqlDeleteBase< TTable extends MySqlTable, TQueryResult extends MySqlQueryResultHKT, // eslint-disable-next-line @typescript-eslint/no-unused-vars TPreparedQueryHKT extends PreparedQueryHKTBase, TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise> implements SQLWrapper { static override readonly [entityKind]: string = 'MySqlDelete'; private config: MySqlDeleteConfig; constructor( private table: TTable, private session: MySqlSession, private dialect: MySqlDialect, withList?: Subquery[], ) { super(); this.config = { table, withList }; } /** * Adds a `where` clause to the query. * * Calling this method will delete only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param where the `where` clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be deleted. * * ```ts * // Delete all cars with green color * db.delete(cars).where(eq(cars.color, 'green')); * // or * db.delete(cars).where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Delete all BMW cars with a green color * db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Delete all cars with the green or blue color * db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where(where: SQL | undefined): MySqlDeleteWithout { this.config.where = where; return this as any; } orderBy( builder: (deleteTable: TTable) => ValueOrArray, ): MySqlDeleteWithout; orderBy(...columns: (MySqlColumn | SQL | SQL.Aliased)[]): MySqlDeleteWithout; orderBy( ...columns: | [(deleteTable: TTable) => ValueOrArray] | (MySqlColumn | SQL | SQL.Aliased)[] ): MySqlDeleteWithout { if (typeof columns[0] === 'function') { const orderBy = columns[0]( new Proxy( this.config.table[Table.Symbol.Columns], new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as any, ); const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; this.config.orderBy = orderByArray; } else { const orderByArray = columns as (MySqlColumn | SQL | SQL.Aliased)[]; this.config.orderBy = orderByArray; } return this as any; } limit(limit: number | Placeholder): MySqlDeleteWithout { this.config.limit = limit; return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildDeleteQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } prepare(): MySqlDeletePrepare { return this.session.prepareQuery( this.dialect.sqlToQuery(this.getSQL()), this.config.returning, undefined, undefined, undefined, { type: 'delete', tables: extractUsedTable(this.config.table), }, ) as MySqlDeletePrepare; } override execute: ReturnType['execute'] = (placeholderValues) => { return this.prepare().execute(placeholderValues); }; private createIterator = (): ReturnType['iterator'] => { const self = this; return async function*(placeholderValues) { yield* self.prepare().iterator(placeholderValues); }; }; iterator = this.createIterator(); $dynamic(): MySqlDeleteDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/mysql-core/query-builders/index.ts ================================================ export * from './delete.ts'; export * from './insert.ts'; export * from './query-builder.ts'; export * from './select.ts'; export * from './select.types.ts'; export * from './update.ts'; ================================================ FILE: drizzle-orm/src/mysql-core/query-builders/insert.ts ================================================ import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { AnyMySqlQueryResultHKT, MySqlPreparedQueryConfig, MySqlQueryResultHKT, MySqlQueryResultKind, MySqlSession, PreparedQueryHKTBase, PreparedQueryKind, } from '~/mysql-core/session.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; import { Param, SQL, sql } from '~/sql/sql.ts'; import type { InferModelFromColumns } from '~/table.ts'; import { Columns, Table } from '~/table.ts'; import { haveSameKeys, mapUpdateSet } from '~/utils.ts'; import type { AnyMySqlColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import { QueryBuilder } from './query-builder.ts'; import type { SelectedFieldsOrdered } from './select.types.ts'; import type { MySqlUpdateSetSource } from './update.ts'; export interface MySqlInsertConfig { table: TTable; values: Record[] | MySqlInsertSelectQueryBuilder | SQL; ignore: boolean; onConflict?: SQL; returning?: SelectedFieldsOrdered; select?: boolean; } export type AnyMySqlInsertConfig = MySqlInsertConfig; export type MySqlInsertValue = & { [Key in keyof TTable['$inferInsert']]: TTable['$inferInsert'][Key] | SQL | Placeholder; } & {}; export type MySqlInsertSelectQueryBuilder = TypedQueryBuilder< { [K in keyof TTable['$inferInsert']]: AnyMySqlColumn | SQL | SQL.Aliased | TTable['$inferInsert'][K] } >; export class MySqlInsertBuilder< TTable extends MySqlTable, TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, > { static readonly [entityKind]: string = 'MySqlInsertBuilder'; private shouldIgnore = false; constructor( private table: TTable, private session: MySqlSession, private dialect: MySqlDialect, ) {} ignore(): this { this.shouldIgnore = true; return this; } values(value: MySqlInsertValue): MySqlInsertBase; values(values: MySqlInsertValue[]): MySqlInsertBase; values( values: MySqlInsertValue | MySqlInsertValue[], ): MySqlInsertBase { values = Array.isArray(values) ? values : [values]; if (values.length === 0) { throw new Error('values() must be called with at least one value'); } const mappedValues = values.map((entry) => { const result: Record = {}; const cols = this.table[Table.Symbol.Columns]; for (const colKey of Object.keys(entry)) { const colValue = entry[colKey as keyof typeof entry]; result[colKey] = is(colValue, SQL) ? colValue : new Param(colValue, cols[colKey]); } return result; }); return new MySqlInsertBase(this.table, mappedValues, this.shouldIgnore, this.session, this.dialect); } select( selectQuery: (qb: QueryBuilder) => MySqlInsertSelectQueryBuilder, ): MySqlInsertBase; select(selectQuery: (qb: QueryBuilder) => SQL): MySqlInsertBase; select(selectQuery: SQL): MySqlInsertBase; select(selectQuery: MySqlInsertSelectQueryBuilder): MySqlInsertBase; select( selectQuery: | SQL | MySqlInsertSelectQueryBuilder | ((qb: QueryBuilder) => MySqlInsertSelectQueryBuilder | SQL), ): MySqlInsertBase { const select = typeof selectQuery === 'function' ? selectQuery(new QueryBuilder()) : selectQuery; if ( !is(select, SQL) && !haveSameKeys(this.table[Columns], select._.selectedFields) ) { throw new Error( 'Insert select error: selected fields are not the same or are in a different order compared to the table definition', ); } return new MySqlInsertBase(this.table, select, this.shouldIgnore, this.session, this.dialect, true); } } export type MySqlInsertWithout = TDynamic extends true ? T : Omit< MySqlInsertBase< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'], T['_']['returning'], TDynamic, T['_']['excludedMethods'] | '$returning' >, T['_']['excludedMethods'] | K >; export type MySqlInsertDynamic = MySqlInsert< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'], T['_']['returning'] >; export type MySqlInsertPrepare< T extends AnyMySqlInsert, TReturning extends Record | undefined = undefined, > = PreparedQueryKind< T['_']['preparedQueryHKT'], MySqlPreparedQueryConfig & { execute: TReturning extends undefined ? MySqlQueryResultKind : TReturning[]; iterator: never; }, true >; export type MySqlInsertOnDuplicateKeyUpdateConfig = { set: MySqlUpdateSetSource; }; export type MySqlInsert< TTable extends MySqlTable = MySqlTable, TQueryResult extends MySqlQueryResultHKT = AnyMySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TReturning extends Record | undefined = Record | undefined, > = MySqlInsertBase; export type MySqlInsertReturning< T extends AnyMySqlInsert, TDynamic extends boolean, > = MySqlInsertBase< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'], InferModelFromColumns>, TDynamic, T['_']['excludedMethods'] | '$returning' >; export type AnyMySqlInsert = MySqlInsertBase; export interface MySqlInsertBase< TTable extends MySqlTable, TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]>, RunnableQuery : TReturning[], 'mysql'>, SQLWrapper { readonly _: { readonly dialect: 'mysql'; readonly table: TTable; readonly queryResult: TQueryResult; readonly preparedQueryHKT: TPreparedQueryHKT; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly returning: TReturning; readonly result: TReturning extends undefined ? MySqlQueryResultKind : TReturning[]; }; } export type PrimaryKeyKeys> = { [K in keyof T]: T[K]['_']['isPrimaryKey'] extends true ? T[K]['_']['isAutoincrement'] extends true ? K : T[K]['_']['hasRuntimeDefault'] extends true ? T[K]['_']['isPrimaryKey'] extends true ? K : never : never : T[K]['_']['hasRuntimeDefault'] extends true ? T[K]['_']['isPrimaryKey'] extends true ? K : never : never; }[keyof T]; export type GetPrimarySerialOrDefaultKeys> = { [K in PrimaryKeyKeys]: T[K]; }; export class MySqlInsertBase< TTable extends MySqlTable, TQueryResult extends MySqlQueryResultHKT, // eslint-disable-next-line @typescript-eslint/no-unused-vars TPreparedQueryHKT extends PreparedQueryHKTBase, // eslint-disable-next-line @typescript-eslint/no-unused-vars TReturning extends Record | undefined = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]> implements RunnableQuery : TReturning[], 'mysql'>, SQLWrapper { static override readonly [entityKind]: string = 'MySqlInsert'; declare protected $table: TTable; private config: MySqlInsertConfig; protected cacheConfig?: WithCacheConfig; constructor( table: TTable, values: MySqlInsertConfig['values'], ignore: boolean, private session: MySqlSession, private dialect: MySqlDialect, select?: boolean, ) { super(); this.config = { table, values: values as any, select, ignore }; } /** * Adds an `on duplicate key update` clause to the query. * * Calling this method will update the row if any unique index conflicts. MySQL will automatically determine the conflict target based on the primary key and unique indexes. * * See docs: {@link https://orm.drizzle.team/docs/insert#on-duplicate-key-update} * * @param config The `set` clause * * @example * ```ts * await db.insert(cars) * .values({ id: 1, brand: 'BMW'}) * .onDuplicateKeyUpdate({ set: { brand: 'Porsche' }}); * ``` * * While MySQL does not directly support doing nothing on conflict, you can perform a no-op by setting any column's value to itself and achieve the same effect: * * ```ts * import { sql } from 'drizzle-orm'; * * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onDuplicateKeyUpdate({ set: { id: sql`id` } }); * ``` */ onDuplicateKeyUpdate( config: MySqlInsertOnDuplicateKeyUpdateConfig, ): MySqlInsertWithout { const setSql = this.dialect.buildUpdateSet(this.config.table, mapUpdateSet(this.config.table, config.set)); this.config.onConflict = sql`update ${setSql}`; return this as any; } $returningId(): MySqlInsertWithout< MySqlInsertReturning, TDynamic, '$returningId' > { const returning: SelectedFieldsOrdered = []; for (const [key, value] of Object.entries(this.config.table[Table.Symbol.Columns])) { if (value.primary) { returning.push({ field: value, path: [key] }); } } this.config.returning = returning; return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildInsertQuery(this.config).sql; } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } prepare(): MySqlInsertPrepare { const { sql, generatedIds } = this.dialect.buildInsertQuery(this.config); return this.session.prepareQuery( this.dialect.sqlToQuery(sql), undefined, undefined, generatedIds, this.config.returning, { type: 'insert', tables: extractUsedTable(this.config.table), }, this.cacheConfig, ) as MySqlInsertPrepare; } override execute: ReturnType['execute'] = (placeholderValues) => { return this.prepare().execute(placeholderValues); }; private createIterator = (): ReturnType['iterator'] => { const self = this; return async function*(placeholderValues) { yield* self.prepare().iterator(placeholderValues); }; }; iterator = this.createIterator(); $dynamic(): MySqlInsertDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/mysql-core/query-builders/query-builder.ts ================================================ import { entityKind, is } from '~/entity.ts'; import type { MySqlDialectConfig } from '~/mysql-core/dialect.ts'; import { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { WithBuilder } from '~/mysql-core/subquery.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import { WithSubquery } from '~/subquery.ts'; import { MySqlSelectBuilder } from './select.ts'; import type { SelectedFields } from './select.types.ts'; export class QueryBuilder { static readonly [entityKind]: string = 'MySqlQueryBuilder'; private dialect: MySqlDialect | undefined; private dialectConfig: MySqlDialectConfig | undefined; constructor(dialect?: MySqlDialect | MySqlDialectConfig) { this.dialect = is(dialect, MySqlDialect) ? dialect : undefined; this.dialectConfig = is(dialect, MySqlDialect) ? undefined : dialect; } $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const queryBuilder = this; const as = ( qb: | TypedQueryBuilder | SQL | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), ) => { if (typeof qb === 'function') { qb = qb(queryBuilder); } return new Proxy( new WithSubquery( qb.getSQL(), selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, alias, true, ), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as any; }; return { as }; }; with(...queries: WithSubquery[]) { const self = this; function select(): MySqlSelectBuilder; function select( fields: TSelection, ): MySqlSelectBuilder; function select( fields?: TSelection, ): MySqlSelectBuilder { return new MySqlSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), withList: queries, }); } function selectDistinct(): MySqlSelectBuilder; function selectDistinct( fields: TSelection, ): MySqlSelectBuilder; function selectDistinct( fields?: TSelection, ): MySqlSelectBuilder { return new MySqlSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), withList: queries, distinct: true, }); } return { select, selectDistinct }; } select(): MySqlSelectBuilder; select(fields: TSelection): MySqlSelectBuilder; select( fields?: TSelection, ): MySqlSelectBuilder { return new MySqlSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect() }); } selectDistinct(): MySqlSelectBuilder; selectDistinct( fields: TSelection, ): MySqlSelectBuilder; selectDistinct( fields?: TSelection, ): MySqlSelectBuilder { return new MySqlSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect(), distinct: true, }); } // Lazy load dialect to avoid circular dependency private getDialect() { if (!this.dialect) { this.dialect = new MySqlDialect(this.dialectConfig); } return this.dialect; } } ================================================ FILE: drizzle-orm/src/mysql-core/query-builders/query.ts ================================================ import { entityKind } from '~/entity.ts'; import { QueryPromise } from '~/query-promise.ts'; import { type BuildQueryResult, type BuildRelationalQueryResult, type DBQueryConfig, mapRelationalRow, type TableRelationalConfig, type TablesRelationalConfig, } from '~/relations.ts'; import type { Query, QueryWithTypings, SQL } from '~/sql/sql.ts'; import type { KnownKeysOnly } from '~/utils.ts'; import type { MySqlDialect } from '../dialect.ts'; import type { Mode, MySqlPreparedQueryConfig, MySqlSession, PreparedQueryHKTBase, PreparedQueryKind, } from '../session.ts'; import type { MySqlTable } from '../table.ts'; export class RelationalQueryBuilder< TPreparedQueryHKT extends PreparedQueryHKTBase, TSchema extends TablesRelationalConfig, TFields extends TableRelationalConfig, > { static readonly [entityKind]: string = 'MySqlRelationalQueryBuilder'; constructor( private fullSchema: Record, private schema: TSchema, private tableNamesMap: Record, private table: MySqlTable, private tableConfig: TableRelationalConfig, private dialect: MySqlDialect, private session: MySqlSession, private mode: Mode, ) {} findMany>( config?: KnownKeysOnly>, ): MySqlRelationalQuery[]> { return new MySqlRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? (config as DBQueryConfig<'many', true>) : {}, 'many', this.mode, ); } findFirst, 'limit'>>( config?: KnownKeysOnly, 'limit'>>, ): MySqlRelationalQuery | undefined> { return new MySqlRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? { ...(config as DBQueryConfig<'many', true> | undefined), limit: 1 } : { limit: 1 }, 'first', this.mode, ); } } export class MySqlRelationalQuery< TPreparedQueryHKT extends PreparedQueryHKTBase, TResult, > extends QueryPromise { static override readonly [entityKind]: string = 'MySqlRelationalQuery'; declare protected $brand: 'MySqlRelationalQuery'; constructor( private fullSchema: Record, private schema: TablesRelationalConfig, private tableNamesMap: Record, private table: MySqlTable, private tableConfig: TableRelationalConfig, private dialect: MySqlDialect, private session: MySqlSession, private config: DBQueryConfig<'many', true> | true, private queryMode: 'many' | 'first', private mode?: Mode, ) { super(); } prepare() { const { query, builtQuery } = this._toSQL(); return this.session.prepareQuery( builtQuery, undefined, (rawRows) => { const rows = rawRows.map((row) => mapRelationalRow(this.schema, this.tableConfig, row, query.selection)); if (this.queryMode === 'first') { return rows[0] as TResult; } return rows as TResult; }, ) as PreparedQueryKind; } private _getQuery() { const query = this.mode === 'planetscale' ? this.dialect.buildRelationalQueryWithoutLateralSubqueries({ fullSchema: this.fullSchema, schema: this.schema, tableNamesMap: this.tableNamesMap, table: this.table, tableConfig: this.tableConfig, queryConfig: this.config, tableAlias: this.tableConfig.tsName, }) : this.dialect.buildRelationalQuery({ fullSchema: this.fullSchema, schema: this.schema, tableNamesMap: this.tableNamesMap, table: this.table, tableConfig: this.tableConfig, queryConfig: this.config, tableAlias: this.tableConfig.tsName, }); return query; } private _toSQL(): { query: BuildRelationalQueryResult; builtQuery: QueryWithTypings } { const query = this._getQuery(); const builtQuery = this.dialect.sqlToQuery(query.sql as SQL); return { builtQuery, query }; } /** @internal */ getSQL(): SQL { return this._getQuery().sql as SQL; } toSQL(): Query { return this._toSQL().builtQuery; } override execute(): Promise { return this.prepare().execute(); } } ================================================ FILE: drizzle-orm/src/mysql-core/query-builders/select.ts ================================================ import type { CacheConfig, WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import type { MySqlColumn } from '~/mysql-core/columns/index.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { MySqlPreparedQueryConfig, MySqlSession, PreparedQueryHKTBase } from '~/mysql-core/session.ts'; import type { SubqueryWithSelection } from '~/mysql-core/subquery.ts'; import { MySqlTable } from '~/mysql-core/table.ts'; import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { BuildSubquerySelection, GetSelectTableName, GetSelectTableSelection, JoinNullability, SelectMode, SelectResult, SetOperator, } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, Placeholder, Query } from '~/sql/sql.ts'; import { SQL, View } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import type { ValueOrArray } from '~/utils.ts'; import { applyMixins, getTableColumns, getTableLikeName, haveSameKeys, orderSelectedFields } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { IndexBuilder } from '../indexes.ts'; import { convertIndexToString, extractUsedTable, toArray } from '../utils.ts'; import { MySqlViewBase } from '../view-base.ts'; import type { AnyMySqlSelect, CreateMySqlSelectFromBuilderMode, GetMySqlSetOperators, LockConfig, LockStrength, MySqlCreateSetOperatorFn, MySqlCrossJoinFn, MySqlJoinFn, MySqlJoinType, MySqlSelectConfig, MySqlSelectDynamic, MySqlSelectHKT, MySqlSelectHKTBase, MySqlSelectPrepare, MySqlSelectWithout, MySqlSetOperatorExcludedMethods, MySqlSetOperatorWithResult, SelectedFields, SetOperatorRightSelect, } from './select.types.ts'; export type IndexForHint = IndexBuilder | string; export type IndexConfig = { useIndex?: IndexForHint | IndexForHint[]; forceIndex?: IndexForHint | IndexForHint[]; ignoreIndex?: IndexForHint | IndexForHint[]; }; export class MySqlSelectBuilder< TSelection extends SelectedFields | undefined, TPreparedQueryHKT extends PreparedQueryHKTBase, TBuilderMode extends 'db' | 'qb' = 'db', > { static readonly [entityKind]: string = 'MySqlSelectBuilder'; private fields: TSelection; private session: MySqlSession | undefined; private dialect: MySqlDialect; private withList: Subquery[] = []; private distinct: boolean | undefined; constructor( config: { fields: TSelection; session: MySqlSession | undefined; dialect: MySqlDialect; withList?: Subquery[]; distinct?: boolean; }, ) { this.fields = config.fields; this.session = config.session; this.dialect = config.dialect; if (config.withList) { this.withList = config.withList; } this.distinct = config.distinct; } from( source: TFrom, onIndex?: TFrom extends MySqlTable ? IndexConfig : 'Index hint configuration is allowed only for MySqlTable and not for subqueries or views', ): CreateMySqlSelectFromBuilderMode< TBuilderMode, GetSelectTableName, TSelection extends undefined ? GetSelectTableSelection : TSelection, TSelection extends undefined ? 'single' : 'partial', TPreparedQueryHKT > { const isPartialSelect = !!this.fields; let fields: SelectedFields; if (this.fields) { fields = this.fields; } else if (is(source, Subquery)) { // This is required to use the proxy handler to get the correct field values from the subquery fields = Object.fromEntries( Object.keys(source._.selectedFields).map(( key, ) => [key, source[key as unknown as keyof typeof source] as unknown as SelectedFields[string]]), ); } else if (is(source, MySqlViewBase)) { fields = source[ViewBaseConfig].selectedFields as SelectedFields; } else if (is(source, SQL)) { fields = {}; } else { fields = getTableColumns(source); } let useIndex: string[] = []; let forceIndex: string[] = []; let ignoreIndex: string[] = []; if (is(source, MySqlTable) && onIndex && typeof onIndex !== 'string') { if (onIndex.useIndex) { useIndex = convertIndexToString(toArray(onIndex.useIndex)); } if (onIndex.forceIndex) { forceIndex = convertIndexToString(toArray(onIndex.forceIndex)); } if (onIndex.ignoreIndex) { ignoreIndex = convertIndexToString(toArray(onIndex.ignoreIndex)); } } return new MySqlSelectBase( { table: source, fields, isPartialSelect, session: this.session, dialect: this.dialect, withList: this.withList, distinct: this.distinct, useIndex, forceIndex, ignoreIndex, }, ) as any; } } export abstract class MySqlSelectQueryBuilderBase< THKT extends MySqlSelectHKTBase, TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends TypedQueryBuilder { static override readonly [entityKind]: string = 'MySqlSelectQueryBuilder'; override readonly _: { readonly hkt: THKT; readonly tableName: TTableName; readonly selection: TSelection; readonly selectMode: TSelectMode; readonly preparedQueryHKT: TPreparedQueryHKT; readonly nullabilityMap: TNullabilityMap; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TResult; readonly selectedFields: TSelectedFields; readonly config: MySqlSelectConfig; }; protected config: MySqlSelectConfig; protected joinsNotNullableMap: Record; private tableName: string | undefined; private isPartialSelect: boolean; /** @internal */ readonly session: MySqlSession | undefined; protected dialect: MySqlDialect; protected cacheConfig?: WithCacheConfig = undefined; protected usedTables: Set = new Set(); constructor( { table, fields, isPartialSelect, session, dialect, withList, distinct, useIndex, forceIndex, ignoreIndex }: { table: MySqlSelectConfig['table']; fields: MySqlSelectConfig['fields']; isPartialSelect: boolean; session: MySqlSession | undefined; dialect: MySqlDialect; withList: Subquery[]; distinct: boolean | undefined; useIndex?: string[]; forceIndex?: string[]; ignoreIndex?: string[]; }, ) { super(); this.config = { withList, table, fields: { ...fields }, distinct, setOperators: [], useIndex, forceIndex, ignoreIndex, }; this.isPartialSelect = isPartialSelect; this.session = session; this.dialect = dialect; this._ = { selectedFields: fields as TSelectedFields, config: this.config, } as this['_']; this.tableName = getTableLikeName(table); this.joinsNotNullableMap = typeof this.tableName === 'string' ? { [this.tableName]: true } : {}; for (const item of extractUsedTable(table)) this.usedTables.add(item); } /** @internal */ getUsedTables() { return [...this.usedTables]; } private createJoin< TJoinType extends MySqlJoinType, TIsLateral extends (TJoinType extends 'full' | 'right' ? false : boolean), >( joinType: TJoinType, lateral: TIsLateral, ): 'cross' extends TJoinType ? MySqlCrossJoinFn : MySqlJoinFn { return < TJoinedTable extends MySqlTable | Subquery | MySqlViewBase | SQL, >( table: MySqlTable | Subquery | MySqlViewBase | SQL, a?: | ((aliases: TSelection) => SQL | undefined) | SQL | undefined | (TJoinedTable extends MySqlTable ? IndexConfig : 'Index hint configuration is allowed only for MySqlTable and not for subqueries or views'), b?: TJoinedTable extends MySqlTable ? IndexConfig : 'Index hint configuration is allowed only for MySqlTable and not for subqueries or views', ) => { const isCrossJoin = joinType === 'cross'; let on = (isCrossJoin ? undefined : a) as ( | ((aliases: TSelection) => SQL | undefined) | SQL | undefined ); const onIndex = (isCrossJoin ? a : b) as TJoinedTable extends MySqlTable ? IndexConfig : 'Index hint configuration is allowed only for MySqlTable and not for subqueries or views'; const baseTableName = this.tableName; const tableName = getTableLikeName(table); // store all tables used in a query for (const item of extractUsedTable(table)) this.usedTables.add(item); if (typeof tableName === 'string' && this.config.joins?.some((join) => join.alias === tableName)) { throw new Error(`Alias "${tableName}" is already used in this query`); } if (!this.isPartialSelect) { // If this is the first join and this is not a partial select and we're not selecting from raw SQL, "move" the fields from the main table to the nested object if (Object.keys(this.joinsNotNullableMap).length === 1 && typeof baseTableName === 'string') { this.config.fields = { [baseTableName]: this.config.fields, }; } if (typeof tableName === 'string' && !is(table, SQL)) { const selection = is(table, Subquery) ? table._.selectedFields : is(table, View) ? table[ViewBaseConfig].selectedFields : table[Table.Symbol.Columns]; this.config.fields[tableName] = selection; } } if (typeof on === 'function') { on = on( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } if (!this.config.joins) { this.config.joins = []; } let useIndex: string[] = []; let forceIndex: string[] = []; let ignoreIndex: string[] = []; if (is(table, MySqlTable) && onIndex && typeof onIndex !== 'string') { if (onIndex.useIndex) { useIndex = convertIndexToString(toArray(onIndex.useIndex)); } if (onIndex.forceIndex) { forceIndex = convertIndexToString(toArray(onIndex.forceIndex)); } if (onIndex.ignoreIndex) { ignoreIndex = convertIndexToString(toArray(onIndex.ignoreIndex)); } } this.config.joins.push({ on, table, joinType, alias: tableName, useIndex, forceIndex, ignoreIndex, lateral }); if (typeof tableName === 'string') { switch (joinType) { case 'left': { this.joinsNotNullableMap[tableName] = false; break; } case 'right': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = true; break; } case 'cross': case 'inner': { this.joinsNotNullableMap[tableName] = true; break; } } } return this as any; }; } /** * Executes a `left join` operation by adding another table to the current query. * * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#left-join} * * @param table the table to join. * @param on the `on` clause. * @param onIndex index hint. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User; pets: Pet | null; }[] = await db.select() * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId with use index hint * const usersIdsAndPetIds: { userId: number; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId), { * useIndex: ['pets_owner_id_index'] * }) * ``` */ leftJoin = this.createJoin('left', false); /** * Executes a `left join lateral` operation by adding subquery to the current query. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#left-join-lateral} * * @param table the subquery to join. * @param on the `on` clause. */ leftJoinLateral = this.createJoin('left', true); /** * Executes a `right join` operation by adding another table to the current query. * * Calling this method associates each row of the joined table with the corresponding row from the main table, if a match is found. If no matching row exists, it sets all columns of the main table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#right-join} * * @param table the table to join. * @param on the `on` clause. * @param onIndex index hint. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User | null; pets: Pet; }[] = await db.select() * .from(users) * .rightJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number | null; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .rightJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId with use index hint * const usersIdsAndPetIds: { userId: number; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId), { * useIndex: ['pets_owner_id_index'] * }) * ``` */ rightJoin = this.createJoin('right', false); /** * Executes an `inner join` operation, creating a new table by combining rows from two tables that have matching values. * * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. * * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join} * * @param table the table to join. * @param on the `on` clause. * @param onIndex index hint. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() * .from(users) * .innerJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .innerJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId with use index hint * const usersIdsAndPetIds: { userId: number; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId), { * useIndex: ['pets_owner_id_index'] * }) * ``` */ innerJoin = this.createJoin('inner', false); /** * Executes an `inner join lateral` operation, creating a new table by combining rows from two queries that have matching values. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. * * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join-lateral} * * @param table the subquery to join. * @param on the `on` clause. */ innerJoinLateral = this.createJoin('inner', true); /** * Executes a `cross join` operation by combining rows from two tables into a new table. * * Calling this method retrieves all rows from both main and joined tables, merging all rows from each table. * * See docs: {@link https://orm.drizzle.team/docs/joins#cross-join} * * @param table the table to join. * @param onIndex index hint. * * @example * * ```ts * // Select all users, each user with every pet * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() * .from(users) * .crossJoin(pets) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .crossJoin(pets) * * // Select userId and petId with use index hint * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .crossJoin(pets, { * useIndex: ['pets_owner_id_index'] * }) * ``` */ crossJoin = this.createJoin('cross', false); /** * Executes a `cross join lateral` operation by combining rows from two queries into a new table. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method retrieves all rows from both main and joined queries, merging all rows from each query. * * See docs: {@link https://orm.drizzle.team/docs/joins#cross-join-lateral} * * @param table the query to join. */ crossJoinLateral = this.createJoin('cross', true); private createSetOperator( type: SetOperator, isAll: boolean, ): >( rightSelection: | ((setOperators: GetMySqlSetOperators) => SetOperatorRightSelect) | SetOperatorRightSelect, ) => MySqlSelectWithout< this, TDynamic, MySqlSetOperatorExcludedMethods, true > { return (rightSelection) => { const rightSelect = (typeof rightSelection === 'function' ? rightSelection(getMySqlSetOperators()) : rightSelection) as TypedQueryBuilder< any, TResult >; if (!haveSameKeys(this.getSelectedFields(), rightSelect.getSelectedFields())) { throw new Error( 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', ); } this.config.setOperators.push({ type, isAll, rightSelect }); return this as any; }; } /** * Adds `union` set operator to the query. * * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} * * @example * * ```ts * // Select all unique names from customers and users tables * await db.select({ name: users.name }) * .from(users) * .union( * db.select({ name: customers.name }).from(customers) * ); * // or * import { union } from 'drizzle-orm/mysql-core' * * await union( * db.select({ name: users.name }).from(users), * db.select({ name: customers.name }).from(customers) * ); * ``` */ union = this.createSetOperator('union', false); /** * Adds `union all` set operator to the query. * * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} * * @example * * ```ts * // Select all transaction ids from both online and in-store sales * await db.select({ transaction: onlineSales.transactionId }) * .from(onlineSales) * .unionAll( * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * // or * import { unionAll } from 'drizzle-orm/mysql-core' * * await unionAll( * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * ``` */ unionAll = this.createSetOperator('union', true); /** * Adds `intersect` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} * * @example * * ```ts * // Select course names that are offered in both departments A and B * await db.select({ courseName: depA.courseName }) * .from(depA) * .intersect( * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * import { intersect } from 'drizzle-orm/mysql-core' * * await intersect( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ intersect = this.createSetOperator('intersect', false); /** * Adds `intersect all` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets including all duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect-all} * * @example * * ```ts * // Select all products and quantities that are ordered by both regular and VIP customers * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders) * .intersectAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * // or * import { intersectAll } from 'drizzle-orm/mysql-core' * * await intersectAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * ``` */ intersectAll = this.createSetOperator('intersect', true); /** * Adds `except` set operator to the query. * * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} * * @example * * ```ts * // Select all courses offered in department A but not in department B * await db.select({ courseName: depA.courseName }) * .from(depA) * .except( * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * import { except } from 'drizzle-orm/mysql-core' * * await except( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ except = this.createSetOperator('except', false); /** * Adds `except all` set operator to the query. * * Calling this method will retrieve all rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except-all} * * @example * * ```ts * // Select all products that are ordered by regular customers but not by VIP customers * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered, * }) * .from(regularCustomerOrders) * .exceptAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered, * }) * .from(vipCustomerOrders) * ); * // or * import { exceptAll } from 'drizzle-orm/mysql-core' * * await exceptAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * ``` */ exceptAll = this.createSetOperator('except', true); /** @internal */ addSetOperators(setOperators: MySqlSelectConfig['setOperators']): MySqlSelectWithout< this, TDynamic, MySqlSetOperatorExcludedMethods, true > { this.config.setOperators.push(...setOperators); return this as any; } /** * Adds a `where` clause to the query. * * Calling this method will select only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/select#filtering} * * @param where the `where` clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be selected. * * ```ts * // Select all cars with green color * await db.select().from(cars).where(eq(cars.color, 'green')); * // or * await db.select().from(cars).where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Select all BMW cars with a green color * await db.select().from(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Select all cars with the green or blue color * await db.select().from(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where( where: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, ): MySqlSelectWithout { if (typeof where === 'function') { where = where( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } this.config.where = where; return this as any; } /** * Adds a `having` clause to the query. * * Calling this method will select only those rows that fulfill a specified condition. It is typically used with aggregate functions to filter the aggregated data based on a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} * * @param having the `having` clause. * * @example * * ```ts * // Select all brands with more than one car * await db.select({ * brand: cars.brand, * count: sql`cast(count(${cars.id}) as int)`, * }) * .from(cars) * .groupBy(cars.brand) * .having(({ count }) => gt(count, 1)); * ``` */ having( having: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, ): MySqlSelectWithout { if (typeof having === 'function') { having = having( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } this.config.having = having; return this as any; } /** * Adds a `group by` clause to the query. * * Calling this method will group rows that have the same values into summary rows, often used for aggregation purposes. * * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} * * @example * * ```ts * // Group and count people by their last names * await db.select({ * lastName: people.lastName, * count: sql`cast(count(*) as int)` * }) * .from(people) * .groupBy(people.lastName); * ``` */ groupBy( builder: (aliases: this['_']['selection']) => ValueOrArray, ): MySqlSelectWithout; groupBy(...columns: (MySqlColumn | SQL | SQL.Aliased)[]): MySqlSelectWithout; groupBy( ...columns: | [(aliases: this['_']['selection']) => ValueOrArray] | (MySqlColumn | SQL | SQL.Aliased)[] ): MySqlSelectWithout { if (typeof columns[0] === 'function') { const groupBy = columns[0]( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as TSelection, ); this.config.groupBy = Array.isArray(groupBy) ? groupBy : [groupBy]; } else { this.config.groupBy = columns as (MySqlColumn | SQL | SQL.Aliased)[]; } return this as any; } /** * Adds an `order by` clause to the query. * * Calling this method will sort the result-set in ascending or descending order. By default, the sort order is ascending. * * See docs: {@link https://orm.drizzle.team/docs/select#order-by} * * @example * * ``` * // Select cars ordered by year * await db.select().from(cars).orderBy(cars.year); * ``` * * You can specify whether results are in ascending or descending order with the `asc()` and `desc()` operators. * * ```ts * // Select cars ordered by year in descending order * await db.select().from(cars).orderBy(desc(cars.year)); * * // Select cars ordered by year and price * await db.select().from(cars).orderBy(asc(cars.year), desc(cars.price)); * ``` */ orderBy( builder: (aliases: this['_']['selection']) => ValueOrArray, ): MySqlSelectWithout; orderBy(...columns: (MySqlColumn | SQL | SQL.Aliased)[]): MySqlSelectWithout; orderBy( ...columns: | [(aliases: this['_']['selection']) => ValueOrArray] | (MySqlColumn | SQL | SQL.Aliased)[] ): MySqlSelectWithout { if (typeof columns[0] === 'function') { const orderBy = columns[0]( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as TSelection, ); const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.orderBy = orderByArray; } else { this.config.orderBy = orderByArray; } } else { const orderByArray = columns as (MySqlColumn | SQL | SQL.Aliased)[]; if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.orderBy = orderByArray; } else { this.config.orderBy = orderByArray; } } return this as any; } /** * Adds a `limit` clause to the query. * * Calling this method will set the maximum number of rows that will be returned by this query. * * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} * * @param limit the `limit` clause. * * @example * * ```ts * // Get the first 10 people from this query. * await db.select().from(people).limit(10); * ``` */ limit(limit: number | Placeholder): MySqlSelectWithout { if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.limit = limit; } else { this.config.limit = limit; } return this as any; } /** * Adds an `offset` clause to the query. * * Calling this method will skip a number of rows when returning results from this query. * * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} * * @param offset the `offset` clause. * * @example * * ```ts * // Get the 10th-20th people from this query. * await db.select().from(people).offset(10).limit(10); * ``` */ offset(offset: number | Placeholder): MySqlSelectWithout { if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.offset = offset; } else { this.config.offset = offset; } return this as any; } /** * Adds a `for` clause to the query. * * Calling this method will specify a lock strength for this query that controls how strictly it acquires exclusive access to the rows being queried. * * See docs: {@link https://dev.mysql.com/doc/refman/8.0/en/innodb-locking-reads.html} * * @param strength the lock strength. * @param config the lock configuration. */ for(strength: LockStrength, config: LockConfig = {}): MySqlSelectWithout { this.config.lockingClause = { strength, config }; return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildSelectQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } as( alias: TAlias, ): SubqueryWithSelection { const usedTables: string[] = []; usedTables.push(...extractUsedTable(this.config.table)); if (this.config.joins) { for (const it of this.config.joins) usedTables.push(...extractUsedTable(it.table)); } return new Proxy( new Subquery(this.getSQL(), this.config.fields, alias, false, [...new Set(usedTables)]), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as SubqueryWithSelection; } /** @internal */ override getSelectedFields(): this['_']['selectedFields'] { return new Proxy( this.config.fields, new SelectionProxyHandler({ alias: this.tableName, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as this['_']['selectedFields']; } $dynamic(): MySqlSelectDynamic { return this as any; } $withCache(config?: { config?: CacheConfig; tag?: string; autoInvalidate?: boolean } | false) { this.cacheConfig = config === undefined ? { config: {}, enable: true, autoInvalidate: true } : config === false ? { enable: false } : { enable: true, autoInvalidate: true, ...config }; return this; } } export interface MySqlSelectBase< TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends MySqlSelectQueryBuilderBase< MySqlSelectHKT, TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, QueryPromise {} export class MySqlSelectBase< TTableName extends string | undefined, TSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult = SelectResult[], TSelectedFields = BuildSubquerySelection, > extends MySqlSelectQueryBuilderBase< MySqlSelectHKT, TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields > { static override readonly [entityKind]: string = 'MySqlSelect'; prepare(): MySqlSelectPrepare { if (!this.session) { throw new Error('Cannot execute a query on a query builder. Please use a database instance instead.'); } const fieldsList = orderSelectedFields(this.config.fields); const query = this.session.prepareQuery< MySqlPreparedQueryConfig & { execute: SelectResult[] }, TPreparedQueryHKT >(this.dialect.sqlToQuery(this.getSQL()), fieldsList, undefined, undefined, undefined, { type: 'select', tables: [...this.usedTables], }, this.cacheConfig); query.joinsNotNullableMap = this.joinsNotNullableMap; return query as MySqlSelectPrepare; } execute = ((placeholderValues) => { return this.prepare().execute(placeholderValues); }) as ReturnType['execute']; private createIterator = (): ReturnType['iterator'] => { const self = this; return async function*(placeholderValues) { yield* self.prepare().iterator(placeholderValues); }; }; iterator = this.createIterator(); } applyMixins(MySqlSelectBase, [QueryPromise]); function createSetOperator(type: SetOperator, isAll: boolean): MySqlCreateSetOperatorFn { return (leftSelect, rightSelect, ...restSelects) => { const setOperators = [rightSelect, ...restSelects].map((select) => ({ type, isAll, rightSelect: select as AnyMySqlSelect, })); for (const setOperator of setOperators) { if (!haveSameKeys((leftSelect as any).getSelectedFields(), setOperator.rightSelect.getSelectedFields())) { throw new Error( 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', ); } } return (leftSelect as AnyMySqlSelect).addSetOperators(setOperators) as any; }; } const getMySqlSetOperators = () => ({ union, unionAll, intersect, intersectAll, except, exceptAll, }); /** * Adds `union` set operator to the query. * * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} * * @example * * ```ts * // Select all unique names from customers and users tables * import { union } from 'drizzle-orm/mysql-core' * * await union( * db.select({ name: users.name }).from(users), * db.select({ name: customers.name }).from(customers) * ); * // or * await db.select({ name: users.name }) * .from(users) * .union( * db.select({ name: customers.name }).from(customers) * ); * ``` */ export const union = createSetOperator('union', false); /** * Adds `union all` set operator to the query. * * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} * * @example * * ```ts * // Select all transaction ids from both online and in-store sales * import { unionAll } from 'drizzle-orm/mysql-core' * * await unionAll( * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * // or * await db.select({ transaction: onlineSales.transactionId }) * .from(onlineSales) * .unionAll( * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * ``` */ export const unionAll = createSetOperator('union', true); /** * Adds `intersect` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} * * @example * * ```ts * // Select course names that are offered in both departments A and B * import { intersect } from 'drizzle-orm/mysql-core' * * await intersect( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * await db.select({ courseName: depA.courseName }) * .from(depA) * .intersect( * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ export const intersect = createSetOperator('intersect', false); /** * Adds `intersect all` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets including all duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect-all} * * @example * * ```ts * // Select all products and quantities that are ordered by both regular and VIP customers * import { intersectAll } from 'drizzle-orm/mysql-core' * * await intersectAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * // or * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders) * .intersectAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * ``` */ export const intersectAll = createSetOperator('intersect', true); /** * Adds `except` set operator to the query. * * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} * * @example * * ```ts * // Select all courses offered in department A but not in department B * import { except } from 'drizzle-orm/mysql-core' * * await except( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * await db.select({ courseName: depA.courseName }) * .from(depA) * .except( * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ export const except = createSetOperator('except', false); /** * Adds `except all` set operator to the query. * * Calling this method will retrieve all rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except-all} * * @example * * ```ts * // Select all products that are ordered by regular customers but not by VIP customers * import { exceptAll } from 'drizzle-orm/mysql-core' * * await exceptAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * // or * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered, * }) * .from(regularCustomerOrders) * .exceptAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered, * }) * .from(vipCustomerOrders) * ); * ``` */ export const exceptAll = createSetOperator('except', true); ================================================ FILE: drizzle-orm/src/mysql-core/query-builders/select.types.ts ================================================ import type { MySqlColumn } from '~/mysql-core/columns/index.ts'; import type { MySqlTable, MySqlTableWithColumns } from '~/mysql-core/table.ts'; import type { SelectedFields as SelectedFieldsBase, SelectedFieldsFlat as SelectedFieldsFlatBase, SelectedFieldsOrdered as SelectedFieldsOrderedBase, } from '~/operations.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AppendToNullabilityMap, AppendToResult, BuildSubquerySelection, GetSelectTableName, JoinNullability, JoinType, MapColumnsToTableAlias, SelectMode, SelectResult, SetOperator, } from '~/query-builders/select.types.ts'; import type { ColumnsSelection, Placeholder, SQL, View } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import type { Table, UpdateTableConfig } from '~/table.ts'; import type { Assume, ValidateShape } from '~/utils.ts'; import type { MySqlPreparedQueryConfig, PreparedQueryHKTBase, PreparedQueryKind } from '../session.ts'; import type { MySqlViewBase } from '../view-base.ts'; import type { MySqlViewWithSelection } from '../view.ts'; import type { IndexConfig, MySqlSelectBase, MySqlSelectQueryBuilderBase } from './select.ts'; export type MySqlJoinType = Exclude; export interface MySqlSelectJoinConfig { on: SQL | undefined; table: MySqlTable | Subquery | MySqlViewBase | SQL; alias: string | undefined; joinType: MySqlJoinType; lateral?: boolean; useIndex?: string[]; forceIndex?: string[]; ignoreIndex?: string[]; } export type BuildAliasTable = TTable extends Table ? MySqlTableWithColumns< UpdateTableConfig; }> > : TTable extends View ? MySqlViewWithSelection< TAlias, TTable['_']['existing'], MapColumnsToTableAlias > : never; export interface MySqlSelectConfig { withList?: Subquery[]; fields: Record; fieldsFlat?: SelectedFieldsOrdered; where?: SQL; having?: SQL; table: MySqlTable | Subquery | MySqlViewBase | SQL; limit?: number | Placeholder; offset?: number | Placeholder; joins?: MySqlSelectJoinConfig[]; orderBy?: (MySqlColumn | SQL | SQL.Aliased)[]; groupBy?: (MySqlColumn | SQL | SQL.Aliased)[]; lockingClause?: { strength: LockStrength; config: LockConfig; }; distinct?: boolean; setOperators: { rightSelect: TypedQueryBuilder; type: SetOperator; isAll: boolean; orderBy?: (MySqlColumn | SQL | SQL.Aliased)[]; limit?: number | Placeholder; offset?: number | Placeholder; }[]; useIndex?: string[]; forceIndex?: string[]; ignoreIndex?: string[]; } export type MySqlJoin< T extends AnyMySqlSelectQueryBuilder, TDynamic extends boolean, TJoinType extends MySqlJoinType, TJoinedTable extends MySqlTable | Subquery | MySqlViewBase | SQL, TJoinedName extends GetSelectTableName = GetSelectTableName, > = T extends any ? MySqlSelectWithout< MySqlSelectKind< T['_']['hkt'], T['_']['tableName'], AppendToResult< T['_']['tableName'], T['_']['selection'], TJoinedName, TJoinedTable extends MySqlTable ? TJoinedTable['_']['columns'] : TJoinedTable extends Subquery | View ? Assume : never, T['_']['selectMode'] >, T['_']['selectMode'] extends 'partial' ? T['_']['selectMode'] : 'multiple', T['_']['preparedQueryHKT'], AppendToNullabilityMap, TDynamic, T['_']['excludedMethods'] >, TDynamic, T['_']['excludedMethods'] > : never; export type MySqlJoinFn< T extends AnyMySqlSelectQueryBuilder, TDynamic extends boolean, TJoinType extends MySqlJoinType, TIsLateral extends boolean, > = < TJoinedTable extends (TIsLateral extends true ? Subquery | SQL : MySqlTable | Subquery | MySqlViewBase | SQL), TJoinedName extends GetSelectTableName = GetSelectTableName, >( table: TJoinedTable, on: ((aliases: T['_']['selection']) => SQL | undefined) | SQL | undefined, onIndex?: | (TJoinedTable extends MySqlTable ? IndexConfig : 'Index hint configuration is allowed only for MySqlTable and not for subqueries or views') | undefined, ) => MySqlJoin; export type MySqlCrossJoinFn< T extends AnyMySqlSelectQueryBuilder, TDynamic extends boolean, TIsLateral extends boolean, > = < TJoinedTable extends (TIsLateral extends true ? Subquery | SQL : MySqlTable | Subquery | MySqlViewBase | SQL), TJoinedName extends GetSelectTableName = GetSelectTableName, >( table: TJoinedTable, onIndex?: | (TJoinedTable extends MySqlTable ? IndexConfig : 'Index hint configuration is allowed only for MySqlTable and not for subqueries or views') | undefined, ) => MySqlJoin; export type SelectedFieldsFlat = SelectedFieldsFlatBase; export type SelectedFields = SelectedFieldsBase; export type SelectedFieldsOrdered = SelectedFieldsOrderedBase; export type LockStrength = 'update' | 'share'; export type LockConfig = { noWait: true; skipLocked?: undefined; } | { noWait?: undefined; skipLocked: true; } | { noWait?: undefined; skipLocked?: undefined; }; export interface MySqlSelectHKTBase { tableName: string | undefined; selection: unknown; selectMode: SelectMode; preparedQueryHKT: unknown; nullabilityMap: unknown; dynamic: boolean; excludedMethods: string; result: unknown; selectedFields: unknown; _type: unknown; } export type MySqlSelectKind< T extends MySqlSelectHKTBase, TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase, TNullabilityMap extends Record, TDynamic extends boolean, TExcludedMethods extends string, TResult = SelectResult[], TSelectedFields = BuildSubquerySelection, > = (T & { tableName: TTableName; selection: TSelection; selectMode: TSelectMode; preparedQueryHKT: TPreparedQueryHKT; nullabilityMap: TNullabilityMap; dynamic: TDynamic; excludedMethods: TExcludedMethods; result: TResult; selectedFields: TSelectedFields; })['_type']; export interface MySqlSelectQueryBuilderHKT extends MySqlSelectHKTBase { _type: MySqlSelectQueryBuilderBase< MySqlSelectQueryBuilderHKT, this['tableName'], Assume, this['selectMode'], Assume, Assume>, this['dynamic'], this['excludedMethods'], Assume, Assume >; } export interface MySqlSelectHKT extends MySqlSelectHKTBase { _type: MySqlSelectBase< this['tableName'], Assume, this['selectMode'], Assume, Assume>, this['dynamic'], this['excludedMethods'], Assume, Assume >; } export type MySqlSetOperatorExcludedMethods = | 'where' | 'having' | 'groupBy' | 'session' | 'leftJoin' | 'rightJoin' | 'innerJoin' | 'for'; export type MySqlSelectWithout< T extends AnyMySqlSelectQueryBuilder, TDynamic extends boolean, K extends keyof T & string, TResetExcluded extends boolean = false, > = TDynamic extends true ? T : Omit< MySqlSelectKind< T['_']['hkt'], T['_']['tableName'], T['_']['selection'], T['_']['selectMode'], T['_']['preparedQueryHKT'], T['_']['nullabilityMap'], TDynamic, TResetExcluded extends true ? K : T['_']['excludedMethods'] | K, T['_']['result'], T['_']['selectedFields'] >, TResetExcluded extends true ? K : T['_']['excludedMethods'] | K >; export type MySqlSelectPrepare = PreparedQueryKind< T['_']['preparedQueryHKT'], MySqlPreparedQueryConfig & { execute: T['_']['result']; iterator: T['_']['result'][number]; }, true >; export type MySqlSelectDynamic = MySqlSelectKind< T['_']['hkt'], T['_']['tableName'], T['_']['selection'], T['_']['selectMode'], T['_']['preparedQueryHKT'], T['_']['nullabilityMap'], true, never, T['_']['result'], T['_']['selectedFields'] >; export type CreateMySqlSelectFromBuilderMode< TBuilderMode extends 'db' | 'qb', TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase, > = TBuilderMode extends 'db' ? MySqlSelectBase : MySqlSelectQueryBuilderBase; export type MySqlSelectQueryBuilder< THKT extends MySqlSelectHKTBase = MySqlSelectQueryBuilderHKT, TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = ColumnsSelection, TSelectMode extends SelectMode = SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TNullabilityMap extends Record = Record, TResult extends any[] = unknown[], TSelectedFields extends ColumnsSelection = ColumnsSelection, > = MySqlSelectQueryBuilderBase< THKT, TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, true, never, TResult, TSelectedFields >; export type AnyMySqlSelectQueryBuilder = MySqlSelectQueryBuilderBase; export type AnyMySqlSetOperatorInterface = MySqlSetOperatorInterface; export interface MySqlSetOperatorInterface< TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > { _: { readonly hkt: MySqlSelectHKT; readonly tableName: TTableName; readonly selection: TSelection; readonly selectMode: TSelectMode; readonly preparedQueryHKT: TPreparedQueryHKT; readonly nullabilityMap: TNullabilityMap; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TResult; readonly selectedFields: TSelectedFields; }; } export type MySqlSetOperatorWithResult = MySqlSetOperatorInterface< any, any, any, any, any, any, any, TResult, any >; export type MySqlSelect< TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = Record, TSelectMode extends SelectMode = SelectMode, TNullabilityMap extends Record = Record, > = MySqlSelectBase; export type AnyMySqlSelect = MySqlSelectBase; export type MySqlSetOperator< TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = Record, TSelectMode extends SelectMode = SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TNullabilityMap extends Record = Record, > = MySqlSelectBase< TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, true, MySqlSetOperatorExcludedMethods >; export type SetOperatorRightSelect< TValue extends MySqlSetOperatorWithResult, TResult extends any[], > = TValue extends MySqlSetOperatorInterface ? ValidateShape< TValueResult[number], TResult[number], TypedQueryBuilder > : TValue; export type SetOperatorRestSelect< TValue extends readonly MySqlSetOperatorWithResult[], TResult extends any[], > = TValue extends [infer First, ...infer Rest] ? First extends MySqlSetOperatorInterface ? Rest extends AnyMySqlSetOperatorInterface[] ? [ ValidateShape>, ...SetOperatorRestSelect, ] : ValidateShape[]> : never : TValue; export type MySqlCreateSetOperatorFn = < TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TValue extends MySqlSetOperatorWithResult, TRest extends MySqlSetOperatorWithResult[], TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, >( leftSelect: MySqlSetOperatorInterface< TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, rightSelect: SetOperatorRightSelect, ...restSelects: SetOperatorRestSelect ) => MySqlSelectWithout< MySqlSelectBase< TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, false, MySqlSetOperatorExcludedMethods, true >; export type GetMySqlSetOperators = { union: MySqlCreateSetOperatorFn; intersect: MySqlCreateSetOperatorFn; except: MySqlCreateSetOperatorFn; unionAll: MySqlCreateSetOperatorFn; intersectAll: MySqlCreateSetOperatorFn; exceptAll: MySqlCreateSetOperatorFn; }; ================================================ FILE: drizzle-orm/src/mysql-core/query-builders/update.ts ================================================ import type { WithCacheConfig } from '~/cache/core/types.ts'; import type { GetColumnData } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { AnyMySqlQueryResultHKT, MySqlPreparedQueryConfig, MySqlQueryResultHKT, MySqlQueryResultKind, MySqlSession, PreparedQueryHKTBase, PreparedQueryKind, } from '~/mysql-core/session.ts'; import type { MySqlTable } from '~/mysql-core/table.ts'; import { QueryPromise } from '~/query-promise.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { mapUpdateSet, type UpdateSet, type ValueOrArray } from '~/utils.ts'; import type { MySqlColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import type { SelectedFieldsOrdered } from './select.types.ts'; export interface MySqlUpdateConfig { where?: SQL | undefined; limit?: number | Placeholder; orderBy?: (MySqlColumn | SQL | SQL.Aliased)[]; set: UpdateSet; table: MySqlTable; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } export type MySqlUpdateSetSource = & { [Key in keyof TTable['$inferInsert']]?: | GetColumnData | SQL | undefined; } & {}; export class MySqlUpdateBuilder< TTable extends MySqlTable, TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, > { static readonly [entityKind]: string = 'MySqlUpdateBuilder'; declare readonly _: { readonly table: TTable; }; constructor( private table: TTable, private session: MySqlSession, private dialect: MySqlDialect, private withList?: Subquery[], ) {} set(values: MySqlUpdateSetSource): MySqlUpdateBase { return new MySqlUpdateBase(this.table, mapUpdateSet(this.table, values), this.session, this.dialect, this.withList); } } export type MySqlUpdateWithout< T extends AnyMySqlUpdateBase, TDynamic extends boolean, K extends keyof T & string, > = TDynamic extends true ? T : Omit< MySqlUpdateBase< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type MySqlUpdatePrepare = PreparedQueryKind< T['_']['preparedQueryHKT'], MySqlPreparedQueryConfig & { execute: MySqlQueryResultKind; iterator: never; }, true >; export type MySqlUpdateDynamic = MySqlUpdate< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'] >; export type MySqlUpdate< TTable extends MySqlTable = MySqlTable, TQueryResult extends MySqlQueryResultHKT = AnyMySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, > = MySqlUpdateBase; export type AnyMySqlUpdateBase = MySqlUpdateBase; export interface MySqlUpdateBase< TTable extends MySqlTable, TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends QueryPromise>, SQLWrapper { readonly _: { readonly table: TTable; readonly queryResult: TQueryResult; readonly preparedQueryHKT: TPreparedQueryHKT; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; }; } export class MySqlUpdateBase< TTable extends MySqlTable, TQueryResult extends MySqlQueryResultHKT, // eslint-disable-next-line @typescript-eslint/no-unused-vars TPreparedQueryHKT extends PreparedQueryHKTBase, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise> implements SQLWrapper { static override readonly [entityKind]: string = 'MySqlUpdate'; private config: MySqlUpdateConfig; protected cacheConfig?: WithCacheConfig; constructor( table: TTable, set: UpdateSet, private session: MySqlSession, private dialect: MySqlDialect, withList?: Subquery[], ) { super(); this.config = { set, table, withList }; } /** * Adds a 'where' clause to the query. * * Calling this method will update only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param where the 'where' clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be updated. * * ```ts * // Update all cars with green color * db.update(cars).set({ color: 'red' }) * .where(eq(cars.color, 'green')); * // or * db.update(cars).set({ color: 'red' }) * .where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Update all BMW cars with a green color * db.update(cars).set({ color: 'red' }) * .where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Update all cars with the green or blue color * db.update(cars).set({ color: 'red' }) * .where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where(where: SQL | undefined): MySqlUpdateWithout { this.config.where = where; return this as any; } orderBy( builder: (updateTable: TTable) => ValueOrArray, ): MySqlUpdateWithout; orderBy(...columns: (MySqlColumn | SQL | SQL.Aliased)[]): MySqlUpdateWithout; orderBy( ...columns: | [(updateTable: TTable) => ValueOrArray] | (MySqlColumn | SQL | SQL.Aliased)[] ): MySqlUpdateWithout { if (typeof columns[0] === 'function') { const orderBy = columns[0]( new Proxy( this.config.table[Table.Symbol.Columns], new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as any, ); const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; this.config.orderBy = orderByArray; } else { const orderByArray = columns as (MySqlColumn | SQL | SQL.Aliased)[]; this.config.orderBy = orderByArray; } return this as any; } limit(limit: number | Placeholder): MySqlUpdateWithout { this.config.limit = limit; return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildUpdateQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } prepare(): MySqlUpdatePrepare { return this.session.prepareQuery( this.dialect.sqlToQuery(this.getSQL()), undefined, undefined, undefined, this.config.returning, { type: 'insert', tables: extractUsedTable(this.config.table), }, this.cacheConfig, ) as MySqlUpdatePrepare; } override execute: ReturnType['execute'] = (placeholderValues) => { return this.prepare().execute(placeholderValues); }; private createIterator = (): ReturnType['iterator'] => { const self = this; return async function*(placeholderValues) { yield* self.prepare().iterator(placeholderValues); }; }; iterator = this.createIterator(); $dynamic(): MySqlUpdateDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/mysql-core/schema.ts ================================================ import { entityKind, is } from '~/entity.ts'; import { type MySqlTableFn, mysqlTableWithSchema } from './table.ts'; import { type mysqlView, mysqlViewWithSchema } from './view.ts'; export class MySqlSchema { static readonly [entityKind]: string = 'MySqlSchema'; constructor( public readonly schemaName: TName, ) {} table: MySqlTableFn = (name, columns, extraConfig) => { return mysqlTableWithSchema(name, columns, extraConfig, this.schemaName); }; view = ((name, columns) => { return mysqlViewWithSchema(name, columns, this.schemaName); }) as typeof mysqlView; } /** @deprecated - use `instanceof MySqlSchema` */ export function isMySqlSchema(obj: unknown): obj is MySqlSchema { return is(obj, MySqlSchema); } /** * Create a MySQL schema. * https://dev.mysql.com/doc/refman/8.0/en/create-database.html * * @param name mysql use schema name * @returns MySQL schema */ export function mysqlDatabase(name: TName) { return new MySqlSchema(name); } /** * @see mysqlDatabase */ export const mysqlSchema = mysqlDatabase; ================================================ FILE: drizzle-orm/src/mysql-core/session.ts ================================================ import { type Cache, hashQuery, NoopCache } from '~/cache/core/cache.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import { DrizzleQueryError, TransactionRollbackError } from '~/errors.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { type Query, type SQL, sql } from '~/sql/sql.ts'; import type { Assume, Equal } from '~/utils.ts'; import { MySqlDatabase } from './db.ts'; import type { MySqlDialect } from './dialect.ts'; import type { SelectedFieldsOrdered } from './query-builders/select.types.ts'; export type Mode = 'default' | 'planetscale'; export interface MySqlQueryResultHKT { readonly $brand: 'MySqlQueryResultHKT'; readonly row: unknown; readonly type: unknown; } export interface AnyMySqlQueryResultHKT extends MySqlQueryResultHKT { readonly type: any; } export type MySqlQueryResultKind = (TKind & { readonly row: TRow; })['type']; export interface MySqlPreparedQueryConfig { execute: unknown; iterator: unknown; } export interface MySqlPreparedQueryHKT { readonly $brand: 'MySqlPreparedQueryHKT'; readonly config: unknown; readonly type: unknown; } export type PreparedQueryKind< TKind extends MySqlPreparedQueryHKT, TConfig extends MySqlPreparedQueryConfig, TAssume extends boolean = false, > = Equal extends true ? Assume<(TKind & { readonly config: TConfig })['type'], MySqlPreparedQuery> : (TKind & { readonly config: TConfig })['type']; export abstract class MySqlPreparedQuery { static readonly [entityKind]: string = 'MySqlPreparedQuery'; constructor( // cache instance private cache: Cache | undefined, // per query related metadata private queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, // config that was passed through $withCache private cacheConfig?: WithCacheConfig, ) { // it means that no $withCache options were passed and it should be just enabled if (cache && cache.strategy() === 'all' && cacheConfig === undefined) { this.cacheConfig = { enable: true, autoInvalidate: true }; } if (!this.cacheConfig?.enable) { this.cacheConfig = undefined; } } /** @internal */ protected async queryWithCache( queryString: string, params: any[], query: () => Promise, ): Promise { if (this.cache === undefined || is(this.cache, NoopCache) || this.queryMetadata === undefined) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // don't do any mutations, if globally is false if (this.cacheConfig && !this.cacheConfig.enable) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // For mutate queries, we should query the database, wait for a response, and then perform invalidation if ( ( this.queryMetadata.type === 'insert' || this.queryMetadata.type === 'update' || this.queryMetadata.type === 'delete' ) && this.queryMetadata.tables.length > 0 ) { try { const [res] = await Promise.all([ query(), this.cache.onMutate({ tables: this.queryMetadata.tables }), ]); return res; } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // don't do any reads if globally disabled if (!this.cacheConfig) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } if (this.queryMetadata.type === 'select') { const fromCache = await this.cache.get( this.cacheConfig.tag ?? await hashQuery(queryString, params), this.queryMetadata.tables, this.cacheConfig.tag !== undefined, this.cacheConfig.autoInvalidate, ); if (fromCache === undefined) { let result; try { result = await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } // put actual key await this.cache.put( this.cacheConfig.tag ?? await hashQuery(queryString, params), result, // make sure we send tables that were used in a query only if user wants to invalidate it on each write this.cacheConfig.autoInvalidate ? this.queryMetadata.tables : [], this.cacheConfig.tag !== undefined, this.cacheConfig.config, ); // put flag if we should invalidate or not return result; } return fromCache as unknown as T; } try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } /** @internal */ joinsNotNullableMap?: Record; abstract execute(placeholderValues?: Record): Promise; abstract iterator(placeholderValues?: Record): AsyncGenerator; } export interface MySqlTransactionConfig { withConsistentSnapshot?: boolean; accessMode?: 'read only' | 'read write'; isolationLevel: 'read uncommitted' | 'read committed' | 'repeatable read' | 'serializable'; } export abstract class MySqlSession< TQueryResult extends MySqlQueryResultHKT = MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > { static readonly [entityKind]: string = 'MySqlSession'; constructor(protected dialect: MySqlDialect) {} abstract prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], generatedIds?: Record[], returningIds?: SelectedFieldsOrdered, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PreparedQueryKind; execute(query: SQL): Promise { return this.prepareQuery( this.dialect.sqlToQuery(query), undefined, ).execute(); } abstract all(query: SQL): Promise; async count(sql: SQL): Promise { const res = await this.execute<[[{ count: string }]]>(sql); return Number( res[0][0]['count'], ); } abstract transaction( transaction: (tx: MySqlTransaction) => Promise, config?: MySqlTransactionConfig, ): Promise; protected getSetTransactionSQL(config: MySqlTransactionConfig): SQL | undefined { const parts: string[] = []; if (config.isolationLevel) { parts.push(`isolation level ${config.isolationLevel}`); } return parts.length ? sql`set transaction ${sql.raw(parts.join(' '))}` : undefined; } protected getStartTransactionSQL(config: MySqlTransactionConfig): SQL | undefined { const parts: string[] = []; if (config.withConsistentSnapshot) { parts.push('with consistent snapshot'); } if (config.accessMode) { parts.push(config.accessMode); } return parts.length ? sql`start transaction ${sql.raw(parts.join(' '))}` : undefined; } } export abstract class MySqlTransaction< TQueryResult extends MySqlQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > extends MySqlDatabase { static override readonly [entityKind]: string = 'MySqlTransaction'; constructor( dialect: MySqlDialect, session: MySqlSession, protected schema: RelationalSchemaConfig | undefined, protected readonly nestedIndex: number, mode: Mode, ) { super(dialect, session, schema, mode); } rollback(): never { throw new TransactionRollbackError(); } /** Nested transactions (aka savepoints) only work with InnoDB engine. */ abstract override transaction( transaction: (tx: MySqlTransaction) => Promise, ): Promise; } export interface PreparedQueryHKTBase extends MySqlPreparedQueryHKT { type: MySqlPreparedQuery>; } ================================================ FILE: drizzle-orm/src/mysql-core/subquery.ts ================================================ import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import type { Subquery, WithSubquery, WithSubqueryWithoutSelection } from '~/subquery.ts'; import type { QueryBuilder } from './query-builders/query-builder.ts'; export type SubqueryWithSelection< TSelection extends ColumnsSelection, TAlias extends string, > = & Subquery> & AddAliasToSelection; export type WithSubqueryWithSelection< TSelection extends ColumnsSelection, TAlias extends string, > = & WithSubquery> & AddAliasToSelection; export interface WithBuilder { (alias: TAlias): { as: { ( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): WithSubqueryWithSelection; ( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): WithSubqueryWithoutSelection; }; }; (alias: TAlias, selection: TSelection): { as: (qb: SQL | ((qb: QueryBuilder) => SQL)) => WithSubqueryWithSelection; }; } ================================================ FILE: drizzle-orm/src/mysql-core/table.ts ================================================ import type { BuildColumns, BuildExtraConfigColumns } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import { Table, type TableConfig as TableConfigBase, type UpdateTableConfig } from '~/table.ts'; import type { CheckBuilder } from './checks.ts'; import { getMySqlColumnBuilders, type MySqlColumnBuilders } from './columns/all.ts'; import type { MySqlColumn, MySqlColumnBuilder, MySqlColumnBuilderBase } from './columns/common.ts'; import type { ForeignKey, ForeignKeyBuilder } from './foreign-keys.ts'; import type { AnyIndexBuilder } from './indexes.ts'; import type { PrimaryKeyBuilder } from './primary-keys.ts'; import type { UniqueConstraintBuilder } from './unique-constraint.ts'; export type MySqlTableExtraConfigValue = | AnyIndexBuilder | CheckBuilder | ForeignKeyBuilder | PrimaryKeyBuilder | UniqueConstraintBuilder; export type MySqlTableExtraConfig = Record< string, MySqlTableExtraConfigValue >; export type TableConfig = TableConfigBase; /** @internal */ export const InlineForeignKeys = Symbol.for('drizzle:MySqlInlineForeignKeys'); export class MySqlTable extends Table { static override readonly [entityKind]: string = 'MySqlTable'; declare protected $columns: T['columns']; /** @internal */ static override readonly Symbol = Object.assign({}, Table.Symbol, { InlineForeignKeys: InlineForeignKeys as typeof InlineForeignKeys, }); /** @internal */ override [Table.Symbol.Columns]!: NonNullable; /** @internal */ [InlineForeignKeys]: ForeignKey[] = []; /** @internal */ override [Table.Symbol.ExtraConfigBuilder]: | ((self: Record) => MySqlTableExtraConfig) | undefined = undefined; } export type AnyMySqlTable = {}> = MySqlTable< UpdateTableConfig >; export type MySqlTableWithColumns = & MySqlTable & { [Key in keyof T['columns']]: T['columns'][Key]; }; export function mysqlTableWithSchema< TTableName extends string, TSchemaName extends string | undefined, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap | ((columnTypes: MySqlColumnBuilders) => TColumnsMap), extraConfig: | (( self: BuildColumns, ) => MySqlTableExtraConfig | MySqlTableExtraConfigValue[]) | undefined, schema: TSchemaName, baseName = name, ): MySqlTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'mysql'; }> { const rawTable = new MySqlTable<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'mysql'; }>(name, schema, baseName); const parsedColumns: TColumnsMap = typeof columns === 'function' ? columns(getMySqlColumnBuilders()) : columns; const builtColumns = Object.fromEntries( Object.entries(parsedColumns).map(([name, colBuilderBase]) => { const colBuilder = colBuilderBase as MySqlColumnBuilder; colBuilder.setName(name); const column = colBuilder.build(rawTable); rawTable[InlineForeignKeys].push(...colBuilder.buildForeignKeys(column, rawTable)); return [name, column]; }), ) as unknown as BuildColumns; const table = Object.assign(rawTable, builtColumns); table[Table.Symbol.Columns] = builtColumns; table[Table.Symbol.ExtraConfigColumns] = builtColumns as unknown as BuildExtraConfigColumns< TTableName, TColumnsMap, 'mysql' >; if (extraConfig) { table[MySqlTable.Symbol.ExtraConfigBuilder] = extraConfig as unknown as ( self: Record, ) => MySqlTableExtraConfig; } return table; } export interface MySqlTableFn { < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap, extraConfig?: ( self: BuildColumns, ) => MySqlTableExtraConfigValue[], ): MySqlTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'mysql'; }>; < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: (columnTypes: MySqlColumnBuilders) => TColumnsMap, extraConfig?: (self: BuildColumns) => MySqlTableExtraConfigValue[], ): MySqlTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'mysql'; }>; /** * @deprecated The third parameter of mysqlTable is changing and will only accept an array instead of an object * * @example * Deprecated version: * ```ts * export const users = mysqlTable("users", { * id: int(), * }, (t) => ({ * idx: index('custom_name').on(t.id) * })); * ``` * * New API: * ```ts * export const users = mysqlTable("users", { * id: int(), * }, (t) => [ * index('custom_name').on(t.id) * ]); * ``` */ < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap, extraConfig: (self: BuildColumns) => MySqlTableExtraConfig, ): MySqlTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'mysql'; }>; /** * @deprecated The third parameter of mysqlTable is changing and will only accept an array instead of an object * * @example * Deprecated version: * ```ts * export const users = mysqlTable("users", { * id: int(), * }, (t) => ({ * idx: index('custom_name').on(t.id) * })); * ``` * * New API: * ```ts * export const users = mysqlTable("users", { * id: int(), * }, (t) => [ * index('custom_name').on(t.id) * ]); * ``` */ < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: (columnTypes: MySqlColumnBuilders) => TColumnsMap, extraConfig: (self: BuildColumns) => MySqlTableExtraConfig, ): MySqlTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'mysql'; }>; } export const mysqlTable: MySqlTableFn = (name, columns, extraConfig) => { return mysqlTableWithSchema(name, columns, extraConfig, undefined, name); }; export function mysqlTableCreator(customizeTableName: (name: string) => string): MySqlTableFn { return (name, columns, extraConfig) => { return mysqlTableWithSchema(customizeTableName(name) as typeof name, columns, extraConfig, undefined, name); }; } ================================================ FILE: drizzle-orm/src/mysql-core/unique-constraint.ts ================================================ import { entityKind } from '~/entity.ts'; import { TableName } from '~/table.utils.ts'; import type { MySqlColumn } from './columns/index.ts'; import type { MySqlTable } from './table.ts'; export function unique(name?: string): UniqueOnConstraintBuilder { return new UniqueOnConstraintBuilder(name); } export function uniqueKeyName(table: MySqlTable, columns: string[]) { return `${table[TableName]}_${columns.join('_')}_unique`; } export class UniqueConstraintBuilder { static readonly [entityKind]: string = 'MySqlUniqueConstraintBuilder'; /** @internal */ columns: MySqlColumn[]; constructor( columns: MySqlColumn[], private name?: string, ) { this.columns = columns; } /** @internal */ build(table: MySqlTable): UniqueConstraint { return new UniqueConstraint(table, this.columns, this.name); } } export class UniqueOnConstraintBuilder { static readonly [entityKind]: string = 'MySqlUniqueOnConstraintBuilder'; /** @internal */ name?: string; constructor( name?: string, ) { this.name = name; } on(...columns: [MySqlColumn, ...MySqlColumn[]]) { return new UniqueConstraintBuilder(columns, this.name); } } export class UniqueConstraint { static readonly [entityKind]: string = 'MySqlUniqueConstraint'; readonly columns: MySqlColumn[]; readonly name?: string; readonly nullsNotDistinct: boolean = false; constructor(readonly table: MySqlTable, columns: MySqlColumn[], name?: string) { this.columns = columns; this.name = name ?? uniqueKeyName(this.table, this.columns.map((column) => column.name)); } getName() { return this.name; } } ================================================ FILE: drizzle-orm/src/mysql-core/utils.ts ================================================ import { is } from '~/entity.ts'; import { SQL } from '~/index.ts'; import { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { Check } from './checks.ts'; import { CheckBuilder } from './checks.ts'; import type { ForeignKey } from './foreign-keys.ts'; import { ForeignKeyBuilder } from './foreign-keys.ts'; import type { Index } from './indexes.ts'; import { IndexBuilder } from './indexes.ts'; import type { PrimaryKey } from './primary-keys.ts'; import { PrimaryKeyBuilder } from './primary-keys.ts'; import type { IndexForHint } from './query-builders/select.ts'; import { MySqlTable } from './table.ts'; import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; import type { MySqlViewBase } from './view-base.ts'; import { MySqlViewConfig } from './view-common.ts'; import type { MySqlView } from './view.ts'; export function extractUsedTable(table: MySqlTable | Subquery | MySqlViewBase | SQL): string[] { if (is(table, MySqlTable)) { return [`${table[Table.Symbol.BaseName]}`]; } if (is(table, Subquery)) { return table._.usedTables ?? []; } if (is(table, SQL)) { return table.usedTables ?? []; } return []; } export function getTableConfig(table: MySqlTable) { const columns = Object.values(table[MySqlTable.Symbol.Columns]); const indexes: Index[] = []; const checks: Check[] = []; const primaryKeys: PrimaryKey[] = []; const uniqueConstraints: UniqueConstraint[] = []; const foreignKeys: ForeignKey[] = Object.values(table[MySqlTable.Symbol.InlineForeignKeys]); const name = table[Table.Symbol.Name]; const schema = table[Table.Symbol.Schema]; const baseName = table[Table.Symbol.BaseName]; const extraConfigBuilder = table[MySqlTable.Symbol.ExtraConfigBuilder]; if (extraConfigBuilder !== undefined) { const extraConfig = extraConfigBuilder(table[MySqlTable.Symbol.Columns]); const extraValues = Array.isArray(extraConfig) ? extraConfig.flat(1) as any[] : Object.values(extraConfig); for (const builder of Object.values(extraValues)) { if (is(builder, IndexBuilder)) { indexes.push(builder.build(table)); } else if (is(builder, CheckBuilder)) { checks.push(builder.build(table)); } else if (is(builder, UniqueConstraintBuilder)) { uniqueConstraints.push(builder.build(table)); } else if (is(builder, PrimaryKeyBuilder)) { primaryKeys.push(builder.build(table)); } else if (is(builder, ForeignKeyBuilder)) { foreignKeys.push(builder.build(table)); } } } return { columns, indexes, foreignKeys, checks, primaryKeys, uniqueConstraints, name, schema, baseName, }; } export function getViewConfig< TName extends string = string, TExisting extends boolean = boolean, >(view: MySqlView) { return { ...view[ViewBaseConfig], ...view[MySqlViewConfig], }; } export function convertIndexToString(indexes: IndexForHint[]) { return indexes.map((idx) => { return typeof idx === 'object' ? idx.config.name : idx; }); } export function toArray(value: T | T[]): T[] { return Array.isArray(value) ? value : [value]; } ================================================ FILE: drizzle-orm/src/mysql-core/view-base.ts ================================================ import { entityKind } from '~/entity.ts'; import type { ColumnsSelection } from '~/sql/sql.ts'; import { View } from '~/sql/sql.ts'; export abstract class MySqlViewBase< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends View { static override readonly [entityKind]: string = 'MySqlViewBase'; declare readonly _: View['_'] & { readonly viewBrand: 'MySqlViewBase'; }; } ================================================ FILE: drizzle-orm/src/mysql-core/view-common.ts ================================================ export const MySqlViewConfig = Symbol.for('drizzle:MySqlViewConfig'); ================================================ FILE: drizzle-orm/src/mysql-core/view.ts ================================================ import type { BuildColumns } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import { getTableColumns } from '~/utils.ts'; import type { MySqlColumn, MySqlColumnBuilderBase } from './columns/index.ts'; import { QueryBuilder } from './query-builders/query-builder.ts'; import { mysqlTable } from './table.ts'; import { MySqlViewBase } from './view-base.ts'; import { MySqlViewConfig } from './view-common.ts'; export interface ViewBuilderConfig { algorithm?: 'undefined' | 'merge' | 'temptable'; sqlSecurity?: 'definer' | 'invoker'; withCheckOption?: 'cascaded' | 'local'; } export class ViewBuilderCore { static readonly [entityKind]: string = 'MySqlViewBuilder'; declare readonly _: { readonly name: TConfig['name']; readonly columns: TConfig['columns']; }; constructor( protected name: TConfig['name'], protected schema: string | undefined, ) {} protected config: ViewBuilderConfig = {}; algorithm( algorithm: Exclude, ): this { this.config.algorithm = algorithm; return this; } sqlSecurity( sqlSecurity: Exclude, ): this { this.config.sqlSecurity = sqlSecurity; return this; } withCheckOption( withCheckOption?: Exclude, ): this { this.config.withCheckOption = withCheckOption ?? 'cascaded'; return this; } } export class ViewBuilder extends ViewBuilderCore<{ name: TName }> { static override readonly [entityKind]: string = 'MySqlViewBuilder'; as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): MySqlViewWithSelection> { if (typeof qb === 'function') { qb = qb(new QueryBuilder()); } const selectionProxy = new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }); const aliasedSelection = new Proxy(qb.getSelectedFields(), selectionProxy); return new Proxy( new MySqlView({ mysqlConfig: this.config, config: { name: this.name, schema: this.schema, selectedFields: aliasedSelection, query: qb.getSQL().inlineParams(), }, }), selectionProxy as any, ) as MySqlViewWithSelection>; } } export class ManualViewBuilder< TName extends string = string, TColumns extends Record = Record, > extends ViewBuilderCore<{ name: TName; columns: TColumns }> { static override readonly [entityKind]: string = 'MySqlManualViewBuilder'; private columns: Record; constructor( name: TName, columns: TColumns, schema: string | undefined, ) { super(name, schema); this.columns = getTableColumns(mysqlTable(name, columns)) as BuildColumns; } existing(): MySqlViewWithSelection> { return new Proxy( new MySqlView({ mysqlConfig: undefined, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: undefined, }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as MySqlViewWithSelection>; } as(query: SQL): MySqlViewWithSelection> { return new Proxy( new MySqlView({ mysqlConfig: this.config, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: query.inlineParams(), }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as MySqlViewWithSelection>; } } export class MySqlView< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends MySqlViewBase { static override readonly [entityKind]: string = 'MySqlView'; declare protected $MySqlViewBrand: 'MySqlView'; [MySqlViewConfig]: ViewBuilderConfig | undefined; constructor({ mysqlConfig, config }: { mysqlConfig: ViewBuilderConfig | undefined; config: { name: TName; schema: string | undefined; selectedFields: ColumnsSelection; query: SQL | undefined; }; }) { super(config); this[MySqlViewConfig] = mysqlConfig; } } export type MySqlViewWithSelection< TName extends string, TExisting extends boolean, TSelectedFields extends ColumnsSelection, > = MySqlView & TSelectedFields; /** @internal */ export function mysqlViewWithSchema( name: string, selection: Record | undefined, schema: string | undefined, ): ViewBuilder | ManualViewBuilder { if (selection) { return new ManualViewBuilder(name, selection, schema); } return new ViewBuilder(name, schema); } export function mysqlView(name: TName): ViewBuilder; export function mysqlView>( name: TName, columns: TColumns, ): ManualViewBuilder; export function mysqlView( name: string, selection?: Record, ): ViewBuilder | ManualViewBuilder { return mysqlViewWithSchema(name, selection, undefined); } ================================================ FILE: drizzle-orm/src/mysql-proxy/driver.ts ================================================ import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { MySqlDatabase } from '~/mysql-core/db.ts'; import { MySqlDialect } from '~/mysql-core/dialect.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import type { DrizzleConfig } from '~/utils.ts'; import { type MySqlRemotePreparedQueryHKT, type MySqlRemoteQueryResultHKT, MySqlRemoteSession } from './session.ts'; export class MySqlRemoteDatabase< TSchema extends Record = Record, > extends MySqlDatabase { static override readonly [entityKind]: string = 'MySqlRemoteDatabase'; } export type RemoteCallback = ( sql: string, params: any[], method: 'all' | 'execute', ) => Promise<{ rows: any[]; insertId?: number; affectedRows?: number }>; export function drizzle = Record>( callback: RemoteCallback, config: DrizzleConfig = {}, ): MySqlRemoteDatabase { const dialect = new MySqlDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new MySqlRemoteSession(callback, dialect, schema, { logger }); return new MySqlRemoteDatabase(dialect, session, schema as any, 'default') as MySqlRemoteDatabase; } ================================================ FILE: drizzle-orm/src/mysql-proxy/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/mysql-proxy/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; import type { MySqlRemoteDatabase } from './driver.ts'; export type ProxyMigrator = (migrationQueries: string[]) => Promise; export async function migrate>( db: MySqlRemoteDatabase, callback: ProxyMigrator, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; const migrationTableCreate = sql` create table if not exists ${sql.identifier(migrationsTable)} ( id serial primary key, hash text not null, created_at bigint ) `; await db.execute(migrationTableCreate); const dbMigrations = await db.select({ id: sql.raw('id'), hash: sql.raw('hash'), created_at: sql.raw('created_at'), }).from(sql.identifier(migrationsTable).getSQL()).orderBy( sql.raw('created_at desc'), ).limit(1); const lastDbMigration = dbMigrations[0]; const queriesToRun: string[] = []; for (const migration of migrations) { if ( !lastDbMigration || Number(lastDbMigration.created_at) < migration.folderMillis ) { queriesToRun.push( ...migration.sql, `insert into ${ sql.identifier(migrationsTable).value } (\`hash\`, \`created_at\`) values('${migration.hash}', '${migration.folderMillis}')`, ); } } await callback(queriesToRun); } ================================================ FILE: drizzle-orm/src/mysql-proxy/session.ts ================================================ import type { FieldPacket, ResultSetHeader } from 'mysql2/promise'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import { MySqlTransaction } from '~/mysql-core/index.ts'; import type { SelectedFieldsOrdered } from '~/mysql-core/query-builders/select.types.ts'; import type { MySqlPreparedQueryConfig, MySqlPreparedQueryHKT, MySqlQueryResultHKT, MySqlTransactionConfig, PreparedQueryKind, } from '~/mysql-core/session.ts'; import { MySqlPreparedQuery as PreparedQueryBase, MySqlSession } from '~/mysql-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders } from '~/sql/sql.ts'; import type { Query, SQL } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; import type { RemoteCallback } from './driver.ts'; export type MySqlRawQueryResult = [ResultSetHeader, FieldPacket[]]; export interface MySqlRemoteSessionOptions { logger?: Logger; cache?: Cache; } export class MySqlRemoteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlSession { static override readonly [entityKind]: string = 'MySqlRemoteSession'; private logger: Logger; private cache: Cache; constructor( private client: RemoteCallback, dialect: MySqlDialect, private schema: RelationalSchemaConfig | undefined, options: MySqlRemoteSessionOptions, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], generatedIds?: Record[], returningIds?: SelectedFieldsOrdered, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PreparedQueryKind { return new PreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, customResultMapper, generatedIds, returningIds, ) as PreparedQueryKind; } override all(query: SQL): Promise { const querySql = this.dialect.sqlToQuery(query); this.logger.logQuery(querySql.sql, querySql.params); return this.client(querySql.sql, querySql.params, 'all').then(({ rows }) => rows) as Promise; } override async transaction( _transaction: (tx: MySqlProxyTransaction) => Promise, _config?: MySqlTransactionConfig, ): Promise { throw new Error('Transactions are not supported by the MySql Proxy driver'); } } export class MySqlProxyTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlTransaction { static override readonly [entityKind]: string = 'MySqlProxyTransaction'; override async transaction( _transaction: (tx: MySqlProxyTransaction) => Promise, ): Promise { throw new Error('Transactions are not supported by the MySql Proxy driver'); } } export class PreparedQuery extends PreparedQueryBase { static override readonly [entityKind]: string = 'MySqlProxyPreparedQuery'; constructor( private client: RemoteCallback, private queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, private customResultMapper?: (rows: unknown[][]) => T['execute'], // Keys that were used in $default and the value that was generated for them private generatedIds?: Record[], // Keys that should be returned, it has the column with all properries + key from object private returningIds?: SelectedFieldsOrdered, ) { super(cache, queryMetadata, cacheConfig); } async execute(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); const { fields, client, queryString, logger, joinsNotNullableMap, customResultMapper, returningIds, generatedIds } = this; logger.logQuery(queryString, params); if (!fields && !customResultMapper) { const { rows: data } = await this.queryWithCache(queryString, params, async () => { return await client(queryString, params, 'execute'); }); const insertId = data[0].insertId as number; const affectedRows = data[0].affectedRows; if (returningIds) { const returningResponse = []; let j = 0; for (let i = insertId; i < insertId + affectedRows; i++) { for (const column of returningIds) { const key = returningIds[0]!.path[0]!; if (is(column.field, Column)) { // @ts-ignore if (column.field.primary && column.field.autoIncrement) { returningResponse.push({ [key]: i }); } if (column.field.defaultFn && generatedIds) { // generatedIds[rowIdx][key] returningResponse.push({ [key]: generatedIds[j]![key] }); } } } j++; } return returningResponse; } return data; } const { rows } = await this.queryWithCache(queryString, params, async () => { return await client(queryString, params, 'all'); }); if (customResultMapper) { return customResultMapper(rows); } return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } override iterator( _placeholderValues: Record = {}, ): AsyncGenerator { throw new Error('Streaming is not supported by the MySql Proxy driver'); } } export interface MySqlRemoteQueryResultHKT extends MySqlQueryResultHKT { type: MySqlRawQueryResult; } export interface MySqlRemotePreparedQueryHKT extends MySqlPreparedQueryHKT { type: PreparedQuery>; } ================================================ FILE: drizzle-orm/src/mysql2/driver.ts ================================================ import { type Connection as CallbackConnection, createPool, type Pool as CallbackPool, type PoolOptions } from 'mysql2'; import type { Connection, Pool } from 'mysql2/promise'; import type { Cache } from '~/cache/core/index.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { MySqlDatabase } from '~/mysql-core/db.ts'; import { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { Mode } from '~/mysql-core/session.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { DrizzleError } from '../errors.ts'; import type { MySql2Client, MySql2PreparedQueryHKT, MySql2QueryResultHKT } from './session.ts'; import { MySql2Session } from './session.ts'; export interface MySqlDriverOptions { logger?: Logger; cache?: Cache; } export class MySql2Driver { static readonly [entityKind]: string = 'MySql2Driver'; constructor( private client: MySql2Client, private dialect: MySqlDialect, private options: MySqlDriverOptions = {}, ) { } createSession( schema: RelationalSchemaConfig | undefined, mode: Mode, ): MySql2Session, TablesRelationalConfig> { return new MySql2Session(this.client, this.dialect, schema, { logger: this.options.logger, mode, cache: this.options.cache, }); } } export { MySqlDatabase } from '~/mysql-core/db.ts'; export class MySql2Database< TSchema extends Record = Record, > extends MySqlDatabase { static override readonly [entityKind]: string = 'MySql2Database'; } export type MySql2DrizzleConfig = Record> = & Omit, 'schema'> & ({ schema: TSchema; mode: Mode } | { schema?: undefined; mode?: Mode }); function construct< TSchema extends Record = Record, TClient extends Pool | Connection | CallbackPool | CallbackConnection = CallbackPool, >( client: TClient, config: MySql2DrizzleConfig = {}, ): MySql2Database & { $client: AnyMySql2Connection extends TClient ? CallbackPool : TClient; } { const dialect = new MySqlDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } const clientForInstance = isCallbackClient(client) ? client.promise() : client; let schema: RelationalSchemaConfig | undefined; if (config.schema) { if (config.mode === undefined) { throw new DrizzleError({ message: 'You need to specify "mode": "planetscale" or "default" when providing a schema. Read more: https://orm.drizzle.team/docs/rqb#modes', }); } const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const mode = config.mode ?? 'default'; const driver = new MySql2Driver(clientForInstance as MySql2Client, dialect, { logger, cache: config.cache }); const session = driver.createSession(schema, mode); const db = new MySql2Database(dialect, session, schema as any, mode) as MySql2Database; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } interface CallbackClient { promise(): MySql2Client; } function isCallbackClient(client: any): client is CallbackClient { return typeof client.promise === 'function'; } export type AnyMySql2Connection = Pool | Connection | CallbackPool | CallbackConnection; export function drizzle< TSchema extends Record = Record, TClient extends AnyMySql2Connection = CallbackPool, >( ...params: [ TClient | string, ] | [ TClient | string, MySql2DrizzleConfig, ] | [ ( & MySql2DrizzleConfig & ({ connection: string | PoolOptions; } | { client: TClient; }) ), ] ): MySql2Database & { $client: AnyMySql2Connection extends TClient ? CallbackPool : TClient; } { if (typeof params[0] === 'string') { const connectionString = params[0]!; const instance = createPool({ uri: connectionString, }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: PoolOptions | string; client?: TClient } & MySql2DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; const instance = typeof connection === 'string' ? createPool({ uri: connection, supportBigNumbers: true, }) : createPool(connection!); const db = construct(instance, drizzleConfig); return db as any; } return construct(params[0] as TClient, params[1] as MySql2DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: MySql2DrizzleConfig, ): MySql2Database & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/mysql2/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/mysql2/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { MySql2Database } from './driver.ts'; export async function migrate>( db: MySql2Database, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/mysql2/session.ts ================================================ import type { Connection as CallbackConnection } from 'mysql2'; import type { Connection, FieldPacket, OkPacket, Pool, PoolConnection, QueryOptions, ResultSetHeader, RowDataPacket, } from 'mysql2/promise'; import { once } from 'node:events'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { SelectedFieldsOrdered } from '~/mysql-core/query-builders/select.types.ts'; import { type Mode, MySqlPreparedQuery, type MySqlPreparedQueryConfig, type MySqlPreparedQueryHKT, type MySqlQueryResultHKT, MySqlSession, MySqlTransaction, type MySqlTransactionConfig, type PreparedQueryKind, } from '~/mysql-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, sql } from '~/sql/sql.ts'; import type { Query, SQL } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; export type MySql2Client = Pool | Connection; export type MySqlRawQueryResult = [ResultSetHeader, FieldPacket[]]; export type MySqlQueryResultType = RowDataPacket[][] | RowDataPacket[] | OkPacket | OkPacket[] | ResultSetHeader; export type MySqlQueryResult< T = any, > = [T extends ResultSetHeader ? T : T[], FieldPacket[]]; export class MySql2PreparedQuery extends MySqlPreparedQuery { static override readonly [entityKind]: string = 'MySql2PreparedQuery'; private rawQuery: QueryOptions; private query: QueryOptions; constructor( private client: MySql2Client, queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, private customResultMapper?: (rows: unknown[][]) => T['execute'], // Keys that were used in $default and the value that was generated for them private generatedIds?: Record[], // Keys that should be returned, it has the column with all properries + key from object private returningIds?: SelectedFieldsOrdered, ) { super(cache, queryMetadata, cacheConfig); this.rawQuery = { sql: queryString, // rowsAsArray: true, typeCast: function(field: any, next: any) { if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { return field.string(); } return next(); }, }; this.query = { sql: queryString, rowsAsArray: true, typeCast: function(field: any, next: any) { if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { return field.string(); } return next(); }, }; } async execute(placeholderValues: Record = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.rawQuery.sql, params); const { fields, client, rawQuery, query, joinsNotNullableMap, customResultMapper, returningIds, generatedIds } = this; if (!fields && !customResultMapper) { const res = await this.queryWithCache(rawQuery.sql, params, async () => { return await client.query(rawQuery, params); }); const insertId = res[0].insertId; const affectedRows = res[0].affectedRows; // for each row, I need to check keys from if (returningIds) { const returningResponse = []; let j = 0; for (let i = insertId; i < insertId + affectedRows; i++) { for (const column of returningIds) { const key = returningIds[0]!.path[0]!; if (is(column.field, Column)) { // @ts-ignore if (column.field.primary && column.field.autoIncrement) { returningResponse.push({ [key]: i }); } if (column.field.defaultFn && generatedIds) { // generatedIds[rowIdx][key] returningResponse.push({ [key]: generatedIds[j]![key] }); } } } j++; } return returningResponse; } return res; } const result = await this.queryWithCache(query.sql, params, async () => { return await client.query(query, params); }); const rows = result[0]; if (customResultMapper) { return customResultMapper(rows); } return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } async *iterator( placeholderValues: Record = {}, ): AsyncGenerator { const params = fillPlaceholders(this.params, placeholderValues); const conn = ((isPool(this.client) ? await this.client.getConnection() : this.client) as {} as { connection: CallbackConnection; }).connection; const { fields, query, rawQuery, joinsNotNullableMap, client, customResultMapper } = this; const hasRowsMapper = Boolean(fields || customResultMapper); const driverQuery = hasRowsMapper ? conn.query(query, params) : conn.query(rawQuery, params); const stream = driverQuery.stream(); function dataListener() { stream.pause(); } stream.on('data', dataListener); try { const onEnd = once(stream, 'end'); const onError = once(stream, 'error'); while (true) { stream.resume(); const row = await Promise.race([onEnd, onError, new Promise((resolve) => stream.once('data', resolve))]); if (row === undefined || (Array.isArray(row) && row.length === 0)) { break; } else if (row instanceof Error) { // eslint-disable-line no-instanceof/no-instanceof throw row; } else { if (hasRowsMapper) { if (customResultMapper) { const mappedRow = customResultMapper([row as unknown[]]); yield (Array.isArray(mappedRow) ? mappedRow[0] : mappedRow); } else { yield mapResultRow(fields!, row as unknown[], joinsNotNullableMap); } } else { yield row as T['execute']; } } } } finally { stream.off('data', dataListener); if (isPool(client)) { conn.end(); } } } } export interface MySql2SessionOptions { logger?: Logger; cache?: Cache; mode: Mode; } export class MySql2Session< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlSession { static override readonly [entityKind]: string = 'MySql2Session'; private logger: Logger; private mode: Mode; private cache: Cache; constructor( private client: MySql2Client, dialect: MySqlDialect, private schema: RelationalSchemaConfig | undefined, private options: MySql2SessionOptions, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); this.mode = options.mode; } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], generatedIds?: Record[], returningIds?: SelectedFieldsOrdered, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PreparedQueryKind { // Add returningId fields // Each driver gets them from response from database return new MySql2PreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, customResultMapper, generatedIds, returningIds, ) as PreparedQueryKind; } /** * @internal * What is its purpose? */ async query(query: string, params: unknown[]): Promise { this.logger.logQuery(query, params); const result = await this.client.query({ sql: query, values: params, rowsAsArray: true, typeCast: function(field: any, next: any) { if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { return field.string(); } return next(); }, }); return result; } override all(query: SQL): Promise { const querySql = this.dialect.sqlToQuery(query); this.logger.logQuery(querySql.sql, querySql.params); return this.client.execute(querySql.sql, querySql.params).then((result) => result[0]) as Promise; } override async transaction( transaction: (tx: MySql2Transaction) => Promise, config?: MySqlTransactionConfig, ): Promise { const session = isPool(this.client) ? new MySql2Session( await this.client.getConnection(), this.dialect, this.schema, this.options, ) : this; const tx = new MySql2Transaction( this.dialect, session as MySqlSession, this.schema, 0, this.mode, ); if (config) { const setTransactionConfigSql = this.getSetTransactionSQL(config); if (setTransactionConfigSql) { await tx.execute(setTransactionConfigSql); } const startTransactionSql = this.getStartTransactionSQL(config); await (startTransactionSql ? tx.execute(startTransactionSql) : tx.execute(sql`begin`)); } else { await tx.execute(sql`begin`); } try { const result = await transaction(tx); await tx.execute(sql`commit`); return result; } catch (err) { await tx.execute(sql`rollback`); throw err; } finally { if (isPool(this.client)) { (session.client as PoolConnection).release(); } } } } export class MySql2Transaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlTransaction { static override readonly [entityKind]: string = 'MySql2Transaction'; override async transaction(transaction: (tx: MySql2Transaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex + 1}`; const tx = new MySql2Transaction( this.dialect, this.session, this.schema, this.nestedIndex + 1, this.mode, ); await tx.execute(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await tx.execute(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } function isPool(client: MySql2Client): client is Pool { return 'getConnection' in client; } export interface MySql2QueryResultHKT extends MySqlQueryResultHKT { type: MySqlRawQueryResult; } export interface MySql2PreparedQueryHKT extends MySqlPreparedQueryHKT { type: MySql2PreparedQuery>; } ================================================ FILE: drizzle-orm/src/neon/index.ts ================================================ export * from './neon-auth.ts'; export * from './rls.ts'; ================================================ FILE: drizzle-orm/src/neon/neon-auth.ts ================================================ import { jsonb, pgSchema, text, timestamp } from '~/pg-core/index.ts'; const neonAuthSchema = pgSchema('neon_auth'); /** * Table schema of the `users_sync` table used by Neon Auth. * This table automatically synchronizes and stores user data from external authentication providers. * * @schema neon_auth * @table users_sync */ export const usersSync = neonAuthSchema.table('users_sync', { rawJson: jsonb('raw_json').notNull(), id: text().primaryKey().notNull(), name: text(), email: text(), createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }), deletedAt: timestamp('deleted_at', { withTimezone: true, mode: 'string' }), updatedAt: timestamp('updated_at', { withTimezone: true, mode: 'string' }), }); ================================================ FILE: drizzle-orm/src/neon/rls.ts ================================================ import { is } from '~/entity.ts'; import { type AnyPgColumn, pgPolicy, type PgPolicyToOption } from '~/pg-core/index.ts'; import { PgRole, pgRole } from '~/pg-core/roles.ts'; import { type SQL, sql } from '~/sql/sql.ts'; /** * Generates a set of PostgreSQL row-level security (RLS) policies for CRUD operations based on the provided options. * * @param options - An object containing the policy configuration. * @param options.role - The PostgreSQL role(s) to apply the policy to. Can be a single `PgRole` instance or an array of `PgRole` instances or role names. * @param options.read - The SQL expression or boolean value that defines the read policy. Set to `true` to allow all reads, `false` to deny all reads, or provide a custom SQL expression. Set to `null` to prevent the policy from being generated. * @param options.modify - The SQL expression or boolean value that defines the modify (insert, update, delete) policies. Set to `true` to allow all modifications, `false` to deny all modifications, or provide a custom SQL expression. Set to `null` to prevent policies from being generated. * @returns An array of PostgreSQL policy definitions, one for each CRUD operation. */ export const crudPolicy = (options: { role: PgPolicyToOption; read: SQL | boolean | null; modify: SQL | boolean | null; }) => { if (options.read === undefined) { throw new Error('crudPolicy requires a read policy'); } if (options.modify === undefined) { throw new Error('crudPolicy requires a modify policy'); } let read: SQL | undefined; if (options.read === true) { read = sql`true`; } else if (options.read === false) { read = sql`false`; } else if (options.read !== null) { read = options.read; } let modify: SQL | undefined; if (options.modify === true) { modify = sql`true`; } else if (options.modify === false) { modify = sql`false`; } else if (options.modify !== null) { modify = options.modify; } let rolesName = ''; if (Array.isArray(options.role)) { rolesName = options.role .map((it) => { return is(it, PgRole) ? it.name : (it as string); }) .join('-'); } else { rolesName = is(options.role, PgRole) ? options.role.name : (options.role as string); } return [ read && pgPolicy(`crud-${rolesName}-policy-select`, { for: 'select', to: options.role, using: read, }), modify && pgPolicy(`crud-${rolesName}-policy-insert`, { for: 'insert', to: options.role, withCheck: modify, }), modify && pgPolicy(`crud-${rolesName}-policy-update`, { for: 'update', to: options.role, using: modify, withCheck: modify, }), modify && pgPolicy(`crud-${rolesName}-policy-delete`, { for: 'delete', to: options.role, using: modify, }), ].filter(Boolean); }; // These are default roles that Neon will set up. export const authenticatedRole = pgRole('authenticated').existing(); export const anonymousRole = pgRole('anonymous').existing(); export const authUid = (userIdColumn: AnyPgColumn) => sql`(select auth.user_id() = ${userIdColumn})`; ================================================ FILE: drizzle-orm/src/neon-http/driver.ts ================================================ import type { HTTPQueryOptions, HTTPTransactionOptions, NeonQueryFunction } from '@neondatabase/serverless'; import { neon, types } from '@neondatabase/serverless'; import type { BatchItem, BatchResponse } from '~/batch.ts'; import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig } from '~/relations.ts'; import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { type NeonHttpClient, type NeonHttpQueryResultHKT, NeonHttpSession } from './session.ts'; export interface NeonDriverOptions { logger?: Logger; cache?: Cache; } export class NeonHttpDriver { static readonly [entityKind]: string = 'NeonHttpDriver'; constructor( private client: NeonHttpClient, private dialect: PgDialect, private options: NeonDriverOptions = {}, ) { this.initMappers(); } createSession( schema: RelationalSchemaConfig | undefined, ): NeonHttpSession, TablesRelationalConfig> { return new NeonHttpSession(this.client, this.dialect, schema, { logger: this.options.logger, cache: this.options.cache, }); } initMappers() { types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => val); types.setTypeParser(types.builtins.TIMESTAMP, (val) => val); types.setTypeParser(types.builtins.DATE, (val) => val); types.setTypeParser(types.builtins.INTERVAL, (val) => val); types.setTypeParser(1231, (val) => val); types.setTypeParser(1115, (val) => val); types.setTypeParser(1185, (val) => val); types.setTypeParser(1187, (val) => val); types.setTypeParser(1182, (val) => val); } } function wrap( target: T, token: Exclude['authToken'], undefined>, cb: (target: any, p: string | symbol, res: any) => any, deep?: boolean, ) { return new Proxy(target, { get(target, p) { const element = target[p as keyof typeof p]; if (typeof element !== 'function' && (typeof element !== 'object' || element === null)) return element; if (deep) return wrap(element, token, cb); if (p === 'query') return wrap(element, token, cb, true); return new Proxy(element as any, { apply(target, thisArg, argArray) { const res = target.call(thisArg, ...argArray); if (typeof res === 'object' && res !== null && 'setToken' in res && typeof res.setToken === 'function') { res.setToken(token); } return cb(target, p, res); }, }); }, }); } export class NeonHttpDatabase< TSchema extends Record = Record, > extends PgDatabase { static override readonly [entityKind]: string = 'NeonHttpDatabase'; $withAuth( token: Exclude['authToken'], undefined>, ): Omit< this, Exclude< keyof this, | '$count' | 'delete' | 'select' | 'selectDistinct' | 'selectDistinctOn' | 'update' | 'insert' | 'with' | 'query' | 'execute' | 'refreshMaterializedView' > > { this.authToken = token; return wrap(this, token, (target, p, res) => { if (p === 'with') { return wrap(res, token, (_, __, res) => res); } return res; }); } /** @internal */ declare readonly session: NeonHttpSession>; async batch, T extends Readonly<[U, ...U[]]>>( batch: T, ): Promise> { return this.session.batch(batch) as Promise>; } } function construct< TSchema extends Record = Record, TClient extends NeonQueryFunction = NeonQueryFunction, >( client: TClient, config: DrizzleConfig = {}, ): NeonHttpDatabase & { $client: TClient; } { const dialect = new PgDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const driver = new NeonHttpDriver(client, dialect, { logger, cache: config.cache }); const session = driver.createSession(schema); const db = new NeonHttpDatabase( dialect, session, schema as RelationalSchemaConfig> | undefined, ); ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends NeonQueryFunction = NeonQueryFunction, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | ({ connectionString: string } & HTTPTransactionOptions); } | { client: TClient; }) ), ] ): NeonHttpDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = neon(params[0] as string); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: | ({ connectionString: string; } & HTTPTransactionOptions) | string; client?: TClient; } & DrizzleConfig; if (client) return construct(client, drizzleConfig); if (typeof connection === 'object') { const { connectionString, ...options } = connection; const instance = neon(connectionString, options); return construct(instance, drizzleConfig) as any; } const instance = neon(connection!); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): NeonHttpDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/neon-http/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/neon-http/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { type SQL, sql } from '~/sql/sql.ts'; import type { NeonHttpDatabase } from './driver.ts'; /** * This function reads migrationFolder and execute each unapplied migration and mark it as executed in database * * NOTE: The Neon HTTP driver does not support transactions. This means that if any part of a migration fails, * no rollback will be executed. Currently, you will need to handle unsuccessful migration yourself. * @param db - drizzle db instance * @param config - path to migration folder generated by drizzle-kit */ export async function migrate>( db: NeonHttpDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; const migrationsSchema = config.migrationsSchema ?? 'drizzle'; const migrationTableCreate = sql` CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)} ( id SERIAL PRIMARY KEY, hash text NOT NULL, created_at bigint ) `; await db.session.execute(sql`CREATE SCHEMA IF NOT EXISTS ${sql.identifier(migrationsSchema)}`); await db.session.execute(migrationTableCreate); const dbMigrations = await db.session.all<{ id: number; hash: string; created_at: string }>( sql`select id, hash, created_at from ${sql.identifier(migrationsSchema)}.${ sql.identifier(migrationsTable) } order by created_at desc limit 1`, ); const lastDbMigration = dbMigrations[0]; const rowsToInsert: SQL[] = []; for await (const migration of migrations) { if ( !lastDbMigration || Number(lastDbMigration.created_at) < migration.folderMillis ) { for (const stmt of migration.sql) { await db.session.execute(sql.raw(stmt)); } rowsToInsert.push( sql`insert into ${sql.identifier(migrationsSchema)}.${ sql.identifier(migrationsTable) } ("hash", "created_at") values(${migration.hash}, ${migration.folderMillis})`, ); } } for await (const rowToInsert of rowsToInsert) { await db.session.execute(rowToInsert); } } ================================================ FILE: drizzle-orm/src/neon-http/session.ts ================================================ import type { FullQueryResults, NeonQueryFunction, NeonQueryPromise } from '@neondatabase/serverless'; import type { BatchItem } from '~/batch.ts'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery as PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import type { PreparedQuery } from '~/session.ts'; import { fillPlaceholders, type Query, type SQL } from '~/sql/sql.ts'; import { mapResultRow, type NeonAuthToken } from '~/utils.ts'; export type NeonHttpClient = NeonQueryFunction; const rawQueryConfig = { arrayMode: false, fullResults: true, } as const; const queryConfig = { arrayMode: true, fullResults: true, } as const; export class NeonHttpPreparedQuery extends PgPreparedQuery { static override readonly [entityKind]: string = 'NeonHttpPreparedQuery'; private clientQuery: (sql: string, params: any[], opts: Record) => NeonQueryPromise; constructor( private client: NeonHttpClient, query: Query, private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => T['execute'], ) { super(query, cache, queryMetadata, cacheConfig); // `client.query` is for @neondatabase/serverless v1.0.0 and up, where the // root query function `client` is only usable as a template function; // `client` is a fallback for earlier versions this.clientQuery = (client as any).query ?? client as any; } async execute(placeholderValues: Record | undefined): Promise; /** @internal */ async execute(placeholderValues: Record | undefined, token?: NeonAuthToken): Promise; /** @internal */ async execute( placeholderValues: Record | undefined = {}, token: NeonAuthToken | undefined = this.authToken, ): Promise { const params = fillPlaceholders(this.query.params, placeholderValues); this.logger.logQuery(this.query.sql, params); const { fields, clientQuery, query, customResultMapper } = this; if (!fields && !customResultMapper) { return this.queryWithCache(query.sql, params, async () => { return clientQuery( query.sql, params, token === undefined ? rawQueryConfig : { ...rawQueryConfig, authToken: token, }, ); }); } const result = await this.queryWithCache(query.sql, params, async () => { return await clientQuery( query.sql, params, token === undefined ? queryConfig : { ...queryConfig, authToken: token, }, ); }); return this.mapResult(result); } override mapResult(result: unknown): unknown { if (!this.fields && !this.customResultMapper) { return result; } const rows = (result as FullQueryResults).rows; if (this.customResultMapper) { return this.customResultMapper(rows); } return rows.map((row) => mapResultRow(this.fields!, row, this.joinsNotNullableMap)); } all(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.query.params, placeholderValues); this.logger.logQuery(this.query.sql, params); return this.clientQuery( this.query.sql, params, this.authToken === undefined ? rawQueryConfig : { ...rawQueryConfig, authToken: this.authToken, }, ).then((result) => result.rows); } values(placeholderValues: Record | undefined): Promise; /** @internal */ values(placeholderValues: Record | undefined, token?: NeonAuthToken): Promise; /** @internal */ values(placeholderValues: Record | undefined = {}, token?: NeonAuthToken): Promise { const params = fillPlaceholders(this.query.params, placeholderValues); this.logger.logQuery(this.query.sql, params); return this.clientQuery(this.query.sql, params, { arrayMode: true, fullResults: true, authToken: token }).then(( result, ) => result.rows); } /** @internal */ isResponseInArrayMode() { return this._isResponseInArrayMode; } } export interface NeonHttpSessionOptions { logger?: Logger; cache?: Cache; } export class NeonHttpSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { static override readonly [entityKind]: string = 'NeonHttpSession'; private clientQuery: (sql: string, params: any[], opts: Record) => NeonQueryPromise; private logger: Logger; private cache: Cache; constructor( private client: NeonHttpClient, dialect: PgDialect, private schema: RelationalSchemaConfig | undefined, private options: NeonHttpSessionOptions = {}, ) { super(dialect); // `client.query` is for @neondatabase/serverless v1.0.0 and up, where the // root query function `client` is only usable as a template function; // `client` is a fallback for earlier versions this.clientQuery = (client as any).query ?? client as any; this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PgPreparedQuery { return new NeonHttpPreparedQuery( this.client, query, this.logger, this.cache, queryMetadata, cacheConfig, fields, isResponseInArrayMode, customResultMapper, ); } async batch, T extends Readonly<[U, ...U[]]>>( queries: T, ) { const preparedQueries: PreparedQuery[] = []; const builtQueries: NeonQueryPromise[] = []; for (const query of queries) { const preparedQuery = query._prepare(); const builtQuery = preparedQuery.getQuery(); preparedQueries.push(preparedQuery); builtQueries.push( this.clientQuery(builtQuery.sql, builtQuery.params, { fullResults: true, arrayMode: preparedQuery.isResponseInArrayMode(), }), ); } const batchResults = await this.client.transaction(builtQueries, queryConfig); return batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true)) as any; } // change return type to QueryRows async query(query: string, params: unknown[]): Promise> { this.logger.logQuery(query, params); const result = await this.clientQuery(query, params, { arrayMode: true, fullResults: true }); return result; } // change return type to QueryRows async queryObjects( query: string, params: unknown[], ): Promise> { return this.clientQuery(query, params, { arrayMode: false, fullResults: true }); } override async count(sql: SQL): Promise; /** @internal */ override async count(sql: SQL, token?: NeonAuthToken): Promise; /** @internal */ override async count(sql: SQL, token?: NeonAuthToken): Promise { const res = await this.execute<{ rows: [{ count: string }] }>(sql, token); return Number( res['rows'][0]['count'], ); } override async transaction( _transaction: (tx: NeonTransaction) => Promise, // eslint-disable-next-line @typescript-eslint/no-unused-vars _config: PgTransactionConfig = {}, ): Promise { throw new Error('No transactions support in neon-http driver'); } } export class NeonTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { static override readonly [entityKind]: string = 'NeonHttpTransaction'; override async transaction(_transaction: (tx: NeonTransaction) => Promise): Promise { throw new Error('No transactions support in neon-http driver'); // const savepointName = `sp${this.nestedIndex + 1}`; // const tx = new NeonTransaction(this.dialect, this.session, this.schema, this.nestedIndex + 1); // await tx.execute(sql.raw(`savepoint ${savepointName}`)); // try { // const result = await transaction(tx); // await tx.execute(sql.raw(`release savepoint ${savepointName}`)); // return result; // } catch (e) { // await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); // throw e; // } } } export type NeonHttpQueryResult = Omit, 'rows'> & { rows: T[] }; export interface NeonHttpQueryResultHKT extends PgQueryResultHKT { type: NeonHttpQueryResult; } ================================================ FILE: drizzle-orm/src/neon-serverless/driver.ts ================================================ import { neonConfig, Pool, type PoolConfig } from '@neondatabase/serverless'; import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import type { NeonClient, NeonQueryResultHKT } from './session.ts'; import { NeonSession } from './session.ts'; export interface NeonDriverOptions { logger?: Logger; cache?: Cache; } export class NeonDriver { static readonly [entityKind]: string = 'NeonDriver'; constructor( private client: NeonClient, private dialect: PgDialect, private options: NeonDriverOptions = {}, ) { } createSession( schema: RelationalSchemaConfig | undefined, ): NeonSession, TablesRelationalConfig> { return new NeonSession(this.client, this.dialect, schema, { logger: this.options.logger, cache: this.options.cache, }); } } export class NeonDatabase< TSchema extends Record = Record, > extends PgDatabase { static override readonly [entityKind]: string = 'NeonServerlessDatabase'; } function construct< TSchema extends Record = Record, TClient extends NeonClient = NeonClient, >( client: TClient, config: DrizzleConfig = {}, ): NeonDatabase & { $client: NeonClient extends TClient ? Pool : TClient; } { const dialect = new PgDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const driver = new NeonDriver(client, dialect, { logger, cache: config.cache }); const session = driver.createSession(schema); const db = new NeonDatabase(dialect, session, schema as any) as NeonDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends NeonClient = Pool, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | PoolConfig; } | { client: TClient; }) & { ws?: any; } ), ] ): NeonDatabase & { $client: NeonClient extends TClient ? Pool : TClient; } { if (typeof params[0] === 'string') { const instance = new Pool({ connectionString: params[0], }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ws, ...drizzleConfig } = params[0] as { connection?: PoolConfig | string; ws?: any; client?: TClient; } & DrizzleConfig; if (ws) { neonConfig.webSocketConstructor = ws; } if (client) return construct(client, drizzleConfig); const instance = typeof connection === 'string' ? new Pool({ connectionString: connection, }) : new Pool(connection); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): NeonDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/neon-serverless/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/neon-serverless/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { NeonDatabase } from './driver.ts'; export async function migrate>( db: NeonDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/neon-serverless/session.ts ================================================ import { type Client, Pool, type PoolClient, type QueryArrayConfig, type QueryConfig, type QueryResult, type QueryResultRow, types, } from '@neondatabase/serverless'; import { type Cache, NoopCache } from '~/cache/core/cache.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; export type NeonClient = Pool | PoolClient | Client; export class NeonPreparedQuery extends PgPreparedQuery { static override readonly [entityKind]: string = 'NeonPreparedQuery'; private rawQueryConfig: QueryConfig; private queryConfig: QueryArrayConfig; constructor( private client: NeonClient, queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, name: string | undefined, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => T['execute'], ) { super({ sql: queryString, params }, cache, queryMetadata, cacheConfig); this.rawQueryConfig = { name, text: queryString, types: { // @ts-ignore getTypeParser: (typeId, format) => { if (typeId === types.builtins.TIMESTAMPTZ) { return (val: any) => val; } if (typeId === types.builtins.TIMESTAMP) { return (val: any) => val; } if (typeId === types.builtins.DATE) { return (val: any) => val; } if (typeId === types.builtins.INTERVAL) { return (val: any) => val; } // numeric[] if (typeId === 1231) { return (val: any) => val; } // timestamp[] if (typeId === 1115) { return (val) => val; } // timestamp with timezone[] if (typeId === 1185) { return (val) => val; } // interval[] if (typeId === 1187) { return (val) => val; } // date[] if (typeId === 1182) { return (val) => val; } // @ts-ignore return types.getTypeParser(typeId, format); }, }, }; this.queryConfig = { name, text: queryString, rowMode: 'array', types: { // @ts-ignore getTypeParser: (typeId, format) => { if (typeId === types.builtins.TIMESTAMPTZ) { return (val: any) => val; } if (typeId === types.builtins.TIMESTAMP) { return (val: any) => val; } if (typeId === types.builtins.DATE) { return (val: any) => val; } if (typeId === types.builtins.INTERVAL) { return (val: any) => val; } // numeric[] if (typeId === 1231) { return (val: any) => val; } // timestamp[] if (typeId === 1115) { return (val) => val; } // timestamp with timezone[] if (typeId === 1185) { return (val) => val; } // interval[] if (typeId === 1187) { return (val) => val; } // date[] if (typeId === 1182) { return (val) => val; } // @ts-ignore return types.getTypeParser(typeId, format); }, }, }; } async execute(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.rawQueryConfig.text, params); const { fields, client, rawQueryConfig: rawQuery, queryConfig: query, joinsNotNullableMap, customResultMapper } = this; if (!fields && !customResultMapper) { return await this.queryWithCache(rawQuery.text, params, async () => { return await client.query(rawQuery, params); }); } const result = await this.queryWithCache(query.text, params, async () => { return await client.query(query, params); }); return customResultMapper ? customResultMapper(result.rows) : result.rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } all(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.rawQueryConfig.text, params); return this.queryWithCache(this.rawQueryConfig.text, params, async () => { return await this.client.query(this.rawQueryConfig, params); }).then((result) => result.rows); } values(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.rawQueryConfig.text, params); return this.queryWithCache(this.queryConfig.text, params, async () => { return await this.client.query(this.queryConfig, params); }).then((result) => result.rows); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } export interface NeonSessionOptions { logger?: Logger; cache?: Cache; } export class NeonSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { static override readonly [entityKind]: string = 'NeonSession'; private logger: Logger; private cache: Cache; constructor( private client: NeonClient, dialect: PgDialect, private schema: RelationalSchemaConfig | undefined, private options: NeonSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PgPreparedQuery { return new NeonPreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, name, isResponseInArrayMode, customResultMapper, ); } async query(query: string, params: unknown[]): Promise { this.logger.logQuery(query, params); const result = await this.client.query({ rowMode: 'array', text: query, values: params, }); return result; } async queryObjects( query: string, params: unknown[], ): Promise> { return this.client.query(query, params); } override async count(sql: SQL): Promise { const res = await this.execute<{ rows: [{ count: string }] }>(sql); return Number( res['rows'][0]['count'], ); } override async transaction( transaction: (tx: NeonTransaction) => Promise, config: PgTransactionConfig = {}, ): Promise { const session = this.client instanceof Pool // eslint-disable-line no-instanceof/no-instanceof ? new NeonSession(await this.client.connect(), this.dialect, this.schema, this.options) : this; const tx = new NeonTransaction(this.dialect, session, this.schema); await tx.execute(sql`begin ${tx.getTransactionConfigSQL(config)}`); try { const result = await transaction(tx); await tx.execute(sql`commit`); return result; } catch (error) { await tx.execute(sql`rollback`); throw error; } finally { if (this.client instanceof Pool) { // eslint-disable-line no-instanceof/no-instanceof (session.client as PoolClient).release(); } } } } export class NeonTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { static override readonly [entityKind]: string = 'NeonTransaction'; override async transaction(transaction: (tx: NeonTransaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex + 1}`; const tx = new NeonTransaction(this.dialect, this.session, this.schema, this.nestedIndex + 1); await tx.execute(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await tx.execute(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (e) { await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); throw e; } } } export interface NeonQueryResultHKT extends PgQueryResultHKT { type: QueryResult>; } ================================================ FILE: drizzle-orm/src/node-postgres/driver.ts ================================================ import pg, { type Pool, type PoolConfig } from 'pg'; import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import type { NodePgClient, NodePgQueryResultHKT } from './session.ts'; import { NodePgSession } from './session.ts'; export interface PgDriverOptions { logger?: Logger; cache?: Cache; } export class NodePgDriver { static readonly [entityKind]: string = 'NodePgDriver'; constructor( private client: NodePgClient, private dialect: PgDialect, private options: PgDriverOptions = {}, ) { } createSession( schema: RelationalSchemaConfig | undefined, ): NodePgSession, TablesRelationalConfig> { return new NodePgSession(this.client, this.dialect, schema, { logger: this.options.logger, cache: this.options.cache, }); } } export class NodePgDatabase< TSchema extends Record = Record, > extends PgDatabase { static override readonly [entityKind]: string = 'NodePgDatabase'; } function construct< TSchema extends Record = Record, TClient extends NodePgClient = NodePgClient, >( client: TClient, config: DrizzleConfig = {}, ): NodePgDatabase & { $client: NodePgClient extends TClient ? Pool : TClient; } { const dialect = new PgDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const driver = new NodePgDriver(client, dialect, { logger, cache: config.cache }); const session = driver.createSession(schema); const db = new NodePgDatabase(dialect, session, schema as any) as NodePgDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends NodePgClient = Pool, >( ...params: | [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ & DrizzleConfig & ({ client: TClient; } | { connection: string | PoolConfig; }), ] ): NodePgDatabase & { $client: NodePgClient extends TClient ? Pool : TClient; } { if (typeof params[0] === 'string') { const instance = new pg.Pool({ connectionString: params[0], }); return construct(instance, params[1] as DrizzleConfig | undefined) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as ( & ({ connection?: PoolConfig | string; client?: TClient }) & DrizzleConfig ); if (client) return construct(client, drizzleConfig); const instance = typeof connection === 'string' ? new pg.Pool({ connectionString: connection, }) : new pg.Pool(connection!); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): NodePgDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/node-postgres/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/node-postgres/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { NodePgDatabase } from './driver.ts'; export async function migrate>( db: NodePgDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/node-postgres/session.ts ================================================ import type { Client, PoolClient, QueryArrayConfig, QueryConfig, QueryResult, QueryResultRow } from 'pg'; import pg from 'pg'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import { type Logger, NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; import { tracer } from '~/tracing.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; const { Pool, types } = pg; export type NodePgClient = pg.Pool | PoolClient | Client; export class NodePgPreparedQuery extends PgPreparedQuery { static override readonly [entityKind]: string = 'NodePgPreparedQuery'; private rawQueryConfig: QueryConfig; private queryConfig: QueryArrayConfig; constructor( private client: NodePgClient, private queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, name: string | undefined, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => T['execute'], ) { super({ sql: queryString, params }, cache, queryMetadata, cacheConfig); this.rawQueryConfig = { name, text: queryString, types: { // @ts-ignore getTypeParser: (typeId, format) => { if (typeId === types.builtins.TIMESTAMPTZ) { return (val) => val; } if (typeId === types.builtins.TIMESTAMP) { return (val) => val; } if (typeId === types.builtins.DATE) { return (val) => val; } if (typeId === types.builtins.INTERVAL) { return (val) => val; } // numeric[] if (typeId === 1231) { return (val) => val; } // timestamp[] if (typeId === 1115) { return (val) => val; } // timestamp with timezone[] if (typeId === 1185) { return (val) => val; } // interval[] if (typeId === 1187) { return (val) => val; } // date[] if (typeId === 1182) { return (val) => val; } // @ts-ignore return types.getTypeParser(typeId, format); }, }, }; this.queryConfig = { name, text: queryString, rowMode: 'array', types: { // @ts-ignore getTypeParser: (typeId, format) => { if (typeId === types.builtins.TIMESTAMPTZ) { return (val) => val; } if (typeId === types.builtins.TIMESTAMP) { return (val) => val; } if (typeId === types.builtins.DATE) { return (val) => val; } if (typeId === types.builtins.INTERVAL) { return (val) => val; } // numeric[] if (typeId === 1231) { return (val) => val; } // timestamp[] if (typeId === 1115) { return (val) => val; } // timestamp with timezone[] if (typeId === 1185) { return (val) => val; } // interval[] if (typeId === 1187) { return (val) => val; } // date[] if (typeId === 1182) { return (val) => val; } // @ts-ignore return types.getTypeParser(typeId, format); }, }, }; } async execute(placeholderValues: Record | undefined = {}): Promise { return tracer.startActiveSpan('drizzle.execute', async () => { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.rawQueryConfig.text, params); const { fields, rawQueryConfig: rawQuery, client, queryConfig: query, joinsNotNullableMap, customResultMapper } = this; if (!fields && !customResultMapper) { return tracer.startActiveSpan('drizzle.driver.execute', async (span) => { span?.setAttributes({ 'drizzle.query.name': rawQuery.name, 'drizzle.query.text': rawQuery.text, 'drizzle.query.params': JSON.stringify(params), }); return this.queryWithCache(rawQuery.text, params, async () => { return await client.query(rawQuery, params); }); }); } const result = await tracer.startActiveSpan('drizzle.driver.execute', (span) => { span?.setAttributes({ 'drizzle.query.name': query.name, 'drizzle.query.text': query.text, 'drizzle.query.params': JSON.stringify(params), }); return this.queryWithCache(query.text, params, async () => { return await client.query(query, params); }); }); return tracer.startActiveSpan('drizzle.mapResponse', () => { return customResultMapper ? customResultMapper(result.rows) : result.rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); }); }); } all(placeholderValues: Record | undefined = {}): Promise { return tracer.startActiveSpan('drizzle.execute', () => { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.rawQueryConfig.text, params); return tracer.startActiveSpan('drizzle.driver.execute', (span) => { span?.setAttributes({ 'drizzle.query.name': this.rawQueryConfig.name, 'drizzle.query.text': this.rawQueryConfig.text, 'drizzle.query.params': JSON.stringify(params), }); return this.queryWithCache(this.rawQueryConfig.text, params, async () => { return this.client.query(this.rawQueryConfig, params); }).then((result) => result.rows); }); }); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } export interface NodePgSessionOptions { logger?: Logger; cache?: Cache; } export class NodePgSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { static override readonly [entityKind]: string = 'NodePgSession'; private logger: Logger; private cache: Cache; constructor( private client: NodePgClient, dialect: PgDialect, private schema: RelationalSchemaConfig | undefined, private options: NodePgSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PgPreparedQuery { return new NodePgPreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, name, isResponseInArrayMode, customResultMapper, ); } override async transaction( transaction: (tx: NodePgTransaction) => Promise, config?: PgTransactionConfig | undefined, ): Promise { const isPool = this.client instanceof Pool || Object.getPrototypeOf(this.client).constructor.name.includes('Pool'); // eslint-disable-line no-instanceof/no-instanceof const session = isPool ? new NodePgSession(await ( this.client).connect(), this.dialect, this.schema, this.options) : this; const tx = new NodePgTransaction(this.dialect, session, this.schema); await tx.execute(sql`begin${config ? sql` ${tx.getTransactionConfigSQL(config)}` : undefined}`); try { const result = await transaction(tx); await tx.execute(sql`commit`); return result; } catch (error) { await tx.execute(sql`rollback`); throw error; } finally { if (isPool) (session.client as PoolClient).release(); } } override async count(sql: SQL): Promise { const res = await this.execute<{ rows: [{ count: string }] }>(sql); return Number( res['rows'][0]['count'], ); } } export class NodePgTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { static override readonly [entityKind]: string = 'NodePgTransaction'; override async transaction(transaction: (tx: NodePgTransaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex + 1}`; const tx = new NodePgTransaction( this.dialect, this.session, this.schema, this.nestedIndex + 1, ); await tx.execute(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await tx.execute(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export interface NodePgQueryResultHKT extends PgQueryResultHKT { type: QueryResult>; } ================================================ FILE: drizzle-orm/src/op-sqlite/driver.ts ================================================ import type { OPSQLiteConnection, QueryResult } from '@op-engineering/op-sqlite'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts'; import type { DrizzleConfig } from '~/utils.ts'; import { OPSQLiteSession } from './session.ts'; export class OPSQLiteDatabase< TSchema extends Record = Record, > extends BaseSQLiteDatabase<'async', QueryResult, TSchema> { static override readonly [entityKind]: string = 'OPSQLiteDatabase'; } export function drizzle = Record>( client: OPSQLiteConnection, config: DrizzleConfig = {}, ): OPSQLiteDatabase & { $client: OPSQLiteConnection; } { const dialect = new SQLiteAsyncDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new OPSQLiteSession(client, dialect, schema, { logger, cache: config.cache }); const db = new OPSQLiteDatabase('async', dialect, session, schema) as OPSQLiteDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } ================================================ FILE: drizzle-orm/src/op-sqlite/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/op-sqlite/migrator.ts ================================================ import { useEffect, useReducer } from 'react'; import type { MigrationMeta } from '~/migrator.ts'; import type { OPSQLiteDatabase } from './driver.ts'; interface MigrationConfig { journal: { entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; }; migrations: Record; } async function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise { const migrationQueries: MigrationMeta[] = []; for await (const journalEntry of journal.entries) { const query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`]; if (!query) { throw new Error(`Missing migration: ${journalEntry.tag}`); } try { const result = query.split('--> statement-breakpoint').map((it) => { return it; }); migrationQueries.push({ sql: result, bps: journalEntry.breakpoints, folderMillis: journalEntry.when, hash: '', }); } catch { throw new Error(`Failed to parse migration: ${journalEntry.tag}`); } } return migrationQueries; } export async function migrate>( db: OPSQLiteDatabase, config: MigrationConfig, ) { const migrations = await readMigrationFiles(config); return db.dialect.migrate(migrations, db.session); } interface State { success: boolean; error?: Error; } type Action = | { type: 'migrating' } | { type: 'migrated'; payload: true } | { type: 'error'; payload: Error }; export const useMigrations = (db: OPSQLiteDatabase, migrations: { journal: { entries: { idx: number; when: number; tag: string; breakpoints: boolean }[]; }; migrations: Record; }): State => { const initialState: State = { success: false, error: undefined, }; const fetchReducer = (state: State, action: Action): State => { switch (action.type) { case 'migrating': { return { ...initialState }; } case 'migrated': { return { ...initialState, success: action.payload }; } case 'error': { return { ...initialState, error: action.payload }; } default: { return state; } } }; const [state, dispatch] = useReducer(fetchReducer, initialState); useEffect(() => { dispatch({ type: 'migrating' }); migrate(db, migrations).then(() => { dispatch({ type: 'migrated', payload: true }); }).catch((error) => { dispatch({ type: 'error', payload: error as Error }); }); }, []); return state; }; ================================================ FILE: drizzle-orm/src/op-sqlite/session.ts ================================================ import type { OPSQLiteConnection, QueryResult } from '@op-engineering/op-sqlite'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; import type { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts'; import { SQLiteTransaction } from '~/sqlite-core/index.ts'; import type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts'; import { type PreparedQueryConfig as PreparedQueryConfigBase, type SQLiteExecuteMethod, SQLitePreparedQuery, SQLiteSession, type SQLiteTransactionConfig, } from '~/sqlite-core/session.ts'; import { mapResultRow } from '~/utils.ts'; export interface OPSQLiteSessionOptions { logger?: Logger; cache?: Cache; } type PreparedQueryConfig = Omit; export class OPSQLiteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'async', QueryResult, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'OPSQLiteSession'; private logger: Logger; private cache: Cache; constructor( private client: OPSQLiteConnection, dialect: SQLiteAsyncDialect, private schema: RelationalSchemaConfig | undefined, options: OPSQLiteSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery>( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => unknown, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): OPSQLitePreparedQuery { return new OPSQLitePreparedQuery( this.client, query, this.logger, this.cache, queryMetadata, cacheConfig, fields, executeMethod, isResponseInArrayMode, customResultMapper, ); } override transaction( transaction: (tx: OPSQLiteTransaction) => T, config: SQLiteTransactionConfig = {}, ): T { const tx = new OPSQLiteTransaction('async', this.dialect, this, this.schema); this.run(sql.raw(`begin${config?.behavior ? ' ' + config.behavior : ''}`)); try { const result = transaction(tx); this.run(sql`commit`); return result; } catch (err) { this.run(sql`rollback`); throw err; } } } export class OPSQLiteTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'async', QueryResult, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'OPSQLiteTransaction'; override transaction(transaction: (tx: OPSQLiteTransaction) => T): T { const savepointName = `sp${this.nestedIndex}`; const tx = new OPSQLiteTransaction('async', this.dialect, this.session, this.schema, this.nestedIndex + 1); this.session.run(sql.raw(`savepoint ${savepointName}`)); try { const result = transaction(tx); this.session.run(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { this.session.run(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export class OPSQLitePreparedQuery extends SQLitePreparedQuery< { type: 'async'; run: QueryResult; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { static override readonly [entityKind]: string = 'OPSQLitePreparedQuery'; constructor( private client: OPSQLiteConnection, query: Query, private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => unknown, ) { super('sync', executeMethod, query, cache, queryMetadata, cacheConfig); } async run(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return await this.queryWithCache(this.query.sql, params, async () => { return this.client.executeAsync(this.query.sql, params); }); } async all(placeholderValues?: Record): Promise { const { fields, joinsNotNullableMap, query, logger, customResultMapper, client } = this; if (!fields && !customResultMapper) { const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); return await this.queryWithCache(query.sql, params, async () => { return client.execute(query.sql, params).rows?._array || []; }); } const rows = await this.values(placeholderValues) as unknown[][]; if (customResultMapper) { return customResultMapper(rows) as T['all']; } return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } async get(placeholderValues?: Record): Promise { const { fields, joinsNotNullableMap, customResultMapper, query, logger, client } = this; const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); if (!fields && !customResultMapper) { const rows = await this.queryWithCache(query.sql, params, async () => { return client.execute(query.sql, params).rows?._array || []; }); return rows[0]; } const rows = await this.values(placeholderValues) as unknown[][]; const row = rows[0]; if (!row) { return undefined; } if (customResultMapper) { return customResultMapper(rows) as T['get']; } return mapResultRow(fields!, row, joinsNotNullableMap); } async values(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return await this.queryWithCache(this.query.sql, params, async () => { return await this.client.executeRawAsync(this.query.sql, params); }); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } ================================================ FILE: drizzle-orm/src/operations.ts ================================================ import type { AnyColumn, Column } from './column.ts'; import type { SQL } from './sql/sql.ts'; import type { Subquery } from './subquery.ts'; import type { Table } from './table.ts'; export type RequiredKeyOnly = T extends AnyColumn<{ notNull: true; hasDefault: false; }> ? TKey : never; export type OptionalKeyOnly = TKey extends RequiredKeyOnly ? never : T extends { _: { generated: undefined; }; } ? (T extends { _: { identity: undefined; }; } ? TKey : T['_']['identity'] extends 'always' ? OverrideT extends true ? TKey : never : TKey) : never; // TODO: SQL -> SQLWrapper export type SelectedFieldsFlat = Record< string, TColumn | SQL | SQL.Aliased | Subquery >; export type SelectedFieldsFlatFull = Record< string, TColumn | SQL | SQL.Aliased >; export type SelectedFields = Record< string, SelectedFieldsFlat[string] | TTable | SelectedFieldsFlat >; export type SelectedFieldsOrdered = { path: string[]; field: TColumn | SQL | SQL.Aliased | Subquery; }[]; ================================================ FILE: drizzle-orm/src/pg-core/alias.ts ================================================ import { TableAliasProxyHandler } from '~/alias.ts'; import type { BuildAliasTable } from './query-builders/select.types.ts'; import type { PgTable } from './table.ts'; import type { PgViewBase } from './view-base.ts'; export function alias( table: TTable, alias: TAlias, ): BuildAliasTable { return new Proxy(table, new TableAliasProxyHandler(alias, false)) as any; } ================================================ FILE: drizzle-orm/src/pg-core/checks.ts ================================================ import { entityKind } from '~/entity.ts'; import type { SQL } from '~/sql/index.ts'; import type { PgTable } from './table.ts'; export class CheckBuilder { static readonly [entityKind]: string = 'PgCheckBuilder'; protected brand!: 'PgConstraintBuilder'; constructor(public name: string, public value: SQL) {} /** @internal */ build(table: PgTable): Check { return new Check(table, this); } } export class Check { static readonly [entityKind]: string = 'PgCheck'; readonly name: string; readonly value: SQL; constructor(public table: PgTable, builder: CheckBuilder) { this.name = builder.name; this.value = builder.value; } } export function check(name: string, value: SQL): CheckBuilder { return new CheckBuilder(name, value); } ================================================ FILE: drizzle-orm/src/pg-core/columns/all.ts ================================================ import { bigint } from './bigint.ts'; import { bigserial } from './bigserial.ts'; import { boolean } from './boolean.ts'; import { char } from './char.ts'; import { cidr } from './cidr.ts'; import { customType } from './custom.ts'; import { date } from './date.ts'; import { doublePrecision } from './double-precision.ts'; import { inet } from './inet.ts'; import { integer } from './integer.ts'; import { interval } from './interval.ts'; import { json } from './json.ts'; import { jsonb } from './jsonb.ts'; import { line } from './line.ts'; import { macaddr } from './macaddr.ts'; import { macaddr8 } from './macaddr8.ts'; import { numeric } from './numeric.ts'; import { point } from './point.ts'; import { geometry } from './postgis_extension/geometry.ts'; import { real } from './real.ts'; import { serial } from './serial.ts'; import { smallint } from './smallint.ts'; import { smallserial } from './smallserial.ts'; import { text } from './text.ts'; import { time } from './time.ts'; import { timestamp } from './timestamp.ts'; import { uuid } from './uuid.ts'; import { varchar } from './varchar.ts'; import { bit } from './vector_extension/bit.ts'; import { halfvec } from './vector_extension/halfvec.ts'; import { sparsevec } from './vector_extension/sparsevec.ts'; import { vector } from './vector_extension/vector.ts'; export function getPgColumnBuilders() { return { bigint, bigserial, boolean, char, cidr, customType, date, doublePrecision, inet, integer, interval, json, jsonb, line, macaddr, macaddr8, numeric, point, geometry, real, serial, smallint, smallserial, text, time, timestamp, uuid, varchar, bit, halfvec, sparsevec, vector, }; } export type PgColumnsBuilders = ReturnType; ================================================ FILE: drizzle-orm/src/pg-core/columns/bigint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn } from './common.ts'; import { PgIntColumnBaseBuilder } from './int.common.ts'; export type PgBigInt53BuilderInitial = PgBigInt53Builder<{ name: TName; dataType: 'number'; columnType: 'PgBigInt53'; data: number; driverParam: number | string; enumValues: undefined; }>; export class PgBigInt53Builder> extends PgIntColumnBaseBuilder { static override readonly [entityKind]: string = 'PgBigInt53Builder'; constructor(name: T['name']) { super(name, 'number', 'PgBigInt53'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgBigInt53> { return new PgBigInt53>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgBigInt53> extends PgColumn { static override readonly [entityKind]: string = 'PgBigInt53'; getSQLType(): string { return 'bigint'; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'number') { return value; } return Number(value); } } export type PgBigInt64BuilderInitial = PgBigInt64Builder<{ name: TName; dataType: 'bigint'; columnType: 'PgBigInt64'; data: bigint; driverParam: string; enumValues: undefined; }>; export class PgBigInt64Builder> extends PgIntColumnBaseBuilder { static override readonly [entityKind]: string = 'PgBigInt64Builder'; constructor(name: T['name']) { super(name, 'bigint', 'PgBigInt64'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgBigInt64> { return new PgBigInt64>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgBigInt64> extends PgColumn { static override readonly [entityKind]: string = 'PgBigInt64'; getSQLType(): string { return 'bigint'; } // eslint-disable-next-line unicorn/prefer-native-coercion-functions override mapFromDriverValue(value: string): bigint { return BigInt(value); } } export interface PgBigIntConfig { mode: T; } export function bigint( config: PgBigIntConfig, ): TMode extends 'number' ? PgBigInt53BuilderInitial<''> : PgBigInt64BuilderInitial<''>; export function bigint( name: TName, config: PgBigIntConfig, ): TMode extends 'number' ? PgBigInt53BuilderInitial : PgBigInt64BuilderInitial; export function bigint(a: string | PgBigIntConfig, b?: PgBigIntConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (config.mode === 'number') { return new PgBigInt53Builder(name); } return new PgBigInt64Builder(name); } ================================================ FILE: drizzle-orm/src/pg-core/columns/bigserial.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, HasDefault, MakeColumnConfig, NotNull, } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import type { AnyPgTable } from '../table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgBigSerial53BuilderInitial = NotNull< HasDefault< PgBigSerial53Builder<{ name: TName; dataType: 'number'; columnType: 'PgBigSerial53'; data: number; driverParam: number; enumValues: undefined; }> > >; export class PgBigSerial53Builder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgBigSerial53Builder'; constructor(name: string) { super(name, 'number', 'PgBigSerial53'); this.config.hasDefault = true; this.config.notNull = true; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgBigSerial53> { return new PgBigSerial53>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgBigSerial53> extends PgColumn { static override readonly [entityKind]: string = 'PgBigSerial53'; getSQLType(): string { return 'bigserial'; } override mapFromDriverValue(value: number): number { if (typeof value === 'number') { return value; } return Number(value); } } export type PgBigSerial64BuilderInitial = NotNull< HasDefault< PgBigSerial64Builder<{ name: TName; dataType: 'bigint'; columnType: 'PgBigSerial64'; data: bigint; driverParam: string; enumValues: undefined; }> > >; export class PgBigSerial64Builder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgBigSerial64Builder'; constructor(name: string) { super(name, 'bigint', 'PgBigSerial64'); this.config.hasDefault = true; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgBigSerial64> { return new PgBigSerial64>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgBigSerial64> extends PgColumn { static override readonly [entityKind]: string = 'PgBigSerial64'; getSQLType(): string { return 'bigserial'; } // eslint-disable-next-line unicorn/prefer-native-coercion-functions override mapFromDriverValue(value: string): bigint { return BigInt(value); } } export interface PgBigSerialConfig { mode: T; } export function bigserial( config: PgBigSerialConfig, ): TMode extends 'number' ? PgBigSerial53BuilderInitial<''> : PgBigSerial64BuilderInitial<''>; export function bigserial( name: TName, config: PgBigSerialConfig, ): TMode extends 'number' ? PgBigSerial53BuilderInitial : PgBigSerial64BuilderInitial; export function bigserial(a: string | PgBigSerialConfig, b?: PgBigSerialConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (config.mode === 'number') { return new PgBigSerial53Builder(name); } return new PgBigSerial64Builder(name); } ================================================ FILE: drizzle-orm/src/pg-core/columns/boolean.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgBooleanBuilderInitial = PgBooleanBuilder<{ name: TName; dataType: 'boolean'; columnType: 'PgBoolean'; data: boolean; driverParam: boolean; enumValues: undefined; }>; export class PgBooleanBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgBooleanBuilder'; constructor(name: T['name']) { super(name, 'boolean', 'PgBoolean'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgBoolean> { return new PgBoolean>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgBoolean> extends PgColumn { static override readonly [entityKind]: string = 'PgBoolean'; getSQLType(): string { return 'boolean'; } } export function boolean(): PgBooleanBuilderInitial<''>; export function boolean(name: TName): PgBooleanBuilderInitial; export function boolean(name?: string) { return new PgBooleanBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/char.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgCharBuilderInitial< TName extends string, TEnum extends [string, ...string[]], TLength extends number | undefined, > = PgCharBuilder<{ name: TName; dataType: 'string'; columnType: 'PgChar'; data: TEnum[number]; enumValues: TEnum; driverParam: string; length: TLength; }>; export class PgCharBuilder & { length?: number | undefined }> extends PgColumnBuilder< T, { length: T['length']; enumValues: T['enumValues'] }, { length: T['length'] } > { static override readonly [entityKind]: string = 'PgCharBuilder'; constructor(name: T['name'], config: PgCharConfig) { super(name, 'string', 'PgChar'); this.config.length = config.length; this.config.enumValues = config.enum; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgChar & { length: T['length'] }> { return new PgChar & { length: T['length'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgChar & { length?: number | undefined }> extends PgColumn { static override readonly [entityKind]: string = 'PgChar'; readonly length = this.config.length; override readonly enumValues = this.config.enumValues; getSQLType(): string { return this.length === undefined ? `char` : `char(${this.length})`; } } export interface PgCharConfig< TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, TLength extends number | undefined = number | undefined, > { enum?: TEnum; length?: TLength; } export function char(): PgCharBuilderInitial<'', [string, ...string[]], undefined>; export function char, L extends number | undefined>( config?: PgCharConfig, L>, ): PgCharBuilderInitial<'', Writable, L>; export function char< TName extends string, U extends string, T extends Readonly<[U, ...U[]]>, L extends number | undefined, >( name: TName, config?: PgCharConfig, L>, ): PgCharBuilderInitial, L>; export function char(a?: string | PgCharConfig, b: PgCharConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new PgCharBuilder(name, config as any); } ================================================ FILE: drizzle-orm/src/pg-core/columns/cidr.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '../table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgCidrBuilderInitial = PgCidrBuilder<{ name: TName; dataType: 'string'; columnType: 'PgCidr'; data: string; driverParam: string; enumValues: undefined; }>; export class PgCidrBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgCidrBuilder'; constructor(name: T['name']) { super(name, 'string', 'PgCidr'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgCidr> { return new PgCidr>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgCidr> extends PgColumn { static override readonly [entityKind]: string = 'PgCidr'; getSQLType(): string { return 'cidr'; } } export function cidr(): PgCidrBuilderInitial<''>; export function cidr(name: TName): PgCidrBuilderInitial; export function cidr(name?: string) { return new PgCidrBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/common.ts ================================================ import type { ColumnBuilderBase, ColumnBuilderBaseConfig, ColumnBuilderExtraConfig, ColumnBuilderRuntimeConfig, ColumnDataType, HasGenerated, MakeColumnConfig, } from '~/column-builder.ts'; import { ColumnBuilder } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import type { Simplify, Update } from '~/utils.ts'; import type { ForeignKey, UpdateDeleteAction } from '~/pg-core/foreign-keys.ts'; import { ForeignKeyBuilder } from '~/pg-core/foreign-keys.ts'; import type { AnyPgTable, PgTable } from '~/pg-core/table.ts'; import type { SQL } from '~/sql/sql.ts'; import { iife } from '~/tracing-utils.ts'; import type { PgIndexOpClass } from '../indexes.ts'; import { uniqueKeyName } from '../unique-constraint.ts'; import { makePgArray, parsePgArray } from '../utils/array.ts'; export interface ReferenceConfig { ref: () => PgColumn; actions: { onUpdate?: UpdateDeleteAction; onDelete?: UpdateDeleteAction; }; } export interface PgColumnBuilderBase< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TTypeConfig extends object = object, > extends ColumnBuilderBase {} export abstract class PgColumnBuilder< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, TTypeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, > extends ColumnBuilder implements PgColumnBuilderBase { private foreignKeyConfigs: ReferenceConfig[] = []; static override readonly [entityKind]: string = 'PgColumnBuilder'; array(size?: TSize): PgArrayBuilder< & { name: T['name']; dataType: 'array'; columnType: 'PgArray'; data: T['data'][]; driverParam: T['driverParam'][] | string; enumValues: T['enumValues']; size: TSize; baseBuilder: T; } & (T extends { notNull: true } ? { notNull: true } : {}) & (T extends { hasDefault: true } ? { hasDefault: true } : {}), T > { return new PgArrayBuilder(this.config.name, this as PgColumnBuilder, size as any); } references( ref: ReferenceConfig['ref'], actions: ReferenceConfig['actions'] = {}, ): this { this.foreignKeyConfigs.push({ ref, actions }); return this; } unique( name?: string, config?: { nulls: 'distinct' | 'not distinct' }, ): this { this.config.isUnique = true; this.config.uniqueName = name; this.config.uniqueType = config?.nulls; return this; } generatedAlwaysAs(as: SQL | T['data'] | (() => SQL)): HasGenerated { this.config.generated = { as, type: 'always', mode: 'stored', }; return this as HasGenerated; } /** @internal */ buildForeignKeys(column: PgColumn, table: PgTable): ForeignKey[] { return this.foreignKeyConfigs.map(({ ref, actions }) => { return iife( (ref, actions) => { const builder = new ForeignKeyBuilder(() => { const foreignColumn = ref(); return { columns: [column], foreignColumns: [foreignColumn] }; }); if (actions.onUpdate) { builder.onUpdate(actions.onUpdate); } if (actions.onDelete) { builder.onDelete(actions.onDelete); } return builder.build(table); }, ref, actions, ); }); } /** @internal */ abstract build( table: AnyPgTable<{ name: TTableName }>, ): PgColumn>; /** @internal */ buildExtraConfigColumn( table: AnyPgTable<{ name: TTableName }>, ): ExtraConfigColumn { return new ExtraConfigColumn(table, this.config); } } // To understand how to use `PgColumn` and `PgColumn`, see `Column` and `AnyColumn` documentation. export abstract class PgColumn< T extends ColumnBaseConfig = ColumnBaseConfig, TRuntimeConfig extends object = {}, TTypeConfig extends object = {}, > extends Column { static override readonly [entityKind]: string = 'PgColumn'; constructor( override readonly table: PgTable, config: ColumnBuilderRuntimeConfig, ) { if (!config.uniqueName) { config.uniqueName = uniqueKeyName(table, [config.name]); } super(table, config); } } export type IndexedExtraConfigType = { order?: 'asc' | 'desc'; nulls?: 'first' | 'last'; opClass?: string }; export class ExtraConfigColumn< T extends ColumnBaseConfig = ColumnBaseConfig, > extends PgColumn { static override readonly [entityKind]: string = 'ExtraConfigColumn'; override getSQLType(): string { return this.getSQLType(); } indexConfig: IndexedExtraConfigType = { order: this.config.order ?? 'asc', nulls: this.config.nulls ?? 'last', opClass: this.config.opClass, }; defaultConfig: IndexedExtraConfigType = { order: 'asc', nulls: 'last', opClass: undefined, }; asc(): Omit { this.indexConfig.order = 'asc'; return this; } desc(): Omit { this.indexConfig.order = 'desc'; return this; } nullsFirst(): Omit { this.indexConfig.nulls = 'first'; return this; } nullsLast(): Omit { this.indexConfig.nulls = 'last'; return this; } /** * ### PostgreSQL documentation quote * * > An operator class with optional parameters can be specified for each column of an index. * The operator class identifies the operators to be used by the index for that column. * For example, a B-tree index on four-byte integers would use the int4_ops class; * this operator class includes comparison functions for four-byte integers. * In practice the default operator class for the column's data type is usually sufficient. * The main point of having operator classes is that for some data types, there could be more than one meaningful ordering. * For example, we might want to sort a complex-number data type either by absolute value or by real part. * We could do this by defining two operator classes for the data type and then selecting the proper class when creating an index. * More information about operator classes check: * * ### Useful links * https://www.postgresql.org/docs/current/sql-createindex.html * * https://www.postgresql.org/docs/current/indexes-opclass.html * * https://www.postgresql.org/docs/current/xindex.html * * ### Additional types * If you have the `pg_vector` extension installed in your database, you can use the * `vector_l2_ops`, `vector_ip_ops`, `vector_cosine_ops`, `vector_l1_ops`, `bit_hamming_ops`, `bit_jaccard_ops`, `halfvec_l2_ops`, `sparsevec_l2_ops` options, which are predefined types. * * **You can always specify any string you want in the operator class, in case Drizzle doesn't have it natively in its types** * * @param opClass * @returns */ op(opClass: PgIndexOpClass): Omit { this.indexConfig.opClass = opClass; return this; } } export class IndexedColumn { static readonly [entityKind]: string = 'IndexedColumn'; constructor( name: string | undefined, keyAsName: boolean, type: string, indexConfig: IndexedExtraConfigType, ) { this.name = name; this.keyAsName = keyAsName; this.type = type; this.indexConfig = indexConfig; } name: string | undefined; keyAsName: boolean; type: string; indexConfig: IndexedExtraConfigType; } export type AnyPgColumn> = {}> = PgColumn< Required, TPartial>> >; export type PgArrayColumnBuilderBaseConfig = ColumnBuilderBaseConfig<'array', 'PgArray'> & { size: number | undefined; baseBuilder: ColumnBuilderBaseConfig; }; export class PgArrayBuilder< T extends PgArrayColumnBuilderBaseConfig, TBase extends ColumnBuilderBaseConfig | PgArrayColumnBuilderBaseConfig, > extends PgColumnBuilder< T, { baseBuilder: TBase extends PgArrayColumnBuilderBaseConfig ? PgArrayBuilder< TBase, TBase extends { baseBuilder: infer TBaseBuilder extends ColumnBuilderBaseConfig } ? TBaseBuilder : never > : PgColumnBuilder>>>; size: T['size']; }, { baseBuilder: TBase extends PgArrayColumnBuilderBaseConfig ? PgArrayBuilder< TBase, TBase extends { baseBuilder: infer TBaseBuilder extends ColumnBuilderBaseConfig } ? TBaseBuilder : never > : PgColumnBuilder>>>; size: T['size']; } > { static override readonly [entityKind] = 'PgArrayBuilder'; constructor( name: string, baseBuilder: PgArrayBuilder['config']['baseBuilder'], size: T['size'], ) { super(name, 'array', 'PgArray'); this.config.baseBuilder = baseBuilder; this.config.size = size; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgArray & { size: T['size']; baseBuilder: T['baseBuilder'] }, TBase> { const baseColumn = this.config.baseBuilder.build(table); return new PgArray & { size: T['size']; baseBuilder: T['baseBuilder'] }, TBase>( table as AnyPgTable<{ name: MakeColumnConfig['tableName'] }>, this.config as ColumnBuilderRuntimeConfig, baseColumn, ); } } export class PgArray< T extends ColumnBaseConfig<'array', 'PgArray'> & { size: number | undefined; baseBuilder: ColumnBuilderBaseConfig; }, TBase extends ColumnBuilderBaseConfig, > extends PgColumn { readonly size: T['size']; static override readonly [entityKind]: string = 'PgArray'; constructor( table: AnyPgTable<{ name: T['tableName'] }>, config: PgArrayBuilder['config'], readonly baseColumn: PgColumn, readonly range?: [number | undefined, number | undefined], ) { super(table, config); this.size = config.size; } getSQLType(): string { return `${this.baseColumn.getSQLType()}[${typeof this.size === 'number' ? this.size : ''}]`; } override mapFromDriverValue(value: unknown[] | string): T['data'] { if (typeof value === 'string') { // Thank you node-postgres for not parsing enum arrays value = parsePgArray(value); } return value.map((v) => this.baseColumn.mapFromDriverValue(v)); } override mapToDriverValue(value: unknown[], isNestedArray = false): unknown[] | string { const a = value.map((v) => v === null ? null : is(this.baseColumn, PgArray) ? this.baseColumn.mapToDriverValue(v as unknown[], true) : this.baseColumn.mapToDriverValue(v) ); if (isNestedArray) return a; return makePgArray(a); } } ================================================ FILE: drizzle-orm/src/pg-core/columns/custom.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import type { SQL } from '~/sql/sql.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type ConvertCustomConfig> = & { name: TName; dataType: 'custom'; columnType: 'PgCustomColumn'; data: T['data']; driverParam: T['driverData']; enumValues: undefined; } & (T['notNull'] extends true ? { notNull: true } : {}) & (T['default'] extends true ? { hasDefault: true } : {}); export interface PgCustomColumnInnerConfig { customTypeValues: CustomTypeValues; } export class PgCustomColumnBuilder> extends PgColumnBuilder< T, { fieldConfig: CustomTypeValues['config']; customTypeParams: CustomTypeParams; }, { pgColumnBuilderBrand: 'PgCustomColumnBuilderBrand'; } > { static override readonly [entityKind]: string = 'PgCustomColumnBuilder'; constructor( name: T['name'], fieldConfig: CustomTypeValues['config'], customTypeParams: CustomTypeParams, ) { super(name, 'custom', 'PgCustomColumn'); this.config.fieldConfig = fieldConfig; this.config.customTypeParams = customTypeParams; } /** @internal */ build( table: AnyPgTable<{ name: TTableName }>, ): PgCustomColumn> { return new PgCustomColumn>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgCustomColumn> extends PgColumn { static override readonly [entityKind]: string = 'PgCustomColumn'; private sqlName: string; private mapTo?: (value: T['data']) => T['driverParam']; private mapFrom?: (value: T['driverParam']) => T['data']; constructor( table: AnyPgTable<{ name: T['tableName'] }>, config: PgCustomColumnBuilder['config'], ) { super(table, config); this.sqlName = config.customTypeParams.dataType(config.fieldConfig); this.mapTo = config.customTypeParams.toDriver; this.mapFrom = config.customTypeParams.fromDriver; } getSQLType(): string { return this.sqlName; } override mapFromDriverValue(value: T['driverParam']): T['data'] { return typeof this.mapFrom === 'function' ? this.mapFrom(value) : value as T['data']; } override mapToDriverValue(value: T['data']): T['driverParam'] { return typeof this.mapTo === 'function' ? this.mapTo(value) : value as T['data']; } } export type CustomTypeValues = { /** * Required type for custom column, that will infer proper type model * * Examples: * * If you want your column to be `string` type after selecting/or on inserting - use `data: string`. Like `text`, `varchar` * * If you want your column to be `number` type after selecting/or on inserting - use `data: number`. Like `integer` */ data: unknown; /** * Type helper, that represents what type database driver is accepting for specific database data type */ driverData?: unknown; /** * What config type should be used for {@link CustomTypeParams} `dataType` generation */ config?: Record; /** * Whether the config argument should be required or not * @default false */ configRequired?: boolean; /** * If your custom data type should be notNull by default you can use `notNull: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ notNull?: boolean; /** * If your custom data type has default you can use `default: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ default?: boolean; }; export interface CustomTypeParams { /** * Database data type string representation, that is used for migrations * @example * ``` * `jsonb`, `text` * ``` * * If database data type needs additional params you can use them from `config` param * @example * ``` * `varchar(256)`, `numeric(2,3)` * ``` * * To make `config` be of specific type please use config generic in {@link CustomTypeValues} * * @example * Usage example * ``` * dataType() { * return 'boolean'; * }, * ``` * Or * ``` * dataType(config) { * return typeof config.length !== 'undefined' ? `varchar(${config.length})` : `varchar`; * } * ``` */ dataType: (config: T['config'] | (Equal extends true ? never : undefined)) => string; /** * Optional mapping function, between user input and driver * @example * For example, when using jsonb we need to map JS/TS object to string before writing to database * ``` * toDriver(value: TData): string { * return JSON.stringify(value); * } * ``` */ toDriver?: (value: T['data']) => T['driverData'] | SQL; /** * Optional mapping function, that is responsible for data mapping from database to JS/TS code * @example * For example, when using timestamp we need to map string Date representation to JS Date * ``` * fromDriver(value: string): Date { * return new Date(value); * }, * ``` */ fromDriver?: (value: T['driverData']) => T['data']; } /** * Custom pg database data type generator */ export function customType( customTypeParams: CustomTypeParams, ): Equal extends true ? { & T['config']>( fieldConfig: TConfig, ): PgCustomColumnBuilder>; ( dbName: TName, fieldConfig: T['config'], ): PgCustomColumnBuilder>; } : { (): PgCustomColumnBuilder>; & T['config']>( fieldConfig?: TConfig, ): PgCustomColumnBuilder>; ( dbName: TName, fieldConfig?: T['config'], ): PgCustomColumnBuilder>; } { return ( a?: TName | T['config'], b?: T['config'], ): PgCustomColumnBuilder> => { const { name, config } = getColumnNameAndConfig(a, b); return new PgCustomColumnBuilder(name as ConvertCustomConfig['name'], config, customTypeParams); }; } ================================================ FILE: drizzle-orm/src/pg-core/columns/date.common.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnDataType } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import { sql } from '~/sql/sql.ts'; import { PgColumnBuilder } from './common.ts'; export abstract class PgDateColumnBaseBuilder< T extends ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, > extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgDateColumnBaseBuilder'; defaultNow() { return this.default(sql`now()`); } } ================================================ FILE: drizzle-orm/src/pg-core/columns/date.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn } from './common.ts'; import { PgDateColumnBaseBuilder } from './date.common.ts'; export type PgDateBuilderInitial = PgDateBuilder<{ name: TName; dataType: 'date'; columnType: 'PgDate'; data: Date; driverParam: string; enumValues: undefined; }>; export class PgDateBuilder> extends PgDateColumnBaseBuilder { static override readonly [entityKind]: string = 'PgDateBuilder'; constructor(name: T['name']) { super(name, 'date', 'PgDate'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgDate> { return new PgDate>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgDate> extends PgColumn { static override readonly [entityKind]: string = 'PgDate'; getSQLType(): string { return 'date'; } override mapFromDriverValue(value: string | Date): Date { if (typeof value === 'string') return new Date(value); return value; } override mapToDriverValue(value: Date): string { return value.toISOString(); } } export type PgDateStringBuilderInitial = PgDateStringBuilder<{ name: TName; dataType: 'string'; columnType: 'PgDateString'; data: string; driverParam: string; enumValues: undefined; }>; export class PgDateStringBuilder> extends PgDateColumnBaseBuilder { static override readonly [entityKind]: string = 'PgDateStringBuilder'; constructor(name: T['name']) { super(name, 'string', 'PgDateString'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgDateString> { return new PgDateString>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgDateString> extends PgColumn { static override readonly [entityKind]: string = 'PgDateString'; getSQLType(): string { return 'date'; } override mapFromDriverValue(value: Date | string): string { if (typeof value === 'string') return value; return value.toISOString().slice(0, -14); } } export interface PgDateConfig { mode: T; } export function date(): PgDateStringBuilderInitial<''>; export function date( config?: PgDateConfig, ): Equal extends true ? PgDateBuilderInitial<''> : PgDateStringBuilderInitial<''>; export function date( name: TName, config?: PgDateConfig, ): Equal extends true ? PgDateBuilderInitial : PgDateStringBuilderInitial; export function date(a?: string | PgDateConfig, b?: PgDateConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (config?.mode === 'date') { return new PgDateBuilder(name); } return new PgDateStringBuilder(name); } ================================================ FILE: drizzle-orm/src/pg-core/columns/double-precision.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgDoublePrecisionBuilderInitial = PgDoublePrecisionBuilder<{ name: TName; dataType: 'number'; columnType: 'PgDoublePrecision'; data: number; driverParam: string | number; enumValues: undefined; }>; export class PgDoublePrecisionBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgDoublePrecisionBuilder'; constructor(name: T['name']) { super(name, 'number', 'PgDoublePrecision'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgDoublePrecision> { return new PgDoublePrecision>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgDoublePrecision> extends PgColumn { static override readonly [entityKind]: string = 'PgDoublePrecision'; getSQLType(): string { return 'double precision'; } override mapFromDriverValue(value: string | number): number { if (typeof value === 'string') { return Number.parseFloat(value); } return value; } } export function doublePrecision(): PgDoublePrecisionBuilderInitial<''>; export function doublePrecision(name: TName): PgDoublePrecisionBuilderInitial; export function doublePrecision(name?: string) { return new PgDoublePrecisionBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/enum.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import type { NonArray, Writable } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; // Enum as ts enum export type PgEnumObjectColumnBuilderInitial = PgEnumObjectColumnBuilder<{ name: TName; dataType: 'string'; columnType: 'PgEnumObjectColumn'; data: TValues[keyof TValues]; enumValues: string[]; driverParam: string; }>; export interface PgEnumObject { (): PgEnumObjectColumnBuilderInitial<'', TValues>; (name: TName): PgEnumObjectColumnBuilderInitial; (name?: TName): PgEnumObjectColumnBuilderInitial; readonly enumName: string; readonly enumValues: string[]; readonly schema: string | undefined; /** @internal */ [isPgEnumSym]: true; } export class PgEnumObjectColumnBuilder< T extends ColumnBuilderBaseConfig<'string', 'PgEnumObjectColumn'> & { enumValues: string[] }, > extends PgColumnBuilder }> { static override readonly [entityKind]: string = 'PgEnumObjectColumnBuilder'; constructor(name: T['name'], enumInstance: PgEnumObject) { super(name, 'string', 'PgEnumObjectColumn'); this.config.enum = enumInstance; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgEnumObjectColumn> { return new PgEnumObjectColumn>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgEnumObjectColumn & { enumValues: object }> extends PgColumn }> { static override readonly [entityKind]: string = 'PgEnumObjectColumn'; readonly enum; override readonly enumValues = this.config.enum.enumValues; constructor( table: AnyPgTable<{ name: T['tableName'] }>, config: PgEnumObjectColumnBuilder['config'], ) { super(table, config); this.enum = config.enum; } getSQLType(): string { return this.enum.enumName; } } // Enum as string union export type PgEnumColumnBuilderInitial = PgEnumColumnBuilder<{ name: TName; dataType: 'string'; columnType: 'PgEnumColumn'; data: TValues[number]; enumValues: TValues; driverParam: string; }>; const isPgEnumSym = Symbol.for('drizzle:isPgEnum'); export interface PgEnum { (): PgEnumColumnBuilderInitial<'', TValues>; (name: TName): PgEnumColumnBuilderInitial; (name?: TName): PgEnumColumnBuilderInitial; readonly enumName: string; readonly enumValues: TValues; readonly schema: string | undefined; /** @internal */ [isPgEnumSym]: true; } export function isPgEnum(obj: unknown): obj is PgEnum<[string, ...string[]]> { return !!obj && typeof obj === 'function' && isPgEnumSym in obj && obj[isPgEnumSym] === true; } export class PgEnumColumnBuilder< T extends ColumnBuilderBaseConfig<'string', 'PgEnumColumn'> & { enumValues: [string, ...string[]] }, > extends PgColumnBuilder }> { static override readonly [entityKind]: string = 'PgEnumColumnBuilder'; constructor(name: T['name'], enumInstance: PgEnum) { super(name, 'string', 'PgEnumColumn'); this.config.enum = enumInstance; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgEnumColumn> { return new PgEnumColumn>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgEnumColumn & { enumValues: [string, ...string[]] }> extends PgColumn }> { static override readonly [entityKind]: string = 'PgEnumColumn'; readonly enum = this.config.enum; override readonly enumValues = this.config.enum.enumValues; constructor( table: AnyPgTable<{ name: T['tableName'] }>, config: PgEnumColumnBuilder['config'], ) { super(table, config); this.enum = config.enum; } getSQLType(): string { return this.enum.enumName; } } export function pgEnum>( enumName: string, values: T | Writable, ): PgEnum>; export function pgEnum>( enumName: string, enumObj: NonArray, ): PgEnumObject; export function pgEnum( enumName: any, input: any, ): any { return Array.isArray(input) ? pgEnumWithSchema(enumName, [...input] as [string, ...string[]], undefined) : pgEnumObjectWithSchema(enumName, input, undefined); } /** @internal */ export function pgEnumWithSchema>( enumName: string, values: T | Writable, schema?: string, ): PgEnum> { const enumInstance: PgEnum> = Object.assign( (name?: TName): PgEnumColumnBuilderInitial> => new PgEnumColumnBuilder(name ?? '' as TName, enumInstance), { enumName, enumValues: values, schema, [isPgEnumSym]: true, } as const, ); return enumInstance; } /** @internal */ export function pgEnumObjectWithSchema( enumName: string, values: T, schema?: string, ): PgEnumObject { const enumInstance: PgEnumObject = Object.assign( (name?: TName): PgEnumObjectColumnBuilderInitial => new PgEnumObjectColumnBuilder(name ?? '' as TName, enumInstance), { enumName, enumValues: Object.values(values), schema, [isPgEnumSym]: true, } as const, ); return enumInstance; } ================================================ FILE: drizzle-orm/src/pg-core/columns/index.ts ================================================ export * from './bigint.ts'; export * from './bigserial.ts'; export * from './boolean.ts'; export * from './char.ts'; export * from './cidr.ts'; export * from './common.ts'; export * from './custom.ts'; export * from './date.ts'; export * from './double-precision.ts'; export * from './enum.ts'; export * from './inet.ts'; export * from './int.common.ts'; export * from './integer.ts'; export * from './interval.ts'; export * from './json.ts'; export * from './jsonb.ts'; export * from './line.ts'; export * from './macaddr.ts'; export * from './macaddr8.ts'; export * from './numeric.ts'; export * from './point.ts'; export * from './postgis_extension/geometry.ts'; export * from './real.ts'; export * from './serial.ts'; export * from './smallint.ts'; export * from './smallserial.ts'; export * from './text.ts'; export * from './time.ts'; export * from './timestamp.ts'; export * from './uuid.ts'; export * from './varchar.ts'; export * from './vector_extension/bit.ts'; export * from './vector_extension/halfvec.ts'; export * from './vector_extension/sparsevec.ts'; export * from './vector_extension/vector.ts'; ================================================ FILE: drizzle-orm/src/pg-core/columns/inet.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '../table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgInetBuilderInitial = PgInetBuilder<{ name: TName; dataType: 'string'; columnType: 'PgInet'; data: string; driverParam: string; enumValues: undefined; }>; export class PgInetBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgInetBuilder'; constructor(name: T['name']) { super(name, 'string', 'PgInet'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgInet> { return new PgInet>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgInet> extends PgColumn { static override readonly [entityKind]: string = 'PgInet'; getSQLType(): string { return 'inet'; } } export function inet(): PgInetBuilderInitial<''>; export function inet(name: TName): PgInetBuilderInitial; export function inet(name?: string) { return new PgInetBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/int.common.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnDataType, GeneratedIdentityConfig, IsIdentity } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import type { PgSequenceOptions } from '../sequence.ts'; import { PgColumnBuilder } from './common.ts'; export abstract class PgIntColumnBaseBuilder< T extends ColumnBuilderBaseConfig, > extends PgColumnBuilder< T, { generatedIdentity: GeneratedIdentityConfig } > { static override readonly [entityKind]: string = 'PgIntColumnBaseBuilder'; generatedAlwaysAsIdentity( sequence?: PgSequenceOptions & { name?: string }, ): IsIdentity { if (sequence) { const { name, ...options } = sequence; this.config.generatedIdentity = { type: 'always', sequenceName: name, sequenceOptions: options, }; } else { this.config.generatedIdentity = { type: 'always', }; } this.config.hasDefault = true; this.config.notNull = true; return this as IsIdentity; } generatedByDefaultAsIdentity( sequence?: PgSequenceOptions & { name?: string }, ): IsIdentity { if (sequence) { const { name, ...options } = sequence; this.config.generatedIdentity = { type: 'byDefault', sequenceName: name, sequenceOptions: options, }; } else { this.config.generatedIdentity = { type: 'byDefault', }; } this.config.hasDefault = true; this.config.notNull = true; return this as IsIdentity; } } ================================================ FILE: drizzle-orm/src/pg-core/columns/integer.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '../table.ts'; import { PgColumn } from './common.ts'; import { PgIntColumnBaseBuilder } from './int.common.ts'; export type PgIntegerBuilderInitial = PgIntegerBuilder<{ name: TName; dataType: 'number'; columnType: 'PgInteger'; data: number; driverParam: number | string; enumValues: undefined; }>; export class PgIntegerBuilder> extends PgIntColumnBaseBuilder { static override readonly [entityKind]: string = 'PgIntegerBuilder'; constructor(name: T['name']) { super(name, 'number', 'PgInteger'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgInteger> { return new PgInteger>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgInteger> extends PgColumn { static override readonly [entityKind]: string = 'PgInteger'; getSQLType(): string { return 'integer'; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return Number.parseInt(value); } return value; } } export function integer(): PgIntegerBuilderInitial<''>; export function integer(name: TName): PgIntegerBuilderInitial; export function integer(name?: string) { return new PgIntegerBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/interval.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; import type { Precision } from './timestamp.ts'; export type PgIntervalBuilderInitial = PgIntervalBuilder<{ name: TName; dataType: 'string'; columnType: 'PgInterval'; data: string; driverParam: string; enumValues: undefined; }>; export class PgIntervalBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgIntervalBuilder'; constructor( name: T['name'], intervalConfig: IntervalConfig, ) { super(name, 'string', 'PgInterval'); this.config.intervalConfig = intervalConfig; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgInterval> { return new PgInterval>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgInterval> extends PgColumn { static override readonly [entityKind]: string = 'PgInterval'; readonly fields: IntervalConfig['fields'] = this.config.intervalConfig.fields; readonly precision: IntervalConfig['precision'] = this.config.intervalConfig.precision; getSQLType(): string { const fields = this.fields ? ` ${this.fields}` : ''; const precision = this.precision ? `(${this.precision})` : ''; return `interval${fields}${precision}`; } } export interface IntervalConfig { fields?: | 'year' | 'month' | 'day' | 'hour' | 'minute' | 'second' | 'year to month' | 'day to hour' | 'day to minute' | 'day to second' | 'hour to minute' | 'hour to second' | 'minute to second'; precision?: Precision; } export function interval(): PgIntervalBuilderInitial<''>; export function interval( config?: IntervalConfig, ): PgIntervalBuilderInitial<''>; export function interval( name: TName, config?: IntervalConfig, ): PgIntervalBuilderInitial; export function interval(a?: string | IntervalConfig, b: IntervalConfig = {}) { const { name, config } = getColumnNameAndConfig(a, b); return new PgIntervalBuilder(name, config); } ================================================ FILE: drizzle-orm/src/pg-core/columns/json.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgJsonBuilderInitial = PgJsonBuilder<{ name: TName; dataType: 'json'; columnType: 'PgJson'; data: unknown; driverParam: unknown; enumValues: undefined; }>; export class PgJsonBuilder> extends PgColumnBuilder< T > { static override readonly [entityKind]: string = 'PgJsonBuilder'; constructor(name: T['name']) { super(name, 'json', 'PgJson'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgJson> { return new PgJson>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgJson> extends PgColumn { static override readonly [entityKind]: string = 'PgJson'; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgJsonBuilder['config']) { super(table, config); } getSQLType(): string { return 'json'; } override mapToDriverValue(value: T['data']): string { return JSON.stringify(value); } override mapFromDriverValue(value: T['data'] | string): T['data'] { if (typeof value === 'string') { try { return JSON.parse(value); } catch { return value as T['data']; } } return value; } } export function json(): PgJsonBuilderInitial<''>; export function json(name: TName): PgJsonBuilderInitial; export function json(name?: string) { return new PgJsonBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/jsonb.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgJsonbBuilderInitial = PgJsonbBuilder<{ name: TName; dataType: 'json'; columnType: 'PgJsonb'; data: unknown; driverParam: unknown; enumValues: undefined; }>; export class PgJsonbBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgJsonbBuilder'; constructor(name: T['name']) { super(name, 'json', 'PgJsonb'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgJsonb> { return new PgJsonb>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgJsonb> extends PgColumn { static override readonly [entityKind]: string = 'PgJsonb'; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgJsonbBuilder['config']) { super(table, config); } getSQLType(): string { return 'jsonb'; } override mapToDriverValue(value: T['data']): string { return JSON.stringify(value); } override mapFromDriverValue(value: T['data'] | string): T['data'] { if (typeof value === 'string') { try { return JSON.parse(value); } catch { return value as T['data']; } } return value; } } export function jsonb(): PgJsonbBuilderInitial<''>; export function jsonb(name: TName): PgJsonbBuilderInitial; export function jsonb(name?: string) { return new PgJsonbBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/line.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgLineBuilderInitial = PgLineBuilder<{ name: TName; dataType: 'array'; columnType: 'PgLine'; data: [number, number, number]; driverParam: number | string; enumValues: undefined; }>; export class PgLineBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgLineBuilder'; constructor(name: T['name']) { super(name, 'array', 'PgLine'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgLineTuple> { return new PgLineTuple>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgLineTuple> extends PgColumn { static override readonly [entityKind]: string = 'PgLine'; getSQLType(): string { return 'line'; } override mapFromDriverValue(value: string): [number, number, number] { const [a, b, c] = value.slice(1, -1).split(','); return [Number.parseFloat(a!), Number.parseFloat(b!), Number.parseFloat(c!)]; } override mapToDriverValue(value: [number, number, number]): string { return `{${value[0]},${value[1]},${value[2]}}`; } } export type PgLineABCBuilderInitial = PgLineABCBuilder<{ name: TName; dataType: 'json'; columnType: 'PgLineABC'; data: { a: number; b: number; c: number }; driverParam: string; enumValues: undefined; }>; export class PgLineABCBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgLineABCBuilder'; constructor(name: T['name']) { super(name, 'json', 'PgLineABC'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgLineABC> { return new PgLineABC>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgLineABC> extends PgColumn { static override readonly [entityKind]: string = 'PgLineABC'; getSQLType(): string { return 'line'; } override mapFromDriverValue(value: string): { a: number; b: number; c: number } { const [a, b, c] = value.slice(1, -1).split(','); return { a: Number.parseFloat(a!), b: Number.parseFloat(b!), c: Number.parseFloat(c!) }; } override mapToDriverValue(value: { a: number; b: number; c: number }): string { return `{${value.a},${value.b},${value.c}}`; } } export interface PgLineTypeConfig { mode?: T; } export function line(): PgLineBuilderInitial<''>; export function line( config?: PgLineTypeConfig, ): Equal extends true ? PgLineABCBuilderInitial<''> : PgLineBuilderInitial<''>; export function line( name: TName, config?: PgLineTypeConfig, ): Equal extends true ? PgLineABCBuilderInitial : PgLineBuilderInitial; export function line(a?: string | PgLineTypeConfig, b?: PgLineTypeConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (!config?.mode || config.mode === 'tuple') { return new PgLineBuilder(name); } return new PgLineABCBuilder(name); } ================================================ FILE: drizzle-orm/src/pg-core/columns/macaddr.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '../table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgMacaddrBuilderInitial = PgMacaddrBuilder<{ name: TName; dataType: 'string'; columnType: 'PgMacaddr'; data: string; driverParam: string; enumValues: undefined; }>; export class PgMacaddrBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgMacaddrBuilder'; constructor(name: T['name']) { super(name, 'string', 'PgMacaddr'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgMacaddr> { return new PgMacaddr>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgMacaddr> extends PgColumn { static override readonly [entityKind]: string = 'PgMacaddr'; getSQLType(): string { return 'macaddr'; } } export function macaddr(): PgMacaddrBuilderInitial<''>; export function macaddr(name: TName): PgMacaddrBuilderInitial; export function macaddr(name?: string) { return new PgMacaddrBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/macaddr8.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '../table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgMacaddr8BuilderInitial = PgMacaddr8Builder<{ name: TName; dataType: 'string'; columnType: 'PgMacaddr8'; data: string; driverParam: string; enumValues: undefined; }>; export class PgMacaddr8Builder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgMacaddr8Builder'; constructor(name: T['name']) { super(name, 'string', 'PgMacaddr8'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgMacaddr8> { return new PgMacaddr8>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgMacaddr8> extends PgColumn { static override readonly [entityKind]: string = 'PgMacaddr8'; getSQLType(): string { return 'macaddr8'; } } export function macaddr8(): PgMacaddr8BuilderInitial<''>; export function macaddr8(name: TName): PgMacaddr8BuilderInitial; export function macaddr8(name?: string) { return new PgMacaddr8Builder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/numeric.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgNumericBuilderInitial = PgNumericBuilder<{ name: TName; dataType: 'string'; columnType: 'PgNumeric'; data: string; driverParam: string; enumValues: undefined; }>; export class PgNumericBuilder> extends PgColumnBuilder< T, { precision: number | undefined; scale: number | undefined; } > { static override readonly [entityKind]: string = 'PgNumericBuilder'; constructor(name: T['name'], precision?: number, scale?: number) { super(name, 'string', 'PgNumeric'); this.config.precision = precision; this.config.scale = scale; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgNumeric> { return new PgNumeric>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgNumeric> extends PgColumn { static override readonly [entityKind]: string = 'PgNumeric'; readonly precision: number | undefined; readonly scale: number | undefined; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgNumericBuilder['config']) { super(table, config); this.precision = config.precision; this.scale = config.scale; } override mapFromDriverValue(value: unknown): string { if (typeof value === 'string') return value; return String(value); } getSQLType(): string { if (this.precision !== undefined && this.scale !== undefined) { return `numeric(${this.precision}, ${this.scale})`; } else if (this.precision === undefined) { return 'numeric'; } else { return `numeric(${this.precision})`; } } } export type PgNumericNumberBuilderInitial = PgNumericNumberBuilder<{ name: TName; dataType: 'number'; columnType: 'PgNumericNumber'; data: number; driverParam: string; enumValues: undefined; }>; export class PgNumericNumberBuilder> extends PgColumnBuilder< T, { precision: number | undefined; scale: number | undefined; } > { static override readonly [entityKind]: string = 'PgNumericNumberBuilder'; constructor(name: T['name'], precision?: number, scale?: number) { super(name, 'number', 'PgNumericNumber'); this.config.precision = precision; this.config.scale = scale; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgNumericNumber> { return new PgNumericNumber>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgNumericNumber> extends PgColumn { static override readonly [entityKind]: string = 'PgNumericNumber'; readonly precision: number | undefined; readonly scale: number | undefined; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgNumericNumberBuilder['config']) { super(table, config); this.precision = config.precision; this.scale = config.scale; } override mapFromDriverValue(value: unknown): number { if (typeof value === 'number') return value; return Number(value); } override mapToDriverValue = String; getSQLType(): string { if (this.precision !== undefined && this.scale !== undefined) { return `numeric(${this.precision}, ${this.scale})`; } else if (this.precision === undefined) { return 'numeric'; } else { return `numeric(${this.precision})`; } } } export type PgNumericBigIntBuilderInitial = PgNumericBigIntBuilder<{ name: TName; dataType: 'bigint'; columnType: 'PgNumericBigInt'; data: bigint; driverParam: string; enumValues: undefined; }>; export class PgNumericBigIntBuilder> extends PgColumnBuilder< T, { precision: number | undefined; scale: number | undefined; } > { static override readonly [entityKind]: string = 'PgNumericBigIntBuilder'; constructor(name: T['name'], precision?: number, scale?: number) { super(name, 'bigint', 'PgNumericBigInt'); this.config.precision = precision; this.config.scale = scale; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgNumericBigInt> { return new PgNumericBigInt>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgNumericBigInt> extends PgColumn { static override readonly [entityKind]: string = 'PgNumericBigInt'; readonly precision: number | undefined; readonly scale: number | undefined; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgNumericBigIntBuilder['config']) { super(table, config); this.precision = config.precision; this.scale = config.scale; } override mapFromDriverValue = BigInt; override mapToDriverValue = String; getSQLType(): string { if (this.precision !== undefined && this.scale !== undefined) { return `numeric(${this.precision}, ${this.scale})`; } else if (this.precision === undefined) { return 'numeric'; } else { return `numeric(${this.precision})`; } } } export type PgNumericConfig = | { precision: number; scale?: number; mode?: T } | { precision?: number; scale: number; mode?: T } | { precision?: number; scale?: number; mode: T }; export function numeric( config?: PgNumericConfig, ): Equal extends true ? PgNumericNumberBuilderInitial<''> : Equal extends true ? PgNumericBigIntBuilderInitial<''> : PgNumericBuilderInitial<''>; export function numeric( name: TName, config?: PgNumericConfig, ): Equal extends true ? PgNumericNumberBuilderInitial : Equal extends true ? PgNumericBigIntBuilderInitial : PgNumericBuilderInitial; export function numeric(a?: string | PgNumericConfig, b?: PgNumericConfig) { const { name, config } = getColumnNameAndConfig(a, b); const mode = config?.mode; return mode === 'number' ? new PgNumericNumberBuilder(name, config?.precision, config?.scale) : mode === 'bigint' ? new PgNumericBigIntBuilder(name, config?.precision, config?.scale) : new PgNumericBuilder(name, config?.precision, config?.scale); } export const decimal = numeric; ================================================ FILE: drizzle-orm/src/pg-core/columns/point.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgPointTupleBuilderInitial = PgPointTupleBuilder<{ name: TName; dataType: 'array'; columnType: 'PgPointTuple'; data: [number, number]; driverParam: number | string; enumValues: undefined; }>; export class PgPointTupleBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgPointTupleBuilder'; constructor(name: string) { super(name, 'array', 'PgPointTuple'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgPointTuple> { return new PgPointTuple>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgPointTuple> extends PgColumn { static override readonly [entityKind]: string = 'PgPointTuple'; getSQLType(): string { return 'point'; } override mapFromDriverValue(value: string | { x: number; y: number }): [number, number] { if (typeof value === 'string') { const [x, y] = value.slice(1, -1).split(','); return [Number.parseFloat(x!), Number.parseFloat(y!)]; } return [value.x, value.y]; } override mapToDriverValue(value: [number, number]): string { return `(${value[0]},${value[1]})`; } } export type PgPointObjectBuilderInitial = PgPointObjectBuilder<{ name: TName; dataType: 'json'; columnType: 'PgPointObject'; data: { x: number; y: number }; driverParam: string; enumValues: undefined; }>; export class PgPointObjectBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgPointObjectBuilder'; constructor(name: string) { super(name, 'json', 'PgPointObject'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgPointObject> { return new PgPointObject>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgPointObject> extends PgColumn { static override readonly [entityKind]: string = 'PgPointObject'; getSQLType(): string { return 'point'; } override mapFromDriverValue(value: string | { x: number; y: number }): { x: number; y: number } { if (typeof value === 'string') { const [x, y] = value.slice(1, -1).split(','); return { x: Number.parseFloat(x!), y: Number.parseFloat(y!) }; } return value; } override mapToDriverValue(value: { x: number; y: number }): string { return `(${value.x},${value.y})`; } } export interface PgPointConfig { mode?: T; } export function point(): PgPointTupleBuilderInitial<''>; export function point( config?: PgPointConfig, ): Equal extends true ? PgPointObjectBuilderInitial<''> : PgPointTupleBuilderInitial<''>; export function point( name: TName, config?: PgPointConfig, ): Equal extends true ? PgPointObjectBuilderInitial : PgPointTupleBuilderInitial; export function point(a?: string | PgPointConfig, b?: PgPointConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (!config?.mode || config.mode === 'tuple') { return new PgPointTupleBuilder(name); } return new PgPointObjectBuilder(name); } ================================================ FILE: drizzle-orm/src/pg-core/columns/postgis_extension/geometry.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from '../common.ts'; import { parseEWKB } from './utils.ts'; export type PgGeometryBuilderInitial = PgGeometryBuilder<{ name: TName; dataType: 'array'; columnType: 'PgGeometry'; data: [number, number]; driverParam: string; enumValues: undefined; }>; export class PgGeometryBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgGeometryBuilder'; constructor(name: T['name']) { super(name, 'array', 'PgGeometry'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgGeometry> { return new PgGeometry>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgGeometry> extends PgColumn { static override readonly [entityKind]: string = 'PgGeometry'; getSQLType(): string { return 'geometry(point)'; } override mapFromDriverValue(value: string): [number, number] { return parseEWKB(value); } override mapToDriverValue(value: [number, number]): string { return `point(${value[0]} ${value[1]})`; } } export type PgGeometryObjectBuilderInitial = PgGeometryObjectBuilder<{ name: TName; dataType: 'json'; columnType: 'PgGeometryObject'; data: { x: number; y: number }; driverParam: string; enumValues: undefined; }>; export class PgGeometryObjectBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgGeometryObjectBuilder'; constructor(name: T['name']) { super(name, 'json', 'PgGeometryObject'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgGeometryObject> { return new PgGeometryObject>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgGeometryObject> extends PgColumn { static override readonly [entityKind]: string = 'PgGeometryObject'; getSQLType(): string { return 'geometry(point)'; } override mapFromDriverValue(value: string): { x: number; y: number } { const parsed = parseEWKB(value); return { x: parsed[0], y: parsed[1] }; } override mapToDriverValue(value: { x: number; y: number }): string { return `point(${value.x} ${value.y})`; } } export interface PgGeometryConfig { mode?: T; type?: 'point' | (string & {}); srid?: number; } export function geometry(): PgGeometryBuilderInitial<''>; export function geometry( config?: PgGeometryConfig, ): Equal extends true ? PgGeometryObjectBuilderInitial<''> : PgGeometryBuilderInitial<''>; export function geometry( name: TName, config?: PgGeometryConfig, ): Equal extends true ? PgGeometryObjectBuilderInitial : PgGeometryBuilderInitial; export function geometry(a?: string | PgGeometryConfig, b?: PgGeometryConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (!config?.mode || config.mode === 'tuple') { return new PgGeometryBuilder(name); } return new PgGeometryObjectBuilder(name); } ================================================ FILE: drizzle-orm/src/pg-core/columns/postgis_extension/utils.ts ================================================ function hexToBytes(hex: string): Uint8Array { const bytes: number[] = []; for (let c = 0; c < hex.length; c += 2) { bytes.push(Number.parseInt(hex.slice(c, c + 2), 16)); } return new Uint8Array(bytes); } function bytesToFloat64(bytes: Uint8Array, offset: number): number { const buffer = new ArrayBuffer(8); const view = new DataView(buffer); for (let i = 0; i < 8; i++) { view.setUint8(i, bytes[offset + i]!); } return view.getFloat64(0, true); } export function parseEWKB(hex: string): [number, number] { const bytes = hexToBytes(hex); let offset = 0; // Byte order: 1 is little-endian, 0 is big-endian const byteOrder = bytes[offset]; offset += 1; const view = new DataView(bytes.buffer); const geomType = view.getUint32(offset, byteOrder === 1); offset += 4; let _srid: number | undefined; if (geomType & 0x20000000) { // SRID flag _srid = view.getUint32(offset, byteOrder === 1); offset += 4; } if ((geomType & 0xFFFF) === 1) { const x = bytesToFloat64(bytes, offset); offset += 8; const y = bytesToFloat64(bytes, offset); offset += 8; return [x, y]; } throw new Error('Unsupported geometry type'); } ================================================ FILE: drizzle-orm/src/pg-core/columns/real.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgRealBuilderInitial = PgRealBuilder<{ name: TName; dataType: 'number'; columnType: 'PgReal'; data: number; driverParam: string | number; enumValues: undefined; }>; export class PgRealBuilder> extends PgColumnBuilder< T, { length: number | undefined } > { static override readonly [entityKind]: string = 'PgRealBuilder'; constructor(name: T['name'], length?: number) { super(name, 'number', 'PgReal'); this.config.length = length; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgReal> { return new PgReal>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgReal> extends PgColumn { static override readonly [entityKind]: string = 'PgReal'; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgRealBuilder['config']) { super(table, config); } getSQLType(): string { return 'real'; } override mapFromDriverValue = (value: string | number): number => { if (typeof value === 'string') { return Number.parseFloat(value); } return value; }; } export function real(): PgRealBuilderInitial<''>; export function real(name: TName): PgRealBuilderInitial; export function real(name?: string) { return new PgRealBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/serial.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, HasDefault, MakeColumnConfig, NotNull, } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgSerialBuilderInitial = NotNull< HasDefault< PgSerialBuilder<{ name: TName; dataType: 'number'; columnType: 'PgSerial'; data: number; driverParam: number; enumValues: undefined; }> > >; export class PgSerialBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgSerialBuilder'; constructor(name: T['name']) { super(name, 'number', 'PgSerial'); this.config.hasDefault = true; this.config.notNull = true; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgSerial> { return new PgSerial>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgSerial> extends PgColumn { static override readonly [entityKind]: string = 'PgSerial'; getSQLType(): string { return 'serial'; } } export function serial(): PgSerialBuilderInitial<''>; export function serial(name: TName): PgSerialBuilderInitial; export function serial(name?: string) { return new PgSerialBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/smallint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { PgColumn } from './common.ts'; import { PgIntColumnBaseBuilder } from './int.common.ts'; export type PgSmallIntBuilderInitial = PgSmallIntBuilder<{ name: TName; dataType: 'number'; columnType: 'PgSmallInt'; data: number; driverParam: number | string; enumValues: undefined; }>; export class PgSmallIntBuilder> extends PgIntColumnBaseBuilder { static override readonly [entityKind]: string = 'PgSmallIntBuilder'; constructor(name: T['name']) { super(name, 'number', 'PgSmallInt'); } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgSmallInt> { return new PgSmallInt>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgSmallInt> extends PgColumn { static override readonly [entityKind]: string = 'PgSmallInt'; getSQLType(): string { return 'smallint'; } override mapFromDriverValue = (value: number | string): number => { if (typeof value === 'string') { return Number(value); } return value; }; } export function smallint(): PgSmallIntBuilderInitial<''>; export function smallint(name: TName): PgSmallIntBuilderInitial; export function smallint(name?: string) { return new PgSmallIntBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/smallserial.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, HasDefault, MakeColumnConfig, NotNull, } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgSmallSerialBuilderInitial = NotNull< HasDefault< PgSmallSerialBuilder<{ name: TName; dataType: 'number'; columnType: 'PgSmallSerial'; data: number; driverParam: number; enumValues: undefined; }> > >; export class PgSmallSerialBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgSmallSerialBuilder'; constructor(name: T['name']) { super(name, 'number', 'PgSmallSerial'); this.config.hasDefault = true; this.config.notNull = true; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgSmallSerial> { return new PgSmallSerial>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgSmallSerial> extends PgColumn { static override readonly [entityKind]: string = 'PgSmallSerial'; getSQLType(): string { return 'smallserial'; } } export function smallserial(): PgSmallSerialBuilderInitial<''>; export function smallserial(name: TName): PgSmallSerialBuilderInitial; export function smallserial(name?: string) { return new PgSmallSerialBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/text.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgTextBuilderInitial = PgTextBuilder<{ name: TName; dataType: 'string'; columnType: 'PgText'; data: TEnum[number]; enumValues: TEnum; driverParam: string; }>; export class PgTextBuilder< T extends ColumnBuilderBaseConfig<'string', 'PgText'>, > extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgTextBuilder'; constructor( name: T['name'], config: PgTextConfig, ) { super(name, 'string', 'PgText'); this.config.enumValues = config.enum; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgText> { return new PgText>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgText> extends PgColumn { static override readonly [entityKind]: string = 'PgText'; override readonly enumValues = this.config.enumValues; getSQLType(): string { return 'text'; } } export interface PgTextConfig< TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, > { enum?: TEnum; } export function text(): PgTextBuilderInitial<'', [string, ...string[]]>; export function text>( config?: PgTextConfig>, ): PgTextBuilderInitial<'', Writable>; export function text>( name: TName, config?: PgTextConfig>, ): PgTextBuilderInitial>; export function text(a?: string | PgTextConfig, b: PgTextConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new PgTextBuilder(name, config as any); } ================================================ FILE: drizzle-orm/src/pg-core/columns/time.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn } from './common.ts'; import { PgDateColumnBaseBuilder } from './date.common.ts'; import type { Precision } from './timestamp.ts'; export type PgTimeBuilderInitial = PgTimeBuilder<{ name: TName; dataType: 'string'; columnType: 'PgTime'; data: string; driverParam: string; enumValues: undefined; }>; export class PgTimeBuilder> extends PgDateColumnBaseBuilder< T, { withTimezone: boolean; precision: number | undefined } > { static override readonly [entityKind]: string = 'PgTimeBuilder'; constructor( name: T['name'], readonly withTimezone: boolean, readonly precision: number | undefined, ) { super(name, 'string', 'PgTime'); this.config.withTimezone = withTimezone; this.config.precision = precision; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgTime> { return new PgTime>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgTime> extends PgColumn { static override readonly [entityKind]: string = 'PgTime'; readonly withTimezone: boolean; readonly precision: number | undefined; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgTimeBuilder['config']) { super(table, config); this.withTimezone = config.withTimezone; this.precision = config.precision; } getSQLType(): string { const precision = this.precision === undefined ? '' : `(${this.precision})`; return `time${precision}${this.withTimezone ? ' with time zone' : ''}`; } } export interface TimeConfig { precision?: Precision; withTimezone?: boolean; } export function time(): PgTimeBuilderInitial<''>; export function time(config?: TimeConfig): PgTimeBuilderInitial<''>; export function time(name: TName, config?: TimeConfig): PgTimeBuilderInitial; export function time(a?: string | TimeConfig, b: TimeConfig = {}) { const { name, config } = getColumnNameAndConfig(a, b); return new PgTimeBuilder(name, config.withTimezone ?? false, config.precision); } ================================================ FILE: drizzle-orm/src/pg-core/columns/timestamp.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn } from './common.ts'; import { PgDateColumnBaseBuilder } from './date.common.ts'; export type PgTimestampBuilderInitial = PgTimestampBuilder<{ name: TName; dataType: 'date'; columnType: 'PgTimestamp'; data: Date; driverParam: string; enumValues: undefined; }>; export class PgTimestampBuilder> extends PgDateColumnBaseBuilder< T, { withTimezone: boolean; precision: number | undefined } > { static override readonly [entityKind]: string = 'PgTimestampBuilder'; constructor( name: T['name'], withTimezone: boolean, precision: number | undefined, ) { super(name, 'date', 'PgTimestamp'); this.config.withTimezone = withTimezone; this.config.precision = precision; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgTimestamp> { return new PgTimestamp>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgTimestamp> extends PgColumn { static override readonly [entityKind]: string = 'PgTimestamp'; readonly withTimezone: boolean; readonly precision: number | undefined; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgTimestampBuilder['config']) { super(table, config); this.withTimezone = config.withTimezone; this.precision = config.precision; } getSQLType(): string { const precision = this.precision === undefined ? '' : ` (${this.precision})`; return `timestamp${precision}${this.withTimezone ? ' with time zone' : ''}`; } override mapFromDriverValue(value: Date | string): Date { if (typeof value === 'string') return new Date(this.withTimezone ? value : value + '+0000'); return value; } override mapToDriverValue = (value: Date): string => { return value.toISOString(); }; } export type PgTimestampStringBuilderInitial = PgTimestampStringBuilder<{ name: TName; dataType: 'string'; columnType: 'PgTimestampString'; data: string; driverParam: string; enumValues: undefined; }>; export class PgTimestampStringBuilder> extends PgDateColumnBaseBuilder< T, { withTimezone: boolean; precision: number | undefined } > { static override readonly [entityKind]: string = 'PgTimestampStringBuilder'; constructor( name: T['name'], withTimezone: boolean, precision: number | undefined, ) { super(name, 'string', 'PgTimestampString'); this.config.withTimezone = withTimezone; this.config.precision = precision; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgTimestampString> { return new PgTimestampString>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgTimestampString> extends PgColumn { static override readonly [entityKind]: string = 'PgTimestampString'; readonly withTimezone: boolean; readonly precision: number | undefined; constructor(table: AnyPgTable<{ name: T['tableName'] }>, config: PgTimestampStringBuilder['config']) { super(table, config); this.withTimezone = config.withTimezone; this.precision = config.precision; } getSQLType(): string { const precision = this.precision === undefined ? '' : `(${this.precision})`; return `timestamp${precision}${this.withTimezone ? ' with time zone' : ''}`; } override mapFromDriverValue(value: Date | string): string { if (typeof value === 'string') return value; const shortened = value.toISOString().slice(0, -1).replace('T', ' '); if (this.withTimezone) { const offset = value.getTimezoneOffset(); const sign = offset <= 0 ? '+' : '-'; return `${shortened}${sign}${Math.floor(Math.abs(offset) / 60).toString().padStart(2, '0')}`; } return shortened; } } export type Precision = 0 | 1 | 2 | 3 | 4 | 5 | 6; export interface PgTimestampConfig { mode?: TMode; precision?: Precision; withTimezone?: boolean; } export function timestamp(): PgTimestampBuilderInitial<''>; export function timestamp( config?: PgTimestampConfig, ): Equal extends true ? PgTimestampStringBuilderInitial<''> : PgTimestampBuilderInitial<''>; export function timestamp( name: TName, config?: PgTimestampConfig, ): Equal extends true ? PgTimestampStringBuilderInitial : PgTimestampBuilderInitial; export function timestamp(a?: string | PgTimestampConfig, b: PgTimestampConfig = {}) { const { name, config } = getColumnNameAndConfig(a, b); if (config?.mode === 'string') { return new PgTimestampStringBuilder(name, config.withTimezone ?? false, config.precision); } return new PgTimestampBuilder(name, config?.withTimezone ?? false, config?.precision); } ================================================ FILE: drizzle-orm/src/pg-core/columns/uuid.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { sql } from '~/sql/sql.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgUUIDBuilderInitial = PgUUIDBuilder<{ name: TName; dataType: 'string'; columnType: 'PgUUID'; data: string; driverParam: string; enumValues: undefined; }>; export class PgUUIDBuilder> extends PgColumnBuilder { static override readonly [entityKind]: string = 'PgUUIDBuilder'; constructor(name: T['name']) { super(name, 'string', 'PgUUID'); } /** * Adds `default gen_random_uuid()` to the column definition. */ defaultRandom(): ReturnType { return this.default(sql`gen_random_uuid()`) as ReturnType; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgUUID> { return new PgUUID>(table, this.config as ColumnBuilderRuntimeConfig); } } export class PgUUID> extends PgColumn { static override readonly [entityKind]: string = 'PgUUID'; getSQLType(): string { return 'uuid'; } } export function uuid(): PgUUIDBuilderInitial<''>; export function uuid(name: TName): PgUUIDBuilderInitial; export function uuid(name?: string) { return new PgUUIDBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/pg-core/columns/varchar.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from './common.ts'; export type PgVarcharBuilderInitial< TName extends string, TEnum extends [string, ...string[]], TLength extends number | undefined, > = PgVarcharBuilder<{ name: TName; dataType: 'string'; columnType: 'PgVarchar'; data: TEnum[number]; driverParam: string; enumValues: TEnum; length: TLength; }>; export class PgVarcharBuilder< T extends ColumnBuilderBaseConfig<'string', 'PgVarchar'> & { length?: number | undefined }, > extends PgColumnBuilder< T, { length: T['length']; enumValues: T['enumValues'] }, { length: T['length'] } > { static override readonly [entityKind]: string = 'PgVarcharBuilder'; constructor(name: T['name'], config: PgVarcharConfig) { super(name, 'string', 'PgVarchar'); this.config.length = config.length; this.config.enumValues = config.enum; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgVarchar & { length: T['length'] }> { return new PgVarchar & { length: T['length'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgVarchar & { length?: number | undefined }> extends PgColumn { static override readonly [entityKind]: string = 'PgVarchar'; readonly length = this.config.length; override readonly enumValues = this.config.enumValues; getSQLType(): string { return this.length === undefined ? `varchar` : `varchar(${this.length})`; } } export interface PgVarcharConfig< TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, TLength extends number | undefined = number | undefined, > { enum?: TEnum; length?: TLength; } export function varchar(): PgVarcharBuilderInitial<'', [string, ...string[]], undefined>; export function varchar< U extends string, T extends Readonly<[U, ...U[]]>, L extends number | undefined, >( config?: PgVarcharConfig, L>, ): PgVarcharBuilderInitial<'', Writable, L>; export function varchar< TName extends string, U extends string, T extends Readonly<[U, ...U[]]>, L extends number | undefined, >( name: TName, config?: PgVarcharConfig, L>, ): PgVarcharBuilderInitial, L>; export function varchar(a?: string | PgVarcharConfig, b: PgVarcharConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new PgVarcharBuilder(name, config as any); } ================================================ FILE: drizzle-orm/src/pg-core/columns/vector_extension/bit.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from '../common.ts'; export type PgBinaryVectorBuilderInitial = PgBinaryVectorBuilder<{ name: TName; dataType: 'string'; columnType: 'PgBinaryVector'; data: string; driverParam: string; enumValues: undefined; dimensions: TDimensions; }>; export class PgBinaryVectorBuilder< T extends ColumnBuilderBaseConfig<'string', 'PgBinaryVector'> & { dimensions: number }, > extends PgColumnBuilder< T, { dimensions: T['dimensions'] } > { static override readonly [entityKind]: string = 'PgBinaryVectorBuilder'; constructor(name: string, config: PgBinaryVectorConfig) { super(name, 'string', 'PgBinaryVector'); this.config.dimensions = config.dimensions; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgBinaryVector & { dimensions: T['dimensions'] }> { return new PgBinaryVector & { dimensions: T['dimensions'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgBinaryVector & { dimensions: number }> extends PgColumn { static override readonly [entityKind]: string = 'PgBinaryVector'; readonly dimensions = this.config.dimensions; getSQLType(): string { return `bit(${this.dimensions})`; } } export interface PgBinaryVectorConfig { dimensions: TDimensions; } export function bit( config: PgBinaryVectorConfig, ): PgBinaryVectorBuilderInitial<'', D>; export function bit( name: TName, config: PgBinaryVectorConfig, ): PgBinaryVectorBuilderInitial; export function bit(a: string | PgBinaryVectorConfig, b?: PgBinaryVectorConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new PgBinaryVectorBuilder(name, config); } ================================================ FILE: drizzle-orm/src/pg-core/columns/vector_extension/halfvec.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from '../common.ts'; export type PgHalfVectorBuilderInitial = PgHalfVectorBuilder<{ name: TName; dataType: 'array'; columnType: 'PgHalfVector'; data: number[]; driverParam: string; enumValues: undefined; dimensions: TDimensions; }>; export class PgHalfVectorBuilder & { dimensions: number }> extends PgColumnBuilder< T, { dimensions: T['dimensions'] }, { dimensions: T['dimensions'] } > { static override readonly [entityKind]: string = 'PgHalfVectorBuilder'; constructor(name: string, config: PgHalfVectorConfig) { super(name, 'array', 'PgHalfVector'); this.config.dimensions = config.dimensions; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgHalfVector & { dimensions: T['dimensions'] }> { return new PgHalfVector & { dimensions: T['dimensions'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgHalfVector & { dimensions: number }> extends PgColumn { static override readonly [entityKind]: string = 'PgHalfVector'; readonly dimensions: T['dimensions'] = this.config.dimensions; getSQLType(): string { return `halfvec(${this.dimensions})`; } override mapToDriverValue(value: unknown): unknown { return JSON.stringify(value); } override mapFromDriverValue(value: string): unknown { return value .slice(1, -1) .split(',') .map((v) => Number.parseFloat(v)); } } export interface PgHalfVectorConfig { dimensions: TDimensions; } export function halfvec( config: PgHalfVectorConfig, ): PgHalfVectorBuilderInitial<'', D>; export function halfvec( name: TName, config: PgHalfVectorConfig, ): PgHalfVectorBuilderInitial; export function halfvec(a: string | PgHalfVectorConfig, b?: PgHalfVectorConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new PgHalfVectorBuilder(name, config); } ================================================ FILE: drizzle-orm/src/pg-core/columns/vector_extension/sparsevec.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from '../common.ts'; export type PgSparseVectorBuilderInitial = PgSparseVectorBuilder<{ name: TName; dataType: 'string'; columnType: 'PgSparseVector'; data: string; driverParam: string; enumValues: undefined; }>; export class PgSparseVectorBuilder> extends PgColumnBuilder< T, { dimensions: number | undefined } > { static override readonly [entityKind]: string = 'PgSparseVectorBuilder'; constructor(name: string, config: PgSparseVectorConfig) { super(name, 'string', 'PgSparseVector'); this.config.dimensions = config.dimensions; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgSparseVector> { return new PgSparseVector>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgSparseVector> extends PgColumn { static override readonly [entityKind]: string = 'PgSparseVector'; readonly dimensions = this.config.dimensions; getSQLType(): string { return `sparsevec(${this.dimensions})`; } } export interface PgSparseVectorConfig { dimensions: number; } export function sparsevec( config: PgSparseVectorConfig, ): PgSparseVectorBuilderInitial<''>; export function sparsevec( name: TName, config: PgSparseVectorConfig, ): PgSparseVectorBuilderInitial; export function sparsevec(a: string | PgSparseVectorConfig, b?: PgSparseVectorConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new PgSparseVectorBuilder(name, config); } ================================================ FILE: drizzle-orm/src/pg-core/columns/vector_extension/vector.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnyPgTable } from '~/pg-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { PgColumn, PgColumnBuilder } from '../common.ts'; export type PgVectorBuilderInitial = PgVectorBuilder<{ name: TName; dataType: 'array'; columnType: 'PgVector'; data: number[]; driverParam: string; enumValues: undefined; dimensions: TDimensions; }>; export class PgVectorBuilder & { dimensions: number }> extends PgColumnBuilder< T, { dimensions: T['dimensions'] }, { dimensions: T['dimensions'] } > { static override readonly [entityKind]: string = 'PgVectorBuilder'; constructor(name: string, config: PgVectorConfig) { super(name, 'array', 'PgVector'); this.config.dimensions = config.dimensions; } /** @internal */ override build( table: AnyPgTable<{ name: TTableName }>, ): PgVector & { dimensions: T['dimensions'] }> { return new PgVector & { dimensions: T['dimensions'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class PgVector & { dimensions: number | undefined }> extends PgColumn { static override readonly [entityKind]: string = 'PgVector'; readonly dimensions: T['dimensions'] = this.config.dimensions; getSQLType(): string { return `vector(${this.dimensions})`; } override mapToDriverValue(value: unknown): unknown { return JSON.stringify(value); } override mapFromDriverValue(value: string): unknown { return value .slice(1, -1) .split(',') .map((v) => Number.parseFloat(v)); } } export interface PgVectorConfig { dimensions: TDimensions; } export function vector( config: PgVectorConfig, ): PgVectorBuilderInitial<'', D>; export function vector( name: TName, config: PgVectorConfig, ): PgVectorBuilderInitial; export function vector(a: string | PgVectorConfig, b?: PgVectorConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new PgVectorBuilder(name, config); } ================================================ FILE: drizzle-orm/src/pg-core/db.ts ================================================ import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgDeleteBase, PgInsertBuilder, PgSelectBuilder, PgUpdateBuilder, QueryBuilder, } from '~/pg-core/query-builders/index.ts'; import type { PgQueryResultHKT, PgQueryResultKind, PgSession, PgTransaction, PgTransactionConfig, PreparedQueryConfig, } from '~/pg-core/session.ts'; import type { PgTable } from '~/pg-core/table.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import { type ColumnsSelection, type SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import { WithSubquery } from '~/subquery.ts'; import type { DrizzleTypeError, NeonAuthToken } from '~/utils.ts'; import type { PgColumn } from './columns/index.ts'; import { PgCountBuilder } from './query-builders/count.ts'; import { RelationalQueryBuilder } from './query-builders/query.ts'; import { PgRaw } from './query-builders/raw.ts'; import { PgRefreshMaterializedView } from './query-builders/refresh-materialized-view.ts'; import type { SelectedFields } from './query-builders/select.types.ts'; import type { WithBuilder } from './subquery.ts'; import type { PgViewBase } from './view-base.ts'; import type { PgMaterializedView } from './view.ts'; export class PgDatabase< TQueryResult extends PgQueryResultHKT, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations, > { static readonly [entityKind]: string = 'PgDatabase'; declare readonly _: { readonly schema: TSchema | undefined; readonly fullSchema: TFullSchema; readonly tableNamesMap: Record; readonly session: PgSession; }; query: TFullSchema extends Record ? DrizzleTypeError<'Seems like the schema generic is missing - did you forget to add it to your DB type?'> : { [K in keyof TSchema]: RelationalQueryBuilder; }; constructor( /** @internal */ readonly dialect: PgDialect, /** @internal */ readonly session: PgSession, schema: RelationalSchemaConfig | undefined, ) { this._ = schema ? { schema: schema.schema, fullSchema: schema.fullSchema as TFullSchema, tableNamesMap: schema.tableNamesMap, session, } : { schema: undefined, fullSchema: {} as TFullSchema, tableNamesMap: {}, session, }; this.query = {} as typeof this['query']; if (this._.schema) { for (const [tableName, columns] of Object.entries(this._.schema)) { (this.query as PgDatabase>['query'])[tableName] = new RelationalQueryBuilder( schema!.fullSchema, this._.schema, this._.tableNamesMap, schema!.fullSchema[tableName] as PgTable, columns, dialect, session, ); } } this.$cache = { invalidate: async (_params: any) => {} }; } /** * Creates a subquery that defines a temporary named result set as a CTE. * * It is useful for breaking down complex queries into simpler parts and for reusing the result set in subsequent parts of the query. * * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} * * @param alias The alias for the subquery. * * Failure to provide an alias will result in a DrizzleTypeError, preventing the subquery from being referenced in other queries. * * @example * * ```ts * // Create a subquery with alias 'sq' and use it in the select query * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); * * const result = await db.with(sq).select().from(sq); * ``` * * To select arbitrary SQL values as fields in a CTE and reference them in other CTEs or in the main query, you need to add aliases to them: * * ```ts * // Select an arbitrary SQL value as a field in a CTE and reference it in the main query * const sq = db.$with('sq').as(db.select({ * name: sql`upper(${users.name})`.as('name'), * }) * .from(users)); * * const result = await db.with(sq).select({ name: sq.name }).from(sq); * ``` */ $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const self = this; const as = ( qb: | TypedQueryBuilder | SQL | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), ) => { if (typeof qb === 'function') { qb = qb(new QueryBuilder(self.dialect)); } return new Proxy( new WithSubquery( qb.getSQL(), selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, alias, true, ), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ); }; return { as }; }; $count( source: PgTable | PgViewBase | SQL | SQLWrapper, filters?: SQL, ) { return new PgCountBuilder({ source, filters, session: this.session }); } $cache: { invalidate: Cache['onMutate'] }; /** * Incorporates a previously defined CTE (using `$with`) into the main query. * * This method allows the main query to reference a temporary named result set. * * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} * * @param queries The CTEs to incorporate into the main query. * * @example * * ```ts * // Define a subquery 'sq' as a CTE using $with * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); * * // Incorporate the CTE 'sq' into the main query and select from it * const result = await db.with(sq).select().from(sq); * ``` */ with(...queries: WithSubquery[]) { const self = this; /** * Creates a select query. * * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select} * * @param fields The selection object. * * @example * * ```ts * // Select all columns and all rows from the 'cars' table * const allCars: Car[] = await db.select().from(cars); * * // Select specific columns and all rows from the 'cars' table * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ * id: cars.id, * brand: cars.brand * }) * .from(cars); * ``` * * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: * * ```ts * // Select specific columns along with expression and all rows from the 'cars' table * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ * id: cars.id, * lowerBrand: sql`lower(${cars.brand})`, * }) * .from(cars); * ``` */ function select(): PgSelectBuilder; function select(fields: TSelection): PgSelectBuilder; function select(fields?: TSelection): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, }); } /** * Adds `distinct` expression to the select query. * * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param fields The selection object. * * @example * ```ts * // Select all unique rows from the 'cars' table * await db.selectDistinct() * .from(cars) * .orderBy(cars.id, cars.brand, cars.color); * * // Select all unique brands from the 'cars' table * await db.selectDistinct({ brand: cars.brand }) * .from(cars) * .orderBy(cars.brand); * ``` */ function selectDistinct(): PgSelectBuilder; function selectDistinct(fields: TSelection): PgSelectBuilder; function selectDistinct( fields?: TSelection, ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, distinct: true, }); } /** * Adds `distinct on` expression to the select query. * * Calling this method will specify how the unique rows are determined. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param on The expression defining uniqueness. * @param fields The selection object. * * @example * ```ts * // Select the first row for each unique brand from the 'cars' table * await db.selectDistinctOn([cars.brand]) * .from(cars) * .orderBy(cars.brand); * * // Selects the first occurrence of each unique car brand along with its color from the 'cars' table * await db.selectDistinctOn([cars.brand], { brand: cars.brand, color: cars.color }) * .from(cars) * .orderBy(cars.brand, cars.color); * ``` */ function selectDistinctOn(on: (PgColumn | SQLWrapper)[]): PgSelectBuilder; function selectDistinctOn( on: (PgColumn | SQLWrapper)[], fields: TSelection, ): PgSelectBuilder; function selectDistinctOn( on: (PgColumn | SQLWrapper)[], fields?: TSelection, ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, distinct: { on }, }); } /** * Creates an update query. * * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. * * Use `.set()` method to specify which values to update. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param table The table to update. * * @example * * ```ts * // Update all rows in the 'cars' table * await db.update(cars).set({ color: 'red' }); * * // Update rows with filters and conditions * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); * * // Update with returning clause * const updatedCar: Car[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.id, 1)) * .returning(); * ``` */ function update(table: TTable): PgUpdateBuilder { return new PgUpdateBuilder(table, self.session, self.dialect, queries); } /** * Creates an insert query. * * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. * * See docs: {@link https://orm.drizzle.team/docs/insert} * * @param table The table to insert into. * * @example * * ```ts * // Insert one row * await db.insert(cars).values({ brand: 'BMW' }); * * // Insert multiple rows * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); * * // Insert with returning clause * const insertedCar: Car[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning(); * ``` */ function insert(table: TTable): PgInsertBuilder { return new PgInsertBuilder(table, self.session, self.dialect, queries); } /** * Creates a delete query. * * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param table The table to delete from. * * @example * * ```ts * // Delete all rows in the 'cars' table * await db.delete(cars); * * // Delete rows with filters and conditions * await db.delete(cars).where(eq(cars.color, 'green')); * * // Delete with returning clause * const deletedCar: Car[] = await db.delete(cars) * .where(eq(cars.id, 1)) * .returning(); * ``` */ function delete_(table: TTable): PgDeleteBase { return new PgDeleteBase(table, self.session, self.dialect, queries); } return { select, selectDistinct, selectDistinctOn, update, insert, delete: delete_ }; } /** * Creates a select query. * * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select} * * @param fields The selection object. * * @example * * ```ts * // Select all columns and all rows from the 'cars' table * const allCars: Car[] = await db.select().from(cars); * * // Select specific columns and all rows from the 'cars' table * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ * id: cars.id, * brand: cars.brand * }) * .from(cars); * ``` * * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: * * ```ts * // Select specific columns along with expression and all rows from the 'cars' table * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ * id: cars.id, * lowerBrand: sql`lower(${cars.brand})`, * }) * .from(cars); * ``` */ select(): PgSelectBuilder; select(fields: TSelection): PgSelectBuilder; select(fields?: TSelection): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect, }); } /** * Adds `distinct` expression to the select query. * * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param fields The selection object. * * @example * ```ts * // Select all unique rows from the 'cars' table * await db.selectDistinct() * .from(cars) * .orderBy(cars.id, cars.brand, cars.color); * * // Select all unique brands from the 'cars' table * await db.selectDistinct({ brand: cars.brand }) * .from(cars) * .orderBy(cars.brand); * ``` */ selectDistinct(): PgSelectBuilder; selectDistinct(fields: TSelection): PgSelectBuilder; selectDistinct(fields?: TSelection): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect, distinct: true, }); } /** * Adds `distinct on` expression to the select query. * * Calling this method will specify how the unique rows are determined. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param on The expression defining uniqueness. * @param fields The selection object. * * @example * ```ts * // Select the first row for each unique brand from the 'cars' table * await db.selectDistinctOn([cars.brand]) * .from(cars) * .orderBy(cars.brand); * * // Selects the first occurrence of each unique car brand along with its color from the 'cars' table * await db.selectDistinctOn([cars.brand], { brand: cars.brand, color: cars.color }) * .from(cars) * .orderBy(cars.brand, cars.color); * ``` */ selectDistinctOn(on: (PgColumn | SQLWrapper)[]): PgSelectBuilder; selectDistinctOn( on: (PgColumn | SQLWrapper)[], fields: TSelection, ): PgSelectBuilder; selectDistinctOn( on: (PgColumn | SQLWrapper)[], fields?: TSelection, ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect, distinct: { on }, }); } /** * Creates an update query. * * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. * * Use `.set()` method to specify which values to update. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param table The table to update. * * @example * * ```ts * // Update all rows in the 'cars' table * await db.update(cars).set({ color: 'red' }); * * // Update rows with filters and conditions * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); * * // Update with returning clause * const updatedCar: Car[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.id, 1)) * .returning(); * ``` */ update(table: TTable): PgUpdateBuilder { return new PgUpdateBuilder(table, this.session, this.dialect); } /** * Creates an insert query. * * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. * * See docs: {@link https://orm.drizzle.team/docs/insert} * * @param table The table to insert into. * * @example * * ```ts * // Insert one row * await db.insert(cars).values({ brand: 'BMW' }); * * // Insert multiple rows * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); * * // Insert with returning clause * const insertedCar: Car[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning(); * ``` */ insert(table: TTable): PgInsertBuilder { return new PgInsertBuilder(table, this.session, this.dialect); } /** * Creates a delete query. * * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param table The table to delete from. * * @example * * ```ts * // Delete all rows in the 'cars' table * await db.delete(cars); * * // Delete rows with filters and conditions * await db.delete(cars).where(eq(cars.color, 'green')); * * // Delete with returning clause * const deletedCar: Car[] = await db.delete(cars) * .where(eq(cars.id, 1)) * .returning(); * ``` */ delete(table: TTable): PgDeleteBase { return new PgDeleteBase(table, this.session, this.dialect); } refreshMaterializedView(view: TView): PgRefreshMaterializedView { return new PgRefreshMaterializedView(view, this.session, this.dialect); } protected authToken?: NeonAuthToken; execute = Record>( query: SQLWrapper | string, ): PgRaw> { const sequel = typeof query === 'string' ? sql.raw(query) : query.getSQL(); const builtQuery = this.dialect.sqlToQuery(sequel); const prepared = this.session.prepareQuery< PreparedQueryConfig & { execute: PgQueryResultKind } >( builtQuery, undefined, undefined, false, ); return new PgRaw( () => prepared.execute(undefined, this.authToken), sequel, builtQuery, (result) => prepared.mapResult(result, true), ); } transaction( transaction: (tx: PgTransaction) => Promise, config?: PgTransactionConfig, ): Promise { return this.session.transaction(transaction, config); } } export type PgWithReplicas = Q & { $primary: Q; $replicas: Q[] }; export const withReplicas = < HKT extends PgQueryResultHKT, TFullSchema extends Record, TSchema extends TablesRelationalConfig, Q extends PgDatabase< HKT, TFullSchema, TSchema extends Record ? ExtractTablesWithRelations : TSchema >, >( primary: Q, replicas: [Q, ...Q[]], getReplica: (replicas: Q[]) => Q = () => replicas[Math.floor(Math.random() * replicas.length)]!, ): PgWithReplicas => { const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); const selectDistinctOn: Q['selectDistinctOn'] = (...args: [any]) => getReplica(replicas).selectDistinctOn(...args); const $count: Q['$count'] = (...args: [any]) => getReplica(replicas).$count(...args); const _with: Q['with'] = (...args: any) => getReplica(replicas).with(...args); const $with: Q['$with'] = (arg: any) => getReplica(replicas).$with(arg) as any; const update: Q['update'] = (...args: [any]) => primary.update(...args); const insert: Q['insert'] = (...args: [any]) => primary.insert(...args); const $delete: Q['delete'] = (...args: [any]) => primary.delete(...args); const execute: Q['execute'] = (...args: [any]) => primary.execute(...args); const transaction: Q['transaction'] = (...args: [any]) => primary.transaction(...args); const refreshMaterializedView: Q['refreshMaterializedView'] = (...args: [any]) => primary.refreshMaterializedView(...args); return { ...primary, update, insert, delete: $delete, execute, transaction, refreshMaterializedView, $primary: primary, $replicas: replicas, select, selectDistinct, selectDistinctOn, $count, $with, with: _with, get query() { return getReplica(replicas).query; }, }; }; ================================================ FILE: drizzle-orm/src/pg-core/dialect.ts ================================================ import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; import { CasingCache } from '~/casing.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import { DrizzleError } from '~/errors.ts'; import type { MigrationConfig, MigrationMeta } from '~/migrator.ts'; import { PgColumn, PgDate, PgDateString, PgJson, PgJsonb, PgNumeric, PgTime, PgTimestamp, PgTimestampString, PgUUID, } from '~/pg-core/columns/index.ts'; import type { AnyPgSelectQueryBuilder, PgDeleteConfig, PgInsertConfig, PgSelectJoinConfig, PgUpdateConfig, } from '~/pg-core/query-builders/index.ts'; import type { PgSelectConfig, SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import { PgTable } from '~/pg-core/table.ts'; import { type BuildRelationalQueryResult, type DBQueryConfig, getOperators, getOrderByOperators, Many, normalizeRelation, One, type Relation, type TableRelationalConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { and, eq, View } from '~/sql/index.ts'; import { type DriverValueEncoder, type Name, Param, type QueryTypingsValue, type QueryWithTypings, SQL, sql, type SQLChunk, } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { getTableName, getTableUniqueName, Table } from '~/table.ts'; import { type Casing, orderSelectedFields, type UpdateSet } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { PgSession } from './session.ts'; import { PgViewBase } from './view-base.ts'; import type { PgMaterializedView } from './view.ts'; export interface PgDialectConfig { casing?: Casing; } export class PgDialect { static readonly [entityKind]: string = 'PgDialect'; /** @internal */ readonly casing: CasingCache; constructor(config?: PgDialectConfig) { this.casing = new CasingCache(config?.casing); } async migrate(migrations: MigrationMeta[], session: PgSession, config: string | MigrationConfig): Promise { const migrationsTable = typeof config === 'string' ? '__drizzle_migrations' : config.migrationsTable ?? '__drizzle_migrations'; const migrationsSchema = typeof config === 'string' ? 'drizzle' : config.migrationsSchema ?? 'drizzle'; const migrationTableCreate = sql` CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsSchema)}.${sql.identifier(migrationsTable)} ( id SERIAL PRIMARY KEY, hash text NOT NULL, created_at bigint ) `; await session.execute(sql`CREATE SCHEMA IF NOT EXISTS ${sql.identifier(migrationsSchema)}`); await session.execute(migrationTableCreate); const dbMigrations = await session.all<{ id: number; hash: string; created_at: string }>( sql`select id, hash, created_at from ${sql.identifier(migrationsSchema)}.${ sql.identifier(migrationsTable) } order by created_at desc limit 1`, ); const lastDbMigration = dbMigrations[0]; await session.transaction(async (tx) => { for await (const migration of migrations) { if ( !lastDbMigration || Number(lastDbMigration.created_at) < migration.folderMillis ) { for (const stmt of migration.sql) { await tx.execute(sql.raw(stmt)); } await tx.execute( sql`insert into ${sql.identifier(migrationsSchema)}.${ sql.identifier(migrationsTable) } ("hash", "created_at") values(${migration.hash}, ${migration.folderMillis})`, ); } } }); } escapeName(name: string): string { return `"${name}"`; } escapeParam(num: number): string { return `$${num + 1}`; } escapeString(str: string): string { return `'${str.replace(/'/g, "''")}'`; } private buildWithCTE(queries: Subquery[] | undefined): SQL | undefined { if (!queries?.length) return undefined; const withSqlChunks = [sql`with `]; for (const [i, w] of queries.entries()) { withSqlChunks.push(sql`${sql.identifier(w._.alias)} as (${w._.sql})`); if (i < queries.length - 1) { withSqlChunks.push(sql`, `); } } withSqlChunks.push(sql` `); return sql.join(withSqlChunks); } buildDeleteQuery({ table, where, returning, withList }: PgDeleteConfig): SQL { const withSql = this.buildWithCTE(withList); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` : undefined; const whereSql = where ? sql` where ${where}` : undefined; return sql`${withSql}delete from ${table}${whereSql}${returningSql}`; } buildUpdateSet(table: PgTable, set: UpdateSet): SQL { const tableColumns = table[Table.Symbol.Columns]; const columnNames = Object.keys(tableColumns).filter((colName) => set[colName] !== undefined || tableColumns[colName]?.onUpdateFn !== undefined ); const setSize = columnNames.length; return sql.join(columnNames.flatMap((colName, i) => { const col = tableColumns[colName]!; const onUpdateFnResult = col.onUpdateFn?.(); const value = set[colName] ?? (is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col)); const res = sql`${sql.identifier(this.casing.getColumnCasing(col))} = ${value}`; if (i < setSize - 1) { return [res, sql.raw(', ')]; } return [res]; })); } buildUpdateQuery({ table, set, where, returning, withList, from, joins }: PgUpdateConfig): SQL { const withSql = this.buildWithCTE(withList); const tableName = table[PgTable.Symbol.Name]; const tableSchema = table[PgTable.Symbol.Schema]; const origTableName = table[PgTable.Symbol.OriginalName]; const alias = tableName === origTableName ? undefined : tableName; const tableSql = sql`${tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined}${ sql.identifier(origTableName) }${alias && sql` ${sql.identifier(alias)}`}`; const setSql = this.buildUpdateSet(table, set); const fromSql = from && sql.join([sql.raw(' from '), this.buildFromTable(from)]); const joinsSql = this.buildJoins(joins); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: !from })}` : undefined; const whereSql = where ? sql` where ${where}` : undefined; return sql`${withSql}update ${tableSql} set ${setSql}${fromSql}${joinsSql}${whereSql}${returningSql}`; } /** * Builds selection SQL with provided fields/expressions * * Examples: * * `select from` * * `insert ... returning ` * * If `isSingleTable` is true, then columns won't be prefixed with table name */ private buildSelection( fields: SelectedFieldsOrdered, { isSingleTable = false }: { isSingleTable?: boolean } = {}, ): SQL { const columnsLen = fields.length; const chunks = fields .flatMap(({ field }, i) => { const chunk: SQLChunk[] = []; if (is(field, SQL.Aliased) && field.isSelectionField) { chunk.push(sql.identifier(field.fieldAlias)); } else if (is(field, SQL.Aliased) || is(field, SQL)) { const query = is(field, SQL.Aliased) ? field.sql : field; if (isSingleTable) { chunk.push( new SQL( query.queryChunks.map((c) => { if (is(c, PgColumn)) { return sql.identifier(this.casing.getColumnCasing(c)); } return c; }), ), ); } else { chunk.push(query); } if (is(field, SQL.Aliased)) { chunk.push(sql` as ${sql.identifier(field.fieldAlias)}`); } } else if (is(field, Column)) { if (isSingleTable) { chunk.push(sql.identifier(this.casing.getColumnCasing(field))); } else { chunk.push(field); } } else if (is(field, Subquery)) { const entries = Object.entries(field._.selectedFields) as [string, SQL.Aliased | Column | SQL][]; if (entries.length === 1) { const entry = entries[0]![1]; const fieldDecoder = is(entry, SQL) ? entry.decoder : is(entry, Column) ? { mapFromDriverValue: (v: any) => entry.mapFromDriverValue(v) } : entry.sql.decoder; if (fieldDecoder) { field._.sql.decoder = fieldDecoder; } } chunk.push(field); } if (i < columnsLen - 1) { chunk.push(sql`, `); } return chunk; }); return sql.join(chunks); } private buildJoins(joins: PgSelectJoinConfig[] | undefined): SQL | undefined { if (!joins || joins.length === 0) { return undefined; } const joinsArray: SQL[] = []; for (const [index, joinMeta] of joins.entries()) { if (index === 0) { joinsArray.push(sql` `); } const table = joinMeta.table; const lateralSql = joinMeta.lateral ? sql` lateral` : undefined; const onSql = joinMeta.on ? sql` on ${joinMeta.on}` : undefined; if (is(table, PgTable)) { const tableName = table[PgTable.Symbol.Name]; const tableSchema = table[PgTable.Symbol.Schema]; const origTableName = table[PgTable.Symbol.OriginalName]; const alias = tableName === origTableName ? undefined : joinMeta.alias; joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined }${sql.identifier(origTableName)}${alias && sql` ${sql.identifier(alias)}`}${onSql}`, ); } else if (is(table, View)) { const viewName = table[ViewBaseConfig].name; const viewSchema = table[ViewBaseConfig].schema; const origViewName = table[ViewBaseConfig].originalName; const alias = viewName === origViewName ? undefined : joinMeta.alias; joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ viewSchema ? sql`${sql.identifier(viewSchema)}.` : undefined }${sql.identifier(origViewName)}${alias && sql` ${sql.identifier(alias)}`}${onSql}`, ); } else { joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${table}${onSql}`, ); } if (index < joins.length - 1) { joinsArray.push(sql` `); } } return sql.join(joinsArray); } private buildFromTable( table: SQL | Subquery | PgViewBase | PgTable | undefined, ): SQL | Subquery | PgViewBase | PgTable | undefined { if (is(table, Table) && table[Table.Symbol.IsAlias]) { let fullName = sql`${sql.identifier(table[Table.Symbol.OriginalName])}`; if (table[Table.Symbol.Schema]) { fullName = sql`${sql.identifier(table[Table.Symbol.Schema]!)}.${fullName}`; } return sql`${fullName} ${sql.identifier(table[Table.Symbol.Name])}`; } return table; } buildSelectQuery( { withList, fields, fieldsFlat, where, having, table, joins, orderBy, groupBy, limit, offset, lockingClause, distinct, setOperators, }: PgSelectConfig, ): SQL { const fieldsList = fieldsFlat ?? orderSelectedFields(fields); for (const f of fieldsList) { if ( is(f.field, Column) && getTableName(f.field.table) !== (is(table, Subquery) ? table._.alias : is(table, PgViewBase) ? table[ViewBaseConfig].name : is(table, SQL) ? undefined : getTableName(table)) && !((table) => joins?.some(({ alias }) => alias === (table[Table.Symbol.IsAlias] ? getTableName(table) : table[Table.Symbol.BaseName]) ))(f.field.table) ) { const tableName = getTableName(f.field.table); throw new Error( `Your "${ f.path.join('->') }" field references a column "${tableName}"."${f.field.name}", but the table "${tableName}" is not part of the query! Did you forget to join it?`, ); } } const isSingleTable = !joins || joins.length === 0; const withSql = this.buildWithCTE(withList); let distinctSql: SQL | undefined; if (distinct) { distinctSql = distinct === true ? sql` distinct` : sql` distinct on (${sql.join(distinct.on, sql`, `)})`; } const selection = this.buildSelection(fieldsList, { isSingleTable }); const tableSql = this.buildFromTable(table); const joinsSql = this.buildJoins(joins); const whereSql = where ? sql` where ${where}` : undefined; const havingSql = having ? sql` having ${having}` : undefined; let orderBySql; if (orderBy && orderBy.length > 0) { orderBySql = sql` order by ${sql.join(orderBy, sql`, `)}`; } let groupBySql; if (groupBy && groupBy.length > 0) { groupBySql = sql` group by ${sql.join(groupBy, sql`, `)}`; } const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) ? sql` limit ${limit}` : undefined; const offsetSql = offset ? sql` offset ${offset}` : undefined; const lockingClauseSql = sql.empty(); if (lockingClause) { const clauseSql = sql` for ${sql.raw(lockingClause.strength)}`; if (lockingClause.config.of) { clauseSql.append( sql` of ${ sql.join( Array.isArray(lockingClause.config.of) ? lockingClause.config.of : [lockingClause.config.of], sql`, `, ) }`, ); } if (lockingClause.config.noWait) { clauseSql.append(sql` nowait`); } else if (lockingClause.config.skipLocked) { clauseSql.append(sql` skip locked`); } lockingClauseSql.append(clauseSql); } const finalQuery = sql`${withSql}select${distinctSql} ${selection} from ${tableSql}${joinsSql}${whereSql}${groupBySql}${havingSql}${orderBySql}${limitSql}${offsetSql}${lockingClauseSql}`; if (setOperators.length > 0) { return this.buildSetOperations(finalQuery, setOperators); } return finalQuery; } buildSetOperations(leftSelect: SQL, setOperators: PgSelectConfig['setOperators']): SQL { const [setOperator, ...rest] = setOperators; if (!setOperator) { throw new Error('Cannot pass undefined values to any set operator'); } if (rest.length === 0) { return this.buildSetOperationQuery({ leftSelect, setOperator }); } // Some recursive magic here return this.buildSetOperations( this.buildSetOperationQuery({ leftSelect, setOperator }), rest, ); } buildSetOperationQuery({ leftSelect, setOperator: { type, isAll, rightSelect, limit, orderBy, offset }, }: { leftSelect: SQL; setOperator: PgSelectConfig['setOperators'][number] }): SQL { const leftChunk = sql`(${leftSelect.getSQL()}) `; const rightChunk = sql`(${rightSelect.getSQL()})`; let orderBySql; if (orderBy && orderBy.length > 0) { const orderByValues: (SQL | Name)[] = []; // The next bit is necessary because the sql operator replaces ${table.column} with `table`.`column` // which is invalid Sql syntax, Table from one of the SELECTs cannot be used in global ORDER clause for (const singleOrderBy of orderBy) { if (is(singleOrderBy, PgColumn)) { orderByValues.push(sql.identifier(singleOrderBy.name)); } else if (is(singleOrderBy, SQL)) { for (let i = 0; i < singleOrderBy.queryChunks.length; i++) { const chunk = singleOrderBy.queryChunks[i]; if (is(chunk, PgColumn)) { singleOrderBy.queryChunks[i] = sql.identifier(chunk.name); } } orderByValues.push(sql`${singleOrderBy}`); } else { orderByValues.push(sql`${singleOrderBy}`); } } orderBySql = sql` order by ${sql.join(orderByValues, sql`, `)} `; } const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) ? sql` limit ${limit}` : undefined; const operatorChunk = sql.raw(`${type} ${isAll ? 'all ' : ''}`); const offsetSql = offset ? sql` offset ${offset}` : undefined; return sql`${leftChunk}${operatorChunk}${rightChunk}${orderBySql}${limitSql}${offsetSql}`; } buildInsertQuery( { table, values: valuesOrSelect, onConflict, returning, withList, select, overridingSystemValue_ }: PgInsertConfig, ): SQL { const valuesSqlList: ((SQLChunk | SQL)[] | SQL)[] = []; const columns: Record = table[Table.Symbol.Columns]; const colEntries: [string, PgColumn][] = Object.entries(columns).filter(([_, col]) => !col.shouldDisableInsert()); const insertOrder = colEntries.map( ([, column]) => sql.identifier(this.casing.getColumnCasing(column)), ); if (select) { const select = valuesOrSelect as AnyPgSelectQueryBuilder | SQL; if (is(select, SQL)) { valuesSqlList.push(select); } else { valuesSqlList.push(select.getSQL()); } } else { const values = valuesOrSelect as Record[]; valuesSqlList.push(sql.raw('values ')); for (const [valueIndex, value] of values.entries()) { const valueList: (SQLChunk | SQL)[] = []; for (const [fieldName, col] of colEntries) { const colValue = value[fieldName]; if (colValue === undefined || (is(colValue, Param) && colValue.value === undefined)) { // eslint-disable-next-line unicorn/no-negated-condition if (col.defaultFn !== undefined) { const defaultFnResult = col.defaultFn(); const defaultValue = is(defaultFnResult, SQL) ? defaultFnResult : sql.param(defaultFnResult, col); valueList.push(defaultValue); // eslint-disable-next-line unicorn/no-negated-condition } else if (!col.default && col.onUpdateFn !== undefined) { const onUpdateFnResult = col.onUpdateFn(); const newValue = is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col); valueList.push(newValue); } else { valueList.push(sql`default`); } } else { valueList.push(colValue); } } valuesSqlList.push(valueList); if (valueIndex < values.length - 1) { valuesSqlList.push(sql`, `); } } } const withSql = this.buildWithCTE(withList); const valuesSql = sql.join(valuesSqlList); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` : undefined; const onConflictSql = onConflict ? sql` on conflict ${onConflict}` : undefined; const overridingSql = overridingSystemValue_ === true ? sql`overriding system value ` : undefined; return sql`${withSql}insert into ${table} ${insertOrder} ${overridingSql}${valuesSql}${onConflictSql}${returningSql}`; } buildRefreshMaterializedViewQuery( { view, concurrently, withNoData }: { view: PgMaterializedView; concurrently?: boolean; withNoData?: boolean }, ): SQL { const concurrentlySql = concurrently ? sql` concurrently` : undefined; const withNoDataSql = withNoData ? sql` with no data` : undefined; return sql`refresh materialized view${concurrentlySql} ${view}${withNoDataSql}`; } prepareTyping(encoder: DriverValueEncoder): QueryTypingsValue { if (is(encoder, PgJsonb) || is(encoder, PgJson)) { return 'json'; } else if (is(encoder, PgNumeric)) { return 'decimal'; } else if (is(encoder, PgTime)) { return 'time'; } else if (is(encoder, PgTimestamp) || is(encoder, PgTimestampString)) { return 'timestamp'; } else if (is(encoder, PgDate) || is(encoder, PgDateString)) { return 'date'; } else if (is(encoder, PgUUID)) { return 'uuid'; } else { return 'none'; } } sqlToQuery(sql: SQL, invokeSource?: 'indexes' | undefined): QueryWithTypings { return sql.toQuery({ casing: this.casing, escapeName: this.escapeName, escapeParam: this.escapeParam, escapeString: this.escapeString, prepareTyping: this.prepareTyping, invokeSource, }); } // buildRelationalQueryWithPK({ // fullSchema, // schema, // tableNamesMap, // table, // tableConfig, // queryConfig: config, // tableAlias, // isRoot = false, // joinOn, // }: { // fullSchema: Record; // schema: TablesRelationalConfig; // tableNamesMap: Record; // table: PgTable; // tableConfig: TableRelationalConfig; // queryConfig: true | DBQueryConfig<'many', true>; // tableAlias: string; // isRoot?: boolean; // joinOn?: SQL; // }): BuildRelationalQueryResult { // // For { "": true }, return a table with selection of all columns // if (config === true) { // const selectionEntries = Object.entries(tableConfig.columns); // const selection: BuildRelationalQueryResult['selection'] = selectionEntries.map(( // [key, value], // ) => ({ // dbKey: value.name, // tsKey: key, // field: value as PgColumn, // relationTableTsKey: undefined, // isJson: false, // selection: [], // })); // return { // tableTsKey: tableConfig.tsName, // sql: table, // selection, // }; // } // // let selection: BuildRelationalQueryResult['selection'] = []; // // let selectionForBuild = selection; // const aliasedColumns = Object.fromEntries( // Object.entries(tableConfig.columns).map(([key, value]) => [key, aliasedTableColumn(value, tableAlias)]), // ); // const aliasedRelations = Object.fromEntries( // Object.entries(tableConfig.relations).map(([key, value]) => [key, aliasedRelation(value, tableAlias)]), // ); // const aliasedFields = Object.assign({}, aliasedColumns, aliasedRelations); // let where, hasUserDefinedWhere; // if (config.where) { // const whereSql = typeof config.where === 'function' ? config.where(aliasedFields, operators) : config.where; // where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); // hasUserDefinedWhere = !!where; // } // where = and(joinOn, where); // // const fieldsSelection: { tsKey: string; value: PgColumn | SQL.Aliased; isExtra?: boolean }[] = []; // let joins: Join[] = []; // let selectedColumns: string[] = []; // // Figure out which columns to select // if (config.columns) { // let isIncludeMode = false; // for (const [field, value] of Object.entries(config.columns)) { // if (value === undefined) { // continue; // } // if (field in tableConfig.columns) { // if (!isIncludeMode && value === true) { // isIncludeMode = true; // } // selectedColumns.push(field); // } // } // if (selectedColumns.length > 0) { // selectedColumns = isIncludeMode // ? selectedColumns.filter((c) => config.columns?.[c] === true) // : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); // } // } else { // // Select all columns if selection is not specified // selectedColumns = Object.keys(tableConfig.columns); // } // // for (const field of selectedColumns) { // // const column = tableConfig.columns[field]! as PgColumn; // // fieldsSelection.push({ tsKey: field, value: column }); // // } // let initiallySelectedRelations: { // tsKey: string; // queryConfig: true | DBQueryConfig<'many', false>; // relation: Relation; // }[] = []; // // let selectedRelations: BuildRelationalQueryResult['selection'] = []; // // Figure out which relations to select // if (config.with) { // initiallySelectedRelations = Object.entries(config.with) // .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) // .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); // } // const manyRelations = initiallySelectedRelations.filter((r) => // is(r.relation, Many) // && (schema[tableNamesMap[r.relation.referencedTable[Table.Symbol.Name]]!]?.primaryKey.length ?? 0) > 0 // ); // // If this is the last Many relation (or there are no Many relations), we are on the innermost subquery level // const isInnermostQuery = manyRelations.length < 2; // const selectedExtras: { // tsKey: string; // value: SQL.Aliased; // }[] = []; // // Figure out which extras to select // if (isInnermostQuery && config.extras) { // const extras = typeof config.extras === 'function' // ? config.extras(aliasedFields, { sql }) // : config.extras; // for (const [tsKey, value] of Object.entries(extras)) { // selectedExtras.push({ // tsKey, // value: mapColumnsInAliasedSQLToAlias(value, tableAlias), // }); // } // } // // Transform `fieldsSelection` into `selection` // // `fieldsSelection` shouldn't be used after this point // // for (const { tsKey, value, isExtra } of fieldsSelection) { // // selection.push({ // // dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, // // tsKey, // // field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, // // relationTableTsKey: undefined, // // isJson: false, // // isExtra, // // selection: [], // // }); // // } // let orderByOrig = typeof config.orderBy === 'function' // ? config.orderBy(aliasedFields, orderByOperators) // : config.orderBy ?? []; // if (!Array.isArray(orderByOrig)) { // orderByOrig = [orderByOrig]; // } // const orderBy = orderByOrig.map((orderByValue) => { // if (is(orderByValue, Column)) { // return aliasedTableColumn(orderByValue, tableAlias) as PgColumn; // } // return mapColumnsInSQLToAlias(orderByValue, tableAlias); // }); // const limit = isInnermostQuery ? config.limit : undefined; // const offset = isInnermostQuery ? config.offset : undefined; // // For non-root queries without additional config except columns, return a table with selection // if ( // !isRoot // && initiallySelectedRelations.length === 0 // && selectedExtras.length === 0 // && !where // && orderBy.length === 0 // && limit === undefined // && offset === undefined // ) { // return { // tableTsKey: tableConfig.tsName, // sql: table, // selection: selectedColumns.map((key) => ({ // dbKey: tableConfig.columns[key]!.name, // tsKey: key, // field: tableConfig.columns[key] as PgColumn, // relationTableTsKey: undefined, // isJson: false, // selection: [], // })), // }; // } // const selectedRelationsWithoutPK: // // Process all relations without primary keys, because they need to be joined differently and will all be on the same query level // for ( // const { // tsKey: selectedRelationTsKey, // queryConfig: selectedRelationConfigValue, // relation, // } of initiallySelectedRelations // ) { // const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); // const relationTableName = relation.referencedTable[Table.Symbol.Name]; // const relationTableTsName = tableNamesMap[relationTableName]!; // const relationTable = schema[relationTableTsName]!; // if (relationTable.primaryKey.length > 0) { // continue; // } // const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; // const joinOn = and( // ...normalizedRelation.fields.map((field, i) => // eq( // aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), // aliasedTableColumn(field, tableAlias), // ) // ), // ); // const builtRelation = this.buildRelationalQueryWithoutPK({ // fullSchema, // schema, // tableNamesMap, // table: fullSchema[relationTableTsName] as PgTable, // tableConfig: schema[relationTableTsName]!, // queryConfig: selectedRelationConfigValue, // tableAlias: relationTableAlias, // joinOn, // nestedQueryRelation: relation, // }); // const field = sql`${sql.identifier(relationTableAlias)}.${sql.identifier('data')}`.as(selectedRelationTsKey); // joins.push({ // on: sql`true`, // table: new Subquery(builtRelation.sql as SQL, {}, relationTableAlias), // alias: relationTableAlias, // joinType: 'left', // lateral: true, // }); // selectedRelations.push({ // dbKey: selectedRelationTsKey, // tsKey: selectedRelationTsKey, // field, // relationTableTsKey: relationTableTsName, // isJson: true, // selection: builtRelation.selection, // }); // } // const oneRelations = initiallySelectedRelations.filter((r): r is typeof r & { relation: One } => // is(r.relation, One) // ); // // Process all One relations with PKs, because they can all be joined on the same level // for ( // const { // tsKey: selectedRelationTsKey, // queryConfig: selectedRelationConfigValue, // relation, // } of oneRelations // ) { // const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); // const relationTableName = relation.referencedTable[Table.Symbol.Name]; // const relationTableTsName = tableNamesMap[relationTableName]!; // const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; // const relationTable = schema[relationTableTsName]!; // if (relationTable.primaryKey.length === 0) { // continue; // } // const joinOn = and( // ...normalizedRelation.fields.map((field, i) => // eq( // aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), // aliasedTableColumn(field, tableAlias), // ) // ), // ); // const builtRelation = this.buildRelationalQueryWithPK({ // fullSchema, // schema, // tableNamesMap, // table: fullSchema[relationTableTsName] as PgTable, // tableConfig: schema[relationTableTsName]!, // queryConfig: selectedRelationConfigValue, // tableAlias: relationTableAlias, // joinOn, // }); // const field = sql`case when ${sql.identifier(relationTableAlias)} is null then null else json_build_array(${ // sql.join( // builtRelation.selection.map(({ field }) => // is(field, SQL.Aliased) // ? sql`${sql.identifier(relationTableAlias)}.${sql.identifier(field.fieldAlias)}` // : is(field, Column) // ? aliasedTableColumn(field, relationTableAlias) // : field // ), // sql`, `, // ) // }) end`.as(selectedRelationTsKey); // const isLateralJoin = is(builtRelation.sql, SQL); // joins.push({ // on: isLateralJoin ? sql`true` : joinOn, // table: is(builtRelation.sql, SQL) // ? new Subquery(builtRelation.sql, {}, relationTableAlias) // : aliasedTable(builtRelation.sql, relationTableAlias), // alias: relationTableAlias, // joinType: 'left', // lateral: is(builtRelation.sql, SQL), // }); // selectedRelations.push({ // dbKey: selectedRelationTsKey, // tsKey: selectedRelationTsKey, // field, // relationTableTsKey: relationTableTsName, // isJson: true, // selection: builtRelation.selection, // }); // } // let distinct: PgSelectConfig['distinct']; // let tableFrom: PgTable | Subquery = table; // // Process first Many relation - each one requires a nested subquery // const manyRelation = manyRelations[0]; // if (manyRelation) { // const { // tsKey: selectedRelationTsKey, // queryConfig: selectedRelationQueryConfig, // relation, // } = manyRelation; // distinct = { // on: tableConfig.primaryKey.map((c) => aliasedTableColumn(c as PgColumn, tableAlias)), // }; // const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); // const relationTableName = relation.referencedTable[Table.Symbol.Name]; // const relationTableTsName = tableNamesMap[relationTableName]!; // const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; // const joinOn = and( // ...normalizedRelation.fields.map((field, i) => // eq( // aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), // aliasedTableColumn(field, tableAlias), // ) // ), // ); // const builtRelationJoin = this.buildRelationalQueryWithPK({ // fullSchema, // schema, // tableNamesMap, // table: fullSchema[relationTableTsName] as PgTable, // tableConfig: schema[relationTableTsName]!, // queryConfig: selectedRelationQueryConfig, // tableAlias: relationTableAlias, // joinOn, // }); // const builtRelationSelectionField = sql`case when ${ // sql.identifier(relationTableAlias) // } is null then '[]' else json_agg(json_build_array(${ // sql.join( // builtRelationJoin.selection.map(({ field }) => // is(field, SQL.Aliased) // ? sql`${sql.identifier(relationTableAlias)}.${sql.identifier(field.fieldAlias)}` // : is(field, Column) // ? aliasedTableColumn(field, relationTableAlias) // : field // ), // sql`, `, // ) // })) over (partition by ${sql.join(distinct.on, sql`, `)}) end`.as(selectedRelationTsKey); // const isLateralJoin = is(builtRelationJoin.sql, SQL); // joins.push({ // on: isLateralJoin ? sql`true` : joinOn, // table: isLateralJoin // ? new Subquery(builtRelationJoin.sql as SQL, {}, relationTableAlias) // : aliasedTable(builtRelationJoin.sql as PgTable, relationTableAlias), // alias: relationTableAlias, // joinType: 'left', // lateral: isLateralJoin, // }); // // Build the "from" subquery with the remaining Many relations // const builtTableFrom = this.buildRelationalQueryWithPK({ // fullSchema, // schema, // tableNamesMap, // table, // tableConfig, // queryConfig: { // ...config, // where: undefined, // orderBy: undefined, // limit: undefined, // offset: undefined, // with: manyRelations.slice(1).reduce>( // (result, { tsKey, queryConfig: configValue }) => { // result[tsKey] = configValue; // return result; // }, // {}, // ), // }, // tableAlias, // }); // selectedRelations.push({ // dbKey: selectedRelationTsKey, // tsKey: selectedRelationTsKey, // field: builtRelationSelectionField, // relationTableTsKey: relationTableTsName, // isJson: true, // selection: builtRelationJoin.selection, // }); // // selection = builtTableFrom.selection.map((item) => // // is(item.field, SQL.Aliased) // // ? { ...item, field: sql`${sql.identifier(tableAlias)}.${sql.identifier(item.field.fieldAlias)}` } // // : item // // ); // // selectionForBuild = [{ // // dbKey: '*', // // tsKey: '*', // // field: sql`${sql.identifier(tableAlias)}.*`, // // selection: [], // // isJson: false, // // relationTableTsKey: undefined, // // }]; // // const newSelectionItem: (typeof selection)[number] = { // // dbKey: selectedRelationTsKey, // // tsKey: selectedRelationTsKey, // // field, // // relationTableTsKey: relationTableTsName, // // isJson: true, // // selection: builtRelationJoin.selection, // // }; // // selection.push(newSelectionItem); // // selectionForBuild.push(newSelectionItem); // tableFrom = is(builtTableFrom.sql, PgTable) // ? builtTableFrom.sql // : new Subquery(builtTableFrom.sql, {}, tableAlias); // } // if (selectedColumns.length === 0 && selectedRelations.length === 0 && selectedExtras.length === 0) { // throw new DrizzleError(`No fields selected for table "${tableConfig.tsName}" ("${tableAlias}")`); // } // let selection: BuildRelationalQueryResult['selection']; // function prepareSelectedColumns() { // return selectedColumns.map((key) => ({ // dbKey: tableConfig.columns[key]!.name, // tsKey: key, // field: tableConfig.columns[key] as PgColumn, // relationTableTsKey: undefined, // isJson: false, // selection: [], // })); // } // function prepareSelectedExtras() { // return selectedExtras.map((item) => ({ // dbKey: item.value.fieldAlias, // tsKey: item.tsKey, // field: item.value, // relationTableTsKey: undefined, // isJson: false, // selection: [], // })); // } // if (isRoot) { // selection = [ // ...prepareSelectedColumns(), // ...prepareSelectedExtras(), // ]; // } // if (hasUserDefinedWhere || orderBy.length > 0) { // tableFrom = new Subquery( // this.buildSelectQuery({ // table: is(tableFrom, PgTable) ? aliasedTable(tableFrom, tableAlias) : tableFrom, // fields: {}, // fieldsFlat: selectionForBuild.map(({ field }) => ({ // path: [], // field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, // })), // joins, // distinct, // }), // {}, // tableAlias, // ); // selectionForBuild = selection.map((item) => // is(item.field, SQL.Aliased) // ? { ...item, field: sql`${sql.identifier(tableAlias)}.${sql.identifier(item.field.fieldAlias)}` } // : item // ); // joins = []; // distinct = undefined; // } // const result = this.buildSelectQuery({ // table: is(tableFrom, PgTable) ? aliasedTable(tableFrom, tableAlias) : tableFrom, // fields: {}, // fieldsFlat: selectionForBuild.map(({ field }) => ({ // path: [], // field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, // })), // where, // limit, // offset, // joins, // orderBy, // distinct, // }); // return { // tableTsKey: tableConfig.tsName, // sql: result, // selection, // }; // } buildRelationalQueryWithoutPK({ fullSchema, schema, tableNamesMap, table, tableConfig, queryConfig: config, tableAlias, nestedQueryRelation, joinOn, }: { fullSchema: Record; schema: TablesRelationalConfig; tableNamesMap: Record; table: PgTable; tableConfig: TableRelationalConfig; queryConfig: true | DBQueryConfig<'many', true>; tableAlias: string; nestedQueryRelation?: Relation; joinOn?: SQL; }): BuildRelationalQueryResult { let selection: BuildRelationalQueryResult['selection'] = []; let limit, offset, orderBy: NonNullable = [], where; const joins: PgSelectJoinConfig[] = []; if (config === true) { const selectionEntries = Object.entries(tableConfig.columns); selection = selectionEntries.map(( [key, value], ) => ({ dbKey: value.name, tsKey: key, field: aliasedTableColumn(value as PgColumn, tableAlias), relationTableTsKey: undefined, isJson: false, selection: [], })); } else { const aliasedColumns = Object.fromEntries( Object.entries(tableConfig.columns).map(( [key, value], ) => [key, aliasedTableColumn(value, tableAlias)]), ); if (config.where) { const whereSql = typeof config.where === 'function' ? config.where(aliasedColumns, getOperators()) : config.where; where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); } const fieldsSelection: { tsKey: string; value: PgColumn | SQL.Aliased }[] = []; let selectedColumns: string[] = []; // Figure out which columns to select if (config.columns) { let isIncludeMode = false; for (const [field, value] of Object.entries(config.columns)) { if (value === undefined) { continue; } if (field in tableConfig.columns) { if (!isIncludeMode && value === true) { isIncludeMode = true; } selectedColumns.push(field); } } if (selectedColumns.length > 0) { selectedColumns = isIncludeMode ? selectedColumns.filter((c) => config.columns?.[c] === true) : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); } } else { // Select all columns if selection is not specified selectedColumns = Object.keys(tableConfig.columns); } for (const field of selectedColumns) { const column = tableConfig.columns[field]! as PgColumn; fieldsSelection.push({ tsKey: field, value: column }); } let selectedRelations: { tsKey: string; queryConfig: true | DBQueryConfig<'many', false>; relation: Relation; }[] = []; // Figure out which relations to select if (config.with) { selectedRelations = Object.entries(config.with) .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); } let extras; // Figure out which extras to select if (config.extras) { extras = typeof config.extras === 'function' ? config.extras(aliasedColumns, { sql }) : config.extras; for (const [tsKey, value] of Object.entries(extras)) { fieldsSelection.push({ tsKey, value: mapColumnsInAliasedSQLToAlias(value, tableAlias), }); } } // Transform `fieldsSelection` into `selection` // `fieldsSelection` shouldn't be used after this point for (const { tsKey, value } of fieldsSelection) { selection.push({ dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, tsKey, field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, relationTableTsKey: undefined, isJson: false, selection: [], }); } let orderByOrig = typeof config.orderBy === 'function' ? config.orderBy(aliasedColumns, getOrderByOperators()) : config.orderBy ?? []; if (!Array.isArray(orderByOrig)) { orderByOrig = [orderByOrig]; } orderBy = orderByOrig.map((orderByValue) => { if (is(orderByValue, Column)) { return aliasedTableColumn(orderByValue, tableAlias) as PgColumn; } return mapColumnsInSQLToAlias(orderByValue, tableAlias); }); limit = config.limit; offset = config.offset; // Process all relations for ( const { tsKey: selectedRelationTsKey, queryConfig: selectedRelationConfigValue, relation, } of selectedRelations ) { const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); const relationTableName = getTableUniqueName(relation.referencedTable); const relationTableTsName = tableNamesMap[relationTableName]!; const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; const joinOn = and( ...normalizedRelation.fields.map((field, i) => eq( aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), aliasedTableColumn(field, tableAlias), ) ), ); const builtRelation = this.buildRelationalQueryWithoutPK({ fullSchema, schema, tableNamesMap, table: fullSchema[relationTableTsName] as PgTable, tableConfig: schema[relationTableTsName]!, queryConfig: is(relation, One) ? (selectedRelationConfigValue === true ? { limit: 1 } : { ...selectedRelationConfigValue, limit: 1 }) : selectedRelationConfigValue, tableAlias: relationTableAlias, joinOn, nestedQueryRelation: relation, }); const field = sql`${sql.identifier(relationTableAlias)}.${sql.identifier('data')}`.as(selectedRelationTsKey); joins.push({ on: sql`true`, table: new Subquery(builtRelation.sql as SQL, {}, relationTableAlias), alias: relationTableAlias, joinType: 'left', lateral: true, }); selection.push({ dbKey: selectedRelationTsKey, tsKey: selectedRelationTsKey, field, relationTableTsKey: relationTableTsName, isJson: true, selection: builtRelation.selection, }); } } if (selection.length === 0) { throw new DrizzleError({ message: `No fields selected for table "${tableConfig.tsName}" ("${tableAlias}")` }); } let result; where = and(joinOn, where); if (nestedQueryRelation) { let field = sql`json_build_array(${ sql.join( selection.map(({ field, tsKey, isJson }) => isJson ? sql`${sql.identifier(`${tableAlias}_${tsKey}`)}.${sql.identifier('data')}` : is(field, SQL.Aliased) ? field.sql : field ), sql`, `, ) })`; if (is(nestedQueryRelation, Many)) { field = sql`coalesce(json_agg(${field}${ orderBy.length > 0 ? sql` order by ${sql.join(orderBy, sql`, `)}` : undefined }), '[]'::json)`; // orderBy = []; } const nestedSelection = [{ dbKey: 'data', tsKey: 'data', field: field.as('data'), isJson: true, relationTableTsKey: tableConfig.tsName, selection, }]; const needsSubquery = limit !== undefined || offset !== undefined || orderBy.length > 0; if (needsSubquery) { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: [{ path: [], field: sql.raw('*'), }], where, limit, offset, orderBy, setOperators: [], }); where = undefined; limit = undefined; offset = undefined; orderBy = []; } else { result = aliasedTable(table, tableAlias); } result = this.buildSelectQuery({ table: is(result, PgTable) ? result : new Subquery(result, {}, tableAlias), fields: {}, fieldsFlat: nestedSelection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), joins, where, limit, offset, orderBy, setOperators: [], }); } else { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: selection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), joins, where, limit, offset, orderBy, setOperators: [], }); } return { tableTsKey: tableConfig.tsName, sql: result, selection, }; } } ================================================ FILE: drizzle-orm/src/pg-core/expressions.ts ================================================ import type { PgColumn } from '~/pg-core/columns/index.ts'; import { bindIfParam } from '~/sql/expressions/index.ts'; import type { Placeholder, SQL, SQLChunk, SQLWrapper } from '~/sql/sql.ts'; import { sql } from '~/sql/sql.ts'; export * from '~/sql/expressions/index.ts'; export function concat(column: PgColumn | SQL.Aliased, value: string | Placeholder | SQLWrapper): SQL { return sql`${column} || ${bindIfParam(value, column)}`; } export function substring( column: PgColumn | SQL.Aliased, { from, for: _for }: { from?: number | Placeholder | SQLWrapper; for?: number | Placeholder | SQLWrapper }, ): SQL { const chunks: SQLChunk[] = [sql`substring(`, column]; if (from !== undefined) { chunks.push(sql` from `, bindIfParam(from, column)); } if (_for !== undefined) { chunks.push(sql` for `, bindIfParam(_for, column)); } chunks.push(sql`)`); return sql.join(chunks); } ================================================ FILE: drizzle-orm/src/pg-core/foreign-keys.ts ================================================ import { entityKind } from '~/entity.ts'; import { TableName } from '~/table.utils.ts'; import type { AnyPgColumn, PgColumn } from './columns/index.ts'; import type { PgTable } from './table.ts'; export type UpdateDeleteAction = 'cascade' | 'restrict' | 'no action' | 'set null' | 'set default'; export type Reference = () => { readonly name?: string; readonly columns: PgColumn[]; readonly foreignTable: PgTable; readonly foreignColumns: PgColumn[]; }; export class ForeignKeyBuilder { static readonly [entityKind]: string = 'PgForeignKeyBuilder'; /** @internal */ reference: Reference; /** @internal */ _onUpdate: UpdateDeleteAction | undefined = 'no action'; /** @internal */ _onDelete: UpdateDeleteAction | undefined = 'no action'; constructor( config: () => { name?: string; columns: PgColumn[]; foreignColumns: PgColumn[]; }, actions?: { onUpdate?: UpdateDeleteAction; onDelete?: UpdateDeleteAction; } | undefined, ) { this.reference = () => { const { name, columns, foreignColumns } = config(); return { name, columns, foreignTable: foreignColumns[0]!.table as PgTable, foreignColumns }; }; if (actions) { this._onUpdate = actions.onUpdate; this._onDelete = actions.onDelete; } } onUpdate(action: UpdateDeleteAction): this { this._onUpdate = action === undefined ? 'no action' : action; return this; } onDelete(action: UpdateDeleteAction): this { this._onDelete = action === undefined ? 'no action' : action; return this; } /** @internal */ build(table: PgTable): ForeignKey { return new ForeignKey(table, this); } } export type AnyForeignKeyBuilder = ForeignKeyBuilder; export class ForeignKey { static readonly [entityKind]: string = 'PgForeignKey'; readonly reference: Reference; readonly onUpdate: UpdateDeleteAction | undefined; readonly onDelete: UpdateDeleteAction | undefined; constructor(readonly table: PgTable, builder: ForeignKeyBuilder) { this.reference = builder.reference; this.onUpdate = builder._onUpdate; this.onDelete = builder._onDelete; } getName(): string { const { name, columns, foreignColumns } = this.reference(); const columnNames = columns.map((column) => column.name); const foreignColumnNames = foreignColumns.map((column) => column.name); const chunks = [ this.table[TableName], ...columnNames, foreignColumns[0]!.table[TableName], ...foreignColumnNames, ]; return name ?? `${chunks.join('_')}_fk`; } } type ColumnsWithTable< TTableName extends string, TColumns extends PgColumn[], > = { [Key in keyof TColumns]: AnyPgColumn<{ tableName: TTableName }> }; export function foreignKey< TTableName extends string, TForeignTableName extends string, TColumns extends [AnyPgColumn<{ tableName: TTableName }>, ...AnyPgColumn<{ tableName: TTableName }>[]], >( config: { name?: string; columns: TColumns; foreignColumns: ColumnsWithTable; }, ): ForeignKeyBuilder { function mappedConfig() { const { name, columns, foreignColumns } = config; return { name, columns, foreignColumns, }; } return new ForeignKeyBuilder(mappedConfig); } ================================================ FILE: drizzle-orm/src/pg-core/index.ts ================================================ export * from './alias.ts'; export * from './checks.ts'; export * from './columns/index.ts'; export * from './db.ts'; export * from './dialect.ts'; export * from './foreign-keys.ts'; export * from './indexes.ts'; export * from './policies.ts'; export * from './primary-keys.ts'; export * from './query-builders/index.ts'; export * from './roles.ts'; export * from './schema.ts'; export * from './sequence.ts'; export * from './session.ts'; export * from './subquery.ts'; export * from './table.ts'; export * from './unique-constraint.ts'; export * from './utils.ts'; export * from './utils/index.ts'; export * from './view-common.ts'; export * from './view.ts'; ================================================ FILE: drizzle-orm/src/pg-core/indexes.ts ================================================ import { SQL } from '~/sql/sql.ts'; import { entityKind, is } from '~/entity.ts'; import type { ExtraConfigColumn, PgColumn } from './columns/index.ts'; import { IndexedColumn } from './columns/index.ts'; import type { PgTable } from './table.ts'; interface IndexConfig { name?: string; columns: Partial[]; /** * If true, the index will be created as `create unique index` instead of `create index`. */ unique: boolean; /** * If true, the index will be created as `create index concurrently` instead of `create index`. */ concurrently?: boolean; /** * If true, the index will be created as `create index ... on only
` instead of `create index ... on
`. */ only: boolean; /** * Condition for partial index. */ where?: SQL; /** * The optional WITH clause specifies storage parameters for the index */ with?: Record; /** * The optional WITH clause method for the index */ method?: 'btree' | string; } export type IndexColumn = PgColumn; export type PgIndexMethod = 'btree' | 'hash' | 'gist' | 'spgist' | 'gin' | 'brin' | 'hnsw' | 'ivfflat' | (string & {}); export type PgIndexOpClass = | 'abstime_ops' | 'access_method' | 'anyarray_eq' | 'anyarray_ge' | 'anyarray_gt' | 'anyarray_le' | 'anyarray_lt' | 'anyarray_ne' | 'bigint_ops' | 'bit_ops' | 'bool_ops' | 'box_ops' | 'bpchar_ops' | 'char_ops' | 'cidr_ops' | 'cstring_ops' | 'date_ops' | 'float_ops' | 'int2_ops' | 'int4_ops' | 'int8_ops' | 'interval_ops' | 'jsonb_ops' | 'macaddr_ops' | 'name_ops' | 'numeric_ops' | 'oid_ops' | 'oidint4_ops' | 'oidint8_ops' | 'oidname_ops' | 'oidvector_ops' | 'point_ops' | 'polygon_ops' | 'range_ops' | 'record_eq' | 'record_ge' | 'record_gt' | 'record_le' | 'record_lt' | 'record_ne' | 'text_ops' | 'time_ops' | 'timestamp_ops' | 'timestamptz_ops' | 'timetz_ops' | 'uuid_ops' | 'varbit_ops' | 'varchar_ops' // pg_vector types | 'xml_ops' | 'vector_l2_ops' | 'vector_ip_ops' | 'vector_cosine_ops' | 'vector_l1_ops' | 'bit_hamming_ops' | 'bit_jaccard_ops' | 'halfvec_l2_ops' | 'sparsevec_l2_op' | (string & {}); export class IndexBuilderOn { static readonly [entityKind]: string = 'PgIndexBuilderOn'; constructor(private unique: boolean, private name?: string) {} on(...columns: [Partial | SQL, ...Partial[]]): IndexBuilder { return new IndexBuilder( columns.map((it) => { if (is(it, SQL)) { return it; } it = it as ExtraConfigColumn; const clonedIndexedColumn = new IndexedColumn(it.name, !!it.keyAsName, it.columnType!, it.indexConfig!); it.indexConfig = JSON.parse(JSON.stringify(it.defaultConfig)); return clonedIndexedColumn; }), this.unique, false, this.name, ); } onOnly(...columns: [Partial, ...Partial[]]): IndexBuilder { return new IndexBuilder( columns.map((it) => { if (is(it, SQL)) { return it; } it = it as ExtraConfigColumn; const clonedIndexedColumn = new IndexedColumn(it.name, !!it.keyAsName, it.columnType!, it.indexConfig!); it.indexConfig = it.defaultConfig; return clonedIndexedColumn; }), this.unique, true, this.name, ); } /** * Specify what index method to use. Choices are `btree`, `hash`, `gist`, `spgist`, `gin`, `brin`, or user-installed access methods like `bloom`. The default method is `btree. * * If you have the `pg_vector` extension installed in your database, you can use the `hnsw` and `ivfflat` options, which are predefined types. * * **You can always specify any string you want in the method, in case Drizzle doesn't have it natively in its types** * * @param method The name of the index method to be used * @param columns * @returns */ using( method: PgIndexMethod, ...columns: [Partial, ...Partial[]] ): IndexBuilder { return new IndexBuilder( columns.map((it) => { if (is(it, SQL)) { return it; } it = it as ExtraConfigColumn; const clonedIndexedColumn = new IndexedColumn(it.name, !!it.keyAsName, it.columnType!, it.indexConfig!); it.indexConfig = JSON.parse(JSON.stringify(it.defaultConfig)); return clonedIndexedColumn; }), this.unique, true, this.name, method, ); } } export interface AnyIndexBuilder { build(table: PgTable): Index; } // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface IndexBuilder extends AnyIndexBuilder {} export class IndexBuilder implements AnyIndexBuilder { static readonly [entityKind]: string = 'PgIndexBuilder'; /** @internal */ config: IndexConfig; constructor( columns: Partial[], unique: boolean, only: boolean, name?: string, method: string = 'btree', ) { this.config = { name, columns, unique, only, method, }; } concurrently(): this { this.config.concurrently = true; return this; } with(obj: Record): this { this.config.with = obj; return this; } where(condition: SQL): this { this.config.where = condition; return this; } /** @internal */ build(table: PgTable): Index { return new Index(this.config, table); } } export class Index { static readonly [entityKind]: string = 'PgIndex'; readonly config: IndexConfig & { table: PgTable }; constructor(config: IndexConfig, table: PgTable) { this.config = { ...config, table }; } } export type GetColumnsTableName = TColumns extends PgColumn ? TColumns['_']['name'] : TColumns extends PgColumn[] ? TColumns[number]['_']['name'] : never; export function index(name?: string): IndexBuilderOn { return new IndexBuilderOn(false, name); } export function uniqueIndex(name?: string): IndexBuilderOn { return new IndexBuilderOn(true, name); } ================================================ FILE: drizzle-orm/src/pg-core/policies.ts ================================================ import { entityKind } from '~/entity.ts'; import type { SQL } from '~/sql/sql.ts'; import type { PgRole } from './roles.ts'; import type { PgTable } from './table.ts'; export type PgPolicyToOption = | 'public' | 'current_role' | 'current_user' | 'session_user' | (string & {}) | PgPolicyToOption[] | PgRole; export interface PgPolicyConfig { as?: 'permissive' | 'restrictive'; for?: 'all' | 'select' | 'insert' | 'update' | 'delete'; to?: PgPolicyToOption; using?: SQL; withCheck?: SQL; } export class PgPolicy implements PgPolicyConfig { static readonly [entityKind]: string = 'PgPolicy'; readonly as: PgPolicyConfig['as']; readonly for: PgPolicyConfig['for']; readonly to: PgPolicyConfig['to']; readonly using: PgPolicyConfig['using']; readonly withCheck: PgPolicyConfig['withCheck']; /** @internal */ _linkedTable?: PgTable; constructor( readonly name: string, config?: PgPolicyConfig, ) { if (config) { this.as = config.as; this.for = config.for; this.to = config.to; this.using = config.using; this.withCheck = config.withCheck; } } link(table: PgTable): this { this._linkedTable = table; return this; } } export function pgPolicy(name: string, config?: PgPolicyConfig) { return new PgPolicy(name, config); } ================================================ FILE: drizzle-orm/src/pg-core/primary-keys.ts ================================================ import { entityKind } from '~/entity.ts'; import type { AnyPgColumn, PgColumn } from './columns/index.ts'; import { PgTable } from './table.ts'; export function primaryKey< TTableName extends string, TColumn extends AnyPgColumn<{ tableName: TTableName }>, TColumns extends AnyPgColumn<{ tableName: TTableName }>[], >(config: { name?: string; columns: [TColumn, ...TColumns] }): PrimaryKeyBuilder; /** * @deprecated: Please use primaryKey({ columns: [] }) instead of this function * @param columns */ export function primaryKey< TTableName extends string, TColumns extends AnyPgColumn<{ tableName: TTableName }>[], >(...columns: TColumns): PrimaryKeyBuilder; export function primaryKey(...config: any) { if (config[0].columns) { return new PrimaryKeyBuilder(config[0].columns, config[0].name); } return new PrimaryKeyBuilder(config); } export class PrimaryKeyBuilder { static readonly [entityKind]: string = 'PgPrimaryKeyBuilder'; /** @internal */ columns: PgColumn[]; /** @internal */ name?: string; constructor( columns: PgColumn[], name?: string, ) { this.columns = columns; this.name = name; } /** @internal */ build(table: PgTable): PrimaryKey { return new PrimaryKey(table, this.columns, this.name); } } export class PrimaryKey { static readonly [entityKind]: string = 'PgPrimaryKey'; readonly columns: AnyPgColumn<{}>[]; readonly name?: string; constructor(readonly table: PgTable, columns: AnyPgColumn<{}>[], name?: string) { this.columns = columns; this.name = name; } getName(): string { return this.name ?? `${this.table[PgTable.Symbol.Name]}_${this.columns.map((column) => column.name).join('_')}_pk`; } } ================================================ FILE: drizzle-orm/src/pg-core/query-builders/count.ts ================================================ import { entityKind } from '~/entity.ts'; import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { NeonAuthToken } from '~/utils.ts'; import type { PgSession } from '../session.ts'; import type { PgTable } from '../table.ts'; export class PgCountBuilder< TSession extends PgSession, > extends SQL implements Promise, SQLWrapper { private sql: SQL; private token?: NeonAuthToken; static override readonly [entityKind] = 'PgCountBuilder'; [Symbol.toStringTag] = 'PgCountBuilder'; private session: TSession; private static buildEmbeddedCount( source: PgTable | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`(select count(*) from ${source}${sql.raw(' where ').if(filters)}${filters})`; } private static buildCount( source: PgTable | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`select count(*) as count from ${source}${sql.raw(' where ').if(filters)}${filters};`; } constructor( readonly params: { source: PgTable | SQL | SQLWrapper; filters?: SQL; session: TSession; }, ) { super(PgCountBuilder.buildEmbeddedCount(params.source, params.filters).queryChunks); this.mapWith(Number); this.session = params.session; this.sql = PgCountBuilder.buildCount( params.source, params.filters, ); } /** @intrnal */ setToken(token?: NeonAuthToken) { this.token = token; return this; } then( onfulfilled?: ((value: number) => TResult1 | PromiseLike) | null | undefined, onrejected?: ((reason: any) => TResult2 | PromiseLike) | null | undefined, ): Promise { return Promise.resolve(this.session.count(this.sql, this.token)) .then( onfulfilled, onrejected, ); } catch( onRejected?: ((reason: any) => any) | null | undefined, ): Promise { return this.then(undefined, onRejected); } finally(onFinally?: (() => void) | null | undefined): Promise { return this.then( (value) => { onFinally?.(); return value; }, (reason) => { onFinally?.(); throw reason; }, ); } } ================================================ FILE: drizzle-orm/src/pg-core/query-builders/delete.ts ================================================ import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import type { PgPreparedQuery, PgQueryResultHKT, PgQueryResultKind, PgSession, PreparedQueryConfig, } from '~/pg-core/session.ts'; import type { PgTable } from '~/pg-core/table.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import { getTableName, Table } from '~/table.ts'; import { tracer } from '~/tracing.ts'; import { type NeonAuthToken, orderSelectedFields } from '~/utils.ts'; import type { PgColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import type { SelectedFieldsFlat, SelectedFieldsOrdered } from './select.types.ts'; export type PgDeleteWithout< T extends AnyPgDeleteBase, TDynamic extends boolean, K extends keyof T & string, > = TDynamic extends true ? T : Omit< PgDeleteBase< T['_']['table'], T['_']['queryResult'], T['_']['selectedFields'], T['_']['returning'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type PgDelete< TTable extends PgTable = PgTable, TQueryResult extends PgQueryResultHKT = PgQueryResultHKT, TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = Record | undefined, > = PgDeleteBase; export interface PgDeleteConfig { where?: SQL | undefined; table: PgTable; returningFields?: SelectedFieldsFlat; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } export type PgDeleteReturningAll< T extends AnyPgDeleteBase, TDynamic extends boolean, > = PgDeleteWithout< PgDeleteBase< T['_']['table'], T['_']['queryResult'], T['_']['table']['_']['columns'], T['_']['table']['$inferSelect'], TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type PgDeleteReturning< T extends AnyPgDeleteBase, TDynamic extends boolean, TSelectedFields extends SelectedFieldsFlat, > = PgDeleteWithout< PgDeleteBase< T['_']['table'], T['_']['queryResult'], TSelectedFields, SelectResultFields, TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type PgDeletePrepare = PgPreparedQuery< PreparedQueryConfig & { execute: T['_']['returning'] extends undefined ? PgQueryResultKind : T['_']['returning'][]; } >; export type PgDeleteDynamic = PgDelete< T['_']['table'], T['_']['queryResult'], T['_']['selectedFields'], T['_']['returning'] >; export type AnyPgDeleteBase = PgDeleteBase; export interface PgDeleteBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends TypedQueryBuilder< TSelectedFields, TReturning extends undefined ? PgQueryResultKind : TReturning[] >, QueryPromise : TReturning[]>, RunnableQuery : TReturning[], 'pg'>, SQLWrapper { readonly _: { readonly dialect: 'pg'; readonly table: TTable; readonly queryResult: TQueryResult; readonly selectedFields: TSelectedFields; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TReturning extends undefined ? PgQueryResultKind : TReturning[]; }; } export class PgDeleteBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]> implements TypedQueryBuilder< TSelectedFields, TReturning extends undefined ? PgQueryResultKind : TReturning[] >, RunnableQuery : TReturning[], 'pg'>, SQLWrapper { static override readonly [entityKind]: string = 'PgDelete'; private config: PgDeleteConfig; protected cacheConfig?: WithCacheConfig; constructor( table: TTable, private session: PgSession, private dialect: PgDialect, withList?: Subquery[], ) { super(); this.config = { table, withList }; } /** * Adds a `where` clause to the query. * * Calling this method will delete only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param where the `where` clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be deleted. * * ```ts * // Delete all cars with green color * await db.delete(cars).where(eq(cars.color, 'green')); * // or * await db.delete(cars).where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Delete all BMW cars with a green color * await db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Delete all cars with the green or blue color * await db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where(where: SQL | undefined): PgDeleteWithout { this.config.where = where; return this as any; } /** * Adds a `returning` clause to the query. * * Calling this method will return the specified fields of the deleted rows. If no fields are specified, all fields will be returned. * * See docs: {@link https://orm.drizzle.team/docs/delete#delete-with-return} * * @example * ```ts * // Delete all cars with the green color and return all fields * const deletedCars: Car[] = await db.delete(cars) * .where(eq(cars.color, 'green')) * .returning(); * * // Delete all cars with the green color and return only their id and brand fields * const deletedCarsIdsAndBrands: { id: number, brand: string }[] = await db.delete(cars) * .where(eq(cars.color, 'green')) * .returning({ id: cars.id, brand: cars.brand }); * ``` */ returning(): PgDeleteReturningAll; returning( fields: TSelectedFields, ): PgDeleteReturning; returning( fields: SelectedFieldsFlat = this.config.table[Table.Symbol.Columns], ): PgDeleteReturning { this.config.returningFields = fields; this.config.returning = orderSelectedFields(fields); return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildDeleteQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } /** @internal */ _prepare(name?: string): PgDeletePrepare { return tracer.startActiveSpan('drizzle.prepareQuery', () => { return this.session.prepareQuery< PreparedQueryConfig & { execute: TReturning extends undefined ? PgQueryResultKind : TReturning[]; } >(this.dialect.sqlToQuery(this.getSQL()), this.config.returning, name, true, undefined, { type: 'delete', tables: extractUsedTable(this.config.table), }, this.cacheConfig); }); } prepare(name: string): PgDeletePrepare { return this._prepare(name); } private authToken?: NeonAuthToken; /** @internal */ setToken(token?: NeonAuthToken) { this.authToken = token; return this; } override execute: ReturnType['execute'] = (placeholderValues) => { return tracer.startActiveSpan('drizzle.operation', () => { return this._prepare().execute(placeholderValues, this.authToken); }); }; /** @internal */ getSelectedFields(): this['_']['selectedFields'] { return ( this.config.returningFields ? new Proxy( this.config.returningFields, new SelectionProxyHandler({ alias: getTableName(this.config.table), sqlAliasedBehavior: 'alias', sqlBehavior: 'error', }), ) : undefined ) as this['_']['selectedFields']; } $dynamic(): PgDeleteDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/pg-core/query-builders/index.ts ================================================ export * from './delete.ts'; export * from './insert.ts'; export * from './query-builder.ts'; export * from './refresh-materialized-view.ts'; export * from './select.ts'; export * from './select.types.ts'; export * from './update.ts'; ================================================ FILE: drizzle-orm/src/pg-core/query-builders/insert.ts ================================================ import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import type { IndexColumn } from '~/pg-core/indexes.ts'; import type { PgPreparedQuery, PgQueryResultHKT, PgQueryResultKind, PgSession, PreparedQueryConfig, } from '~/pg-core/session.ts'; import type { PgTable, TableConfig } from '~/pg-core/table.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; import { Param, SQL, sql } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import type { InferInsertModel } from '~/table.ts'; import { Columns, getTableName, Table } from '~/table.ts'; import { tracer } from '~/tracing.ts'; import { haveSameKeys, mapUpdateSet, type NeonAuthToken, orderSelectedFields } from '~/utils.ts'; import type { AnyPgColumn, PgColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import { QueryBuilder } from './query-builder.ts'; import type { SelectedFieldsFlat, SelectedFieldsOrdered } from './select.types.ts'; import type { PgUpdateSetSource } from './update.ts'; export interface PgInsertConfig { table: TTable; values: Record[] | PgInsertSelectQueryBuilder | SQL; withList?: Subquery[]; onConflict?: SQL; returningFields?: SelectedFieldsFlat; returning?: SelectedFieldsOrdered; select?: boolean; overridingSystemValue_?: boolean; } export type PgInsertValue, OverrideT extends boolean = false> = & { [Key in keyof InferInsertModel]: | InferInsertModel[Key] | SQL | Placeholder; } & {}; export type PgInsertSelectQueryBuilder = TypedQueryBuilder< { [K in keyof TTable['$inferInsert']]: AnyPgColumn | SQL | SQL.Aliased | TTable['$inferInsert'][K] } >; export class PgInsertBuilder< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, OverrideT extends boolean = false, > { static readonly [entityKind]: string = 'PgInsertBuilder'; constructor( private table: TTable, private session: PgSession, private dialect: PgDialect, private withList?: Subquery[], private overridingSystemValue_?: boolean, ) {} private authToken?: NeonAuthToken; /** @internal */ setToken(token?: NeonAuthToken) { this.authToken = token; return this; } overridingSystemValue(): Omit, 'overridingSystemValue'> { this.overridingSystemValue_ = true; return this as any; } values(value: PgInsertValue): PgInsertBase; values(values: PgInsertValue[]): PgInsertBase; values( values: PgInsertValue | PgInsertValue[], ): PgInsertBase { values = Array.isArray(values) ? values : [values]; if (values.length === 0) { throw new Error('values() must be called with at least one value'); } const mappedValues = values.map((entry) => { const result: Record = {}; const cols = this.table[Table.Symbol.Columns]; for (const colKey of Object.keys(entry)) { const colValue = entry[colKey as keyof typeof entry]; result[colKey] = is(colValue, SQL) ? colValue : new Param(colValue, cols[colKey]); } return result; }); return new PgInsertBase( this.table, mappedValues, this.session, this.dialect, this.withList, false, this.overridingSystemValue_, ).setToken(this.authToken) as any; } select(selectQuery: (qb: QueryBuilder) => PgInsertSelectQueryBuilder): PgInsertBase; select(selectQuery: (qb: QueryBuilder) => SQL): PgInsertBase; select(selectQuery: SQL): PgInsertBase; select(selectQuery: PgInsertSelectQueryBuilder): PgInsertBase; select( selectQuery: | SQL | PgInsertSelectQueryBuilder | ((qb: QueryBuilder) => PgInsertSelectQueryBuilder | SQL), ): PgInsertBase { const select = typeof selectQuery === 'function' ? selectQuery(new QueryBuilder()) : selectQuery; if ( !is(select, SQL) && !haveSameKeys(this.table[Columns], select._.selectedFields) ) { throw new Error( 'Insert select error: selected fields are not the same or are in a different order compared to the table definition', ); } return new PgInsertBase(this.table, select, this.session, this.dialect, this.withList, true); } } export type PgInsertWithout = TDynamic extends true ? T : Omit< PgInsertBase< T['_']['table'], T['_']['queryResult'], T['_']['selectedFields'], T['_']['returning'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type PgInsertReturning< T extends AnyPgInsert, TDynamic extends boolean, TSelectedFields extends SelectedFieldsFlat, > = PgInsertBase< T['_']['table'], T['_']['queryResult'], TSelectedFields, SelectResultFields, TDynamic, T['_']['excludedMethods'] >; export type PgInsertReturningAll = PgInsertBase< T['_']['table'], T['_']['queryResult'], T['_']['table']['_']['columns'], T['_']['table']['$inferSelect'], TDynamic, T['_']['excludedMethods'] >; export interface PgInsertOnConflictDoUpdateConfig { target: IndexColumn | IndexColumn[]; /** @deprecated use either `targetWhere` or `setWhere` */ where?: SQL; // TODO: add tests for targetWhere and setWhere targetWhere?: SQL; setWhere?: SQL; set: PgUpdateSetSource; } export type PgInsertPrepare = PgPreparedQuery< PreparedQueryConfig & { execute: T['_']['returning'] extends undefined ? PgQueryResultKind : T['_']['returning'][]; } >; export type PgInsertDynamic = PgInsert< T['_']['table'], T['_']['queryResult'], T['_']['returning'] >; export type AnyPgInsert = PgInsertBase; export type PgInsert< TTable extends PgTable = PgTable, TQueryResult extends PgQueryResultHKT = PgQueryResultHKT, TSelectedFields extends ColumnsSelection | undefined = ColumnsSelection | undefined, TReturning extends Record | undefined = Record | undefined, > = PgInsertBase; export interface PgInsertBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends TypedQueryBuilder< TSelectedFields, TReturning extends undefined ? PgQueryResultKind : TReturning[] >, QueryPromise : TReturning[]>, RunnableQuery : TReturning[], 'pg'>, SQLWrapper { readonly _: { readonly dialect: 'pg'; readonly table: TTable; readonly queryResult: TQueryResult; readonly selectedFields: TSelectedFields; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TReturning extends undefined ? PgQueryResultKind : TReturning[]; }; } export class PgInsertBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]> implements TypedQueryBuilder< TSelectedFields, TReturning extends undefined ? PgQueryResultKind : TReturning[] >, RunnableQuery : TReturning[], 'pg'>, SQLWrapper { static override readonly [entityKind]: string = 'PgInsert'; private config: PgInsertConfig; protected cacheConfig?: WithCacheConfig; constructor( table: TTable, values: PgInsertConfig['values'], private session: PgSession, private dialect: PgDialect, withList?: Subquery[], select?: boolean, overridingSystemValue_?: boolean, ) { super(); this.config = { table, values: values as any, withList, select, overridingSystemValue_ }; } /** * Adds a `returning` clause to the query. * * Calling this method will return the specified fields of the inserted rows. If no fields are specified, all fields will be returned. * * See docs: {@link https://orm.drizzle.team/docs/insert#insert-returning} * * @example * ```ts * // Insert one row and return all fields * const insertedCar: Car[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning(); * * // Insert one row and return only the id * const insertedCarId: { id: number }[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning({ id: cars.id }); * ``` */ returning(): PgInsertWithout, TDynamic, 'returning'>; returning( fields: TSelectedFields, ): PgInsertWithout, TDynamic, 'returning'>; returning( fields: SelectedFieldsFlat = this.config.table[Table.Symbol.Columns], ): PgInsertWithout { this.config.returningFields = fields; this.config.returning = orderSelectedFields(fields); return this as any; } /** * Adds an `on conflict do nothing` clause to the query. * * Calling this method simply avoids inserting a row as its alternative action. * * See docs: {@link https://orm.drizzle.team/docs/insert#on-conflict-do-nothing} * * @param config The `target` and `where` clauses. * * @example * ```ts * // Insert one row and cancel the insert if there's a conflict * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoNothing(); * * // Explicitly specify conflict target * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoNothing({ target: cars.id }); * ``` */ onConflictDoNothing( config: { target?: IndexColumn | IndexColumn[]; where?: SQL } = {}, ): PgInsertWithout { if (config.target === undefined) { this.config.onConflict = sql`do nothing`; } else { let targetColumn = ''; targetColumn = Array.isArray(config.target) ? config.target.map((it) => this.dialect.escapeName(this.dialect.casing.getColumnCasing(it))).join(',') : this.dialect.escapeName(this.dialect.casing.getColumnCasing(config.target)); const whereSql = config.where ? sql` where ${config.where}` : undefined; this.config.onConflict = sql`(${sql.raw(targetColumn)})${whereSql} do nothing`; } return this as any; } /** * Adds an `on conflict do update` clause to the query. * * Calling this method will update the existing row that conflicts with the row proposed for insertion as its alternative action. * * See docs: {@link https://orm.drizzle.team/docs/insert#upserts-and-conflicts} * * @param config The `target`, `set` and `where` clauses. * * @example * ```ts * // Update the row if there's a conflict * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoUpdate({ * target: cars.id, * set: { brand: 'Porsche' } * }); * * // Upsert with 'where' clause * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoUpdate({ * target: cars.id, * set: { brand: 'newBMW' }, * targetWhere: sql`${cars.createdAt} > '2023-01-01'::date`, * }); * ``` */ onConflictDoUpdate( config: PgInsertOnConflictDoUpdateConfig, ): PgInsertWithout { if (config.where && (config.targetWhere || config.setWhere)) { throw new Error( 'You cannot use both "where" and "targetWhere"/"setWhere" at the same time - "where" is deprecated, use "targetWhere" or "setWhere" instead.', ); } const whereSql = config.where ? sql` where ${config.where}` : undefined; const targetWhereSql = config.targetWhere ? sql` where ${config.targetWhere}` : undefined; const setWhereSql = config.setWhere ? sql` where ${config.setWhere}` : undefined; const setSql = this.dialect.buildUpdateSet(this.config.table, mapUpdateSet(this.config.table, config.set)); let targetColumn = ''; targetColumn = Array.isArray(config.target) ? config.target.map((it) => this.dialect.escapeName(this.dialect.casing.getColumnCasing(it))).join(',') : this.dialect.escapeName(this.dialect.casing.getColumnCasing(config.target)); this.config.onConflict = sql`(${ sql.raw(targetColumn) })${targetWhereSql} do update set ${setSql}${whereSql}${setWhereSql}`; return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildInsertQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } /** @internal */ _prepare(name?: string): PgInsertPrepare { return tracer.startActiveSpan('drizzle.prepareQuery', () => { return this.session.prepareQuery< PreparedQueryConfig & { execute: TReturning extends undefined ? PgQueryResultKind : TReturning[]; } >(this.dialect.sqlToQuery(this.getSQL()), this.config.returning, name, true, undefined, { type: 'insert', tables: extractUsedTable(this.config.table), }, this.cacheConfig); }); } prepare(name: string): PgInsertPrepare { return this._prepare(name); } private authToken?: NeonAuthToken; /** @internal */ setToken(token?: NeonAuthToken) { this.authToken = token; return this; } override execute: ReturnType['execute'] = (placeholderValues) => { return tracer.startActiveSpan('drizzle.operation', () => { return this._prepare().execute(placeholderValues, this.authToken); }); }; /** @internal */ getSelectedFields(): this['_']['selectedFields'] { return ( this.config.returningFields ? new Proxy( this.config.returningFields, new SelectionProxyHandler({ alias: getTableName(this.config.table), sqlAliasedBehavior: 'alias', sqlBehavior: 'error', }), ) : undefined ) as this['_']['selectedFields']; } $dynamic(): PgInsertDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/pg-core/query-builders/query-builder.ts ================================================ import { entityKind, is } from '~/entity.ts'; import type { PgDialectConfig } from '~/pg-core/dialect.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, SQL, SQLWrapper } from '~/sql/sql.ts'; import { WithSubquery } from '~/subquery.ts'; import type { PgColumn } from '../columns/index.ts'; import type { WithBuilder } from '../subquery.ts'; import { PgSelectBuilder } from './select.ts'; import type { SelectedFields } from './select.types.ts'; export class QueryBuilder { static readonly [entityKind]: string = 'PgQueryBuilder'; private dialect: PgDialect | undefined; private dialectConfig: PgDialectConfig | undefined; constructor(dialect?: PgDialect | PgDialectConfig) { this.dialect = is(dialect, PgDialect) ? dialect : undefined; this.dialectConfig = is(dialect, PgDialect) ? undefined : dialect; } $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const queryBuilder = this; const as = ( qb: | TypedQueryBuilder | SQL | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), ) => { if (typeof qb === 'function') { qb = qb(queryBuilder); } return new Proxy( new WithSubquery( qb.getSQL(), selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, alias, true, ), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as any; }; return { as }; }; with(...queries: WithSubquery[]) { const self = this; function select(): PgSelectBuilder; function select(fields: TSelection): PgSelectBuilder; function select( fields?: TSelection, ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), withList: queries, }); } function selectDistinct(): PgSelectBuilder; function selectDistinct(fields: TSelection): PgSelectBuilder; function selectDistinct( fields?: TSelection, ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), distinct: true, }); } function selectDistinctOn(on: (PgColumn | SQLWrapper)[]): PgSelectBuilder; function selectDistinctOn( on: (PgColumn | SQLWrapper)[], fields: TSelection, ): PgSelectBuilder; function selectDistinctOn( on: (PgColumn | SQLWrapper)[], fields?: TSelection, ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), distinct: { on }, }); } return { select, selectDistinct, selectDistinctOn }; } select(): PgSelectBuilder; select(fields: TSelection): PgSelectBuilder; select(fields?: TSelection): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect(), }); } selectDistinct(): PgSelectBuilder; selectDistinct(fields: TSelection): PgSelectBuilder; selectDistinct(fields?: TSelection): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect(), distinct: true, }); } selectDistinctOn(on: (PgColumn | SQLWrapper)[]): PgSelectBuilder; selectDistinctOn( on: (PgColumn | SQLWrapper)[], fields: TSelection, ): PgSelectBuilder; selectDistinctOn( on: (PgColumn | SQLWrapper)[], fields?: TSelection, ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect(), distinct: { on }, }); } // Lazy load dialect to avoid circular dependency private getDialect() { if (!this.dialect) { this.dialect = new PgDialect(this.dialectConfig); } return this.dialect; } } ================================================ FILE: drizzle-orm/src/pg-core/query-builders/query.ts ================================================ import { entityKind } from '~/entity.ts'; import { QueryPromise } from '~/query-promise.ts'; import { type BuildQueryResult, type BuildRelationalQueryResult, type DBQueryConfig, mapRelationalRow, type TableRelationalConfig, type TablesRelationalConfig, } from '~/relations.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { Query, QueryWithTypings, SQL, SQLWrapper } from '~/sql/sql.ts'; import { tracer } from '~/tracing.ts'; import type { KnownKeysOnly, NeonAuthToken } from '~/utils.ts'; import type { PgDialect } from '../dialect.ts'; import type { PgPreparedQuery, PgSession, PreparedQueryConfig } from '../session.ts'; import type { PgTable } from '../table.ts'; export class RelationalQueryBuilder { static readonly [entityKind]: string = 'PgRelationalQueryBuilder'; constructor( private fullSchema: Record, private schema: TSchema, private tableNamesMap: Record, private table: PgTable, private tableConfig: TableRelationalConfig, private dialect: PgDialect, private session: PgSession, ) {} findMany>( config?: KnownKeysOnly>, ): PgRelationalQuery[]> { return new PgRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? (config as DBQueryConfig<'many', true>) : {}, 'many', ); } findFirst, 'limit'>>( config?: KnownKeysOnly, 'limit'>>, ): PgRelationalQuery | undefined> { return new PgRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? { ...(config as DBQueryConfig<'many', true> | undefined), limit: 1 } : { limit: 1 }, 'first', ); } } export class PgRelationalQuery extends QueryPromise implements RunnableQuery, SQLWrapper { static override readonly [entityKind]: string = 'PgRelationalQuery'; declare readonly _: { readonly dialect: 'pg'; readonly result: TResult; }; constructor( private fullSchema: Record, private schema: TablesRelationalConfig, private tableNamesMap: Record, private table: PgTable, private tableConfig: TableRelationalConfig, private dialect: PgDialect, private session: PgSession, private config: DBQueryConfig<'many', true> | true, private mode: 'many' | 'first', ) { super(); } /** @internal */ _prepare(name?: string): PgPreparedQuery { return tracer.startActiveSpan('drizzle.prepareQuery', () => { const { query, builtQuery } = this._toSQL(); return this.session.prepareQuery( builtQuery, undefined, name, true, (rawRows, mapColumnValue) => { const rows = rawRows.map((row) => mapRelationalRow(this.schema, this.tableConfig, row, query.selection, mapColumnValue) ); if (this.mode === 'first') { return rows[0] as TResult; } return rows as TResult; }, ); }); } prepare(name: string): PgPreparedQuery { return this._prepare(name); } private _getQuery() { return this.dialect.buildRelationalQueryWithoutPK({ fullSchema: this.fullSchema, schema: this.schema, tableNamesMap: this.tableNamesMap, table: this.table, tableConfig: this.tableConfig, queryConfig: this.config, tableAlias: this.tableConfig.tsName, }); } /** @internal */ getSQL(): SQL { return this._getQuery().sql as SQL; } private _toSQL(): { query: BuildRelationalQueryResult; builtQuery: QueryWithTypings } { const query = this._getQuery(); const builtQuery = this.dialect.sqlToQuery(query.sql as SQL); return { query, builtQuery }; } toSQL(): Query { return this._toSQL().builtQuery; } private authToken?: NeonAuthToken; /** @internal */ setToken(token?: NeonAuthToken) { this.authToken = token; return this; } override execute(): Promise { return tracer.startActiveSpan('drizzle.operation', () => { return this._prepare().execute(undefined, this.authToken); }); } } ================================================ FILE: drizzle-orm/src/pg-core/query-builders/raw.ts ================================================ import { entityKind } from '~/entity.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { PreparedQuery } from '~/session.ts'; import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; export interface PgRaw extends QueryPromise, RunnableQuery, SQLWrapper {} export class PgRaw extends QueryPromise implements RunnableQuery, SQLWrapper, PreparedQuery { static override readonly [entityKind]: string = 'PgRaw'; declare readonly _: { readonly dialect: 'pg'; readonly result: TResult; }; constructor( public execute: () => Promise, private sql: SQL, private query: Query, private mapBatchResult: (result: unknown) => unknown, ) { super(); } /** @internal */ getSQL() { return this.sql; } getQuery() { return this.query; } mapResult(result: unknown, isFromBatch?: boolean) { return isFromBatch ? this.mapBatchResult(result) : result; } _prepare(): PreparedQuery { return this; } /** @internal */ isResponseInArrayMode() { return false; } } ================================================ FILE: drizzle-orm/src/pg-core/query-builders/refresh-materialized-view.ts ================================================ import { entityKind } from '~/entity.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import type { PgPreparedQuery, PgQueryResultHKT, PgQueryResultKind, PgSession, PreparedQueryConfig, } from '~/pg-core/session.ts'; import type { PgMaterializedView } from '~/pg-core/view.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import { tracer } from '~/tracing.ts'; import type { NeonAuthToken } from '~/utils'; // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface PgRefreshMaterializedView extends QueryPromise>, RunnableQuery, 'pg'>, SQLWrapper { readonly _: { readonly dialect: 'pg'; readonly result: PgQueryResultKind; }; } export class PgRefreshMaterializedView extends QueryPromise> implements RunnableQuery, 'pg'>, SQLWrapper { static override readonly [entityKind]: string = 'PgRefreshMaterializedView'; private config: { view: PgMaterializedView; concurrently?: boolean; withNoData?: boolean; }; constructor( view: PgMaterializedView, private session: PgSession, private dialect: PgDialect, ) { super(); this.config = { view }; } concurrently(): this { if (this.config.withNoData !== undefined) { throw new Error('Cannot use concurrently and withNoData together'); } this.config.concurrently = true; return this; } withNoData(): this { if (this.config.concurrently !== undefined) { throw new Error('Cannot use concurrently and withNoData together'); } this.config.withNoData = true; return this; } /** @internal */ getSQL(): SQL { return this.dialect.buildRefreshMaterializedViewQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } /** @internal */ _prepare(name?: string): PgPreparedQuery< PreparedQueryConfig & { execute: PgQueryResultKind; } > { return tracer.startActiveSpan('drizzle.prepareQuery', () => { return this.session.prepareQuery(this.dialect.sqlToQuery(this.getSQL()), undefined, name, true); }); } prepare(name: string): PgPreparedQuery< PreparedQueryConfig & { execute: PgQueryResultKind; } > { return this._prepare(name); } private authToken?: NeonAuthToken; /** @internal */ setToken(token: NeonAuthToken) { this.authToken = token; return this; } execute: ReturnType['execute'] = (placeholderValues) => { return tracer.startActiveSpan('drizzle.operation', () => { return this._prepare().execute(placeholderValues, this.authToken); }); }; } ================================================ FILE: drizzle-orm/src/pg-core/query-builders/select.ts ================================================ import type { CacheConfig, WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import type { PgColumn } from '~/pg-core/columns/index.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import type { PgSession, PreparedQueryConfig } from '~/pg-core/session.ts'; import type { SubqueryWithSelection } from '~/pg-core/subquery.ts'; import type { PgTable } from '~/pg-core/table.ts'; import { PgViewBase } from '~/pg-core/view-base.ts'; import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { BuildSubquerySelection, GetSelectTableName, GetSelectTableSelection, JoinNullability, JoinType, SelectMode, SelectResult, SetOperator, } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import { SQL, View } from '~/sql/sql.ts'; import type { ColumnsSelection, Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { tracer } from '~/tracing.ts'; import { applyMixins, type DrizzleTypeError, getTableColumns, getTableLikeName, haveSameKeys, type NeonAuthToken, type ValueOrArray, } from '~/utils.ts'; import { orderSelectedFields } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import { extractUsedTable } from '../utils.ts'; import type { AnyPgSelect, CreatePgSelectFromBuilderMode, GetPgSetOperators, LockConfig, LockStrength, PgCreateSetOperatorFn, PgSelectConfig, PgSelectCrossJoinFn, PgSelectDynamic, PgSelectHKT, PgSelectHKTBase, PgSelectJoinFn, PgSelectPrepare, PgSelectWithout, PgSetOperatorExcludedMethods, PgSetOperatorWithResult, SelectedFields, SetOperatorRightSelect, TableLikeHasEmptySelection, } from './select.types.ts'; export class PgSelectBuilder< TSelection extends SelectedFields | undefined, TBuilderMode extends 'db' | 'qb' = 'db', > { static readonly [entityKind]: string = 'PgSelectBuilder'; private fields: TSelection; private session: PgSession | undefined; private dialect: PgDialect; private withList: Subquery[] = []; private distinct: boolean | { on: (PgColumn | SQLWrapper)[]; } | undefined; constructor( config: { fields: TSelection; session: PgSession | undefined; dialect: PgDialect; withList?: Subquery[]; distinct?: boolean | { on: (PgColumn | SQLWrapper)[]; }; }, ) { this.fields = config.fields; this.session = config.session; this.dialect = config.dialect; if (config.withList) { this.withList = config.withList; } this.distinct = config.distinct; } private authToken?: NeonAuthToken; /** @internal */ setToken(token?: NeonAuthToken) { this.authToken = token; return this; } /** * Specify the table, subquery, or other target that you're * building a select query against. * * {@link https://www.postgresql.org/docs/current/sql-select.html#SQL-FROM | Postgres from documentation} */ from( source: TableLikeHasEmptySelection extends true ? DrizzleTypeError< "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" > : TFrom, ): CreatePgSelectFromBuilderMode< TBuilderMode, GetSelectTableName, TSelection extends undefined ? GetSelectTableSelection : TSelection, TSelection extends undefined ? 'single' : 'partial' > { const isPartialSelect = !!this.fields; const src = source as TFrom; let fields: SelectedFields; if (this.fields) { fields = this.fields; } else if (is(src, Subquery)) { // This is required to use the proxy handler to get the correct field values from the subquery fields = Object.fromEntries( Object.keys(src._.selectedFields).map(( key, ) => [key, src[key as unknown as keyof typeof src] as unknown as SelectedFields[string]]), ); } else if (is(src, PgViewBase)) { fields = src[ViewBaseConfig].selectedFields as SelectedFields; } else if (is(src, SQL)) { fields = {}; } else { fields = getTableColumns(src); } return (new PgSelectBase({ table: src, fields, isPartialSelect, session: this.session, dialect: this.dialect, withList: this.withList, distinct: this.distinct, }).setToken(this.authToken)) as any; } } export abstract class PgSelectQueryBuilderBase< THKT extends PgSelectHKTBase, TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends TypedQueryBuilder { static override readonly [entityKind]: string = 'PgSelectQueryBuilder'; override readonly _: { readonly dialect: 'pg'; readonly hkt: THKT; readonly tableName: TTableName; readonly selection: TSelection; readonly selectMode: TSelectMode; readonly nullabilityMap: TNullabilityMap; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TResult; readonly selectedFields: TSelectedFields; readonly config: PgSelectConfig; }; protected config: PgSelectConfig; protected joinsNotNullableMap: Record; protected tableName: string | undefined; private isPartialSelect: boolean; protected session: PgSession | undefined; protected dialect: PgDialect; protected cacheConfig?: WithCacheConfig = undefined; protected usedTables: Set = new Set(); constructor( { table, fields, isPartialSelect, session, dialect, withList, distinct }: { table: PgSelectConfig['table']; fields: PgSelectConfig['fields']; isPartialSelect: boolean; session: PgSession | undefined; dialect: PgDialect; withList: Subquery[]; distinct: boolean | { on: (PgColumn | SQLWrapper)[]; } | undefined; }, ) { super(); this.config = { withList, table, fields: { ...fields }, distinct, setOperators: [], }; this.isPartialSelect = isPartialSelect; this.session = session; this.dialect = dialect; this._ = { selectedFields: fields as TSelectedFields, config: this.config, } as this['_']; this.tableName = getTableLikeName(table); this.joinsNotNullableMap = typeof this.tableName === 'string' ? { [this.tableName]: true } : {}; for (const item of extractUsedTable(table)) this.usedTables.add(item); } /** @internal */ getUsedTables() { return [...this.usedTables]; } private createJoin< TJoinType extends JoinType, TIsLateral extends (TJoinType extends 'full' | 'right' ? false : boolean), >( joinType: TJoinType, lateral: TIsLateral, ): 'cross' extends TJoinType ? PgSelectCrossJoinFn : PgSelectJoinFn { return (( table: TIsLateral extends true ? Subquery | SQL : PgTable | Subquery | PgViewBase | SQL, on?: ((aliases: TSelection) => SQL | undefined) | SQL | undefined, ) => { const baseTableName = this.tableName; const tableName = getTableLikeName(table); // store all tables used in a query for (const item of extractUsedTable(table)) this.usedTables.add(item); if (typeof tableName === 'string' && this.config.joins?.some((join) => join.alias === tableName)) { throw new Error(`Alias "${tableName}" is already used in this query`); } if (!this.isPartialSelect) { // If this is the first join and this is not a partial select and we're not selecting from raw SQL, "move" the fields from the main table to the nested object if (Object.keys(this.joinsNotNullableMap).length === 1 && typeof baseTableName === 'string') { this.config.fields = { [baseTableName]: this.config.fields, }; } if (typeof tableName === 'string' && !is(table, SQL)) { const selection = is(table, Subquery) ? table._.selectedFields : is(table, View) ? table[ViewBaseConfig].selectedFields : table[Table.Symbol.Columns]; this.config.fields[tableName] = selection; } } if (typeof on === 'function') { on = on( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } if (!this.config.joins) { this.config.joins = []; } this.config.joins.push({ on, table, joinType, alias: tableName, lateral }); if (typeof tableName === 'string') { switch (joinType) { case 'left': { this.joinsNotNullableMap[tableName] = false; break; } case 'right': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = true; break; } case 'cross': case 'inner': { this.joinsNotNullableMap[tableName] = true; break; } case 'full': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = false; break; } } } return this as any; }) as any; } /** * Executes a `left join` operation by adding another table to the current query. * * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#left-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User; pets: Pet | null; }[] = await db.select() * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId)) * ``` */ leftJoin = this.createJoin('left', false); /** * Executes a `left join lateral` operation by adding subquery to the current query. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#left-join-lateral} * * @param table the subquery to join. * @param on the `on` clause. */ leftJoinLateral = this.createJoin('left', true); /** * Executes a `right join` operation by adding another table to the current query. * * Calling this method associates each row of the joined table with the corresponding row from the main table, if a match is found. If no matching row exists, it sets all columns of the main table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#right-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User | null; pets: Pet; }[] = await db.select() * .from(users) * .rightJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number | null; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .rightJoin(pets, eq(users.id, pets.ownerId)) * ``` */ rightJoin = this.createJoin('right', false); /** * Executes an `inner join` operation, creating a new table by combining rows from two tables that have matching values. * * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. * * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() * .from(users) * .innerJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .innerJoin(pets, eq(users.id, pets.ownerId)) * ``` */ innerJoin = this.createJoin('inner', false); /** * Executes an `inner join lateral` operation, creating a new table by combining rows from two queries that have matching values. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. * * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join-lateral} * * @param table the subquery to join. * @param on the `on` clause. */ innerJoinLateral = this.createJoin('inner', true); /** * Executes a `full join` operation by combining rows from two tables into a new table. * * Calling this method retrieves all rows from both main and joined tables, merging rows with matching values and filling in `null` for non-matching columns. * * See docs: {@link https://orm.drizzle.team/docs/joins#full-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User | null; pets: Pet | null; }[] = await db.select() * .from(users) * .fullJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number | null; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .fullJoin(pets, eq(users.id, pets.ownerId)) * ``` */ fullJoin = this.createJoin('full', false); /** * Executes a `cross join` operation by combining rows from two tables into a new table. * * Calling this method retrieves all rows from both main and joined tables, merging all rows from each table. * * See docs: {@link https://orm.drizzle.team/docs/joins#cross-join} * * @param table the table to join. * * @example * * ```ts * // Select all users, each user with every pet * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() * .from(users) * .crossJoin(pets) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .crossJoin(pets) * ``` */ crossJoin = this.createJoin('cross', false); /** * Executes a `cross join lateral` operation by combining rows from two queries into a new table. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method retrieves all rows from both main and joined queries, merging all rows from each query. * * See docs: {@link https://orm.drizzle.team/docs/joins#cross-join-lateral} * * @param table the query to join. */ crossJoinLateral = this.createJoin('cross', true); private createSetOperator( type: SetOperator, isAll: boolean, ): >( rightSelection: | ((setOperators: GetPgSetOperators) => SetOperatorRightSelect) | SetOperatorRightSelect, ) => PgSelectWithout< this, TDynamic, PgSetOperatorExcludedMethods, true > { return (rightSelection) => { const rightSelect = (typeof rightSelection === 'function' ? rightSelection(getPgSetOperators()) : rightSelection) as TypedQueryBuilder< any, TResult >; if (!haveSameKeys(this.getSelectedFields(), rightSelect.getSelectedFields())) { throw new Error( 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', ); } this.config.setOperators.push({ type, isAll, rightSelect }); return this as any; }; } /** * Adds `union` set operator to the query. * * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} * * @example * * ```ts * // Select all unique names from customers and users tables * await db.select({ name: users.name }) * .from(users) * .union( * db.select({ name: customers.name }).from(customers) * ); * // or * import { union } from 'drizzle-orm/pg-core' * * await union( * db.select({ name: users.name }).from(users), * db.select({ name: customers.name }).from(customers) * ); * ``` */ union = this.createSetOperator('union', false); /** * Adds `union all` set operator to the query. * * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} * * @example * * ```ts * // Select all transaction ids from both online and in-store sales * await db.select({ transaction: onlineSales.transactionId }) * .from(onlineSales) * .unionAll( * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * // or * import { unionAll } from 'drizzle-orm/pg-core' * * await unionAll( * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * ``` */ unionAll = this.createSetOperator('union', true); /** * Adds `intersect` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} * * @example * * ```ts * // Select course names that are offered in both departments A and B * await db.select({ courseName: depA.courseName }) * .from(depA) * .intersect( * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * import { intersect } from 'drizzle-orm/pg-core' * * await intersect( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ intersect = this.createSetOperator('intersect', false); /** * Adds `intersect all` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets including all duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect-all} * * @example * * ```ts * // Select all products and quantities that are ordered by both regular and VIP customers * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders) * .intersectAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * // or * import { intersectAll } from 'drizzle-orm/pg-core' * * await intersectAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * ``` */ intersectAll = this.createSetOperator('intersect', true); /** * Adds `except` set operator to the query. * * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} * * @example * * ```ts * // Select all courses offered in department A but not in department B * await db.select({ courseName: depA.courseName }) * .from(depA) * .except( * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * import { except } from 'drizzle-orm/pg-core' * * await except( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ except = this.createSetOperator('except', false); /** * Adds `except all` set operator to the query. * * Calling this method will retrieve all rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except-all} * * @example * * ```ts * // Select all products that are ordered by regular customers but not by VIP customers * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered, * }) * .from(regularCustomerOrders) * .exceptAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered, * }) * .from(vipCustomerOrders) * ); * // or * import { exceptAll } from 'drizzle-orm/pg-core' * * await exceptAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * ``` */ exceptAll = this.createSetOperator('except', true); /** @internal */ addSetOperators(setOperators: PgSelectConfig['setOperators']): PgSelectWithout< this, TDynamic, PgSetOperatorExcludedMethods, true > { this.config.setOperators.push(...setOperators); return this as any; } /** * Adds a `where` clause to the query. * * Calling this method will select only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/select#filtering} * * @param where the `where` clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be selected. * * ```ts * // Select all cars with green color * await db.select().from(cars).where(eq(cars.color, 'green')); * // or * await db.select().from(cars).where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Select all BMW cars with a green color * await db.select().from(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Select all cars with the green or blue color * await db.select().from(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where( where: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, ): PgSelectWithout { if (typeof where === 'function') { where = where( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } this.config.where = where; return this as any; } /** * Adds a `having` clause to the query. * * Calling this method will select only those rows that fulfill a specified condition. It is typically used with aggregate functions to filter the aggregated data based on a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} * * @param having the `having` clause. * * @example * * ```ts * // Select all brands with more than one car * await db.select({ * brand: cars.brand, * count: sql`cast(count(${cars.id}) as int)`, * }) * .from(cars) * .groupBy(cars.brand) * .having(({ count }) => gt(count, 1)); * ``` */ having( having: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, ): PgSelectWithout { if (typeof having === 'function') { having = having( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } this.config.having = having; return this as any; } /** * Adds a `group by` clause to the query. * * Calling this method will group rows that have the same values into summary rows, often used for aggregation purposes. * * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} * * @example * * ```ts * // Group and count people by their last names * await db.select({ * lastName: people.lastName, * count: sql`cast(count(*) as int)` * }) * .from(people) * .groupBy(people.lastName); * ``` */ groupBy( builder: (aliases: this['_']['selection']) => ValueOrArray, ): PgSelectWithout; groupBy(...columns: (PgColumn | SQL | SQL.Aliased)[]): PgSelectWithout; groupBy( ...columns: | [(aliases: this['_']['selection']) => ValueOrArray] | (PgColumn | SQL | SQL.Aliased)[] ): PgSelectWithout { if (typeof columns[0] === 'function') { const groupBy = columns[0]( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as TSelection, ); this.config.groupBy = Array.isArray(groupBy) ? groupBy : [groupBy]; } else { this.config.groupBy = columns as (PgColumn | SQL | SQL.Aliased)[]; } return this as any; } /** * Adds an `order by` clause to the query. * * Calling this method will sort the result-set in ascending or descending order. By default, the sort order is ascending. * * See docs: {@link https://orm.drizzle.team/docs/select#order-by} * * @example * * ``` * // Select cars ordered by year * await db.select().from(cars).orderBy(cars.year); * ``` * * You can specify whether results are in ascending or descending order with the `asc()` and `desc()` operators. * * ```ts * // Select cars ordered by year in descending order * await db.select().from(cars).orderBy(desc(cars.year)); * * // Select cars ordered by year and price * await db.select().from(cars).orderBy(asc(cars.year), desc(cars.price)); * ``` */ orderBy( builder: (aliases: this['_']['selection']) => ValueOrArray, ): PgSelectWithout; orderBy(...columns: (PgColumn | SQL | SQL.Aliased)[]): PgSelectWithout; orderBy( ...columns: | [(aliases: this['_']['selection']) => ValueOrArray] | (PgColumn | SQL | SQL.Aliased)[] ): PgSelectWithout { if (typeof columns[0] === 'function') { const orderBy = columns[0]( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as TSelection, ); const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.orderBy = orderByArray; } else { this.config.orderBy = orderByArray; } } else { const orderByArray = columns as (PgColumn | SQL | SQL.Aliased)[]; if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.orderBy = orderByArray; } else { this.config.orderBy = orderByArray; } } return this as any; } /** * Adds a `limit` clause to the query. * * Calling this method will set the maximum number of rows that will be returned by this query. * * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} * * @param limit the `limit` clause. * * @example * * ```ts * // Get the first 10 people from this query. * await db.select().from(people).limit(10); * ``` */ limit(limit: number | Placeholder): PgSelectWithout { if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.limit = limit; } else { this.config.limit = limit; } return this as any; } /** * Adds an `offset` clause to the query. * * Calling this method will skip a number of rows when returning results from this query. * * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} * * @param offset the `offset` clause. * * @example * * ```ts * // Get the 10th-20th people from this query. * await db.select().from(people).offset(10).limit(10); * ``` */ offset(offset: number | Placeholder): PgSelectWithout { if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.offset = offset; } else { this.config.offset = offset; } return this as any; } /** * Adds a `for` clause to the query. * * Calling this method will specify a lock strength for this query that controls how strictly it acquires exclusive access to the rows being queried. * * See docs: {@link https://www.postgresql.org/docs/current/sql-select.html#SQL-FOR-UPDATE-SHARE} * * @param strength the lock strength. * @param config the lock configuration. */ for(strength: LockStrength, config: LockConfig = {}): PgSelectWithout { this.config.lockingClause = { strength, config }; return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildSelectQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } as( alias: TAlias, ): SubqueryWithSelection { const usedTables: string[] = []; usedTables.push(...extractUsedTable(this.config.table)); if (this.config.joins) { for (const it of this.config.joins) usedTables.push(...extractUsedTable(it.table)); } return new Proxy( new Subquery(this.getSQL(), this.config.fields, alias, false, [...new Set(usedTables)]), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as SubqueryWithSelection; } /** @internal */ override getSelectedFields(): this['_']['selectedFields'] { return new Proxy( this.config.fields, new SelectionProxyHandler({ alias: this.tableName, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as this['_']['selectedFields']; } $dynamic(): PgSelectDynamic { return this; } $withCache(config?: { config?: CacheConfig; tag?: string; autoInvalidate?: boolean } | false) { this.cacheConfig = config === undefined ? { config: {}, enable: true, autoInvalidate: true } : config === false ? { enable: false } : { enable: true, autoInvalidate: true, ...config }; return this; } } export interface PgSelectBase< TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends PgSelectQueryBuilderBase< PgSelectHKT, TTableName, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, QueryPromise, SQLWrapper {} export class PgSelectBase< TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult = SelectResult[], TSelectedFields = BuildSubquerySelection, > extends PgSelectQueryBuilderBase< PgSelectHKT, TTableName, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields > implements RunnableQuery, SQLWrapper { static override readonly [entityKind]: string = 'PgSelect'; /** @internal */ _prepare(name?: string): PgSelectPrepare { const { session, config, dialect, joinsNotNullableMap, authToken, cacheConfig, usedTables } = this; if (!session) { throw new Error('Cannot execute a query on a query builder. Please use a database instance instead.'); } const { fields } = config; return tracer.startActiveSpan('drizzle.prepareQuery', () => { const fieldsList = orderSelectedFields(fields); const query = session.prepareQuery< PreparedQueryConfig & { execute: TResult } >(dialect.sqlToQuery(this.getSQL()), fieldsList, name, true, undefined, { type: 'select', tables: [...usedTables], }, cacheConfig); query.joinsNotNullableMap = joinsNotNullableMap; return query.setToken(authToken); }); } /** * Create a prepared statement for this query. This allows * the database to remember this query for the given session * and call it by name, rather than specifying the full query. * * {@link https://www.postgresql.org/docs/current/sql-prepare.html | Postgres prepare documentation} */ prepare(name: string): PgSelectPrepare { return this._prepare(name); } private authToken?: NeonAuthToken; /** @internal */ setToken(token?: NeonAuthToken) { this.authToken = token; return this; } execute: ReturnType['execute'] = (placeholderValues) => { return tracer.startActiveSpan('drizzle.operation', () => { return this._prepare().execute(placeholderValues, this.authToken); }); }; } applyMixins(PgSelectBase, [QueryPromise]); function createSetOperator(type: SetOperator, isAll: boolean): PgCreateSetOperatorFn { return (leftSelect, rightSelect, ...restSelects) => { const setOperators = [rightSelect, ...restSelects].map((select) => ({ type, isAll, rightSelect: select as AnyPgSelect, })); for (const setOperator of setOperators) { if (!haveSameKeys((leftSelect as any).getSelectedFields(), setOperator.rightSelect.getSelectedFields())) { throw new Error( 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', ); } } return (leftSelect as AnyPgSelect).addSetOperators(setOperators) as any; }; } const getPgSetOperators = () => ({ union, unionAll, intersect, intersectAll, except, exceptAll, }); /** * Adds `union` set operator to the query. * * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} * * @example * * ```ts * // Select all unique names from customers and users tables * import { union } from 'drizzle-orm/pg-core' * * await union( * db.select({ name: users.name }).from(users), * db.select({ name: customers.name }).from(customers) * ); * // or * await db.select({ name: users.name }) * .from(users) * .union( * db.select({ name: customers.name }).from(customers) * ); * ``` */ export const union = createSetOperator('union', false); /** * Adds `union all` set operator to the query. * * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} * * @example * * ```ts * // Select all transaction ids from both online and in-store sales * import { unionAll } from 'drizzle-orm/pg-core' * * await unionAll( * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * // or * await db.select({ transaction: onlineSales.transactionId }) * .from(onlineSales) * .unionAll( * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * ``` */ export const unionAll = createSetOperator('union', true); /** * Adds `intersect` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} * * @example * * ```ts * // Select course names that are offered in both departments A and B * import { intersect } from 'drizzle-orm/pg-core' * * await intersect( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * await db.select({ courseName: depA.courseName }) * .from(depA) * .intersect( * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ export const intersect = createSetOperator('intersect', false); /** * Adds `intersect all` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets including all duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect-all} * * @example * * ```ts * // Select all products and quantities that are ordered by both regular and VIP customers * import { intersectAll } from 'drizzle-orm/pg-core' * * await intersectAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * // or * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders) * .intersectAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * ``` */ export const intersectAll = createSetOperator('intersect', true); /** * Adds `except` set operator to the query. * * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} * * @example * * ```ts * // Select all courses offered in department A but not in department B * import { except } from 'drizzle-orm/pg-core' * * await except( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * await db.select({ courseName: depA.courseName }) * .from(depA) * .except( * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ export const except = createSetOperator('except', false); /** * Adds `except all` set operator to the query. * * Calling this method will retrieve all rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except-all} * * @example * * ```ts * // Select all products that are ordered by regular customers but not by VIP customers * import { exceptAll } from 'drizzle-orm/pg-core' * * await exceptAll( * db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered * }) * .from(regularCustomerOrders), * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered * }) * .from(vipCustomerOrders) * ); * // or * await db.select({ * productId: regularCustomerOrders.productId, * quantityOrdered: regularCustomerOrders.quantityOrdered, * }) * .from(regularCustomerOrders) * .exceptAll( * db.select({ * productId: vipCustomerOrders.productId, * quantityOrdered: vipCustomerOrders.quantityOrdered, * }) * .from(vipCustomerOrders) * ); * ``` */ export const exceptAll = createSetOperator('except', true); ================================================ FILE: drizzle-orm/src/pg-core/query-builders/select.types.ts ================================================ import type { SelectedFields as SelectedFieldsBase, SelectedFieldsFlat as SelectedFieldsFlatBase, SelectedFieldsOrdered as SelectedFieldsOrderedBase, } from '~/operations.ts'; import type { PgColumn } from '~/pg-core/columns/index.ts'; import type { PgTable, PgTableWithColumns } from '~/pg-core/table.ts'; import type { PgViewBase } from '~/pg-core/view-base.ts'; import type { PgViewWithSelection } from '~/pg-core/view.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AppendToNullabilityMap, AppendToResult, BuildSubquerySelection, GetSelectTableName, JoinNullability, JoinType, MapColumnsToTableAlias, SelectMode, SelectResult, SetOperator, } from '~/query-builders/select.types.ts'; import type { ColumnsSelection, Placeholder, SQL, SQLWrapper, View } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import type { Table, UpdateTableConfig } from '~/table.ts'; import type { Assume, DrizzleTypeError, Equal, ValidateShape, ValueOrArray } from '~/utils.ts'; import type { PgPreparedQuery, PreparedQueryConfig } from '../session.ts'; import type { PgSelectBase, PgSelectQueryBuilderBase } from './select.ts'; export interface PgSelectJoinConfig { on: SQL | undefined; table: PgTable | Subquery | PgViewBase | SQL; alias: string | undefined; joinType: JoinType; lateral?: boolean; } export type BuildAliasTable = TTable extends Table ? PgTableWithColumns< UpdateTableConfig; }> > : TTable extends View ? PgViewWithSelection< TAlias, TTable['_']['existing'], MapColumnsToTableAlias > : never; export interface PgSelectConfig { withList?: Subquery[]; // Either fields or fieldsFlat must be defined fields: Record; fieldsFlat?: SelectedFieldsOrdered; where?: SQL; having?: SQL; table: PgTable | Subquery | PgViewBase | SQL; limit?: number | Placeholder; offset?: number | Placeholder; joins?: PgSelectJoinConfig[]; orderBy?: (PgColumn | SQL | SQL.Aliased)[]; groupBy?: (PgColumn | SQL | SQL.Aliased)[]; lockingClause?: { strength: LockStrength; config: LockConfig; }; distinct?: boolean | { on: (PgColumn | SQLWrapper)[]; }; setOperators: { rightSelect: TypedQueryBuilder; type: SetOperator; isAll: boolean; orderBy?: (PgColumn | SQL | SQL.Aliased)[]; limit?: number | Placeholder; offset?: number | Placeholder; }[]; } export type TableLikeHasEmptySelection = T extends Subquery ? Equal extends true ? true : false : false; export type PgSelectJoin< T extends AnyPgSelectQueryBuilder, TDynamic extends boolean, TJoinType extends JoinType, TJoinedTable extends PgTable | Subquery | PgViewBase | SQL, TJoinedName extends GetSelectTableName = GetSelectTableName, > = T extends any ? PgSelectWithout< PgSelectKind< T['_']['hkt'], T['_']['tableName'], AppendToResult< T['_']['tableName'], T['_']['selection'], TJoinedName, TJoinedTable extends Table ? TJoinedTable['_']['columns'] : TJoinedTable extends Subquery | View ? Assume : never, T['_']['selectMode'] >, T['_']['selectMode'] extends 'partial' ? T['_']['selectMode'] : 'multiple', AppendToNullabilityMap, T['_']['dynamic'], T['_']['excludedMethods'] >, TDynamic, T['_']['excludedMethods'] > : never; export type PgSelectJoinFn< T extends AnyPgSelectQueryBuilder, TDynamic extends boolean, TJoinType extends JoinType, TIsLateral extends boolean, > = < TJoinedTable extends (TIsLateral extends true ? Subquery | SQL : PgTable | Subquery | PgViewBase | SQL), TJoinedName extends GetSelectTableName = GetSelectTableName, >( table: TableLikeHasEmptySelection extends true ? DrizzleTypeError< "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" > : TJoinedTable, on: ((aliases: T['_']['selection']) => SQL | undefined) | SQL | undefined, ) => PgSelectJoin; export type PgSelectCrossJoinFn< T extends AnyPgSelectQueryBuilder, TDynamic extends boolean, TIsLateral extends boolean, > = < TJoinedTable extends (TIsLateral extends true ? Subquery | SQL : PgTable | Subquery | PgViewBase | SQL), TJoinedName extends GetSelectTableName = GetSelectTableName, >( table: TableLikeHasEmptySelection extends true ? DrizzleTypeError< "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" > : TJoinedTable, ) => PgSelectJoin; export type SelectedFieldsFlat = SelectedFieldsFlatBase; export type SelectedFields = SelectedFieldsBase; export type SelectedFieldsOrdered = SelectedFieldsOrderedBase; export type LockStrength = 'update' | 'no key update' | 'share' | 'key share'; export type LockConfig = & { of?: ValueOrArray; } & ({ noWait: true; skipLocked?: undefined; } | { noWait?: undefined; skipLocked: true; } | { noWait?: undefined; skipLocked?: undefined; }); export interface PgSelectHKTBase { tableName: string | undefined; selection: unknown; selectMode: SelectMode; nullabilityMap: unknown; dynamic: boolean; excludedMethods: string; result: unknown; selectedFields: unknown; _type: unknown; } export type PgSelectKind< T extends PgSelectHKTBase, TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record, TDynamic extends boolean, TExcludedMethods extends string, TResult = SelectResult[], TSelectedFields = BuildSubquerySelection, > = (T & { tableName: TTableName; selection: TSelection; selectMode: TSelectMode; nullabilityMap: TNullabilityMap; dynamic: TDynamic; excludedMethods: TExcludedMethods; result: TResult; selectedFields: TSelectedFields; })['_type']; export interface PgSelectQueryBuilderHKT extends PgSelectHKTBase { _type: PgSelectQueryBuilderBase< PgSelectQueryBuilderHKT, this['tableName'], Assume, this['selectMode'], Assume>, this['dynamic'], this['excludedMethods'], Assume, Assume >; } export interface PgSelectHKT extends PgSelectHKTBase { _type: PgSelectBase< this['tableName'], Assume, this['selectMode'], Assume>, this['dynamic'], this['excludedMethods'], Assume, Assume >; } export type CreatePgSelectFromBuilderMode< TBuilderMode extends 'db' | 'qb', TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, > = TBuilderMode extends 'db' ? PgSelectBase : PgSelectQueryBuilderBase; export type PgSetOperatorExcludedMethods = | 'leftJoin' | 'rightJoin' | 'innerJoin' | 'fullJoin' | 'where' | 'having' | 'groupBy' | 'for'; export type PgSelectWithout< T extends AnyPgSelectQueryBuilder, TDynamic extends boolean, K extends keyof T & string, TResetExcluded extends boolean = false, > = TDynamic extends true ? T : Omit< PgSelectKind< T['_']['hkt'], T['_']['tableName'], T['_']['selection'], T['_']['selectMode'], T['_']['nullabilityMap'], TDynamic, TResetExcluded extends true ? K : T['_']['excludedMethods'] | K, T['_']['result'], T['_']['selectedFields'] >, TResetExcluded extends true ? K : T['_']['excludedMethods'] | K >; export type PgSelectPrepare = PgPreparedQuery< PreparedQueryConfig & { execute: T['_']['result']; } >; export type PgSelectDynamic = PgSelectKind< T['_']['hkt'], T['_']['tableName'], T['_']['selection'], T['_']['selectMode'], T['_']['nullabilityMap'], true, never, T['_']['result'], T['_']['selectedFields'] >; export type PgSelectQueryBuilder< THKT extends PgSelectHKTBase = PgSelectQueryBuilderHKT, TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = ColumnsSelection, TSelectMode extends SelectMode = SelectMode, TNullabilityMap extends Record = Record, TResult extends any[] = unknown[], TSelectedFields extends ColumnsSelection = ColumnsSelection, > = PgSelectQueryBuilderBase< THKT, TTableName, TSelection, TSelectMode, TNullabilityMap, true, never, TResult, TSelectedFields >; export type AnyPgSelectQueryBuilder = PgSelectQueryBuilderBase; export type AnyPgSetOperatorInterface = PgSetOperatorInterface; export interface PgSetOperatorInterface< TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > { _: { readonly hkt: PgSelectHKT; readonly tableName: TTableName; readonly selection: TSelection; readonly selectMode: TSelectMode; readonly nullabilityMap: TNullabilityMap; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TResult; readonly selectedFields: TSelectedFields; }; } export type PgSetOperatorWithResult = PgSetOperatorInterface< any, any, any, any, any, any, TResult, any >; export type PgSelect< TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = Record, TSelectMode extends SelectMode = SelectMode, TNullabilityMap extends Record = Record, > = PgSelectBase; export type AnyPgSelect = PgSelectBase; export type PgSetOperator< TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = Record, TSelectMode extends SelectMode = SelectMode, TNullabilityMap extends Record = Record, > = PgSelectBase< TTableName, TSelection, TSelectMode, TNullabilityMap, true, PgSetOperatorExcludedMethods >; export type SetOperatorRightSelect< TValue extends PgSetOperatorWithResult, TResult extends any[], > = TValue extends PgSetOperatorInterface ? ValidateShape< TValueResult[number], TResult[number], TypedQueryBuilder > : TValue; export type SetOperatorRestSelect< TValue extends readonly PgSetOperatorWithResult[], TResult extends any[], > = TValue extends [infer First, ...infer Rest] ? First extends PgSetOperatorInterface ? Rest extends AnyPgSetOperatorInterface[] ? [ ValidateShape>, ...SetOperatorRestSelect, ] : ValidateShape[]> : never : TValue; export type PgCreateSetOperatorFn = < TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TValue extends PgSetOperatorWithResult, TRest extends PgSetOperatorWithResult[], TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, >( leftSelect: PgSetOperatorInterface< TTableName, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, rightSelect: SetOperatorRightSelect, ...restSelects: SetOperatorRestSelect ) => PgSelectWithout< PgSelectBase< TTableName, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, false, PgSetOperatorExcludedMethods, true >; export type GetPgSetOperators = { union: PgCreateSetOperatorFn; intersect: PgCreateSetOperatorFn; except: PgCreateSetOperatorFn; unionAll: PgCreateSetOperatorFn; intersectAll: PgCreateSetOperatorFn; exceptAll: PgCreateSetOperatorFn; }; ================================================ FILE: drizzle-orm/src/pg-core/query-builders/update.ts ================================================ import type { WithCacheConfig } from '~/cache/core/types.ts'; import type { GetColumnData } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import type { PgPreparedQuery, PgQueryResultHKT, PgQueryResultKind, PgSession, PreparedQueryConfig, } from '~/pg-core/session.ts'; import { PgTable } from '~/pg-core/table.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AppendToNullabilityMap, AppendToResult, GetSelectTableName, GetSelectTableSelection, JoinNullability, JoinType, SelectMode, SelectResult, } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import { type ColumnsSelection, type Query, SQL, type SQLWrapper } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { getTableName, Table } from '~/table.ts'; import { type Assume, type DrizzleTypeError, type Equal, getTableLikeName, mapUpdateSet, type NeonAuthToken, orderSelectedFields, type Simplify, type UpdateSet, } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { PgColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import type { PgViewBase } from '../view-base.ts'; import type { PgSelectJoinConfig, SelectedFields, SelectedFieldsOrdered, TableLikeHasEmptySelection, } from './select.types.ts'; export interface PgUpdateConfig { where?: SQL | undefined; set: UpdateSet; table: PgTable; from?: PgTable | Subquery | PgViewBase | SQL; joins: PgSelectJoinConfig[]; returningFields?: SelectedFields; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } export type PgUpdateSetSource = & { [Key in keyof TTable['$inferInsert']]?: | GetColumnData | SQL | PgColumn | undefined; } & {}; export class PgUpdateBuilder { static readonly [entityKind]: string = 'PgUpdateBuilder'; declare readonly _: { readonly table: TTable; }; constructor( private table: TTable, private session: PgSession, private dialect: PgDialect, private withList?: Subquery[], ) {} private authToken?: NeonAuthToken; setToken(token: NeonAuthToken) { this.authToken = token; return this; } set( values: PgUpdateSetSource, ): PgUpdateWithout, false, 'leftJoin' | 'rightJoin' | 'innerJoin' | 'fullJoin'> { return new PgUpdateBase( this.table, mapUpdateSet(this.table, values), this.session, this.dialect, this.withList, ).setToken(this.authToken); } } export type PgUpdateWithout< T extends AnyPgUpdate, TDynamic extends boolean, K extends keyof T & string, > = TDynamic extends true ? T : Omit< PgUpdateBase< T['_']['table'], T['_']['queryResult'], T['_']['from'], T['_']['selectedFields'], T['_']['returning'], T['_']['nullabilityMap'], T['_']['joins'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type PgUpdateWithJoins< T extends AnyPgUpdate, TDynamic extends boolean, TFrom extends PgTable | Subquery | PgViewBase | SQL, > = TDynamic extends true ? T : Omit< PgUpdateBase< T['_']['table'], T['_']['queryResult'], TFrom, T['_']['selectedFields'], T['_']['returning'], AppendToNullabilityMap, 'inner'>, [...T['_']['joins'], { name: GetSelectTableName; joinType: 'inner'; table: TFrom; }], TDynamic, Exclude >, Exclude >; export type PgUpdateJoinFn< T extends AnyPgUpdate, TDynamic extends boolean, TJoinType extends JoinType, > = < TJoinedTable extends PgTable | Subquery | PgViewBase | SQL, >( table: TableLikeHasEmptySelection extends true ? DrizzleTypeError< "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" > : TJoinedTable, on: | ( ( updateTable: T['_']['table']['_']['columns'], from: T['_']['from'] extends PgTable ? T['_']['from']['_']['columns'] : T['_']['from'] extends Subquery | PgViewBase ? T['_']['from']['_']['selectedFields'] : never, ) => SQL | undefined ) | SQL | undefined, ) => PgUpdateJoin; export type PgUpdateJoin< T extends AnyPgUpdate, TDynamic extends boolean, TJoinType extends JoinType, TJoinedTable extends PgTable | Subquery | PgViewBase | SQL, > = TDynamic extends true ? T : PgUpdateBase< T['_']['table'], T['_']['queryResult'], T['_']['from'], T['_']['selectedFields'], T['_']['returning'], AppendToNullabilityMap, TJoinType>, [...T['_']['joins'], { name: GetSelectTableName; joinType: TJoinType; table: TJoinedTable; }], TDynamic, T['_']['excludedMethods'] >; type Join = { name: string | undefined; joinType: JoinType; table: PgTable | Subquery | PgViewBase | SQL; }; type AccumulateToResult< T extends AnyPgUpdate, TSelectMode extends SelectMode, TJoins extends Join[], TSelectedFields extends ColumnsSelection, > = TJoins extends [infer TJoin extends Join, ...infer TRest extends Join[]] ? AccumulateToResult< T, TSelectMode extends 'partial' ? TSelectMode : 'multiple', TRest, AppendToResult< T['_']['table']['_']['name'], TSelectedFields, TJoin['name'], TJoin['table'] extends Table ? TJoin['table']['_']['columns'] : TJoin['table'] extends Subquery ? Assume : never, TSelectMode extends 'partial' ? TSelectMode : 'multiple' > > : TSelectedFields; export type PgUpdateReturningAll = PgUpdateWithout< PgUpdateBase< T['_']['table'], T['_']['queryResult'], T['_']['from'], Equal extends true ? T['_']['table']['_']['columns'] : Simplify< & Record & { [K in keyof T['_']['joins'] as T['_']['joins'][K]['table']['_']['name']]: T['_']['joins'][K]['table']['_']['columns']; } >, SelectResult< AccumulateToResult< T, 'single', T['_']['joins'], GetSelectTableSelection >, 'partial', T['_']['nullabilityMap'] >, T['_']['nullabilityMap'], T['_']['joins'], TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type PgUpdateReturning< T extends AnyPgUpdate, TDynamic extends boolean, TSelectedFields extends SelectedFields, > = PgUpdateWithout< PgUpdateBase< T['_']['table'], T['_']['queryResult'], T['_']['from'], TSelectedFields, SelectResult< AccumulateToResult< T, 'partial', T['_']['joins'], TSelectedFields >, 'partial', T['_']['nullabilityMap'] >, T['_']['nullabilityMap'], T['_']['joins'], TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type PgUpdatePrepare = PgPreparedQuery< PreparedQueryConfig & { execute: T['_']['returning'] extends undefined ? PgQueryResultKind : T['_']['returning'][]; } >; export type PgUpdateDynamic = PgUpdate< T['_']['table'], T['_']['queryResult'], T['_']['from'], T['_']['returning'], T['_']['nullabilityMap'] >; export type PgUpdate< TTable extends PgTable = PgTable, TQueryResult extends PgQueryResultHKT = PgQueryResultHKT, TFrom extends PgTable | Subquery | PgViewBase | SQL | undefined = undefined, TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = Record | undefined, TNullabilityMap extends Record = Record, TJoins extends Join[] = [], > = PgUpdateBase; export type AnyPgUpdate = PgUpdateBase; export interface PgUpdateBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, TFrom extends PgTable | Subquery | PgViewBase | SQL | undefined = undefined, TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, TNullabilityMap extends Record = Record, TJoins extends Join[] = [], TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends TypedQueryBuilder< TSelectedFields, TReturning extends undefined ? PgQueryResultKind : TReturning[] >, QueryPromise : TReturning[]>, RunnableQuery : TReturning[], 'pg'>, SQLWrapper { readonly _: { readonly dialect: 'pg'; readonly table: TTable; readonly joins: TJoins; readonly nullabilityMap: TNullabilityMap; readonly queryResult: TQueryResult; readonly from: TFrom; readonly selectedFields: TSelectedFields; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TReturning extends undefined ? PgQueryResultKind : TReturning[]; }; } export class PgUpdateBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, TFrom extends PgTable | Subquery | PgViewBase | SQL | undefined = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TNullabilityMap extends Record = Record, // eslint-disable-next-line @typescript-eslint/no-unused-vars TJoins extends Join[] = [], // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]> implements RunnableQuery : TReturning[], 'pg'>, SQLWrapper { static override readonly [entityKind]: string = 'PgUpdate'; private config: PgUpdateConfig; private tableName: string | undefined; private joinsNotNullableMap: Record; protected cacheConfig?: WithCacheConfig; constructor( table: TTable, set: UpdateSet, private session: PgSession, private dialect: PgDialect, withList?: Subquery[], ) { super(); this.config = { set, table, withList, joins: [] }; this.tableName = getTableLikeName(table); this.joinsNotNullableMap = typeof this.tableName === 'string' ? { [this.tableName]: true } : {}; } from( source: TableLikeHasEmptySelection extends true ? DrizzleTypeError< "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" > : TFrom, ): PgUpdateWithJoins { const src = source as TFrom; const tableName = getTableLikeName(src); if (typeof tableName === 'string') { this.joinsNotNullableMap[tableName] = true; } this.config.from = src; return this as any; } private getTableLikeFields(table: PgTable | Subquery | PgViewBase): Record { if (is(table, PgTable)) { return table[Table.Symbol.Columns]; } else if (is(table, Subquery)) { return table._.selectedFields; } return table[ViewBaseConfig].selectedFields; } private createJoin( joinType: TJoinType, ): PgUpdateJoinFn { return (( table: PgTable | Subquery | PgViewBase | SQL, on: ((updateTable: TTable, from: TFrom) => SQL | undefined) | SQL | undefined, ) => { const tableName = getTableLikeName(table); if (typeof tableName === 'string' && this.config.joins.some((join) => join.alias === tableName)) { throw new Error(`Alias "${tableName}" is already used in this query`); } if (typeof on === 'function') { const from = this.config.from && !is(this.config.from, SQL) ? this.getTableLikeFields(this.config.from) : undefined; on = on( new Proxy( this.config.table[Table.Symbol.Columns], new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as any, from && new Proxy( from, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as any, ); } this.config.joins.push({ on, table, joinType, alias: tableName }); if (typeof tableName === 'string') { switch (joinType) { case 'left': { this.joinsNotNullableMap[tableName] = false; break; } case 'right': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = true; break; } case 'inner': { this.joinsNotNullableMap[tableName] = true; break; } case 'full': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = false; break; } } } return this as any; }) as any; } leftJoin = this.createJoin('left'); rightJoin = this.createJoin('right'); innerJoin = this.createJoin('inner'); fullJoin = this.createJoin('full'); /** * Adds a 'where' clause to the query. * * Calling this method will update only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param where the 'where' clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be updated. * * ```ts * // Update all cars with green color * await db.update(cars).set({ color: 'red' }) * .where(eq(cars.color, 'green')); * // or * await db.update(cars).set({ color: 'red' }) * .where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Update all BMW cars with a green color * await db.update(cars).set({ color: 'red' }) * .where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Update all cars with the green or blue color * await db.update(cars).set({ color: 'red' }) * .where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where(where: SQL | undefined): PgUpdateWithout { this.config.where = where; return this as any; } /** * Adds a `returning` clause to the query. * * Calling this method will return the specified fields of the updated rows. If no fields are specified, all fields will be returned. * * See docs: {@link https://orm.drizzle.team/docs/update#update-with-returning} * * @example * ```ts * // Update all cars with the green color and return all fields * const updatedCars: Car[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.color, 'green')) * .returning(); * * // Update all cars with the green color and return only their id and brand fields * const updatedCarsIdsAndBrands: { id: number, brand: string }[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.color, 'green')) * .returning({ id: cars.id, brand: cars.brand }); * ``` */ returning(): PgUpdateReturningAll; returning( fields: TSelectedFields, ): PgUpdateReturning; returning( fields?: SelectedFields, ): PgUpdateWithout { if (!fields) { fields = Object.assign({}, this.config.table[Table.Symbol.Columns]); if (this.config.from) { const tableName = getTableLikeName(this.config.from); if (typeof tableName === 'string' && this.config.from && !is(this.config.from, SQL)) { const fromFields = this.getTableLikeFields(this.config.from); fields[tableName] = fromFields as any; } for (const join of this.config.joins) { const tableName = getTableLikeName(join.table); if (typeof tableName === 'string' && !is(join.table, SQL)) { const fromFields = this.getTableLikeFields(join.table); fields[tableName] = fromFields as any; } } } } this.config.returningFields = fields; this.config.returning = orderSelectedFields(fields); return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildUpdateQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } /** @internal */ _prepare(name?: string): PgUpdatePrepare { const query = this.session.prepareQuery< PreparedQueryConfig & { execute: TReturning[] } >(this.dialect.sqlToQuery(this.getSQL()), this.config.returning, name, true, undefined, { type: 'insert', tables: extractUsedTable(this.config.table), }, this.cacheConfig); query.joinsNotNullableMap = this.joinsNotNullableMap; return query; } prepare(name: string): PgUpdatePrepare { return this._prepare(name); } private authToken?: NeonAuthToken; /** @internal */ setToken(token?: NeonAuthToken) { this.authToken = token; return this; } override execute: ReturnType['execute'] = (placeholderValues) => { return this._prepare().execute(placeholderValues, this.authToken); }; /** @internal */ getSelectedFields(): this['_']['selectedFields'] { return ( this.config.returningFields ? new Proxy( this.config.returningFields, new SelectionProxyHandler({ alias: getTableName(this.config.table), sqlAliasedBehavior: 'alias', sqlBehavior: 'error', }), ) : undefined ) as this['_']['selectedFields']; } $dynamic(): PgUpdateDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/pg-core/roles.ts ================================================ import { entityKind } from '~/entity.ts'; export interface PgRoleConfig { createDb?: boolean; createRole?: boolean; inherit?: boolean; } export class PgRole implements PgRoleConfig { static readonly [entityKind]: string = 'PgRole'; /** @internal */ _existing?: boolean; /** @internal */ readonly createDb: PgRoleConfig['createDb']; /** @internal */ readonly createRole: PgRoleConfig['createRole']; /** @internal */ readonly inherit: PgRoleConfig['inherit']; constructor( readonly name: string, config?: PgRoleConfig, ) { if (config) { this.createDb = config.createDb; this.createRole = config.createRole; this.inherit = config.inherit; } } existing(): this { this._existing = true; return this; } } export function pgRole(name: string, config?: PgRoleConfig) { return new PgRole(name, config); } ================================================ FILE: drizzle-orm/src/pg-core/schema.ts ================================================ import { entityKind, is } from '~/entity.ts'; import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { NonArray, Writable } from '~/utils.ts'; import { type PgEnum, type PgEnumObject, pgEnumObjectWithSchema, pgEnumWithSchema } from './columns/enum.ts'; import { type pgSequence, pgSequenceWithSchema } from './sequence.ts'; import { type PgTableFn, pgTableWithSchema } from './table.ts'; import { type pgMaterializedView, pgMaterializedViewWithSchema, type pgView, pgViewWithSchema } from './view.ts'; export class PgSchema implements SQLWrapper { static readonly [entityKind]: string = 'PgSchema'; constructor( public readonly schemaName: TName, ) {} table: PgTableFn = ((name, columns, extraConfig) => { return pgTableWithSchema(name, columns, extraConfig, this.schemaName); }); view = ((name, columns) => { return pgViewWithSchema(name, columns, this.schemaName); }) as typeof pgView; materializedView = ((name, columns) => { return pgMaterializedViewWithSchema(name, columns, this.schemaName); }) as typeof pgMaterializedView; public enum>( enumName: string, values: T | Writable, ): PgEnum>; public enum>( enumName: string, enumObj: NonArray, ): PgEnumObject; public enum(enumName: any, input: any): any { return Array.isArray(input) ? pgEnumWithSchema( enumName, [...input] as [string, ...string[]], this.schemaName, ) : pgEnumObjectWithSchema(enumName, input, this.schemaName); } sequence: typeof pgSequence = ((name, options) => { return pgSequenceWithSchema(name, options, this.schemaName); }); getSQL(): SQL { return new SQL([sql.identifier(this.schemaName)]); } shouldOmitSQLParens(): boolean { return true; } } export function isPgSchema(obj: unknown): obj is PgSchema { return is(obj, PgSchema); } export function pgSchema(name: T) { if (name === 'public') { throw new Error( `You can't specify 'public' as schema name. Postgres is using public schema by default. If you want to use 'public' schema, just use pgTable() instead of creating a schema`, ); } return new PgSchema(name); } ================================================ FILE: drizzle-orm/src/pg-core/sequence.ts ================================================ import { entityKind, is } from '~/entity.ts'; export type PgSequenceOptions = { increment?: number | string; minValue?: number | string; maxValue?: number | string; startWith?: number | string; cache?: number | string; cycle?: boolean; }; export class PgSequence { static readonly [entityKind]: string = 'PgSequence'; constructor( public readonly seqName: string | undefined, public readonly seqOptions: PgSequenceOptions | undefined, public readonly schema: string | undefined, ) { } } export function pgSequence( name: string, options?: PgSequenceOptions, ): PgSequence { return pgSequenceWithSchema(name, options, undefined); } /** @internal */ export function pgSequenceWithSchema( name: string, options?: PgSequenceOptions, schema?: string, ): PgSequence { return new PgSequence(name, options, schema); } export function isPgSequence(obj: unknown): obj is PgSequence { return is(obj, PgSequence); } ================================================ FILE: drizzle-orm/src/pg-core/session.ts ================================================ import { type Cache, hashQuery, NoopCache } from '~/cache/core/cache.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import { DrizzleQueryError, TransactionRollbackError } from '~/errors.ts'; import type { TablesRelationalConfig } from '~/relations.ts'; import type { PreparedQuery } from '~/session.ts'; import { type Query, type SQL, sql } from '~/sql/index.ts'; import { tracer } from '~/tracing.ts'; import type { NeonAuthToken } from '~/utils.ts'; import { PgDatabase } from './db.ts'; import type { PgDialect } from './dialect.ts'; import type { SelectedFieldsOrdered } from './query-builders/select.types.ts'; export interface PreparedQueryConfig { execute: unknown; all: unknown; values: unknown; } export abstract class PgPreparedQuery implements PreparedQuery { constructor( protected query: Query, // cache instance private cache: Cache | undefined, // per query related metadata private queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, // config that was passed through $withCache private cacheConfig?: WithCacheConfig, ) { // it means that no $withCache options were passed and it should be just enabled if (cache && cache.strategy() === 'all' && cacheConfig === undefined) { this.cacheConfig = { enable: true, autoInvalidate: true }; } if (!this.cacheConfig?.enable) { this.cacheConfig = undefined; } } protected authToken?: NeonAuthToken; getQuery(): Query { return this.query; } mapResult(response: unknown, _isFromBatch?: boolean): unknown { return response; } /** @internal */ setToken(token?: NeonAuthToken) { this.authToken = token; return this; } static readonly [entityKind]: string = 'PgPreparedQuery'; /** @internal */ joinsNotNullableMap?: Record; /** @internal */ protected async queryWithCache( queryString: string, params: any[], query: () => Promise, ): Promise { if (this.cache === undefined || is(this.cache, NoopCache) || this.queryMetadata === undefined) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // don't do any mutations, if globally is false if (this.cacheConfig && !this.cacheConfig.enable) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // For mutate queries, we should query the database, wait for a response, and then perform invalidation if ( ( this.queryMetadata.type === 'insert' || this.queryMetadata.type === 'update' || this.queryMetadata.type === 'delete' ) && this.queryMetadata.tables.length > 0 ) { try { const [res] = await Promise.all([ query(), this.cache.onMutate({ tables: this.queryMetadata.tables }), ]); return res; } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // don't do any reads if globally disabled if (!this.cacheConfig) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } if (this.queryMetadata.type === 'select') { const fromCache = await this.cache.get( this.cacheConfig.tag ?? await hashQuery(queryString, params), this.queryMetadata.tables, this.cacheConfig.tag !== undefined, this.cacheConfig.autoInvalidate, ); if (fromCache === undefined) { let result; try { result = await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } // put actual key await this.cache.put( this.cacheConfig.tag ?? await hashQuery(queryString, params), result, // make sure we send tables that were used in a query only if user wants to invalidate it on each write this.cacheConfig.autoInvalidate ? this.queryMetadata.tables : [], this.cacheConfig.tag !== undefined, this.cacheConfig.config, ); // put flag if we should invalidate or not return result; } return fromCache as unknown as T; } try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } abstract execute(placeholderValues?: Record): Promise; /** @internal */ abstract execute(placeholderValues?: Record, token?: NeonAuthToken): Promise; /** @internal */ abstract execute(placeholderValues?: Record, token?: NeonAuthToken): Promise; /** @internal */ abstract all(placeholderValues?: Record): Promise; /** @internal */ abstract isResponseInArrayMode(): boolean; } export interface PgTransactionConfig { isolationLevel?: 'read uncommitted' | 'read committed' | 'repeatable read' | 'serializable'; accessMode?: 'read only' | 'read write'; deferrable?: boolean; } export abstract class PgSession< TQueryResult extends PgQueryResultHKT = PgQueryResultHKT, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > { static readonly [entityKind]: string = 'PgSession'; constructor(protected dialect: PgDialect) {} abstract prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][], mapColumnValue?: (value: unknown) => unknown) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PgPreparedQuery; execute(query: SQL): Promise; /** @internal */ execute(query: SQL, token?: NeonAuthToken): Promise; /** @internal */ execute(query: SQL, token?: NeonAuthToken): Promise { return tracer.startActiveSpan('drizzle.operation', () => { const prepared = tracer.startActiveSpan('drizzle.prepareQuery', () => { return this.prepareQuery( this.dialect.sqlToQuery(query), undefined, undefined, false, ); }); return prepared.setToken(token).execute(undefined, token); }); } all(query: SQL): Promise { return this.prepareQuery( this.dialect.sqlToQuery(query), undefined, undefined, false, ).all(); } async count(sql: SQL): Promise; /** @internal */ async count(sql: SQL, token?: NeonAuthToken): Promise; /** @internal */ async count(sql: SQL, token?: NeonAuthToken): Promise { const res = await this.execute<[{ count: string }]>(sql, token); return Number( res[0]['count'], ); } abstract transaction( transaction: (tx: PgTransaction) => Promise, config?: PgTransactionConfig, ): Promise; } export abstract class PgTransaction< TQueryResult extends PgQueryResultHKT, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > extends PgDatabase { static override readonly [entityKind]: string = 'PgTransaction'; constructor( dialect: PgDialect, session: PgSession, protected schema: { fullSchema: Record; schema: TSchema; tableNamesMap: Record; } | undefined, protected readonly nestedIndex = 0, ) { super(dialect, session, schema); } rollback(): never { throw new TransactionRollbackError(); } /** @internal */ getTransactionConfigSQL(config: PgTransactionConfig): SQL { const chunks: string[] = []; if (config.isolationLevel) { chunks.push(`isolation level ${config.isolationLevel}`); } if (config.accessMode) { chunks.push(config.accessMode); } if (typeof config.deferrable === 'boolean') { chunks.push(config.deferrable ? 'deferrable' : 'not deferrable'); } return sql.raw(chunks.join(' ')); } setTransaction(config: PgTransactionConfig): Promise { return this.session.execute(sql`set transaction ${this.getTransactionConfigSQL(config)}`); } abstract override transaction( transaction: (tx: PgTransaction) => Promise, ): Promise; } export interface PgQueryResultHKT { readonly $brand: 'PgQueryResultHKT'; readonly row: unknown; readonly type: unknown; } export type PgQueryResultKind = (TKind & { readonly row: TRow; })['type']; ================================================ FILE: drizzle-orm/src/pg-core/subquery.ts ================================================ import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import type { Subquery, WithSubquery, WithSubqueryWithoutSelection } from '~/subquery.ts'; import type { QueryBuilder } from './query-builders/query-builder.ts'; export type SubqueryWithSelection = & Subquery> & AddAliasToSelection; export type WithSubqueryWithSelection = & WithSubquery> & AddAliasToSelection; export interface WithBuilder { (alias: TAlias): { as: { ( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): WithSubqueryWithSelection; ( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): WithSubqueryWithoutSelection; }; }; (alias: TAlias, selection: TSelection): { as: (qb: SQL | ((qb: QueryBuilder) => SQL)) => WithSubqueryWithSelection; }; } ================================================ FILE: drizzle-orm/src/pg-core/table.ts ================================================ import type { BuildColumns, BuildExtraConfigColumns } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import { Table, type TableConfig as TableConfigBase, type UpdateTableConfig } from '~/table.ts'; import type { CheckBuilder } from './checks.ts'; import { getPgColumnBuilders, type PgColumnsBuilders } from './columns/all.ts'; import type { ExtraConfigColumn, PgColumn, PgColumnBuilder, PgColumnBuilderBase } from './columns/common.ts'; import type { ForeignKey, ForeignKeyBuilder } from './foreign-keys.ts'; import type { AnyIndexBuilder } from './indexes.ts'; import type { PgPolicy } from './policies.ts'; import type { PrimaryKeyBuilder } from './primary-keys.ts'; import type { UniqueConstraintBuilder } from './unique-constraint.ts'; export type PgTableExtraConfigValue = | AnyIndexBuilder | CheckBuilder | ForeignKeyBuilder | PrimaryKeyBuilder | UniqueConstraintBuilder | PgPolicy; export type PgTableExtraConfig = Record< string, PgTableExtraConfigValue >; export type TableConfig = TableConfigBase; /** @internal */ export const InlineForeignKeys = Symbol.for('drizzle:PgInlineForeignKeys'); /** @internal */ export const EnableRLS = Symbol.for('drizzle:EnableRLS'); export class PgTable extends Table { static override readonly [entityKind]: string = 'PgTable'; /** @internal */ static override readonly Symbol = Object.assign({}, Table.Symbol, { InlineForeignKeys: InlineForeignKeys as typeof InlineForeignKeys, EnableRLS: EnableRLS as typeof EnableRLS, }); /**@internal */ [InlineForeignKeys]: ForeignKey[] = []; /** @internal */ [EnableRLS]: boolean = false; /** @internal */ override [Table.Symbol.ExtraConfigBuilder]: ((self: Record) => PgTableExtraConfig) | undefined = undefined; /** @internal */ override [Table.Symbol.ExtraConfigColumns]: Record = {}; } export type AnyPgTable = {}> = PgTable>; export type PgTableWithColumns = & PgTable & { [Key in keyof T['columns']]: T['columns'][Key]; } & { enableRLS: () => Omit< PgTableWithColumns, 'enableRLS' >; }; /** @internal */ export function pgTableWithSchema< TTableName extends string, TSchemaName extends string | undefined, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap | ((columnTypes: PgColumnsBuilders) => TColumnsMap), extraConfig: | ((self: BuildExtraConfigColumns) => PgTableExtraConfig | PgTableExtraConfigValue[]) | undefined, schema: TSchemaName, baseName = name, ): PgTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'pg'; }> { const rawTable = new PgTable<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'pg'; }>(name, schema, baseName); const parsedColumns: TColumnsMap = typeof columns === 'function' ? columns(getPgColumnBuilders()) : columns; const builtColumns = Object.fromEntries( Object.entries(parsedColumns).map(([name, colBuilderBase]) => { const colBuilder = colBuilderBase as PgColumnBuilder; colBuilder.setName(name); const column = colBuilder.build(rawTable); rawTable[InlineForeignKeys].push(...colBuilder.buildForeignKeys(column, rawTable)); return [name, column]; }), ) as unknown as BuildColumns; const builtColumnsForExtraConfig = Object.fromEntries( Object.entries(parsedColumns).map(([name, colBuilderBase]) => { const colBuilder = colBuilderBase as PgColumnBuilder; colBuilder.setName(name); const column = colBuilder.buildExtraConfigColumn(rawTable); return [name, column]; }), ) as unknown as BuildExtraConfigColumns; const table = Object.assign(rawTable, builtColumns); table[Table.Symbol.Columns] = builtColumns; table[Table.Symbol.ExtraConfigColumns] = builtColumnsForExtraConfig; if (extraConfig) { table[PgTable.Symbol.ExtraConfigBuilder] = extraConfig as any; } return Object.assign(table, { enableRLS: () => { table[PgTable.Symbol.EnableRLS] = true; return table as PgTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'pg'; }>; }, }); } export interface PgTableFn { < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap, extraConfig?: ( self: BuildExtraConfigColumns, ) => PgTableExtraConfigValue[], ): PgTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'pg'; }>; < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: (columnTypes: PgColumnsBuilders) => TColumnsMap, extraConfig?: (self: BuildExtraConfigColumns) => PgTableExtraConfigValue[], ): PgTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'pg'; }>; /** * @deprecated The third parameter of pgTable is changing and will only accept an array instead of an object * * @example * Deprecated version: * ```ts * export const users = pgTable("users", { * id: integer(), * }, (t) => ({ * idx: index('custom_name').on(t.id) * })); * ``` * * New API: * ```ts * export const users = pgTable("users", { * id: integer(), * }, (t) => [ * index('custom_name').on(t.id) * ]); * ``` */ < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap, extraConfig: ( self: BuildExtraConfigColumns, ) => PgTableExtraConfig, ): PgTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'pg'; }>; /** * @deprecated The third parameter of pgTable is changing and will only accept an array instead of an object * * @example * Deprecated version: * ```ts * export const users = pgTable("users", { * id: integer(), * }, (t) => ({ * idx: index('custom_name').on(t.id) * })); * ``` * * New API: * ```ts * export const users = pgTable("users", { * id: integer(), * }, (t) => [ * index('custom_name').on(t.id) * ]); * ``` */ < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: (columnTypes: PgColumnsBuilders) => TColumnsMap, extraConfig: (self: BuildExtraConfigColumns) => PgTableExtraConfig, ): PgTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'pg'; }>; } export const pgTable: PgTableFn = (name, columns, extraConfig) => { return pgTableWithSchema(name, columns, extraConfig, undefined); }; export function pgTableCreator(customizeTableName: (name: string) => string): PgTableFn { return (name, columns, extraConfig) => { return pgTableWithSchema(customizeTableName(name) as typeof name, columns, extraConfig, undefined, name); }; } ================================================ FILE: drizzle-orm/src/pg-core/unique-constraint.ts ================================================ import { entityKind } from '~/entity.ts'; import { TableName } from '~/table.utils.ts'; import type { PgColumn } from './columns/index.ts'; import type { PgTable } from './table.ts'; export function unique(name?: string): UniqueOnConstraintBuilder { return new UniqueOnConstraintBuilder(name); } export function uniqueKeyName(table: PgTable, columns: string[]) { return `${table[TableName]}_${columns.join('_')}_unique`; } export class UniqueConstraintBuilder { static readonly [entityKind]: string = 'PgUniqueConstraintBuilder'; /** @internal */ columns: PgColumn[]; /** @internal */ nullsNotDistinctConfig = false; constructor( columns: PgColumn[], private name?: string, ) { this.columns = columns; } nullsNotDistinct() { this.nullsNotDistinctConfig = true; return this; } /** @internal */ build(table: PgTable): UniqueConstraint { return new UniqueConstraint(table, this.columns, this.nullsNotDistinctConfig, this.name); } } export class UniqueOnConstraintBuilder { static readonly [entityKind]: string = 'PgUniqueOnConstraintBuilder'; /** @internal */ name?: string; constructor( name?: string, ) { this.name = name; } on(...columns: [PgColumn, ...PgColumn[]]) { return new UniqueConstraintBuilder(columns, this.name); } } export class UniqueConstraint { static readonly [entityKind]: string = 'PgUniqueConstraint'; readonly columns: PgColumn[]; readonly name?: string; readonly nullsNotDistinct: boolean = false; constructor(readonly table: PgTable, columns: PgColumn[], nullsNotDistinct: boolean, name?: string) { this.columns = columns; this.name = name ?? uniqueKeyName(this.table, this.columns.map((column) => column.name)); this.nullsNotDistinct = nullsNotDistinct; } getName() { return this.name; } } ================================================ FILE: drizzle-orm/src/pg-core/utils/array.ts ================================================ function parsePgArrayValue(arrayString: string, startFrom: number, inQuotes: boolean): [string, number] { for (let i = startFrom; i < arrayString.length; i++) { const char = arrayString[i]; if (char === '\\') { i++; continue; } if (char === '"') { return [arrayString.slice(startFrom, i).replace(/\\/g, ''), i + 1]; } if (inQuotes) { continue; } if (char === ',' || char === '}') { return [arrayString.slice(startFrom, i).replace(/\\/g, ''), i]; } } return [arrayString.slice(startFrom).replace(/\\/g, ''), arrayString.length]; } export function parsePgNestedArray(arrayString: string, startFrom = 0): [any[], number] { const result: any[] = []; let i = startFrom; let lastCharIsComma = false; while (i < arrayString.length) { const char = arrayString[i]; if (char === ',') { if (lastCharIsComma || i === startFrom) { result.push(''); } lastCharIsComma = true; i++; continue; } lastCharIsComma = false; if (char === '\\') { i += 2; continue; } if (char === '"') { const [value, startFrom] = parsePgArrayValue(arrayString, i + 1, true); result.push(value); i = startFrom; continue; } if (char === '}') { return [result, i + 1]; } if (char === '{') { const [value, startFrom] = parsePgNestedArray(arrayString, i + 1); result.push(value); i = startFrom; continue; } const [value, newStartFrom] = parsePgArrayValue(arrayString, i, false); result.push(value); i = newStartFrom; } return [result, i]; } export function parsePgArray(arrayString: string): any[] { const [result] = parsePgNestedArray(arrayString, 1); return result; } export function makePgArray(array: any[]): string { return `{${ array.map((item) => { if (Array.isArray(item)) { return makePgArray(item); } if (typeof item === 'string') { return `"${item.replace(/\\/g, '\\\\').replace(/"/g, '\\"')}"`; } return `${item}`; }).join(',') }}`; } ================================================ FILE: drizzle-orm/src/pg-core/utils/index.ts ================================================ export * from './array.ts'; ================================================ FILE: drizzle-orm/src/pg-core/utils.ts ================================================ import { is } from '~/entity.ts'; import { PgTable } from '~/pg-core/table.ts'; import { SQL } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { Schema, Table } from '~/table.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import { type Check, CheckBuilder } from './checks.ts'; import type { AnyPgColumn } from './columns/index.ts'; import { type ForeignKey, ForeignKeyBuilder } from './foreign-keys.ts'; import type { Index } from './indexes.ts'; import { IndexBuilder } from './indexes.ts'; import { PgPolicy } from './policies.ts'; import { type PrimaryKey, PrimaryKeyBuilder } from './primary-keys.ts'; import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; import type { PgViewBase } from './view-base.ts'; import { PgViewConfig } from './view-common.ts'; import { type PgMaterializedView, PgMaterializedViewConfig, type PgView } from './view.ts'; export function getTableConfig(table: TTable) { const columns = Object.values(table[Table.Symbol.Columns]); const indexes: Index[] = []; const checks: Check[] = []; const primaryKeys: PrimaryKey[] = []; const foreignKeys: ForeignKey[] = Object.values(table[PgTable.Symbol.InlineForeignKeys]); const uniqueConstraints: UniqueConstraint[] = []; const name = table[Table.Symbol.Name]; const schema = table[Table.Symbol.Schema]; const policies: PgPolicy[] = []; const enableRLS: boolean = table[PgTable.Symbol.EnableRLS]; const extraConfigBuilder = table[PgTable.Symbol.ExtraConfigBuilder]; if (extraConfigBuilder !== undefined) { const extraConfig = extraConfigBuilder(table[Table.Symbol.ExtraConfigColumns]); const extraValues = Array.isArray(extraConfig) ? extraConfig.flat(1) as any[] : Object.values(extraConfig); for (const builder of extraValues) { if (is(builder, IndexBuilder)) { indexes.push(builder.build(table)); } else if (is(builder, CheckBuilder)) { checks.push(builder.build(table)); } else if (is(builder, UniqueConstraintBuilder)) { uniqueConstraints.push(builder.build(table)); } else if (is(builder, PrimaryKeyBuilder)) { primaryKeys.push(builder.build(table)); } else if (is(builder, ForeignKeyBuilder)) { foreignKeys.push(builder.build(table)); } else if (is(builder, PgPolicy)) { policies.push(builder); } } } return { columns, indexes, foreignKeys, checks, primaryKeys, uniqueConstraints, name, schema, policies, enableRLS, }; } export function extractUsedTable(table: PgTable | Subquery | PgViewBase | SQL): string[] { if (is(table, PgTable)) { return [table[Schema] ? `${table[Schema]}.${table[Table.Symbol.BaseName]}` : table[Table.Symbol.BaseName]]; } if (is(table, Subquery)) { return table._.usedTables ?? []; } if (is(table, SQL)) { return table.usedTables ?? []; } return []; } export function getViewConfig< TName extends string = string, TExisting extends boolean = boolean, >(view: PgView) { return { ...view[ViewBaseConfig], ...view[PgViewConfig], }; } export function getMaterializedViewConfig< TName extends string = string, TExisting extends boolean = boolean, >(view: PgMaterializedView) { return { ...view[ViewBaseConfig], ...view[PgMaterializedViewConfig], }; } export type ColumnsWithTable< TTableName extends string, TForeignTableName extends string, TColumns extends AnyPgColumn<{ tableName: TTableName }>[], > = { [Key in keyof TColumns]: AnyPgColumn<{ tableName: TForeignTableName }> }; ================================================ FILE: drizzle-orm/src/pg-core/view-base.ts ================================================ import { entityKind } from '~/entity.ts'; import { type ColumnsSelection, View } from '~/sql/sql.ts'; export abstract class PgViewBase< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends View { static override readonly [entityKind]: string = 'PgViewBase'; declare readonly _: View['_'] & { readonly viewBrand: 'PgViewBase'; }; } ================================================ FILE: drizzle-orm/src/pg-core/view-common.ts ================================================ export const PgViewConfig = Symbol.for('drizzle:PgViewConfig'); ================================================ FILE: drizzle-orm/src/pg-core/view.ts ================================================ import type { BuildColumns } from '~/column-builder.ts'; import { entityKind, is } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import { getTableColumns } from '~/utils.ts'; import type { RequireAtLeastOne } from '~/utils.ts'; import type { PgColumn, PgColumnBuilderBase } from './columns/common.ts'; import { QueryBuilder } from './query-builders/query-builder.ts'; import { pgTable } from './table.ts'; import { PgViewBase } from './view-base.ts'; import { PgViewConfig } from './view-common.ts'; export type ViewWithConfig = RequireAtLeastOne<{ checkOption: 'local' | 'cascaded'; securityBarrier: boolean; securityInvoker: boolean; }>; export class DefaultViewBuilderCore { static readonly [entityKind]: string = 'PgDefaultViewBuilderCore'; declare readonly _: { readonly name: TConfig['name']; readonly columns: TConfig['columns']; }; constructor( protected name: TConfig['name'], protected schema: string | undefined, ) {} protected config: { with?: ViewWithConfig; } = {}; with(config: ViewWithConfig): this { this.config.with = config; return this; } } export class ViewBuilder extends DefaultViewBuilderCore<{ name: TName }> { static override readonly [entityKind]: string = 'PgViewBuilder'; as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): PgViewWithSelection> { if (typeof qb === 'function') { qb = qb(new QueryBuilder()); } const selectionProxy = new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }); const aliasedSelection = new Proxy(qb.getSelectedFields(), selectionProxy); return new Proxy( new PgView({ pgConfig: this.config, config: { name: this.name, schema: this.schema, selectedFields: aliasedSelection, query: qb.getSQL().inlineParams(), }, }), selectionProxy as any, ) as PgViewWithSelection>; } } export class ManualViewBuilder< TName extends string = string, TColumns extends Record = Record, > extends DefaultViewBuilderCore<{ name: TName; columns: TColumns }> { static override readonly [entityKind]: string = 'PgManualViewBuilder'; private columns: Record; constructor( name: TName, columns: TColumns, schema: string | undefined, ) { super(name, schema); this.columns = getTableColumns(pgTable(name, columns)); } existing(): PgViewWithSelection> { return new Proxy( new PgView({ pgConfig: undefined, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: undefined, }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as PgViewWithSelection>; } as(query: SQL): PgViewWithSelection> { return new Proxy( new PgView({ pgConfig: this.config, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: query.inlineParams(), }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as PgViewWithSelection>; } } export type PgMaterializedViewWithConfig = RequireAtLeastOne<{ fillfactor: number; toastTupleTarget: number; parallelWorkers: number; autovacuumEnabled: boolean; vacuumIndexCleanup: 'auto' | 'off' | 'on'; vacuumTruncate: boolean; autovacuumVacuumThreshold: number; autovacuumVacuumScaleFactor: number; autovacuumVacuumCostDelay: number; autovacuumVacuumCostLimit: number; autovacuumFreezeMinAge: number; autovacuumFreezeMaxAge: number; autovacuumFreezeTableAge: number; autovacuumMultixactFreezeMinAge: number; autovacuumMultixactFreezeMaxAge: number; autovacuumMultixactFreezeTableAge: number; logAutovacuumMinDuration: number; userCatalogTable: boolean; }>; export class MaterializedViewBuilderCore { static readonly [entityKind]: string = 'PgMaterializedViewBuilderCore'; declare _: { readonly name: TConfig['name']; readonly columns: TConfig['columns']; }; constructor( protected name: TConfig['name'], protected schema: string | undefined, ) {} protected config: { with?: PgMaterializedViewWithConfig; using?: string; tablespace?: string; withNoData?: boolean; } = {}; using(using: string): this { this.config.using = using; return this; } with(config: PgMaterializedViewWithConfig): this { this.config.with = config; return this; } tablespace(tablespace: string): this { this.config.tablespace = tablespace; return this; } withNoData(): this { this.config.withNoData = true; return this; } } export class MaterializedViewBuilder extends MaterializedViewBuilderCore<{ name: TName }> { static override readonly [entityKind]: string = 'PgMaterializedViewBuilder'; as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): PgMaterializedViewWithSelection> { if (typeof qb === 'function') { qb = qb(new QueryBuilder()); } const selectionProxy = new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }); const aliasedSelection = new Proxy(qb.getSelectedFields(), selectionProxy); return new Proxy( new PgMaterializedView({ pgConfig: { with: this.config.with, using: this.config.using, tablespace: this.config.tablespace, withNoData: this.config.withNoData, }, config: { name: this.name, schema: this.schema, selectedFields: aliasedSelection, query: qb.getSQL().inlineParams(), }, }), selectionProxy as any, ) as PgMaterializedViewWithSelection>; } } export class ManualMaterializedViewBuilder< TName extends string = string, TColumns extends Record = Record, > extends MaterializedViewBuilderCore<{ name: TName; columns: TColumns }> { static override readonly [entityKind]: string = 'PgManualMaterializedViewBuilder'; private columns: Record; constructor( name: TName, columns: TColumns, schema: string | undefined, ) { super(name, schema); this.columns = getTableColumns(pgTable(name, columns)); } existing(): PgMaterializedViewWithSelection> { return new Proxy( new PgMaterializedView({ pgConfig: { tablespace: this.config.tablespace, using: this.config.using, with: this.config.with, withNoData: this.config.withNoData, }, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: undefined, }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as PgMaterializedViewWithSelection>; } as(query: SQL): PgMaterializedViewWithSelection> { return new Proxy( new PgMaterializedView({ pgConfig: { tablespace: this.config.tablespace, using: this.config.using, with: this.config.with, withNoData: this.config.withNoData, }, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: query.inlineParams(), }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as PgMaterializedViewWithSelection>; } } export class PgView< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends PgViewBase { static override readonly [entityKind]: string = 'PgView'; [PgViewConfig]: { with?: ViewWithConfig; } | undefined; constructor({ pgConfig, config }: { pgConfig: { with?: ViewWithConfig; } | undefined; config: { name: TName; schema: string | undefined; selectedFields: ColumnsSelection; query: SQL | undefined; }; }) { super(config); if (pgConfig) { this[PgViewConfig] = { with: pgConfig.with, }; } } } export type PgViewWithSelection< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > = PgView & TSelectedFields; export const PgMaterializedViewConfig = Symbol.for('drizzle:PgMaterializedViewConfig'); export class PgMaterializedView< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends PgViewBase { static override readonly [entityKind]: string = 'PgMaterializedView'; readonly [PgMaterializedViewConfig]: { readonly with?: PgMaterializedViewWithConfig; readonly using?: string; readonly tablespace?: string; readonly withNoData?: boolean; } | undefined; constructor({ pgConfig, config }: { pgConfig: { with: PgMaterializedViewWithConfig | undefined; using: string | undefined; tablespace: string | undefined; withNoData: boolean | undefined; } | undefined; config: { name: TName; schema: string | undefined; selectedFields: ColumnsSelection; query: SQL | undefined; }; }) { super(config); this[PgMaterializedViewConfig] = { with: pgConfig?.with, using: pgConfig?.using, tablespace: pgConfig?.tablespace, withNoData: pgConfig?.withNoData, }; } } export type PgMaterializedViewWithSelection< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > = PgMaterializedView & TSelectedFields; /** @internal */ export function pgViewWithSchema( name: string, selection: Record | undefined, schema: string | undefined, ): ViewBuilder | ManualViewBuilder { if (selection) { return new ManualViewBuilder(name, selection, schema); } return new ViewBuilder(name, schema); } /** @internal */ export function pgMaterializedViewWithSchema( name: string, selection: Record | undefined, schema: string | undefined, ): MaterializedViewBuilder | ManualMaterializedViewBuilder { if (selection) { return new ManualMaterializedViewBuilder(name, selection, schema); } return new MaterializedViewBuilder(name, schema); } export function pgView(name: TName): ViewBuilder; export function pgView>( name: TName, columns: TColumns, ): ManualViewBuilder; export function pgView(name: string, columns?: Record): ViewBuilder | ManualViewBuilder { return pgViewWithSchema(name, columns, undefined); } export function pgMaterializedView(name: TName): MaterializedViewBuilder; export function pgMaterializedView>( name: TName, columns: TColumns, ): ManualMaterializedViewBuilder; export function pgMaterializedView( name: string, columns?: Record, ): MaterializedViewBuilder | ManualMaterializedViewBuilder { return pgMaterializedViewWithSchema(name, columns, undefined); } export function isPgView(obj: unknown): obj is PgView { return is(obj, PgView); } export function isPgMaterializedView(obj: unknown): obj is PgMaterializedView { return is(obj, PgMaterializedView); } ================================================ FILE: drizzle-orm/src/pg-proxy/driver.ts ================================================ import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import type { DrizzleConfig } from '~/utils.ts'; import { type PgRemoteQueryResultHKT, PgRemoteSession } from './session.ts'; export class PgRemoteDatabase< TSchema extends Record = Record, > extends PgDatabase { static override readonly [entityKind]: string = 'PgRemoteDatabase'; } export type RemoteCallback = ( sql: string, params: any[], method: 'all' | 'execute', typings?: any[], ) => Promise<{ rows: any[] }>; export function drizzle = Record>( callback: RemoteCallback, config: DrizzleConfig = {}, _dialect: () => PgDialect = () => new PgDialect({ casing: config.casing }), ): PgRemoteDatabase { const dialect = _dialect(); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new PgRemoteSession(callback, dialect, schema, { logger, cache: config.cache }); const db = new PgRemoteDatabase(dialect, session, schema as any) as PgRemoteDatabase; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db; } ================================================ FILE: drizzle-orm/src/pg-proxy/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/pg-proxy/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; import type { PgRemoteDatabase } from './driver.ts'; export type ProxyMigrator = (migrationQueries: string[]) => Promise; export async function migrate>( db: PgRemoteDatabase, callback: ProxyMigrator, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); const migrationTableCreate = sql` CREATE TABLE IF NOT EXISTS "drizzle"."__drizzle_migrations" ( id SERIAL PRIMARY KEY, hash text NOT NULL, created_at numeric ) `; await db.execute(sql`CREATE SCHEMA IF NOT EXISTS "drizzle"`); await db.execute(migrationTableCreate); const dbMigrations = await db.execute<{ id: number; hash: string; created_at: string; }>( sql`SELECT id, hash, created_at FROM "drizzle"."__drizzle_migrations" ORDER BY created_at DESC LIMIT 1`, ); const lastDbMigration = dbMigrations[0] ?? undefined; const queriesToRun: string[] = []; for (const migration of migrations) { if ( !lastDbMigration || Number(lastDbMigration.created_at)! < migration.folderMillis ) { queriesToRun.push( ...migration.sql, `INSERT INTO "drizzle"."__drizzle_migrations" ("hash", "created_at") VALUES('${migration.hash}', '${migration.folderMillis}')`, ); } } await callback(queriesToRun); } ================================================ FILE: drizzle-orm/src/pg-proxy/session.ts ================================================ import { type Cache, NoopCache } from '~/cache/core/cache.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery as PreparedQueryBase, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import type { QueryWithTypings } from '~/sql/sql.ts'; import { fillPlaceholders } from '~/sql/sql.ts'; import { tracer } from '~/tracing.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; import type { RemoteCallback } from './driver.ts'; export interface PgRemoteSessionOptions { logger?: Logger; cache?: Cache; } export class PgRemoteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { static override readonly [entityKind]: string = 'PgRemoteSession'; private logger: Logger; private cache: Cache; constructor( private client: RemoteCallback, dialect: PgDialect, private schema: RelationalSchemaConfig | undefined, options: PgRemoteSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: QueryWithTypings, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PreparedQuery { return new PreparedQuery( this.client, query.sql, query.params, query.typings, this.logger, this.cache, queryMetadata, cacheConfig, fields, isResponseInArrayMode, customResultMapper, ); } override async transaction( _transaction: (tx: PgProxyTransaction) => Promise, _config?: PgTransactionConfig, ): Promise { throw new Error('Transactions are not supported by the Postgres Proxy driver'); } } export class PgProxyTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { static override readonly [entityKind]: string = 'PgProxyTransaction'; override async transaction( _transaction: (tx: PgProxyTransaction) => Promise, ): Promise { throw new Error('Transactions are not supported by the Postgres Proxy driver'); } } export class PreparedQuery extends PreparedQueryBase { static override readonly [entityKind]: string = 'PgProxyPreparedQuery'; constructor( private client: RemoteCallback, private queryString: string, private params: unknown[], private typings: any[] | undefined, private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => T['execute'], ) { super({ sql: queryString, params }, cache, queryMetadata, cacheConfig); } async execute(placeholderValues: Record | undefined = {}): Promise { return tracer.startActiveSpan('drizzle.execute', async (span) => { const params = fillPlaceholders(this.params, placeholderValues); const { fields, client, queryString, joinsNotNullableMap, customResultMapper, logger, typings } = this; span?.setAttributes({ 'drizzle.query.text': queryString, 'drizzle.query.params': JSON.stringify(params), }); logger.logQuery(queryString, params); if (!fields && !customResultMapper) { return tracer.startActiveSpan('drizzle.driver.execute', async () => { const { rows } = await this.queryWithCache(queryString, params, async () => { return await client(queryString, params as any[], 'execute', typings); }); return rows; }); } const rows = await tracer.startActiveSpan('drizzle.driver.execute', async () => { span?.setAttributes({ 'drizzle.query.text': queryString, 'drizzle.query.params': JSON.stringify(params), }); const { rows } = await this.queryWithCache(queryString, params, async () => { return await client(queryString, params as any[], 'all', typings); }); return rows; }); return tracer.startActiveSpan('drizzle.mapResponse', () => { return customResultMapper ? customResultMapper(rows) : rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); }); }); } async all() { } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } export interface PgRemoteQueryResultHKT extends PgQueryResultHKT { type: Assume[]; } ================================================ FILE: drizzle-orm/src/pglite/driver.ts ================================================ import { PGlite, type PGliteOptions } from '@electric-sql/pglite'; import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import type { PgliteClient, PgliteQueryResultHKT } from './session.ts'; import { PgliteSession } from './session.ts'; export interface PgDriverOptions { logger?: Logger; cache?: Cache; } export class PgliteDriver { static readonly [entityKind]: string = 'PgliteDriver'; constructor( private client: PgliteClient, private dialect: PgDialect, private options: PgDriverOptions = {}, ) { } createSession( schema: RelationalSchemaConfig | undefined, ): PgliteSession, TablesRelationalConfig> { return new PgliteSession(this.client, this.dialect, schema, { logger: this.options.logger, cache: this.options.cache, }); } } export class PgliteDatabase< TSchema extends Record = Record, > extends PgDatabase { static override readonly [entityKind]: string = 'PgliteDatabase'; } function construct = Record>( client: PgliteClient, config: DrizzleConfig = {}, ): PgliteDatabase & { $client: PgliteClient; } { const dialect = new PgDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const driver = new PgliteDriver(client, dialect, { logger, cache: config.cache }); const session = driver.createSession(schema); const db = new PgliteDatabase(dialect, session, schema as any) as PgliteDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } // ( db).$cache = { invalidate: ( config).cache?.onMutate }; // if (config.cache) { // for ( // const key of Object.getOwnPropertyNames(Object.getPrototypeOf(config.cache)).filter((key) => // key !== 'constructor' // ) // ) { // ( db).$cache[key as keyof typeof config.cache] = ( config).cache[key]; // } // } return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends PGlite = PGlite, >( ...params: | [] | [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection?: (PGliteOptions & { dataDir?: string }) | string; } | { client: TClient; }) ), ] ): PgliteDatabase & { $client: TClient; } { if (params[0] === undefined || typeof params[0] === 'string') { const instance = new PGlite(params[0]); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as { connection?: PGliteOptions & { dataDir: string }; client?: TClient; } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; if (typeof connection === 'object') { const { dataDir, ...options } = connection; const instance = new PGlite(dataDir, options); return construct(instance, drizzleConfig) as any; } const instance = new PGlite(connection); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): PgliteDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/pglite/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/pglite/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { PgliteDatabase } from './driver.ts'; export async function migrate>( db: PgliteDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/pglite/session.ts ================================================ import type { PGlite, QueryOptions, Results, Row, Transaction } from '@electric-sql/pglite'; import { entityKind } from '~/entity.ts'; import { type Logger, NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; import { types } from '@electric-sql/pglite'; import { type Cache, NoopCache } from '~/cache/core/cache.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; export type PgliteClient = PGlite; export class PglitePreparedQuery extends PgPreparedQuery { static override readonly [entityKind]: string = 'PglitePreparedQuery'; private rawQueryConfig: QueryOptions; private queryConfig: QueryOptions; constructor( private client: PgliteClient | Transaction, private queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, name: string | undefined, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => T['execute'], ) { super({ sql: queryString, params }, cache, queryMetadata, cacheConfig); this.rawQueryConfig = { rowMode: 'object', parsers: { [types.TIMESTAMP]: (value) => value, [types.TIMESTAMPTZ]: (value) => value, [types.INTERVAL]: (value) => value, [types.DATE]: (value) => value, // numeric[] [1231]: (value) => value, // timestamp[] [1115]: (value) => value, // timestamp with timezone[] [1185]: (value) => value, // interval[] [1187]: (value) => value, // date[] [1182]: (value) => value, }, }; this.queryConfig = { rowMode: 'array', parsers: { [types.TIMESTAMP]: (value) => value, [types.TIMESTAMPTZ]: (value) => value, [types.INTERVAL]: (value) => value, [types.DATE]: (value) => value, // numeric[] [1231]: (value) => value, // timestamp[] [1115]: (value) => value, // timestamp with timezone[] [1185]: (value) => value, // interval[] [1187]: (value) => value, // date[] [1182]: (value) => value, }, }; } async execute(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.queryString, params); const { fields, client, queryConfig, joinsNotNullableMap, customResultMapper, queryString, rawQueryConfig } = this; if (!fields && !customResultMapper) { return this.queryWithCache(queryString, params, async () => { return await client.query(queryString, params, rawQueryConfig); }); } const result = await this.queryWithCache(queryString, params, async () => { return await client.query(queryString, params, queryConfig); }); return customResultMapper ? customResultMapper(result.rows) : result.rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } all(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.queryString, params); return this.queryWithCache(this.queryString, params, async () => { return await this.client.query(this.queryString, params, this.rawQueryConfig); }).then((result) => result.rows); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } export interface PgliteSessionOptions { logger?: Logger; cache?: Cache; } export class PgliteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { static override readonly [entityKind]: string = 'PgliteSession'; private logger: Logger; private cache: Cache; constructor( private client: PgliteClient | Transaction, dialect: PgDialect, private schema: RelationalSchemaConfig | undefined, private options: PgliteSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PgPreparedQuery { return new PglitePreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, name, isResponseInArrayMode, customResultMapper, ); } override async transaction( transaction: (tx: PgliteTransaction) => Promise, config?: PgTransactionConfig | undefined, ): Promise { return (this.client as PgliteClient).transaction(async (client) => { const session = new PgliteSession( client, this.dialect, this.schema, this.options, ); const tx = new PgliteTransaction(this.dialect, session, this.schema); if (config) { await tx.setTransaction(config); } return transaction(tx); }) as Promise; } override async count(sql: SQL): Promise { const res = await this.execute<{ rows: [{ count: string }] }>(sql); return Number( res['rows'][0]['count'], ); } } export class PgliteTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { static override readonly [entityKind]: string = 'PgliteTransaction'; override async transaction(transaction: (tx: PgliteTransaction) => Promise): Promise { const savepointName = `sp${this.nestedIndex + 1}`; const tx = new PgliteTransaction( this.dialect, this.session, this.schema, this.nestedIndex + 1, ); await tx.execute(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await tx.execute(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export interface PgliteQueryResultHKT extends PgQueryResultHKT { type: Results>; } ================================================ FILE: drizzle-orm/src/planetscale-serverless/driver.ts ================================================ import type { Config } from '@planetscale/database'; import { Client } from '@planetscale/database'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { MySqlDatabase } from '~/mysql-core/db.ts'; import { MySqlDialect } from '~/mysql-core/dialect.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import type { PlanetScalePreparedQueryHKT, PlanetscaleQueryResultHKT } from './session.ts'; import { PlanetscaleSession } from './session.ts'; export interface PlanetscaleSDriverOptions { logger?: Logger; cache?: Cache; } export class PlanetScaleDatabase< TSchema extends Record = Record, > extends MySqlDatabase { static override readonly [entityKind]: string = 'PlanetScaleDatabase'; } function construct< TSchema extends Record = Record, TClient extends Client = Client, >( client: TClient, config: DrizzleConfig = {}, ): PlanetScaleDatabase & { $client: TClient; } { // Client is not Drizzle Object, so we can ignore this rule here // eslint-disable-next-line no-instanceof/no-instanceof if (!(client instanceof Client)) { throw new Error(`Warning: You need to pass an instance of Client: import { Client } from "@planetscale/database"; const client = new Client({ host: process.env["DATABASE_HOST"], username: process.env["DATABASE_USERNAME"], password: process.env["DATABASE_PASSWORD"], }); const db = drizzle(client); `); } const dialect = new MySqlDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new PlanetscaleSession(client, dialect, undefined, schema, { logger, cache: config.cache }); const db = new PlanetScaleDatabase(dialect, session, schema as any, 'planetscale') as PlanetScaleDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends Client = Client, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | Config; } | { client: TClient; }) ), ] ): PlanetScaleDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = new Client({ url: params[0], }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: Config | string; client?: TClient } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; const instance = typeof connection === 'string' ? new Client({ url: connection, }) : new Client( connection!, ); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): PlanetScaleDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/planetscale-serverless/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/planetscale-serverless/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { PlanetScaleDatabase } from './driver.ts'; export async function migrate>( db: PlanetScaleDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/planetscale-serverless/session.ts ================================================ import type { Client, Connection, ExecutedQuery, Transaction } from '@planetscale/database'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { SelectedFieldsOrdered } from '~/mysql-core/query-builders/select.types.ts'; import { MySqlPreparedQuery, type MySqlPreparedQueryConfig, type MySqlPreparedQueryHKT, type MySqlQueryResultHKT, MySqlSession, MySqlTransaction, } from '~/mysql-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; export class PlanetScalePreparedQuery extends MySqlPreparedQuery { static override readonly [entityKind]: string = 'PlanetScalePreparedQuery'; private rawQuery = { as: 'object' } as const; private query = { as: 'array' } as const; constructor( private client: Client | Transaction | Connection, private queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, private customResultMapper?: (rows: unknown[][]) => T['execute'], // Keys that were used in $default and the value that was generated for them private generatedIds?: Record[], // Keys that should be returned, it has the column with all properries + key from object private returningIds?: SelectedFieldsOrdered, ) { super(cache, queryMetadata, cacheConfig); } async execute(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.queryString, params); const { fields, client, queryString, rawQuery, query, joinsNotNullableMap, customResultMapper, returningIds, generatedIds, } = this; if (!fields && !customResultMapper) { const res = await this.queryWithCache(queryString, params, async () => { return await client.execute(queryString, params, rawQuery); }); const insertId = Number.parseFloat(res.insertId); const affectedRows = res.rowsAffected; // for each row, I need to check keys from if (returningIds) { const returningResponse = []; let j = 0; for (let i = insertId; i < insertId + affectedRows; i++) { for (const column of returningIds) { const key = returningIds[0]!.path[0]!; if (is(column.field, Column)) { // @ts-ignore if (column.field.primary && column.field.autoIncrement) { returningResponse.push({ [key]: i }); } if (column.field.defaultFn && generatedIds) { // generatedIds[rowIdx][key] returningResponse.push({ [key]: generatedIds[j]![key] }); } } } j++; } return returningResponse; } return res; } const { rows } = await this.queryWithCache(queryString, params, async () => { return await client.execute(queryString, params, query); }); if (customResultMapper) { return customResultMapper(rows as unknown[][]); } return rows.map((row) => mapResultRow(fields!, row as unknown[], joinsNotNullableMap)); } override iterator(_placeholderValues?: Record): AsyncGenerator { throw new Error('Streaming is not supported by the PlanetScale Serverless driver'); } } export interface PlanetscaleSessionOptions { logger?: Logger; cache?: Cache; } export class PlanetscaleSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlSession { static override readonly [entityKind]: string = 'PlanetscaleSession'; private logger: Logger; private client: Client | Transaction | Connection; private cache: Cache; constructor( private baseClient: Client | Connection, dialect: MySqlDialect, tx: Transaction | undefined, private schema: RelationalSchemaConfig | undefined, private options: PlanetscaleSessionOptions = {}, ) { super(dialect); this.client = tx ?? baseClient; this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], generatedIds?: Record[], returningIds?: SelectedFieldsOrdered, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): MySqlPreparedQuery { return new PlanetScalePreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, customResultMapper, generatedIds, returningIds, ); } async query(query: string, params: unknown[]): Promise { this.logger.logQuery(query, params); return await this.client.execute(query, params, { as: 'array' }); } async queryObjects( query: string, params: unknown[], ): Promise { return this.client.execute(query, params, { as: 'object' }); } override all(query: SQL): Promise { const querySql = this.dialect.sqlToQuery(query); this.logger.logQuery(querySql.sql, querySql.params); return this.client.execute(querySql.sql, querySql.params, { as: 'object' }).then(( eQuery, ) => eQuery.rows); } override async count(sql: SQL): Promise { const res = await this.execute<{ rows: [{ count: string }] }>(sql); return Number( res['rows'][0]['count'], ); } override transaction( transaction: (tx: PlanetScaleTransaction) => Promise, ): Promise { return this.baseClient.transaction((pstx) => { const session = new PlanetscaleSession(this.baseClient, this.dialect, pstx, this.schema, this.options); const tx = new PlanetScaleTransaction( this.dialect, session as MySqlSession, this.schema, ); return transaction(tx); }); } } export class PlanetScaleTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlTransaction { static override readonly [entityKind]: string = 'PlanetScaleTransaction'; constructor( dialect: MySqlDialect, session: MySqlSession, schema: RelationalSchemaConfig | undefined, nestedIndex = 0, ) { super(dialect, session, schema, nestedIndex, 'planetscale'); } override async transaction( transaction: (tx: PlanetScaleTransaction) => Promise, ): Promise { const savepointName = `sp${this.nestedIndex + 1}`; const tx = new PlanetScaleTransaction( this.dialect, this.session, this.schema, this.nestedIndex + 1, ); await tx.execute(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await tx.execute(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export interface PlanetscaleQueryResultHKT extends MySqlQueryResultHKT { type: ExecutedQuery; } export interface PlanetScalePreparedQueryHKT extends MySqlPreparedQueryHKT { type: PlanetScalePreparedQuery>; } ================================================ FILE: drizzle-orm/src/postgres-js/README.md ================================================ # Drizzle ORM | [Postgres.js](https://github.com/porsager/postgres) driver ## Installation ```bash # npm npm i drizzle-orm postgres npm i -D drizzle-kit # yarn yarn add drizzle-orm postgres yarn add -D drizzle-kit # pnpm pnpm add drizzle-orm postgres pnpm add -D drizzle-kit ``` ## Connection ```typescript import { drizzle } from 'drizzle-orm/postgres-js'; import postgres from 'postgres'; const client = postgres(connectionString); const db = drizzle(client); ``` See [main docs](/drizzle-orm/src/pg-core/README.md#sql-schema-declaration) for further usage. ## Running migrations In order to run the migrations, [you need to use `max: 1` in the postgres.js connection options](https://github.com/porsager/postgres#unsafe_transaction). You can create a separate connection instance for migrations with that setting. ```typescript import postgres from 'postgres'; import { migrate } from 'drizzle-orm/postgres-js/migrator'; const migrationsClient = postgres(connectionString, { max: 1, }); const db = drizzle(migrationsClient); await migrate(db, { migrationsFolder: '...' }); ``` See [main migrations docs](/drizzle-orm/src/pg-core/README.md#migrations) for further info. ================================================ FILE: drizzle-orm/src/postgres-js/driver.ts ================================================ import pgClient, { type Options, type PostgresType, type Sql } from 'postgres'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import type { PostgresJsQueryResultHKT } from './session.ts'; import { PostgresJsSession } from './session.ts'; export class PostgresJsDatabase< TSchema extends Record = Record, > extends PgDatabase { static override readonly [entityKind]: string = 'PostgresJsDatabase'; } function construct = Record>( client: Sql, config: DrizzleConfig = {}, ): PostgresJsDatabase & { $client: Sql; } { const transparentParser = (val: any) => val; // Override postgres.js default date parsers: https://github.com/porsager/postgres/discussions/761 for (const type of ['1184', '1082', '1083', '1114', '1182', '1185', '1115', '1231']) { client.options.parsers[type as any] = transparentParser; client.options.serializers[type as any] = transparentParser; } client.options.serializers['114'] = transparentParser; client.options.serializers['3802'] = transparentParser; const dialect = new PgDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new PostgresJsSession(client, dialect, schema, { logger, cache: config.cache }); const db = new PostgresJsDatabase(dialect, session, schema as any) as PostgresJsDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends Sql = Sql, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ connection: string | ({ url?: string } & Options>); } | { client: TClient; }) ), ] ): PostgresJsDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = pgClient(params[0] as string); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as { connection?: { url?: string } & Options>; client?: TClient; } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; if (typeof connection === 'object' && connection.url !== undefined) { const { url, ...config } = connection; const instance = pgClient(url, config); return construct(instance, drizzleConfig) as any; } const instance = pgClient(connection); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): PostgresJsDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({ options: { parsers: {}, serializers: {}, }, } as any, config) as any; } } ================================================ FILE: drizzle-orm/src/postgres-js/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/postgres-js/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { PostgresJsDatabase } from './driver.ts'; export async function migrate>( db: PostgresJsDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/postgres-js/session.ts ================================================ import type { Row, RowList, Sql, TransactionSql } from 'postgres'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query } from '~/sql/sql.ts'; import { tracer } from '~/tracing.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; export class PostgresJsPreparedQuery extends PgPreparedQuery { static override readonly [entityKind]: string = 'PostgresJsPreparedQuery'; constructor( private client: Sql, private queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => T['execute'], ) { super({ sql: queryString, params }, cache, queryMetadata, cacheConfig); } async execute(placeholderValues: Record | undefined = {}): Promise { return tracer.startActiveSpan('drizzle.execute', async (span) => { const params = fillPlaceholders(this.params, placeholderValues); span?.setAttributes({ 'drizzle.query.text': this.queryString, 'drizzle.query.params': JSON.stringify(params), }); this.logger.logQuery(this.queryString, params); const { fields, queryString: query, client, joinsNotNullableMap, customResultMapper } = this; if (!fields && !customResultMapper) { return tracer.startActiveSpan('drizzle.driver.execute', () => { return this.queryWithCache(query, params, async () => { return await client.unsafe(query, params as any[]); }); }); } const rows = await tracer.startActiveSpan('drizzle.driver.execute', () => { span?.setAttributes({ 'drizzle.query.text': query, 'drizzle.query.params': JSON.stringify(params), }); return this.queryWithCache(query, params, async () => { return await client.unsafe(query, params as any[]).values(); }); }); return tracer.startActiveSpan('drizzle.mapResponse', () => { return customResultMapper ? customResultMapper(rows) : rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); }); }); } all(placeholderValues: Record | undefined = {}): Promise { return tracer.startActiveSpan('drizzle.execute', async (span) => { const params = fillPlaceholders(this.params, placeholderValues); span?.setAttributes({ 'drizzle.query.text': this.queryString, 'drizzle.query.params': JSON.stringify(params), }); this.logger.logQuery(this.queryString, params); return tracer.startActiveSpan('drizzle.driver.execute', () => { span?.setAttributes({ 'drizzle.query.text': this.queryString, 'drizzle.query.params': JSON.stringify(params), }); return this.queryWithCache(this.queryString, params, async () => { return this.client.unsafe(this.queryString, params as any[]); }); }); }); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } export interface PostgresJsSessionOptions { logger?: Logger; cache?: Cache; } export class PostgresJsSession< TSQL extends Sql, TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { static override readonly [entityKind]: string = 'PostgresJsSession'; logger: Logger; private cache: Cache; constructor( public client: TSQL, dialect: PgDialect, private schema: RelationalSchemaConfig | undefined, /** @internal */ readonly options: PostgresJsSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PgPreparedQuery { return new PostgresJsPreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, isResponseInArrayMode, customResultMapper, ); } query(query: string, params: unknown[]): Promise> { this.logger.logQuery(query, params); return this.client.unsafe(query, params as any[]).values(); } queryObjects( query: string, params: unknown[], ): Promise> { return this.client.unsafe(query, params as any[]); } override transaction( transaction: (tx: PostgresJsTransaction) => Promise, config?: PgTransactionConfig, ): Promise { return this.client.begin(async (client) => { const session = new PostgresJsSession( client, this.dialect, this.schema, this.options, ); const tx = new PostgresJsTransaction(this.dialect, session, this.schema); if (config) { await tx.setTransaction(config); } return transaction(tx); }) as Promise; } } export class PostgresJsTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { static override readonly [entityKind]: string = 'PostgresJsTransaction'; constructor( dialect: PgDialect, /** @internal */ override readonly session: PostgresJsSession, schema: RelationalSchemaConfig | undefined, nestedIndex = 0, ) { super(dialect, session, schema, nestedIndex); } override transaction( transaction: (tx: PostgresJsTransaction) => Promise, ): Promise { return this.session.client.savepoint((client) => { const session = new PostgresJsSession( client, this.dialect, this.schema, this.session.options, ); const tx = new PostgresJsTransaction(this.dialect, session, this.schema); return transaction(tx); }) as Promise; } } export interface PostgresJsQueryResultHKT extends PgQueryResultHKT { type: RowList[]>; } ================================================ FILE: drizzle-orm/src/primary-key.ts ================================================ import { entityKind } from '~/entity.ts'; import type { AnyColumn } from './column.ts'; import type { Table } from './table.ts'; export abstract class PrimaryKey { static readonly [entityKind]: string = 'PrimaryKey'; declare protected $brand: 'PrimaryKey'; constructor(readonly table: Table, readonly columns: AnyColumn[]) {} } ================================================ FILE: drizzle-orm/src/prisma/mysql/driver.ts ================================================ import type { PrismaClient } from '@prisma/client/extension'; import { Prisma } from '@prisma/client'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { MySqlDatabase, MySqlDialect } from '~/mysql-core/index.ts'; import type { DrizzleConfig } from '~/utils.ts'; import type { PrismaMySqlPreparedQueryHKT, PrismaMySqlQueryResultHKT } from './session.ts'; import { PrismaMySqlSession } from './session.ts'; export class PrismaMySqlDatabase extends MySqlDatabase> { static override readonly [entityKind]: string = 'PrismaMySqlDatabase'; constructor(client: PrismaClient, logger: Logger | undefined) { const dialect = new MySqlDialect(); super(dialect, new PrismaMySqlSession(dialect, client, { logger }), undefined, 'default'); } } export type PrismaMySqlConfig = Omit; export function drizzle(config: PrismaMySqlConfig = {}) { let logger: Logger | undefined; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } return Prisma.defineExtension((client) => { return client.$extends({ name: 'drizzle', client: { $drizzle: new PrismaMySqlDatabase(client, logger), }, }); }); } ================================================ FILE: drizzle-orm/src/prisma/mysql/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/prisma/mysql/session.ts ================================================ import type { PrismaClient } from '@prisma/client/extension'; import { entityKind } from '~/entity.ts'; import { type Logger, NoopLogger } from '~/logger.ts'; import type { MySqlDialect, MySqlPreparedQueryConfig, MySqlPreparedQueryHKT, MySqlQueryResultHKT, MySqlTransaction, MySqlTransactionConfig, } from '~/mysql-core/index.ts'; import { MySqlPreparedQuery, MySqlSession } from '~/mysql-core/index.ts'; import { fillPlaceholders } from '~/sql/sql.ts'; import type { Query, SQL } from '~/sql/sql.ts'; import type { Assume } from '~/utils.ts'; export class PrismaMySqlPreparedQuery extends MySqlPreparedQuery { override iterator(_placeholderValues?: Record | undefined): AsyncGenerator { throw new Error('Method not implemented.'); } static override readonly [entityKind]: string = 'PrismaMySqlPreparedQuery'; constructor( private readonly prisma: PrismaClient, private readonly query: Query, private readonly logger: Logger, ) { super(undefined, undefined, undefined); } override execute(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return this.prisma.$queryRawUnsafe(this.query.sql, ...params); } } export interface PrismaMySqlSessionOptions { logger?: Logger; } export class PrismaMySqlSession extends MySqlSession { static override readonly [entityKind]: string = 'PrismaMySqlSession'; private readonly logger: Logger; constructor( dialect: MySqlDialect, private readonly prisma: PrismaClient, private readonly options: PrismaMySqlSessionOptions, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); } override execute(query: SQL): Promise { return this.prepareQuery(this.dialect.sqlToQuery(query)).execute(); } override all(_query: SQL): Promise { throw new Error('Method not implemented.'); } override prepareQuery( query: Query, ): MySqlPreparedQuery { return new PrismaMySqlPreparedQuery(this.prisma, query, this.logger); } override transaction( _transaction: ( tx: MySqlTransaction< PrismaMySqlQueryResultHKT, PrismaMySqlPreparedQueryHKT, Record, Record >, ) => Promise, _config?: MySqlTransactionConfig, ): Promise { throw new Error('Method not implemented.'); } } export interface PrismaMySqlQueryResultHKT extends MySqlQueryResultHKT { type: []; } export interface PrismaMySqlPreparedQueryHKT extends MySqlPreparedQueryHKT { type: PrismaMySqlPreparedQuery>; } ================================================ FILE: drizzle-orm/src/prisma/pg/driver.ts ================================================ import type { PrismaClient } from '@prisma/client/extension'; import { Prisma } from '@prisma/client'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase, PgDialect } from '~/pg-core/index.ts'; import type { DrizzleConfig } from '~/utils.ts'; import type { PrismaPgQueryResultHKT } from './session.ts'; import { PrismaPgSession } from './session.ts'; export class PrismaPgDatabase extends PgDatabase> { static override readonly [entityKind]: string = 'PrismaPgDatabase'; constructor(client: PrismaClient, logger: Logger | undefined) { const dialect = new PgDialect(); super(dialect, new PrismaPgSession(dialect, client, { logger }), undefined); } } export type PrismaPgConfig = Omit; export function drizzle(config: PrismaPgConfig = {}) { let logger: Logger | undefined; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } return Prisma.defineExtension((client) => { return client.$extends({ name: 'drizzle', client: { $drizzle: new PrismaPgDatabase(client, logger), }, }); }); } ================================================ FILE: drizzle-orm/src/prisma/pg/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/prisma/pg/session.ts ================================================ import type { PrismaClient } from '@prisma/client/extension'; import { entityKind } from '~/entity.ts'; import { type Logger, NoopLogger } from '~/logger.ts'; import type { PgDialect, PgQueryResultHKT, PgTransaction, PgTransactionConfig, PreparedQueryConfig, } from '~/pg-core/index.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/index.ts'; import type { Query, SQL } from '~/sql/sql.ts'; import { fillPlaceholders } from '~/sql/sql.ts'; export class PrismaPgPreparedQuery extends PgPreparedQuery { static override readonly [entityKind]: string = 'PrismaPgPreparedQuery'; constructor( private readonly prisma: PrismaClient, query: Query, private readonly logger: Logger, ) { super(query, undefined, undefined, undefined); } override execute(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return this.prisma.$queryRawUnsafe(this.query.sql, ...params); } override all(): Promise { throw new Error('Method not implemented.'); } override isResponseInArrayMode(): boolean { return false; } } export interface PrismaPgSessionOptions { logger?: Logger; } export class PrismaPgSession extends PgSession { static override readonly [entityKind]: string = 'PrismaPgSession'; private readonly logger: Logger; constructor( dialect: PgDialect, private readonly prisma: PrismaClient, private readonly options: PrismaPgSessionOptions, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); } override execute(query: SQL): Promise { return this.prepareQuery(this.dialect.sqlToQuery(query)).execute(); } override prepareQuery(query: Query): PgPreparedQuery { return new PrismaPgPreparedQuery(this.prisma, query, this.logger); } override transaction( _transaction: (tx: PgTransaction, Record>) => Promise, _config?: PgTransactionConfig, ): Promise { throw new Error('Method not implemented.'); } } export interface PrismaPgQueryResultHKT extends PgQueryResultHKT { type: []; } ================================================ FILE: drizzle-orm/src/prisma/schema.prisma ================================================ generator client { provider = "prisma-client-js" } datasource db { provider = "postgresql" url = env("DB_URL") } model User { id Int @id @default(autoincrement()) email String @unique name String? } ================================================ FILE: drizzle-orm/src/prisma/sqlite/driver.ts ================================================ import { Prisma } from '@prisma/client'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { BaseSQLiteDatabase, SQLiteAsyncDialect } from '~/sqlite-core/index.ts'; import type { DrizzleConfig } from '~/utils.ts'; import { PrismaSQLiteSession } from './session.ts'; export type PrismaSQLiteDatabase = BaseSQLiteDatabase<'async', []>; export type PrismaSQLiteConfig = Omit; export function drizzle(config: PrismaSQLiteConfig = {}) { const dialect = new SQLiteAsyncDialect(); let logger: Logger | undefined; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } return Prisma.defineExtension((client) => { const session = new PrismaSQLiteSession(client, dialect, { logger }); return client.$extends({ name: 'drizzle', client: { $drizzle: new BaseSQLiteDatabase('async', dialect, session, undefined) as PrismaSQLiteDatabase, }, }); }); } ================================================ FILE: drizzle-orm/src/prisma/sqlite/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/prisma/sqlite/session.ts ================================================ import type { PrismaClient } from '@prisma/client/extension'; import { entityKind } from '~/entity.ts'; import { type Logger, NoopLogger } from '~/logger.ts'; import type { Query } from '~/sql/sql.ts'; import { fillPlaceholders } from '~/sql/sql.ts'; import type { PreparedQueryConfig as PreparedQueryConfigBase, SelectedFieldsOrdered, SQLiteAsyncDialect, SQLiteExecuteMethod, SQLiteTransaction, SQLiteTransactionConfig, } from '~/sqlite-core/index.ts'; import { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/index.ts'; type PreparedQueryConfig = Omit; export class PrismaSQLitePreparedQuery extends SQLitePreparedQuery< { type: 'async'; run: []; all: T['all']; get: T['get']; values: never; execute: T['execute'] } > { static override readonly [entityKind]: string = 'PrismaSQLitePreparedQuery'; constructor( private readonly prisma: PrismaClient, query: Query, private readonly logger: Logger, executeMethod: SQLiteExecuteMethod, ) { super('async', executeMethod, query); } override all(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return this.prisma.$queryRawUnsafe(this.query.sql, ...params); } override async run(placeholderValues?: Record | undefined): Promise<[]> { await this.all(placeholderValues); return []; } override async get(placeholderValues?: Record | undefined): Promise { const all = await this.all(placeholderValues) as unknown[]; return all[0]; } override values(_placeholderValues?: Record | undefined): Promise { throw new Error('Method not implemented.'); } override isResponseInArrayMode(): boolean { return false; } } export interface PrismaSQLiteSessionOptions { logger?: Logger; } export class PrismaSQLiteSession extends SQLiteSession<'async', unknown, Record, Record> { static override readonly [entityKind]: string = 'PrismaSQLiteSession'; private readonly logger: Logger; constructor( private readonly prisma: PrismaClient, dialect: SQLiteAsyncDialect, options: PrismaSQLiteSessionOptions, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); } override prepareQuery>( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, ): PrismaSQLitePreparedQuery { return new PrismaSQLitePreparedQuery(this.prisma, query, this.logger, executeMethod); } override transaction( _transaction: (tx: SQLiteTransaction<'async', unknown, Record, Record>) => Promise, _config?: SQLiteTransactionConfig, ): Promise { throw new Error('Method not implemented.'); } } ================================================ FILE: drizzle-orm/src/query-builders/query-builder.ts ================================================ import { entityKind } from '~/entity.ts'; import type { SQL, SQLWrapper } from '~/sql/index.ts'; export abstract class TypedQueryBuilder implements SQLWrapper { static readonly [entityKind]: string = 'TypedQueryBuilder'; declare _: { selectedFields: TSelection; result: TResult; config?: TConfig; }; /** @internal */ getSelectedFields(): TSelection { return this._.selectedFields; } abstract getSQL(): SQL; } ================================================ FILE: drizzle-orm/src/query-builders/select.types.ts ================================================ import type { ChangeColumnTableName, ColumnDataType, Dialect } from '~/column-builder.ts'; import type { AnyColumn, Column, ColumnBaseConfig, GetColumnData, UpdateColConfig } from '~/column.ts'; import type { SelectedFields } from '~/operations.ts'; import type { ColumnsSelection, SQL, View } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import type { Table } from '~/table.ts'; import type { Assume, DrizzleTypeError, Equal, FromSingleKeyObject, IsAny, IsUnion, Not, Simplify } from '~/utils.ts'; export type JoinType = 'inner' | 'left' | 'right' | 'full' | 'cross'; export type JoinNullability = 'nullable' | 'not-null'; export type ApplyNullability = TNullability extends 'nullable' ? T | null : TNullability extends 'null' ? null : T; export type ApplyNullabilityToColumn = TNullability extends 'not-null' ? TColumn : Column< Assume< UpdateColConfig, ColumnBaseConfig > >; export type ApplyNotNullMapToJoins> = & { [TTableName in keyof TResult & keyof TNullabilityMap & string]: ApplyNullability< TResult[TTableName], TNullabilityMap[TTableName] >; } & {}; export type SelectMode = 'partial' | 'single' | 'multiple'; export type SelectResult< TResult, TSelectMode extends SelectMode, TNullabilityMap extends Record, > = TSelectMode extends 'partial' ? SelectPartialResult : TSelectMode extends 'single' ? SelectResultFields : ApplyNotNullMapToJoins, TNullabilityMap>; type SelectPartialResult> = TNullability extends TNullability ? { [Key in keyof TFields]: TFields[Key] extends infer TField ? TField extends Table ? TField['_']['name'] extends keyof TNullability ? ApplyNullability< SelectResultFields, TNullability[TField['_']['name']] > : never : TField extends Column ? TField['_']['tableName'] extends keyof TNullability ? ApplyNullability, TNullability[TField['_']['tableName']]> : never : TField extends SQL | SQL.Aliased ? SelectResultField : TField extends Subquery ? FromSingleKeyObject< TField['_']['selectedFields'], TField['_']['selectedFields'] extends { [key: string]: infer TValue } ? SelectResultField : never, 'You can only select one column in the subquery' > : TField extends Record ? TField[keyof TField] extends AnyColumn<{ tableName: infer TTableName extends string }> | SQL | SQL.Aliased ? Not> extends true ? ApplyNullability, TNullability[TTableName]> : SelectPartialResult : never : never : never; } : never; export type MapColumnsToTableAlias< TColumns extends ColumnsSelection, TAlias extends string, TDialect extends Dialect, > = & { [Key in keyof TColumns]: TColumns[Key] extends Column ? ChangeColumnTableName, TAlias, TDialect> : TColumns[Key]; } & {}; export type AddAliasToSelection< TSelection extends ColumnsSelection, TAlias extends string, TDialect extends Dialect, > = Simplify< IsAny extends true ? any : { [Key in keyof TSelection]: TSelection[Key] extends Column ? ChangeColumnTableName : TSelection[Key] extends Table ? AddAliasToSelection : TSelection[Key] extends SQL | SQL.Aliased ? TSelection[Key] : TSelection[Key] extends ColumnsSelection ? MapColumnsToTableAlias : never; } >; export type AppendToResult< TTableName extends string | undefined, TResult, TJoinedName extends string | undefined, TSelectedFields extends SelectedFields, TOldSelectMode extends SelectMode, > = TOldSelectMode extends 'partial' ? TResult : TOldSelectMode extends 'single' ? & (TTableName extends string ? Record : TResult) & (TJoinedName extends string ? Record : TSelectedFields) : TResult & (TJoinedName extends string ? Record : TSelectedFields); export type BuildSubquerySelection< TSelection extends ColumnsSelection, TNullability extends Record, > = TSelection extends never ? any : & { [Key in keyof TSelection]: TSelection[Key] extends SQL ? DrizzleTypeError<'You cannot reference this field without assigning it an alias first - use `.as()`'> : TSelection[Key] extends SQL.Aliased ? TSelection[Key] : TSelection[Key] extends Table ? BuildSubquerySelection : TSelection[Key] extends Column ? ApplyNullabilityToColumn : TSelection[Key] extends ColumnsSelection ? BuildSubquerySelection : never; } & {}; type SetJoinsNullability, TValue extends JoinNullability> = { [Key in keyof TNullabilityMap]: TValue; }; export type AppendToNullabilityMap< TJoinsNotNull extends Record, TJoinedName extends string | undefined, TJoinType extends JoinType, > = TJoinedName extends string ? 'left' extends TJoinType ? TJoinsNotNull & { [name in TJoinedName]: 'nullable' } : 'right' extends TJoinType ? SetJoinsNullability & { [name in TJoinedName]: 'not-null' } : 'inner' extends TJoinType ? TJoinsNotNull & { [name in TJoinedName]: 'not-null' } : 'cross' extends TJoinType ? TJoinsNotNull & { [name in TJoinedName]: 'not-null' } : 'full' extends TJoinType ? SetJoinsNullability & { [name in TJoinedName]: 'nullable' } : never : TJoinsNotNull; export type TableLike = Table | Subquery | View | SQL; export type GetSelectTableName = TTable extends Table ? TTable['_']['name'] : TTable extends Subquery ? TTable['_']['alias'] : TTable extends View ? TTable['_']['name'] : TTable extends SQL ? undefined : never; export type GetSelectTableSelection = TTable extends Table ? TTable['_']['columns'] : TTable extends Subquery | View ? Assume : TTable extends SQL ? {} : never; export type SelectResultField = T extends DrizzleTypeError ? T : T extends Table ? Equal extends true ? SelectResultField : never : T extends Column ? GetColumnData : T extends SQL | SQL.Aliased ? T['_']['type'] : T extends Record ? SelectResultFields : never; export type SelectResultFields = Simplify< { [Key in keyof TSelectedFields]: SelectResultField; } >; export type SetOperator = 'union' | 'intersect' | 'except'; ================================================ FILE: drizzle-orm/src/query-promise.ts ================================================ import { entityKind } from '~/entity.ts'; export abstract class QueryPromise implements Promise { static readonly [entityKind]: string = 'QueryPromise'; [Symbol.toStringTag] = 'QueryPromise'; catch( onRejected?: ((reason: any) => TResult | PromiseLike) | null | undefined, ): Promise { return this.then(undefined, onRejected); } finally(onFinally?: (() => void) | null | undefined): Promise { return this.then( (value) => { onFinally?.(); return value; }, (reason) => { onFinally?.(); throw reason; }, ); } then( onFulfilled?: ((value: T) => TResult1 | PromiseLike) | undefined | null, onRejected?: ((reason: any) => TResult2 | PromiseLike) | undefined | null, ): Promise { return this.execute().then(onFulfilled, onRejected); } abstract execute(): Promise; } ================================================ FILE: drizzle-orm/src/relations.ts ================================================ import { type AnyTable, getTableUniqueName, type InferModelFromColumns, Table } from '~/table.ts'; import { type AnyColumn, Column } from './column.ts'; import { entityKind, is } from './entity.ts'; import { PrimaryKeyBuilder } from './pg-core/primary-keys.ts'; import { and, asc, between, desc, eq, exists, gt, gte, ilike, inArray, isNotNull, isNull, like, lt, lte, ne, not, notBetween, notExists, notIlike, notInArray, notLike, or, } from './sql/expressions/index.ts'; import { type Placeholder, SQL, sql } from './sql/sql.ts'; import type { Assume, ColumnsWithTable, Equal, Simplify, ValueOrArray } from './utils.ts'; export abstract class Relation { static readonly [entityKind]: string = 'Relation'; declare readonly $brand: 'Relation'; readonly referencedTableName: TTableName; fieldName!: string; constructor( readonly sourceTable: Table, readonly referencedTable: AnyTable<{ name: TTableName }>, readonly relationName: string | undefined, ) { this.referencedTableName = referencedTable[Table.Symbol.Name] as TTableName; } abstract withFieldName(fieldName: string): Relation; } export class Relations< TTableName extends string = string, TConfig extends Record = Record, > { static readonly [entityKind]: string = 'Relations'; declare readonly $brand: 'Relations'; constructor( readonly table: AnyTable<{ name: TTableName }>, readonly config: (helpers: TableRelationsHelpers) => TConfig, ) {} } export class One< TTableName extends string = string, TIsNullable extends boolean = boolean, > extends Relation { static override readonly [entityKind]: string = 'One'; declare protected $relationBrand: 'One'; constructor( sourceTable: Table, referencedTable: AnyTable<{ name: TTableName }>, readonly config: | RelationConfig< TTableName, string, AnyColumn<{ tableName: TTableName }>[] > | undefined, readonly isNullable: TIsNullable, ) { super(sourceTable, referencedTable, config?.relationName); } withFieldName(fieldName: string): One { const relation = new One( this.sourceTable, this.referencedTable, this.config, this.isNullable, ); relation.fieldName = fieldName; return relation; } } export class Many extends Relation { static override readonly [entityKind]: string = 'Many'; declare protected $relationBrand: 'Many'; constructor( sourceTable: Table, referencedTable: AnyTable<{ name: TTableName }>, readonly config: { relationName: string } | undefined, ) { super(sourceTable, referencedTable, config?.relationName); } withFieldName(fieldName: string): Many { const relation = new Many( this.sourceTable, this.referencedTable, this.config, ); relation.fieldName = fieldName; return relation; } } export type TableRelationsKeysOnly< TSchema extends Record, TTableName extends string, K extends keyof TSchema, > = TSchema[K] extends Relations ? K : never; export type ExtractTableRelationsFromSchema< TSchema extends Record, TTableName extends string, > = ExtractObjectValues< { [ K in keyof TSchema as TableRelationsKeysOnly< TSchema, TTableName, K > ]: TSchema[K] extends Relations ? TConfig : never; } >; export type ExtractObjectValues = T[keyof T]; export type ExtractRelationsFromTableExtraConfigSchema< TConfig extends unknown[], > = ExtractObjectValues< { [ K in keyof TConfig as TConfig[K] extends Relations ? K : never ]: TConfig[K] extends Relations ? TRelationConfig : never; } >; export function getOperators() { return { and, between, eq, exists, gt, gte, ilike, inArray, isNull, isNotNull, like, lt, lte, ne, not, notBetween, notExists, notLike, notIlike, notInArray, or, sql, }; } export type Operators = ReturnType; export function getOrderByOperators() { return { sql, asc, desc, }; } export type OrderByOperators = ReturnType; export type FindTableByDBName< TSchema extends TablesRelationalConfig, TTableName extends string, > = ExtractObjectValues< { [ K in keyof TSchema as TSchema[K]['dbName'] extends TTableName ? K : never ]: TSchema[K]; } >; export type DBQueryConfig< TRelationType extends 'one' | 'many' = 'one' | 'many', TIsRoot extends boolean = boolean, TSchema extends TablesRelationalConfig = TablesRelationalConfig, TTableConfig extends TableRelationalConfig = TableRelationalConfig, > = & { columns?: | { [K in keyof TTableConfig['columns']]?: boolean; } | undefined; with?: | { [K in keyof TTableConfig['relations']]?: | true | DBQueryConfig< TTableConfig['relations'][K] extends One ? 'one' : 'many', false, TSchema, FindTableByDBName< TSchema, TTableConfig['relations'][K]['referencedTableName'] > > | undefined; } | undefined; extras?: | Record | (( fields: Simplify< [TTableConfig['columns']] extends [never] ? {} : TTableConfig['columns'] >, operators: { sql: Operators['sql'] }, ) => Record) | undefined; } & (TRelationType extends 'many' ? & { where?: | SQL | undefined | (( fields: Simplify< [TTableConfig['columns']] extends [never] ? {} : TTableConfig['columns'] >, operators: Operators, ) => SQL | undefined); orderBy?: | ValueOrArray | (( fields: Simplify< [TTableConfig['columns']] extends [never] ? {} : TTableConfig['columns'] >, operators: OrderByOperators, ) => ValueOrArray) | undefined; limit?: number | Placeholder | undefined; } & (TIsRoot extends true ? { offset?: number | Placeholder | undefined; } : {}) : {}); export interface TableRelationalConfig { tsName: string; dbName: string; columns: Record; relations: Record; primaryKey: AnyColumn[]; schema?: string; } export type TablesRelationalConfig = Record; export interface RelationalSchemaConfig< TSchema extends TablesRelationalConfig, > { fullSchema: Record; schema: TSchema; tableNamesMap: Record; } export type ExtractTablesWithRelations< TSchema extends Record, > = { [ K in keyof TSchema as TSchema[K] extends Table ? K : never ]: TSchema[K] extends Table ? { tsName: K & string; dbName: TSchema[K]['_']['name']; columns: TSchema[K]['_']['columns']; relations: ExtractTableRelationsFromSchema< TSchema, TSchema[K]['_']['name'] >; primaryKey: AnyColumn[]; } : never; }; export type ReturnTypeOrValue = T extends (...args: any[]) => infer R ? R : T; export type BuildRelationResult< TSchema extends TablesRelationalConfig, TInclude, TRelations extends Record, > = { [ K in & NonUndefinedKeysOnly & keyof TRelations ]: TRelations[K] extends infer TRel extends Relation ? BuildQueryResult< TSchema, FindTableByDBName, Assume> > extends infer TResult ? TRel extends One ? | TResult | (Equal extends true ? null : never) : TResult[] : never : never; }; export type NonUndefinedKeysOnly = & ExtractObjectValues< { [K in keyof T as T[K] extends undefined ? never : K]: K; } > & keyof T; export type BuildQueryResult< TSchema extends TablesRelationalConfig, TTableConfig extends TableRelationalConfig, TFullSelection extends true | Record, > = Equal extends true ? InferModelFromColumns : TFullSelection extends Record ? Simplify< & (TFullSelection['columns'] extends Record ? InferModelFromColumns< { [ K in Equal< Exclude< TFullSelection['columns'][ & keyof TFullSelection['columns'] & keyof TTableConfig['columns'] ], undefined >, false > extends true ? Exclude< keyof TTableConfig['columns'], NonUndefinedKeysOnly > : & { [K in keyof TFullSelection['columns']]: Equal< TFullSelection['columns'][K], true > extends true ? K : never; }[keyof TFullSelection['columns']] & keyof TTableConfig['columns'] ]: TTableConfig['columns'][K]; } > : InferModelFromColumns) & (TFullSelection['extras'] extends | Record | ((...args: any[]) => Record) ? { [ K in NonUndefinedKeysOnly< ReturnTypeOrValue > ]: Assume< ReturnTypeOrValue[K], SQL.Aliased >['_']['type']; } : {}) & (TFullSelection['with'] extends Record ? BuildRelationResult< TSchema, TFullSelection['with'], TTableConfig['relations'] > : {}) > : never; export interface RelationConfig< TTableName extends string, TForeignTableName extends string, TColumns extends AnyColumn<{ tableName: TTableName }>[], > { relationName?: string; fields: TColumns; references: ColumnsWithTable; } export function extractTablesRelationalConfig< TTables extends TablesRelationalConfig, >( schema: Record, configHelpers: (table: Table) => any, ): { tables: TTables; tableNamesMap: Record } { if ( Object.keys(schema).length === 1 && 'default' in schema && !is(schema['default'], Table) ) { schema = schema['default'] as Record; } // table DB name -> schema table key const tableNamesMap: Record = {}; // Table relations found before their tables - need to buffer them until we know the schema table key const relationsBuffer: Record< string, { relations: Record; primaryKey?: AnyColumn[] } > = {}; const tablesConfig: TablesRelationalConfig = {}; for (const [key, value] of Object.entries(schema)) { if (is(value, Table)) { const dbName = getTableUniqueName(value); const bufferedRelations = relationsBuffer[dbName]; tableNamesMap[dbName] = key; tablesConfig[key] = { tsName: key, dbName: value[Table.Symbol.Name], schema: value[Table.Symbol.Schema], columns: value[Table.Symbol.Columns], relations: bufferedRelations?.relations ?? {}, primaryKey: bufferedRelations?.primaryKey ?? [], }; // Fill in primary keys for ( const column of Object.values( (value as Table)[Table.Symbol.Columns], ) ) { if (column.primary) { tablesConfig[key]!.primaryKey.push(column); } } const extraConfig = value[Table.Symbol.ExtraConfigBuilder]?.((value as Table)[Table.Symbol.ExtraConfigColumns]); if (extraConfig) { for (const configEntry of Object.values(extraConfig)) { if (is(configEntry, PrimaryKeyBuilder)) { tablesConfig[key]!.primaryKey.push(...configEntry.columns); } } } } else if (is(value, Relations)) { const dbName = getTableUniqueName(value.table); const tableName = tableNamesMap[dbName]; const relations: Record = value.config( configHelpers(value.table), ); let primaryKey: AnyColumn[] | undefined; for (const [relationName, relation] of Object.entries(relations)) { if (tableName) { const tableConfig = tablesConfig[tableName]!; tableConfig.relations[relationName] = relation; if (primaryKey) { tableConfig.primaryKey.push(...primaryKey); } } else { if (!(dbName in relationsBuffer)) { relationsBuffer[dbName] = { relations: {}, primaryKey, }; } relationsBuffer[dbName]!.relations[relationName] = relation; } } } } return { tables: tablesConfig as TTables, tableNamesMap }; } export function relations< TTableName extends string, TRelations extends Record>, >( table: AnyTable<{ name: TTableName }>, relations: (helpers: TableRelationsHelpers) => TRelations, ): Relations { return new Relations( table, (helpers: TableRelationsHelpers) => Object.fromEntries( Object.entries(relations(helpers)).map(([key, value]) => [ key, value.withFieldName(key), ]), ) as TRelations, ); } export function createOne(sourceTable: Table) { return function one< TForeignTable extends Table, TColumns extends [ AnyColumn<{ tableName: TTableName }>, ...AnyColumn<{ tableName: TTableName }>[], ], >( table: TForeignTable, config?: RelationConfig, ): One< TForeignTable['_']['name'], Equal > { return new One( sourceTable, table, config, (config?.fields.reduce((res, f) => res && f.notNull, true) ?? false) as Equal, ); }; } export function createMany(sourceTable: Table) { return function many( referencedTable: TForeignTable, config?: { relationName: string }, ): Many { return new Many(sourceTable, referencedTable, config); }; } export interface NormalizedRelation { fields: AnyColumn[]; references: AnyColumn[]; } export function normalizeRelation( schema: TablesRelationalConfig, tableNamesMap: Record, relation: Relation, ): NormalizedRelation { if (is(relation, One) && relation.config) { return { fields: relation.config.fields, references: relation.config.references, }; } const referencedTableTsName = tableNamesMap[getTableUniqueName(relation.referencedTable)]; if (!referencedTableTsName) { throw new Error( `Table "${relation.referencedTable[Table.Symbol.Name]}" not found in schema`, ); } const referencedTableConfig = schema[referencedTableTsName]; if (!referencedTableConfig) { throw new Error(`Table "${referencedTableTsName}" not found in schema`); } const sourceTable = relation.sourceTable; const sourceTableTsName = tableNamesMap[getTableUniqueName(sourceTable)]; if (!sourceTableTsName) { throw new Error( `Table "${sourceTable[Table.Symbol.Name]}" not found in schema`, ); } const reverseRelations: Relation[] = []; for ( const referencedTableRelation of Object.values( referencedTableConfig.relations, ) ) { if ( (relation.relationName && relation !== referencedTableRelation && referencedTableRelation.relationName === relation.relationName) || (!relation.relationName && referencedTableRelation.referencedTable === relation.sourceTable) ) { reverseRelations.push(referencedTableRelation); } } if (reverseRelations.length > 1) { throw relation.relationName ? new Error( `There are multiple relations with name "${relation.relationName}" in table "${referencedTableTsName}"`, ) : new Error( `There are multiple relations between "${referencedTableTsName}" and "${ relation.sourceTable[Table.Symbol.Name] }". Please specify relation name`, ); } if ( reverseRelations[0] && is(reverseRelations[0], One) && reverseRelations[0].config ) { return { fields: reverseRelations[0].config.references, references: reverseRelations[0].config.fields, }; } throw new Error( `There is not enough information to infer relation "${sourceTableTsName}.${relation.fieldName}"`, ); } export function createTableRelationsHelpers( sourceTable: AnyTable<{ name: TTableName }>, ) { return { one: createOne(sourceTable), many: createMany(sourceTable), }; } export type TableRelationsHelpers = ReturnType< typeof createTableRelationsHelpers >; export interface BuildRelationalQueryResult< TTable extends Table = Table, TColumn extends Column = Column, > { tableTsKey: string; selection: { dbKey: string; tsKey: string; field: TColumn | SQL | SQL.Aliased; relationTableTsKey: string | undefined; isJson: boolean; isExtra?: boolean; selection: BuildRelationalQueryResult['selection']; }[]; sql: TTable | SQL; } export function mapRelationalRow( tablesConfig: TablesRelationalConfig, tableConfig: TableRelationalConfig, row: unknown[], buildQueryResultSelection: BuildRelationalQueryResult['selection'], mapColumnValue: (value: unknown) => unknown = (value) => value, ): Record { const result: Record = {}; for ( const [ selectionItemIndex, selectionItem, ] of buildQueryResultSelection.entries() ) { if (selectionItem.isJson) { const relation = tableConfig.relations[selectionItem.tsKey]!; const rawSubRows = row[selectionItemIndex] as | unknown[] | null | [null] | string; const subRows = typeof rawSubRows === 'string' ? (JSON.parse(rawSubRows) as unknown[]) : rawSubRows; result[selectionItem.tsKey] = is(relation, One) ? subRows && mapRelationalRow( tablesConfig, tablesConfig[selectionItem.relationTableTsKey!]!, subRows, selectionItem.selection, mapColumnValue, ) : (subRows as unknown[][]).map((subRow) => mapRelationalRow( tablesConfig, tablesConfig[selectionItem.relationTableTsKey!]!, subRow, selectionItem.selection, mapColumnValue, ) ); } else { const value = mapColumnValue(row[selectionItemIndex]); const field = selectionItem.field!; let decoder; if (is(field, Column)) { decoder = field; } else if (is(field, SQL)) { decoder = field.decoder; } else { decoder = field.sql.decoder; } result[selectionItem.tsKey] = value === null ? null : decoder.mapFromDriverValue(value); } } return result; } ================================================ FILE: drizzle-orm/src/runnable-query.ts ================================================ import type { Dialect } from './column-builder.ts'; import type { PreparedQuery } from './session.ts'; export interface RunnableQuery { readonly _: { readonly dialect: TDialect; readonly result: T; }; /** @internal */ _prepare(): PreparedQuery; } ================================================ FILE: drizzle-orm/src/selection-proxy.ts ================================================ import { ColumnAliasProxyHandler, TableAliasProxyHandler } from './alias.ts'; import { Column } from './column.ts'; import { entityKind, is } from './entity.ts'; import { SQL, View } from './sql/sql.ts'; import { Subquery } from './subquery.ts'; import { ViewBaseConfig } from './view-common.ts'; export class SelectionProxyHandler | View> implements ProxyHandler | View> { static readonly [entityKind]: string = 'SelectionProxyHandler'; private config: { /** * Table alias for the columns */ alias?: string; /** * What to do when a field is an instance of `SQL.Aliased` and it's not a selection field (from a subquery) * * `sql` - return the underlying SQL expression * * `alias` - return the field alias */ sqlAliasedBehavior: 'sql' | 'alias'; /** * What to do when a field is an instance of `SQL` and it doesn't have an alias declared * * `sql` - return the underlying SQL expression * * `error` - return a DrizzleTypeError on type level and throw an error on runtime */ sqlBehavior: 'sql' | 'error'; /** * Whether to replace the original name of the column with the alias * Should be set to `true` for views creation * @default false */ replaceOriginalName?: boolean; }; constructor(config: SelectionProxyHandler['config']) { this.config = { ...config }; } get(subquery: T, prop: string | symbol): any { if (prop === '_') { return { ...subquery['_' as keyof typeof subquery], selectedFields: new Proxy( (subquery as Subquery)._.selectedFields, this as ProxyHandler>, ), }; } if (prop === ViewBaseConfig) { return { ...subquery[ViewBaseConfig as keyof typeof subquery], selectedFields: new Proxy( (subquery as View)[ViewBaseConfig].selectedFields, this as ProxyHandler>, ), }; } if (typeof prop === 'symbol') { return subquery[prop as keyof typeof subquery]; } const columns = is(subquery, Subquery) ? subquery._.selectedFields : is(subquery, View) ? subquery[ViewBaseConfig].selectedFields : subquery; const value: unknown = columns[prop as keyof typeof columns]; if (is(value, SQL.Aliased)) { // Never return the underlying SQL expression for a field previously selected in a subquery if (this.config.sqlAliasedBehavior === 'sql' && !value.isSelectionField) { return value.sql; } const newValue = value.clone(); newValue.isSelectionField = true; return newValue; } if (is(value, SQL)) { if (this.config.sqlBehavior === 'sql') { return value; } throw new Error( `You tried to reference "${prop}" field from a subquery, which is a raw SQL field, but it doesn't have an alias declared. Please add an alias to the field using ".as('alias')" method.`, ); } if (is(value, Column)) { if (this.config.alias) { return new Proxy( value, new ColumnAliasProxyHandler( new Proxy( value.table, new TableAliasProxyHandler(this.config.alias, this.config.replaceOriginalName ?? false), ), ), ); } return value; } if (typeof value !== 'object' || value === null) { return value; } return new Proxy(value, new SelectionProxyHandler(this.config)); } } ================================================ FILE: drizzle-orm/src/session.ts ================================================ import type { Query } from './index.ts'; export interface PreparedQuery { getQuery(): Query; mapResult(response: unknown, isFromBatch?: boolean): unknown; /** @internal */ isResponseInArrayMode(): boolean; } ================================================ FILE: drizzle-orm/src/singlestore/driver.ts ================================================ import { type Connection as CallbackConnection, createPool, type Pool as CallbackPool, type PoolOptions } from 'mysql2'; import type { Connection, Pool } from 'mysql2/promise'; import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { SingleStoreDatabase } from '~/singlestore-core/db.ts'; import { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { npmVersion } from '~/version.ts'; import type { SingleStoreDriverClient, SingleStoreDriverPreparedQueryHKT, SingleStoreDriverQueryResultHKT, } from './session.ts'; import { SingleStoreDriverSession } from './session.ts'; export interface SingleStoreDriverOptions { logger?: Logger; cache?: Cache; } export class SingleStoreDriverDriver { static readonly [entityKind]: string = 'SingleStoreDriverDriver'; constructor( private client: SingleStoreDriverClient, private dialect: SingleStoreDialect, private options: SingleStoreDriverOptions = {}, ) { } createSession( schema: RelationalSchemaConfig | undefined, ): SingleStoreDriverSession, TablesRelationalConfig> { return new SingleStoreDriverSession(this.client, this.dialect, schema, { logger: this.options.logger, cache: this.options.cache, }); } } export { SingleStoreDatabase } from '~/singlestore-core/db.ts'; export class SingleStoreDriverDatabase< TSchema extends Record = Record, > extends SingleStoreDatabase { static override readonly [entityKind]: string = 'SingleStoreDriverDatabase'; } export type SingleStoreDriverDrizzleConfig = Record> = & Omit, 'schema'> & ({ schema: TSchema } | { schema?: undefined }); function construct< TSchema extends Record = Record, TClient extends Pool | Connection | CallbackPool | CallbackConnection = CallbackPool, >( client: TClient, config: SingleStoreDriverDrizzleConfig = {}, ): SingleStoreDriverDatabase & { $client: AnySingleStoreDriverConnection extends TClient ? CallbackPool : TClient; } { const dialect = new SingleStoreDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } const clientForInstance = isCallbackClient(client) ? client.promise() : client; let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const driver = new SingleStoreDriverDriver(clientForInstance as SingleStoreDriverClient, dialect, { logger, cache: config.cache, }); const session = driver.createSession(schema); const db = new SingleStoreDriverDatabase(dialect, session, schema as any) as SingleStoreDriverDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } interface CallbackClient { promise(): SingleStoreDriverClient; } function isCallbackClient(client: any): client is CallbackClient { return typeof client.promise === 'function'; } export type AnySingleStoreDriverConnection = Pool | Connection | CallbackPool | CallbackConnection; const CONNECTION_ATTRS: PoolOptions['connectAttributes'] = { _connector_name: 'SingleStore Drizzle ORM Driver', _connector_version: npmVersion, }; export function drizzle< TSchema extends Record = Record, TClient extends AnySingleStoreDriverConnection = CallbackPool, >( ...params: [ TClient | string, ] | [ TClient | string, SingleStoreDriverDrizzleConfig, ] | [ ( & SingleStoreDriverDrizzleConfig & ({ connection: string | PoolOptions; } | { client: TClient; }) ), ] ): SingleStoreDriverDatabase & { $client: AnySingleStoreDriverConnection extends TClient ? CallbackPool : TClient; } { if (typeof params[0] === 'string') { const connectionString = params[0]!; const instance = createPool({ uri: connectionString, connectAttributes: CONNECTION_ATTRS, }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: PoolOptions | string; client?: TClient } & SingleStoreDriverDrizzleConfig; if (client) return construct(client, drizzleConfig) as any; let opts: PoolOptions = {}; opts = typeof connection === 'string' ? { uri: connection, supportBigNumbers: true, connectAttributes: CONNECTION_ATTRS, } : { ...connection, connectAttributes: { ...connection!.connectAttributes, ...CONNECTION_ATTRS, }, }; const instance = createPool(opts); const db = construct(instance, drizzleConfig); return db as any; } return construct(params[0] as TClient, params[1] as SingleStoreDriverDrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: SingleStoreDriverDrizzleConfig, ): SingleStoreDriverDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/singlestore/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/singlestore/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { SingleStoreDriverDatabase } from './driver.ts'; export async function migrate>( db: SingleStoreDriverDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/singlestore/session.ts ================================================ import type { Connection as CallbackConnection } from 'mysql2'; import type { Connection, FieldPacket, OkPacket, Pool, PoolConnection, QueryOptions, ResultSetHeader, RowDataPacket, } from 'mysql2/promise'; import { once } from 'node:events'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; import type { SelectedFieldsOrdered } from '~/singlestore-core/query-builders/select.types.ts'; import { type PreparedQueryKind, SingleStorePreparedQuery, type SingleStorePreparedQueryConfig, type SingleStorePreparedQueryHKT, type SingleStoreQueryResultHKT, SingleStoreSession, SingleStoreTransaction, type SingleStoreTransactionConfig, } from '~/singlestore-core/session.ts'; import type { Query, SQL } from '~/sql/sql.ts'; import { fillPlaceholders, sql } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; export type SingleStoreDriverClient = Pool | Connection; export type SingleStoreRawQueryResult = [ResultSetHeader, FieldPacket[]]; export type SingleStoreQueryResultType = RowDataPacket[][] | RowDataPacket[] | OkPacket | OkPacket[] | ResultSetHeader; export type SingleStoreQueryResult< T = any, > = [T extends ResultSetHeader ? T : T[], FieldPacket[]]; export class SingleStoreDriverPreparedQuery extends SingleStorePreparedQuery { static override readonly [entityKind]: string = 'SingleStoreDriverPreparedQuery'; private rawQuery: QueryOptions; private query: QueryOptions; constructor( private client: SingleStoreDriverClient, queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, private customResultMapper?: (rows: unknown[][]) => T['execute'], // Keys that were used in $default and the value that was generated for them private generatedIds?: Record[], // Keys that should be returned, it has the column with all properties + key from object private returningIds?: SelectedFieldsOrdered, ) { super(cache, queryMetadata, cacheConfig); this.rawQuery = { sql: queryString, // rowsAsArray: true, typeCast: function(field: any, next: any) { if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { return field.string(); } return next(); }, }; this.query = { sql: queryString, rowsAsArray: true, typeCast: function(field: any, next: any) { if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { return field.string(); } return next(); }, }; } async execute(placeholderValues: Record = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.rawQuery.sql, params); const { fields, client, rawQuery, query, joinsNotNullableMap, customResultMapper, returningIds, generatedIds } = this; if (!fields && !customResultMapper) { const res = await this.queryWithCache(rawQuery.sql, params, async () => { return await client.query(rawQuery, params); }); const insertId = res[0].insertId; const affectedRows = res[0].affectedRows; // for each row, I need to check keys from if (returningIds) { const returningResponse = []; let j = 0; for (let i = insertId; i < insertId + affectedRows; i++) { for (const column of returningIds) { const key = returningIds[0]!.path[0]!; if (is(column.field, Column)) { // @ts-ignore if (column.field.primary && column.field.autoIncrement) { returningResponse.push({ [key]: i }); } if (column.field.defaultFn && generatedIds) { // generatedIds[rowIdx][key] returningResponse.push({ [key]: generatedIds[j]![key] }); } } } j++; } return returningResponse; } return res; } const result = await this.queryWithCache(query.sql, params, async () => { return await client.query(query, params); }); const rows = result[0]; if (customResultMapper) { return customResultMapper(rows); } return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } async *iterator( placeholderValues: Record = {}, ): AsyncGenerator { const params = fillPlaceholders(this.params, placeholderValues); const conn = ((isPool(this.client) ? await this.client.getConnection() : this.client) as {} as { connection: CallbackConnection; }).connection; const { fields, query, rawQuery, joinsNotNullableMap, client, customResultMapper } = this; const hasRowsMapper = Boolean(fields || customResultMapper); const driverQuery = hasRowsMapper ? conn.query(query, params) : conn.query(rawQuery, params); const stream = driverQuery.stream(); function dataListener() { stream.pause(); } stream.on('data', dataListener); try { const onEnd = once(stream, 'end'); const onError = once(stream, 'error'); while (true) { stream.resume(); const row = await Promise.race([onEnd, onError, new Promise((resolve) => stream.once('data', resolve))]); if (row === undefined || (Array.isArray(row) && row.length === 0)) { break; } else if (row instanceof Error) { // eslint-disable-line no-instanceof/no-instanceof throw row; } else { if (hasRowsMapper) { if (customResultMapper) { const mappedRow = customResultMapper([row as unknown[]]); yield (Array.isArray(mappedRow) ? mappedRow[0] : mappedRow); } else { yield mapResultRow(fields!, row as unknown[], joinsNotNullableMap); } } else { yield row as T['execute']; } } } } finally { stream.off('data', dataListener); if (isPool(client)) { conn.end(); } } } } export interface SingleStoreDriverSessionOptions { logger?: Logger; cache?: Cache; } export class SingleStoreDriverSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SingleStoreSession { static override readonly [entityKind]: string = 'SingleStoreDriverSession'; private logger: Logger; private cache: Cache; constructor( private client: SingleStoreDriverClient, dialect: SingleStoreDialect, private schema: RelationalSchemaConfig | undefined, private options: SingleStoreDriverSessionOptions, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], generatedIds?: Record[], returningIds?: SelectedFieldsOrdered, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PreparedQueryKind { // Add returningId fields // Each driver gets them from response from database return new SingleStoreDriverPreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, customResultMapper, generatedIds, returningIds, ) as PreparedQueryKind; } /** * @internal * What is its purpose? */ async query(query: string, params: unknown[]): Promise { this.logger.logQuery(query, params); const result = await this.client.query({ sql: query, values: params, rowsAsArray: true, typeCast: function(field: any, next: any) { if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { return field.string(); } return next(); }, }); return result; } override all(query: SQL): Promise { const querySql = this.dialect.sqlToQuery(query); this.logger.logQuery(querySql.sql, querySql.params); return this.client.execute(querySql.sql, querySql.params).then((result) => result[0]) as Promise; } override async transaction( transaction: (tx: SingleStoreDriverTransaction) => Promise, config?: SingleStoreTransactionConfig, ): Promise { const session = isPool(this.client) ? new SingleStoreDriverSession( await this.client.getConnection(), this.dialect, this.schema, this.options, ) : this; const tx = new SingleStoreDriverTransaction( this.dialect, session as SingleStoreSession, this.schema, 0, ); if (config) { const setTransactionConfigSql = this.getSetTransactionSQL(config); if (setTransactionConfigSql) { await tx.execute(setTransactionConfigSql); } const startTransactionSql = this.getStartTransactionSQL(config); await (startTransactionSql ? tx.execute(startTransactionSql) : tx.execute(sql`begin`)); } else { await tx.execute(sql`begin`); } try { const result = await transaction(tx); await tx.execute(sql`commit`); return result; } catch (err) { await tx.execute(sql`rollback`); throw err; } finally { if (isPool(this.client)) { (session.client as PoolConnection).release(); } } } } export class SingleStoreDriverTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SingleStoreTransaction< SingleStoreDriverQueryResultHKT, SingleStoreDriverPreparedQueryHKT, TFullSchema, TSchema > { static override readonly [entityKind]: string = 'SingleStoreDriverTransaction'; override async transaction( transaction: (tx: SingleStoreDriverTransaction) => Promise, ): Promise { const savepointName = `sp${this.nestedIndex + 1}`; const tx = new SingleStoreDriverTransaction( this.dialect, this.session, this.schema, this.nestedIndex + 1, ); await tx.execute(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await tx.execute(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } function isPool(client: SingleStoreDriverClient): client is Pool { return 'getConnection' in client; } export interface SingleStoreDriverQueryResultHKT extends SingleStoreQueryResultHKT { type: SingleStoreRawQueryResult; } export interface SingleStoreDriverPreparedQueryHKT extends SingleStorePreparedQueryHKT { type: SingleStoreDriverPreparedQuery>; } ================================================ FILE: drizzle-orm/src/singlestore-core/alias.ts ================================================ import { TableAliasProxyHandler } from '~/alias.ts'; import type { BuildAliasTable } from './query-builders/select.types.ts'; import type { SingleStoreTable } from './table.ts'; export function alias( // | SingleStoreViewBase table: TTable, alias: TAlias, ): BuildAliasTable { return new Proxy(table, new TableAliasProxyHandler(alias, false)) as any; } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/all.ts ================================================ import { bigint } from './bigint.ts'; import { binary } from './binary.ts'; import { boolean } from './boolean.ts'; import { char } from './char.ts'; import { customType } from './custom.ts'; import { date } from './date.ts'; import { datetime } from './datetime.ts'; import { decimal } from './decimal.ts'; import { double } from './double.ts'; import { singlestoreEnum } from './enum.ts'; import { float } from './float.ts'; import { int } from './int.ts'; import { json } from './json.ts'; import { mediumint } from './mediumint.ts'; import { real } from './real.ts'; import { serial } from './serial.ts'; import { smallint } from './smallint.ts'; import { longtext, mediumtext, text, tinytext } from './text.ts'; import { time } from './time.ts'; import { timestamp } from './timestamp.ts'; import { tinyint } from './tinyint.ts'; import { varbinary } from './varbinary.ts'; import { varchar } from './varchar.ts'; import { vector } from './vector.ts'; import { year } from './year.ts'; export function getSingleStoreColumnBuilders() { return { bigint, binary, boolean, char, customType, date, datetime, decimal, double, singlestoreEnum, float, int, json, mediumint, real, serial, smallint, longtext, mediumtext, text, tinytext, time, timestamp, tinyint, varbinary, varchar, vector, year, }; } export type SingleStoreColumnBuilders = ReturnType; ================================================ FILE: drizzle-orm/src/singlestore-core/columns/bigint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; export type SingleStoreBigInt53BuilderInitial = SingleStoreBigInt53Builder<{ name: TName; dataType: 'number'; columnType: 'SingleStoreBigInt53'; data: number; driverParam: number | string; enumValues: undefined; }>; export class SingleStoreBigInt53Builder> extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreBigInt53Builder'; constructor(name: T['name'], unsigned: boolean = false) { super(name, 'number', 'SingleStoreBigInt53'); this.config.unsigned = unsigned; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreBigInt53> { return new SingleStoreBigInt53>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreBigInt53> extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreBigInt53'; getSQLType(): string { return `bigint${this.config.unsigned ? ' unsigned' : ''}`; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'number') { return value; } return Number(value); } } export type SingleStoreBigInt64BuilderInitial = SingleStoreBigInt64Builder<{ name: TName; dataType: 'bigint'; columnType: 'SingleStoreBigInt64'; data: bigint; driverParam: string; enumValues: undefined; generated: undefined; }>; export class SingleStoreBigInt64Builder> extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreBigInt64Builder'; constructor(name: T['name'], unsigned: boolean = false) { super(name, 'bigint', 'SingleStoreBigInt64'); this.config.unsigned = unsigned; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreBigInt64> { return new SingleStoreBigInt64>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreBigInt64> extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreBigInt64'; getSQLType(): string { return `bigint${this.config.unsigned ? ' unsigned' : ''}`; } // eslint-disable-next-line unicorn/prefer-native-coercion-functions override mapFromDriverValue(value: string): bigint { return BigInt(value); } } export interface SingleStoreBigIntConfig { mode: T; unsigned?: boolean; } export function bigint( config: SingleStoreBigIntConfig, ): TMode extends 'number' ? SingleStoreBigInt53BuilderInitial<''> : SingleStoreBigInt64BuilderInitial<''>; export function bigint( name: TName, config: SingleStoreBigIntConfig, ): TMode extends 'number' ? SingleStoreBigInt53BuilderInitial : SingleStoreBigInt64BuilderInitial; export function bigint(a?: string | SingleStoreBigIntConfig, b?: SingleStoreBigIntConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (config.mode === 'number') { return new SingleStoreBigInt53Builder(name, config.unsigned); } return new SingleStoreBigInt64Builder(name, config.unsigned); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/binary.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreBinaryBuilderInitial = SingleStoreBinaryBuilder<{ name: TName; dataType: 'string'; columnType: 'SingleStoreBinary'; data: string; driverParam: string; enumValues: undefined; generated: undefined; }>; export class SingleStoreBinaryBuilder> extends SingleStoreColumnBuilder< T, SingleStoreBinaryConfig > { static override readonly [entityKind]: string = 'SingleStoreBinaryBuilder'; constructor(name: T['name'], length: number | undefined) { super(name, 'string', 'SingleStoreBinary'); this.config.length = length; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreBinary> { return new SingleStoreBinary>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreBinary> extends SingleStoreColumn< T, SingleStoreBinaryConfig > { static override readonly [entityKind]: string = 'SingleStoreBinary'; length: number | undefined = this.config.length; override mapFromDriverValue(value: string | Buffer | Uint8Array): string { if (typeof value === 'string') return value; if (Buffer.isBuffer(value)) return value.toString(); const str: string[] = []; for (const v of value) { str.push(v === 49 ? '1' : '0'); } return str.join(''); } getSQLType(): string { return this.length === undefined ? `binary` : `binary(${this.length})`; } } export interface SingleStoreBinaryConfig { length?: number; } export function binary(): SingleStoreBinaryBuilderInitial<''>; export function binary( config?: SingleStoreBinaryConfig, ): SingleStoreBinaryBuilderInitial<''>; export function binary( name: TName, config?: SingleStoreBinaryConfig, ): SingleStoreBinaryBuilderInitial; export function binary(a?: string | SingleStoreBinaryConfig, b: SingleStoreBinaryConfig = {}) { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreBinaryBuilder(name, config.length); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/boolean.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreBooleanBuilderInitial = SingleStoreBooleanBuilder<{ name: TName; dataType: 'boolean'; columnType: 'SingleStoreBoolean'; data: boolean; driverParam: number | boolean; enumValues: undefined; generated: undefined; }>; export class SingleStoreBooleanBuilder> extends SingleStoreColumnBuilder { static override readonly [entityKind]: string = 'SingleStoreBooleanBuilder'; constructor(name: T['name']) { super(name, 'boolean', 'SingleStoreBoolean'); } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreBoolean> { return new SingleStoreBoolean>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreBoolean> extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreBoolean'; getSQLType(): string { return 'boolean'; } override mapFromDriverValue(value: number | boolean): boolean { if (typeof value === 'boolean') { return value; } return value === 1; } } export function boolean(): SingleStoreBooleanBuilderInitial<''>; export function boolean(name: TName): SingleStoreBooleanBuilderInitial; export function boolean(name?: string) { return new SingleStoreBooleanBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/char.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreCharBuilderInitial< TName extends string, TEnum extends [string, ...string[]], TLength extends number | undefined, > = SingleStoreCharBuilder<{ name: TName; dataType: 'string'; columnType: 'SingleStoreChar'; data: TEnum[number]; driverParam: number | string; enumValues: TEnum; generated: undefined; length: TLength; }>; export class SingleStoreCharBuilder< T extends ColumnBuilderBaseConfig<'string', 'SingleStoreChar'> & { length?: number | undefined }, > extends SingleStoreColumnBuilder< T, SingleStoreCharConfig, { length: T['length'] } > { static override readonly [entityKind]: string = 'SingleStoreCharBuilder'; constructor(name: T['name'], config: SingleStoreCharConfig) { super(name, 'string', 'SingleStoreChar'); this.config.length = config.length; this.config.enum = config.enum; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreChar & { length: T['length']; enumValues: T['enumValues'] }> { return new SingleStoreChar & { length: T['length']; enumValues: T['enumValues'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreChar & { length?: number | undefined }> extends SingleStoreColumn, { length: T['length'] }> { static override readonly [entityKind]: string = 'SingleStoreChar'; readonly length: T['length'] = this.config.length; override readonly enumValues = this.config.enum; getSQLType(): string { return this.length === undefined ? `char` : `char(${this.length})`; } } export interface SingleStoreCharConfig< TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, TLength extends number | undefined = number | undefined, > { enum?: TEnum; length?: TLength; } export function char(): SingleStoreCharBuilderInitial<'', [string, ...string[]], undefined>; export function char, L extends number | undefined>( config?: SingleStoreCharConfig, L>, ): SingleStoreCharBuilderInitial<'', Writable, L>; export function char< TName extends string, U extends string, T extends Readonly<[U, ...U[]]>, L extends number | undefined, >( name: TName, config?: SingleStoreCharConfig, L>, ): SingleStoreCharBuilderInitial, L>; export function char(a?: string | SingleStoreCharConfig, b: SingleStoreCharConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreCharBuilder(name, config as any); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/common.ts ================================================ import type { ColumnBuilderBase, ColumnBuilderBaseConfig, ColumnBuilderExtraConfig, ColumnBuilderRuntimeConfig, ColumnDataType, HasDefault, IsAutoincrement, MakeColumnConfig, } from '~/column-builder.ts'; import { ColumnBuilder } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { Column } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable, SingleStoreTable } from '~/singlestore-core/table.ts'; import type { SQL } from '~/sql/sql.ts'; import type { Update } from '~/utils.ts'; import { uniqueKeyName } from '../unique-constraint.ts'; export interface SingleStoreColumnBuilderBase< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TTypeConfig extends object = object, > extends ColumnBuilderBase {} export interface SingleStoreGeneratedColumnConfig { mode?: 'virtual' | 'stored'; } export abstract class SingleStoreColumnBuilder< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig & { data: any; }, TRuntimeConfig extends object = object, TTypeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, > extends ColumnBuilder implements SingleStoreColumnBuilderBase { static override readonly [entityKind]: string = 'SingleStoreColumnBuilder'; unique(name?: string): this { this.config.isUnique = true; this.config.uniqueName = name; return this; } // TODO: Implement generated columns for SingleStore (https://docs.singlestore.com/cloud/create-a-database/using-persistent-computed-columns/) /** @internal */ generatedAlwaysAs(as: SQL | T['data'] | (() => SQL), config?: SingleStoreGeneratedColumnConfig) { this.config.generated = { as, type: 'always', mode: config?.mode ?? 'virtual', }; return this as any; } /** @internal */ abstract build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreColumn>; } // To understand how to use `SingleStoreColumn` and `AnySingleStoreColumn`, see `Column` and `AnyColumn` documentation. export abstract class SingleStoreColumn< T extends ColumnBaseConfig = ColumnBaseConfig, TRuntimeConfig extends object = {}, TTypeConfig extends object = {}, > extends Column { static override readonly [entityKind]: string = 'SingleStoreColumn'; constructor( override readonly table: SingleStoreTable, config: ColumnBuilderRuntimeConfig, ) { if (!config.uniqueName) { config.uniqueName = uniqueKeyName(table, [config.name]); } super(table, config); } } export type AnySingleStoreColumn> = {}> = SingleStoreColumn< Required, TPartial>> >; export interface SingleStoreColumnWithAutoIncrementConfig { autoIncrement: boolean; } export abstract class SingleStoreColumnBuilderWithAutoIncrement< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, > extends SingleStoreColumnBuilder { static override readonly [entityKind]: string = 'SingleStoreColumnBuilderWithAutoIncrement'; constructor(name: NonNullable, dataType: T['dataType'], columnType: T['columnType']) { super(name, dataType, columnType); this.config.autoIncrement = false; } autoincrement(): IsAutoincrement> { this.config.autoIncrement = true; this.config.hasDefault = true; return this as IsAutoincrement>; } } export abstract class SingleStoreColumnWithAutoIncrement< T extends ColumnBaseConfig = ColumnBaseConfig, TRuntimeConfig extends object = object, > extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreColumnWithAutoIncrement'; readonly autoIncrement: boolean = this.config.autoIncrement; } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/custom.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import type { SQL } from '~/sql/sql.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type ConvertCustomConfig> = & { name: TName; dataType: 'custom'; columnType: 'SingleStoreCustomColumn'; data: T['data']; driverParam: T['driverData']; enumValues: undefined; generated: undefined; } & (T['notNull'] extends true ? { notNull: true } : {}) & (T['default'] extends true ? { hasDefault: true } : {}); export interface SingleStoreCustomColumnInnerConfig { customTypeValues: CustomTypeValues; } export class SingleStoreCustomColumnBuilder> extends SingleStoreColumnBuilder< T, { fieldConfig: CustomTypeValues['config']; customTypeParams: CustomTypeParams; }, { singlestoreColumnBuilderBrand: 'SingleStoreCustomColumnBuilderBrand'; } > { static override readonly [entityKind]: string = 'SingleStoreCustomColumnBuilder'; constructor( name: T['name'], fieldConfig: CustomTypeValues['config'], customTypeParams: CustomTypeParams, ) { super(name, 'custom', 'SingleStoreCustomColumn'); this.config.fieldConfig = fieldConfig; this.config.customTypeParams = customTypeParams; } /** @internal */ build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreCustomColumn> { return new SingleStoreCustomColumn>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreCustomColumn> extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreCustomColumn'; private sqlName: string; private mapTo?: (value: T['data']) => T['driverParam']; private mapFrom?: (value: T['driverParam']) => T['data']; constructor( table: AnySingleStoreTable<{ name: T['tableName'] }>, config: SingleStoreCustomColumnBuilder['config'], ) { super(table, config); this.sqlName = config.customTypeParams.dataType(config.fieldConfig); this.mapTo = config.customTypeParams.toDriver; this.mapFrom = config.customTypeParams.fromDriver; } getSQLType(): string { return this.sqlName; } override mapFromDriverValue(value: T['driverParam']): T['data'] { return typeof this.mapFrom === 'function' ? this.mapFrom(value) : value as T['data']; } override mapToDriverValue(value: T['data']): T['driverParam'] { return typeof this.mapTo === 'function' ? this.mapTo(value) : value as T['data']; } } export type CustomTypeValues = { /** * Required type for custom column, that will infer proper type model * * Examples: * * If you want your column to be `string` type after selecting/or on inserting - use `data: string`. Like `text`, `varchar` * * If you want your column to be `number` type after selecting/or on inserting - use `data: number`. Like `integer` */ data: unknown; /** * Type helper, that represents what type database driver is accepting for specific database data type */ driverData?: unknown; /** * What config type should be used for {@link CustomTypeParams} `dataType` generation */ config?: Record; /** * Whether the config argument should be required or not * @default false */ configRequired?: boolean; /** * If your custom data type should be notNull by default you can use `notNull: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ notNull?: boolean; /** * If your custom data type has default you can use `default: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ default?: boolean; }; export interface CustomTypeParams { /** * Database data type string representation, that is used for migrations * @example * ``` * `jsonb`, `text` * ``` * * If database data type needs additional params you can use them from `config` param * @example * ``` * `varchar(256)`, `numeric(2,3)` * ``` * * To make `config` be of specific type please use config generic in {@link CustomTypeValues} * * @example * Usage example * ``` * dataType() { * return 'boolean'; * }, * ``` * Or * ``` * dataType(config) { * return typeof config.length !== 'undefined' ? `varchar(${config.length})` : `varchar`; * } * ``` */ dataType: (config: T['config'] | (Equal extends true ? never : undefined)) => string; /** * Optional mapping function, between user input and driver * @example * For example, when using jsonb we need to map JS/TS object to string before writing to database * ``` * toDriver(value: TData): string { * return JSON.stringify(value); * } * ``` */ toDriver?: (value: T['data']) => T['driverData'] | SQL; /** * Optional mapping function, that is responsible for data mapping from database to JS/TS code * @example * For example, when using timestamp we need to map string Date representation to JS Date * ``` * fromDriver(value: string): Date { * return new Date(value); * }, * ``` */ fromDriver?: (value: T['driverData']) => T['data']; } /** * Custom singlestore database data type generator */ export function customType( customTypeParams: CustomTypeParams, ): Equal extends true ? { & T['config']>( fieldConfig: TConfig, ): SingleStoreCustomColumnBuilder>; ( dbName: TName, fieldConfig: T['config'], ): SingleStoreCustomColumnBuilder>; } : { (): SingleStoreCustomColumnBuilder>; & T['config']>( fieldConfig?: TConfig, ): SingleStoreCustomColumnBuilder>; ( dbName: TName, fieldConfig?: T['config'], ): SingleStoreCustomColumnBuilder>; } { return ( a?: TName | T['config'], b?: T['config'], ): SingleStoreCustomColumnBuilder> => { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreCustomColumnBuilder(name as ConvertCustomConfig['name'], config, customTypeParams); }; } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/date.common.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderExtraConfig, ColumnDataType, HasDefault, } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import { sql } from '~/sql/sql.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export interface SingleStoreDateColumnBaseConfig { hasOnUpdateNow: boolean; } export abstract class SingleStoreDateColumnBaseBuilder< T extends ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = ColumnBuilderExtraConfig, > extends SingleStoreColumnBuilder { static override readonly [entityKind]: string = 'SingleStoreDateColumnBuilder'; defaultNow() { return this.default(sql`now()`); } onUpdateNow(): HasDefault { this.config.hasOnUpdateNow = true; this.config.hasDefault = true; return this as HasDefault; } } export abstract class SingleStoreDateBaseColumn< T extends ColumnBaseConfig, TRuntimeConfig extends object = object, > extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreDateColumn'; readonly hasOnUpdateNow: boolean = this.config.hasOnUpdateNow; } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/date.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreDateBuilderInitial = SingleStoreDateBuilder<{ name: TName; dataType: 'date'; columnType: 'SingleStoreDate'; data: Date; driverParam: string | number; enumValues: undefined; generated: undefined; }>; export class SingleStoreDateBuilder> extends SingleStoreColumnBuilder { static override readonly [entityKind]: string = 'SingleStoreDateBuilder'; constructor(name: T['name']) { super(name, 'date', 'SingleStoreDate'); } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreDate> { return new SingleStoreDate>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreDate> extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreDate'; constructor( table: AnySingleStoreTable<{ name: T['tableName'] }>, config: SingleStoreDateBuilder['config'], ) { super(table, config); } getSQLType(): string { return `date`; } override mapFromDriverValue(value: string): Date { return new Date(value); } } export type SingleStoreDateStringBuilderInitial = SingleStoreDateStringBuilder<{ name: TName; dataType: 'string'; columnType: 'SingleStoreDateString'; data: string; driverParam: string | number; enumValues: undefined; generated: undefined; }>; export class SingleStoreDateStringBuilder> extends SingleStoreColumnBuilder { static override readonly [entityKind]: string = 'SingleStoreDateStringBuilder'; constructor(name: T['name']) { super(name, 'string', 'SingleStoreDateString'); } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreDateString> { return new SingleStoreDateString>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreDateString> extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreDateString'; constructor( table: AnySingleStoreTable<{ name: T['tableName'] }>, config: SingleStoreDateStringBuilder['config'], ) { super(table, config); } getSQLType(): string { return `date`; } } export interface SingleStoreDateConfig { mode?: TMode; } export function date(): SingleStoreDateBuilderInitial<''>; export function date( config?: SingleStoreDateConfig, ): Equal extends true ? SingleStoreDateStringBuilderInitial<''> : SingleStoreDateBuilderInitial<''>; export function date( name: TName, config?: SingleStoreDateConfig, ): Equal extends true ? SingleStoreDateStringBuilderInitial : SingleStoreDateBuilderInitial; export function date(a?: string | SingleStoreDateConfig, b?: SingleStoreDateConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (config?.mode === 'string') { return new SingleStoreDateStringBuilder(name); } return new SingleStoreDateBuilder(name); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/datetime.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, GeneratedColumnConfig, HasGenerated, MakeColumnConfig, } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import type { SQL } from '~/sql/index.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreDateTimeBuilderInitial = SingleStoreDateTimeBuilder<{ name: TName; dataType: 'date'; columnType: 'SingleStoreDateTime'; data: Date; driverParam: string | number; enumValues: undefined; generated: undefined; }>; export class SingleStoreDateTimeBuilder> extends SingleStoreColumnBuilder { /** @internal */ // TODO: we need to add a proper support for SingleStore override generatedAlwaysAs( _as: SQL | (() => SQL) | T['data'], _config?: Partial>, ): HasGenerated { throw new Error('Method not implemented.'); } static override readonly [entityKind]: string = 'SingleStoreDateTimeBuilder'; constructor(name: T['name']) { super(name, 'date', 'SingleStoreDateTime'); } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreDateTime> { return new SingleStoreDateTime>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreDateTime> extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreDateTime'; constructor( table: AnySingleStoreTable<{ name: T['tableName'] }>, config: SingleStoreDateTimeBuilder['config'], ) { super(table, config); } getSQLType(): string { return `datetime`; } override mapToDriverValue(value: Date): unknown { return value.toISOString().replace('T', ' ').replace('Z', ''); } override mapFromDriverValue(value: string): Date { return new Date(value.replace(' ', 'T') + 'Z'); } } export type SingleStoreDateTimeStringBuilderInitial = SingleStoreDateTimeStringBuilder<{ name: TName; dataType: 'string'; columnType: 'SingleStoreDateTimeString'; data: string; driverParam: string | number; enumValues: undefined; generated: undefined; }>; export class SingleStoreDateTimeStringBuilder> extends SingleStoreColumnBuilder { /** @internal */ // TODO: we need to add a proper support for SingleStore override generatedAlwaysAs( _as: SQL | (() => SQL) | T['data'], _config?: Partial>, ): HasGenerated { throw new Error('Method not implemented.'); } static override readonly [entityKind]: string = 'SingleStoreDateTimeStringBuilder'; constructor(name: T['name']) { super(name, 'string', 'SingleStoreDateTimeString'); } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreDateTimeString> { return new SingleStoreDateTimeString>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreDateTimeString> extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreDateTimeString'; constructor( table: AnySingleStoreTable<{ name: T['tableName'] }>, config: SingleStoreDateTimeStringBuilder['config'], ) { super(table, config); } getSQLType(): string { return `datetime`; } } export interface SingleStoreDatetimeConfig { mode?: TMode; } export function datetime(): SingleStoreDateTimeBuilderInitial<''>; export function datetime( config?: SingleStoreDatetimeConfig, ): Equal extends true ? SingleStoreDateTimeStringBuilderInitial<''> : SingleStoreDateTimeBuilderInitial<''>; export function datetime( name: TName, config?: SingleStoreDatetimeConfig, ): Equal extends true ? SingleStoreDateTimeStringBuilderInitial : SingleStoreDateTimeBuilderInitial; export function datetime(a?: string | SingleStoreDatetimeConfig, b?: SingleStoreDatetimeConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (config?.mode === 'string') { return new SingleStoreDateTimeStringBuilder(name); } return new SingleStoreDateTimeBuilder(name); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/decimal.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; export type SingleStoreDecimalBuilderInitial = SingleStoreDecimalBuilder<{ name: TName; dataType: 'string'; columnType: 'SingleStoreDecimal'; data: string; driverParam: string; enumValues: undefined; generated: undefined; }>; export class SingleStoreDecimalBuilder< T extends ColumnBuilderBaseConfig<'string', 'SingleStoreDecimal'>, > extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreDecimalBuilder'; constructor(name: T['name'], config: SingleStoreDecimalConfig | undefined) { super(name, 'string', 'SingleStoreDecimal'); this.config.precision = config?.precision; this.config.scale = config?.scale; this.config.unsigned = config?.unsigned; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreDecimal> { return new SingleStoreDecimal>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreDecimal> extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreDecimal'; readonly precision: number | undefined = this.config.precision; readonly scale: number | undefined = this.config.scale; readonly unsigned: boolean | undefined = this.config.unsigned; override mapFromDriverValue(value: unknown): string { // For RQBv2 if (typeof value === 'string') return value; return String(value); } getSQLType(): string { let type = ''; if (this.precision !== undefined && this.scale !== undefined) { type += `decimal(${this.precision},${this.scale})`; } else if (this.precision === undefined) { type += 'decimal'; } else { type += `decimal(${this.precision})`; } type = type === 'decimal(10,0)' || type === 'decimal(10)' ? 'decimal' : type; return this.unsigned ? `${type} unsigned` : type; } } export type SingleStoreDecimalNumberBuilderInitial = SingleStoreDecimalNumberBuilder<{ name: TName; dataType: 'number'; columnType: 'SingleStoreDecimalNumber'; data: number; driverParam: string; enumValues: undefined; generated: undefined; }>; export class SingleStoreDecimalNumberBuilder< T extends ColumnBuilderBaseConfig<'number', 'SingleStoreDecimalNumber'>, > extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreDecimalNumberBuilder'; constructor(name: T['name'], config: SingleStoreDecimalConfig | undefined) { super(name, 'number', 'SingleStoreDecimalNumber'); this.config.precision = config?.precision; this.config.scale = config?.scale; this.config.unsigned = config?.unsigned; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreDecimalNumber> { return new SingleStoreDecimalNumber>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreDecimalNumber> extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreDecimalNumber'; readonly precision: number | undefined = this.config.precision; readonly scale: number | undefined = this.config.scale; readonly unsigned: boolean | undefined = this.config.unsigned; override mapFromDriverValue(value: unknown): number { if (typeof value === 'number') return value; return Number(value); } override mapToDriverValue = String; getSQLType(): string { let type = ''; if (this.precision !== undefined && this.scale !== undefined) { type += `decimal(${this.precision},${this.scale})`; } else if (this.precision === undefined) { type += 'decimal'; } else { type += `decimal(${this.precision})`; } type = type === 'decimal(10,0)' || type === 'decimal(10)' ? 'decimal' : type; return this.unsigned ? `${type} unsigned` : type; } } export type SingleStoreDecimalBigIntBuilderInitial = SingleStoreDecimalBigIntBuilder<{ name: TName; dataType: 'bigint'; columnType: 'SingleStoreDecimalBigInt'; data: bigint; driverParam: string; enumValues: undefined; generated: undefined; }>; export class SingleStoreDecimalBigIntBuilder< T extends ColumnBuilderBaseConfig<'bigint', 'SingleStoreDecimalBigInt'>, > extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreDecimalBigIntBuilder'; constructor(name: T['name'], config: SingleStoreDecimalConfig | undefined) { super(name, 'bigint', 'SingleStoreDecimalBigInt'); this.config.precision = config?.precision; this.config.scale = config?.scale; this.config.unsigned = config?.unsigned; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreDecimalBigInt> { return new SingleStoreDecimalBigInt>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreDecimalBigInt> extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreDecimalBigInt'; readonly precision: number | undefined = this.config.precision; readonly scale: number | undefined = this.config.scale; readonly unsigned: boolean | undefined = this.config.unsigned; override mapFromDriverValue = BigInt; override mapToDriverValue = String; getSQLType(): string { let type = ''; if (this.precision !== undefined && this.scale !== undefined) { type += `decimal(${this.precision},${this.scale})`; } else if (this.precision === undefined) { type += 'decimal'; } else { type += `decimal(${this.precision})`; } type = type === 'decimal(10,0)' || type === 'decimal(10)' ? 'decimal' : type; return this.unsigned ? `${type} unsigned` : type; } } export interface SingleStoreDecimalConfig { precision?: number; scale?: number; unsigned?: boolean; mode?: T; } export function decimal(): SingleStoreDecimalBuilderInitial<''>; export function decimal( config: SingleStoreDecimalConfig, ): Equal extends true ? SingleStoreDecimalNumberBuilderInitial<''> : Equal extends true ? SingleStoreDecimalBigIntBuilderInitial<''> : SingleStoreDecimalBuilderInitial<''>; export function decimal( name: TName, config?: SingleStoreDecimalConfig, ): Equal extends true ? SingleStoreDecimalNumberBuilderInitial : Equal extends true ? SingleStoreDecimalBigIntBuilderInitial : SingleStoreDecimalBuilderInitial; export function decimal(a?: string | SingleStoreDecimalConfig, b: SingleStoreDecimalConfig = {}) { const { name, config } = getColumnNameAndConfig(a, b); const mode = config?.mode; return mode === 'number' ? new SingleStoreDecimalNumberBuilder(name, config) : mode === 'bigint' ? new SingleStoreDecimalBigIntBuilder(name, config) : new SingleStoreDecimalBuilder(name, config); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/double.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; export type SingleStoreDoubleBuilderInitial = SingleStoreDoubleBuilder<{ name: TName; dataType: 'number'; columnType: 'SingleStoreDouble'; data: number; driverParam: number | string; enumValues: undefined; generated: undefined; }>; export class SingleStoreDoubleBuilder> extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreDoubleBuilder'; constructor(name: T['name'], config: SingleStoreDoubleConfig | undefined) { super(name, 'number', 'SingleStoreDouble'); this.config.precision = config?.precision; this.config.scale = config?.scale; this.config.unsigned = config?.unsigned; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreDouble> { return new SingleStoreDouble>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreDouble> extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreDouble'; readonly precision: number | undefined = this.config.precision; readonly scale: number | undefined = this.config.scale; readonly unsigned: boolean | undefined = this.config.unsigned; getSQLType(): string { let type = ''; if (this.precision !== undefined && this.scale !== undefined) { type += `double(${this.precision},${this.scale})`; } else if (this.precision === undefined) { type += 'double'; } else { type += `double(${this.precision})`; } return this.unsigned ? `${type} unsigned` : type; } } export interface SingleStoreDoubleConfig { precision?: number; scale?: number; unsigned?: boolean; } export function double(): SingleStoreDoubleBuilderInitial<''>; export function double( config?: SingleStoreDoubleConfig, ): SingleStoreDoubleBuilderInitial<''>; export function double( name: TName, config?: SingleStoreDoubleConfig, ): SingleStoreDoubleBuilderInitial; export function double(a?: string | SingleStoreDoubleConfig, b?: SingleStoreDoubleConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreDoubleBuilder(name, config); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/enum.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, GeneratedColumnConfig, HasGenerated, MakeColumnConfig, } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import type { SQL } from '~/sql/index.ts'; import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreEnumColumnBuilderInitial = SingleStoreEnumColumnBuilder<{ name: TName; dataType: 'string'; columnType: 'SingleStoreEnumColumn'; data: TEnum[number]; driverParam: string; enumValues: TEnum; generated: undefined; }>; export class SingleStoreEnumColumnBuilder> extends SingleStoreColumnBuilder { // eslint-disable-next-line @typescript-eslint/no-unused-vars override generatedAlwaysAs( as: SQL | (() => SQL) | T['data'], config?: Partial>, ): HasGenerated { throw new Error('Method not implemented.'); } static override readonly [entityKind]: string = 'SingleStoreEnumColumnBuilder'; constructor(name: T['name'], values: T['enumValues']) { super(name, 'string', 'SingleStoreEnumColumn'); this.config.enumValues = values; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreEnumColumn & { enumValues: T['enumValues'] }> { return new SingleStoreEnumColumn & { enumValues: T['enumValues'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreEnumColumn> extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreEnumColumn'; override readonly enumValues = this.config.enumValues; getSQLType(): string { return `enum(${this.enumValues!.map((value) => `'${value}'`).join(',')})`; } } export function singlestoreEnum>( values: T | Writable, ): SingleStoreEnumColumnBuilderInitial<'', Writable>; export function singlestoreEnum>( name: TName, values: T | Writable, ): SingleStoreEnumColumnBuilderInitial>; export function singlestoreEnum( a?: string | readonly [string, ...string[]] | [string, ...string[]], b?: readonly [string, ...string[]] | [string, ...string[]], ): any { const { name, config: values } = getColumnNameAndConfig(a, b); if (values.length === 0) { throw new Error(`You have an empty array for "${name}" enum values`); } return new SingleStoreEnumColumnBuilder(name, values as any); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/float.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; export type SingleStoreFloatBuilderInitial = SingleStoreFloatBuilder<{ name: TName; dataType: 'number'; columnType: 'SingleStoreFloat'; data: number; driverParam: number | string; enumValues: undefined; generated: undefined; }>; export class SingleStoreFloatBuilder> extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreFloatBuilder'; constructor(name: T['name'], config: SingleStoreFloatConfig | undefined) { super(name, 'number', 'SingleStoreFloat'); this.config.precision = config?.precision; this.config.scale = config?.scale; this.config.unsigned = config?.unsigned; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreFloat> { return new SingleStoreFloat>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreFloat> extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreFloat'; readonly precision: number | undefined = this.config.precision; readonly scale: number | undefined = this.config.scale; readonly unsigned: boolean | undefined = this.config.unsigned; getSQLType(): string { let type = ''; if (this.precision !== undefined && this.scale !== undefined) { type += `float(${this.precision},${this.scale})`; } else if (this.precision === undefined) { type += 'float'; } else { type += `float(${this.precision},0)`; } return this.unsigned ? `${type} unsigned` : type; } } export interface SingleStoreFloatConfig { precision?: number; scale?: number; unsigned?: boolean; } export function float(): SingleStoreFloatBuilderInitial<''>; export function float( config?: SingleStoreFloatConfig, ): SingleStoreFloatBuilderInitial<''>; export function float( name: TName, config?: SingleStoreFloatConfig, ): SingleStoreFloatBuilderInitial; export function float(a?: string | SingleStoreFloatConfig, b?: SingleStoreFloatConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreFloatBuilder(name, config); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/index.ts ================================================ export * from './bigint.ts'; export * from './binary.ts'; export * from './boolean.ts'; export * from './char.ts'; export * from './common.ts'; export * from './custom.ts'; export * from './date.ts'; export * from './datetime.ts'; export * from './decimal.ts'; export * from './double.ts'; export * from './enum.ts'; export * from './float.ts'; export * from './int.ts'; export * from './json.ts'; export * from './mediumint.ts'; export * from './real.ts'; export * from './serial.ts'; export * from './smallint.ts'; export * from './text.ts'; export * from './time.ts'; export * from './timestamp.ts'; export * from './tinyint.ts'; export * from './varbinary.ts'; export * from './varchar.ts'; export * from './vector.ts'; export * from './year.ts'; ================================================ FILE: drizzle-orm/src/singlestore-core/columns/int.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; export type SingleStoreIntBuilderInitial = SingleStoreIntBuilder<{ name: TName; dataType: 'number'; columnType: 'SingleStoreInt'; data: number; driverParam: number | string; enumValues: undefined; generated: undefined; }>; export class SingleStoreIntBuilder> extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreIntBuilder'; constructor(name: T['name'], config?: SingleStoreIntConfig) { super(name, 'number', 'SingleStoreInt'); this.config.unsigned = config ? config.unsigned : false; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreInt> { return new SingleStoreInt>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreInt> extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreInt'; getSQLType(): string { return `int${this.config.unsigned ? ' unsigned' : ''}`; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return Number(value); } return value; } } export interface SingleStoreIntConfig { unsigned?: boolean; } export function int(): SingleStoreIntBuilderInitial<''>; export function int( config?: SingleStoreIntConfig, ): SingleStoreIntBuilderInitial<''>; export function int( name: TName, config?: SingleStoreIntConfig, ): SingleStoreIntBuilderInitial; export function int(a?: string | SingleStoreIntConfig, b?: SingleStoreIntConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreIntBuilder(name, config); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/json.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreJsonBuilderInitial = SingleStoreJsonBuilder<{ name: TName; dataType: 'json'; columnType: 'SingleStoreJson'; data: unknown; driverParam: string; enumValues: undefined; generated: undefined; }>; export class SingleStoreJsonBuilder> extends SingleStoreColumnBuilder { static override readonly [entityKind]: string = 'SingleStoreJsonBuilder'; constructor(name: T['name']) { super(name, 'json', 'SingleStoreJson'); } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreJson> { return new SingleStoreJson>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreJson> extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreJson'; getSQLType(): string { return 'json'; } override mapToDriverValue(value: T['data']): string { return JSON.stringify(value); } } export function json(): SingleStoreJsonBuilderInitial<''>; export function json(name: TName): SingleStoreJsonBuilderInitial; export function json(name?: string) { return new SingleStoreJsonBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/mediumint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; import type { SingleStoreIntConfig } from './int.ts'; export type SingleStoreMediumIntBuilderInitial = SingleStoreMediumIntBuilder<{ name: TName; dataType: 'number'; columnType: 'SingleStoreMediumInt'; data: number; driverParam: number | string; enumValues: undefined; generated: undefined; }>; export class SingleStoreMediumIntBuilder> extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreMediumIntBuilder'; constructor(name: T['name'], config?: SingleStoreIntConfig) { super(name, 'number', 'SingleStoreMediumInt'); this.config.unsigned = config ? config.unsigned : false; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreMediumInt> { return new SingleStoreMediumInt>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreMediumInt> extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreMediumInt'; getSQLType(): string { return `mediumint${this.config.unsigned ? ' unsigned' : ''}`; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return Number(value); } return value; } } export function mediumint(): SingleStoreMediumIntBuilderInitial<''>; export function mediumint( config?: SingleStoreIntConfig, ): SingleStoreMediumIntBuilderInitial<''>; export function mediumint( name: TName, config?: SingleStoreIntConfig, ): SingleStoreMediumIntBuilderInitial; export function mediumint(a?: string | SingleStoreIntConfig, b?: SingleStoreIntConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreMediumIntBuilder(name, config); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/real.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; export type SingleStoreRealBuilderInitial = SingleStoreRealBuilder<{ name: TName; dataType: 'number'; columnType: 'SingleStoreReal'; data: number; driverParam: number | string; enumValues: undefined; generated: undefined; }>; export class SingleStoreRealBuilder> extends SingleStoreColumnBuilderWithAutoIncrement< T, SingleStoreRealConfig > { static override readonly [entityKind]: string = 'SingleStoreRealBuilder'; constructor(name: T['name'], config: SingleStoreRealConfig | undefined) { super(name, 'number', 'SingleStoreReal'); this.config.precision = config?.precision; this.config.scale = config?.scale; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreReal> { return new SingleStoreReal>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreReal> extends SingleStoreColumnWithAutoIncrement< T, SingleStoreRealConfig > { static override readonly [entityKind]: string = 'SingleStoreReal'; precision: number | undefined = this.config.precision; scale: number | undefined = this.config.scale; getSQLType(): string { if (this.precision !== undefined && this.scale !== undefined) { return `real(${this.precision}, ${this.scale})`; } else if (this.precision === undefined) { return 'real'; } else { return `real(${this.precision})`; } } } export interface SingleStoreRealConfig { precision?: number; scale?: number; } export function real(): SingleStoreRealBuilderInitial<''>; export function real( config?: SingleStoreRealConfig, ): SingleStoreRealBuilderInitial<''>; export function real( name: TName, config?: SingleStoreRealConfig, ): SingleStoreRealBuilderInitial; export function real(a?: string | SingleStoreRealConfig, b: SingleStoreRealConfig = {}) { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreRealBuilder(name, config); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/serial.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, HasDefault, IsAutoincrement, IsPrimaryKey, MakeColumnConfig, NotNull, } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; export type SingleStoreSerialBuilderInitial = IsAutoincrement< IsPrimaryKey< NotNull< HasDefault< SingleStoreSerialBuilder<{ name: TName; dataType: 'number'; columnType: 'SingleStoreSerial'; data: number; driverParam: number; enumValues: undefined; generated: undefined; }> > > > >; export class SingleStoreSerialBuilder> extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreSerialBuilder'; constructor(name: T['name']) { super(name, 'number', 'SingleStoreSerial'); this.config.hasDefault = true; this.config.autoIncrement = true; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreSerial> { return new SingleStoreSerial>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreSerial< T extends ColumnBaseConfig<'number', 'SingleStoreSerial'>, > extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreSerial'; getSQLType(): string { return 'serial'; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return Number(value); } return value; } } export function serial(): SingleStoreSerialBuilderInitial<''>; export function serial(name: TName): SingleStoreSerialBuilderInitial; export function serial(name?: string) { return new SingleStoreSerialBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/smallint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; import type { SingleStoreIntConfig } from './int.ts'; export type SingleStoreSmallIntBuilderInitial = SingleStoreSmallIntBuilder<{ name: TName; dataType: 'number'; columnType: 'SingleStoreSmallInt'; data: number; driverParam: number | string; enumValues: undefined; generated: undefined; }>; export class SingleStoreSmallIntBuilder> extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreSmallIntBuilder'; constructor(name: T['name'], config?: SingleStoreIntConfig) { super(name, 'number', 'SingleStoreSmallInt'); this.config.unsigned = config ? config.unsigned : false; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreSmallInt> { return new SingleStoreSmallInt>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreSmallInt> extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreSmallInt'; getSQLType(): string { return `smallint${this.config.unsigned ? ' unsigned' : ''}`; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return Number(value); } return value; } } export function smallint(): SingleStoreSmallIntBuilderInitial<''>; export function smallint( config?: SingleStoreIntConfig, ): SingleStoreSmallIntBuilderInitial<''>; export function smallint( name: TName, config?: SingleStoreIntConfig, ): SingleStoreSmallIntBuilderInitial; export function smallint(a?: string | SingleStoreIntConfig, b?: SingleStoreIntConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreSmallIntBuilder(name, config); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/text.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreTextColumnType = 'tinytext' | 'text' | 'mediumtext' | 'longtext'; export type SingleStoreTextBuilderInitial = SingleStoreTextBuilder<{ name: TName; dataType: 'string'; columnType: 'SingleStoreText'; data: TEnum[number]; driverParam: string; enumValues: TEnum; generated: undefined; }>; export class SingleStoreTextBuilder> extends SingleStoreColumnBuilder< T, { textType: SingleStoreTextColumnType; enumValues: T['enumValues'] } > { static override readonly [entityKind]: string = 'SingleStoreTextBuilder'; constructor(name: T['name'], textType: SingleStoreTextColumnType, config: SingleStoreTextConfig) { super(name, 'string', 'SingleStoreText'); this.config.textType = textType; this.config.enumValues = config.enum; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreText> { return new SingleStoreText>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreText> extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreText'; readonly textType: SingleStoreTextColumnType = this.config.textType; override readonly enumValues = this.config.enumValues; getSQLType(): string { return this.textType; } } export interface SingleStoreTextConfig< TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, > { enum?: TEnum; } export function text(): SingleStoreTextBuilderInitial<'', [string, ...string[]]>; export function text>( config?: SingleStoreTextConfig>, ): SingleStoreTextBuilderInitial<'', Writable>; export function text>( name: TName, config?: SingleStoreTextConfig>, ): SingleStoreTextBuilderInitial>; export function text(a?: string | SingleStoreTextConfig, b: SingleStoreTextConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreTextBuilder(name, 'text', config as any); } export function tinytext(): SingleStoreTextBuilderInitial<'', [string, ...string[]]>; export function tinytext>( config?: SingleStoreTextConfig>, ): SingleStoreTextBuilderInitial<'', Writable>; export function tinytext>( name: TName, config?: SingleStoreTextConfig>, ): SingleStoreTextBuilderInitial>; export function tinytext(a?: string | SingleStoreTextConfig, b: SingleStoreTextConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreTextBuilder(name, 'tinytext', config as any); } export function mediumtext(): SingleStoreTextBuilderInitial<'', [string, ...string[]]>; export function mediumtext>( config?: SingleStoreTextConfig>, ): SingleStoreTextBuilderInitial<'', Writable>; export function mediumtext>( name: TName, config?: SingleStoreTextConfig>, ): SingleStoreTextBuilderInitial>; export function mediumtext(a?: string | SingleStoreTextConfig, b: SingleStoreTextConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreTextBuilder(name, 'mediumtext', config as any); } export function longtext(): SingleStoreTextBuilderInitial<'', [string, ...string[]]>; export function longtext>( config?: SingleStoreTextConfig>, ): SingleStoreTextBuilderInitial<'', Writable>; export function longtext>( name: TName, config?: SingleStoreTextConfig>, ): SingleStoreTextBuilderInitial>; export function longtext(a?: string | SingleStoreTextConfig, b: SingleStoreTextConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreTextBuilder(name, 'longtext', config as any); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/time.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreTimeBuilderInitial = SingleStoreTimeBuilder<{ name: TName; dataType: 'string'; columnType: 'SingleStoreTime'; data: string; driverParam: string | number; enumValues: undefined; generated: undefined; }>; export class SingleStoreTimeBuilder> extends SingleStoreColumnBuilder< T > { static override readonly [entityKind]: string = 'SingleStoreTimeBuilder'; constructor( name: T['name'], ) { super(name, 'string', 'SingleStoreTime'); } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreTime> { return new SingleStoreTime>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreTime< T extends ColumnBaseConfig<'string', 'SingleStoreTime'>, > extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreTime'; getSQLType(): string { return `time`; } } export function time(): SingleStoreTimeBuilderInitial<''>; export function time(name: TName): SingleStoreTimeBuilderInitial; export function time(name?: string) { return new SingleStoreTimeBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/timestamp.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { sql } from '~/sql/sql.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreDateBaseColumn, SingleStoreDateColumnBaseBuilder } from './date.common.ts'; export type SingleStoreTimestampBuilderInitial = SingleStoreTimestampBuilder<{ name: TName; dataType: 'date'; columnType: 'SingleStoreTimestamp'; data: Date; driverParam: string | number; enumValues: undefined; generated: undefined; }>; export class SingleStoreTimestampBuilder> extends SingleStoreDateColumnBaseBuilder { static override readonly [entityKind]: string = 'SingleStoreTimestampBuilder'; constructor(name: T['name']) { super(name, 'date', 'SingleStoreTimestamp'); } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreTimestamp> { return new SingleStoreTimestamp>( table, this.config as ColumnBuilderRuntimeConfig, ); } override defaultNow() { return this.default(sql`CURRENT_TIMESTAMP`); } } export class SingleStoreTimestamp> extends SingleStoreDateBaseColumn { static override readonly [entityKind]: string = 'SingleStoreTimestamp'; getSQLType(): string { return `timestamp`; } override mapFromDriverValue(value: string): Date { return new Date(value + '+0000'); } override mapToDriverValue(value: Date): string { return value.toISOString().slice(0, -1).replace('T', ' '); } } export type SingleStoreTimestampStringBuilderInitial = SingleStoreTimestampStringBuilder<{ name: TName; dataType: 'string'; columnType: 'SingleStoreTimestampString'; data: string; driverParam: string | number; enumValues: undefined; generated: undefined; }>; export class SingleStoreTimestampStringBuilder< T extends ColumnBuilderBaseConfig<'string', 'SingleStoreTimestampString'>, > extends SingleStoreDateColumnBaseBuilder { static override readonly [entityKind]: string = 'SingleStoreTimestampStringBuilder'; constructor(name: T['name']) { super(name, 'string', 'SingleStoreTimestampString'); } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreTimestampString> { return new SingleStoreTimestampString>( table, this.config as ColumnBuilderRuntimeConfig, ); } override defaultNow() { return this.default(sql`CURRENT_TIMESTAMP`); } } export class SingleStoreTimestampString> extends SingleStoreDateBaseColumn { static override readonly [entityKind]: string = 'SingleStoreTimestampString'; getSQLType(): string { return `timestamp`; } } export interface SingleStoreTimestampConfig { mode?: TMode; } export function timestamp(): SingleStoreTimestampBuilderInitial<''>; export function timestamp( config?: SingleStoreTimestampConfig, ): Equal extends true ? SingleStoreTimestampStringBuilderInitial<''> : SingleStoreTimestampBuilderInitial<''>; export function timestamp( name: TName, config?: SingleStoreTimestampConfig, ): Equal extends true ? SingleStoreTimestampStringBuilderInitial : SingleStoreTimestampBuilderInitial; export function timestamp(a?: string | SingleStoreTimestampConfig, b: SingleStoreTimestampConfig = {}) { const { name, config } = getColumnNameAndConfig(a, b); if (config?.mode === 'string') { return new SingleStoreTimestampStringBuilder(name); } return new SingleStoreTimestampBuilder(name); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/tinyint.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumnBuilderWithAutoIncrement, SingleStoreColumnWithAutoIncrement } from './common.ts'; import type { SingleStoreIntConfig } from './int.ts'; export type SingleStoreTinyIntBuilderInitial = SingleStoreTinyIntBuilder<{ name: TName; dataType: 'number'; columnType: 'SingleStoreTinyInt'; data: number; driverParam: number | string; enumValues: undefined; generated: undefined; }>; export class SingleStoreTinyIntBuilder> extends SingleStoreColumnBuilderWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreTinyIntBuilder'; constructor(name: T['name'], config?: SingleStoreIntConfig) { super(name, 'number', 'SingleStoreTinyInt'); this.config.unsigned = config ? config.unsigned : false; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreTinyInt> { return new SingleStoreTinyInt>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreTinyInt> extends SingleStoreColumnWithAutoIncrement { static override readonly [entityKind]: string = 'SingleStoreTinyInt'; getSQLType(): string { return `tinyint${this.config.unsigned ? ' unsigned' : ''}`; } override mapFromDriverValue(value: number | string): number { if (typeof value === 'string') { return Number(value); } return value; } } export function tinyint(): SingleStoreTinyIntBuilderInitial<''>; export function tinyint( config?: SingleStoreIntConfig, ): SingleStoreTinyIntBuilderInitial<''>; export function tinyint( name: TName, config?: SingleStoreIntConfig, ): SingleStoreTinyIntBuilderInitial; export function tinyint(a?: string | SingleStoreIntConfig, b?: SingleStoreIntConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreTinyIntBuilder(name, config); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/varbinary.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreVarBinaryBuilderInitial = SingleStoreVarBinaryBuilder<{ name: TName; dataType: 'string'; columnType: 'SingleStoreVarBinary'; data: string; driverParam: string; enumValues: undefined; generated: undefined; }>; export class SingleStoreVarBinaryBuilder> extends SingleStoreColumnBuilder { static override readonly [entityKind]: string = 'SingleStoreVarBinaryBuilder'; /** @internal */ constructor(name: T['name'], config: SingleStoreVarbinaryOptions) { super(name, 'string', 'SingleStoreVarBinary'); this.config.length = config?.length; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreVarBinary> { return new SingleStoreVarBinary>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreVarBinary< T extends ColumnBaseConfig<'string', 'SingleStoreVarBinary'>, > extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreVarBinary'; length: number | undefined = this.config.length; override mapFromDriverValue(value: string | Buffer | Uint8Array): string { if (typeof value === 'string') return value; if (Buffer.isBuffer(value)) return value.toString(); const str: string[] = []; for (const v of value) { str.push(v === 49 ? '1' : '0'); } return str.join(''); } getSQLType(): string { return this.length === undefined ? `varbinary` : `varbinary(${this.length})`; } } export interface SingleStoreVarbinaryOptions { length: number; } export function varbinary( config: SingleStoreVarbinaryOptions, ): SingleStoreVarBinaryBuilderInitial<''>; export function varbinary( name: TName, config: SingleStoreVarbinaryOptions, ): SingleStoreVarBinaryBuilderInitial; export function varbinary(a?: string | SingleStoreVarbinaryOptions, b?: SingleStoreVarbinaryOptions) { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreVarBinaryBuilder(name, config); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/varchar.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreVarCharBuilderInitial< TName extends string, TEnum extends [string, ...string[]], TLength extends number | undefined, > = SingleStoreVarCharBuilder< { name: TName; dataType: 'string'; columnType: 'SingleStoreVarChar'; data: TEnum[number]; driverParam: number | string; enumValues: TEnum; generated: undefined; length: TLength; } >; export class SingleStoreVarCharBuilder< T extends ColumnBuilderBaseConfig<'string', 'SingleStoreVarChar'> & { length?: number | undefined }, > extends SingleStoreColumnBuilder, { length: T['length'] }> { static override readonly [entityKind]: string = 'SingleStoreVarCharBuilder'; /** @internal */ constructor(name: T['name'], config: SingleStoreVarCharConfig) { super(name, 'string', 'SingleStoreVarChar'); this.config.length = config.length; this.config.enum = config.enum; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreVarChar & { length: T['length']; enumValues: T['enumValues'] }> { return new SingleStoreVarChar< MakeColumnConfig & { length: T['length']; enumValues: T['enumValues'] } >( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreVarChar< T extends ColumnBaseConfig<'string', 'SingleStoreVarChar'> & { length?: number | undefined }, > extends SingleStoreColumn, { length: T['length'] }> { static override readonly [entityKind]: string = 'SingleStoreVarChar'; readonly length: T['length'] = this.config.length; override readonly enumValues = this.config.enum; getSQLType(): string { return this.length === undefined ? `varchar` : `varchar(${this.length})`; } } export interface SingleStoreVarCharConfig< TEnum extends string[] | readonly string[] | undefined = string[] | readonly string[] | undefined, TLength extends number | undefined = number | undefined, > { enum?: TEnum; length: TLength; } export function varchar, L extends number | undefined>( config: SingleStoreVarCharConfig, L>, ): SingleStoreVarCharBuilderInitial<'', Writable, L>; export function varchar< TName extends string, U extends string, T extends Readonly<[U, ...U[]]>, L extends number | undefined, >( name: TName, config: SingleStoreVarCharConfig, L>, ): SingleStoreVarCharBuilderInitial, L>; export function varchar(a?: string | SingleStoreVarCharConfig, b?: SingleStoreVarCharConfig): any { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreVarCharBuilder(name, config as any); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/vector.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { SQL } from '~/sql/index.ts'; import { getColumnNameAndConfig } from '~/utils.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder, SingleStoreGeneratedColumnConfig } from './common.ts'; export type SingleStoreVectorBuilderInitial = SingleStoreVectorBuilder<{ name: TName; dataType: 'array'; columnType: 'SingleStoreVector'; data: Array; driverParam: string; enumValues: undefined; }>; export class SingleStoreVectorBuilder> extends SingleStoreColumnBuilder { static override readonly [entityKind]: string = 'SingleStoreVectorBuilder'; constructor(name: T['name'], config: SingleStoreVectorConfig) { super(name, 'array', 'SingleStoreVector'); this.config.dimensions = config.dimensions; this.config.elementType = config.elementType; } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreVector> { return new SingleStoreVector>( table, this.config as ColumnBuilderRuntimeConfig, ); } /** @internal */ override generatedAlwaysAs(as: SQL | (() => SQL) | T['data'], config?: SingleStoreGeneratedColumnConfig) { throw new Error('not implemented'); } } export class SingleStoreVector> extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreVector'; dimensions: number = this.config.dimensions; elementType: ElementType | undefined = this.config.elementType; getSQLType(): string { return `vector(${this.dimensions}, ${this.elementType || 'F32'})`; } override mapToDriverValue(value: Array) { return JSON.stringify(value); } override mapFromDriverValue(value: string): Array { return JSON.parse(value); } } type ElementType = 'I8' | 'I16' | 'I32' | 'I64' | 'F32' | 'F64'; export interface SingleStoreVectorConfig { dimensions: number; elementType?: ElementType; } export function vector( config: SingleStoreVectorConfig, ): SingleStoreVectorBuilderInitial<''>; export function vector( name: TName, config: SingleStoreVectorConfig, ): SingleStoreVectorBuilderInitial; export function vector(a: string | SingleStoreVectorConfig, b?: SingleStoreVectorConfig) { const { name, config } = getColumnNameAndConfig(a, b); return new SingleStoreVectorBuilder(name, config); } ================================================ FILE: drizzle-orm/src/singlestore-core/columns/year.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySingleStoreTable } from '~/singlestore-core/table.ts'; import { SingleStoreColumn, SingleStoreColumnBuilder } from './common.ts'; export type SingleStoreYearBuilderInitial = SingleStoreYearBuilder<{ name: TName; dataType: 'number'; columnType: 'SingleStoreYear'; data: number; driverParam: number; enumValues: undefined; generated: undefined; }>; export class SingleStoreYearBuilder> extends SingleStoreColumnBuilder { static override readonly [entityKind]: string = 'SingleStoreYearBuilder'; constructor(name: T['name']) { super(name, 'number', 'SingleStoreYear'); } /** @internal */ override build( table: AnySingleStoreTable<{ name: TTableName }>, ): SingleStoreYear> { return new SingleStoreYear>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SingleStoreYear< T extends ColumnBaseConfig<'number', 'SingleStoreYear'>, > extends SingleStoreColumn { static override readonly [entityKind]: string = 'SingleStoreYear'; getSQLType(): string { return `year`; } } export function year(): SingleStoreYearBuilderInitial<''>; export function year(name: TName): SingleStoreYearBuilderInitial; export function year(name?: string) { return new SingleStoreYearBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/singlestore-core/db.ts ================================================ import type { ResultSetHeader } from 'mysql2/promise'; import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { SingleStoreDriverDatabase } from '~/singlestore/driver.ts'; import { type ColumnsSelection, type SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import { WithSubquery } from '~/subquery.ts'; import type { SingleStoreDialect } from './dialect.ts'; import { SingleStoreCountBuilder } from './query-builders/count.ts'; import { QueryBuilder, SingleStoreDeleteBase, SingleStoreInsertBuilder, SingleStoreSelectBuilder, SingleStoreUpdateBuilder, } from './query-builders/index.ts'; import type { SelectedFields } from './query-builders/select.types.ts'; import type { PreparedQueryHKTBase, SingleStoreQueryResultHKT, SingleStoreQueryResultKind, SingleStoreSession, SingleStoreTransaction, SingleStoreTransactionConfig, } from './session.ts'; import type { WithBuilder } from './subquery.ts'; import type { SingleStoreTable } from './table.ts'; export class SingleStoreDatabase< TQueryResult extends SingleStoreQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TFullSchema extends Record = {}, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations, > { static readonly [entityKind]: string = 'SingleStoreDatabase'; declare readonly _: { readonly schema: TSchema | undefined; readonly fullSchema: TFullSchema; readonly tableNamesMap: Record; }; // We are waiting for SingleStore support for `json_array` function /**@inrernal */ query: unknown; constructor( /** @internal */ readonly dialect: SingleStoreDialect, /** @internal */ readonly session: SingleStoreSession, schema: RelationalSchemaConfig | undefined, ) { this._ = schema ? { schema: schema.schema, fullSchema: schema.fullSchema as TFullSchema, tableNamesMap: schema.tableNamesMap, } : { schema: undefined, fullSchema: {} as TFullSchema, tableNamesMap: {}, }; this.query = {} as typeof this['query']; // this.queryNotSupported = true; // if (this._.schema) { // for (const [tableName, columns] of Object.entries(this._.schema)) { // (this.query as SingleStoreDatabase>['query'])[tableName] = // new RelationalQueryBuilder( // schema!.fullSchema, // this._.schema, // this._.tableNamesMap, // schema!.fullSchema[tableName] as SingleStoreTable, // columns, // dialect, // session, // ); // } // } this.$cache = { invalidate: async (_params: any) => {} }; } /** * Creates a subquery that defines a temporary named result set as a CTE. * * It is useful for breaking down complex queries into simpler parts and for reusing the result set in subsequent parts of the query. * * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} * * @param alias The alias for the subquery. * * Failure to provide an alias will result in a DrizzleTypeError, preventing the subquery from being referenced in other queries. * * @example * * ```ts * // Create a subquery with alias 'sq' and use it in the select query * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); * * const result = await db.with(sq).select().from(sq); * ``` * * To select arbitrary SQL values as fields in a CTE and reference them in other CTEs or in the main query, you need to add aliases to them: * * ```ts * // Select an arbitrary SQL value as a field in a CTE and reference it in the main query * const sq = db.$with('sq').as(db.select({ * name: sql`upper(${users.name})`.as('name'), * }) * .from(users)); * * const result = await db.with(sq).select({ name: sq.name }).from(sq); * ``` */ $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const self = this; const as = ( qb: | TypedQueryBuilder | SQL | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), ) => { if (typeof qb === 'function') { qb = qb(new QueryBuilder(self.dialect)); } return new Proxy( new WithSubquery( qb.getSQL(), selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, alias, true, ), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ); }; return { as }; }; $count( source: SingleStoreTable | SQL | SQLWrapper, // SingleStoreViewBase | filters?: SQL, ) { return new SingleStoreCountBuilder({ source, filters, session: this.session }); } /** * Incorporates a previously defined CTE (using `$with`) into the main query. * * This method allows the main query to reference a temporary named result set. * * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} * * @param queries The CTEs to incorporate into the main query. * * @example * * ```ts * // Define a subquery 'sq' as a CTE using $with * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); * * // Incorporate the CTE 'sq' into the main query and select from it * const result = await db.with(sq).select().from(sq); * ``` */ with(...queries: WithSubquery[]) { const self = this; /** * Creates a select query. * * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select} * * @param fields The selection object. * * @example * * ```ts * // Select all columns and all rows from the 'cars' table * const allCars: Car[] = await db.select().from(cars); * * // Select specific columns and all rows from the 'cars' table * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ * id: cars.id, * brand: cars.brand * }) * .from(cars); * ``` * * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: * * ```ts * // Select specific columns along with expression and all rows from the 'cars' table * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ * id: cars.id, * lowerBrand: sql`lower(${cars.brand})`, * }) * .from(cars); * ``` */ function select(): SingleStoreSelectBuilder; function select( fields: TSelection, ): SingleStoreSelectBuilder; function select(fields?: SelectedFields): SingleStoreSelectBuilder { return new SingleStoreSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, }); } /** * Adds `distinct` expression to the select query. * * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param fields The selection object. * * @example * ```ts * // Select all unique rows from the 'cars' table * await db.selectDistinct() * .from(cars) * .orderBy(cars.id, cars.brand, cars.color); * * // Select all unique brands from the 'cars' table * await db.selectDistinct({ brand: cars.brand }) * .from(cars) * .orderBy(cars.brand); * ``` */ function selectDistinct(): SingleStoreSelectBuilder; function selectDistinct( fields: TSelection, ): SingleStoreSelectBuilder; function selectDistinct( fields?: SelectedFields, ): SingleStoreSelectBuilder { return new SingleStoreSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, distinct: true, }); } /** * Creates an update query. * * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. * * Use `.set()` method to specify which values to update. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param table The table to update. * * @example * * ```ts * // Update all rows in the 'cars' table * await db.update(cars).set({ color: 'red' }); * * // Update rows with filters and conditions * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); * ``` */ function update( table: TTable, ): SingleStoreUpdateBuilder { return new SingleStoreUpdateBuilder(table, self.session, self.dialect, queries); } /** * Creates a delete query. * * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param table The table to delete from. * * @example * * ```ts * // Delete all rows in the 'cars' table * await db.delete(cars); * * // Delete rows with filters and conditions * await db.delete(cars).where(eq(cars.color, 'green')); * ``` */ function delete_( table: TTable, ): SingleStoreDeleteBase { return new SingleStoreDeleteBase(table, self.session, self.dialect, queries); } return { select, selectDistinct, update, delete: delete_ }; } /** * Creates a select query. * * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select} * * @param fields The selection object. * * @example * * ```ts * // Select all columns and all rows from the 'cars' table * const allCars: Car[] = await db.select().from(cars); * * // Select specific columns and all rows from the 'cars' table * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ * id: cars.id, * brand: cars.brand * }) * .from(cars); * ``` * * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: * * ```ts * // Select specific columns along with expression and all rows from the 'cars' table * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ * id: cars.id, * lowerBrand: sql`lower(${cars.brand})`, * }) * .from(cars); * ``` */ select(): SingleStoreSelectBuilder; select( fields: TSelection, ): SingleStoreSelectBuilder; select(fields?: SelectedFields): SingleStoreSelectBuilder { return new SingleStoreSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect }); } /** * Adds `distinct` expression to the select query. * * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param fields The selection object. * * @example * ```ts * // Select all unique rows from the 'cars' table * await db.selectDistinct() * .from(cars) * .orderBy(cars.id, cars.brand, cars.color); * * // Select all unique brands from the 'cars' table * await db.selectDistinct({ brand: cars.brand }) * .from(cars) * .orderBy(cars.brand); * ``` */ selectDistinct(): SingleStoreSelectBuilder; selectDistinct( fields: TSelection, ): SingleStoreSelectBuilder; selectDistinct(fields?: SelectedFields): SingleStoreSelectBuilder { return new SingleStoreSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect, distinct: true, }); } /** * Creates an update query. * * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. * * Use `.set()` method to specify which values to update. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param table The table to update. * * @example * * ```ts * // Update all rows in the 'cars' table * await db.update(cars).set({ color: 'red' }); * * // Update rows with filters and conditions * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); * ``` */ update( table: TTable, ): SingleStoreUpdateBuilder { return new SingleStoreUpdateBuilder(table, this.session, this.dialect); } /** * Creates an insert query. * * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. * * See docs: {@link https://orm.drizzle.team/docs/insert} * * @param table The table to insert into. * * @example * * ```ts * // Insert one row * await db.insert(cars).values({ brand: 'BMW' }); * * // Insert multiple rows * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); * ``` */ insert( table: TTable, ): SingleStoreInsertBuilder { return new SingleStoreInsertBuilder(table, this.session, this.dialect); } /** * Creates a delete query. * * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param table The table to delete from. * * @example * * ```ts * // Delete all rows in the 'cars' table * await db.delete(cars); * * // Delete rows with filters and conditions * await db.delete(cars).where(eq(cars.color, 'green')); * ``` */ delete( table: TTable, ): SingleStoreDeleteBase { return new SingleStoreDeleteBase(table, this.session, this.dialect); } execute( query: SQLWrapper | string, ): Promise> { return this.session.execute(typeof query === 'string' ? sql.raw(query) : query.getSQL()); } $cache: { invalidate: Cache['onMutate'] }; transaction( transaction: ( tx: SingleStoreTransaction, config?: SingleStoreTransactionConfig, ) => Promise, config?: SingleStoreTransactionConfig, ): Promise { return this.session.transaction(transaction, config); } } export type SingleStoreWithReplicas = Q & { $primary: Q; $replicas: Q[] }; export const withReplicas = < Q extends SingleStoreDriverDatabase, >( primary: Q, replicas: [Q, ...Q[]], getReplica: (replicas: Q[]) => Q = () => replicas[Math.floor(Math.random() * replicas.length)]!, ): SingleStoreWithReplicas => { const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); const $count: Q['$count'] = (...args: [any]) => getReplica(replicas).$count(...args); const $with: Q['with'] = (...args: []) => getReplica(replicas).with(...args); const update: Q['update'] = (...args: [any]) => primary.update(...args); const insert: Q['insert'] = (...args: [any]) => primary.insert(...args); const $delete: Q['delete'] = (...args: [any]) => primary.delete(...args); const execute: Q['execute'] = (...args: [any]) => primary.execute(...args); const transaction: Q['transaction'] = (...args: [any, any]) => primary.transaction(...args); return { ...primary, update, insert, delete: $delete, execute, transaction, $primary: primary, $replicas: replicas, select, selectDistinct, $count, with: $with, get query() { return getReplica(replicas).query; }, }; }; ================================================ FILE: drizzle-orm/src/singlestore-core/dialect.ts ================================================ import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; import { CasingCache } from '~/casing.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import { DrizzleError } from '~/errors.ts'; import type { MigrationConfig, MigrationMeta } from '~/migrator.ts'; import { type BuildRelationalQueryResult, type DBQueryConfig, getOperators, getOrderByOperators, Many, normalizeRelation, One, type Relation, type TableRelationalConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { and, eq } from '~/sql/expressions/index.ts'; import type { Name, Placeholder, QueryWithTypings, SQLChunk } from '~/sql/sql.ts'; import { Param, SQL, sql, View } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { getTableName, getTableUniqueName, Table } from '~/table.ts'; import { type Casing, orderSelectedFields, type UpdateSet } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import { SingleStoreColumn } from './columns/common.ts'; import type { SingleStoreDeleteConfig } from './query-builders/delete.ts'; import type { SingleStoreInsertConfig } from './query-builders/insert.ts'; import type { SelectedFieldsOrdered, SingleStoreSelectConfig, SingleStoreSelectJoinConfig, } from './query-builders/select.types.ts'; import type { SingleStoreUpdateConfig } from './query-builders/update.ts'; import type { SingleStoreSession } from './session.ts'; import { SingleStoreTable } from './table.ts'; /* import { SingleStoreViewBase } from './view-base.ts'; */ export interface SingleStoreDialectConfig { casing?: Casing; } export class SingleStoreDialect { static readonly [entityKind]: string = 'SingleStoreDialect'; /** @internal */ readonly casing: CasingCache; constructor(config?: SingleStoreDialectConfig) { this.casing = new CasingCache(config?.casing); } async migrate( migrations: MigrationMeta[], session: SingleStoreSession, config: Omit, ): Promise { const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; const migrationTableCreate = sql` create table if not exists ${sql.identifier(migrationsTable)} ( id serial primary key, hash text not null, created_at bigint ) `; await session.execute(migrationTableCreate); const dbMigrations = await session.all<{ id: number; hash: string; created_at: string }>( sql`select id, hash, created_at from ${sql.identifier(migrationsTable)} order by created_at desc limit 1`, ); const lastDbMigration = dbMigrations[0]; await session.transaction(async (tx) => { for (const migration of migrations) { if ( !lastDbMigration || Number(lastDbMigration.created_at) < migration.folderMillis ) { for (const stmt of migration.sql) { await tx.execute(sql.raw(stmt)); } await tx.execute( sql`insert into ${ sql.identifier(migrationsTable) } (\`hash\`, \`created_at\`) values(${migration.hash}, ${migration.folderMillis})`, ); } } }); } escapeName(name: string): string { return `\`${name}\``; } escapeParam(_num: number): string { return `?`; } escapeString(str: string): string { return `'${str.replace(/'/g, "''")}'`; } private buildWithCTE(queries: Subquery[] | undefined): SQL | undefined { if (!queries?.length) return undefined; const withSqlChunks = [sql`with `]; for (const [i, w] of queries.entries()) { withSqlChunks.push(sql`${sql.identifier(w._.alias)} as (${w._.sql})`); if (i < queries.length - 1) { withSqlChunks.push(sql`, `); } } withSqlChunks.push(sql` `); return sql.join(withSqlChunks); } buildDeleteQuery({ table, where, returning, withList, limit, orderBy }: SingleStoreDeleteConfig): SQL { const withSql = this.buildWithCTE(withList); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` : undefined; const whereSql = where ? sql` where ${where}` : undefined; const orderBySql = this.buildOrderBy(orderBy); const limitSql = this.buildLimit(limit); return sql`${withSql}delete from ${table}${whereSql}${orderBySql}${limitSql}${returningSql}`; } buildUpdateSet(table: SingleStoreTable, set: UpdateSet): SQL { const tableColumns = table[Table.Symbol.Columns]; const columnNames = Object.keys(tableColumns).filter((colName) => set[colName] !== undefined || tableColumns[colName]?.onUpdateFn !== undefined ); const setSize = columnNames.length; return sql.join(columnNames.flatMap((colName, i) => { const col = tableColumns[colName]!; const onUpdateFnResult = col.onUpdateFn?.(); const value = set[colName] ?? (is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col)); const res = sql`${sql.identifier(this.casing.getColumnCasing(col))} = ${value}`; if (i < setSize - 1) { return [res, sql.raw(', ')]; } return [res]; })); } buildUpdateQuery({ table, set, where, returning, withList, limit, orderBy }: SingleStoreUpdateConfig): SQL { const withSql = this.buildWithCTE(withList); const setSql = this.buildUpdateSet(table, set); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` : undefined; const whereSql = where ? sql` where ${where}` : undefined; const orderBySql = this.buildOrderBy(orderBy); const limitSql = this.buildLimit(limit); return sql`${withSql}update ${table} set ${setSql}${whereSql}${orderBySql}${limitSql}${returningSql}`; } /** * Builds selection SQL with provided fields/expressions * * Examples: * * `select from` * * `insert ... returning ` * * If `isSingleTable` is true, then columns won't be prefixed with table name */ private buildSelection( fields: SelectedFieldsOrdered, { isSingleTable = false }: { isSingleTable?: boolean } = {}, ): SQL { const columnsLen = fields.length; const chunks = fields .flatMap(({ field }, i) => { const chunk: SQLChunk[] = []; if (is(field, SQL.Aliased) && field.isSelectionField) { chunk.push(sql.identifier(field.fieldAlias)); } else if (is(field, SQL.Aliased) || is(field, SQL)) { const query = is(field, SQL.Aliased) ? field.sql : field; if (isSingleTable) { chunk.push( new SQL( query.queryChunks.map((c) => { if (is(c, SingleStoreColumn)) { return sql.identifier(this.casing.getColumnCasing(c)); } return c; }), ), ); } else { chunk.push(query); } if (is(field, SQL.Aliased)) { chunk.push(sql` as ${sql.identifier(field.fieldAlias)}`); } } else if (is(field, Column)) { if (isSingleTable) { chunk.push(sql.identifier(this.casing.getColumnCasing(field))); } else { chunk.push(field); } } else if (is(field, Subquery)) { const entries = Object.entries(field._.selectedFields) as [string, SQL.Aliased | Column | SQL][]; if (entries.length === 1) { const entry = entries[0]![1]; const fieldDecoder = is(entry, SQL) ? entry.decoder : is(entry, Column) ? { mapFromDriverValue: (v: any) => entry.mapFromDriverValue(v) } : entry.sql.decoder; if (fieldDecoder) { field._.sql.decoder = fieldDecoder; } } chunk.push(field); } if (i < columnsLen - 1) { chunk.push(sql`, `); } return chunk; }); return sql.join(chunks); } private buildLimit(limit: number | Placeholder | undefined): SQL | undefined { return typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) ? sql` limit ${limit}` : undefined; } private buildOrderBy(orderBy: (SingleStoreColumn | SQL | SQL.Aliased)[] | undefined): SQL | undefined { return orderBy && orderBy.length > 0 ? sql` order by ${sql.join(orderBy, sql`, `)}` : undefined; } buildSelectQuery( { withList, fields, fieldsFlat, where, having, table, joins, orderBy, groupBy, limit, offset, lockingClause, distinct, setOperators, }: SingleStoreSelectConfig, ): SQL { const fieldsList = fieldsFlat ?? orderSelectedFields(fields); for (const f of fieldsList) { if ( is(f.field, Column) && getTableName(f.field.table) !== (is(table, Subquery) ? table._.alias /* : is(table, SingleStoreViewBase) ? table[ViewBaseConfig].name */ : is(table, SQL) ? undefined : getTableName(table)) && !((table) => joins?.some(({ alias }) => alias === (table[Table.Symbol.IsAlias] ? getTableName(table) : table[Table.Symbol.BaseName]) ))(f.field.table) ) { const tableName = getTableName(f.field.table); throw new Error( `Your "${ f.path.join('->') }" field references a column "${tableName}"."${f.field.name}", but the table "${tableName}" is not part of the query! Did you forget to join it?`, ); } } const isSingleTable = !joins || joins.length === 0; const withSql = this.buildWithCTE(withList); const distinctSql = distinct ? sql` distinct` : undefined; const selection = this.buildSelection(fieldsList, { isSingleTable }); const tableSql = (() => { if (is(table, Table) && table[Table.Symbol.IsAlias]) { return sql`${sql`${sql.identifier(table[Table.Symbol.Schema] ?? '')}.`.if(table[Table.Symbol.Schema])}${ sql.identifier(table[Table.Symbol.OriginalName]) } ${sql.identifier(table[Table.Symbol.Name])}`; } return table; })(); const joinsArray: SQL[] = []; if (joins) { for (const [index, joinMeta] of joins.entries()) { if (index === 0) { joinsArray.push(sql` `); } const table = joinMeta.table; const lateralSql = joinMeta.lateral ? sql` lateral` : undefined; const onSql = joinMeta.on ? sql` on ${joinMeta.on}` : undefined; if (is(table, SingleStoreTable)) { const tableName = table[SingleStoreTable.Symbol.Name]; const tableSchema = table[SingleStoreTable.Symbol.Schema]; const origTableName = table[SingleStoreTable.Symbol.OriginalName]; const alias = tableName === origTableName ? undefined : joinMeta.alias; joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined }${sql.identifier(origTableName)}${alias && sql` ${sql.identifier(alias)}`}${onSql}`, ); } else if (is(table, View)) { const viewName = table[ViewBaseConfig].name; const viewSchema = table[ViewBaseConfig].schema; const origViewName = table[ViewBaseConfig].originalName; const alias = viewName === origViewName ? undefined : joinMeta.alias; joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${ viewSchema ? sql`${sql.identifier(viewSchema)}.` : undefined }${sql.identifier(origViewName)}${alias && sql` ${sql.identifier(alias)}`}${onSql}`, ); } else { joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join${lateralSql} ${table}${onSql}`, ); } if (index < joins.length - 1) { joinsArray.push(sql` `); } } } const joinsSql = sql.join(joinsArray); const whereSql = where ? sql` where ${where}` : undefined; const havingSql = having ? sql` having ${having}` : undefined; const orderBySql = this.buildOrderBy(orderBy); const groupBySql = groupBy && groupBy.length > 0 ? sql` group by ${sql.join(groupBy, sql`, `)}` : undefined; const limitSql = this.buildLimit(limit); const offsetSql = offset ? sql` offset ${offset}` : undefined; let lockingClausesSql; if (lockingClause) { const { config, strength } = lockingClause; lockingClausesSql = sql` for ${sql.raw(strength)}`; if (config.noWait) { lockingClausesSql.append(sql` nowait`); } else if (config.skipLocked) { lockingClausesSql.append(sql` skip locked`); } } const finalQuery = sql`${withSql}select${distinctSql} ${selection} from ${tableSql}${joinsSql}${whereSql}${groupBySql}${havingSql}${orderBySql}${limitSql}${offsetSql}${lockingClausesSql}`; if (setOperators.length > 0) { return this.buildSetOperations(finalQuery, setOperators); } return finalQuery; } buildSetOperations(leftSelect: SQL, setOperators: SingleStoreSelectConfig['setOperators']): SQL { const [setOperator, ...rest] = setOperators; if (!setOperator) { throw new Error('Cannot pass undefined values to any set operator'); } if (rest.length === 0) { return this.buildSetOperationQuery({ leftSelect, setOperator }); } // Some recursive magic here return this.buildSetOperations( this.buildSetOperationQuery({ leftSelect, setOperator }), rest, ); } buildSetOperationQuery({ leftSelect, setOperator: { type, isAll, rightSelect, limit, orderBy, offset }, }: { leftSelect: SQL; setOperator: SingleStoreSelectConfig['setOperators'][number] }): SQL { const leftChunk = sql`(${leftSelect.getSQL()}) `; const rightChunk = sql`(${rightSelect.getSQL()})`; let orderBySql; if (orderBy && orderBy.length > 0) { const orderByValues: (SQL | Name)[] = []; // The next bit is necessary because the sql operator replaces ${table.column} with `table`.`column` // which is invalid SingleStore syntax, Table from one of the SELECTs cannot be used in global ORDER clause for (const orderByUnit of orderBy) { if (is(orderByUnit, SingleStoreColumn)) { orderByValues.push(sql.identifier(this.casing.getColumnCasing(orderByUnit))); } else if (is(orderByUnit, SQL)) { for (let i = 0; i < orderByUnit.queryChunks.length; i++) { const chunk = orderByUnit.queryChunks[i]; if (is(chunk, SingleStoreColumn)) { orderByUnit.queryChunks[i] = sql.identifier(this.casing.getColumnCasing(chunk)); } } orderByValues.push(sql`${orderByUnit}`); } else { orderByValues.push(sql`${orderByUnit}`); } } orderBySql = sql` order by ${sql.join(orderByValues, sql`, `)} `; } const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) ? sql` limit ${limit}` : undefined; const operatorChunk = sql.raw(`${type} ${isAll ? 'all ' : ''}`); const offsetSql = offset ? sql` offset ${offset}` : undefined; return sql`${leftChunk}${operatorChunk}${rightChunk}${orderBySql}${limitSql}${offsetSql}`; } buildInsertQuery( { table, values, ignore, onConflict }: SingleStoreInsertConfig, ): { sql: SQL; generatedIds: Record[] } { // const isSingleValue = values.length === 1; const valuesSqlList: ((SQLChunk | SQL)[] | SQL)[] = []; const columns: Record = table[Table.Symbol.Columns]; const colEntries: [string, SingleStoreColumn][] = Object.entries(columns).filter(([_, col]) => !col.shouldDisableInsert() ); const insertOrder = colEntries.map(([, column]) => sql.identifier(this.casing.getColumnCasing(column))); const generatedIdsResponse: Record[] = []; for (const [valueIndex, value] of values.entries()) { const generatedIds: Record = {}; const valueList: (SQLChunk | SQL)[] = []; for (const [fieldName, col] of colEntries) { const colValue = value[fieldName]; if (colValue === undefined || (is(colValue, Param) && colValue.value === undefined)) { // eslint-disable-next-line unicorn/no-negated-condition if (col.defaultFn !== undefined) { const defaultFnResult = col.defaultFn(); generatedIds[fieldName] = defaultFnResult; const defaultValue = is(defaultFnResult, SQL) ? defaultFnResult : sql.param(defaultFnResult, col); valueList.push(defaultValue); // eslint-disable-next-line unicorn/no-negated-condition } else if (!col.default && col.onUpdateFn !== undefined) { const onUpdateFnResult = col.onUpdateFn(); const newValue = is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col); valueList.push(newValue); } else { valueList.push(sql`default`); } } else { if (col.defaultFn && is(colValue, Param)) { generatedIds[fieldName] = colValue.value; } valueList.push(colValue); } } generatedIdsResponse.push(generatedIds); valuesSqlList.push(valueList); if (valueIndex < values.length - 1) { valuesSqlList.push(sql`, `); } } const valuesSql = sql.join(valuesSqlList); const ignoreSql = ignore ? sql` ignore` : undefined; const onConflictSql = onConflict ? sql` on duplicate key ${onConflict}` : undefined; return { sql: sql`insert${ignoreSql} into ${table} ${insertOrder} values ${valuesSql}${onConflictSql}`, generatedIds: generatedIdsResponse, }; } sqlToQuery(sql: SQL, invokeSource?: 'indexes' | undefined): QueryWithTypings { return sql.toQuery({ casing: this.casing, escapeName: this.escapeName, escapeParam: this.escapeParam, escapeString: this.escapeString, invokeSource, }); } buildRelationalQuery({ fullSchema, schema, tableNamesMap, table, tableConfig, queryConfig: config, tableAlias, nestedQueryRelation, joinOn, }: { fullSchema: Record; schema: TablesRelationalConfig; tableNamesMap: Record; table: SingleStoreTable; tableConfig: TableRelationalConfig; queryConfig: true | DBQueryConfig<'many', true>; tableAlias: string; nestedQueryRelation?: Relation; joinOn?: SQL; }): BuildRelationalQueryResult { let selection: BuildRelationalQueryResult['selection'] = []; let limit, offset, orderBy: SingleStoreSelectConfig['orderBy'], where; const joins: SingleStoreSelectJoinConfig[] = []; if (config === true) { const selectionEntries = Object.entries(tableConfig.columns); selection = selectionEntries.map(( [key, value], ) => ({ dbKey: value.name, tsKey: key, field: aliasedTableColumn(value as SingleStoreColumn, tableAlias), relationTableTsKey: undefined, isJson: false, selection: [], })); } else { const aliasedColumns = Object.fromEntries( Object.entries(tableConfig.columns).map(([key, value]) => [key, aliasedTableColumn(value, tableAlias)]), ); if (config.where) { const whereSql = typeof config.where === 'function' ? config.where(aliasedColumns, getOperators()) : config.where; where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); } const fieldsSelection: { tsKey: string; value: SingleStoreColumn | SQL.Aliased }[] = []; let selectedColumns: string[] = []; // Figure out which columns to select if (config.columns) { let isIncludeMode = false; for (const [field, value] of Object.entries(config.columns)) { if (value === undefined) { continue; } if (field in tableConfig.columns) { if (!isIncludeMode && value === true) { isIncludeMode = true; } selectedColumns.push(field); } } if (selectedColumns.length > 0) { selectedColumns = isIncludeMode ? selectedColumns.filter((c) => config.columns?.[c] === true) : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); } } else { // Select all columns if selection is not specified selectedColumns = Object.keys(tableConfig.columns); } for (const field of selectedColumns) { const column = tableConfig.columns[field]! as SingleStoreColumn; fieldsSelection.push({ tsKey: field, value: column }); } let selectedRelations: { tsKey: string; queryConfig: true | DBQueryConfig<'many', false>; relation: Relation; }[] = []; // Figure out which relations to select if (config.with) { selectedRelations = Object.entries(config.with) .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); } let extras; // Figure out which extras to select if (config.extras) { extras = typeof config.extras === 'function' ? config.extras(aliasedColumns, { sql }) : config.extras; for (const [tsKey, value] of Object.entries(extras)) { fieldsSelection.push({ tsKey, value: mapColumnsInAliasedSQLToAlias(value, tableAlias), }); } } // Transform `fieldsSelection` into `selection` // `fieldsSelection` shouldn't be used after this point for (const { tsKey, value } of fieldsSelection) { selection.push({ dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, tsKey, field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, relationTableTsKey: undefined, isJson: false, selection: [], }); } let orderByOrig = typeof config.orderBy === 'function' ? config.orderBy(aliasedColumns, getOrderByOperators()) : config.orderBy ?? []; if (!Array.isArray(orderByOrig)) { orderByOrig = [orderByOrig]; } orderBy = orderByOrig.map((orderByValue) => { if (is(orderByValue, Column)) { return aliasedTableColumn(orderByValue, tableAlias) as SingleStoreColumn; } return mapColumnsInSQLToAlias(orderByValue, tableAlias); }); limit = config.limit; offset = config.offset; // Process all relations for ( const { tsKey: selectedRelationTsKey, queryConfig: selectedRelationConfigValue, relation, } of selectedRelations ) { const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); const relationTableName = getTableUniqueName(relation.referencedTable); const relationTableTsName = tableNamesMap[relationTableName]!; const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; const joinOn = and( ...normalizedRelation.fields.map((field, i) => eq( aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), aliasedTableColumn(field, tableAlias), ) ), ); const builtRelation = this.buildRelationalQuery({ fullSchema, schema, tableNamesMap, table: fullSchema[relationTableTsName] as SingleStoreTable, tableConfig: schema[relationTableTsName]!, queryConfig: is(relation, One) ? (selectedRelationConfigValue === true ? { limit: 1 } : { ...selectedRelationConfigValue, limit: 1 }) : selectedRelationConfigValue, tableAlias: relationTableAlias, joinOn, nestedQueryRelation: relation, }); const field = sql`${sql.identifier(relationTableAlias)}.${sql.identifier('data')}`.as(selectedRelationTsKey); joins.push({ on: sql`true`, table: new Subquery(builtRelation.sql as SQL, {}, relationTableAlias), alias: relationTableAlias, joinType: 'left', lateral: true, }); selection.push({ dbKey: selectedRelationTsKey, tsKey: selectedRelationTsKey, field, relationTableTsKey: relationTableTsName, isJson: true, selection: builtRelation.selection, }); } } if (selection.length === 0) { throw new DrizzleError({ message: `No fields selected for table "${tableConfig.tsName}" ("${tableAlias}")` }); } let result; where = and(joinOn, where); if (nestedQueryRelation) { let field = sql`JSON_TO_ARRAY(${ sql.join( selection.map(({ field, tsKey, isJson }) => isJson ? sql`${sql.identifier(`${tableAlias}_${tsKey}`)}.${sql.identifier('data')}` : is(field, SQL.Aliased) ? field.sql : field ), sql`, `, ) })`; if (is(nestedQueryRelation, Many)) { field = sql`json_agg(${field})`; } const nestedSelection = [{ dbKey: 'data', tsKey: 'data', field: field.as('data'), isJson: true, relationTableTsKey: tableConfig.tsName, selection, }]; const needsSubquery = limit !== undefined || offset !== undefined || (orderBy?.length ?? 0) > 0; if (needsSubquery) { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: [ { path: [], field: sql.raw('*'), }, ...(((orderBy?.length ?? 0) > 0) ? [{ path: [], field: sql`row_number() over (order by ${sql.join(orderBy!, sql`, `)})`, }] : []), ], where, limit, offset, setOperators: [], }); where = undefined; limit = undefined; offset = undefined; orderBy = undefined; } else { result = aliasedTable(table, tableAlias); } result = this.buildSelectQuery({ table: is(result, SingleStoreTable) ? result : new Subquery(result, {}, tableAlias), fields: {}, fieldsFlat: nestedSelection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), joins, where, limit, offset, orderBy, setOperators: [], }); } else { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: selection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), joins, where, limit, offset, orderBy, setOperators: [], }); } return { tableTsKey: tableConfig.tsName, sql: result, selection, }; } } ================================================ FILE: drizzle-orm/src/singlestore-core/expressions.ts ================================================ import { bindIfParam } from '~/sql/expressions/index.ts'; import type { Placeholder, SQL, SQLChunk, SQLWrapper } from '~/sql/sql.ts'; import { sql } from '~/sql/sql.ts'; import type { SingleStoreColumn } from './columns/index.ts'; export * from '~/sql/expressions/index.ts'; export function concat(column: SingleStoreColumn | SQL.Aliased, value: string | Placeholder | SQLWrapper): SQL { return sql`${column} || ${bindIfParam(value, column)}`; } export function substring( column: SingleStoreColumn | SQL.Aliased, { from, for: _for }: { from?: number | Placeholder | SQLWrapper; for?: number | Placeholder | SQLWrapper }, ): SQL { const chunks: SQLChunk[] = [sql`substring(`, column]; if (from !== undefined) { chunks.push(sql` from `, bindIfParam(from, column)); } if (_for !== undefined) { chunks.push(sql` for `, bindIfParam(_for, column)); } chunks.push(sql`)`); return sql.join(chunks); } // Vectors export function dotProduct(column: SingleStoreColumn | SQL.Aliased, value: Array): SQL { return sql`${column} <*> ${JSON.stringify(value)}`; } export function euclideanDistance(column: SingleStoreColumn | SQL.Aliased, value: Array): SQL { return sql`${column} <-> ${JSON.stringify(value)}`; } ================================================ FILE: drizzle-orm/src/singlestore-core/index.ts ================================================ export * from './alias.ts'; export * from './columns/index.ts'; export * from './db.ts'; export * from './dialect.ts'; export * from './indexes.ts'; export * from './primary-keys.ts'; export * from './query-builders/index.ts'; export * from './schema.ts'; export * from './session.ts'; export * from './subquery.ts'; export * from './table.ts'; export * from './unique-constraint.ts'; export * from './utils.ts'; /* export * from './view-common.ts'; export * from './view.ts'; */ ================================================ FILE: drizzle-orm/src/singlestore-core/indexes.ts ================================================ import { entityKind } from '~/entity.ts'; import type { SQL } from '~/sql/sql.ts'; import type { AnySingleStoreColumn, SingleStoreColumn } from './columns/index.ts'; import type { SingleStoreTable } from './table.ts'; interface IndexConfig { name: string; columns: IndexColumn[]; /** * If true, the index will be created as `create unique index` instead of `create index`. */ unique?: boolean; /** * If set, the index will be created as `create index ... using { 'btree' | 'hash' }`. */ using?: 'btree' | 'hash'; /** * If set, the index will be created as `create index ... algorithm { 'default' | 'inplace' | 'copy' }`. */ algorithm?: 'default' | 'inplace' | 'copy'; /** * If set, adds locks to the index creation. */ lock?: 'default' | 'none' | 'shared' | 'exclusive'; } export type IndexColumn = SingleStoreColumn | SQL; export class IndexBuilderOn { static readonly [entityKind]: string = 'SingleStoreIndexBuilderOn'; constructor(private name: string, private unique: boolean) {} on(...columns: [IndexColumn, ...IndexColumn[]]): IndexBuilder { return new IndexBuilder(this.name, columns, this.unique); } } export interface AnyIndexBuilder { build(table: SingleStoreTable): Index; } // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface IndexBuilder extends AnyIndexBuilder {} export class IndexBuilder implements AnyIndexBuilder { static readonly [entityKind]: string = 'SingleStoreIndexBuilder'; /** @internal */ config: IndexConfig; constructor(name: string, columns: IndexColumn[], unique: boolean) { this.config = { name, columns, unique, }; } using(using: IndexConfig['using']): this { this.config.using = using; return this; } algorithm(algorithm: IndexConfig['algorithm']): this { this.config.algorithm = algorithm; return this; } lock(lock: IndexConfig['lock']): this { this.config.lock = lock; return this; } /** @internal */ build(table: SingleStoreTable): Index { return new Index(this.config, table); } } export class Index { static readonly [entityKind]: string = 'SingleStoreIndex'; readonly config: IndexConfig & { table: SingleStoreTable }; constructor(config: IndexConfig, table: SingleStoreTable) { this.config = { ...config, table }; } } export type GetColumnsTableName = TColumns extends AnySingleStoreColumn<{ tableName: infer TTableName extends string }> | AnySingleStoreColumn< { tableName: infer TTableName extends string } >[] ? TTableName : never; export function index(name: string): IndexBuilderOn { return new IndexBuilderOn(name, false); } export function uniqueIndex(name: string): IndexBuilderOn { return new IndexBuilderOn(name, true); } /* export interface AnyFullTextIndexBuilder { build(table: SingleStoreTable): FullTextIndex; } */ /* interface FullTextIndexConfig { version?: number; } interface FullTextIndexFullConfig extends FullTextIndexConfig { columns: IndexColumn[]; name: string; } export class FullTextIndexBuilderOn { static readonly [entityKind]: string = 'SingleStoreFullTextIndexBuilderOn'; constructor(private name: string, private config: FullTextIndexConfig) {} on(...columns: [IndexColumn, ...IndexColumn[]]): FullTextIndexBuilder { return new FullTextIndexBuilder({ name: this.name, columns: columns, ...this.config, }); } } */ /* export interface FullTextIndexBuilder extends AnyFullTextIndexBuilder {} export class FullTextIndexBuilder implements AnyFullTextIndexBuilder { static readonly [entityKind]: string = 'SingleStoreFullTextIndexBuilder'; */ /** @internal */ /* config: FullTextIndexFullConfig; constructor(config: FullTextIndexFullConfig) { this.config = config; } */ /** @internal */ /* build(table: SingleStoreTable): FullTextIndex { return new FullTextIndex(this.config, table); } } export class FullTextIndex { static readonly [entityKind]: string = 'SingleStoreFullTextIndex'; readonly config: FullTextIndexConfig & { table: SingleStoreTable }; constructor(config: FullTextIndexConfig, table: SingleStoreTable) { this.config = { ...config, table }; } } export function fulltext(name: string, config: FullTextIndexConfig): FullTextIndexBuilderOn { return new FullTextIndexBuilderOn(name, config); } export type SortKeyColumn = SingleStoreColumn | SQL; export class SortKeyBuilder { static readonly [entityKind]: string = 'SingleStoreSortKeyBuilder'; constructor(private columns: SortKeyColumn[]) {} */ /** @internal */ /* build(table: SingleStoreTable): SortKey { return new SortKey(this.columns, table); } } export class SortKey { static readonly [entityKind]: string = 'SingleStoreSortKey'; constructor(public columns: SortKeyColumn[], public table: SingleStoreTable) {} } export function sortKey(...columns: SortKeyColumn[]): SortKeyBuilder { return new SortKeyBuilder(columns); } */ ================================================ FILE: drizzle-orm/src/singlestore-core/primary-keys.ts ================================================ import { entityKind } from '~/entity.ts'; import type { AnySingleStoreColumn, SingleStoreColumn } from './columns/index.ts'; import { SingleStoreTable } from './table.ts'; export function primaryKey< TTableName extends string, TColumn extends AnySingleStoreColumn<{ tableName: TTableName }>, TColumns extends AnySingleStoreColumn<{ tableName: TTableName }>[], >(config: { name?: string; columns: [TColumn, ...TColumns] }): PrimaryKeyBuilder; /** * @deprecated: Please use primaryKey({ columns: [] }) instead of this function * @param columns */ export function primaryKey< TTableName extends string, TColumns extends AnySingleStoreColumn<{ tableName: TTableName }>[], >(...columns: TColumns): PrimaryKeyBuilder; export function primaryKey(...config: any) { if (config[0].columns) { return new PrimaryKeyBuilder(config[0].columns, config[0].name); } return new PrimaryKeyBuilder(config); } export class PrimaryKeyBuilder { static readonly [entityKind]: string = 'SingleStorePrimaryKeyBuilder'; /** @internal */ columns: SingleStoreColumn[]; /** @internal */ name?: string; constructor( columns: SingleStoreColumn[], name?: string, ) { this.columns = columns; this.name = name; } /** @internal */ build(table: SingleStoreTable): PrimaryKey { return new PrimaryKey(table, this.columns, this.name); } } export class PrimaryKey { static readonly [entityKind]: string = 'SingleStorePrimaryKey'; readonly columns: SingleStoreColumn[]; readonly name?: string; constructor(readonly table: SingleStoreTable, columns: SingleStoreColumn[], name?: string) { this.columns = columns; this.name = name; } getName(): string { return this.name ?? `${this.table[SingleStoreTable.Symbol.Name]}_${this.columns.map((column) => column.name).join('_')}_pk`; } } ================================================ FILE: drizzle-orm/src/singlestore-core/query-builders/count.ts ================================================ import { entityKind } from '~/entity.ts'; import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { SingleStoreSession } from '../session.ts'; import type { SingleStoreTable } from '../table.ts'; /* import type { SingleStoreViewBase } from '../view-base.ts'; */ export class SingleStoreCountBuilder< TSession extends SingleStoreSession, > extends SQL implements Promise, SQLWrapper { private sql: SQL; static override readonly [entityKind] = 'SingleStoreCountBuilder'; [Symbol.toStringTag] = 'SingleStoreCountBuilder'; private session: TSession; private static buildEmbeddedCount( source: SingleStoreTable | /* SingleStoreViewBase | */ SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`(select count(*) from ${source}${sql.raw(' where ').if(filters)}${filters})`; } private static buildCount( source: SingleStoreTable | /* SingleStoreViewBase | */ SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`select count(*) as count from ${source}${sql.raw(' where ').if(filters)}${filters}`; } constructor( readonly params: { source: SingleStoreTable | /* SingleStoreViewBase | */ SQL | SQLWrapper; filters?: SQL; session: TSession; }, ) { super(SingleStoreCountBuilder.buildEmbeddedCount(params.source, params.filters).queryChunks); this.mapWith(Number); this.session = params.session; this.sql = SingleStoreCountBuilder.buildCount( params.source, params.filters, ); } then( onfulfilled?: ((value: number) => TResult1 | PromiseLike) | null | undefined, onrejected?: ((reason: any) => TResult2 | PromiseLike) | null | undefined, ): Promise { return Promise.resolve(this.session.count(this.sql)) .then( onfulfilled, onrejected, ); } catch( onRejected?: ((reason: any) => never | PromiseLike) | null | undefined, ): Promise { return this.then(undefined, onRejected); } finally(onFinally?: (() => void) | null | undefined): Promise { return this.then( (value) => { onFinally?.(); return value; }, (reason) => { onFinally?.(); throw reason; }, ); } } ================================================ FILE: drizzle-orm/src/singlestore-core/query-builders/delete.ts ================================================ import { entityKind } from '~/entity.ts'; import { QueryPromise } from '~/query-promise.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; import type { AnySingleStoreQueryResultHKT, PreparedQueryHKTBase, PreparedQueryKind, SingleStorePreparedQueryConfig, SingleStoreQueryResultHKT, SingleStoreQueryResultKind, SingleStoreSession, } from '~/singlestore-core/session.ts'; import type { SingleStoreTable } from '~/singlestore-core/table.ts'; import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import type { ValueOrArray } from '~/utils.ts'; import type { SingleStoreColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import type { SelectedFieldsOrdered } from './select.types.ts'; export type SingleStoreDeleteWithout< T extends AnySingleStoreDeleteBase, TDynamic extends boolean, K extends keyof T & string, > = TDynamic extends true ? T : Omit< SingleStoreDeleteBase< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type SingleStoreDelete< TTable extends SingleStoreTable = SingleStoreTable, TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, > = SingleStoreDeleteBase; export interface SingleStoreDeleteConfig { where?: SQL | undefined; limit?: number | Placeholder; orderBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; table: SingleStoreTable; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } export type SingleStoreDeletePrepare = PreparedQueryKind< T['_']['preparedQueryHKT'], SingleStorePreparedQueryConfig & { execute: SingleStoreQueryResultKind; iterator: never; }, true >; type SingleStoreDeleteDynamic = SingleStoreDelete< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'] >; type AnySingleStoreDeleteBase = SingleStoreDeleteBase; export interface SingleStoreDeleteBase< TTable extends SingleStoreTable, TQueryResult extends SingleStoreQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends QueryPromise> { readonly _: { readonly table: TTable; readonly queryResult: TQueryResult; readonly preparedQueryHKT: TPreparedQueryHKT; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; }; } export class SingleStoreDeleteBase< TTable extends SingleStoreTable, TQueryResult extends SingleStoreQueryResultHKT, // eslint-disable-next-line @typescript-eslint/no-unused-vars TPreparedQueryHKT extends PreparedQueryHKTBase, TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise> implements SQLWrapper { static override readonly [entityKind]: string = 'SingleStoreDelete'; private config: SingleStoreDeleteConfig; constructor( private table: TTable, private session: SingleStoreSession, private dialect: SingleStoreDialect, withList?: Subquery[], ) { super(); this.config = { table, withList }; } /** * Adds a `where` clause to the query. * * Calling this method will delete only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param where the `where` clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be deleted. * * ```ts * // Delete all cars with green color * db.delete(cars).where(eq(cars.color, 'green')); * // or * db.delete(cars).where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Delete all BMW cars with a green color * db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Delete all cars with the green or blue color * db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where(where: SQL | undefined): SingleStoreDeleteWithout { this.config.where = where; return this as any; } orderBy( builder: (deleteTable: TTable) => ValueOrArray, ): SingleStoreDeleteWithout; orderBy(...columns: (SingleStoreColumn | SQL | SQL.Aliased)[]): SingleStoreDeleteWithout; orderBy( ...columns: | [(deleteTable: TTable) => ValueOrArray] | (SingleStoreColumn | SQL | SQL.Aliased)[] ): SingleStoreDeleteWithout { if (typeof columns[0] === 'function') { const orderBy = columns[0]( new Proxy( this.config.table[Table.Symbol.Columns], new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as any, ); const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; this.config.orderBy = orderByArray; } else { const orderByArray = columns as (SingleStoreColumn | SQL | SQL.Aliased)[]; this.config.orderBy = orderByArray; } return this as any; } limit(limit: number | Placeholder): SingleStoreDeleteWithout { this.config.limit = limit; return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildDeleteQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } prepare(): SingleStoreDeletePrepare { return this.session.prepareQuery( this.dialect.sqlToQuery(this.getSQL()), this.config.returning, undefined, undefined, undefined, { type: 'delete', tables: extractUsedTable(this.config.table), }, ) as SingleStoreDeletePrepare; } override execute: ReturnType['execute'] = (placeholderValues) => { return this.prepare().execute(placeholderValues); }; private createIterator = (): ReturnType['iterator'] => { const self = this; return async function*(placeholderValues) { yield* self.prepare().iterator(placeholderValues); }; }; iterator = this.createIterator(); $dynamic(): SingleStoreDeleteDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/singlestore-core/query-builders/index.ts ================================================ /* export * from './attach.ts'; export * from './branch.ts'; export * from './createMilestone.ts'; */ export * from './delete.ts'; /* export * from './detach.ts'; */ export * from './insert.ts'; /* export * from './optimizeTable.ts'; */ export * from './query-builder.ts'; export * from './select.ts'; export * from './select.types.ts'; export * from './update.ts'; ================================================ FILE: drizzle-orm/src/singlestore-core/query-builders/insert.ts ================================================ import { entityKind, is } from '~/entity.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; import type { AnySingleStoreQueryResultHKT, PreparedQueryHKTBase, PreparedQueryKind, SingleStorePreparedQueryConfig, SingleStoreQueryResultHKT, SingleStoreQueryResultKind, SingleStoreSession, } from '~/singlestore-core/session.ts'; import type { SingleStoreTable } from '~/singlestore-core/table.ts'; import type { Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; import { Param, SQL, sql } from '~/sql/sql.ts'; import type { InferModelFromColumns } from '~/table.ts'; import { Table } from '~/table.ts'; import { mapUpdateSet, orderSelectedFields } from '~/utils.ts'; import type { AnySingleStoreColumn, SingleStoreColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import type { SelectedFieldsOrdered } from './select.types.ts'; import type { SingleStoreUpdateSetSource } from './update.ts'; export interface SingleStoreInsertConfig { table: TTable; values: Record[]; ignore: boolean; onConflict?: SQL; returning?: SelectedFieldsOrdered; } export type AnySingleStoreInsertConfig = SingleStoreInsertConfig; export type SingleStoreInsertValue = & { [Key in keyof TTable['$inferInsert']]: TTable['$inferInsert'][Key] | SQL | Placeholder; } & {}; export class SingleStoreInsertBuilder< TTable extends SingleStoreTable, TQueryResult extends SingleStoreQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, > { static readonly [entityKind]: string = 'SingleStoreInsertBuilder'; private shouldIgnore = false; constructor( private table: TTable, private session: SingleStoreSession, private dialect: SingleStoreDialect, ) {} ignore(): this { this.shouldIgnore = true; return this; } values(value: SingleStoreInsertValue): SingleStoreInsertBase; values(values: SingleStoreInsertValue[]): SingleStoreInsertBase; values( values: SingleStoreInsertValue | SingleStoreInsertValue[], ): SingleStoreInsertBase { values = Array.isArray(values) ? values : [values]; if (values.length === 0) { throw new Error('values() must be called with at least one value'); } const mappedValues = values.map((entry) => { const result: Record = {}; const cols = this.table[Table.Symbol.Columns]; for (const colKey of Object.keys(entry)) { const colValue = entry[colKey as keyof typeof entry]; result[colKey] = is(colValue, SQL) ? colValue : new Param(colValue, cols[colKey]); } return result; }); return new SingleStoreInsertBase(this.table, mappedValues, this.shouldIgnore, this.session, this.dialect); } } export type SingleStoreInsertWithout< T extends AnySingleStoreInsert, TDynamic extends boolean, K extends keyof T & string, > = TDynamic extends true ? T : Omit< SingleStoreInsertBase< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'], T['_']['returning'], TDynamic, T['_']['excludedMethods'] | '$returning' >, T['_']['excludedMethods'] | K >; export type SingleStoreInsertDynamic = SingleStoreInsert< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'], T['_']['returning'] >; export type SingleStoreInsertPrepare< T extends AnySingleStoreInsert, TReturning extends Record | undefined = undefined, > = PreparedQueryKind< T['_']['preparedQueryHKT'], SingleStorePreparedQueryConfig & { execute: TReturning extends undefined ? SingleStoreQueryResultKind : TReturning[]; iterator: never; }, true >; export type SingleStoreInsertOnDuplicateKeyUpdateConfig = { set: SingleStoreUpdateSetSource; }; export type SingleStoreInsert< TTable extends SingleStoreTable = SingleStoreTable, TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TReturning extends Record | undefined = Record | undefined, > = SingleStoreInsertBase; export type SingleStoreInsertReturning< T extends AnySingleStoreInsert, TDynamic extends boolean, > = SingleStoreInsertBase< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'], InferModelFromColumns>, TDynamic, T['_']['excludedMethods'] | '$returning' >; export type AnySingleStoreInsert = SingleStoreInsertBase; export interface SingleStoreInsertBase< TTable extends SingleStoreTable, TQueryResult extends SingleStoreQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]>, RunnableQuery< TReturning extends undefined ? SingleStoreQueryResultKind : TReturning[], 'singlestore' >, SQLWrapper { readonly _: { readonly dialect: 'singlestore'; readonly table: TTable; readonly queryResult: TQueryResult; readonly preparedQueryHKT: TPreparedQueryHKT; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly returning: TReturning; readonly result: TReturning extends undefined ? SingleStoreQueryResultKind : TReturning[]; }; } export type PrimaryKeyKeys> = { [K in keyof T]: T[K]['_']['isPrimaryKey'] extends true ? T[K]['_']['isAutoincrement'] extends true ? K : T[K]['_']['hasRuntimeDefault'] extends true ? T[K]['_']['isPrimaryKey'] extends true ? K : never : never : T[K]['_']['hasRuntimeDefault'] extends true ? T[K]['_']['isPrimaryKey'] extends true ? K : never : never; }[keyof T]; export type GetPrimarySerialOrDefaultKeys> = { [K in PrimaryKeyKeys]: T[K]; }; export class SingleStoreInsertBase< TTable extends SingleStoreTable, TQueryResult extends SingleStoreQueryResultHKT, // eslint-disable-next-line @typescript-eslint/no-unused-vars TPreparedQueryHKT extends PreparedQueryHKTBase, // eslint-disable-next-line @typescript-eslint/no-unused-vars TReturning extends Record | undefined = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]> implements RunnableQuery< TReturning extends undefined ? SingleStoreQueryResultKind : TReturning[], 'singlestore' >, SQLWrapper { static override readonly [entityKind]: string = 'SingleStoreInsert'; declare protected $table: TTable; private config: SingleStoreInsertConfig; constructor( table: TTable, values: SingleStoreInsertConfig['values'], ignore: boolean, private session: SingleStoreSession, private dialect: SingleStoreDialect, ) { super(); this.config = { table, values, ignore }; } /** * Adds an `on duplicate key update` clause to the query. * * Calling this method will update update the row if any unique index conflicts. MySQL will automatically determine the conflict target based on the primary key and unique indexes. * * See docs: {@link https://orm.drizzle.team/docs/insert#on-duplicate-key-update} * * @param config The `set` clause * * @example * ```ts * await db.insert(cars) * .values({ id: 1, brand: 'BMW'}) * .onDuplicateKeyUpdate({ set: { brand: 'Porsche' }}); * ``` * * While MySQL does not directly support doing nothing on conflict, you can perform a no-op by setting any column's value to itself and achieve the same effect: * * ```ts * import { sql } from 'drizzle-orm'; * * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onDuplicateKeyUpdate({ set: { id: sql`id` } }); * ``` */ onDuplicateKeyUpdate( config: SingleStoreInsertOnDuplicateKeyUpdateConfig, ): SingleStoreInsertWithout { const setSql = this.dialect.buildUpdateSet(this.config.table, mapUpdateSet(this.config.table, config.set)); this.config.onConflict = sql`update ${setSql}`; return this as any; } $returningId(): SingleStoreInsertWithout< SingleStoreInsertReturning, TDynamic, '$returningId' > { const returning: SelectedFieldsOrdered = []; for (const [key, value] of Object.entries(this.config.table[Table.Symbol.Columns])) { if (value.primary) { returning.push({ field: value, path: [key] }); } } this.config.returning = orderSelectedFields(this.config.table[Table.Symbol.Columns]); return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildInsertQuery(this.config).sql; } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } prepare(): SingleStoreInsertPrepare { const { sql, generatedIds } = this.dialect.buildInsertQuery(this.config); return this.session.prepareQuery( this.dialect.sqlToQuery(sql), undefined, undefined, generatedIds, this.config.returning, { type: 'delete', tables: extractUsedTable(this.config.table), }, ) as SingleStoreInsertPrepare; } override execute: ReturnType['execute'] = (placeholderValues) => { return this.prepare().execute(placeholderValues); }; private createIterator = (): ReturnType['iterator'] => { const self = this; return async function*(placeholderValues) { yield* self.prepare().iterator(placeholderValues); }; }; iterator = this.createIterator(); $dynamic(): SingleStoreInsertDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/singlestore-core/query-builders/query-builder.ts ================================================ import { entityKind, is } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { SingleStoreDialectConfig } from '~/singlestore-core/dialect.ts'; import { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; import type { WithBuilder } from '~/singlestore-core/subquery.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import { WithSubquery } from '~/subquery.ts'; import { SingleStoreSelectBuilder } from './select.ts'; import type { SelectedFields } from './select.types.ts'; export class QueryBuilder { static readonly [entityKind]: string = 'SingleStoreQueryBuilder'; private dialect: SingleStoreDialect | undefined; private dialectConfig: SingleStoreDialectConfig | undefined; constructor(dialect?: SingleStoreDialect | SingleStoreDialectConfig) { this.dialect = is(dialect, SingleStoreDialect) ? dialect : undefined; this.dialectConfig = is(dialect, SingleStoreDialect) ? undefined : dialect; } $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const queryBuilder = this; const as = ( qb: | TypedQueryBuilder | SQL | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), ) => { if (typeof qb === 'function') { qb = qb(queryBuilder); } return new Proxy( new WithSubquery( qb.getSQL(), selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, alias, true, ), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as any; }; return { as }; }; with(...queries: WithSubquery[]) { const self = this; function select(): SingleStoreSelectBuilder; function select( fields: TSelection, ): SingleStoreSelectBuilder; function select( fields?: TSelection, ): SingleStoreSelectBuilder { return new SingleStoreSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), withList: queries, }); } function selectDistinct(): SingleStoreSelectBuilder; function selectDistinct( fields: TSelection, ): SingleStoreSelectBuilder; function selectDistinct( fields?: TSelection, ): SingleStoreSelectBuilder { return new SingleStoreSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), withList: queries, distinct: true, }); } return { select, selectDistinct }; } select(): SingleStoreSelectBuilder; select(fields: TSelection): SingleStoreSelectBuilder; select( fields?: TSelection, ): SingleStoreSelectBuilder { return new SingleStoreSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect(), }); } selectDistinct(): SingleStoreSelectBuilder; selectDistinct( fields: TSelection, ): SingleStoreSelectBuilder; selectDistinct( fields?: TSelection, ): SingleStoreSelectBuilder { return new SingleStoreSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect(), distinct: true, }); } // Lazy load dialect to avoid circular dependency private getDialect() { if (!this.dialect) { this.dialect = new SingleStoreDialect(this.dialectConfig); } return this.dialect; } } ================================================ FILE: drizzle-orm/src/singlestore-core/query-builders/query.ts ================================================ import { entityKind } from '~/entity.ts'; import { QueryPromise } from '~/query-promise.ts'; import { type BuildQueryResult, type BuildRelationalQueryResult, type DBQueryConfig, mapRelationalRow, type TableRelationalConfig, type TablesRelationalConfig, } from '~/relations.ts'; import type { Query, QueryWithTypings, SQL } from '~/sql/sql.ts'; import type { KnownKeysOnly } from '~/utils.ts'; import type { SingleStoreDialect } from '../dialect.ts'; import type { PreparedQueryHKTBase, PreparedQueryKind, SingleStorePreparedQueryConfig, SingleStoreSession, } from '../session.ts'; import type { SingleStoreTable } from '../table.ts'; export class RelationalQueryBuilder< TPreparedQueryHKT extends PreparedQueryHKTBase, TSchema extends TablesRelationalConfig, TFields extends TableRelationalConfig, > { static readonly [entityKind]: string = 'SingleStoreRelationalQueryBuilder'; constructor( private fullSchema: Record, private schema: TSchema, private tableNamesMap: Record, private table: SingleStoreTable, private tableConfig: TableRelationalConfig, private dialect: SingleStoreDialect, private session: SingleStoreSession, ) {} findMany>( config?: KnownKeysOnly>, ): SingleStoreRelationalQuery[]> { return new SingleStoreRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? (config as DBQueryConfig<'many', true>) : {}, 'many', ); } findFirst, 'limit'>>( config?: KnownKeysOnly, 'limit'>>, ): SingleStoreRelationalQuery | undefined> { return new SingleStoreRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? { ...(config as DBQueryConfig<'many', true> | undefined), limit: 1 } : { limit: 1 }, 'first', ); } } export class SingleStoreRelationalQuery< TPreparedQueryHKT extends PreparedQueryHKTBase, TResult, > extends QueryPromise { static override readonly [entityKind]: string = 'SingleStoreRelationalQuery'; declare protected $brand: 'SingleStoreRelationalQuery'; constructor( private fullSchema: Record, private schema: TablesRelationalConfig, private tableNamesMap: Record, private table: SingleStoreTable, private tableConfig: TableRelationalConfig, private dialect: SingleStoreDialect, private session: SingleStoreSession, private config: DBQueryConfig<'many', true> | true, private queryMode: 'many' | 'first', ) { super(); } prepare() { const { query, builtQuery } = this._toSQL(); return this.session.prepareQuery( builtQuery, undefined, (rawRows) => { const rows = rawRows.map((row) => mapRelationalRow(this.schema, this.tableConfig, row, query.selection)); if (this.queryMode === 'first') { return rows[0] as TResult; } return rows as TResult; }, ) as PreparedQueryKind; } private _getQuery() { return this.dialect.buildRelationalQuery({ fullSchema: this.fullSchema, schema: this.schema, tableNamesMap: this.tableNamesMap, table: this.table, tableConfig: this.tableConfig, queryConfig: this.config, tableAlias: this.tableConfig.tsName, }); } private _toSQL(): { query: BuildRelationalQueryResult; builtQuery: QueryWithTypings } { const query = this._getQuery(); const builtQuery = this.dialect.sqlToQuery(query.sql as SQL); return { builtQuery, query }; } /** @internal */ getSQL(): SQL { return this._getQuery().sql as SQL; } toSQL(): Query { return this._toSQL().builtQuery; } override execute(): Promise { return this.prepare().execute(); } } ================================================ FILE: drizzle-orm/src/singlestore-core/query-builders/select.ts ================================================ import type { CacheConfig, WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { BuildSubquerySelection, GetSelectTableName, GetSelectTableSelection, JoinNullability, JoinType, SelectMode, SelectResult, SetOperator, } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { SingleStoreColumn } from '~/singlestore-core/columns/index.ts'; import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; import type { PreparedQueryHKTBase, SingleStorePreparedQueryConfig, SingleStoreSession, } from '~/singlestore-core/session.ts'; import type { SubqueryWithSelection } from '~/singlestore-core/subquery.ts'; import type { SingleStoreTable } from '~/singlestore-core/table.ts'; import type { ColumnsSelection, Query } from '~/sql/sql.ts'; import { SQL } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { applyMixins, getTableColumns, getTableLikeName, haveSameKeys, orderSelectedFields, type ValueOrArray, } from '~/utils.ts'; import { extractUsedTable } from '../utils.ts'; import type { AnySingleStoreSelect, CreateSingleStoreSelectFromBuilderMode, GetSingleStoreSetOperators, LockConfig, LockStrength, SelectedFields, SetOperatorRightSelect, SingleStoreCreateSetOperatorFn, SingleStoreCrossJoinFn, SingleStoreJoinFn, SingleStoreSelectConfig, SingleStoreSelectDynamic, SingleStoreSelectHKT, SingleStoreSelectHKTBase, SingleStoreSelectPrepare, SingleStoreSelectWithout, SingleStoreSetOperatorExcludedMethods, SingleStoreSetOperatorWithResult, } from './select.types.ts'; export class SingleStoreSelectBuilder< TSelection extends SelectedFields | undefined, TPreparedQueryHKT extends PreparedQueryHKTBase, TBuilderMode extends 'db' | 'qb' = 'db', > { static readonly [entityKind]: string = 'SingleStoreSelectBuilder'; private fields: TSelection; private session: SingleStoreSession | undefined; private dialect: SingleStoreDialect; private withList: Subquery[] = []; private distinct: boolean | undefined; constructor( config: { fields: TSelection; session: SingleStoreSession | undefined; dialect: SingleStoreDialect; withList?: Subquery[]; distinct?: boolean; }, ) { this.fields = config.fields; this.session = config.session; this.dialect = config.dialect; if (config.withList) { this.withList = config.withList; } this.distinct = config.distinct; } from( // | SingleStoreViewBase source: TFrom, ): CreateSingleStoreSelectFromBuilderMode< TBuilderMode, GetSelectTableName, TSelection extends undefined ? GetSelectTableSelection : TSelection, TSelection extends undefined ? 'single' : 'partial', TPreparedQueryHKT > { const isPartialSelect = !!this.fields; let fields: SelectedFields; if (this.fields) { fields = this.fields; } else if (is(source, Subquery)) { // This is required to use the proxy handler to get the correct field values from the subquery fields = Object.fromEntries( Object.keys(source._.selectedFields).map(( key, ) => [key, source[key as unknown as keyof typeof source] as unknown as SelectedFields[string]]), ); /* } else if (is(source, SingleStoreViewBase)) { fields = source[ViewBaseConfig].selectedFields as SelectedFields; */ } else if (is(source, SQL)) { fields = {}; } else { fields = getTableColumns(source); } return new SingleStoreSelectBase( { table: source, fields, isPartialSelect, session: this.session, dialect: this.dialect, withList: this.withList, distinct: this.distinct, }, ) as any; } } export abstract class SingleStoreSelectQueryBuilderBase< THKT extends SingleStoreSelectHKTBase, TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends TypedQueryBuilder { static override readonly [entityKind]: string = 'SingleStoreSelectQueryBuilder'; override readonly _: { readonly hkt: THKT; readonly tableName: TTableName; readonly selection: TSelection; readonly selectMode: TSelectMode; readonly preparedQueryHKT: TPreparedQueryHKT; readonly nullabilityMap: TNullabilityMap; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TResult; readonly selectedFields: TSelectedFields; readonly config: SingleStoreSelectConfig; }; protected config: SingleStoreSelectConfig; protected joinsNotNullableMap: Record; private tableName: string | undefined; private isPartialSelect: boolean; /** @internal */ readonly session: SingleStoreSession | undefined; protected dialect: SingleStoreDialect; protected cacheConfig?: WithCacheConfig = undefined; protected usedTables: Set = new Set(); constructor( { table, fields, isPartialSelect, session, dialect, withList, distinct }: { table: SingleStoreSelectConfig['table']; fields: SingleStoreSelectConfig['fields']; isPartialSelect: boolean; session: SingleStoreSession | undefined; dialect: SingleStoreDialect; withList: Subquery[]; distinct: boolean | undefined; }, ) { super(); this.config = { withList, table, fields: { ...fields }, distinct, setOperators: [], }; this.isPartialSelect = isPartialSelect; this.session = session; this.dialect = dialect; this._ = { selectedFields: fields as TSelectedFields, config: this.config, } as this['_']; this.tableName = getTableLikeName(table); this.joinsNotNullableMap = typeof this.tableName === 'string' ? { [this.tableName]: true } : {}; for (const item of extractUsedTable(table)) this.usedTables.add(item); } /** @internal */ getUsedTables() { return [...this.usedTables]; } private createJoin< TJoinType extends JoinType, TIsLateral extends (TJoinType extends 'full' | 'right' ? false : boolean), >( joinType: TJoinType, lateral: TIsLateral, ): 'cross' extends TJoinType ? SingleStoreCrossJoinFn : SingleStoreJoinFn { return ( table: SingleStoreTable | Subquery | SQL, // | SingleStoreViewBase on?: ((aliases: TSelection) => SQL | undefined) | SQL | undefined, ) => { const baseTableName = this.tableName; const tableName = getTableLikeName(table); // store all tables used in a query for (const item of extractUsedTable(table)) this.usedTables.add(item); if (typeof tableName === 'string' && this.config.joins?.some((join) => join.alias === tableName)) { throw new Error(`Alias "${tableName}" is already used in this query`); } if (!this.isPartialSelect) { // If this is the first join and this is not a partial select and we're not selecting from raw SQL, "move" the fields from the main table to the nested object if (Object.keys(this.joinsNotNullableMap).length === 1 && typeof baseTableName === 'string') { this.config.fields = { [baseTableName]: this.config.fields, }; } if (typeof tableName === 'string' && !is(table, SQL)) { const selection = is(table, Subquery) ? table._.selectedFields /* : is(table, View) ? table[ViewBaseConfig].selectedFields */ : table[Table.Symbol.Columns]; this.config.fields[tableName] = selection; } } if (typeof on === 'function') { on = on( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } if (!this.config.joins) { this.config.joins = []; } this.config.joins.push({ on, table, joinType, alias: tableName, lateral }); if (typeof tableName === 'string') { switch (joinType) { case 'left': { this.joinsNotNullableMap[tableName] = false; break; } case 'right': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = true; break; } case 'cross': case 'inner': { this.joinsNotNullableMap[tableName] = true; break; } case 'full': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = false; break; } } } return this as any; }; } /** * Executes a `left join` operation by adding another table to the current query. * * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#left-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User; pets: Pet | null; }[] = await db.select() * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId)) * ``` */ leftJoin = this.createJoin('left', false); /** * Executes a `left join lateral` operation by adding subquery to the current query. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#left-join-lateral} * * @param table the subquery to join. * @param on the `on` clause. */ leftJoinLateral = this.createJoin('left', true); /** * Executes a `right join` operation by adding another table to the current query. * * Calling this method associates each row of the joined table with the corresponding row from the main table, if a match is found. If no matching row exists, it sets all columns of the main table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#right-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User | null; pets: Pet; }[] = await db.select() * .from(users) * .rightJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number | null; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .rightJoin(pets, eq(users.id, pets.ownerId)) * ``` */ rightJoin = this.createJoin('right', false); /** * Executes an `inner join` operation, creating a new table by combining rows from two tables that have matching values. * * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. * * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() * .from(users) * .innerJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .innerJoin(pets, eq(users.id, pets.ownerId)) * ``` */ innerJoin = this.createJoin('inner', false); /** * Executes an `inner join lateral` operation, creating a new table by combining rows from two queries that have matching values. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. * * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join-lateral} * * @param table the subquery to join. * @param on the `on` clause. */ innerJoinLateral = this.createJoin('inner', true); /** * Executes a `full join` operation by combining rows from two tables into a new table. * * Calling this method retrieves all rows from both main and joined tables, merging rows with matching values and filling in `null` for non-matching columns. * * See docs: {@link https://orm.drizzle.team/docs/joins#full-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User | null; pets: Pet | null; }[] = await db.select() * .from(users) * .fullJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number | null; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .fullJoin(pets, eq(users.id, pets.ownerId)) * ``` */ fullJoin = this.createJoin('full', false); /** * Executes a `cross join` operation by combining rows from two tables into a new table. * * Calling this method retrieves all rows from both main and joined tables, merging all rows from each table. * * See docs: {@link https://orm.drizzle.team/docs/joins#cross-join} * * @param table the table to join. * * @example * * ```ts * // Select all users, each user with every pet * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() * .from(users) * .crossJoin(pets) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .crossJoin(pets) * ``` */ crossJoin = this.createJoin('cross', false); /** * Executes a `cross join lateral` operation by combining rows from two queries into a new table. * * A `lateral` join allows the right-hand expression to refer to columns from the left-hand side. * * Calling this method retrieves all rows from both main and joined queries, merging all rows from each query. * * See docs: {@link https://orm.drizzle.team/docs/joins#cross-join-lateral} * * @param table the query to join. */ crossJoinLateral = this.createJoin('cross', true); private createSetOperator( type: SetOperator, isAll: boolean, ): >( rightSelection: | ((setOperators: GetSingleStoreSetOperators) => SetOperatorRightSelect) | SetOperatorRightSelect, ) => SingleStoreSelectWithout< this, TDynamic, SingleStoreSetOperatorExcludedMethods, true > { return (rightSelection) => { const rightSelect = (typeof rightSelection === 'function' ? rightSelection(getSingleStoreSetOperators()) : rightSelection) as TypedQueryBuilder< any, TResult >; if (!haveSameKeys(this.getSelectedFields(), rightSelect.getSelectedFields())) { throw new Error( 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', ); } this.config.setOperators.push({ type, isAll, rightSelect }); return this as any; }; } /** * Adds `union` set operator to the query. * * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} * * @example * * ```ts * // Select all unique names from customers and users tables * await db.select({ name: users.name }) * .from(users) * .union( * db.select({ name: customers.name }).from(customers) * ); * // or * import { union } from 'drizzle-orm/singlestore-core' * * await union( * db.select({ name: users.name }).from(users), * db.select({ name: customers.name }).from(customers) * ); * ``` */ union = this.createSetOperator('union', false); /** * Adds `union all` set operator to the query. * * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} * * @example * * ```ts * // Select all transaction ids from both online and in-store sales * await db.select({ transaction: onlineSales.transactionId }) * .from(onlineSales) * .unionAll( * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * // or * import { unionAll } from 'drizzle-orm/singlestore-core' * * await unionAll( * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * ``` */ unionAll = this.createSetOperator('union', true); /** * Adds `intersect` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} * * @example * * ```ts * // Select course names that are offered in both departments A and B * await db.select({ courseName: depA.courseName }) * .from(depA) * .intersect( * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * import { intersect } from 'drizzle-orm/singlestore-core' * * await intersect( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ intersect = this.createSetOperator('intersect', false); /** * Adds `except` set operator to the query. * * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} * * @example * * ```ts * // Select all courses offered in department A but not in department B * await db.select({ courseName: depA.courseName }) * .from(depA) * .except( * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * import { except } from 'drizzle-orm/singlestore-core' * * await except( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ except = this.createSetOperator('except', false); /** * Adds `minus` set operator to the query. * * This is an alias of `except` supported by SingleStore. * * @example * * ```ts * // Select all courses offered in department A but not in department B * await db.select({ courseName: depA.courseName }) * .from(depA) * .minus( * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * import { minus } from 'drizzle-orm/singlestore-core' * * await minus( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ minus = this.createSetOperator('except', false); /** @internal */ addSetOperators(setOperators: SingleStoreSelectConfig['setOperators']): SingleStoreSelectWithout< this, TDynamic, SingleStoreSetOperatorExcludedMethods, true > { this.config.setOperators.push(...setOperators); return this as any; } /** * Adds a `where` clause to the query. * * Calling this method will select only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/select#filtering} * * @param where the `where` clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be selected. * * ```ts * // Select all cars with green color * await db.select().from(cars).where(eq(cars.color, 'green')); * // or * await db.select().from(cars).where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Select all BMW cars with a green color * await db.select().from(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Select all cars with the green or blue color * await db.select().from(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where( where: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, ): SingleStoreSelectWithout { if (typeof where === 'function') { where = where( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } this.config.where = where; return this as any; } /** * Adds a `having` clause to the query. * * Calling this method will select only those rows that fulfill a specified condition. It is typically used with aggregate functions to filter the aggregated data based on a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} * * @param having the `having` clause. * * @example * * ```ts * // Select all brands with more than one car * await db.select({ * brand: cars.brand, * count: sql`cast(count(${cars.id}) as int)`, * }) * .from(cars) * .groupBy(cars.brand) * .having(({ count }) => gt(count, 1)); * ``` */ having( having: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, ): SingleStoreSelectWithout { if (typeof having === 'function') { having = having( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } this.config.having = having; return this as any; } /** * Adds a `group by` clause to the query. * * Calling this method will group rows that have the same values into summary rows, often used for aggregation purposes. * * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} * * @example * * ```ts * // Group and count people by their last names * await db.select({ * lastName: people.lastName, * count: sql`cast(count(*) as int)` * }) * .from(people) * .groupBy(people.lastName); * ``` */ groupBy( builder: (aliases: this['_']['selection']) => ValueOrArray, ): SingleStoreSelectWithout; groupBy(...columns: (SingleStoreColumn | SQL | SQL.Aliased)[]): SingleStoreSelectWithout; groupBy( ...columns: | [(aliases: this['_']['selection']) => ValueOrArray] | (SingleStoreColumn | SQL | SQL.Aliased)[] ): SingleStoreSelectWithout { if (typeof columns[0] === 'function') { const groupBy = columns[0]( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as TSelection, ); this.config.groupBy = Array.isArray(groupBy) ? groupBy : [groupBy]; } else { this.config.groupBy = columns as (SingleStoreColumn | SQL | SQL.Aliased)[]; } return this as any; } /** * Adds an `order by` clause to the query. * * Calling this method will sort the result-set in ascending or descending order. By default, the sort order is ascending. * * See docs: {@link https://orm.drizzle.team/docs/select#order-by} * * @example * * ``` * // Select cars ordered by year * await db.select().from(cars).orderBy(cars.year); * ``` * * You can specify whether results are in ascending or descending order with the `asc()` and `desc()` operators. * * ```ts * // Select cars ordered by year in descending order * await db.select().from(cars).orderBy(desc(cars.year)); * * // Select cars ordered by year and price * await db.select().from(cars).orderBy(asc(cars.year), desc(cars.price)); * ``` */ orderBy( builder: (aliases: this['_']['selection']) => ValueOrArray, ): SingleStoreSelectWithout; orderBy(...columns: (SingleStoreColumn | SQL | SQL.Aliased)[]): SingleStoreSelectWithout; orderBy( ...columns: | [(aliases: this['_']['selection']) => ValueOrArray] | (SingleStoreColumn | SQL | SQL.Aliased)[] ): SingleStoreSelectWithout { if (typeof columns[0] === 'function') { const orderBy = columns[0]( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as TSelection, ); const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.orderBy = orderByArray; } else { this.config.orderBy = orderByArray; } } else { const orderByArray = columns as (SingleStoreColumn | SQL | SQL.Aliased)[]; if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.orderBy = orderByArray; } else { this.config.orderBy = orderByArray; } } return this as any; } /** * Adds a `limit` clause to the query. * * Calling this method will set the maximum number of rows that will be returned by this query. * * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} * * @param limit the `limit` clause. * * @example * * ```ts * // Get the first 10 people from this query. * await db.select().from(people).limit(10); * ``` */ limit(limit: number): SingleStoreSelectWithout { if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.limit = limit; } else { this.config.limit = limit; } return this as any; } /** * Adds an `offset` clause to the query. * * Calling this method will skip a number of rows when returning results from this query. * * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} * * @param offset the `offset` clause. * * @example * * ```ts * // Get the 10th-20th people from this query. * await db.select().from(people).offset(10).limit(10); * ``` */ offset(offset: number): SingleStoreSelectWithout { if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.offset = offset; } else { this.config.offset = offset; } return this as any; } /** * Adds a `for` clause to the query. * * Calling this method will specify a lock strength for this query that controls how strictly it acquires exclusive access to the rows being queried. * * @param strength the lock strength. * @param config the lock configuration. */ for(strength: LockStrength, config: LockConfig = {}): SingleStoreSelectWithout { this.config.lockingClause = { strength, config }; return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildSelectQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } as( alias: TAlias, ): SubqueryWithSelection { const usedTables: string[] = []; usedTables.push(...extractUsedTable(this.config.table)); if (this.config.joins) { for (const it of this.config.joins) usedTables.push(...extractUsedTable(it.table)); } return new Proxy( new Subquery(this.getSQL(), this.config.fields, alias, false, [...new Set(usedTables)]), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as SubqueryWithSelection; } /** @internal */ override getSelectedFields(): this['_']['selectedFields'] { return new Proxy( this.config.fields, new SelectionProxyHandler({ alias: this.tableName, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as this['_']['selectedFields']; } $dynamic(): SingleStoreSelectDynamic { return this as any; } } export interface SingleStoreSelectBase< TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends SingleStoreSelectQueryBuilderBase< SingleStoreSelectHKT, TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, QueryPromise {} export class SingleStoreSelectBase< TTableName extends string | undefined, TSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult = SelectResult[], TSelectedFields = BuildSubquerySelection, > extends SingleStoreSelectQueryBuilderBase< SingleStoreSelectHKT, TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields > { static override readonly [entityKind]: string = 'SingleStoreSelect'; prepare(): SingleStoreSelectPrepare { if (!this.session) { throw new Error('Cannot execute a query on a query builder. Please use a database instance instead.'); } const fieldsList = orderSelectedFields(this.config.fields); const query = this.session.prepareQuery< SingleStorePreparedQueryConfig & { execute: SelectResult[] }, TPreparedQueryHKT >(this.dialect.sqlToQuery(this.getSQL()), fieldsList, undefined, undefined, undefined, { type: 'select', tables: [...this.usedTables], }, this.cacheConfig); query.joinsNotNullableMap = this.joinsNotNullableMap; return query as SingleStoreSelectPrepare; } $withCache(config?: { config?: CacheConfig; tag?: string; autoInvalidate?: boolean } | false) { this.cacheConfig = config === undefined ? { config: {}, enable: true, autoInvalidate: true } : config === false ? { enable: false } : { enable: true, autoInvalidate: true, ...config }; return this; } execute = ((placeholderValues) => { return this.prepare().execute(placeholderValues); }) as ReturnType['execute']; private createIterator = (): ReturnType['iterator'] => { const self = this; return async function*(placeholderValues) { yield* self.prepare().iterator(placeholderValues); }; }; iterator = this.createIterator(); } applyMixins(SingleStoreSelectBase, [QueryPromise]); function createSetOperator(type: SetOperator, isAll: boolean): SingleStoreCreateSetOperatorFn { return (leftSelect, rightSelect, ...restSelects) => { const setOperators = [rightSelect, ...restSelects].map((select) => ({ type, isAll, rightSelect: select as AnySingleStoreSelect, })); for (const setOperator of setOperators) { if (!haveSameKeys((leftSelect as any).getSelectedFields(), setOperator.rightSelect.getSelectedFields())) { throw new Error( 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', ); } } return (leftSelect as AnySingleStoreSelect).addSetOperators(setOperators) as any; }; } const getSingleStoreSetOperators = () => ({ union, unionAll, intersect, except, minus, }); /** * Adds `union` set operator to the query. * * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} * * @example * * ```ts * // Select all unique names from customers and users tables * import { union } from 'drizzle-orm/singlestore-core' * * await union( * db.select({ name: users.name }).from(users), * db.select({ name: customers.name }).from(customers) * ); * // or * await db.select({ name: users.name }) * .from(users) * .union( * db.select({ name: customers.name }).from(customers) * ); * ``` */ export const union = createSetOperator('union', false); /** * Adds `union all` set operator to the query. * * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} * * @example * * ```ts * // Select all transaction ids from both online and in-store sales * import { unionAll } from 'drizzle-orm/singlestore-core' * * await unionAll( * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * // or * await db.select({ transaction: onlineSales.transactionId }) * .from(onlineSales) * .unionAll( * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * ``` */ export const unionAll = createSetOperator('union', true); /** * Adds `intersect` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} * * @example * * ```ts * // Select course names that are offered in both departments A and B * import { intersect } from 'drizzle-orm/singlestore-core' * * await intersect( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * await db.select({ courseName: depA.courseName }) * .from(depA) * .intersect( * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ export const intersect = createSetOperator('intersect', false); /** * Adds `except` set operator to the query. * * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} * * @example * * ```ts * // Select all courses offered in department A but not in department B * import { except } from 'drizzle-orm/singlestore-core' * * await except( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * await db.select({ courseName: depA.courseName }) * .from(depA) * .except( * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ export const except = createSetOperator('except', false); /** * Adds `minus` set operator to the query. * * This is an alias of `except` supported by SingleStore. * * @example * * ```ts * // Select all courses offered in department A but not in department B * import { minus } from 'drizzle-orm/singlestore-core' * * await minus( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * await db.select({ courseName: depA.courseName }) * .from(depA) * .minus( * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ export const minus = createSetOperator('except', true); ================================================ FILE: drizzle-orm/src/singlestore-core/query-builders/select.types.ts ================================================ import type { SelectedFields as SelectedFieldsBase, SelectedFieldsFlat as SelectedFieldsFlatBase, SelectedFieldsOrdered as SelectedFieldsOrderedBase, } from '~/operations.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AppendToNullabilityMap, AppendToResult, BuildSubquerySelection, GetSelectTableName, JoinNullability, JoinType, MapColumnsToTableAlias, SelectMode, SelectResult, SetOperator, } from '~/query-builders/select.types.ts'; import type { SingleStoreColumn } from '~/singlestore-core/columns/index.ts'; import type { SingleStoreTable, SingleStoreTableWithColumns } from '~/singlestore-core/table.ts'; import type { ColumnsSelection, Placeholder, SQL, View } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import type { Table, UpdateTableConfig } from '~/table.ts'; import type { Assume, ValidateShape } from '~/utils.ts'; import type { PreparedQueryHKTBase, PreparedQueryKind, SingleStorePreparedQueryConfig } from '../session.ts'; /* import type { SingleStoreViewBase } from '../view-base.ts'; */ /* import type { SingleStoreViewWithSelection } from '../view.ts'; */ import type { SingleStoreSelectBase, SingleStoreSelectQueryBuilderBase } from './select.ts'; export interface SingleStoreSelectJoinConfig { on: SQL | undefined; table: SingleStoreTable | Subquery | SQL; // SingleStoreViewBase | alias: string | undefined; joinType: JoinType; lateral?: boolean; } export type BuildAliasTable = TTable extends Table ? SingleStoreTableWithColumns< UpdateTableConfig; }> > /* : TTable extends View ? SingleStoreViewWithSelection< TAlias, TTable['_']['existing'], MapColumnsToTableAlias > */ : never; export interface SingleStoreSelectConfig { withList?: Subquery[]; fields: Record; fieldsFlat?: SelectedFieldsOrdered; where?: SQL; having?: SQL; table: SingleStoreTable | Subquery | SQL; // | SingleStoreViewBase limit?: number | Placeholder; offset?: number | Placeholder; joins?: SingleStoreSelectJoinConfig[]; orderBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; groupBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; lockingClause?: { strength: LockStrength; config: LockConfig; }; distinct?: boolean; setOperators: { rightSelect: TypedQueryBuilder; type: SetOperator; isAll: boolean; orderBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; limit?: number | Placeholder; offset?: number | Placeholder; }[]; } export type SingleStoreJoin< T extends AnySingleStoreSelectQueryBuilder, TDynamic extends boolean, TJoinType extends JoinType, TJoinedTable extends SingleStoreTable | Subquery | SQL, // | SingleStoreViewBase TJoinedName extends GetSelectTableName = GetSelectTableName, > = T extends any ? SingleStoreSelectWithout< SingleStoreSelectKind< T['_']['hkt'], T['_']['tableName'], AppendToResult< T['_']['tableName'], T['_']['selection'], TJoinedName, TJoinedTable extends SingleStoreTable ? TJoinedTable['_']['columns'] : TJoinedTable extends Subquery ? Assume : never, T['_']['selectMode'] >, T['_']['selectMode'] extends 'partial' ? T['_']['selectMode'] : 'multiple', T['_']['preparedQueryHKT'], AppendToNullabilityMap, TDynamic, T['_']['excludedMethods'] >, TDynamic, T['_']['excludedMethods'] > : never; export type SingleStoreJoinFn< T extends AnySingleStoreSelectQueryBuilder, TDynamic extends boolean, TJoinType extends JoinType, TIsLateral extends boolean, > = < TJoinedTable extends (TIsLateral extends true ? Subquery | SQL : SingleStoreTable | Subquery | SQL /* | SingleStoreViewBase */), TJoinedName extends GetSelectTableName = GetSelectTableName, >( table: TJoinedTable, on: ((aliases: T['_']['selection']) => SQL | undefined) | SQL | undefined, ) => SingleStoreJoin; export type SingleStoreCrossJoinFn< T extends AnySingleStoreSelectQueryBuilder, TDynamic extends boolean, TIsLateral extends boolean, > = < TJoinedTable extends (TIsLateral extends true ? Subquery | SQL : SingleStoreTable | Subquery | SQL /* | SingleStoreViewBase */), TJoinedName extends GetSelectTableName = GetSelectTableName, >(table: TJoinedTable) => SingleStoreJoin; export type SelectedFieldsFlat = SelectedFieldsFlatBase; export type SelectedFields = SelectedFieldsBase; export type SelectedFieldsOrdered = SelectedFieldsOrderedBase; export type LockStrength = 'update' | 'share'; export type LockConfig = { noWait: true; skipLocked?: undefined; } | { noWait?: undefined; skipLocked: true; } | { noWait?: undefined; skipLocked?: undefined; }; export interface SingleStoreSelectHKTBase { tableName: string | undefined; selection: unknown; selectMode: SelectMode; preparedQueryHKT: unknown; nullabilityMap: unknown; dynamic: boolean; excludedMethods: string; result: unknown; selectedFields: unknown; _type: unknown; } export type SingleStoreSelectKind< T extends SingleStoreSelectHKTBase, TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase, TNullabilityMap extends Record, TDynamic extends boolean, TExcludedMethods extends string, TResult = SelectResult[], TSelectedFields = BuildSubquerySelection, > = (T & { tableName: TTableName; selection: TSelection; selectMode: TSelectMode; preparedQueryHKT: TPreparedQueryHKT; nullabilityMap: TNullabilityMap; dynamic: TDynamic; excludedMethods: TExcludedMethods; result: TResult; selectedFields: TSelectedFields; })['_type']; export interface SingleStoreSelectQueryBuilderHKT extends SingleStoreSelectHKTBase { _type: SingleStoreSelectQueryBuilderBase< SingleStoreSelectQueryBuilderHKT, this['tableName'], Assume, this['selectMode'], Assume, Assume>, this['dynamic'], this['excludedMethods'], Assume, Assume >; } export interface SingleStoreSelectHKT extends SingleStoreSelectHKTBase { _type: SingleStoreSelectBase< this['tableName'], Assume, this['selectMode'], Assume, Assume>, this['dynamic'], this['excludedMethods'], Assume, Assume >; } export type SingleStoreSetOperatorExcludedMethods = | 'where' | 'having' | 'groupBy' | 'session' | 'leftJoin' | 'rightJoin' | 'innerJoin' | 'fullJoin' | 'for'; export type SingleStoreSelectWithout< T extends AnySingleStoreSelectQueryBuilder, TDynamic extends boolean, K extends keyof T & string, TResetExcluded extends boolean = false, > = TDynamic extends true ? T : Omit< SingleStoreSelectKind< T['_']['hkt'], T['_']['tableName'], T['_']['selection'], T['_']['selectMode'], T['_']['preparedQueryHKT'], T['_']['nullabilityMap'], TDynamic, TResetExcluded extends true ? K : T['_']['excludedMethods'] | K, T['_']['result'], T['_']['selectedFields'] >, TResetExcluded extends true ? K : T['_']['excludedMethods'] | K >; export type SingleStoreSelectPrepare = PreparedQueryKind< T['_']['preparedQueryHKT'], SingleStorePreparedQueryConfig & { execute: T['_']['result']; iterator: T['_']['result'][number]; }, true >; export type SingleStoreSelectDynamic = SingleStoreSelectKind< T['_']['hkt'], T['_']['tableName'], T['_']['selection'], T['_']['selectMode'], T['_']['preparedQueryHKT'], T['_']['nullabilityMap'], true, never, T['_']['result'], T['_']['selectedFields'] >; export type CreateSingleStoreSelectFromBuilderMode< TBuilderMode extends 'db' | 'qb', TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase, > = TBuilderMode extends 'db' ? SingleStoreSelectBase : SingleStoreSelectQueryBuilderBase< SingleStoreSelectQueryBuilderHKT, TTableName, TSelection, TSelectMode, TPreparedQueryHKT >; export type SingleStoreSelectQueryBuilder< THKT extends SingleStoreSelectHKTBase = SingleStoreSelectQueryBuilderHKT, TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = ColumnsSelection, TSelectMode extends SelectMode = SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TNullabilityMap extends Record = Record, TResult extends any[] = unknown[], TSelectedFields extends ColumnsSelection = ColumnsSelection, > = SingleStoreSelectQueryBuilderBase< THKT, TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, true, never, TResult, TSelectedFields >; export type AnySingleStoreSelectQueryBuilder = SingleStoreSelectQueryBuilderBase< any, any, any, any, any, any, any, any, any >; export type AnySingleStoreSetOperatorInterface = SingleStoreSetOperatorInterface< any, any, any, any, any, any, any, any, any >; export interface SingleStoreSetOperatorInterface< TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > { _: { readonly hkt: SingleStoreSelectHKT; readonly tableName: TTableName; readonly selection: TSelection; readonly selectMode: TSelectMode; readonly preparedQueryHKT: TPreparedQueryHKT; readonly nullabilityMap: TNullabilityMap; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TResult; readonly selectedFields: TSelectedFields; }; } export type SingleStoreSetOperatorWithResult = SingleStoreSetOperatorInterface< any, any, any, any, any, any, any, TResult, any >; export type SingleStoreSelect< TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = Record, TSelectMode extends SelectMode = SelectMode, TNullabilityMap extends Record = Record, > = SingleStoreSelectBase; export type AnySingleStoreSelect = SingleStoreSelectBase; export type SingleStoreSetOperator< TTableName extends string | undefined = string | undefined, TSelection extends ColumnsSelection = Record, TSelectMode extends SelectMode = SelectMode, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TNullabilityMap extends Record = Record, > = SingleStoreSelectBase< TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, true, SingleStoreSetOperatorExcludedMethods >; export type SetOperatorRightSelect< TValue extends SingleStoreSetOperatorWithResult, TResult extends any[], > = TValue extends SingleStoreSetOperatorInterface ? ValidateShape< TValueResult[number], TResult[number], TypedQueryBuilder > : TValue; export type SetOperatorRestSelect< TValue extends readonly SingleStoreSetOperatorWithResult[], TResult extends any[], > = TValue extends [infer First, ...infer Rest] ? First extends SingleStoreSetOperatorInterface ? Rest extends AnySingleStoreSetOperatorInterface[] ? [ ValidateShape>, ...SetOperatorRestSelect, ] : ValidateShape[]> : never : TValue; export type SingleStoreCreateSetOperatorFn = < TTableName extends string | undefined, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TValue extends SingleStoreSetOperatorWithResult, TRest extends SingleStoreSetOperatorWithResult[], TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, >( leftSelect: SingleStoreSetOperatorInterface< TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, rightSelect: SetOperatorRightSelect, ...restSelects: SetOperatorRestSelect ) => SingleStoreSelectWithout< SingleStoreSelectBase< TTableName, TSelection, TSelectMode, TPreparedQueryHKT, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, false, SingleStoreSetOperatorExcludedMethods, true >; export type GetSingleStoreSetOperators = { union: SingleStoreCreateSetOperatorFn; intersect: SingleStoreCreateSetOperatorFn; except: SingleStoreCreateSetOperatorFn; unionAll: SingleStoreCreateSetOperatorFn; minus: SingleStoreCreateSetOperatorFn; }; ================================================ FILE: drizzle-orm/src/singlestore-core/query-builders/update.ts ================================================ import type { GetColumnData } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import { QueryPromise } from '~/query-promise.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; import type { AnySingleStoreQueryResultHKT, PreparedQueryHKTBase, PreparedQueryKind, SingleStorePreparedQueryConfig, SingleStoreQueryResultHKT, SingleStoreQueryResultKind, SingleStoreSession, } from '~/singlestore-core/session.ts'; import type { SingleStoreTable } from '~/singlestore-core/table.ts'; import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { mapUpdateSet, type UpdateSet, type ValueOrArray } from '~/utils.ts'; import type { SingleStoreColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import type { SelectedFieldsOrdered } from './select.types.ts'; export interface SingleStoreUpdateConfig { where?: SQL | undefined; limit?: number | Placeholder; orderBy?: (SingleStoreColumn | SQL | SQL.Aliased)[]; set: UpdateSet; table: SingleStoreTable; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } export type SingleStoreUpdateSetSource = & { [Key in keyof TTable['$inferInsert']]?: | GetColumnData | SQL | undefined; } & {}; export class SingleStoreUpdateBuilder< TTable extends SingleStoreTable, TQueryResult extends SingleStoreQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, > { static readonly [entityKind]: string = 'SingleStoreUpdateBuilder'; declare readonly _: { readonly table: TTable; }; constructor( private table: TTable, private session: SingleStoreSession, private dialect: SingleStoreDialect, private withList?: Subquery[], ) {} set(values: SingleStoreUpdateSetSource): SingleStoreUpdateBase { return new SingleStoreUpdateBase( this.table, mapUpdateSet(this.table, values), this.session, this.dialect, this.withList, ); } } export type SingleStoreUpdateWithout< T extends AnySingleStoreUpdateBase, TDynamic extends boolean, K extends keyof T & string, > = TDynamic extends true ? T : Omit< SingleStoreUpdateBase< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type SingleStoreUpdatePrepare = PreparedQueryKind< T['_']['preparedQueryHKT'], SingleStorePreparedQueryConfig & { execute: SingleStoreQueryResultKind; iterator: never; }, true >; export type SingleStoreUpdateDynamic = SingleStoreUpdate< T['_']['table'], T['_']['queryResult'], T['_']['preparedQueryHKT'] >; export type SingleStoreUpdate< TTable extends SingleStoreTable = SingleStoreTable, TQueryResult extends SingleStoreQueryResultHKT = AnySingleStoreQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, > = SingleStoreUpdateBase; export type AnySingleStoreUpdateBase = SingleStoreUpdateBase; export interface SingleStoreUpdateBase< TTable extends SingleStoreTable, TQueryResult extends SingleStoreQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends QueryPromise>, SQLWrapper { readonly _: { readonly table: TTable; readonly queryResult: TQueryResult; readonly preparedQueryHKT: TPreparedQueryHKT; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; }; } export class SingleStoreUpdateBase< TTable extends SingleStoreTable, TQueryResult extends SingleStoreQueryResultHKT, // eslint-disable-next-line @typescript-eslint/no-unused-vars TPreparedQueryHKT extends PreparedQueryHKTBase, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise> implements SQLWrapper { static override readonly [entityKind]: string = 'SingleStoreUpdate'; private config: SingleStoreUpdateConfig; constructor( table: TTable, set: UpdateSet, private session: SingleStoreSession, private dialect: SingleStoreDialect, withList?: Subquery[], ) { super(); this.config = { set, table, withList }; } /** * Adds a 'where' clause to the query. * * Calling this method will update only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param where the 'where' clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be updated. * * ```ts * // Update all cars with green color * db.update(cars).set({ color: 'red' }) * .where(eq(cars.color, 'green')); * // or * db.update(cars).set({ color: 'red' }) * .where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Update all BMW cars with a green color * db.update(cars).set({ color: 'red' }) * .where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Update all cars with the green or blue color * db.update(cars).set({ color: 'red' }) * .where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where(where: SQL | undefined): SingleStoreUpdateWithout { this.config.where = where; return this as any; } orderBy( builder: (updateTable: TTable) => ValueOrArray, ): SingleStoreUpdateWithout; orderBy(...columns: (SingleStoreColumn | SQL | SQL.Aliased)[]): SingleStoreUpdateWithout; orderBy( ...columns: | [(updateTable: TTable) => ValueOrArray] | (SingleStoreColumn | SQL | SQL.Aliased)[] ): SingleStoreUpdateWithout { if (typeof columns[0] === 'function') { const orderBy = columns[0]( new Proxy( this.config.table[Table.Symbol.Columns], new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as any, ); const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; this.config.orderBy = orderByArray; } else { const orderByArray = columns as (SingleStoreColumn | SQL | SQL.Aliased)[]; this.config.orderBy = orderByArray; } return this as any; } limit(limit: number | Placeholder): SingleStoreUpdateWithout { this.config.limit = limit; return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildUpdateQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } prepare(): SingleStoreUpdatePrepare { return this.session.prepareQuery( this.dialect.sqlToQuery(this.getSQL()), this.config.returning, undefined, undefined, undefined, { type: 'delete', tables: extractUsedTable(this.config.table), }, ) as SingleStoreUpdatePrepare; } override execute: ReturnType['execute'] = (placeholderValues) => { return this.prepare().execute(placeholderValues); }; private createIterator = (): ReturnType['iterator'] => { const self = this; return async function*(placeholderValues) { yield* self.prepare().iterator(placeholderValues); }; }; iterator = this.createIterator(); $dynamic(): SingleStoreUpdateDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/singlestore-core/schema.ts ================================================ import { entityKind, is } from '~/entity.ts'; import { type SingleStoreTableFn, singlestoreTableWithSchema } from './table.ts'; /* import { type singlestoreView, singlestoreViewWithSchema } from './view.ts'; */ export class SingleStoreSchema { static readonly [entityKind]: string = 'SingleStoreSchema'; constructor( public readonly schemaName: TName, ) {} table: SingleStoreTableFn = (name, columns, extraConfig) => { return singlestoreTableWithSchema(name, columns, extraConfig, this.schemaName); }; /* view = ((name, columns) => { return singlestoreViewWithSchema(name, columns, this.schemaName); }) as typeof singlestoreView; */ } /** @deprecated - use `instanceof SingleStoreSchema` */ export function isSingleStoreSchema(obj: unknown): obj is SingleStoreSchema { return is(obj, SingleStoreSchema); } /** * Create a SingleStore schema. * https://docs.singlestore.com/cloud/create-a-database/ * * @param name singlestore use schema name * @returns SingleStore schema */ export function singlestoreDatabase(name: TName) { return new SingleStoreSchema(name); } /** * @see singlestoreDatabase */ export const singlestoreSchema = singlestoreDatabase; ================================================ FILE: drizzle-orm/src/singlestore-core/session.ts ================================================ import { type Cache, hashQuery, NoopCache } from '~/cache/core/cache.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import { DrizzleQueryError, TransactionRollbackError } from '~/errors.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { type Query, type SQL, sql } from '~/sql/sql.ts'; import type { Assume, Equal } from '~/utils.ts'; import { SingleStoreDatabase } from './db.ts'; import type { SingleStoreDialect } from './dialect.ts'; import type { SelectedFieldsOrdered } from './query-builders/select.types.ts'; export interface SingleStoreQueryResultHKT { readonly $brand: 'SingleStoreQueryResultHKT'; readonly row: unknown; readonly type: unknown; } export interface AnySingleStoreQueryResultHKT extends SingleStoreQueryResultHKT { readonly type: any; } export type SingleStoreQueryResultKind = (TKind & { readonly row: TRow; })['type']; export interface SingleStorePreparedQueryConfig { execute: unknown; iterator: unknown; } export interface SingleStorePreparedQueryHKT { readonly $brand: 'SingleStorePreparedQueryHKT'; readonly config: unknown; readonly type: unknown; } export type PreparedQueryKind< TKind extends SingleStorePreparedQueryHKT, TConfig extends SingleStorePreparedQueryConfig, TAssume extends boolean = false, > = Equal extends true ? Assume<(TKind & { readonly config: TConfig })['type'], SingleStorePreparedQuery> : (TKind & { readonly config: TConfig })['type']; export abstract class SingleStorePreparedQuery { static readonly [entityKind]: string = 'SingleStorePreparedQuery'; constructor( private cache?: Cache, // per query related metadata private queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, // config that was passed through $withCache private cacheConfig?: WithCacheConfig, ) { // it means that no $withCache options were passed and it should be just enabled if (cache && cache.strategy() === 'all' && cacheConfig === undefined) { this.cacheConfig = { enable: true, autoInvalidate: true }; } if (!this.cacheConfig?.enable) { this.cacheConfig = undefined; } } /** @internal */ protected async queryWithCache( queryString: string, params: any[], query: () => Promise, ): Promise { if (this.cache === undefined || is(this.cache, NoopCache) || this.queryMetadata === undefined) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // don't do any mutations, if globally is false if (this.cacheConfig && !this.cacheConfig.enable) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // For mutate queries, we should query the database, wait for a response, and then perform invalidation if ( ( this.queryMetadata.type === 'insert' || this.queryMetadata.type === 'update' || this.queryMetadata.type === 'delete' ) && this.queryMetadata.tables.length > 0 ) { try { const [res] = await Promise.all([ query(), this.cache.onMutate({ tables: this.queryMetadata.tables }), ]); return res; } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // don't do any reads if globally disabled if (!this.cacheConfig) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } if (this.queryMetadata.type === 'select') { const fromCache = await this.cache.get( this.cacheConfig.tag ?? await hashQuery(queryString, params), this.queryMetadata.tables, this.cacheConfig.tag !== undefined, this.cacheConfig.autoInvalidate, ); if (fromCache === undefined) { let result; try { result = await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } // put actual key await this.cache.put( this.cacheConfig.tag ?? await hashQuery(queryString, params), result, // make sure we send tables that were used in a query only if user wants to invalidate it on each write this.cacheConfig.autoInvalidate ? this.queryMetadata.tables : [], this.cacheConfig.tag !== undefined, this.cacheConfig.config, ); // put flag if we should invalidate or not return result; } return fromCache as unknown as T; } try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } /** @internal */ joinsNotNullableMap?: Record; abstract execute(placeholderValues?: Record): Promise; abstract iterator(placeholderValues?: Record): AsyncGenerator; } export interface SingleStoreTransactionConfig { withConsistentSnapshot?: boolean; accessMode?: 'read only' | 'read write'; isolationLevel: 'read committed'; // SingleStore only supports read committed isolation level (https://docs.singlestore.com/db/v8.7/introduction/faqs/durability/) } export abstract class SingleStoreSession< TQueryResult extends SingleStoreQueryResultHKT = SingleStoreQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase = PreparedQueryHKTBase, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > { static readonly [entityKind]: string = 'SingleStoreSession'; constructor(protected dialect: SingleStoreDialect) {} abstract prepareQuery< T extends SingleStorePreparedQueryConfig, TPreparedQueryHKT extends SingleStorePreparedQueryHKT, >( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], generatedIds?: Record[], returningIds?: SelectedFieldsOrdered, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PreparedQueryKind; execute(query: SQL): Promise { return this.prepareQuery( this.dialect.sqlToQuery(query), undefined, ).execute(); } abstract all(query: SQL): Promise; async count(sql: SQL): Promise { const res = await this.execute<[[{ count: string }]]>(sql); return Number( res[0][0]['count'], ); } abstract transaction( transaction: (tx: SingleStoreTransaction) => Promise, config?: SingleStoreTransactionConfig, ): Promise; protected getSetTransactionSQL(config: SingleStoreTransactionConfig): SQL | undefined { const parts: string[] = []; if (config.isolationLevel) { parts.push(`isolation level ${config.isolationLevel}`); } return parts.length ? sql`set transaction ${sql.raw(parts.join(' '))}` : undefined; } protected getStartTransactionSQL(config: SingleStoreTransactionConfig): SQL | undefined { const parts: string[] = []; if (config.withConsistentSnapshot) { parts.push('with consistent snapshot'); } if (config.accessMode) { parts.push(config.accessMode); } return parts.length ? sql`start transaction ${sql.raw(parts.join(' '))}` : undefined; } } export abstract class SingleStoreTransaction< TQueryResult extends SingleStoreQueryResultHKT, TPreparedQueryHKT extends PreparedQueryHKTBase, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = Record, > extends SingleStoreDatabase { static override readonly [entityKind]: string = 'SingleStoreTransaction'; constructor( dialect: SingleStoreDialect, session: SingleStoreSession, protected schema: RelationalSchemaConfig | undefined, protected readonly nestedIndex: number, ) { super(dialect, session, schema); } rollback(): never { throw new TransactionRollbackError(); } /** Nested transactions (aka savepoints) only work with InnoDB engine. */ abstract override transaction( transaction: (tx: SingleStoreTransaction) => Promise, ): Promise; } export interface PreparedQueryHKTBase extends SingleStorePreparedQueryHKT { type: SingleStorePreparedQuery>; } ================================================ FILE: drizzle-orm/src/singlestore-core/subquery.ts ================================================ import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import type { Subquery, WithSubquery, WithSubqueryWithoutSelection } from '~/subquery.ts'; import type { QueryBuilder } from './query-builders/query-builder.ts'; export type SubqueryWithSelection< TSelection extends ColumnsSelection, TAlias extends string, > = & Subquery> & AddAliasToSelection; export type WithSubqueryWithSelection< TSelection extends ColumnsSelection, TAlias extends string, > = & WithSubquery> & AddAliasToSelection; export interface WithBuilder { (alias: TAlias): { as: { ( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): WithSubqueryWithSelection; ( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): WithSubqueryWithoutSelection; }; }; (alias: TAlias, selection: TSelection): { as: (qb: SQL | ((qb: QueryBuilder) => SQL)) => WithSubqueryWithSelection; }; } ================================================ FILE: drizzle-orm/src/singlestore-core/table.ts ================================================ import type { BuildColumns, BuildExtraConfigColumns } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import { Table, type TableConfig as TableConfigBase, type UpdateTableConfig } from '~/table.ts'; import { getSingleStoreColumnBuilders, type SingleStoreColumnBuilders } from './columns/all.ts'; import type { SingleStoreColumn, SingleStoreColumnBuilder, SingleStoreColumnBuilderBase } from './columns/common.ts'; import type { AnyIndexBuilder } from './indexes.ts'; import type { PrimaryKeyBuilder } from './primary-keys.ts'; import type { UniqueConstraintBuilder } from './unique-constraint.ts'; export type SingleStoreTableExtraConfigValue = | AnyIndexBuilder | PrimaryKeyBuilder | UniqueConstraintBuilder; export type SingleStoreTableExtraConfig = Record< string, SingleStoreTableExtraConfigValue >; export type TableConfig = TableConfigBase; export class SingleStoreTable extends Table { static override readonly [entityKind]: string = 'SingleStoreTable'; declare protected $columns: T['columns']; /** @internal */ static override readonly Symbol = Object.assign({}, Table.Symbol, {}); /** @internal */ override [Table.Symbol.Columns]!: NonNullable; /** @internal */ override [Table.Symbol.ExtraConfigBuilder]: | ((self: Record) => SingleStoreTableExtraConfig) | undefined = undefined; } export type AnySingleStoreTable = {}> = SingleStoreTable< UpdateTableConfig >; export type SingleStoreTableWithColumns = & SingleStoreTable & { [Key in keyof T['columns']]: T['columns'][Key]; }; export function singlestoreTableWithSchema< TTableName extends string, TSchemaName extends string | undefined, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap | ((columnTypes: SingleStoreColumnBuilders) => TColumnsMap), extraConfig: | (( self: BuildColumns, ) => SingleStoreTableExtraConfig | SingleStoreTableExtraConfigValue[]) | undefined, schema: TSchemaName, baseName = name, ): SingleStoreTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'singlestore'; }> { const rawTable = new SingleStoreTable<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'singlestore'; }>(name, schema, baseName); const parsedColumns: TColumnsMap = typeof columns === 'function' ? columns(getSingleStoreColumnBuilders()) : columns; const builtColumns = Object.fromEntries( Object.entries(parsedColumns).map(([name, colBuilderBase]) => { const colBuilder = colBuilderBase as SingleStoreColumnBuilder; colBuilder.setName(name); const column = colBuilder.build(rawTable); return [name, column]; }), ) as unknown as BuildColumns; const table = Object.assign(rawTable, builtColumns); table[Table.Symbol.Columns] = builtColumns; table[Table.Symbol.ExtraConfigColumns] = builtColumns as unknown as BuildExtraConfigColumns< TTableName, TColumnsMap, 'singlestore' >; if (extraConfig) { table[SingleStoreTable.Symbol.ExtraConfigBuilder] = extraConfig as unknown as ( self: Record, ) => SingleStoreTableExtraConfig; } return table; } export interface SingleStoreTableFn { < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap, extraConfig?: ( self: BuildColumns, ) => SingleStoreTableExtraConfigValue[], ): SingleStoreTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'singlestore'; }>; < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: (columnTypes: SingleStoreColumnBuilders) => TColumnsMap, extraConfig?: (self: BuildColumns) => SingleStoreTableExtraConfigValue[], ): SingleStoreTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'singlestore'; }>; /** * @deprecated The third parameter of singlestoreTable is changing and will only accept an array instead of an object * * @example * Deprecated version: * ```ts * export const users = singlestoreTable("users", { * id: int(), * }, (t) => ({ * idx: index('custom_name').on(t.id) * })); * ``` * * New API: * ```ts * export const users = singlestoreTable("users", { * id: int(), * }, (t) => [ * index('custom_name').on(t.id) * ]); * ``` */ < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap, extraConfig?: (self: BuildColumns) => SingleStoreTableExtraConfig, ): SingleStoreTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'singlestore'; }>; /** * @deprecated The third parameter of singlestoreTable is changing and will only accept an array instead of an object * * @example * Deprecated version: * ```ts * export const users = singlestoreTable("users", { * id: int(), * }, (t) => ({ * idx: index('custom_name').on(t.id) * })); * ``` * * New API: * ```ts * export const users = singlestoreTable("users", { * id: int(), * }, (t) => [ * index('custom_name').on(t.id) * ]); * ``` */ < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: (columnTypes: SingleStoreColumnBuilders) => TColumnsMap, extraConfig?: (self: BuildColumns) => SingleStoreTableExtraConfig, ): SingleStoreTableWithColumns<{ name: TTableName; schema: TSchemaName; columns: BuildColumns; dialect: 'singlestore'; }>; } export const singlestoreTable: SingleStoreTableFn = (name, columns, extraConfig) => { return singlestoreTableWithSchema(name, columns, extraConfig, undefined, name); }; export function singlestoreTableCreator(customizeTableName: (name: string) => string): SingleStoreTableFn { return (name, columns, extraConfig) => { return singlestoreTableWithSchema(customizeTableName(name) as typeof name, columns, extraConfig, undefined, name); }; } ================================================ FILE: drizzle-orm/src/singlestore-core/unique-constraint.ts ================================================ import { entityKind } from '~/entity.ts'; import { TableName } from '~/table.utils.ts'; import type { SingleStoreColumn } from './columns/index.ts'; import type { SingleStoreTable } from './table.ts'; export function unique(name?: string): UniqueOnConstraintBuilder { return new UniqueOnConstraintBuilder(name); } export function uniqueKeyName(table: SingleStoreTable, columns: string[]) { return `${table[TableName]}_${columns.join('_')}_unique`; } export class UniqueConstraintBuilder { static readonly [entityKind]: string = 'SingleStoreUniqueConstraintBuilder'; /** @internal */ columns: SingleStoreColumn[]; constructor( columns: SingleStoreColumn[], private name?: string, ) { this.columns = columns; } /** @internal */ build(table: SingleStoreTable): UniqueConstraint { return new UniqueConstraint(table, this.columns, this.name); } } export class UniqueOnConstraintBuilder { static readonly [entityKind]: string = 'SingleStoreUniqueOnConstraintBuilder'; /** @internal */ name?: string; constructor( name?: string, ) { this.name = name; } on(...columns: [SingleStoreColumn, ...SingleStoreColumn[]]) { return new UniqueConstraintBuilder(columns, this.name); } } export class UniqueConstraint { static readonly [entityKind]: string = 'SingleStoreUniqueConstraint'; readonly columns: SingleStoreColumn[]; readonly name?: string; readonly nullsNotDistinct: boolean = false; constructor(readonly table: SingleStoreTable, columns: SingleStoreColumn[], name?: string) { this.columns = columns; this.name = name ?? uniqueKeyName(this.table, this.columns.map((column) => column.name)); } getName() { return this.name; } } ================================================ FILE: drizzle-orm/src/singlestore-core/utils.ts ================================================ import { is } from '~/entity.ts'; import { SQL } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import type { Index } from './indexes.ts'; import { IndexBuilder } from './indexes.ts'; import type { PrimaryKey } from './primary-keys.ts'; import { PrimaryKeyBuilder } from './primary-keys.ts'; import { SingleStoreTable } from './table.ts'; import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; /* import { SingleStoreViewConfig } from './view-common.ts'; import type { SingleStoreView } from './view.ts'; */ export function extractUsedTable(table: SingleStoreTable | Subquery | SQL): string[] { if (is(table, SingleStoreTable)) { return [`${table[Table.Symbol.BaseName]}`]; } if (is(table, Subquery)) { return table._.usedTables ?? []; } if (is(table, SQL)) { return table.usedTables ?? []; } return []; } export function getTableConfig(table: SingleStoreTable) { const columns = Object.values(table[SingleStoreTable.Symbol.Columns]); const indexes: Index[] = []; const primaryKeys: PrimaryKey[] = []; const uniqueConstraints: UniqueConstraint[] = []; const name = table[Table.Symbol.Name]; const schema = table[Table.Symbol.Schema]; const baseName = table[Table.Symbol.BaseName]; const extraConfigBuilder = table[SingleStoreTable.Symbol.ExtraConfigBuilder]; if (extraConfigBuilder !== undefined) { const extraConfig = extraConfigBuilder(table[SingleStoreTable.Symbol.Columns]); const extraValues = Array.isArray(extraConfig) ? extraConfig.flat(1) as any[] : Object.values(extraConfig); for (const builder of Object.values(extraValues)) { if (is(builder, IndexBuilder)) { indexes.push(builder.build(table)); } else if (is(builder, UniqueConstraintBuilder)) { uniqueConstraints.push(builder.build(table)); } else if (is(builder, PrimaryKeyBuilder)) { primaryKeys.push(builder.build(table)); } } } return { columns, indexes, primaryKeys, uniqueConstraints, name, schema, baseName, }; } /* export function getViewConfig< TName extends string = string, TExisting extends boolean = boolean, >(view: SingleStoreView) { return { ...view[ViewBaseConfig], ...view[SingleStoreViewConfig], }; } */ ================================================ FILE: drizzle-orm/src/singlestore-core/view-base.ts ================================================ import { entityKind } from '~/entity.ts'; import type { ColumnsSelection } from '~/sql/sql.ts'; import { View } from '~/sql/sql.ts'; export abstract class SingleStoreViewBase< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends View { static override readonly [entityKind]: string = 'SingleStoreViewBase'; declare readonly _: View['_'] & { readonly viewBrand: 'SingleStoreViewBase'; }; } ================================================ FILE: drizzle-orm/src/singlestore-core/view-common.ts ================================================ export const SingleStoreViewConfig = Symbol.for('drizzle:SingleStoreViewConfig'); ================================================ FILE: drizzle-orm/src/singlestore-core/view.ts ================================================ import type { BuildColumns } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import { getTableColumns } from '~/utils.ts'; import type { SingleStoreColumn, SingleStoreColumnBuilderBase } from './columns/index.ts'; import { QueryBuilder } from './query-builders/query-builder.ts'; import type { SelectedFields } from './query-builders/select.types.ts'; import { singlestoreTable } from './table.ts'; import { SingleStoreViewBase } from './view-base.ts'; import { SingleStoreViewConfig } from './view-common.ts'; export interface ViewBuilderConfig { algorithm?: 'undefined' | 'merge' | 'temptable'; definer?: string; sqlSecurity?: 'definer' | 'invoker'; withCheckOption?: 'cascaded' | 'local'; } export class ViewBuilderCore { static readonly [entityKind]: string = 'SingleStoreViewBuilder'; declare readonly _: { readonly name: TConfig['name']; readonly columns: TConfig['columns']; }; constructor( protected name: TConfig['name'], protected schema: string | undefined, ) {} protected config: ViewBuilderConfig = {}; algorithm( algorithm: Exclude, ): this { this.config.algorithm = algorithm; return this; } definer( definer: Exclude, ): this { this.config.definer = definer; return this; } sqlSecurity( sqlSecurity: Exclude, ): this { this.config.sqlSecurity = sqlSecurity; return this; } withCheckOption( withCheckOption?: Exclude, ): this { this.config.withCheckOption = withCheckOption ?? 'cascaded'; return this; } } export class ViewBuilder extends ViewBuilderCore<{ name: TName }> { static override readonly [entityKind]: string = 'SingleStoreViewBuilder'; as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): SingleStoreViewWithSelection> { if (typeof qb === 'function') { qb = qb(new QueryBuilder()); } const selectionProxy = new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }); const aliasedSelection = new Proxy(qb.getSelectedFields(), selectionProxy); return new Proxy( new SingleStoreView({ singlestoreConfig: this.config, config: { name: this.name, schema: this.schema, selectedFields: aliasedSelection, query: qb.getSQL().inlineParams(), }, }), selectionProxy as any, ) as SingleStoreViewWithSelection>; } } export class ManualViewBuilder< TName extends string = string, TColumns extends Record = Record, > extends ViewBuilderCore<{ name: TName; columns: TColumns }> { static override readonly [entityKind]: string = 'SingleStoreManualViewBuilder'; private columns: Record; constructor( name: TName, columns: TColumns, schema: string | undefined, ) { super(name, schema); this.columns = getTableColumns(singlestoreTable(name, columns)) as BuildColumns; } existing(): SingleStoreViewWithSelection> { return new Proxy( new SingleStoreView({ singlestoreConfig: undefined, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: undefined, }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as SingleStoreViewWithSelection>; } as(query: SQL): SingleStoreViewWithSelection> { return new Proxy( new SingleStoreView({ singlestoreConfig: this.config, config: { name: this.name, schema: this.schema, selectedFields: this.columns, query: query.inlineParams(), }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as SingleStoreViewWithSelection>; } } export class SingleStoreView< TName extends string = string, TExisting extends boolean = boolean, TSelectedFields extends ColumnsSelection = ColumnsSelection, > extends SingleStoreViewBase { static override readonly [entityKind]: string = 'SingleStoreView'; declare protected $SingleStoreViewBrand: 'SingleStoreView'; [SingleStoreViewConfig]: ViewBuilderConfig | undefined; constructor({ singlestoreConfig, config }: { singlestoreConfig: ViewBuilderConfig | undefined; config: { name: TName; schema: string | undefined; selectedFields: SelectedFields; query: SQL | undefined; }; }) { super(config); this[SingleStoreViewConfig] = singlestoreConfig; } } export type SingleStoreViewWithSelection< TName extends string, TExisting extends boolean, TSelectedFields extends ColumnsSelection, > = SingleStoreView & TSelectedFields; // TODO: needs to be implemented differently compared to MySQL. // /** @internal */ // export function singlestoreViewWithSchema( // name: string, // selection: Record | undefined, // schema: string | undefined, // ): ViewBuilder | ManualViewBuilder { // if (selection) { // return new ManualViewBuilder(name, selection, schema); // } // return new ViewBuilder(name, schema); // } // export function singlestoreView(name: TName): ViewBuilder; // export function singlestoreView>( // name: TName, // columns: TColumns, // ): ManualViewBuilder; // export function singlestoreView( // name: string, // selection?: Record, // ): ViewBuilder | ManualViewBuilder { // return singlestoreViewWithSchema(name, selection, undefined); // } ================================================ FILE: drizzle-orm/src/singlestore-proxy/driver.ts ================================================ import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { SingleStoreDatabase } from '~/singlestore-core/db.ts'; import { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; import type { DrizzleConfig } from '~/utils.ts'; import { type SingleStoreRemotePreparedQueryHKT, type SingleStoreRemoteQueryResultHKT, SingleStoreRemoteSession, } from './session.ts'; export class SingleStoreRemoteDatabase< TSchema extends Record = Record, > extends SingleStoreDatabase { static override readonly [entityKind]: string = 'SingleStoreRemoteDatabase'; } export type RemoteCallback = ( sql: string, params: any[], method: 'all' | 'execute', ) => Promise<{ rows: any[]; insertId?: number; affectedRows?: number }>; export function drizzle = Record>( callback: RemoteCallback, config: DrizzleConfig = {}, ): SingleStoreRemoteDatabase { const dialect = new SingleStoreDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new SingleStoreRemoteSession(callback, dialect, schema, { logger }); return new SingleStoreRemoteDatabase(dialect, session, schema as any) as SingleStoreRemoteDatabase< TSchema >; } ================================================ FILE: drizzle-orm/src/singlestore-proxy/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/singlestore-proxy/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; import type { SingleStoreRemoteDatabase } from './driver.ts'; export type ProxyMigrator = (migrationQueries: string[]) => Promise; export async function migrate>( db: SingleStoreRemoteDatabase, callback: ProxyMigrator, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; const migrationTableCreate = sql` create table if not exists ${sql.identifier(migrationsTable)} ( id serial primary key, hash text not null, created_at bigint ) `; await db.execute(migrationTableCreate); const dbMigrations = await db.select({ id: sql.raw('id'), hash: sql.raw('hash'), created_at: sql.raw('created_at'), }).from(sql.identifier(migrationsTable).getSQL()).orderBy( sql.raw('created_at desc'), ).limit(1); const lastDbMigration = dbMigrations[0]; const queriesToRun: string[] = []; for (const migration of migrations) { if ( !lastDbMigration || Number(lastDbMigration.created_at) < migration.folderMillis ) { queriesToRun.push( ...migration.sql, `insert into ${ sql.identifier(migrationsTable).value } (\`hash\`, \`created_at\`) values('${migration.hash}', '${migration.folderMillis}')`, ); } } await callback(queriesToRun); } ================================================ FILE: drizzle-orm/src/singlestore-proxy/session.ts ================================================ import type { FieldPacket, ResultSetHeader } from 'mysql2/promise'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import type { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; import { SingleStoreTransaction } from '~/singlestore-core/index.ts'; import type { SelectedFieldsOrdered } from '~/singlestore-core/query-builders/select.types.ts'; import type { PreparedQueryKind, SingleStorePreparedQueryConfig, SingleStorePreparedQueryHKT, SingleStoreQueryResultHKT, SingleStoreTransactionConfig, } from '~/singlestore-core/session.ts'; import { SingleStorePreparedQuery as PreparedQueryBase, SingleStoreSession } from '~/singlestore-core/session.ts'; import type { Query, SQL } from '~/sql/sql.ts'; import { fillPlaceholders } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; import type { RemoteCallback } from './driver.ts'; export type SingleStoreRawQueryResult = [ResultSetHeader, FieldPacket[]]; export interface SingleStoreRemoteSessionOptions { logger?: Logger; } export class SingleStoreRemoteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SingleStoreSession { static override readonly [entityKind]: string = 'SingleStoreRemoteSession'; private logger: Logger; constructor( private client: RemoteCallback, dialect: SingleStoreDialect, private schema: RelationalSchemaConfig | undefined, options: SingleStoreRemoteSessionOptions, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], generatedIds?: Record[], returningIds?: SelectedFieldsOrdered, ): PreparedQueryKind { return new PreparedQuery( this.client, query.sql, query.params, this.logger, fields, customResultMapper, generatedIds, returningIds, ) as PreparedQueryKind; } override all(query: SQL): Promise { const querySql = this.dialect.sqlToQuery(query); this.logger.logQuery(querySql.sql, querySql.params); return this.client(querySql.sql, querySql.params, 'all').then(({ rows }) => rows) as Promise; } override async transaction( _transaction: (tx: SingleStoreProxyTransaction) => Promise, _config?: SingleStoreTransactionConfig, ): Promise { throw new Error('Transactions are not supported by the SingleStore Proxy driver'); } } export class SingleStoreProxyTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SingleStoreTransaction< SingleStoreRemoteQueryResultHKT, SingleStoreRemotePreparedQueryHKT, TFullSchema, TSchema > { static override readonly [entityKind]: string = 'SingleStoreProxyTransaction'; override async transaction( _transaction: (tx: SingleStoreProxyTransaction) => Promise, ): Promise { throw new Error('Transactions are not supported by the SingleStore Proxy driver'); } } export class PreparedQuery extends PreparedQueryBase { static override readonly [entityKind]: string = 'SingleStoreProxyPreparedQuery'; constructor( private client: RemoteCallback, private queryString: string, private params: unknown[], private logger: Logger, private fields: SelectedFieldsOrdered | undefined, private customResultMapper?: (rows: unknown[][]) => T['execute'], // Keys that were used in $default and the value that was generated for them private generatedIds?: Record[], // Keys that should be returned, it has the column with all properries + key from object private returningIds?: SelectedFieldsOrdered, ) { super(); } async execute(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); const { fields, client, queryString, logger, joinsNotNullableMap, customResultMapper, returningIds, generatedIds } = this; logger.logQuery(queryString, params); if (!fields && !customResultMapper) { const { rows: data } = await client(queryString, params, 'execute'); const insertId = data[0].insertId as number; const affectedRows = data[0].affectedRows; if (returningIds) { const returningResponse = []; let j = 0; for (let i = insertId; i < insertId + affectedRows; i++) { for (const column of returningIds) { const key = returningIds[0]!.path[0]!; if (is(column.field, Column)) { // @ts-ignore if (column.field.primary && column.field.autoIncrement) { returningResponse.push({ [key]: i }); } if (column.field.defaultFn && generatedIds) { // generatedIds[rowIdx][key] returningResponse.push({ [key]: generatedIds[j]![key] }); } } } j++; } return returningResponse; } return data; } const { rows } = await client(queryString, params, 'all'); if (customResultMapper) { return customResultMapper(rows); } return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } override iterator( _placeholderValues: Record = {}, ): AsyncGenerator { throw new Error('Streaming is not supported by the SingleStore Proxy driver'); } } export interface SingleStoreRemoteQueryResultHKT extends SingleStoreQueryResultHKT { type: SingleStoreRawQueryResult; } export interface SingleStoreRemotePreparedQueryHKT extends SingleStorePreparedQueryHKT { type: PreparedQuery>; } ================================================ FILE: drizzle-orm/src/sql/expressions/conditions.ts ================================================ import { type AnyColumn, Column, type GetColumnData } from '~/column.ts'; import { is } from '~/entity.ts'; import { Table } from '~/table.ts'; import { isDriverValueEncoder, isSQLWrapper, Param, Placeholder, SQL, sql, type SQLChunk, type SQLWrapper, StringChunk, View, } from '../sql.ts'; export function bindIfParam(value: unknown, column: SQLWrapper): SQLChunk { if ( isDriverValueEncoder(column) && !isSQLWrapper(value) && !is(value, Param) && !is(value, Placeholder) && !is(value, Column) && !is(value, Table) && !is(value, View) ) { return new Param(value, column); } return value as SQLChunk; } export interface BinaryOperator { ( left: TColumn, right: GetColumnData | SQLWrapper, ): SQL; (left: SQL.Aliased, right: T | SQLWrapper): SQL; ( left: Exclude, right: unknown, ): SQL; } /** * Test that two values are equal. * * Remember that the SQL standard dictates that * two NULL values are not equal, so if you want to test * whether a value is null, you may want to use * `isNull` instead. * * ## Examples * * ```ts * // Select cars made by Ford * db.select().from(cars) * .where(eq(cars.make, 'Ford')) * ``` * * @see isNull for a way to test equality to NULL. */ export const eq: BinaryOperator = (left: SQLWrapper, right: unknown): SQL => { return sql`${left} = ${bindIfParam(right, left)}`; }; /** * Test that two values are not equal. * * Remember that the SQL standard dictates that * two NULL values are not equal, so if you want to test * whether a value is not null, you may want to use * `isNotNull` instead. * * ## Examples * * ```ts * // Select cars not made by Ford * db.select().from(cars) * .where(ne(cars.make, 'Ford')) * ``` * * @see isNotNull for a way to test whether a value is not null. */ export const ne: BinaryOperator = (left: SQLWrapper, right: unknown): SQL => { return sql`${left} <> ${bindIfParam(right, left)}`; }; /** * Combine a list of conditions with the `and` operator. Conditions * that are equal `undefined` are automatically ignored. * * ## Examples * * ```ts * db.select().from(cars) * .where( * and( * eq(cars.make, 'Volvo'), * eq(cars.year, 1950), * ) * ) * ``` */ export function and(...conditions: (SQLWrapper | undefined)[]): SQL | undefined; export function and( ...unfilteredConditions: (SQLWrapper | undefined)[] ): SQL | undefined { const conditions = unfilteredConditions.filter( (c): c is Exclude => c !== undefined, ); if (conditions.length === 0) { return undefined; } if (conditions.length === 1) { return new SQL(conditions); } return new SQL([ new StringChunk('('), sql.join(conditions, new StringChunk(' and ')), new StringChunk(')'), ]); } /** * Combine a list of conditions with the `or` operator. Conditions * that are equal `undefined` are automatically ignored. * * ## Examples * * ```ts * db.select().from(cars) * .where( * or( * eq(cars.make, 'GM'), * eq(cars.make, 'Ford'), * ) * ) * ``` */ export function or(...conditions: (SQLWrapper | undefined)[]): SQL | undefined; export function or( ...unfilteredConditions: (SQLWrapper | undefined)[] ): SQL | undefined { const conditions = unfilteredConditions.filter( (c): c is Exclude => c !== undefined, ); if (conditions.length === 0) { return undefined; } if (conditions.length === 1) { return new SQL(conditions); } return new SQL([ new StringChunk('('), sql.join(conditions, new StringChunk(' or ')), new StringChunk(')'), ]); } /** * Negate the meaning of an expression using the `not` keyword. * * ## Examples * * ```ts * // Select cars _not_ made by GM or Ford. * db.select().from(cars) * .where(not(inArray(cars.make, ['GM', 'Ford']))) * ``` */ export function not(condition: SQLWrapper): SQL { return sql`not ${condition}`; } /** * Test that the first expression passed is greater than * the second expression. * * ## Examples * * ```ts * // Select cars made after 2000. * db.select().from(cars) * .where(gt(cars.year, 2000)) * ``` * * @see gte for greater-than-or-equal */ export const gt: BinaryOperator = (left: SQLWrapper, right: unknown): SQL => { return sql`${left} > ${bindIfParam(right, left)}`; }; /** * Test that the first expression passed is greater than * or equal to the second expression. Use `gt` to * test whether an expression is strictly greater * than another. * * ## Examples * * ```ts * // Select cars made on or after 2000. * db.select().from(cars) * .where(gte(cars.year, 2000)) * ``` * * @see gt for a strictly greater-than condition */ export const gte: BinaryOperator = (left: SQLWrapper, right: unknown): SQL => { return sql`${left} >= ${bindIfParam(right, left)}`; }; /** * Test that the first expression passed is less than * the second expression. * * ## Examples * * ```ts * // Select cars made before 2000. * db.select().from(cars) * .where(lt(cars.year, 2000)) * ``` * * @see lte for less-than-or-equal */ export const lt: BinaryOperator = (left: SQLWrapper, right: unknown): SQL => { return sql`${left} < ${bindIfParam(right, left)}`; }; /** * Test that the first expression passed is less than * or equal to the second expression. * * ## Examples * * ```ts * // Select cars made before 2000. * db.select().from(cars) * .where(lte(cars.year, 2000)) * ``` * * @see lt for a strictly less-than condition */ export const lte: BinaryOperator = (left: SQLWrapper, right: unknown): SQL => { return sql`${left} <= ${bindIfParam(right, left)}`; }; /** * Test whether the first parameter, a column or expression, * has a value from a list passed as the second argument. * * ## Examples * * ```ts * // Select cars made by Ford or GM. * db.select().from(cars) * .where(inArray(cars.make, ['Ford', 'GM'])) * ``` * * @see notInArray for the inverse of this test */ export function inArray( column: SQL.Aliased, values: (T | Placeholder)[] | SQLWrapper, ): SQL; export function inArray( column: TColumn, values: ReadonlyArray | Placeholder> | SQLWrapper, ): SQL; export function inArray( column: Exclude, values: ReadonlyArray | SQLWrapper, ): SQL; export function inArray( column: SQLWrapper, values: ReadonlyArray | SQLWrapper, ): SQL { if (Array.isArray(values)) { if (values.length === 0) { return sql`false`; } return sql`${column} in ${values.map((v) => bindIfParam(v, column))}`; } return sql`${column} in ${bindIfParam(values, column)}`; } /** * Test whether the first parameter, a column or expression, * has a value that is not present in a list passed as the * second argument. * * ## Examples * * ```ts * // Select cars made by any company except Ford or GM. * db.select().from(cars) * .where(notInArray(cars.make, ['Ford', 'GM'])) * ``` * * @see inArray for the inverse of this test */ export function notInArray( column: SQL.Aliased, values: (T | Placeholder)[] | SQLWrapper, ): SQL; export function notInArray( column: TColumn, values: (GetColumnData | Placeholder)[] | SQLWrapper, ): SQL; export function notInArray( column: Exclude, values: (unknown | Placeholder)[] | SQLWrapper, ): SQL; export function notInArray( column: SQLWrapper, values: (unknown | Placeholder)[] | SQLWrapper, ): SQL { if (Array.isArray(values)) { if (values.length === 0) { return sql`true`; } return sql`${column} not in ${values.map((v) => bindIfParam(v, column))}`; } return sql`${column} not in ${bindIfParam(values, column)}`; } /** * Test whether an expression is NULL. By the SQL standard, * NULL is neither equal nor not equal to itself, so * it's recommended to use `isNull` and `notIsNull` for * comparisons to NULL. * * ## Examples * * ```ts * // Select cars that have no discontinuedAt date. * db.select().from(cars) * .where(isNull(cars.discontinuedAt)) * ``` * * @see isNotNull for the inverse of this test */ export function isNull(value: SQLWrapper): SQL { return sql`${value} is null`; } /** * Test whether an expression is not NULL. By the SQL standard, * NULL is neither equal nor not equal to itself, so * it's recommended to use `isNull` and `notIsNull` for * comparisons to NULL. * * ## Examples * * ```ts * // Select cars that have been discontinued. * db.select().from(cars) * .where(isNotNull(cars.discontinuedAt)) * ``` * * @see isNull for the inverse of this test */ export function isNotNull(value: SQLWrapper): SQL { return sql`${value} is not null`; } /** * Test whether a subquery evaluates to have any rows. * * ## Examples * * ```ts * // Users whose `homeCity` column has a match in a cities * // table. * db * .select() * .from(users) * .where( * exists(db.select() * .from(cities) * .where(eq(users.homeCity, cities.id))), * ); * ``` * * @see notExists for the inverse of this test */ export function exists(subquery: SQLWrapper): SQL { return sql`exists ${subquery}`; } /** * Test whether a subquery doesn't include any result * rows. * * ## Examples * * ```ts * // Users whose `homeCity` column doesn't match * // a row in the cities table. * db * .select() * .from(users) * .where( * notExists(db.select() * .from(cities) * .where(eq(users.homeCity, cities.id))), * ); * ``` * * @see exists for the inverse of this test */ export function notExists(subquery: SQLWrapper): SQL { return sql`not exists ${subquery}`; } /** * Test whether an expression is between two values. This * is an easier way to express range tests, which would be * expressed mathematically as `x <= a <= y` but in SQL * would have to be like `a >= x AND a <= y`. * * Between is inclusive of the endpoints: if `column` * is equal to `min` or `max`, it will be TRUE. * * ## Examples * * ```ts * // Select cars made between 1990 and 2000 * db.select().from(cars) * .where(between(cars.year, 1990, 2000)) * ``` * * @see notBetween for the inverse of this test */ export function between( column: SQL.Aliased, min: T | SQLWrapper, max: T | SQLWrapper, ): SQL; export function between( column: TColumn, min: GetColumnData | SQLWrapper, max: GetColumnData | SQLWrapper, ): SQL; export function between( column: Exclude, min: unknown, max: unknown, ): SQL; export function between(column: SQLWrapper, min: unknown, max: unknown): SQL { return sql`${column} between ${bindIfParam(min, column)} and ${ bindIfParam( max, column, ) }`; } /** * Test whether an expression is not between two values. * * This, like `between`, includes its endpoints, so if * the `column` is equal to `min` or `max`, in this case * it will evaluate to FALSE. * * ## Examples * * ```ts * // Exclude cars made in the 1970s * db.select().from(cars) * .where(notBetween(cars.year, 1970, 1979)) * ``` * * @see between for the inverse of this test */ export function notBetween( column: SQL.Aliased, min: T | SQLWrapper, max: T | SQLWrapper, ): SQL; export function notBetween( column: TColumn, min: GetColumnData | SQLWrapper, max: GetColumnData | SQLWrapper, ): SQL; export function notBetween( column: Exclude, min: unknown, max: unknown, ): SQL; export function notBetween( column: SQLWrapper, min: unknown, max: unknown, ): SQL { return sql`${column} not between ${ bindIfParam( min, column, ) } and ${bindIfParam(max, column)}`; } /** * Compare a column to a pattern, which can include `%` and `_` * characters to match multiple variations. Including `%` * in the pattern matches zero or more characters, and including * `_` will match a single character. * * ## Examples * * ```ts * // Select all cars with 'Turbo' in their names. * db.select().from(cars) * .where(like(cars.name, '%Turbo%')) * ``` * * @see ilike for a case-insensitive version of this condition */ export function like(column: Column | SQL.Aliased | SQL, value: string | SQLWrapper): SQL { return sql`${column} like ${value}`; } /** * The inverse of like - this tests that a given column * does not match a pattern, which can include `%` and `_` * characters to match multiple variations. Including `%` * in the pattern matches zero or more characters, and including * `_` will match a single character. * * ## Examples * * ```ts * // Select all cars that don't have "ROver" in their name. * db.select().from(cars) * .where(notLike(cars.name, '%Rover%')) * ``` * * @see like for the inverse condition * @see notIlike for a case-insensitive version of this condition */ export function notLike(column: Column | SQL.Aliased | SQL, value: string | SQLWrapper): SQL { return sql`${column} not like ${value}`; } /** * Case-insensitively compare a column to a pattern, * which can include `%` and `_` * characters to match multiple variations. Including `%` * in the pattern matches zero or more characters, and including * `_` will match a single character. * * Unlike like, this performs a case-insensitive comparison. * * ## Examples * * ```ts * // Select all cars with 'Turbo' in their names. * db.select().from(cars) * .where(ilike(cars.name, '%Turbo%')) * ``` * * @see like for a case-sensitive version of this condition */ export function ilike(column: Column | SQL.Aliased | SQL, value: string | SQLWrapper): SQL { return sql`${column} ilike ${value}`; } /** * The inverse of ilike - this case-insensitively tests that a given column * does not match a pattern, which can include `%` and `_` * characters to match multiple variations. Including `%` * in the pattern matches zero or more characters, and including * `_` will match a single character. * * ## Examples * * ```ts * // Select all cars that don't have "Rover" in their name. * db.select().from(cars) * .where(notLike(cars.name, '%Rover%')) * ``` * * @see ilike for the inverse condition * @see notLike for a case-sensitive version of this condition */ export function notIlike(column: Column | SQL.Aliased | SQL, value: string | SQLWrapper): SQL { return sql`${column} not ilike ${value}`; } /** * Test that a column or expression contains all elements of * the list passed as the second argument. * * ## Throws * * The argument passed in the second array can't be empty: * if an empty is provided, this method will throw. * * ## Examples * * ```ts * // Select posts where its tags contain "Typescript" and "ORM". * db.select().from(posts) * .where(arrayContains(posts.tags, ['Typescript', 'ORM'])) * ``` * * @see arrayContained to find if an array contains all elements of a column or expression * @see arrayOverlaps to find if a column or expression contains any elements of an array */ export function arrayContains( column: SQL.Aliased, values: (T | Placeholder) | SQLWrapper, ): SQL; export function arrayContains( column: TColumn, values: (GetColumnData | Placeholder) | SQLWrapper, ): SQL; export function arrayContains( column: Exclude, values: (unknown | Placeholder)[] | SQLWrapper, ): SQL; export function arrayContains( column: SQLWrapper, values: (unknown | Placeholder)[] | SQLWrapper, ): SQL { if (Array.isArray(values)) { if (values.length === 0) { throw new Error('arrayContains requires at least one value'); } const array = sql`${bindIfParam(values, column)}`; return sql`${column} @> ${array}`; } return sql`${column} @> ${bindIfParam(values, column)}`; } /** * Test that the list passed as the second argument contains * all elements of a column or expression. * * ## Throws * * The argument passed in the second array can't be empty: * if an empty is provided, this method will throw. * * ## Examples * * ```ts * // Select posts where its tags contain "Typescript", "ORM" or both, * // but filtering posts that have additional tags. * db.select().from(posts) * .where(arrayContained(posts.tags, ['Typescript', 'ORM'])) * ``` * * @see arrayContains to find if a column or expression contains all elements of an array * @see arrayOverlaps to find if a column or expression contains any elements of an array */ export function arrayContained( column: SQL.Aliased, values: (T | Placeholder) | SQLWrapper, ): SQL; export function arrayContained( column: TColumn, values: (GetColumnData | Placeholder) | SQLWrapper, ): SQL; export function arrayContained( column: Exclude, values: (unknown | Placeholder)[] | SQLWrapper, ): SQL; export function arrayContained( column: SQLWrapper, values: (unknown | Placeholder)[] | SQLWrapper, ): SQL { if (Array.isArray(values)) { if (values.length === 0) { throw new Error('arrayContained requires at least one value'); } const array = sql`${bindIfParam(values, column)}`; return sql`${column} <@ ${array}`; } return sql`${column} <@ ${bindIfParam(values, column)}`; } /** * Test that a column or expression contains any elements of * the list passed as the second argument. * * ## Throws * * The argument passed in the second array can't be empty: * if an empty is provided, this method will throw. * * ## Examples * * ```ts * // Select posts where its tags contain "Typescript", "ORM" or both. * db.select().from(posts) * .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])) * ``` * * @see arrayContains to find if a column or expression contains all elements of an array * @see arrayContained to find if an array contains all elements of a column or expression */ export function arrayOverlaps( column: SQL.Aliased, values: (T | Placeholder) | SQLWrapper, ): SQL; export function arrayOverlaps( column: TColumn, values: (GetColumnData | Placeholder) | SQLWrapper, ): SQL; export function arrayOverlaps( column: Exclude, values: (unknown | Placeholder)[] | SQLWrapper, ): SQL; export function arrayOverlaps( column: SQLWrapper, values: (unknown | Placeholder)[] | SQLWrapper, ): SQL { if (Array.isArray(values)) { if (values.length === 0) { throw new Error('arrayOverlaps requires at least one value'); } const array = sql`${bindIfParam(values, column)}`; return sql`${column} && ${array}`; } return sql`${column} && ${bindIfParam(values, column)}`; } ================================================ FILE: drizzle-orm/src/sql/expressions/index.ts ================================================ export * from './conditions.ts'; export * from './select.ts'; ================================================ FILE: drizzle-orm/src/sql/expressions/select.ts ================================================ import type { AnyColumn } from '../../column.ts'; import type { SQL, SQLWrapper } from '../sql.ts'; import { sql } from '../sql.ts'; /** * Used in sorting, this specifies that the given * column or expression should be sorted in ascending * order. By the SQL standard, ascending order is the * default, so it is not usually necessary to specify * ascending sort order. * * ## Examples * * ```ts * // Return cars, starting with the oldest models * // and going in ascending order to the newest. * db.select().from(cars) * .orderBy(asc(cars.year)); * ``` * * @see desc to sort in descending order */ export function asc(column: AnyColumn | SQLWrapper): SQL { return sql`${column} asc`; } /** * Used in sorting, this specifies that the given * column or expression should be sorted in descending * order. * * ## Examples * * ```ts * // Select users, with the most recently created * // records coming first. * db.select().from(users) * .orderBy(desc(users.createdAt)); * ``` * * @see asc to sort in ascending order */ export function desc(column: AnyColumn | SQLWrapper): SQL { return sql`${column} desc`; } ================================================ FILE: drizzle-orm/src/sql/functions/aggregate.ts ================================================ import { type AnyColumn, Column } from '~/column.ts'; import { is } from '~/entity.ts'; import { type SQL, sql, type SQLWrapper } from '../sql.ts'; /** * Returns the number of values in `expression`. * * ## Examples * * ```ts * // Number employees with null values * db.select({ value: count() }).from(employees) * // Number of employees where `name` is not null * db.select({ value: count(employees.name) }).from(employees) * ``` * * @see countDistinct to get the number of non-duplicate values in `expression` */ export function count(expression?: SQLWrapper): SQL { return sql`count(${expression || sql.raw('*')})`.mapWith(Number); } /** * Returns the number of non-duplicate values in `expression`. * * ## Examples * * ```ts * // Number of employees where `name` is distinct * db.select({ value: countDistinct(employees.name) }).from(employees) * ``` * * @see count to get the number of values in `expression`, including duplicates */ export function countDistinct(expression: SQLWrapper): SQL { return sql`count(distinct ${expression})`.mapWith(Number); } /** * Returns the average (arithmetic mean) of all non-null values in `expression`. * * ## Examples * * ```ts * // Average salary of an employee * db.select({ value: avg(employees.salary) }).from(employees) * ``` * * @see avgDistinct to get the average of all non-null and non-duplicate values in `expression` */ export function avg(expression: SQLWrapper): SQL { return sql`avg(${expression})`.mapWith(String); } /** * Returns the average (arithmetic mean) of all non-null and non-duplicate values in `expression`. * * ## Examples * * ```ts * // Average salary of an employee where `salary` is distinct * db.select({ value: avgDistinct(employees.salary) }).from(employees) * ``` * * @see avg to get the average of all non-null values in `expression`, including duplicates */ export function avgDistinct(expression: SQLWrapper): SQL { return sql`avg(distinct ${expression})`.mapWith(String); } /** * Returns the sum of all non-null values in `expression`. * * ## Examples * * ```ts * // Sum of every employee's salary * db.select({ value: sum(employees.salary) }).from(employees) * ``` * * @see sumDistinct to get the sum of all non-null and non-duplicate values in `expression` */ export function sum(expression: SQLWrapper): SQL { return sql`sum(${expression})`.mapWith(String); } /** * Returns the sum of all non-null and non-duplicate values in `expression`. * * ## Examples * * ```ts * // Sum of every employee's salary where `salary` is distinct (no duplicates) * db.select({ value: sumDistinct(employees.salary) }).from(employees) * ``` * * @see sum to get the sum of all non-null values in `expression`, including duplicates */ export function sumDistinct(expression: SQLWrapper): SQL { return sql`sum(distinct ${expression})`.mapWith(String); } /** * Returns the maximum value in `expression`. * * ## Examples * * ```ts * // The employee with the highest salary * db.select({ value: max(employees.salary) }).from(employees) * ``` */ export function max(expression: T): SQL<(T extends AnyColumn ? T['_']['data'] : string) | null> { return sql`max(${expression})`.mapWith(is(expression, Column) ? expression : String) as any; } /** * Returns the minimum value in `expression`. * * ## Examples * * ```ts * // The employee with the lowest salary * db.select({ value: min(employees.salary) }).from(employees) * ``` */ export function min(expression: T): SQL<(T extends AnyColumn ? T['_']['data'] : string) | null> { return sql`min(${expression})`.mapWith(is(expression, Column) ? expression : String) as any; } ================================================ FILE: drizzle-orm/src/sql/functions/index.ts ================================================ export * from './aggregate.ts'; export * from './vector.ts'; ================================================ FILE: drizzle-orm/src/sql/functions/vector.ts ================================================ import type { AnyColumn } from '~/column.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import { type SQL, sql, type SQLWrapper } from '../sql.ts'; function toSql(value: number[] | string[]): string { return JSON.stringify(value); } /** * Used in sorting and in querying, if used in sorting, * this specifies that the given column or expression should be sorted in an order * that minimizes the L2 distance to the given value. * If used in querying, this specifies that it should return the L2 distance * between the given column or expression and the given value. * * ## Examples * * ```ts * // Sort cars by embedding similarity * // to the given embedding * db.select().from(cars) * .orderBy(l2Distance(cars.embedding, embedding)); * ``` * * ```ts * // Select distance of cars and embedding * // to the given embedding * db.select({distance: l2Distance(cars.embedding, embedding)}).from(cars) * ``` */ export function l2Distance( column: SQLWrapper | AnyColumn, value: number[] | string[] | TypedQueryBuilder | string, ): SQL { if (Array.isArray(value)) { return sql`${column} <-> ${toSql(value)}`; } return sql`${column} <-> ${value}`; } /** * L1 distance is one of the possible distance measures between two probability distribution vectors and it is * calculated as the sum of the absolute differences. * The smaller the distance between the observed probability vectors, the higher the accuracy of the synthetic data * * ## Examples * * ```ts * // Sort cars by embedding similarity * // to the given embedding * db.select().from(cars) * .orderBy(l1Distance(cars.embedding, embedding)); * ``` * * ```ts * // Select distance of cars and embedding * // to the given embedding * db.select({distance: l1Distance(cars.embedding, embedding)}).from(cars) * ``` */ export function l1Distance( column: SQLWrapper | AnyColumn, value: number[] | string[] | TypedQueryBuilder | string, ): SQL { if (Array.isArray(value)) { return sql`${column} <+> ${toSql(value)}`; } return sql`${column} <+> ${value}`; } /** * Used in sorting and in querying, if used in sorting, * this specifies that the given column or expression should be sorted in an order * that minimizes the inner product distance to the given value. * If used in querying, this specifies that it should return the inner product distance * between the given column or expression and the given value. * * ## Examples * * ```ts * // Sort cars by embedding similarity * // to the given embedding * db.select().from(cars) * .orderBy(innerProduct(cars.embedding, embedding)); * ``` * * ```ts * // Select distance of cars and embedding * // to the given embedding * db.select({ distance: innerProduct(cars.embedding, embedding) }).from(cars) * ``` */ export function innerProduct( column: SQLWrapper | AnyColumn, value: number[] | string[] | TypedQueryBuilder | string, ): SQL { if (Array.isArray(value)) { return sql`${column} <#> ${toSql(value)}`; } return sql`${column} <#> ${value}`; } /** * Used in sorting and in querying, if used in sorting, * this specifies that the given column or expression should be sorted in an order * that minimizes the cosine distance to the given value. * If used in querying, this specifies that it should return the cosine distance * between the given column or expression and the given value. * * ## Examples * * ```ts * // Sort cars by embedding similarity * // to the given embedding * db.select().from(cars) * .orderBy(cosineDistance(cars.embedding, embedding)); * ``` * * ```ts * // Select distance of cars and embedding * // to the given embedding * db.select({distance: cosineDistance(cars.embedding, embedding)}).from(cars) * ``` */ export function cosineDistance( column: SQLWrapper | AnyColumn, value: number[] | string[] | TypedQueryBuilder | string, ): SQL { if (Array.isArray(value)) { return sql`${column} <=> ${toSql(value)}`; } return sql`${column} <=> ${value}`; } /** * Hamming distance between two strings or vectors of equal length is the number of positions at which the * corresponding symbols are different. In other words, it measures the minimum number of * substitutions required to change one string into the other, or equivalently, * the minimum number of errors that could have transformed one string into the other * * ## Examples * * ```ts * // Sort cars by embedding similarity * // to the given embedding * db.select().from(cars) * .orderBy(hammingDistance(cars.embedding, embedding)); * ``` */ export function hammingDistance( column: SQLWrapper | AnyColumn, value: number[] | string[] | TypedQueryBuilder | string, ): SQL { if (Array.isArray(value)) { return sql`${column} <~> ${toSql(value)}`; } return sql`${column} <~> ${value}`; } /** * ## Examples * * ```ts * // Sort cars by embedding similarity * // to the given embedding * db.select().from(cars) * .orderBy(jaccardDistance(cars.embedding, embedding)); * ``` */ export function jaccardDistance( column: SQLWrapper | AnyColumn, value: number[] | string[] | TypedQueryBuilder | string, ): SQL { if (Array.isArray(value)) { return sql`${column} <%> ${toSql(value)}`; } return sql`${column} <%> ${value}`; } ================================================ FILE: drizzle-orm/src/sql/index.ts ================================================ export * from './expressions/index.ts'; export * from './functions/index.ts'; export * from './sql.ts'; ================================================ FILE: drizzle-orm/src/sql/sql.ts ================================================ import type { CasingCache } from '~/casing.ts'; import { entityKind, is } from '~/entity.ts'; import { isPgEnum } from '~/pg-core/columns/enum.ts'; import type { SelectResult } from '~/query-builders/select.types.ts'; import { Subquery } from '~/subquery.ts'; import { tracer } from '~/tracing.ts'; import type { Assume, Equal } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { AnyColumn } from '../column.ts'; import { Column } from '../column.ts'; import { IsAlias, Table } from '../table.ts'; /** * This class is used to indicate a primitive param value that is used in `sql` tag. * It is only used on type level and is never instantiated at runtime. * If you see a value of this type in the code, its runtime value is actually the primitive param value. */ export class FakePrimitiveParam { static readonly [entityKind]: string = 'FakePrimitiveParam'; } export type Chunk = | string | Table | View | AnyColumn | Name | Param | Placeholder | SQL; export interface BuildQueryConfig { casing: CasingCache; escapeName(name: string): string; escapeParam(num: number, value: unknown): string; escapeString(str: string): string; prepareTyping?: (encoder: DriverValueEncoder) => QueryTypingsValue; paramStartIndex?: { value: number }; inlineParams?: boolean; invokeSource?: 'indexes' | undefined; } export type QueryTypingsValue = 'json' | 'decimal' | 'time' | 'timestamp' | 'uuid' | 'date' | 'none'; export interface Query { sql: string; params: unknown[]; } export interface QueryWithTypings extends Query { typings?: QueryTypingsValue[]; } /** * Any value that implements the `getSQL` method. The implementations include: * - `Table` * - `Column` * - `View` * - `Subquery` * - `SQL` * - `SQL.Aliased` * - `Placeholder` * - `Param` */ export interface SQLWrapper { getSQL(): SQL; shouldOmitSQLParens?(): boolean; } export function isSQLWrapper(value: unknown): value is SQLWrapper { return value !== null && value !== undefined && typeof (value as any).getSQL === 'function'; } function mergeQueries(queries: QueryWithTypings[]): QueryWithTypings { const result: QueryWithTypings = { sql: '', params: [] }; for (const query of queries) { result.sql += query.sql; result.params.push(...query.params); if (query.typings?.length) { if (!result.typings) { result.typings = []; } result.typings.push(...query.typings); } } return result; } export class StringChunk implements SQLWrapper { static readonly [entityKind]: string = 'StringChunk'; readonly value: string[]; constructor(value: string | string[]) { this.value = Array.isArray(value) ? value : [value]; } getSQL(): SQL { return new SQL([this]); } } export class SQL implements SQLWrapper { static readonly [entityKind]: string = 'SQL'; declare _: { brand: 'SQL'; type: T; }; /** @internal */ decoder: DriverValueDecoder = noopDecoder; private shouldInlineParams = false; /** @internal */ usedTables: string[] = []; constructor(readonly queryChunks: SQLChunk[]) { for (const chunk of queryChunks) { if (is(chunk, Table)) { const schemaName = chunk[Table.Symbol.Schema]; this.usedTables.push( schemaName === undefined ? chunk[Table.Symbol.Name] : schemaName + '.' + chunk[Table.Symbol.Name], ); } } } append(query: SQL): this { this.queryChunks.push(...query.queryChunks); return this; } toQuery(config: BuildQueryConfig): QueryWithTypings { return tracer.startActiveSpan('drizzle.buildSQL', (span) => { const query = this.buildQueryFromSourceParams(this.queryChunks, config); span?.setAttributes({ 'drizzle.query.text': query.sql, 'drizzle.query.params': JSON.stringify(query.params), }); return query; }); } buildQueryFromSourceParams(chunks: SQLChunk[], _config: BuildQueryConfig): Query { const config = Object.assign({}, _config, { inlineParams: _config.inlineParams || this.shouldInlineParams, paramStartIndex: _config.paramStartIndex || { value: 0 }, }); const { casing, escapeName, escapeParam, prepareTyping, inlineParams, paramStartIndex, } = config; return mergeQueries(chunks.map((chunk): QueryWithTypings => { if (is(chunk, StringChunk)) { return { sql: chunk.value.join(''), params: [] }; } if (is(chunk, Name)) { return { sql: escapeName(chunk.value), params: [] }; } if (chunk === undefined) { return { sql: '', params: [] }; } if (Array.isArray(chunk)) { const result: SQLChunk[] = [new StringChunk('(')]; for (const [i, p] of chunk.entries()) { result.push(p); if (i < chunk.length - 1) { result.push(new StringChunk(', ')); } } result.push(new StringChunk(')')); return this.buildQueryFromSourceParams(result, config); } if (is(chunk, SQL)) { return this.buildQueryFromSourceParams(chunk.queryChunks, { ...config, inlineParams: inlineParams || chunk.shouldInlineParams, }); } if (is(chunk, Table)) { const schemaName = chunk[Table.Symbol.Schema]; const tableName = chunk[Table.Symbol.Name]; return { sql: schemaName === undefined || chunk[IsAlias] ? escapeName(tableName) : escapeName(schemaName) + '.' + escapeName(tableName), params: [], }; } if (is(chunk, Column)) { const columnName = casing.getColumnCasing(chunk); if (_config.invokeSource === 'indexes') { return { sql: escapeName(columnName), params: [] }; } const schemaName = chunk.table[Table.Symbol.Schema]; return { sql: chunk.table[IsAlias] || schemaName === undefined ? escapeName(chunk.table[Table.Symbol.Name]) + '.' + escapeName(columnName) : escapeName(schemaName) + '.' + escapeName(chunk.table[Table.Symbol.Name]) + '.' + escapeName(columnName), params: [], }; } if (is(chunk, View)) { const schemaName = chunk[ViewBaseConfig].schema; const viewName = chunk[ViewBaseConfig].name; return { sql: schemaName === undefined || chunk[ViewBaseConfig].isAlias ? escapeName(viewName) : escapeName(schemaName) + '.' + escapeName(viewName), params: [], }; } if (is(chunk, Param)) { if (is(chunk.value, Placeholder)) { return { sql: escapeParam(paramStartIndex.value++, chunk), params: [chunk], typings: ['none'] }; } const mappedValue = chunk.value === null ? null : chunk.encoder.mapToDriverValue(chunk.value); if (is(mappedValue, SQL)) { return this.buildQueryFromSourceParams([mappedValue], config); } if (inlineParams) { return { sql: this.mapInlineParam(mappedValue, config), params: [] }; } let typings: QueryTypingsValue[] = ['none']; if (prepareTyping) { typings = [prepareTyping(chunk.encoder)]; } return { sql: escapeParam(paramStartIndex.value++, mappedValue), params: [mappedValue], typings }; } if (is(chunk, Placeholder)) { return { sql: escapeParam(paramStartIndex.value++, chunk), params: [chunk], typings: ['none'] }; } if (is(chunk, SQL.Aliased) && chunk.fieldAlias !== undefined) { return { sql: escapeName(chunk.fieldAlias), params: [] }; } if (is(chunk, Subquery)) { if (chunk._.isWith) { return { sql: escapeName(chunk._.alias), params: [] }; } return this.buildQueryFromSourceParams([ new StringChunk('('), chunk._.sql, new StringChunk(') '), new Name(chunk._.alias), ], config); } if (isPgEnum(chunk)) { if (chunk.schema) { return { sql: escapeName(chunk.schema) + '.' + escapeName(chunk.enumName), params: [] }; } return { sql: escapeName(chunk.enumName), params: [] }; } if (isSQLWrapper(chunk)) { if (chunk.shouldOmitSQLParens?.()) { return this.buildQueryFromSourceParams([chunk.getSQL()], config); } return this.buildQueryFromSourceParams([ new StringChunk('('), chunk.getSQL(), new StringChunk(')'), ], config); } if (inlineParams) { return { sql: this.mapInlineParam(chunk, config), params: [] }; } return { sql: escapeParam(paramStartIndex.value++, chunk), params: [chunk], typings: ['none'] }; })); } private mapInlineParam( chunk: unknown, { escapeString }: BuildQueryConfig, ): string { if (chunk === null) { return 'null'; } if (typeof chunk === 'number' || typeof chunk === 'boolean') { return chunk.toString(); } if (typeof chunk === 'string') { return escapeString(chunk); } if (typeof chunk === 'object') { const mappedValueAsString = chunk.toString(); if (mappedValueAsString === '[object Object]') { return escapeString(JSON.stringify(chunk)); } return escapeString(mappedValueAsString); } throw new Error('Unexpected param value: ' + chunk); } getSQL(): SQL { return this; } as(alias: string): SQL.Aliased; /** * @deprecated * Use ``sql`query`.as(alias)`` instead. */ as(): SQL; /** * @deprecated * Use ``sql`query`.as(alias)`` instead. */ as(alias: string): SQL.Aliased; as(alias?: string): SQL | SQL.Aliased { // TODO: remove with deprecated overloads if (alias === undefined) { return this; } return new SQL.Aliased(this, alias); } mapWith< TDecoder extends | DriverValueDecoder | DriverValueDecoder['mapFromDriverValue'], >(decoder: TDecoder): SQL> { this.decoder = typeof decoder === 'function' ? { mapFromDriverValue: decoder } : decoder; return this as SQL>; } inlineParams(): this { this.shouldInlineParams = true; return this; } /** * This method is used to conditionally include a part of the query. * * @param condition - Condition to check * @returns itself if the condition is `true`, otherwise `undefined` */ if(condition: any | undefined): this | undefined { return condition ? this : undefined; } } export type GetDecoderResult = T extends Column ? T['_']['data'] : T extends | DriverValueDecoder | DriverValueDecoder['mapFromDriverValue'] ? TData : never; /** * Any DB name (table, column, index etc.) */ export class Name implements SQLWrapper { static readonly [entityKind]: string = 'Name'; protected brand!: 'Name'; constructor(readonly value: string) {} getSQL(): SQL { return new SQL([this]); } } /** * Any DB name (table, column, index etc.) * @deprecated Use `sql.identifier` instead. */ export function name(value: string): Name { return new Name(value); } export interface DriverValueDecoder { mapFromDriverValue(value: TDriverParam): TData; } export interface DriverValueEncoder { mapToDriverValue(value: TData): TDriverParam | SQL; } export function isDriverValueEncoder(value: unknown): value is DriverValueEncoder { return typeof value === 'object' && value !== null && 'mapToDriverValue' in value && typeof (value as any).mapToDriverValue === 'function'; } export const noopDecoder: DriverValueDecoder = { mapFromDriverValue: (value) => value, }; export const noopEncoder: DriverValueEncoder = { mapToDriverValue: (value) => value, }; export interface DriverValueMapper extends DriverValueDecoder, DriverValueEncoder {} export const noopMapper: DriverValueMapper = { ...noopDecoder, ...noopEncoder, }; /** Parameter value that is optionally bound to an encoder (for example, a column). */ export class Param implements SQLWrapper { static readonly [entityKind]: string = 'Param'; protected brand!: 'BoundParamValue'; /** * @param value - Parameter value * @param encoder - Encoder to convert the value to a driver parameter */ constructor( readonly value: TDataType, readonly encoder: DriverValueEncoder = noopEncoder, ) {} getSQL(): SQL { return new SQL([this]); } } /** @deprecated Use `sql.param` instead. */ export function param( value: TData, encoder?: DriverValueEncoder, ): Param { return new Param(value, encoder); } /** * Anything that can be passed to the `` sql`...` `` tagged function. */ export type SQLChunk = | StringChunk | SQLChunk[] | SQLWrapper | SQL | Table | View | Subquery | AnyColumn | Param | Name | undefined | FakePrimitiveParam | Placeholder; export function sql(strings: TemplateStringsArray, ...params: any[]): SQL; /* The type of `params` is specified as `SQLChunk[]`, but that's slightly incorrect - in runtime, users won't pass `FakePrimitiveParam` instances as `params` - they will pass primitive values which will be wrapped in `Param`. That's why the overload specifies `params` as `any[]` and not as `SQLSourceParam[]`. This type is used to make our lives easier and the type checker happy. */ export function sql(strings: TemplateStringsArray, ...params: SQLChunk[]): SQL { const queryChunks: SQLChunk[] = []; if (params.length > 0 || (strings.length > 0 && strings[0] !== '')) { queryChunks.push(new StringChunk(strings[0]!)); } for (const [paramIndex, param] of params.entries()) { queryChunks.push(param, new StringChunk(strings[paramIndex + 1]!)); } return new SQL(queryChunks); } export namespace sql { export function empty(): SQL { return new SQL([]); } /** @deprecated - use `sql.join()` */ export function fromList(list: SQLChunk[]): SQL { return new SQL(list); } /** * Convenience function to create an SQL query from a raw string. * @param str The raw SQL query string. */ export function raw(str: string): SQL { return new SQL([new StringChunk(str)]); } /** * Join a list of SQL chunks with a separator. * @example * ```ts * const query = sql.join([sql`a`, sql`b`, sql`c`]); * // sql`abc` * ``` * @example * ```ts * const query = sql.join([sql`a`, sql`b`, sql`c`], sql`, `); * // sql`a, b, c` * ``` */ export function join(chunks: SQLChunk[], separator?: SQLChunk): SQL { const result: SQLChunk[] = []; for (const [i, chunk] of chunks.entries()) { if (i > 0 && separator !== undefined) { result.push(separator); } result.push(chunk); } return new SQL(result); } /** * Create a SQL chunk that represents a DB identifier (table, column, index etc.). * When used in a query, the identifier will be escaped based on the DB engine. * For example, in PostgreSQL, identifiers are escaped with double quotes. * * **WARNING: This function does not offer any protection against SQL injections, so you must validate any user input beforehand.** * * @example ```ts * const query = sql`SELECT * FROM ${sql.identifier('my-table')}`; * // 'SELECT * FROM "my-table"' * ``` */ export function identifier(value: string): Name { return new Name(value); } export function placeholder(name: TName): Placeholder { return new Placeholder(name); } export function param( value: TData, encoder?: DriverValueEncoder, ): Param { return new Param(value, encoder); } } export namespace SQL { export class Aliased implements SQLWrapper { static readonly [entityKind]: string = 'SQL.Aliased'; declare _: { brand: 'SQL.Aliased'; type: T; }; /** @internal */ isSelectionField = false; constructor( readonly sql: SQL, readonly fieldAlias: string, ) {} getSQL(): SQL { return this.sql; } /** @internal */ clone() { return new Aliased(this.sql, this.fieldAlias); } } } export class Placeholder implements SQLWrapper { static readonly [entityKind]: string = 'Placeholder'; declare protected: TValue; constructor(readonly name: TName) {} getSQL(): SQL { return new SQL([this]); } } /** @deprecated Use `sql.placeholder` instead. */ export function placeholder(name: TName): Placeholder { return new Placeholder(name); } export function fillPlaceholders(params: unknown[], values: Record): unknown[] { return params.map((p) => { if (is(p, Placeholder)) { if (!(p.name in values)) { throw new Error(`No value for placeholder "${p.name}" was provided`); } return values[p.name]; } if (is(p, Param) && is(p.value, Placeholder)) { if (!(p.value.name in values)) { throw new Error(`No value for placeholder "${p.value.name}" was provided`); } return p.encoder.mapToDriverValue(values[p.value.name]); } return p; }); } export type ColumnsSelection = Record; const IsDrizzleView = Symbol.for('drizzle:IsDrizzleView'); export abstract class View< TName extends string = string, TExisting extends boolean = boolean, TSelection extends ColumnsSelection = ColumnsSelection, > implements SQLWrapper { static readonly [entityKind]: string = 'View'; declare _: { brand: 'View'; viewBrand: string; name: TName; existing: TExisting; selectedFields: TSelection; }; /** @internal */ [ViewBaseConfig]: { name: TName; originalName: TName; schema: string | undefined; selectedFields: ColumnsSelection; isExisting: TExisting; query: TExisting extends true ? undefined : SQL; isAlias: boolean; }; /** @internal */ [IsDrizzleView] = true; declare readonly $inferSelect: InferSelectViewModel, TExisting, TSelection>>; constructor( { name, schema, selectedFields, query }: { name: TName; schema: string | undefined; selectedFields: ColumnsSelection; query: SQL | undefined; }, ) { this[ViewBaseConfig] = { name, originalName: name, schema, selectedFields, query: query as (TExisting extends true ? undefined : SQL), isExisting: !query as TExisting, isAlias: false, }; } getSQL(): SQL { return new SQL([this]); } } export function isView(view: unknown): view is View { return typeof view === 'object' && view !== null && IsDrizzleView in view; } export function getViewName(view: T): T['_']['name'] { return view[ViewBaseConfig].name; } export type InferSelectViewModel = Equal extends true ? { [x: string]: unknown } : SelectResult< TView['_']['selectedFields'], 'single', Record >; // Defined separately from the Column class to resolve circular dependency Column.prototype.getSQL = function() { return new SQL([this]); }; // Defined separately from the Table class to resolve circular dependency Table.prototype.getSQL = function() { return new SQL([this]); }; // Defined separately from the Column class to resolve circular dependency Subquery.prototype.getSQL = function() { return new SQL([this]); }; ================================================ FILE: drizzle-orm/src/sql-js/driver.ts ================================================ import type { Database } from 'sql.js'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import type { DrizzleConfig } from '~/utils.ts'; import { SQLJsSession } from './session.ts'; export type SQLJsDatabase< TSchema extends Record = Record, > = BaseSQLiteDatabase<'sync', void, TSchema>; export function drizzle = Record>( client: Database, config: DrizzleConfig = {}, ): SQLJsDatabase { const dialect = new SQLiteSyncDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new SQLJsSession(client, dialect, schema, { logger }); return new BaseSQLiteDatabase('sync', dialect, session, schema) as SQLJsDatabase; } ================================================ FILE: drizzle-orm/src/sql-js/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/sql-js/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { SQLJsDatabase } from './driver.ts'; export function migrate>( db: SQLJsDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/sql-js/session.ts ================================================ import type { BindParams, Database } from 'sql.js'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; import type { SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import { SQLiteTransaction } from '~/sqlite-core/index.ts'; import type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts'; import type { PreparedQueryConfig as PreparedQueryConfigBase, SQLiteExecuteMethod, SQLiteTransactionConfig, } from '~/sqlite-core/session.ts'; import { SQLitePreparedQuery as PreparedQueryBase, SQLiteSession } from '~/sqlite-core/session.ts'; import { mapResultRow } from '~/utils.ts'; export interface SQLJsSessionOptions { logger?: Logger; } type PreparedQueryConfig = Omit; export class SQLJsSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'sync', void, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'SQLJsSession'; private logger: Logger; constructor( private client: Database, dialect: SQLiteSyncDialect, private schema: RelationalSchemaConfig | undefined, options: SQLJsSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); } prepareQuery>( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, ): PreparedQuery { return new PreparedQuery(this.client, query, this.logger, fields, executeMethod, isResponseInArrayMode); } override transaction( transaction: (tx: SQLJsTransaction) => T, config: SQLiteTransactionConfig = {}, ): T { const tx = new SQLJsTransaction('sync', this.dialect, this, this.schema); this.run(sql.raw(`begin${config.behavior ? ` ${config.behavior}` : ''}`)); try { const result = transaction(tx); this.run(sql`commit`); return result; } catch (err) { this.run(sql`rollback`); throw err; } } } export class SQLJsTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'sync', void, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'SQLJsTransaction'; override transaction(transaction: (tx: SQLJsTransaction) => T): T { const savepointName = `sp${this.nestedIndex + 1}`; const tx = new SQLJsTransaction('sync', this.dialect, this.session, this.schema, this.nestedIndex + 1); tx.run(sql.raw(`savepoint ${savepointName}`)); try { const result = transaction(tx); tx.run(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { tx.run(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export class PreparedQuery extends PreparedQueryBase< { type: 'sync'; run: void; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { static override readonly [entityKind]: string = 'SQLJsPreparedQuery'; constructor( private client: Database, query: Query, private logger: Logger, private fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][], mapColumnValue?: (value: unknown) => unknown) => unknown, ) { super('sync', executeMethod, query); } run(placeholderValues?: Record): void { const stmt = this.client.prepare(this.query.sql); const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); const result = stmt.run(params as BindParams); stmt.free(); return result; } all(placeholderValues?: Record): T['all'] { const stmt = this.client.prepare(this.query.sql); const { fields, joinsNotNullableMap, logger, query, customResultMapper } = this; if (!fields && !customResultMapper) { const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); stmt.bind(params as BindParams); const rows: unknown[] = []; while (stmt.step()) { rows.push(stmt.getAsObject()); } stmt.free(); return rows; } const rows = this.values(placeholderValues) as unknown[][]; if (customResultMapper) { return customResultMapper(rows, normalizeFieldValue) as T['all']; } return rows.map((row) => mapResultRow(fields!, row.map((v) => normalizeFieldValue(v)), joinsNotNullableMap)); } get(placeholderValues?: Record): T['get'] { const stmt = this.client.prepare(this.query.sql); const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); const { fields, joinsNotNullableMap, customResultMapper } = this; if (!fields && !customResultMapper) { const result = stmt.getAsObject(params as BindParams); stmt.free(); return result; } const row = stmt.get(params as BindParams); stmt.free(); if (!row || (row.length === 0 && fields!.length > 0)) { return undefined; } if (customResultMapper) { return customResultMapper([row], normalizeFieldValue) as T['get']; } return mapResultRow(fields!, row.map((v) => normalizeFieldValue(v)), joinsNotNullableMap); } values(placeholderValues?: Record): T['values'] { const stmt = this.client.prepare(this.query.sql); const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); stmt.bind(params as BindParams); const rows: unknown[] = []; while (stmt.step()) { rows.push(stmt.get()); } stmt.free(); return rows; } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } function normalizeFieldValue(value: unknown) { if (value instanceof Uint8Array) { // eslint-disable-line no-instanceof/no-instanceof if (typeof Buffer !== 'undefined') { if (!(value instanceof Buffer)) { // eslint-disable-line no-instanceof/no-instanceof return Buffer.from(value); } return value; } if (typeof TextDecoder !== 'undefined') { return new TextDecoder().decode(value); } throw new Error('TextDecoder is not available. Please provide either Buffer or TextDecoder polyfill.'); } return value; } ================================================ FILE: drizzle-orm/src/sqlite-core/README.md ================================================

Drizzle ORM | SQLite npm

npm npm bundle size Discord License
If you know SQL, you know Drizzle ORM

Drizzle ORM is a [tiny](https://twitter.com/_alexblokh/status/1594735880417472512), [blazingly fast](#️-performance-and-prepared-statements) TypeScript ORM library with a [drizzle-kit](#-migrations) CLI companion for automatic SQL migrations generation. Here you can find extensive docs for SQLite module. | Driver | Support | | |:----------------------------------------------------------------------|:-------:|:----------------------------------:| | [better-sqlite3](https://github.com/WiseLibs/better-sqlite3) | ✅ | | | [sql.js](https://github.com/sql-js/sql.js/) | ✅ | | | [node-sqlite3](https://github.com/TryGhost/node-sqlite3) | ⏳ | | | [bun:sqlite](https://github.com/oven-sh/bun#bunsqlite-sqlite3-module) | ✅ | [Example](/examples/bun-sqlite) | | [Cloudflare D1](https://developers.cloudflare.com/d1/) | ✅ | [Example](/examples/cloudflare-d1) | | [Fly.io LiteFS](https://fly.io/docs/litefs/getting-started/) | ✅ | | | [libSQL server](https://github.com/libsql/sqld/) | ✅ | [Example](/examples/libsql) | | [Turso](https://turso.tech/) | ✅ | [Example](/examples/libsql) | | [Custom proxy driver](/examples/sqlite-proxy) | ✅ | | ## 💾 Installation ```bash npm install drizzle-orm better-sqlite3 ## opt-in automatic migrations generator npm install -D drizzle-kit ``` ## 🚀 Quick start ```typescript import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import Database from 'better-sqlite3'; const users = sqliteTable('users', { id: integer('id').primaryKey(), // 'id' is the column name fullName: text('full_name'), }) const sqlite = new Database('sqlite.db'); const db = drizzle(sqlite); const allUsers = db.select().from(users).all(); ``` ### Using Drizzle ORM in Next.js App Router Next.js' App Router have zero-config support for Drizzle ORM. ## Connecting to databases ```typescript // better-sqlite3 or fly.io LiteFS import { drizzle, BetterSQLite3Database } from 'drizzle-orm/better-sqlite3'; import Database from 'better-sqlite3'; const sqlite = new Database('sqlite.db'); const db/*: BetterSQLite3Database*/ = drizzle(sqlite); const result = db.select().from(users).all() // bun js embedded sqlite connector import { drizzle, BunSQLiteDatabase } from 'drizzle-orm/bun-sqlite'; import { Database } from 'bun:sqlite'; const sqlite = new Database('nw.sqlite'); const db/*: BunSQLiteDatabase*/ = drizzle(sqlite); const result = db.select().from(users).all() // Cloudflare D1 connector import { drizzle, DrizzleD1Database } from 'drizzle-orm/d1'; // env.DB from cloudflare worker environment const db/*: DrizzleD1Database*/ = drizzle(env.DB); const result = await db.select().from(users).all(); // pay attention this one is async // libSQL or Turso import { drizzle, LibSQLDatabase } from 'drizzle-orm/libsql'; import { Database } from '@libsql/sqlite3'; const sqlite = new Database('libsql://...'); // Remote server // or const sqlite = new Database('sqlite.db'); // Local file const db/*: LibSQLDatabase*/ = drizzle(sqlite); const result = await db.select().from(users).all(); // pay attention this one is async // Custom Proxy HTTP driver const db = drizzle(async (sql, params, method) => { try { const rows = await axios.post('http://localhost:3000/query', { sql, params, method }); return { rows: rows.data }; } catch (e: any) { console.error('Error from sqlite proxy server: ', e.response.data) return { rows: [] }; } }); // More example for proxy: https://github.com/drizzle-team/drizzle-orm/tree/main/examples/sqlite-proxy ``` ## SQL schema declaration With `drizzle-orm` you declare SQL schema in TypeScript. You can have either one `schema.ts` file with all declarations or you can group them logically in multiple files. We prefer to use single file schema. ### Single schema file example ```plaintext 📦 └ 📂 src └ 📂 db └ 📜schema.ts ``` ### Multiple schema files example ```plaintext 📦 └ 📂 src └ 📂 db └ 📂 schema ├ 📜users.ts ├ 📜countries.ts ├ 📜cities.ts ├ 📜products.ts ├ 📜clients.ts ├ 📜enums.ts └ 📜etc.ts ``` This is how you declare SQL schema in `schema.ts`. You can declare tables, indexes and constraints, foreign keys and enums. ℹ Every column has a special _column type_ function that accepts the name of the column in the database (like `integer('id')`) Please pay attention to `export` keyword, they are mandatory if you'll be using [drizzle-kit SQL migrations generator](#-migrations). ```typescript import { sqliteTable, text, integer, uniqueIndex } from 'drizzle-orm/sqlite-core'; export const countries = sqliteTable('countries', { id: integer('id').primaryKey(), name: text('name'), }, (countries) => ({ nameIdx: uniqueIndex('nameIdx').on(countries.name), }) ); export const cities = sqliteTable('cities', { id: integer('id').primaryKey(), name: text('name'), countryId: integer('country_id').references(() => countries.id), }) ``` ### Database and table entity types ```typescript import { text, integer, sqliteTable } from 'drizzle-orm/sqlite-core'; import { InferModel } from 'drizzle-orm'; const users = sqliteTable('users', { id: integer('id').primaryKey(), fullName: text('full_name'), phone: text('phone'), }) export type User = InferModel // return type when queried export type InsertUser = InferModel // insert type ... import { drizzle, BetterSQLite3Database } from 'drizzle-orm/better-sqlite3'; import Database from 'better-sqlite3'; const sqlite = new Database('sqlite.db'); const db: BetterSQLite3Database = drizzle(sqlite); const result: User[] = await db.select().from(users).all(); const insertUser = (user: InsertUser) => { return db.insert(users).values(user).run() } ``` ### Customizing the table name There is a "table creator" available, which allow you to customize the table name, for example, to add a prefix or suffix. This is useful if you need to have tables for different environments or applications in the same database. ```ts import { sqliteTableCreator } from 'drizzle-orm/sqlite-core'; const sqliteTable = sqliteTableCreator((name) => `myprefix_${name}`); const users = sqliteTable('users', { id: int('id').primaryKey(), name: text('name').notNull(), }); ``` ## Column types The list of all column types. You can also create custom types - [see here](/docs/custom-types.md) ```typescript integer('...'); integer('...', { mode: 'number' | 'timestamp' | 'timestamp_ms' }) real('...'); text('...'); text('role', { enum: ['admin', 'user'] }); blob('...'); blob('...', { mode: 'json' | 'buffer' }); blob('...').$type<{ foo: string }>(); column.primaryKey(); column.notNull(); column.default(...); ``` ### Customizing column data type Every column builder has a `.$type()` method, which allows you to customize the data type of the column. This is useful, for example, with branded types. ```ts const users = sqliteTable('users', { id: integer('id').$type().primaryKey(), jsonField: blob('json_field').$type(), }); ``` Declaring indexes, foreign keys and composite primary keys ```typescript import { sqliteTable, foreignKey, primaryKey, text, integer, index, uniqueIndex, AnySQLiteColumn } from "drizzle-orm/sqlite-core"; export const countries = sqliteTable('countries', { id: integer('id').primaryKey(), name: text('name'), population: integer('population'), capital: integer('capital').references(() => cities.id, { onUpdate: 'cascade', onDelete: 'cascade' }) }, (countries) => ({ nameIdx: index('name_idx').on(countries.name), // one column namePopulationIdx: index('name_population_idx').on(countries.name, countries.population), // multiple columns uniqueIdx: uniqueIndex('unique_idx').on(countries.name), // unique index }) ); export const cities = sqliteTable('cities', { id: integer('id').primaryKey(), name: text('name'), countryId: integer('country_id').references(() => countries.id), // inline foreign key countryName: text('country_id'), sisterCityId: integer('sister_city_id').references((): AnySQLiteColumn => cities.id), // self-referencing foreign key }, (cities) => ({ // explicit foreign key with 1 column countryFk: foreignKey(() => ({ columns: [cities.countryId], foreignColumns: [countries.id], })), // explicit foreign key with multiple columns countryIdNameFk: foreignKey(() => ({ columns: [cities.countryId, cities.countryName], foreignColumns: [countries.id, countries.name], })), })); const pkExample = sqliteTable('pk_example', { id: integer('id'), name: text('name').notNull(), email: text('email').notNull(), }, (pkExample) => ({ // composite primary key on multiple columns compositePk: primaryKey(pkExample.id, pkExample.name) })); // you can have .where() on indexes index('name_idx').on(table.column).where(sql``) ``` ## Select, Insert, Update, Delete ### Select Querying, sorting and filtering. We also support partial select. ```typescript ... import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core'; import { and, asc, desc, eq, or } from 'drizzle-orm' import { drizzle } from 'drizzle-orm/better-sqlite3'; import Database from 'better-sqlite3'; const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('full_name'), }); const sqlite = new Database('sqlite.db'); const db = drizzle(sqlite); db.select().from(users).all(); db.select().from(users).where(eq(users.id, 42)).get(); // you can combine filters with and(...) or or(...) db.select().from(users).where(and(eq(users.id, 42), eq(users.name, 'Dan'))).all(); db.select().from(users).where(or(eq(users.id, 42), eq(users.id, 1))).all(); // partial select const result = db .select({ field1: users.id, field2: users.name, }) .from(users) .all(); const { field1, field2 } = result[0]; // limit offset & order by db.select().from(users).limit(10).offset(10).all(); db.select().from(users).orderBy(users.name).all(); db.select().from(users).orderBy(desc(users.name)).all(); // you can pass multiple order args db.select().from(users).orderBy(asc(users.name), desc(users.name)).all(); ``` #### Select from/join raw SQL ```typescript db.select({ x: sql`x` }).from(sql`generate_series(2, 4) as g(x)`).all(); db .select({ x1: sql`g1.x`, x2: sql`g2.x` }) .from(sql`generate_series(2, 4) as g1(x)`) .leftJoin(sql`generate_series(2, 4) as g2(x)`) .all(); ``` #### Conditionally select fields ```typescript function selectUsers(withName: boolean) { return db .select({ id: users.id, ...(withName ? { name: users.name } : {}), }) .from(users) .all(); } const users = selectUsers(true); ``` #### WITH clause ```typescript const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); const result = db.with(sq).select().from(sq).all(); ``` > [!NOTE] > Keep in mind that if you need to select raw `sql` in a WITH subquery and reference that field in other queries, you must add an alias to it: ```typescript const sq = db.$with('sq').as(db.select({ name: sql`upper(${users.name})`.as('name') }).from(users)); const result = db.with(sq).select({ name: sq.name }).from(sq).all(); ``` Otherwise, the field type will become `DrizzleTypeError` and you won't be able to reference it in other queries. If you ignore the type error and still try to reference the field, you will get a runtime error, because we cannot reference that field without an alias. #### Select from subquery ```typescript const sq = db.select().from(users).where(eq(users.id, 42)).as('sq'); const result = db.select().from(sq).all(); ``` Subqueries in joins are supported, too: ```typescript const result = db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).all(); ``` #### List of all filter operators ```typescript eq(column, value) eq(column1, column2) ne(column, value) ne(column1, column2) less(column, value) lessEq(column, value) gt(column, value) gt(column1, column2) gte(column, value) gte(column1, column2) lt(column, value) lt(column1, column2) lte(column, value) lte(column1, column2) isNull(column) isNotNull(column) inArray(column, values[]) inArray(column, sqlSubquery) notInArray(column, values[]) notInArray(column, sqlSubquery) exists(sqlSubquery) notExists(sqlSubquery) between(column, min, max) notBetween(column, min, max) like(column, value) like(column, value) ilike(column, value) notIlike(column, value) not(sqlExpression) and(...expressions: Expr[]) or(...expressions: Expr[]) ``` ### Insert ```typescript import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core'; import { InferModel } from 'drizzle-orm'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import Database from 'better-sqlite3'; const sqlite = new Database('sqlite.db'); const db = drizzle(sqlite); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), createdAt: integer('created_at', { mode: 'timestamp' }), }); type NewUser = InferModel; const newUser: NewUser = { name: 'Andrew', createdAt: new Date(), }; db.insert(users).values(newUser).run(); const insertedUsers/*: NewUser[]*/ = db.insert(users).values(newUser).returning().all(); const insertedUsersIds/*: { insertedId: number }[]*/ = db.insert(users) .values(newUser) .returning({ insertedId: users.id }) .all(); ``` #### Insert several items ```ts db.insert(users) .values( { name: 'Andrew', createdAt: new Date(), }, { name: 'Dan', createdAt: new Date(), }, ) .run(); ``` #### Insert array of items ```ts const newUsers: NewUser[] = [ { name: 'Andrew', createdAt: new Date(), }, { name: 'Dan', createdAt: new Date(), }, ]; db.insert(users).values(newUsers).run(); ``` ### Upsert (Insert with on conflict statement) ```typescript db.insert(users) .values({ id: 1, name: 'Dan' }) .onConflictDoUpdate({ target: users.id, set: { name: 'John' } }) .run(); db.insert(users) .values({ id: 1, name: 'John' }) .onConflictDoNothing() .run(); db.insert(users) .values({ id: 1, name: 'John' }) .onConflictDoNothing({ target: users.id }) .run(); ``` ### Update and Delete ```typescript db.update(users) .set({ name: 'Mr. Dan' }) .where(eq(usersTable.name, 'Dan')) .run(); db.delete(users) .where(eq(usersTable.name, 'Dan')) .run(); ``` ### Aggregations They work just like they do in SQL, but you have them fully type safe ```typescript const orders = sqliteTable('order', { id: integer('id').primaryKey(), orderDate: integer('order_date', { mode: 'timestamp' }).notNull(), requiredDate: integer('required_date', { mode: 'timestamp' }).notNull(), shippedDate: integer('shipped_date', { mode: 'timestamp' }), shipVia: integer('ship_via').notNull(), freight: numeric('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: text('customer_id').notNull(), employeeId: integer('employee_id').notNull(), }); const details = sqliteTable('order_detail', { unitPrice: numeric('unit_price').notNull(), quantity: integer('quantity').notNull(), discount: numeric('discount').notNull(), orderId: integer('order_id').notNull(), productId: integer('product_id').notNull(), }); db .select({ id: orders.id, shippedDate: orders.shippedDate, shipName: orders.shipName, shipCity: orders.shipCity, shipCountry: orders.shipCountry, productsCount: sql`count(${details.productId})`, quantitySum: sql`sum(${details.quantity})`, totalPrice: sql`sum(${details.quantity} * ${details.unitPrice})`, }) .from(orders) .leftJoin(details, eq(orders.id, details.orderId)) .groupBy(orders.id) .orderBy(asc(orders.id)) .all(); ``` ### Joins > [!NOTE] > For in-depth partial select joins documentation, refer to [this page](/docs/joins.md). ### Many-to-one ```typescript import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core'; import { drizzle } from 'drizzle-orm/better-sqlite3'; const cities = sqliteTable('cities', { id: integer('id').primaryKey(), name: text('name'), }); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), cityId: integer('city_id').references(() => cities.id) }); const db = drizzle(sqlite); const result = db.select().from(cities).leftJoin(users, eq(cities.id, users.cityId)).all(); ``` ### Many-to-many ```typescript const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); const chatGroups = sqliteTable('chat_groups', { id: integer('id').primaryKey(), name: text('name'), }); const usersToChatGroups = sqliteTable('usersToChatGroups', { userId: integer('user_id').notNull().references(() => users.id), groupId: integer('group_id').notNull().references(() => chatGroups.id), }); ... const db = drizzle(...); // querying user group with id 1 and all the participants(users) db .select() .from(usersToChatGroups) .leftJoin(users, eq(usersToChatGroups.userId, users.id)) .leftJoin(chatGroups, eq(usersToChatGroups.groupId, chatGroups.id)) .where(eq(chatGroups.id, 1)) .all(); ``` ### Join aliases and self-joins ```typescript import { ..., alias } from 'drizzle-orm/sqlite-core'; export const files = sqliteTable('folders', { name: text('name').notNull(), parent: text('parent_folder') }) ... const db = drizzle(...); const nestedFiles = alias(files, 'nested_files'); db.select().from(files) .leftJoin(nestedFiles, eq(files.name, nestedFiles.name)) .where(eq(files.parent, '/')) .all(); // will return files and folders and nested files for each folder at root dir ``` ### Join using partial field select Join Cities with Users getting only needed fields form request ```typescript db .select({ id: cities.id, cityName: cities.name, userId: users.id }) .from(cities) .leftJoin(users, eq(users.cityId, cities.id)) .all(); ``` ## Transactions ```ts db.transaction((tx) => { tx.insert(users).values(newUser).run(); tx.update(users).set({ name: 'Mr. Dan' }).where(eq(users.name, 'Dan')).run(); tx.delete(users).where(eq(users.name, 'Dan')).run(); }); ``` ### Nested transactions ```ts db.transaction((tx) => { tx.insert(users).values(newUser).run(); tx.transaction((tx2) => { tx2.update(users).set({ name: 'Mr. Dan' }).where(eq(users.name, 'Dan')).run(); tx2.delete(users).where(eq(users.name, 'Dan')).run(); }); }); ``` ### Transaction settings ```ts interface SQLiteTransactionConfig { behavior?: 'deferred' | 'immediate' | 'exclusive'; } db.transaction((tx) => { ... }, { behavior: 'immediate', }); ``` ## Query builder Drizzle ORM provides a standalone query builder that allows you to build queries without creating a database instance. ```ts import { queryBuilder as qb } from 'drizzle-orm/sqlite-core'; const query = qb.select().from(users).where(eq(users.name, 'Dan')); const { sql, params } = query.toSQL(); ``` ## Views (WIP) > [!WARNING] > views are currently only implemented on the ORM side. That means you can query the views that already exist in the database, but they won't be added to drizzle-kit migrations or `db push` yet. ### Creating a view ```ts import { sqliteView } from 'drizzle-orm/sqlite-core'; const newYorkers = sqliteView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); ``` > [!WARNING] > All the parameters inside the query will be inlined, instead of replaced by `$1`, `$2`, etc. You can also use the [`queryBuilder` instance](#query-builder) directly instead of passing a callback, if you already have it imported. ```ts import { queryBuilder as qb } from 'drizzle-orm/sqlite-core'; const newYorkers = sqliteView('new_yorkers').as(qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); ``` ### Using raw SQL in a view query In case you need to specify the view query using a syntax that is not supported by the query builder, you can directly use SQL. In that case, you also need to specify the view shape. ```ts const newYorkers = sqliteView('new_yorkers', { id: integer('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).as(sql`select * from ${users} where ${eq(users.cityId, 1)}`); ``` ### Describing existing views There are cases when you are given readonly access to an existing view. In such cases you can just describe the view shape without specifying the query itself or using it in the migrations. ```ts const newYorkers = sqliteView('new_yorkers', { userId: integer('user_id').notNull(), cityId: integer('city_id'), }).existing(); ``` ## ⚡️ Performance and prepared statements With Drizzle ORM you can go [**faster than better-sqlite3 driver**](https://twitter.com/_alexblokh/status/1593593415907909634) by utilizing our `prepared statements` and `placeholder` APIs ```typescript import { placeholder } from 'drizzle-orm'; const db = drizzle(...); const q = db.select().from(customers).prepare(); q.all() // SELECT * FROM customers const q = db.select().from(customers).where(eq(customers.id, placeholder('id'))).prepare() q.get({ id: 10 }) // SELECT * FROM customers WHERE id = 10 q.get({ id: 12 }) // SELECT * FROM customers WHERE id = 12 const q = db .select() .from(customers) .where(sql`lower(${customers.name}) like ${placeholder('name')}`) .prepare(); q.all({ name: '%an%' }) // SELECT * FROM customers WHERE name ilike '%an%' ``` ## 🗄 Migrations ### Automatic SQL migrations generation with drizzle-kit [Drizzle Kit](https://www.npmjs.com/package/drizzle-kit) is a CLI migrator tool for Drizzle ORM. It is probably the one and only tool that lets you completely automatically generate SQL migrations and covers ~95% of the common cases like deletions and renames by prompting user input. Check out the [docs for Drizzle Kit](https://github.com/drizzle-team/drizzle-kit-mirror). For schema file: ```typescript import { index, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; export const users = sqliteTable('users', { id: integer('id').primaryKey(), fullName: text('full_name'), }, (users) => ({ nameIdx: index('name_idx', users.fullName), })); export const authOtps = sqliteTable('auth_otp', { id: integer('id').primaryKey(), phone: text('phone'), userId: integer('user_id').references(() => users.id), }); ``` It will generate: ```SQL CREATE TABLE IF NOT EXISTS auth_otp ( 'id' INTEGER PRIMARY KEY, 'phone' TEXT, 'user_id' INTEGER ); CREATE TABLE IF NOT EXISTS users ( 'id' INTEGER PRIMARY KEY, 'full_name' TEXT ); DO $$ BEGIN ALTER TABLE auth_otp ADD CONSTRAINT auth_otp_user_id_fkey FOREIGN KEY ('user_id') REFERENCES users(id); EXCEPTION WHEN duplicate_object THEN null; END $$; CREATE INDEX IF NOT EXISTS users_full_name_index ON users (full_name); ``` And you can run migrations manually or using our embedded migrations module ```typescript import { drizzle } from 'drizzle-orm/better-sqlite3'; import { migrate } from 'drizzle-orm/better-sqlite3/migrator'; import Database from 'better-sqlite3'; const sqlite = new Database('sqlite.db'); const db = drizzle(sqlite); // this will automatically run needed migrations on the database migrate(db, { migrationsFolder: './drizzle' }); ``` ## Utility stuff ### Printing SQL query ```typescript const query = db .select({ id: users.id, name: users.name }) .from(users) .groupBy(users.id) .toSQL(); // query: { sql: 'select 'id', 'name' from 'users' group by 'users'.'id'', params: [], } ``` ### Raw query usage ```typescript // it will automatically run a parametrized query! const res: QueryResult = db.run(sql`SELECT * FROM users WHERE user.id = ${userId}`); ``` ## Logging To enable default query logging, just pass `{ logger: true }` to the `drizzle` function: ```typescript import { drizzle } from 'drizzle-orm/better-sqlite3'; const db = drizzle(sqlite, { logger: true }); ``` You can change the logs destination by creating a `DefaultLogger` instance and providing a custom `writer` to it: ```typescript import { DefaultLogger, LogWriter } from 'drizzle-orm/logger'; import { drizzle } from 'drizzle-orm/better-sqlite3'; class MyLogWriter implements LogWriter { write(message: string) { // Write to file, console, etc. } } const logger = new DefaultLogger({ writer: new MyLogWriter() }); const db = drizzle(sqlite, { logger }); ``` You can also create a custom logger: ```typescript import { Logger } from 'drizzle-orm/logger'; import { drizzle } from 'drizzle-orm/better-sqlite3'; class MyLogger implements Logger { logQuery(query: string, params: unknown[]): void { console.log({ query, params }); } } const db = drizzle(sqlite, { logger: new MyLogger() }); ``` ## Table introspect API See [dedicated docs](/docs/table-introspect-api.md). ================================================ FILE: drizzle-orm/src/sqlite-core/alias.ts ================================================ import { TableAliasProxyHandler } from '~/alias.ts'; import type { BuildAliasTable } from './query-builders/select.types.ts'; import type { SQLiteTable } from './table.ts'; import type { SQLiteViewBase } from './view-base.ts'; export function alias( table: TTable, alias: TAlias, ): BuildAliasTable { return new Proxy(table, new TableAliasProxyHandler(alias, false)) as any; } ================================================ FILE: drizzle-orm/src/sqlite-core/checks.ts ================================================ import { entityKind } from '~/entity.ts'; import type { SQL } from '~/sql/sql.ts'; import type { SQLiteTable } from './table.ts'; export class CheckBuilder { static readonly [entityKind]: string = 'SQLiteCheckBuilder'; protected brand!: 'SQLiteConstraintBuilder'; constructor(public name: string, public value: SQL) {} build(table: SQLiteTable): Check { return new Check(table, this); } } export class Check { static readonly [entityKind]: string = 'SQLiteCheck'; declare _: { brand: 'SQLiteCheck'; }; readonly name: string; readonly value: SQL; constructor(public table: SQLiteTable, builder: CheckBuilder) { this.name = builder.name; this.value = builder.value; } } export function check(name: string, value: SQL): CheckBuilder { return new CheckBuilder(name, value); } ================================================ FILE: drizzle-orm/src/sqlite-core/columns/all.ts ================================================ import { blob } from './blob.ts'; import { customType } from './custom.ts'; import { integer } from './integer.ts'; import { numeric } from './numeric.ts'; import { real } from './real.ts'; import { text } from './text.ts'; export function getSQLiteColumnBuilders() { return { blob, customType, integer, numeric, real, text, }; } export type SQLiteColumnBuilders = ReturnType; ================================================ FILE: drizzle-orm/src/sqlite-core/columns/blob.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySQLiteTable } from '~/sqlite-core/table.ts'; import { type Equal, getColumnNameAndConfig, textDecoder } from '~/utils.ts'; import { SQLiteColumn, SQLiteColumnBuilder } from './common.ts'; type BlobMode = 'buffer' | 'json' | 'bigint'; export type SQLiteBigIntBuilderInitial = SQLiteBigIntBuilder<{ name: TName; dataType: 'bigint'; columnType: 'SQLiteBigInt'; data: bigint; driverParam: Buffer; enumValues: undefined; }>; export class SQLiteBigIntBuilder> extends SQLiteColumnBuilder { static override readonly [entityKind]: string = 'SQLiteBigIntBuilder'; constructor(name: T['name']) { super(name, 'bigint', 'SQLiteBigInt'); } /** @internal */ override build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteBigInt> { return new SQLiteBigInt>(table, this.config as ColumnBuilderRuntimeConfig); } } export class SQLiteBigInt> extends SQLiteColumn { static override readonly [entityKind]: string = 'SQLiteBigInt'; getSQLType(): string { return 'blob'; } override mapFromDriverValue(value: Buffer | Uint8Array | ArrayBuffer): bigint { if (typeof Buffer !== 'undefined' && Buffer.from) { const buf = Buffer.isBuffer(value) ? value // eslint-disable-next-line no-instanceof/no-instanceof : value instanceof ArrayBuffer ? Buffer.from(value) : value.buffer ? Buffer.from(value.buffer, value.byteOffset, value.byteLength) : Buffer.from(value); return BigInt(buf.toString('utf8')); } return BigInt(textDecoder!.decode(value)); } override mapToDriverValue(value: bigint): Buffer { return Buffer.from(value.toString()); } } export type SQLiteBlobJsonBuilderInitial = SQLiteBlobJsonBuilder<{ name: TName; dataType: 'json'; columnType: 'SQLiteBlobJson'; data: unknown; driverParam: Buffer; enumValues: undefined; }>; export class SQLiteBlobJsonBuilder> extends SQLiteColumnBuilder { static override readonly [entityKind]: string = 'SQLiteBlobJsonBuilder'; constructor(name: T['name']) { super(name, 'json', 'SQLiteBlobJson'); } /** @internal */ override build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteBlobJson> { return new SQLiteBlobJson>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SQLiteBlobJson> extends SQLiteColumn { static override readonly [entityKind]: string = 'SQLiteBlobJson'; getSQLType(): string { return 'blob'; } override mapFromDriverValue(value: Buffer | Uint8Array | ArrayBuffer): T['data'] { if (typeof Buffer !== 'undefined' && Buffer.from) { const buf = Buffer.isBuffer(value) ? value // eslint-disable-next-line no-instanceof/no-instanceof : value instanceof ArrayBuffer ? Buffer.from(value) : value.buffer ? Buffer.from(value.buffer, value.byteOffset, value.byteLength) : Buffer.from(value); return JSON.parse(buf.toString('utf8')); } return JSON.parse(textDecoder!.decode(value)); } override mapToDriverValue(value: T['data']): Buffer { return Buffer.from(JSON.stringify(value)); } } export type SQLiteBlobBufferBuilderInitial = SQLiteBlobBufferBuilder<{ name: TName; dataType: 'buffer'; columnType: 'SQLiteBlobBuffer'; data: Buffer; driverParam: Buffer; enumValues: undefined; }>; export class SQLiteBlobBufferBuilder> extends SQLiteColumnBuilder { static override readonly [entityKind]: string = 'SQLiteBlobBufferBuilder'; constructor(name: T['name']) { super(name, 'buffer', 'SQLiteBlobBuffer'); } /** @internal */ override build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteBlobBuffer> { return new SQLiteBlobBuffer>(table, this.config as ColumnBuilderRuntimeConfig); } } export class SQLiteBlobBuffer> extends SQLiteColumn { static override readonly [entityKind]: string = 'SQLiteBlobBuffer'; override mapFromDriverValue(value: Buffer | Uint8Array | ArrayBuffer): T['data'] { if (Buffer.isBuffer(value)) { return value; } return Buffer.from(value as Uint8Array); } getSQLType(): string { return 'blob'; } } export interface BlobConfig { mode: TMode; } /** * It's recommended to use `text('...', { mode: 'json' })` instead of `blob` in JSON mode, because it supports JSON functions: * >All JSON functions currently throw an error if any of their arguments are BLOBs because BLOBs are reserved for a future enhancement in which BLOBs will store the binary encoding for JSON. * * https://www.sqlite.org/json1.html */ export function blob(): SQLiteBlobJsonBuilderInitial<''>; export function blob( config?: BlobConfig, ): Equal extends true ? SQLiteBigIntBuilderInitial<''> : Equal extends true ? SQLiteBlobBufferBuilderInitial<''> : SQLiteBlobJsonBuilderInitial<''>; export function blob( name: TName, config?: BlobConfig, ): Equal extends true ? SQLiteBigIntBuilderInitial : Equal extends true ? SQLiteBlobBufferBuilderInitial : SQLiteBlobJsonBuilderInitial; export function blob(a?: string | BlobConfig, b?: BlobConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (config?.mode === 'json') { return new SQLiteBlobJsonBuilder(name); } if (config?.mode === 'bigint') { return new SQLiteBigIntBuilder(name); } return new SQLiteBlobBufferBuilder(name); } ================================================ FILE: drizzle-orm/src/sqlite-core/columns/common.ts ================================================ import type { ColumnBuilderBase, ColumnBuilderBaseConfig, ColumnBuilderExtraConfig, ColumnBuilderRuntimeConfig, ColumnDataType, HasGenerated, MakeColumnConfig, } from '~/column-builder.ts'; import { ColumnBuilder } from '~/column-builder.ts'; import { Column } from '~/column.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { SQL } from '~/sql/sql.ts'; import type { ForeignKey, UpdateDeleteAction } from '~/sqlite-core/foreign-keys.ts'; import { ForeignKeyBuilder } from '~/sqlite-core/foreign-keys.ts'; import type { AnySQLiteTable, SQLiteTable } from '~/sqlite-core/table.ts'; import type { Update } from '~/utils.ts'; import { uniqueKeyName } from '../unique-constraint.ts'; export interface ReferenceConfig { ref: () => SQLiteColumn; actions: { onUpdate?: UpdateDeleteAction; onDelete?: UpdateDeleteAction; }; } export interface SQLiteColumnBuilderBase< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TTypeConfig extends object = object, > extends ColumnBuilderBase {} export interface SQLiteGeneratedColumnConfig { mode?: 'virtual' | 'stored'; } export abstract class SQLiteColumnBuilder< T extends ColumnBuilderBaseConfig = ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, TTypeConfig extends object = object, TExtraConfig extends ColumnBuilderExtraConfig = object, > extends ColumnBuilder implements SQLiteColumnBuilderBase { static override readonly [entityKind]: string = 'SQLiteColumnBuilder'; private foreignKeyConfigs: ReferenceConfig[] = []; references( ref: ReferenceConfig['ref'], actions: ReferenceConfig['actions'] = {}, ): this { this.foreignKeyConfigs.push({ ref, actions }); return this; } unique( name?: string, ): this { this.config.isUnique = true; this.config.uniqueName = name; return this; } generatedAlwaysAs(as: SQL | T['data'] | (() => SQL), config?: SQLiteGeneratedColumnConfig): HasGenerated { this.config.generated = { as, type: 'always', mode: config?.mode ?? 'virtual', }; return this as any; } /** @internal */ buildForeignKeys(column: SQLiteColumn, table: SQLiteTable): ForeignKey[] { return this.foreignKeyConfigs.map(({ ref, actions }) => { return ((ref, actions) => { const builder = new ForeignKeyBuilder(() => { const foreignColumn = ref(); return { columns: [column], foreignColumns: [foreignColumn] }; }); if (actions.onUpdate) { builder.onUpdate(actions.onUpdate); } if (actions.onDelete) { builder.onDelete(actions.onDelete); } return builder.build(table); })(ref, actions); }); } /** @internal */ abstract build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteColumn>; } // To understand how to use `SQLiteColumn` and `AnySQLiteColumn`, see `Column` and `AnyColumn` documentation. export abstract class SQLiteColumn< T extends ColumnBaseConfig = ColumnBaseConfig, TRuntimeConfig extends object = {}, TTypeConfig extends object = {}, > extends Column { static override readonly [entityKind]: string = 'SQLiteColumn'; constructor( override readonly table: SQLiteTable, config: ColumnBuilderRuntimeConfig, ) { if (!config.uniqueName) { config.uniqueName = uniqueKeyName(table, [config.name]); } super(table, config); } } export type AnySQLiteColumn> = {}> = SQLiteColumn< Required, TPartial>> >; ================================================ FILE: drizzle-orm/src/sqlite-core/columns/custom.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { SQL } from '~/sql/sql.ts'; import type { AnySQLiteTable } from '~/sqlite-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SQLiteColumn, SQLiteColumnBuilder } from './common.ts'; export type ConvertCustomConfig> = & { name: TName; dataType: 'custom'; columnType: 'SQLiteCustomColumn'; data: T['data']; driverParam: T['driverData']; enumValues: undefined; } & (T['notNull'] extends true ? { notNull: true } : {}) & (T['default'] extends true ? { hasDefault: true } : {}); export interface SQLiteCustomColumnInnerConfig { customTypeValues: CustomTypeValues; } export class SQLiteCustomColumnBuilder> extends SQLiteColumnBuilder< T, { fieldConfig: CustomTypeValues['config']; customTypeParams: CustomTypeParams; }, { sqliteColumnBuilderBrand: 'SQLiteCustomColumnBuilderBrand'; } > { static override readonly [entityKind]: string = 'SQLiteCustomColumnBuilder'; constructor( name: T['name'], fieldConfig: CustomTypeValues['config'], customTypeParams: CustomTypeParams, ) { super(name, 'custom', 'SQLiteCustomColumn'); this.config.fieldConfig = fieldConfig; this.config.customTypeParams = customTypeParams; } /** @internal */ build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteCustomColumn> { return new SQLiteCustomColumn>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SQLiteCustomColumn> extends SQLiteColumn { static override readonly [entityKind]: string = 'SQLiteCustomColumn'; private sqlName: string; private mapTo?: (value: T['data']) => T['driverParam']; private mapFrom?: (value: T['driverParam']) => T['data']; constructor( table: AnySQLiteTable<{ name: T['tableName'] }>, config: SQLiteCustomColumnBuilder['config'], ) { super(table, config); this.sqlName = config.customTypeParams.dataType(config.fieldConfig); this.mapTo = config.customTypeParams.toDriver; this.mapFrom = config.customTypeParams.fromDriver; } getSQLType(): string { return this.sqlName; } override mapFromDriverValue(value: T['driverParam']): T['data'] { return typeof this.mapFrom === 'function' ? this.mapFrom(value) : value as T['data']; } override mapToDriverValue(value: T['data']): T['driverParam'] { return typeof this.mapTo === 'function' ? this.mapTo(value) : value as T['data']; } } export type CustomTypeValues = { /** * Required type for custom column, that will infer proper type model * * Examples: * * If you want your column to be `string` type after selecting/or on inserting - use `data: string`. Like `text`, `varchar` * * If you want your column to be `number` type after selecting/or on inserting - use `data: number`. Like `integer` */ data: unknown; /** * Type helper, that represents what type database driver is accepting for specific database data type */ driverData?: unknown; /** * What config type should be used for {@link CustomTypeParams} `dataType` generation */ config?: Record; /** * Whether the config argument should be required or not * @default false */ configRequired?: boolean; /** * If your custom data type should be notNull by default you can use `notNull: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ notNull?: boolean; /** * If your custom data type has default you can use `default: true` * * @example * const customSerial = customType<{ data: number, notNull: true, default: true }>({ * dataType() { * return 'serial'; * }, * }); */ default?: boolean; }; export interface CustomTypeParams { /** * Database data type string representation, that is used for migrations * @example * ``` * `jsonb`, `text` * ``` * * If database data type needs additional params you can use them from `config` param * @example * ``` * `varchar(256)`, `numeric(2,3)` * ``` * * To make `config` be of specific type please use config generic in {@link CustomTypeValues} * * @example * Usage example * ``` * dataType() { * return 'boolean'; * }, * ``` * Or * ``` * dataType(config) { * return typeof config.length !== 'undefined' ? `varchar(${config.length})` : `varchar`; * } * ``` */ dataType: (config: T['config'] | (Equal extends true ? never : undefined)) => string; /** * Optional mapping function, between user input and driver * @example * For example, when using jsonb we need to map JS/TS object to string before writing to database * ``` * toDriver(value: TData): string { * return JSON.stringify(value); * } * ``` */ toDriver?: (value: T['data']) => T['driverData'] | SQL; /** * Optional mapping function, that is responsible for data mapping from database to JS/TS code * @example * For example, when using timestamp we need to map string Date representation to JS Date * ``` * fromDriver(value: string): Date { * return new Date(value); * }, * ``` */ fromDriver?: (value: T['driverData']) => T['data']; } /** * Custom sqlite database data type generator */ export function customType( customTypeParams: CustomTypeParams, ): Equal extends true ? { & T['config']>( fieldConfig: TConfig, ): SQLiteCustomColumnBuilder>; ( dbName: TName, fieldConfig: T['config'], ): SQLiteCustomColumnBuilder>; } : { (): SQLiteCustomColumnBuilder>; & T['config']>( fieldConfig?: TConfig, ): SQLiteCustomColumnBuilder>; ( dbName: TName, fieldConfig?: T['config'], ): SQLiteCustomColumnBuilder>; } { return ( a?: TName | T['config'], b?: T['config'], ): SQLiteCustomColumnBuilder> => { const { name, config } = getColumnNameAndConfig(a, b); return new SQLiteCustomColumnBuilder( name as ConvertCustomConfig['name'], config, customTypeParams, ); }; } ================================================ FILE: drizzle-orm/src/sqlite-core/columns/index.ts ================================================ export * from './blob.ts'; export * from './common.ts'; export * from './custom.ts'; export * from './integer.ts'; export * from './numeric.ts'; export * from './real.ts'; export * from './text.ts'; ================================================ FILE: drizzle-orm/src/sqlite-core/columns/integer.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, ColumnDataType, HasDefault, IsPrimaryKey, MakeColumnConfig, NotNull, } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import { sql } from '~/sql/sql.ts'; import type { OnConflict } from '~/sqlite-core/utils.ts'; import { type Equal, getColumnNameAndConfig, type Or } from '~/utils.ts'; import type { AnySQLiteTable } from '../table.ts'; import { SQLiteColumn, SQLiteColumnBuilder } from './common.ts'; export interface PrimaryKeyConfig { autoIncrement?: boolean; onConflict?: OnConflict; } export abstract class SQLiteBaseIntegerBuilder< T extends ColumnBuilderBaseConfig, TRuntimeConfig extends object = object, > extends SQLiteColumnBuilder< T, TRuntimeConfig & { autoIncrement: boolean }, {}, { primaryKeyHasDefault: true } > { static override readonly [entityKind]: string = 'SQLiteBaseIntegerBuilder'; constructor(name: T['name'], dataType: T['dataType'], columnType: T['columnType']) { super(name, dataType, columnType); this.config.autoIncrement = false; } override primaryKey(config?: PrimaryKeyConfig): IsPrimaryKey>> { if (config?.autoIncrement) { this.config.autoIncrement = true; } this.config.hasDefault = true; return super.primaryKey() as IsPrimaryKey>>; } /** @internal */ abstract override build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteBaseInteger>; } export abstract class SQLiteBaseInteger< T extends ColumnBaseConfig, TRuntimeConfig extends object = object, > extends SQLiteColumn { static override readonly [entityKind]: string = 'SQLiteBaseInteger'; readonly autoIncrement: boolean = this.config.autoIncrement; getSQLType(): string { return 'integer'; } } export type SQLiteIntegerBuilderInitial = SQLiteIntegerBuilder<{ name: TName; dataType: 'number'; columnType: 'SQLiteInteger'; data: number; driverParam: number; enumValues: undefined; }>; export class SQLiteIntegerBuilder> extends SQLiteBaseIntegerBuilder { static override readonly [entityKind]: string = 'SQLiteIntegerBuilder'; constructor(name: T['name']) { super(name, 'number', 'SQLiteInteger'); } build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteInteger> { return new SQLiteInteger>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SQLiteInteger> extends SQLiteBaseInteger { static override readonly [entityKind]: string = 'SQLiteInteger'; } export type SQLiteTimestampBuilderInitial = SQLiteTimestampBuilder<{ name: TName; dataType: 'date'; columnType: 'SQLiteTimestamp'; data: Date; driverParam: number; enumValues: undefined; }>; export class SQLiteTimestampBuilder> extends SQLiteBaseIntegerBuilder { static override readonly [entityKind]: string = 'SQLiteTimestampBuilder'; constructor(name: T['name'], mode: 'timestamp' | 'timestamp_ms') { super(name, 'date', 'SQLiteTimestamp'); this.config.mode = mode; } /** * @deprecated Use `default()` with your own expression instead. * * Adds `DEFAULT (cast((julianday('now') - 2440587.5)*86400000 as integer))` to the column, which is the current epoch timestamp in milliseconds. */ defaultNow(): HasDefault { return this.default(sql`(cast((julianday('now') - 2440587.5)*86400000 as integer))`) as any; } build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteTimestamp> { return new SQLiteTimestamp>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SQLiteTimestamp> extends SQLiteBaseInteger { static override readonly [entityKind]: string = 'SQLiteTimestamp'; readonly mode: 'timestamp' | 'timestamp_ms' = this.config.mode; override mapFromDriverValue(value: number): Date { if (this.config.mode === 'timestamp') { return new Date(value * 1000); } return new Date(value); } override mapToDriverValue(value: Date): number { const unix = value.getTime(); if (this.config.mode === 'timestamp') { return Math.floor(unix / 1000); } return unix; } } export type SQLiteBooleanBuilderInitial = SQLiteBooleanBuilder<{ name: TName; dataType: 'boolean'; columnType: 'SQLiteBoolean'; data: boolean; driverParam: number; enumValues: undefined; }>; export class SQLiteBooleanBuilder> extends SQLiteBaseIntegerBuilder { static override readonly [entityKind]: string = 'SQLiteBooleanBuilder'; constructor(name: T['name'], mode: 'boolean') { super(name, 'boolean', 'SQLiteBoolean'); this.config.mode = mode; } build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteBoolean> { return new SQLiteBoolean>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SQLiteBoolean> extends SQLiteBaseInteger { static override readonly [entityKind]: string = 'SQLiteBoolean'; readonly mode: 'boolean' = this.config.mode; override mapFromDriverValue(value: number): boolean { return Number(value) === 1; } override mapToDriverValue(value: boolean): number { return value ? 1 : 0; } } export interface IntegerConfig< TMode extends 'number' | 'timestamp' | 'timestamp_ms' | 'boolean' = | 'number' | 'timestamp' | 'timestamp_ms' | 'boolean', > { mode: TMode; } export function integer(): SQLiteIntegerBuilderInitial<''>; export function integer( config?: IntegerConfig, ): Or, Equal> extends true ? SQLiteTimestampBuilderInitial<''> : Equal extends true ? SQLiteBooleanBuilderInitial<''> : SQLiteIntegerBuilderInitial<''>; export function integer( name: TName, config?: IntegerConfig, ): Or, Equal> extends true ? SQLiteTimestampBuilderInitial : Equal extends true ? SQLiteBooleanBuilderInitial : SQLiteIntegerBuilderInitial; export function integer(a?: string | IntegerConfig, b?: IntegerConfig) { const { name, config } = getColumnNameAndConfig(a, b); if (config?.mode === 'timestamp' || config?.mode === 'timestamp_ms') { return new SQLiteTimestampBuilder(name, config.mode); } if (config?.mode === 'boolean') { return new SQLiteBooleanBuilder(name, config.mode); } return new SQLiteIntegerBuilder(name); } export const int = integer; ================================================ FILE: drizzle-orm/src/sqlite-core/columns/numeric.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySQLiteTable } from '~/sqlite-core/table.ts'; import { type Equal, getColumnNameAndConfig } from '~/utils.ts'; import { SQLiteColumn, SQLiteColumnBuilder } from './common.ts'; export type SQLiteNumericBuilderInitial = SQLiteNumericBuilder<{ name: TName; dataType: 'string'; columnType: 'SQLiteNumeric'; data: string; driverParam: string; enumValues: undefined; }>; export class SQLiteNumericBuilder> extends SQLiteColumnBuilder { static override readonly [entityKind]: string = 'SQLiteNumericBuilder'; constructor(name: T['name']) { super(name, 'string', 'SQLiteNumeric'); } /** @internal */ override build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteNumeric> { return new SQLiteNumeric>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SQLiteNumeric> extends SQLiteColumn { static override readonly [entityKind]: string = 'SQLiteNumeric'; override mapFromDriverValue(value: unknown): string { if (typeof value === 'string') return value; return String(value); } getSQLType(): string { return 'numeric'; } } export type SQLiteNumericNumberBuilderInitial = SQLiteNumericNumberBuilder<{ name: TName; dataType: 'number'; columnType: 'SQLiteNumericNumber'; data: number; driverParam: string; enumValues: undefined; }>; export class SQLiteNumericNumberBuilder> extends SQLiteColumnBuilder { static override readonly [entityKind]: string = 'SQLiteNumericNumberBuilder'; constructor(name: T['name']) { super(name, 'number', 'SQLiteNumericNumber'); } /** @internal */ override build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteNumericNumber> { return new SQLiteNumericNumber>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SQLiteNumericNumber> extends SQLiteColumn { static override readonly [entityKind]: string = 'SQLiteNumericNumber'; override mapFromDriverValue(value: unknown): number { if (typeof value === 'number') return value; return Number(value); } override mapToDriverValue = String; getSQLType(): string { return 'numeric'; } } export type SQLiteNumericBigIntBuilderInitial = SQLiteNumericBigIntBuilder<{ name: TName; dataType: 'bigint'; columnType: 'SQLiteNumericBigInt'; data: bigint; driverParam: string; enumValues: undefined; }>; export class SQLiteNumericBigIntBuilder> extends SQLiteColumnBuilder { static override readonly [entityKind]: string = 'SQLiteNumericBigIntBuilder'; constructor(name: T['name']) { super(name, 'bigint', 'SQLiteNumericBigInt'); } /** @internal */ override build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteNumericBigInt> { return new SQLiteNumericBigInt>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SQLiteNumericBigInt> extends SQLiteColumn { static override readonly [entityKind]: string = 'SQLiteNumericBigInt'; override mapFromDriverValue = BigInt; override mapToDriverValue = String; getSQLType(): string { return 'numeric'; } } export type SQLiteNumericConfig = { mode: T; }; export function numeric( config?: SQLiteNumericConfig, ): Equal extends true ? SQLiteNumericNumberBuilderInitial<''> : Equal extends true ? SQLiteNumericBigIntBuilderInitial<''> : SQLiteNumericBuilderInitial<''>; export function numeric( name: TName, config?: SQLiteNumericConfig, ): Equal extends true ? SQLiteNumericNumberBuilderInitial : Equal extends true ? SQLiteNumericBigIntBuilderInitial : SQLiteNumericBuilderInitial; export function numeric(a?: string | SQLiteNumericConfig, b?: SQLiteNumericConfig) { const { name, config } = getColumnNameAndConfig(a, b); const mode = config?.mode; return mode === 'number' ? new SQLiteNumericNumberBuilder(name) : mode === 'bigint' ? new SQLiteNumericBigIntBuilder(name) : new SQLiteNumericBuilder(name); } ================================================ FILE: drizzle-orm/src/sqlite-core/columns/real.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySQLiteTable } from '../table.ts'; import { SQLiteColumn, SQLiteColumnBuilder } from './common.ts'; export type SQLiteRealBuilderInitial = SQLiteRealBuilder<{ name: TName; dataType: 'number'; columnType: 'SQLiteReal'; data: number; driverParam: number; enumValues: undefined; }>; export class SQLiteRealBuilder> extends SQLiteColumnBuilder { static override readonly [entityKind]: string = 'SQLiteRealBuilder'; constructor(name: T['name']) { super(name, 'number', 'SQLiteReal'); } /** @internal */ override build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteReal> { return new SQLiteReal>(table, this.config as ColumnBuilderRuntimeConfig); } } export class SQLiteReal> extends SQLiteColumn { static override readonly [entityKind]: string = 'SQLiteReal'; getSQLType(): string { return 'real'; } } export function real(): SQLiteRealBuilderInitial<''>; export function real(name: TName): SQLiteRealBuilderInitial; export function real(name?: string) { return new SQLiteRealBuilder(name ?? ''); } ================================================ FILE: drizzle-orm/src/sqlite-core/columns/text.ts ================================================ import type { ColumnBuilderBaseConfig, ColumnBuilderRuntimeConfig, MakeColumnConfig } from '~/column-builder.ts'; import type { ColumnBaseConfig } from '~/column.ts'; import { entityKind } from '~/entity.ts'; import type { AnySQLiteTable } from '~/sqlite-core/table.ts'; import { type Equal, getColumnNameAndConfig, type Writable } from '~/utils.ts'; import { SQLiteColumn, SQLiteColumnBuilder } from './common.ts'; export type SQLiteTextBuilderInitial< TName extends string, TEnum extends [string, ...string[]], TLength extends number | undefined, > = SQLiteTextBuilder<{ name: TName; dataType: 'string'; columnType: 'SQLiteText'; data: TEnum[number]; driverParam: string; enumValues: TEnum; length: TLength; }>; export class SQLiteTextBuilder< T extends ColumnBuilderBaseConfig<'string', 'SQLiteText'> & { length?: number | undefined }, > extends SQLiteColumnBuilder< T, { length: T['length']; enumValues: T['enumValues'] }, { length: T['length'] } > { static override readonly [entityKind]: string = 'SQLiteTextBuilder'; constructor(name: T['name'], config: SQLiteTextConfig<'text', T['enumValues'], T['length']>) { super(name, 'string', 'SQLiteText'); this.config.enumValues = config.enum; this.config.length = config.length; } /** @internal */ override build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteText & { length: T['length'] }> { return new SQLiteText & { length: T['length'] }>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SQLiteText & { length?: number | undefined }> extends SQLiteColumn { static override readonly [entityKind]: string = 'SQLiteText'; override readonly enumValues = this.config.enumValues; readonly length: T['length'] = this.config.length; constructor( table: AnySQLiteTable<{ name: T['tableName'] }>, config: SQLiteTextBuilder['config'], ) { super(table, config); } getSQLType(): string { return `text${this.config.length ? `(${this.config.length})` : ''}`; } } export type SQLiteTextJsonBuilderInitial = SQLiteTextJsonBuilder<{ name: TName; dataType: 'json'; columnType: 'SQLiteTextJson'; data: unknown; driverParam: string; enumValues: undefined; generated: undefined; }>; export class SQLiteTextJsonBuilder> extends SQLiteColumnBuilder { static override readonly [entityKind]: string = 'SQLiteTextJsonBuilder'; constructor(name: T['name']) { super(name, 'json', 'SQLiteTextJson'); } /** @internal */ override build( table: AnySQLiteTable<{ name: TTableName }>, ): SQLiteTextJson> { return new SQLiteTextJson>( table, this.config as ColumnBuilderRuntimeConfig, ); } } export class SQLiteTextJson> extends SQLiteColumn { static override readonly [entityKind]: string = 'SQLiteTextJson'; getSQLType(): string { return 'text'; } override mapFromDriverValue(value: string): T['data'] { return JSON.parse(value); } override mapToDriverValue(value: T['data']): string { return JSON.stringify(value); } } export type SQLiteTextConfig< TMode extends 'text' | 'json' = 'text' | 'json', TEnum extends readonly string[] | string[] | undefined = readonly string[] | string[] | undefined, TLength extends number | undefined = number | undefined, > = TMode extends 'text' ? { mode?: TMode; length?: TLength; enum?: TEnum; } : { mode?: TMode; }; export function text(): SQLiteTextBuilderInitial<'', [string, ...string[]], undefined>; export function text< U extends string, T extends Readonly<[U, ...U[]]>, L extends number | undefined, TMode extends 'text' | 'json' = 'text' | 'json', >( config?: SQLiteTextConfig, L>, ): Equal extends true ? SQLiteTextJsonBuilderInitial<''> : SQLiteTextBuilderInitial<'', Writable, L>; export function text< TName extends string, U extends string, T extends Readonly<[U, ...U[]]>, L extends number | undefined, TMode extends 'text' | 'json' = 'text' | 'json', >( name: TName, config?: SQLiteTextConfig, L>, ): Equal extends true ? SQLiteTextJsonBuilderInitial : SQLiteTextBuilderInitial, L>; export function text(a?: string | SQLiteTextConfig, b: SQLiteTextConfig = {}): any { const { name, config } = getColumnNameAndConfig(a, b); if (config.mode === 'json') { return new SQLiteTextJsonBuilder(name); } return new SQLiteTextBuilder(name, config as any); } ================================================ FILE: drizzle-orm/src/sqlite-core/db.ts ================================================ import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import { type ColumnsSelection, type SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { SQLiteAsyncDialect, SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import { QueryBuilder, SQLiteDeleteBase, SQLiteInsertBuilder, SQLiteSelectBuilder, SQLiteUpdateBuilder, } from '~/sqlite-core/query-builders/index.ts'; import type { DBResult, Result, SQLiteSession, SQLiteTransaction, SQLiteTransactionConfig, } from '~/sqlite-core/session.ts'; import type { SQLiteTable } from '~/sqlite-core/table.ts'; import { WithSubquery } from '~/subquery.ts'; import type { DrizzleTypeError } from '~/utils.ts'; import { SQLiteCountBuilder } from './query-builders/count.ts'; import { RelationalQueryBuilder } from './query-builders/query.ts'; import { SQLiteRaw } from './query-builders/raw.ts'; import type { SelectedFields } from './query-builders/select.types.ts'; import type { WithBuilder } from './subquery.ts'; import type { SQLiteViewBase } from './view-base.ts'; export class BaseSQLiteDatabase< TResultKind extends 'sync' | 'async', TRunResult, TFullSchema extends Record = Record, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations, > { static readonly [entityKind]: string = 'BaseSQLiteDatabase'; declare readonly _: { readonly schema: TSchema | undefined; readonly fullSchema: TFullSchema; readonly tableNamesMap: Record; }; query: TFullSchema extends Record ? DrizzleTypeError<'Seems like the schema generic is missing - did you forget to add it to your DB type?'> : { [K in keyof TSchema]: RelationalQueryBuilder; }; constructor( private resultKind: TResultKind, /** @internal */ readonly dialect: { sync: SQLiteSyncDialect; async: SQLiteAsyncDialect }[TResultKind], /** @internal */ readonly session: SQLiteSession, schema: RelationalSchemaConfig | undefined, ) { this._ = schema ? { schema: schema.schema, fullSchema: schema.fullSchema as TFullSchema, tableNamesMap: schema.tableNamesMap, } : { schema: undefined, fullSchema: {} as TFullSchema, tableNamesMap: {}, }; this.query = {} as typeof this['query']; const query = this.query as { [K in keyof TSchema]: RelationalQueryBuilder; }; if (this._.schema) { for (const [tableName, columns] of Object.entries(this._.schema)) { query[tableName as keyof TSchema] = new RelationalQueryBuilder( resultKind, schema!.fullSchema, this._.schema, this._.tableNamesMap, schema!.fullSchema[tableName] as SQLiteTable, columns, dialect, session as SQLiteSession as any, ) as typeof query[keyof TSchema]; } } this.$cache = { invalidate: async (_params: any) => {} }; } /** * Creates a subquery that defines a temporary named result set as a CTE. * * It is useful for breaking down complex queries into simpler parts and for reusing the result set in subsequent parts of the query. * * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} * * @param alias The alias for the subquery. * * Failure to provide an alias will result in a DrizzleTypeError, preventing the subquery from being referenced in other queries. * * @example * * ```ts * // Create a subquery with alias 'sq' and use it in the select query * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); * * const result = await db.with(sq).select().from(sq); * ``` * * To select arbitrary SQL values as fields in a CTE and reference them in other CTEs or in the main query, you need to add aliases to them: * * ```ts * // Select an arbitrary SQL value as a field in a CTE and reference it in the main query * const sq = db.$with('sq').as(db.select({ * name: sql`upper(${users.name})`.as('name'), * }) * .from(users)); * * const result = await db.with(sq).select({ name: sq.name }).from(sq); * ``` */ $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const self = this; const as = ( qb: | TypedQueryBuilder | SQL | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), ) => { if (typeof qb === 'function') { qb = qb(new QueryBuilder(self.dialect)); } return new Proxy( new WithSubquery( qb.getSQL(), selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, alias, true, ), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ); }; return { as }; }; $count( source: SQLiteTable | SQLiteViewBase | SQL | SQLWrapper, filters?: SQL, ) { return new SQLiteCountBuilder({ source, filters, session: this.session }); } /** * Incorporates a previously defined CTE (using `$with`) into the main query. * * This method allows the main query to reference a temporary named result set. * * See docs: {@link https://orm.drizzle.team/docs/select#with-clause} * * @param queries The CTEs to incorporate into the main query. * * @example * * ```ts * // Define a subquery 'sq' as a CTE using $with * const sq = db.$with('sq').as(db.select().from(users).where(eq(users.id, 42))); * * // Incorporate the CTE 'sq' into the main query and select from it * const result = await db.with(sq).select().from(sq); * ``` */ with(...queries: WithSubquery[]) { const self = this; /** * Creates a select query. * * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select} * * @param fields The selection object. * * @example * * ```ts * // Select all columns and all rows from the 'cars' table * const allCars: Car[] = await db.select().from(cars); * * // Select specific columns and all rows from the 'cars' table * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ * id: cars.id, * brand: cars.brand * }) * .from(cars); * ``` * * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: * * ```ts * // Select specific columns along with expression and all rows from the 'cars' table * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ * id: cars.id, * lowerBrand: sql`lower(${cars.brand})`, * }) * .from(cars); * ``` */ function select(): SQLiteSelectBuilder; function select( fields: TSelection, ): SQLiteSelectBuilder; function select( fields?: SelectedFields, ): SQLiteSelectBuilder { return new SQLiteSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, }); } /** * Adds `distinct` expression to the select query. * * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param fields The selection object. * * @example * * ```ts * // Select all unique rows from the 'cars' table * await db.selectDistinct() * .from(cars) * .orderBy(cars.id, cars.brand, cars.color); * * // Select all unique brands from the 'cars' table * await db.selectDistinct({ brand: cars.brand }) * .from(cars) * .orderBy(cars.brand); * ``` */ function selectDistinct(): SQLiteSelectBuilder; function selectDistinct( fields: TSelection, ): SQLiteSelectBuilder; function selectDistinct( fields?: SelectedFields, ): SQLiteSelectBuilder { return new SQLiteSelectBuilder({ fields: fields ?? undefined, session: self.session, dialect: self.dialect, withList: queries, distinct: true, }); } /** * Creates an update query. * * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. * * Use `.set()` method to specify which values to update. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param table The table to update. * * @example * * ```ts * // Update all rows in the 'cars' table * await db.update(cars).set({ color: 'red' }); * * // Update rows with filters and conditions * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); * * // Update with returning clause * const updatedCar: Car[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.id, 1)) * .returning(); * ``` */ function update(table: TTable): SQLiteUpdateBuilder { return new SQLiteUpdateBuilder(table, self.session, self.dialect, queries); } /** * Creates an insert query. * * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. * * See docs: {@link https://orm.drizzle.team/docs/insert} * * @param table The table to insert into. * * @example * * ```ts * // Insert one row * await db.insert(cars).values({ brand: 'BMW' }); * * // Insert multiple rows * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); * * // Insert with returning clause * const insertedCar: Car[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning(); * ``` */ function insert(into: TTable): SQLiteInsertBuilder { return new SQLiteInsertBuilder(into, self.session, self.dialect, queries); } /** * Creates a delete query. * * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param table The table to delete from. * * @example * * ```ts * // Delete all rows in the 'cars' table * await db.delete(cars); * * // Delete rows with filters and conditions * await db.delete(cars).where(eq(cars.color, 'green')); * * // Delete with returning clause * const deletedCar: Car[] = await db.delete(cars) * .where(eq(cars.id, 1)) * .returning(); * ``` */ function delete_(from: TTable): SQLiteDeleteBase { return new SQLiteDeleteBase(from, self.session, self.dialect, queries); } return { select, selectDistinct, update, insert, delete: delete_ }; } /** * Creates a select query. * * Calling this method with no arguments will select all columns from the table. Pass a selection object to specify the columns you want to select. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select} * * @param fields The selection object. * * @example * * ```ts * // Select all columns and all rows from the 'cars' table * const allCars: Car[] = await db.select().from(cars); * * // Select specific columns and all rows from the 'cars' table * const carsIdsAndBrands: { id: number; brand: string }[] = await db.select({ * id: cars.id, * brand: cars.brand * }) * .from(cars); * ``` * * Like in SQL, you can use arbitrary expressions as selection fields, not just table columns: * * ```ts * // Select specific columns along with expression and all rows from the 'cars' table * const carsIdsAndLowerNames: { id: number; lowerBrand: string }[] = await db.select({ * id: cars.id, * lowerBrand: sql`lower(${cars.brand})`, * }) * .from(cars); * ``` */ select(): SQLiteSelectBuilder; select( fields: TSelection, ): SQLiteSelectBuilder; select(fields?: SelectedFields): SQLiteSelectBuilder { return new SQLiteSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect }); } /** * Adds `distinct` expression to the select query. * * Calling this method will return only unique values. When multiple columns are selected, it returns rows with unique combinations of values in these columns. * * Use `.from()` method to specify which table to select from. * * See docs: {@link https://orm.drizzle.team/docs/select#distinct} * * @param fields The selection object. * * @example * * ```ts * // Select all unique rows from the 'cars' table * await db.selectDistinct() * .from(cars) * .orderBy(cars.id, cars.brand, cars.color); * * // Select all unique brands from the 'cars' table * await db.selectDistinct({ brand: cars.brand }) * .from(cars) * .orderBy(cars.brand); * ``` */ selectDistinct(): SQLiteSelectBuilder; selectDistinct( fields: TSelection, ): SQLiteSelectBuilder; selectDistinct( fields?: SelectedFields, ): SQLiteSelectBuilder { return new SQLiteSelectBuilder({ fields: fields ?? undefined, session: this.session, dialect: this.dialect, distinct: true, }); } /** * Creates an update query. * * Calling this method without `.where()` clause will update all rows in a table. The `.where()` clause specifies which rows should be updated. * * Use `.set()` method to specify which values to update. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param table The table to update. * * @example * * ```ts * // Update all rows in the 'cars' table * await db.update(cars).set({ color: 'red' }); * * // Update rows with filters and conditions * await db.update(cars).set({ color: 'red' }).where(eq(cars.brand, 'BMW')); * * // Update with returning clause * const updatedCar: Car[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.id, 1)) * .returning(); * ``` */ update(table: TTable): SQLiteUpdateBuilder { return new SQLiteUpdateBuilder(table, this.session, this.dialect); } $cache: { invalidate: Cache['onMutate'] }; /** * Creates an insert query. * * Calling this method will create new rows in a table. Use `.values()` method to specify which values to insert. * * See docs: {@link https://orm.drizzle.team/docs/insert} * * @param table The table to insert into. * * @example * * ```ts * // Insert one row * await db.insert(cars).values({ brand: 'BMW' }); * * // Insert multiple rows * await db.insert(cars).values([{ brand: 'BMW' }, { brand: 'Porsche' }]); * * // Insert with returning clause * const insertedCar: Car[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning(); * ``` */ insert(into: TTable): SQLiteInsertBuilder { return new SQLiteInsertBuilder(into, this.session, this.dialect); } /** * Creates a delete query. * * Calling this method without `.where()` clause will delete all rows in a table. The `.where()` clause specifies which rows should be deleted. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param table The table to delete from. * * @example * * ```ts * // Delete all rows in the 'cars' table * await db.delete(cars); * * // Delete rows with filters and conditions * await db.delete(cars).where(eq(cars.color, 'green')); * * // Delete with returning clause * const deletedCar: Car[] = await db.delete(cars) * .where(eq(cars.id, 1)) * .returning(); * ``` */ delete(from: TTable): SQLiteDeleteBase { return new SQLiteDeleteBase(from, this.session, this.dialect); } run(query: SQLWrapper | string): DBResult { const sequel = typeof query === 'string' ? sql.raw(query) : query.getSQL(); if (this.resultKind === 'async') { return new SQLiteRaw( async () => this.session.run(sequel), () => sequel, 'run', this.dialect as SQLiteAsyncDialect, this.session.extractRawRunValueFromBatchResult.bind(this.session), ) as DBResult; } return this.session.run(sequel) as DBResult; } all(query: SQLWrapper | string): DBResult { const sequel = typeof query === 'string' ? sql.raw(query) : query.getSQL(); if (this.resultKind === 'async') { return new SQLiteRaw( async () => this.session.all(sequel), () => sequel, 'all', this.dialect as SQLiteAsyncDialect, this.session.extractRawAllValueFromBatchResult.bind(this.session), ) as any; } return this.session.all(sequel) as DBResult; } get(query: SQLWrapper | string): DBResult { const sequel = typeof query === 'string' ? sql.raw(query) : query.getSQL(); if (this.resultKind === 'async') { return new SQLiteRaw( async () => this.session.get(sequel), () => sequel, 'get', this.dialect as SQLiteAsyncDialect, this.session.extractRawGetValueFromBatchResult.bind(this.session), ) as DBResult; } return this.session.get(sequel) as DBResult; } values(query: SQLWrapper | string): DBResult { const sequel = typeof query === 'string' ? sql.raw(query) : query.getSQL(); if (this.resultKind === 'async') { return new SQLiteRaw( async () => this.session.values(sequel), () => sequel, 'values', this.dialect as SQLiteAsyncDialect, this.session.extractRawValuesValueFromBatchResult.bind(this.session), ) as any; } return this.session.values(sequel) as DBResult; } transaction( transaction: (tx: SQLiteTransaction) => Result, config?: SQLiteTransactionConfig, ): Result { return this.session.transaction(transaction, config); } } export type SQLiteWithReplicas = Q & { $primary: Q; $replicas: Q[] }; export const withReplicas = < TResultKind extends 'sync' | 'async', TRunResult, TFullSchema extends Record, TSchema extends TablesRelationalConfig, Q extends BaseSQLiteDatabase< TResultKind, TRunResult, TFullSchema, TSchema extends Record ? ExtractTablesWithRelations : TSchema >, >( primary: Q, replicas: [Q, ...Q[]], getReplica: (replicas: Q[]) => Q = () => replicas[Math.floor(Math.random() * replicas.length)]!, ): SQLiteWithReplicas => { const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); const $count: Q['$count'] = (...args: [any]) => getReplica(replicas).$count(...args); const $with: Q['with'] = (...args: []) => getReplica(replicas).with(...args); const update: Q['update'] = (...args: [any]) => primary.update(...args); const insert: Q['insert'] = (...args: [any]) => primary.insert(...args); const $delete: Q['delete'] = (...args: [any]) => primary.delete(...args); const run: Q['run'] = (...args: [any]) => primary.run(...args); const all: Q['all'] = (...args: [any]) => primary.all(...args); const get: Q['get'] = (...args: [any]) => primary.get(...args); const values: Q['values'] = (...args: [any]) => primary.values(...args); const transaction: Q['transaction'] = (...args: [any]) => primary.transaction(...args); return { ...primary, update, insert, delete: $delete, run, all, get, values, transaction, $primary: primary, $replicas: replicas, select, selectDistinct, $count, with: $with, get query() { return getReplica(replicas).query; }, }; }; ================================================ FILE: drizzle-orm/src/sqlite-core/dialect.ts ================================================ import { aliasedTable, aliasedTableColumn, mapColumnsInAliasedSQLToAlias, mapColumnsInSQLToAlias } from '~/alias.ts'; import { CasingCache } from '~/casing.ts'; import type { AnyColumn } from '~/column.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import { DrizzleError } from '~/errors.ts'; import type { MigrationConfig, MigrationMeta } from '~/migrator.ts'; import { type BuildRelationalQueryResult, type DBQueryConfig, getOperators, getOrderByOperators, Many, normalizeRelation, One, type Relation, type TableRelationalConfig, type TablesRelationalConfig, } from '~/relations.ts'; import type { Name, Placeholder } from '~/sql/index.ts'; import { and, eq } from '~/sql/index.ts'; import { Param, type QueryWithTypings, SQL, sql, type SQLChunk } from '~/sql/sql.ts'; import { SQLiteColumn } from '~/sqlite-core/columns/index.ts'; import type { AnySQLiteSelectQueryBuilder, SQLiteDeleteConfig, SQLiteInsertConfig, SQLiteUpdateConfig, } from '~/sqlite-core/query-builders/index.ts'; import { SQLiteTable } from '~/sqlite-core/table.ts'; import { Subquery } from '~/subquery.ts'; import { getTableName, getTableUniqueName, Table } from '~/table.ts'; import { type Casing, orderSelectedFields, type UpdateSet } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { SelectedFieldsOrdered, SQLiteSelectConfig, SQLiteSelectJoinConfig, } from './query-builders/select.types.ts'; import type { SQLiteSession } from './session.ts'; import { SQLiteViewBase } from './view-base.ts'; export interface SQLiteDialectConfig { casing?: Casing; } export abstract class SQLiteDialect { static readonly [entityKind]: string = 'SQLiteDialect'; /** @internal */ readonly casing: CasingCache; constructor(config?: SQLiteDialectConfig) { this.casing = new CasingCache(config?.casing); } escapeName(name: string): string { return `"${name}"`; } escapeParam(_num: number): string { return '?'; } escapeString(str: string): string { return `'${str.replace(/'/g, "''")}'`; } private buildWithCTE(queries: Subquery[] | undefined): SQL | undefined { if (!queries?.length) return undefined; const withSqlChunks = [sql`with `]; for (const [i, w] of queries.entries()) { withSqlChunks.push(sql`${sql.identifier(w._.alias)} as (${w._.sql})`); if (i < queries.length - 1) { withSqlChunks.push(sql`, `); } } withSqlChunks.push(sql` `); return sql.join(withSqlChunks); } buildDeleteQuery({ table, where, returning, withList, limit, orderBy }: SQLiteDeleteConfig): SQL { const withSql = this.buildWithCTE(withList); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` : undefined; const whereSql = where ? sql` where ${where}` : undefined; const orderBySql = this.buildOrderBy(orderBy); const limitSql = this.buildLimit(limit); return sql`${withSql}delete from ${table}${whereSql}${returningSql}${orderBySql}${limitSql}`; } buildUpdateSet(table: SQLiteTable, set: UpdateSet): SQL { const tableColumns = table[Table.Symbol.Columns]; const columnNames = Object.keys(tableColumns).filter((colName) => set[colName] !== undefined || tableColumns[colName]?.onUpdateFn !== undefined ); const setSize = columnNames.length; return sql.join(columnNames.flatMap((colName, i) => { const col = tableColumns[colName]!; const onUpdateFnResult = col.onUpdateFn?.(); const value = set[colName] ?? (is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col)); const res = sql`${sql.identifier(this.casing.getColumnCasing(col))} = ${value}`; if (i < setSize - 1) { return [res, sql.raw(', ')]; } return [res]; })); } buildUpdateQuery({ table, set, where, returning, withList, joins, from, limit, orderBy }: SQLiteUpdateConfig): SQL { const withSql = this.buildWithCTE(withList); const setSql = this.buildUpdateSet(table, set); const fromSql = from && sql.join([sql.raw(' from '), this.buildFromTable(from)]); const joinsSql = this.buildJoins(joins); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` : undefined; const whereSql = where ? sql` where ${where}` : undefined; const orderBySql = this.buildOrderBy(orderBy); const limitSql = this.buildLimit(limit); return sql`${withSql}update ${table} set ${setSql}${fromSql}${joinsSql}${whereSql}${returningSql}${orderBySql}${limitSql}`; } /** * Builds selection SQL with provided fields/expressions * * Examples: * * `select from` * * `insert ... returning ` * * If `isSingleTable` is true, then columns won't be prefixed with table name */ private buildSelection( fields: SelectedFieldsOrdered, { isSingleTable = false }: { isSingleTable?: boolean } = {}, ): SQL { const columnsLen = fields.length; const chunks = fields .flatMap(({ field }, i) => { const chunk: SQLChunk[] = []; if (is(field, SQL.Aliased) && field.isSelectionField) { chunk.push(sql.identifier(field.fieldAlias)); } else if (is(field, SQL.Aliased) || is(field, SQL)) { const query = is(field, SQL.Aliased) ? field.sql : field; if (isSingleTable) { chunk.push( new SQL( query.queryChunks.map((c) => { if (is(c, Column)) { return sql.identifier(this.casing.getColumnCasing(c)); } return c; }), ), ); } else { chunk.push(query); } if (is(field, SQL.Aliased)) { chunk.push(sql` as ${sql.identifier(field.fieldAlias)}`); } } else if (is(field, Column)) { const tableName = field.table[Table.Symbol.Name]; if (field.columnType === 'SQLiteNumericBigInt') { if (isSingleTable) { chunk.push(sql`cast(${sql.identifier(this.casing.getColumnCasing(field))} as text)`); } else { chunk.push( sql`cast(${sql.identifier(tableName)}.${sql.identifier(this.casing.getColumnCasing(field))} as text)`, ); } } else { if (isSingleTable) { chunk.push(sql.identifier(this.casing.getColumnCasing(field))); } else { chunk.push(sql`${sql.identifier(tableName)}.${sql.identifier(this.casing.getColumnCasing(field))}`); } } } else if (is(field, Subquery)) { const entries = Object.entries(field._.selectedFields) as [string, SQL.Aliased | Column | SQL][]; if (entries.length === 1) { const entry = entries[0]![1]; const fieldDecoder = is(entry, SQL) ? entry.decoder : is(entry, Column) ? { mapFromDriverValue: (v: any) => entry.mapFromDriverValue(v) } : entry.sql.decoder; if (fieldDecoder) field._.sql.decoder = fieldDecoder; } chunk.push(field); } if (i < columnsLen - 1) { chunk.push(sql`, `); } return chunk; }); return sql.join(chunks); } private buildJoins(joins: SQLiteSelectJoinConfig[] | undefined): SQL | undefined { if (!joins || joins.length === 0) { return undefined; } const joinsArray: SQL[] = []; if (joins) { for (const [index, joinMeta] of joins.entries()) { if (index === 0) { joinsArray.push(sql` `); } const table = joinMeta.table; const onSql = joinMeta.on ? sql` on ${joinMeta.on}` : undefined; if (is(table, SQLiteTable)) { const tableName = table[SQLiteTable.Symbol.Name]; const tableSchema = table[SQLiteTable.Symbol.Schema]; const origTableName = table[SQLiteTable.Symbol.OriginalName]; const alias = tableName === origTableName ? undefined : joinMeta.alias; joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join ${tableSchema ? sql`${sql.identifier(tableSchema)}.` : undefined}${ sql.identifier(origTableName) }${alias && sql` ${sql.identifier(alias)}`}${onSql}`, ); } else { joinsArray.push( sql`${sql.raw(joinMeta.joinType)} join ${table}${onSql}`, ); } if (index < joins.length - 1) { joinsArray.push(sql` `); } } } return sql.join(joinsArray); } private buildLimit(limit: number | Placeholder | undefined): SQL | undefined { return typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) ? sql` limit ${limit}` : undefined; } private buildOrderBy(orderBy: (SQLiteColumn | SQL | SQL.Aliased)[] | undefined): SQL | undefined { const orderByList: (SQLiteColumn | SQL | SQL.Aliased)[] = []; if (orderBy) { for (const [index, orderByValue] of orderBy.entries()) { orderByList.push(orderByValue); if (index < orderBy.length - 1) { orderByList.push(sql`, `); } } } return orderByList.length > 0 ? sql` order by ${sql.join(orderByList)}` : undefined; } private buildFromTable( table: SQL | Subquery | SQLiteViewBase | SQLiteTable | undefined, ): SQL | Subquery | SQLiteViewBase | SQLiteTable | undefined { if (is(table, Table) && table[Table.Symbol.IsAlias]) { return sql`${sql`${sql.identifier(table[Table.Symbol.Schema] ?? '')}.`.if(table[Table.Symbol.Schema])}${ sql.identifier(table[Table.Symbol.OriginalName]) } ${sql.identifier(table[Table.Symbol.Name])}`; } return table; } buildSelectQuery( { withList, fields, fieldsFlat, where, having, table, joins, orderBy, groupBy, limit, offset, distinct, setOperators, }: SQLiteSelectConfig, ): SQL { const fieldsList = fieldsFlat ?? orderSelectedFields(fields); for (const f of fieldsList) { if ( is(f.field, Column) && getTableName(f.field.table) !== (is(table, Subquery) ? table._.alias : is(table, SQLiteViewBase) ? table[ViewBaseConfig].name : is(table, SQL) ? undefined : getTableName(table)) && !((table) => joins?.some(({ alias }) => alias === (table[Table.Symbol.IsAlias] ? getTableName(table) : table[Table.Symbol.BaseName]) ))(f.field.table) ) { const tableName = getTableName(f.field.table); throw new Error( `Your "${ f.path.join('->') }" field references a column "${tableName}"."${f.field.name}", but the table "${tableName}" is not part of the query! Did you forget to join it?`, ); } } const isSingleTable = !joins || joins.length === 0; const withSql = this.buildWithCTE(withList); const distinctSql = distinct ? sql` distinct` : undefined; const selection = this.buildSelection(fieldsList, { isSingleTable }); const tableSql = this.buildFromTable(table); const joinsSql = this.buildJoins(joins); const whereSql = where ? sql` where ${where}` : undefined; const havingSql = having ? sql` having ${having}` : undefined; const groupByList: (SQL | AnyColumn | SQL.Aliased)[] = []; if (groupBy) { for (const [index, groupByValue] of groupBy.entries()) { groupByList.push(groupByValue); if (index < groupBy.length - 1) { groupByList.push(sql`, `); } } } const groupBySql = groupByList.length > 0 ? sql` group by ${sql.join(groupByList)}` : undefined; const orderBySql = this.buildOrderBy(orderBy); const limitSql = this.buildLimit(limit); const offsetSql = offset ? sql` offset ${offset}` : undefined; const finalQuery = sql`${withSql}select${distinctSql} ${selection} from ${tableSql}${joinsSql}${whereSql}${groupBySql}${havingSql}${orderBySql}${limitSql}${offsetSql}`; if (setOperators.length > 0) { return this.buildSetOperations(finalQuery, setOperators); } return finalQuery; } buildSetOperations(leftSelect: SQL, setOperators: SQLiteSelectConfig['setOperators']): SQL { const [setOperator, ...rest] = setOperators; if (!setOperator) { throw new Error('Cannot pass undefined values to any set operator'); } if (rest.length === 0) { return this.buildSetOperationQuery({ leftSelect, setOperator }); } // Some recursive magic here return this.buildSetOperations( this.buildSetOperationQuery({ leftSelect, setOperator }), rest, ); } buildSetOperationQuery({ leftSelect, setOperator: { type, isAll, rightSelect, limit, orderBy, offset }, }: { leftSelect: SQL; setOperator: SQLiteSelectConfig['setOperators'][number] }): SQL { // SQLite doesn't support parenthesis in set operations const leftChunk = sql`${leftSelect.getSQL()} `; const rightChunk = sql`${rightSelect.getSQL()}`; let orderBySql; if (orderBy && orderBy.length > 0) { const orderByValues: (SQL | Name)[] = []; // The next bit is necessary because the sql operator replaces ${table.column} with `table`.`column` // which is invalid Sql syntax, Table from one of the SELECTs cannot be used in global ORDER clause for (const singleOrderBy of orderBy) { if (is(singleOrderBy, SQLiteColumn)) { orderByValues.push(sql.identifier(singleOrderBy.name)); } else if (is(singleOrderBy, SQL)) { for (let i = 0; i < singleOrderBy.queryChunks.length; i++) { const chunk = singleOrderBy.queryChunks[i]; if (is(chunk, SQLiteColumn)) { singleOrderBy.queryChunks[i] = sql.identifier(this.casing.getColumnCasing(chunk)); } } orderByValues.push(sql`${singleOrderBy}`); } else { orderByValues.push(sql`${singleOrderBy}`); } } orderBySql = sql` order by ${sql.join(orderByValues, sql`, `)}`; } const limitSql = typeof limit === 'object' || (typeof limit === 'number' && limit >= 0) ? sql` limit ${limit}` : undefined; const operatorChunk = sql.raw(`${type} ${isAll ? 'all ' : ''}`); const offsetSql = offset ? sql` offset ${offset}` : undefined; return sql`${leftChunk}${operatorChunk}${rightChunk}${orderBySql}${limitSql}${offsetSql}`; } buildInsertQuery( { table, values: valuesOrSelect, onConflict, returning, withList, select }: SQLiteInsertConfig, ): SQL { // const isSingleValue = values.length === 1; const valuesSqlList: ((SQLChunk | SQL)[] | SQL)[] = []; const columns: Record = table[Table.Symbol.Columns]; const colEntries: [string, SQLiteColumn][] = Object.entries(columns).filter(([_, col]) => !col.shouldDisableInsert() ); const insertOrder = colEntries.map(([, column]) => sql.identifier(this.casing.getColumnCasing(column))); if (select) { const select = valuesOrSelect as AnySQLiteSelectQueryBuilder | SQL; if (is(select, SQL)) { valuesSqlList.push(select); } else { valuesSqlList.push(select.getSQL()); } } else { const values = valuesOrSelect as Record[]; valuesSqlList.push(sql.raw('values ')); for (const [valueIndex, value] of values.entries()) { const valueList: (SQLChunk | SQL)[] = []; for (const [fieldName, col] of colEntries) { const colValue = value[fieldName]; if (colValue === undefined || (is(colValue, Param) && colValue.value === undefined)) { let defaultValue; if (col.default !== null && col.default !== undefined) { defaultValue = is(col.default, SQL) ? col.default : sql.param(col.default, col); // eslint-disable-next-line unicorn/no-negated-condition } else if (col.defaultFn !== undefined) { const defaultFnResult = col.defaultFn(); defaultValue = is(defaultFnResult, SQL) ? defaultFnResult : sql.param(defaultFnResult, col); // eslint-disable-next-line unicorn/no-negated-condition } else if (!col.default && col.onUpdateFn !== undefined) { const onUpdateFnResult = col.onUpdateFn(); defaultValue = is(onUpdateFnResult, SQL) ? onUpdateFnResult : sql.param(onUpdateFnResult, col); } else { defaultValue = sql`null`; } valueList.push(defaultValue); } else { valueList.push(colValue); } } valuesSqlList.push(valueList); if (valueIndex < values.length - 1) { valuesSqlList.push(sql`, `); } } } const withSql = this.buildWithCTE(withList); const valuesSql = sql.join(valuesSqlList); const returningSql = returning ? sql` returning ${this.buildSelection(returning, { isSingleTable: true })}` : undefined; const onConflictSql = onConflict?.length ? sql.join(onConflict) : undefined; // if (isSingleValue && valuesSqlList.length === 0){ // return sql`insert into ${table} default values ${onConflictSql}${returningSql}`; // } return sql`${withSql}insert into ${table} ${insertOrder} ${valuesSql}${onConflictSql}${returningSql}`; } sqlToQuery(sql: SQL, invokeSource?: 'indexes' | undefined): QueryWithTypings { return sql.toQuery({ casing: this.casing, escapeName: this.escapeName, escapeParam: this.escapeParam, escapeString: this.escapeString, invokeSource, }); } buildRelationalQuery({ fullSchema, schema, tableNamesMap, table, tableConfig, queryConfig: config, tableAlias, nestedQueryRelation, joinOn, }: { fullSchema: Record; schema: TablesRelationalConfig; tableNamesMap: Record; table: SQLiteTable; tableConfig: TableRelationalConfig; queryConfig: true | DBQueryConfig<'many', true>; tableAlias: string; nestedQueryRelation?: Relation; joinOn?: SQL; }): BuildRelationalQueryResult { let selection: BuildRelationalQueryResult['selection'] = []; let limit, offset, orderBy: SQLiteSelectConfig['orderBy'] = [], where; const joins: SQLiteSelectJoinConfig[] = []; if (config === true) { const selectionEntries = Object.entries(tableConfig.columns); selection = selectionEntries.map(( [key, value], ) => ({ dbKey: value.name, tsKey: key, field: aliasedTableColumn(value as SQLiteColumn, tableAlias), relationTableTsKey: undefined, isJson: false, selection: [], })); } else { const aliasedColumns = Object.fromEntries( Object.entries(tableConfig.columns).map(([key, value]) => [key, aliasedTableColumn(value, tableAlias)]), ); if (config.where) { const whereSql = typeof config.where === 'function' ? config.where(aliasedColumns, getOperators()) : config.where; where = whereSql && mapColumnsInSQLToAlias(whereSql, tableAlias); } const fieldsSelection: { tsKey: string; value: SQLiteColumn | SQL.Aliased }[] = []; let selectedColumns: string[] = []; // Figure out which columns to select if (config.columns) { let isIncludeMode = false; for (const [field, value] of Object.entries(config.columns)) { if (value === undefined) { continue; } if (field in tableConfig.columns) { if (!isIncludeMode && value === true) { isIncludeMode = true; } selectedColumns.push(field); } } if (selectedColumns.length > 0) { selectedColumns = isIncludeMode ? selectedColumns.filter((c) => config.columns?.[c] === true) : Object.keys(tableConfig.columns).filter((key) => !selectedColumns.includes(key)); } } else { // Select all columns if selection is not specified selectedColumns = Object.keys(tableConfig.columns); } for (const field of selectedColumns) { const column = tableConfig.columns[field]! as SQLiteColumn; fieldsSelection.push({ tsKey: field, value: column }); } let selectedRelations: { tsKey: string; queryConfig: true | DBQueryConfig<'many', false>; relation: Relation; }[] = []; // Figure out which relations to select if (config.with) { selectedRelations = Object.entries(config.with) .filter((entry): entry is [typeof entry[0], NonNullable] => !!entry[1]) .map(([tsKey, queryConfig]) => ({ tsKey, queryConfig, relation: tableConfig.relations[tsKey]! })); } let extras; // Figure out which extras to select if (config.extras) { extras = typeof config.extras === 'function' ? config.extras(aliasedColumns, { sql }) : config.extras; for (const [tsKey, value] of Object.entries(extras)) { fieldsSelection.push({ tsKey, value: mapColumnsInAliasedSQLToAlias(value, tableAlias), }); } } // Transform `fieldsSelection` into `selection` // `fieldsSelection` shouldn't be used after this point for (const { tsKey, value } of fieldsSelection) { selection.push({ dbKey: is(value, SQL.Aliased) ? value.fieldAlias : tableConfig.columns[tsKey]!.name, tsKey, field: is(value, Column) ? aliasedTableColumn(value, tableAlias) : value, relationTableTsKey: undefined, isJson: false, selection: [], }); } let orderByOrig = typeof config.orderBy === 'function' ? config.orderBy(aliasedColumns, getOrderByOperators()) : config.orderBy ?? []; if (!Array.isArray(orderByOrig)) { orderByOrig = [orderByOrig]; } orderBy = orderByOrig.map((orderByValue) => { if (is(orderByValue, Column)) { return aliasedTableColumn(orderByValue, tableAlias) as SQLiteColumn; } return mapColumnsInSQLToAlias(orderByValue, tableAlias); }); limit = config.limit; offset = config.offset; // Process all relations for ( const { tsKey: selectedRelationTsKey, queryConfig: selectedRelationConfigValue, relation, } of selectedRelations ) { const normalizedRelation = normalizeRelation(schema, tableNamesMap, relation); const relationTableName = getTableUniqueName(relation.referencedTable); const relationTableTsName = tableNamesMap[relationTableName]!; const relationTableAlias = `${tableAlias}_${selectedRelationTsKey}`; // const relationTable = schema[relationTableTsName]!; const joinOn = and( ...normalizedRelation.fields.map((field, i) => eq( aliasedTableColumn(normalizedRelation.references[i]!, relationTableAlias), aliasedTableColumn(field, tableAlias), ) ), ); const builtRelation = this.buildRelationalQuery({ fullSchema, schema, tableNamesMap, table: fullSchema[relationTableTsName] as SQLiteTable, tableConfig: schema[relationTableTsName]!, queryConfig: is(relation, One) ? (selectedRelationConfigValue === true ? { limit: 1 } : { ...selectedRelationConfigValue, limit: 1 }) : selectedRelationConfigValue, tableAlias: relationTableAlias, joinOn, nestedQueryRelation: relation, }); const field = (sql`(${builtRelation.sql})`).as(selectedRelationTsKey); selection.push({ dbKey: selectedRelationTsKey, tsKey: selectedRelationTsKey, field, relationTableTsKey: relationTableTsName, isJson: true, selection: builtRelation.selection, }); } } if (selection.length === 0) { throw new DrizzleError({ message: `No fields selected for table "${tableConfig.tsName}" ("${tableAlias}"). You need to have at least one item in "columns", "with" or "extras". If you need to select all columns, omit the "columns" key or set it to undefined.`, }); } let result; where = and(joinOn, where); if (nestedQueryRelation) { let field = sql`json_array(${ sql.join( selection.map(({ field }) => is(field, SQLiteColumn) ? sql.identifier(this.casing.getColumnCasing(field)) : is(field, SQL.Aliased) ? field.sql : field ), sql`, `, ) })`; if (is(nestedQueryRelation, Many)) { field = sql`coalesce(json_group_array(${field}), json_array())`; } const nestedSelection = [{ dbKey: 'data', tsKey: 'data', field: field.as('data'), isJson: true, relationTableTsKey: tableConfig.tsName, selection, }]; const needsSubquery = limit !== undefined || offset !== undefined || orderBy.length > 0; if (needsSubquery) { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: [ { path: [], field: sql.raw('*'), }, ], where, limit, offset, orderBy, setOperators: [], }); where = undefined; limit = undefined; offset = undefined; orderBy = undefined; } else { result = aliasedTable(table, tableAlias); } result = this.buildSelectQuery({ table: is(result, SQLiteTable) ? result : new Subquery(result, {}, tableAlias), fields: {}, fieldsFlat: nestedSelection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), joins, where, limit, offset, orderBy, setOperators: [], }); } else { result = this.buildSelectQuery({ table: aliasedTable(table, tableAlias), fields: {}, fieldsFlat: selection.map(({ field }) => ({ path: [], field: is(field, Column) ? aliasedTableColumn(field, tableAlias) : field, })), joins, where, limit, offset, orderBy, setOperators: [], }); } return { tableTsKey: tableConfig.tsName, sql: result, selection, }; } } export class SQLiteSyncDialect extends SQLiteDialect { static override readonly [entityKind]: string = 'SQLiteSyncDialect'; migrate( migrations: MigrationMeta[], session: SQLiteSession<'sync', unknown, Record, TablesRelationalConfig>, config?: string | MigrationConfig, ): void { const migrationsTable = config === undefined ? '__drizzle_migrations' : typeof config === 'string' ? '__drizzle_migrations' : config.migrationsTable ?? '__drizzle_migrations'; const migrationTableCreate = sql` CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} ( id SERIAL PRIMARY KEY, hash text NOT NULL, created_at numeric ) `; session.run(migrationTableCreate); const dbMigrations = session.values<[number, string, string]>( sql`SELECT id, hash, created_at FROM ${sql.identifier(migrationsTable)} ORDER BY created_at DESC LIMIT 1`, ); const lastDbMigration = dbMigrations[0] ?? undefined; session.run(sql`BEGIN`); try { for (const migration of migrations) { if (!lastDbMigration || Number(lastDbMigration[2])! < migration.folderMillis) { for (const stmt of migration.sql) { session.run(sql.raw(stmt)); } session.run( sql`INSERT INTO ${ sql.identifier(migrationsTable) } ("hash", "created_at") VALUES(${migration.hash}, ${migration.folderMillis})`, ); } } session.run(sql`COMMIT`); } catch (e) { session.run(sql`ROLLBACK`); throw e; } } } export class SQLiteAsyncDialect extends SQLiteDialect { static override readonly [entityKind]: string = 'SQLiteAsyncDialect'; async migrate( migrations: MigrationMeta[], session: SQLiteSession<'async', any, any, any>, config?: string | MigrationConfig, ): Promise { const migrationsTable = config === undefined ? '__drizzle_migrations' : typeof config === 'string' ? '__drizzle_migrations' : config.migrationsTable ?? '__drizzle_migrations'; const migrationTableCreate = sql` CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} ( id SERIAL PRIMARY KEY, hash text NOT NULL, created_at numeric ) `; await session.run(migrationTableCreate); const dbMigrations = await session.values<[number, string, string]>( sql`SELECT id, hash, created_at FROM ${sql.identifier(migrationsTable)} ORDER BY created_at DESC LIMIT 1`, ); const lastDbMigration = dbMigrations[0] ?? undefined; await session.transaction(async (tx) => { for (const migration of migrations) { if (!lastDbMigration || Number(lastDbMigration[2])! < migration.folderMillis) { for (const stmt of migration.sql) { await tx.run(sql.raw(stmt)); } await tx.run( sql`INSERT INTO ${ sql.identifier(migrationsTable) } ("hash", "created_at") VALUES(${migration.hash}, ${migration.folderMillis})`, ); } } }); } } ================================================ FILE: drizzle-orm/src/sqlite-core/expressions.ts ================================================ import { bindIfParam } from '~/sql/expressions/index.ts'; import type { SQL, SQLChunk, SQLWrapper } from '~/sql/sql.ts'; import { sql } from '~/sql/sql.ts'; import type { SQLiteColumn } from '~/sqlite-core/columns/index.ts'; export * from '~/sql/expressions/index.ts'; export function concat(column: SQLiteColumn | SQL.Aliased, value: string | SQLWrapper): SQL { return sql`${column} || ${bindIfParam(value, column)}`; } export function substring( column: SQLiteColumn | SQL.Aliased, { from, for: _for }: { from?: number | SQLWrapper; for?: number | SQLWrapper }, ): SQL { const chunks: SQLChunk[] = [sql`substring(`, column]; if (from !== undefined) { chunks.push(sql` from `, bindIfParam(from, column)); } if (_for !== undefined) { chunks.push(sql` for `, bindIfParam(_for, column)); } chunks.push(sql`)`); return sql.join(chunks); } export function rowId(): SQL { return sql`rowid`; } ================================================ FILE: drizzle-orm/src/sqlite-core/foreign-keys.ts ================================================ import { entityKind } from '~/entity.ts'; import { TableName } from '~/table.utils.ts'; import type { AnySQLiteColumn, SQLiteColumn } from './columns/index.ts'; import type { SQLiteTable } from './table.ts'; export type UpdateDeleteAction = 'cascade' | 'restrict' | 'no action' | 'set null' | 'set default'; export type Reference = () => { readonly name?: string; readonly columns: SQLiteColumn[]; readonly foreignTable: SQLiteTable; readonly foreignColumns: SQLiteColumn[]; }; export class ForeignKeyBuilder { static readonly [entityKind]: string = 'SQLiteForeignKeyBuilder'; declare _: { brand: 'SQLiteForeignKeyBuilder'; foreignTableName: 'TForeignTableName'; }; /** @internal */ reference: Reference; /** @internal */ _onUpdate: UpdateDeleteAction | undefined; /** @internal */ _onDelete: UpdateDeleteAction | undefined; constructor( config: () => { name?: string; columns: SQLiteColumn[]; foreignColumns: SQLiteColumn[]; }, actions?: { onUpdate?: UpdateDeleteAction; onDelete?: UpdateDeleteAction; } | undefined, ) { this.reference = () => { const { name, columns, foreignColumns } = config(); return { name, columns, foreignTable: foreignColumns[0]!.table as SQLiteTable, foreignColumns }; }; if (actions) { this._onUpdate = actions.onUpdate; this._onDelete = actions.onDelete; } } onUpdate(action: UpdateDeleteAction): this { this._onUpdate = action; return this; } onDelete(action: UpdateDeleteAction): this { this._onDelete = action; return this; } /** @internal */ build(table: SQLiteTable): ForeignKey { return new ForeignKey(table, this); } } export class ForeignKey { static readonly [entityKind]: string = 'SQLiteForeignKey'; readonly reference: Reference; readonly onUpdate: UpdateDeleteAction | undefined; readonly onDelete: UpdateDeleteAction | undefined; constructor(readonly table: SQLiteTable, builder: ForeignKeyBuilder) { this.reference = builder.reference; this.onUpdate = builder._onUpdate; this.onDelete = builder._onDelete; } getName(): string { const { name, columns, foreignColumns } = this.reference(); const columnNames = columns.map((column) => column.name); const foreignColumnNames = foreignColumns.map((column) => column.name); const chunks = [ this.table[TableName], ...columnNames, foreignColumns[0]!.table[TableName], ...foreignColumnNames, ]; return name ?? `${chunks.join('_')}_fk`; } } type ColumnsWithTable< TTableName extends string, TColumns extends SQLiteColumn[], > = { [Key in keyof TColumns]: AnySQLiteColumn<{ tableName: TTableName }> }; /** * @deprecated please use `foreignKey({ columns: [], foreignColumns: [] })` syntax without callback * @param config * @returns */ export function foreignKey< TTableName extends string, TForeignTableName extends string, TColumns extends [AnySQLiteColumn<{ tableName: TTableName }>, ...AnySQLiteColumn<{ tableName: TTableName }>[]], >( config: () => { name?: string; columns: TColumns; foreignColumns: ColumnsWithTable; }, ): ForeignKeyBuilder; export function foreignKey< TTableName extends string, TForeignTableName extends string, TColumns extends [AnySQLiteColumn<{ tableName: TTableName }>, ...AnySQLiteColumn<{ tableName: TTableName }>[]], >( config: { name?: string; columns: TColumns; foreignColumns: ColumnsWithTable; }, ): ForeignKeyBuilder; export function foreignKey( config: any, ): ForeignKeyBuilder { function mappedConfig() { if (typeof config === 'function') { const { name, columns, foreignColumns } = config(); return { name, columns, foreignColumns, }; } return config; } return new ForeignKeyBuilder(mappedConfig); } ================================================ FILE: drizzle-orm/src/sqlite-core/index.ts ================================================ export * from './alias.ts'; export * from './checks.ts'; export * from './columns/index.ts'; export * from './db.ts'; export * from './dialect.ts'; export * from './foreign-keys.ts'; export * from './indexes.ts'; export * from './primary-keys.ts'; export * from './query-builders/index.ts'; export * from './session.ts'; export * from './subquery.ts'; export * from './table.ts'; export * from './unique-constraint.ts'; export * from './utils.ts'; export * from './view.ts'; ================================================ FILE: drizzle-orm/src/sqlite-core/indexes.ts ================================================ import { entityKind } from '~/entity.ts'; import type { SQL } from '~/sql/sql.ts'; import type { SQLiteColumn } from './columns/index.ts'; import type { SQLiteTable } from './table.ts'; export interface IndexConfig { name: string; columns: IndexColumn[]; unique: boolean; where: SQL | undefined; } export type IndexColumn = SQLiteColumn | SQL; export class IndexBuilderOn { static readonly [entityKind]: string = 'SQLiteIndexBuilderOn'; constructor(private name: string, private unique: boolean) {} on(...columns: [IndexColumn, ...IndexColumn[]]): IndexBuilder { return new IndexBuilder(this.name, columns, this.unique); } } export class IndexBuilder { static readonly [entityKind]: string = 'SQLiteIndexBuilder'; declare _: { brand: 'SQLiteIndexBuilder'; }; /** @internal */ config: IndexConfig; constructor(name: string, columns: IndexColumn[], unique: boolean) { this.config = { name, columns, unique, where: undefined, }; } /** * Condition for partial index. */ where(condition: SQL): this { this.config.where = condition; return this; } /** @internal */ build(table: SQLiteTable): Index { return new Index(this.config, table); } } export class Index { static readonly [entityKind]: string = 'SQLiteIndex'; declare _: { brand: 'SQLiteIndex'; }; readonly config: IndexConfig & { table: SQLiteTable }; constructor(config: IndexConfig, table: SQLiteTable) { this.config = { ...config, table }; } } export function index(name: string): IndexBuilderOn { return new IndexBuilderOn(name, false); } export function uniqueIndex(name: string): IndexBuilderOn { return new IndexBuilderOn(name, true); } ================================================ FILE: drizzle-orm/src/sqlite-core/primary-keys.ts ================================================ import { entityKind } from '~/entity.ts'; import type { AnySQLiteColumn, SQLiteColumn } from './columns/index.ts'; import { SQLiteTable } from './table.ts'; export function primaryKey< TTableName extends string, TColumn extends AnySQLiteColumn<{ tableName: TTableName }>, TColumns extends AnySQLiteColumn<{ tableName: TTableName }>[], >(config: { name?: string; columns: [TColumn, ...TColumns] }): PrimaryKeyBuilder; /** * @deprecated: Please use primaryKey({ columns: [] }) instead of this function * @param columns */ export function primaryKey< TTableName extends string, TColumns extends AnySQLiteColumn<{ tableName: TTableName }>[], >(...columns: TColumns): PrimaryKeyBuilder; export function primaryKey(...config: any) { if (config[0].columns) { return new PrimaryKeyBuilder(config[0].columns, config[0].name); } return new PrimaryKeyBuilder(config); } export class PrimaryKeyBuilder { static readonly [entityKind]: string = 'SQLitePrimaryKeyBuilder'; declare _: { brand: 'SQLitePrimaryKeyBuilder'; }; /** @internal */ columns: SQLiteColumn[]; /** @internal */ name?: string; constructor( columns: SQLiteColumn[], name?: string, ) { this.columns = columns; this.name = name; } /** @internal */ build(table: SQLiteTable): PrimaryKey { return new PrimaryKey(table, this.columns, this.name); } } export class PrimaryKey { static readonly [entityKind]: string = 'SQLitePrimaryKey'; readonly columns: SQLiteColumn[]; readonly name?: string; constructor(readonly table: SQLiteTable, columns: SQLiteColumn[], name?: string) { this.columns = columns; this.name = name; } getName(): string { return this.name ?? `${this.table[SQLiteTable.Symbol.Name]}_${this.columns.map((column) => column.name).join('_')}_pk`; } } ================================================ FILE: drizzle-orm/src/sqlite-core/query-builders/count.ts ================================================ import { entityKind } from '~/entity.ts'; import { SQL, sql, type SQLWrapper } from '~/sql/sql.ts'; import type { SQLiteSession } from '../session.ts'; import type { SQLiteTable } from '../table.ts'; import type { SQLiteView } from '../view.ts'; export class SQLiteCountBuilder< TSession extends SQLiteSession, > extends SQL implements Promise, SQLWrapper { private sql: SQL; static override readonly [entityKind] = 'SQLiteCountBuilderAsync'; [Symbol.toStringTag] = 'SQLiteCountBuilderAsync'; private session: TSession; private static buildEmbeddedCount( source: SQLiteTable | SQLiteView | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`(select count(*) from ${source}${sql.raw(' where ').if(filters)}${filters})`; } private static buildCount( source: SQLiteTable | SQLiteView | SQL | SQLWrapper, filters?: SQL, ): SQL { return sql`select count(*) from ${source}${sql.raw(' where ').if(filters)}${filters}`; } constructor( readonly params: { source: SQLiteTable | SQLiteView | SQL | SQLWrapper; filters?: SQL; session: TSession; }, ) { super(SQLiteCountBuilder.buildEmbeddedCount(params.source, params.filters).queryChunks); this.session = params.session; this.sql = SQLiteCountBuilder.buildCount( params.source, params.filters, ); } then( onfulfilled?: ((value: number) => TResult1 | PromiseLike) | null | undefined, onrejected?: ((reason: any) => TResult2 | PromiseLike) | null | undefined, ): Promise { return Promise.resolve(this.session.count(this.sql)).then( onfulfilled, onrejected, ); } catch( onRejected?: ((reason: any) => never | PromiseLike) | null | undefined, ): Promise { return this.then(undefined, onRejected); } finally(onFinally?: (() => void) | null | undefined): Promise { return this.then( (value) => { onFinally?.(); return value; }, (reason) => { onFinally?.(); throw reason; }, ); } } ================================================ FILE: drizzle-orm/src/sqlite-core/query-builders/delete.ts ================================================ import { entityKind } from '~/entity.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { SQLiteDialect } from '~/sqlite-core/dialect.ts'; import type { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts'; import { SQLiteTable } from '~/sqlite-core/table.ts'; import type { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { type DrizzleTypeError, orderSelectedFields, type ValueOrArray } from '~/utils.ts'; import type { SQLiteColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import type { SelectedFieldsFlat, SelectedFieldsOrdered } from './select.types.ts'; export type SQLiteDeleteWithout< T extends AnySQLiteDeleteBase, TDynamic extends boolean, K extends keyof T & string, > = TDynamic extends true ? T : Omit< SQLiteDeleteBase< T['_']['table'], T['_']['resultType'], T['_']['runResult'], T['_']['returning'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type SQLiteDelete< TTable extends SQLiteTable = SQLiteTable, TResultType extends 'sync' | 'async' = 'sync' | 'async', TRunResult = unknown, TReturning extends Record | undefined = undefined, > = SQLiteDeleteBase; export interface SQLiteDeleteConfig { where?: SQL | undefined; limit?: number | Placeholder; orderBy?: (SQLiteColumn | SQL | SQL.Aliased)[]; table: SQLiteTable; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } export type SQLiteDeleteReturningAll< T extends AnySQLiteDeleteBase, TDynamic extends boolean, > = SQLiteDeleteWithout< SQLiteDeleteBase< T['_']['table'], T['_']['resultType'], T['_']['runResult'], T['_']['table']['$inferSelect'], T['_']['dynamic'], T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type SQLiteDeleteReturning< T extends AnySQLiteDeleteBase, TDynamic extends boolean, TSelectedFields extends SelectedFieldsFlat, > = SQLiteDeleteWithout< SQLiteDeleteBase< T['_']['table'], T['_']['resultType'], T['_']['runResult'], SelectResultFields, T['_']['dynamic'], T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type SQLiteDeleteExecute = T['_']['returning'] extends undefined ? T['_']['runResult'] : T['_']['returning'][]; export type SQLiteDeletePrepare = SQLitePreparedQuery<{ type: T['_']['resultType']; run: T['_']['runResult']; all: T['_']['returning'] extends undefined ? DrizzleTypeError<'.all() cannot be used without .returning()'> : T['_']['returning'][]; get: T['_']['returning'] extends undefined ? DrizzleTypeError<'.get() cannot be used without .returning()'> : T['_']['returning'] | undefined; values: T['_']['returning'] extends undefined ? DrizzleTypeError<'.values() cannot be used without .returning()'> : any[][]; execute: SQLiteDeleteExecute; }>; export type SQLiteDeleteDynamic = SQLiteDelete< T['_']['table'], T['_']['resultType'], T['_']['runResult'], T['_']['returning'] >; export type AnySQLiteDeleteBase = SQLiteDeleteBase; export interface SQLiteDeleteBase< TTable extends SQLiteTable, TResultType extends 'sync' | 'async', TRunResult, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends QueryPromise, RunnableQuery, SQLWrapper { readonly _: { dialect: 'sqlite'; readonly table: TTable; readonly resultType: TResultType; readonly runResult: TRunResult; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TReturning extends undefined ? TRunResult : TReturning[]; }; } export class SQLiteDeleteBase< TTable extends SQLiteTable, // eslint-disable-next-line @typescript-eslint/no-unused-vars TResultType extends 'sync' | 'async', TRunResult, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise implements RunnableQuery, SQLWrapper { static override readonly [entityKind]: string = 'SQLiteDelete'; /** @internal */ config: SQLiteDeleteConfig; constructor( private table: TTable, private session: SQLiteSession, private dialect: SQLiteDialect, withList?: Subquery[], ) { super(); this.config = { table, withList }; } /** * Adds a `where` clause to the query. * * Calling this method will delete only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/delete} * * @param where the `where` clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be deleted. * * ```ts * // Delete all cars with green color * db.delete(cars).where(eq(cars.color, 'green')); * // or * db.delete(cars).where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Delete all BMW cars with a green color * db.delete(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Delete all cars with the green or blue color * db.delete(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where(where: SQL | undefined): SQLiteDeleteWithout { this.config.where = where; return this as any; } orderBy( builder: (deleteTable: TTable) => ValueOrArray, ): SQLiteDeleteWithout; orderBy(...columns: (SQLiteColumn | SQL | SQL.Aliased)[]): SQLiteDeleteWithout; orderBy( ...columns: | [(deleteTable: TTable) => ValueOrArray] | (SQLiteColumn | SQL | SQL.Aliased)[] ): SQLiteDeleteWithout { if (typeof columns[0] === 'function') { const orderBy = columns[0]( new Proxy( this.config.table[Table.Symbol.Columns], new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as any, ); const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; this.config.orderBy = orderByArray; } else { const orderByArray = columns as (SQLiteColumn | SQL | SQL.Aliased)[]; this.config.orderBy = orderByArray; } return this as any; } limit(limit: number | Placeholder): SQLiteDeleteWithout { this.config.limit = limit; return this as any; } /** * Adds a `returning` clause to the query. * * Calling this method will return the specified fields of the deleted rows. If no fields are specified, all fields will be returned. * * See docs: {@link https://orm.drizzle.team/docs/delete#delete-with-return} * * @example * ```ts * // Delete all cars with the green color and return all fields * const deletedCars: Car[] = await db.delete(cars) * .where(eq(cars.color, 'green')) * .returning(); * * // Delete all cars with the green color and return only their id and brand fields * const deletedCarsIdsAndBrands: { id: number, brand: string }[] = await db.delete(cars) * .where(eq(cars.color, 'green')) * .returning({ id: cars.id, brand: cars.brand }); * ``` */ returning(): SQLiteDeleteReturningAll; returning( fields: TSelectedFields, ): SQLiteDeleteReturning; returning( fields: SelectedFieldsFlat = this.table[SQLiteTable.Symbol.Columns], ): SQLiteDeleteReturning { this.config.returning = orderSelectedFields(fields); return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildDeleteQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } /** @internal */ _prepare(isOneTimeQuery = true): SQLiteDeletePrepare { return this.session[isOneTimeQuery ? 'prepareOneTimeQuery' : 'prepareQuery']( this.dialect.sqlToQuery(this.getSQL()), this.config.returning, this.config.returning ? 'all' : 'run', true, undefined, { type: 'delete', tables: extractUsedTable(this.config.table), }, ) as SQLiteDeletePrepare; } prepare(): SQLiteDeletePrepare { return this._prepare(false); } run: ReturnType['run'] = (placeholderValues) => { return this._prepare().run(placeholderValues); }; all: ReturnType['all'] = (placeholderValues) => { return this._prepare().all(placeholderValues); }; get: ReturnType['get'] = (placeholderValues) => { return this._prepare().get(placeholderValues); }; values: ReturnType['values'] = (placeholderValues) => { return this._prepare().values(placeholderValues); }; override async execute(placeholderValues?: Record): Promise> { return this._prepare().execute(placeholderValues) as SQLiteDeleteExecute; } $dynamic(): SQLiteDeleteDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/sqlite-core/query-builders/index.ts ================================================ export * from './delete.ts'; export * from './insert.ts'; export * from './query-builder.ts'; export * from './select.ts'; export * from './select.types.ts'; export * from './update.ts'; ================================================ FILE: drizzle-orm/src/sqlite-core/query-builders/insert.ts ================================================ import { entityKind, is } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; import { Param, SQL, sql } from '~/sql/sql.ts'; import type { SQLiteDialect } from '~/sqlite-core/dialect.ts'; import type { IndexColumn } from '~/sqlite-core/indexes.ts'; import type { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts'; import { SQLiteTable } from '~/sqlite-core/table.ts'; import type { Subquery } from '~/subquery.ts'; import { Columns, Table } from '~/table.ts'; import { type DrizzleTypeError, haveSameKeys, mapUpdateSet, orderSelectedFields, type Simplify } from '~/utils.ts'; import type { AnySQLiteColumn, SQLiteColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import { QueryBuilder } from './query-builder.ts'; import type { SelectedFieldsFlat, SelectedFieldsOrdered } from './select.types.ts'; import type { SQLiteUpdateSetSource } from './update.ts'; export interface SQLiteInsertConfig { table: TTable; values: Record[] | SQLiteInsertSelectQueryBuilder | SQL; withList?: Subquery[]; onConflict?: SQL[]; returning?: SelectedFieldsOrdered; select?: boolean; } export type SQLiteInsertValue = Simplify< { [Key in keyof TTable['$inferInsert']]: TTable['$inferInsert'][Key] | SQL | Placeholder; } >; export type SQLiteInsertSelectQueryBuilder = TypedQueryBuilder< { [K in keyof TTable['$inferInsert']]: AnySQLiteColumn | SQL | SQL.Aliased | TTable['$inferInsert'][K] } >; export class SQLiteInsertBuilder< TTable extends SQLiteTable, TResultType extends 'sync' | 'async', TRunResult, > { static readonly [entityKind]: string = 'SQLiteInsertBuilder'; constructor( protected table: TTable, protected session: SQLiteSession, protected dialect: SQLiteDialect, private withList?: Subquery[], ) {} values(value: SQLiteInsertValue): SQLiteInsertBase; values(values: SQLiteInsertValue[]): SQLiteInsertBase; values( values: SQLiteInsertValue | SQLiteInsertValue[], ): SQLiteInsertBase { values = Array.isArray(values) ? values : [values]; if (values.length === 0) { throw new Error('values() must be called with at least one value'); } const mappedValues = values.map((entry) => { const result: Record = {}; const cols = this.table[Table.Symbol.Columns]; for (const colKey of Object.keys(entry)) { const colValue = entry[colKey as keyof typeof entry]; result[colKey] = is(colValue, SQL) ? colValue : new Param(colValue, cols[colKey]); } return result; }); // if (mappedValues.length > 1 && mappedValues.some((t) => Object.keys(t).length === 0)) { // throw new Error( // `One of the values you want to insert is empty. In SQLite you can insert only one empty object per statement. For this case Drizzle with use "INSERT INTO ... DEFAULT VALUES" syntax`, // ); // } return new SQLiteInsertBase(this.table, mappedValues, this.session, this.dialect, this.withList); } select( selectQuery: (qb: QueryBuilder) => SQLiteInsertSelectQueryBuilder, ): SQLiteInsertBase; select(selectQuery: (qb: QueryBuilder) => SQL): SQLiteInsertBase; select(selectQuery: SQL): SQLiteInsertBase; select(selectQuery: SQLiteInsertSelectQueryBuilder): SQLiteInsertBase; select( selectQuery: | SQL | SQLiteInsertSelectQueryBuilder | ((qb: QueryBuilder) => SQLiteInsertSelectQueryBuilder | SQL), ): SQLiteInsertBase { const select = typeof selectQuery === 'function' ? selectQuery(new QueryBuilder()) : selectQuery; if ( !is(select, SQL) && !haveSameKeys(this.table[Columns], select._.selectedFields) ) { throw new Error( 'Insert select error: selected fields are not the same or are in a different order compared to the table definition', ); } return new SQLiteInsertBase(this.table, select, this.session, this.dialect, this.withList, true); } } export type SQLiteInsertWithout = TDynamic extends true ? T : Omit< SQLiteInsertBase< T['_']['table'], T['_']['resultType'], T['_']['runResult'], T['_']['returning'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type SQLiteInsertReturning< T extends AnySQLiteInsert, TDynamic extends boolean, TSelectedFields extends SelectedFieldsFlat, > = SQLiteInsertWithout< SQLiteInsertBase< T['_']['table'], T['_']['resultType'], T['_']['runResult'], SelectResultFields, TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type SQLiteInsertReturningAll< T extends AnySQLiteInsert, TDynamic extends boolean, > = SQLiteInsertWithout< SQLiteInsertBase< T['_']['table'], T['_']['resultType'], T['_']['runResult'], T['_']['table']['$inferSelect'], TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type SQLiteInsertOnConflictDoUpdateConfig = { target: IndexColumn | IndexColumn[]; /** @deprecated - use either `targetWhere` or `setWhere` */ where?: SQL; // TODO: add tests for targetWhere and setWhere targetWhere?: SQL; setWhere?: SQL; set: SQLiteUpdateSetSource; }; export type SQLiteInsertDynamic = SQLiteInsert< T['_']['table'], T['_']['resultType'], T['_']['runResult'], T['_']['returning'] >; export type SQLiteInsertExecute = T['_']['returning'] extends undefined ? T['_']['runResult'] : T['_']['returning'][]; export type SQLiteInsertPrepare = SQLitePreparedQuery< { type: T['_']['resultType']; run: T['_']['runResult']; all: T['_']['returning'] extends undefined ? DrizzleTypeError<'.all() cannot be used without .returning()'> : T['_']['returning'][]; get: T['_']['returning'] extends undefined ? DrizzleTypeError<'.get() cannot be used without .returning()'> : T['_']['returning']; values: T['_']['returning'] extends undefined ? DrizzleTypeError<'.values() cannot be used without .returning()'> : any[][]; execute: SQLiteInsertExecute; } >; export type AnySQLiteInsert = SQLiteInsertBase; export type SQLiteInsert< TTable extends SQLiteTable = SQLiteTable, TResultType extends 'sync' | 'async' = 'sync' | 'async', TRunResult = unknown, TReturning = any, > = SQLiteInsertBase; export interface SQLiteInsertBase< TTable extends SQLiteTable, TResultType extends 'sync' | 'async', TRunResult, TReturning = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends SQLWrapper, QueryPromise, RunnableQuery { readonly _: { readonly dialect: 'sqlite'; readonly table: TTable; readonly resultType: TResultType; readonly runResult: TRunResult; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TReturning extends undefined ? TRunResult : TReturning[]; }; } export class SQLiteInsertBase< TTable extends SQLiteTable, // eslint-disable-next-line @typescript-eslint/no-unused-vars TResultType extends 'sync' | 'async', TRunResult, TReturning = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise implements RunnableQuery, SQLWrapper { static override readonly [entityKind]: string = 'SQLiteInsert'; /** @internal */ config: SQLiteInsertConfig; constructor( table: TTable, values: SQLiteInsertConfig['values'], private session: SQLiteSession, private dialect: SQLiteDialect, withList?: Subquery[], select?: boolean, ) { super(); this.config = { table, values: values as any, withList, select }; } /** * Adds a `returning` clause to the query. * * Calling this method will return the specified fields of the inserted rows. If no fields are specified, all fields will be returned. * * See docs: {@link https://orm.drizzle.team/docs/insert#insert-returning} * * @example * ```ts * // Insert one row and return all fields * const insertedCar: Car[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning(); * * // Insert one row and return only the id * const insertedCarId: { id: number }[] = await db.insert(cars) * .values({ brand: 'BMW' }) * .returning({ id: cars.id }); * ``` */ returning(): SQLiteInsertReturningAll; returning( fields: TSelectedFields, ): SQLiteInsertReturning; returning( fields: SelectedFieldsFlat = this.config.table[SQLiteTable.Symbol.Columns], ): SQLiteInsertWithout { this.config.returning = orderSelectedFields(fields); return this as any; } /** * Adds an `on conflict do nothing` clause to the query. * * Calling this method simply avoids inserting a row as its alternative action. * * See docs: {@link https://orm.drizzle.team/docs/insert#on-conflict-do-nothing} * * @param config The `target` and `where` clauses. * * @example * ```ts * // Insert one row and cancel the insert if there's a conflict * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoNothing(); * * // Explicitly specify conflict target * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoNothing({ target: cars.id }); * ``` */ onConflictDoNothing(config: { target?: IndexColumn | IndexColumn[]; where?: SQL } = {}): this { if (!this.config.onConflict) this.config.onConflict = []; if (config.target === undefined) { this.config.onConflict.push(sql` on conflict do nothing`); } else { const targetSql = Array.isArray(config.target) ? sql`${config.target}` : sql`${[config.target]}`; const whereSql = config.where ? sql` where ${config.where}` : sql``; this.config.onConflict.push(sql` on conflict ${targetSql} do nothing${whereSql}`); } return this; } /** * Adds an `on conflict do update` clause to the query. * * Calling this method will update the existing row that conflicts with the row proposed for insertion as its alternative action. * * See docs: {@link https://orm.drizzle.team/docs/insert#upserts-and-conflicts} * * @param config The `target`, `set` and `where` clauses. * * @example * ```ts * // Update the row if there's a conflict * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoUpdate({ * target: cars.id, * set: { brand: 'Porsche' } * }); * * // Upsert with 'where' clause * await db.insert(cars) * .values({ id: 1, brand: 'BMW' }) * .onConflictDoUpdate({ * target: cars.id, * set: { brand: 'newBMW' }, * where: sql`${cars.createdAt} > '2023-01-01'::date`, * }); * ``` */ onConflictDoUpdate(config: SQLiteInsertOnConflictDoUpdateConfig): this { if (config.where && (config.targetWhere || config.setWhere)) { throw new Error( 'You cannot use both "where" and "targetWhere"/"setWhere" at the same time - "where" is deprecated, use "targetWhere" or "setWhere" instead.', ); } if (!this.config.onConflict) this.config.onConflict = []; const whereSql = config.where ? sql` where ${config.where}` : undefined; const targetWhereSql = config.targetWhere ? sql` where ${config.targetWhere}` : undefined; const setWhereSql = config.setWhere ? sql` where ${config.setWhere}` : undefined; const targetSql = Array.isArray(config.target) ? sql`${config.target}` : sql`${[config.target]}`; const setSql = this.dialect.buildUpdateSet(this.config.table, mapUpdateSet(this.config.table, config.set)); this.config.onConflict.push( sql` on conflict ${targetSql}${targetWhereSql} do update set ${setSql}${whereSql}${setWhereSql}`, ); return this; } /** @internal */ getSQL(): SQL { return this.dialect.buildInsertQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } /** @internal */ _prepare(isOneTimeQuery = true): SQLiteInsertPrepare { return this.session[isOneTimeQuery ? 'prepareOneTimeQuery' : 'prepareQuery']( this.dialect.sqlToQuery(this.getSQL()), this.config.returning, this.config.returning ? 'all' : 'run', true, undefined, { type: 'insert', tables: extractUsedTable(this.config.table), }, ) as SQLiteInsertPrepare; } prepare(): SQLiteInsertPrepare { return this._prepare(false); } run: ReturnType['run'] = (placeholderValues) => { return this._prepare().run(placeholderValues); }; all: ReturnType['all'] = (placeholderValues) => { return this._prepare().all(placeholderValues); }; get: ReturnType['get'] = (placeholderValues) => { return this._prepare().get(placeholderValues); }; values: ReturnType['values'] = (placeholderValues) => { return this._prepare().values(placeholderValues); }; override async execute(): Promise> { return (this.config.returning ? this.all() : this.run()) as SQLiteInsertExecute; } $dynamic(): SQLiteInsertDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/sqlite-core/query-builders/query-builder.ts ================================================ import { entityKind, is } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import type { SQLiteDialectConfig } from '~/sqlite-core/dialect.ts'; import { SQLiteDialect, SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import type { WithBuilder } from '~/sqlite-core/subquery.ts'; import { WithSubquery } from '~/subquery.ts'; import { SQLiteSelectBuilder } from './select.ts'; import type { SelectedFields } from './select.types.ts'; export class QueryBuilder { static readonly [entityKind]: string = 'SQLiteQueryBuilder'; private dialect: SQLiteDialect | undefined; private dialectConfig: SQLiteDialectConfig | undefined; constructor(dialect?: SQLiteDialect | SQLiteDialectConfig) { this.dialect = is(dialect, SQLiteDialect) ? dialect : undefined; this.dialectConfig = is(dialect, SQLiteDialect) ? undefined : dialect; } $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const queryBuilder = this; const as = ( qb: | TypedQueryBuilder | SQL | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), ) => { if (typeof qb === 'function') { qb = qb(queryBuilder); } return new Proxy( new WithSubquery( qb.getSQL(), selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, alias, true, ), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as any; }; return { as }; }; with(...queries: WithSubquery[]) { const self = this; function select(): SQLiteSelectBuilder; function select( fields: TSelection, ): SQLiteSelectBuilder; function select( fields?: TSelection, ): SQLiteSelectBuilder { return new SQLiteSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), withList: queries, }); } function selectDistinct(): SQLiteSelectBuilder; function selectDistinct( fields: TSelection, ): SQLiteSelectBuilder; function selectDistinct( fields?: TSelection, ): SQLiteSelectBuilder { return new SQLiteSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: self.getDialect(), withList: queries, distinct: true, }); } return { select, selectDistinct }; } select(): SQLiteSelectBuilder; select( fields: TSelection, ): SQLiteSelectBuilder; select( fields?: TSelection, ): SQLiteSelectBuilder { return new SQLiteSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect() }); } selectDistinct(): SQLiteSelectBuilder; selectDistinct( fields: TSelection, ): SQLiteSelectBuilder; selectDistinct( fields?: TSelection, ): SQLiteSelectBuilder { return new SQLiteSelectBuilder({ fields: fields ?? undefined, session: undefined, dialect: this.getDialect(), distinct: true, }); } // Lazy load dialect to avoid circular dependency private getDialect() { if (!this.dialect) { this.dialect = new SQLiteSyncDialect(this.dialectConfig); } return this.dialect; } } ================================================ FILE: drizzle-orm/src/sqlite-core/query-builders/query.ts ================================================ import { entityKind } from '~/entity.ts'; import { QueryPromise } from '~/query-promise.ts'; import { type BuildQueryResult, type BuildRelationalQueryResult, type DBQueryConfig, mapRelationalRow, type TableRelationalConfig, type TablesRelationalConfig, } from '~/relations.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { Query, QueryWithTypings, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { KnownKeysOnly } from '~/utils.ts'; import type { SQLiteDialect } from '../dialect.ts'; import type { PreparedQueryConfig, SQLitePreparedQuery, SQLiteSession } from '../session.ts'; import type { SQLiteTable } from '../table.ts'; export type SQLiteRelationalQueryKind = TMode extends 'async' ? SQLiteRelationalQuery : SQLiteSyncRelationalQuery; export class RelationalQueryBuilder< TMode extends 'sync' | 'async', TFullSchema extends Record, TSchema extends TablesRelationalConfig, TFields extends TableRelationalConfig, > { static readonly [entityKind]: string = 'SQLiteAsyncRelationalQueryBuilder'; constructor( protected mode: TMode, protected fullSchema: Record, protected schema: TSchema, protected tableNamesMap: Record, protected table: SQLiteTable, protected tableConfig: TableRelationalConfig, protected dialect: SQLiteDialect, protected session: SQLiteSession<'async', unknown, TFullSchema, TSchema>, ) {} findMany>( config?: KnownKeysOnly>, ): SQLiteRelationalQueryKind[]> { return (this.mode === 'sync' ? new SQLiteSyncRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? (config as DBQueryConfig<'many', true>) : {}, 'many', ) : new SQLiteRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? (config as DBQueryConfig<'many', true>) : {}, 'many', )) as SQLiteRelationalQueryKind[]>; } findFirst, 'limit'>>( config?: KnownKeysOnly, 'limit'>>, ): SQLiteRelationalQueryKind | undefined> { return (this.mode === 'sync' ? new SQLiteSyncRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? { ...(config as DBQueryConfig<'many', true> | undefined), limit: 1 } : { limit: 1 }, 'first', ) : new SQLiteRelationalQuery( this.fullSchema, this.schema, this.tableNamesMap, this.table, this.tableConfig, this.dialect, this.session, config ? { ...(config as DBQueryConfig<'many', true> | undefined), limit: 1 } : { limit: 1 }, 'first', )) as SQLiteRelationalQueryKind | undefined>; } } export class SQLiteRelationalQuery extends QueryPromise implements RunnableQuery, SQLWrapper { static override readonly [entityKind]: string = 'SQLiteAsyncRelationalQuery'; declare readonly _: { readonly dialect: 'sqlite'; readonly type: TType; readonly result: TResult; }; /** @internal */ mode: 'many' | 'first'; constructor( private fullSchema: Record, private schema: TablesRelationalConfig, private tableNamesMap: Record, /** @internal */ public table: SQLiteTable, private tableConfig: TableRelationalConfig, private dialect: SQLiteDialect, private session: SQLiteSession<'sync' | 'async', unknown, Record, TablesRelationalConfig>, private config: DBQueryConfig<'many', true> | true, mode: 'many' | 'first', ) { super(); this.mode = mode; } /** @internal */ getSQL(): SQL { return this.dialect.buildRelationalQuery({ fullSchema: this.fullSchema, schema: this.schema, tableNamesMap: this.tableNamesMap, table: this.table, tableConfig: this.tableConfig, queryConfig: this.config, tableAlias: this.tableConfig.tsName, }).sql as SQL; } /** @internal */ _prepare( isOneTimeQuery = false, ): SQLitePreparedQuery { const { query, builtQuery } = this._toSQL(); return this.session[isOneTimeQuery ? 'prepareOneTimeQuery' : 'prepareQuery']( builtQuery, undefined, this.mode === 'first' ? 'get' : 'all', true, (rawRows, mapColumnValue) => { const rows = rawRows.map((row) => mapRelationalRow(this.schema, this.tableConfig, row, query.selection, mapColumnValue) ); if (this.mode === 'first') { return rows[0] as TResult; } return rows as TResult; }, ) as SQLitePreparedQuery; } prepare(): SQLitePreparedQuery { return this._prepare(false); } private _toSQL(): { query: BuildRelationalQueryResult; builtQuery: QueryWithTypings } { const query = this.dialect.buildRelationalQuery({ fullSchema: this.fullSchema, schema: this.schema, tableNamesMap: this.tableNamesMap, table: this.table, tableConfig: this.tableConfig, queryConfig: this.config, tableAlias: this.tableConfig.tsName, }); const builtQuery = this.dialect.sqlToQuery(query.sql as SQL); return { query, builtQuery }; } toSQL(): Query { return this._toSQL().builtQuery; } /** @internal */ executeRaw(): TResult { if (this.mode === 'first') { return this._prepare(false).get() as TResult; } return this._prepare(false).all() as TResult; } override async execute(): Promise { return this.executeRaw(); } } export class SQLiteSyncRelationalQuery extends SQLiteRelationalQuery<'sync', TResult> { static override readonly [entityKind]: string = 'SQLiteSyncRelationalQuery'; sync(): TResult { return this.executeRaw(); } } ================================================ FILE: drizzle-orm/src/sqlite-core/query-builders/raw.ts ================================================ import { entityKind } from '~/entity.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import type { PreparedQuery } from '~/session.ts'; import type { SQL, SQLWrapper } from '~/sql/sql.ts'; import type { SQLiteAsyncDialect } from '../dialect.ts'; type SQLiteRawAction = 'all' | 'get' | 'values' | 'run'; export interface SQLiteRawConfig { action: SQLiteRawAction; } export interface SQLiteRaw extends QueryPromise, RunnableQuery, SQLWrapper {} export class SQLiteRaw extends QueryPromise implements RunnableQuery, SQLWrapper, PreparedQuery { static override readonly [entityKind]: string = 'SQLiteRaw'; declare readonly _: { readonly dialect: 'sqlite'; readonly result: TResult; }; /** @internal */ config: SQLiteRawConfig; constructor( public execute: () => Promise, /** @internal */ public getSQL: () => SQL, action: SQLiteRawAction, private dialect: SQLiteAsyncDialect, private mapBatchResult: (result: unknown) => unknown, ) { super(); this.config = { action }; } getQuery() { return { ...this.dialect.sqlToQuery(this.getSQL()), method: this.config.action }; } mapResult(result: unknown, isFromBatch?: boolean) { return isFromBatch ? this.mapBatchResult(result) : result; } _prepare(): PreparedQuery { return this; } /** @internal */ isResponseInArrayMode(): boolean { return false; } } ================================================ FILE: drizzle-orm/src/sqlite-core/query-builders/select.ts ================================================ import type { CacheConfig, WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { BuildSubquerySelection, GetSelectTableName, GetSelectTableSelection, JoinNullability, JoinType, SelectMode, SelectResult, SetOperator, } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import { SQL, View } from '~/sql/sql.ts'; import type { ColumnsSelection, Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; import type { SQLiteColumn } from '~/sqlite-core/columns/index.ts'; import type { SQLiteDialect } from '~/sqlite-core/dialect.ts'; import type { SQLiteSession } from '~/sqlite-core/session.ts'; import type { SubqueryWithSelection } from '~/sqlite-core/subquery.ts'; import type { SQLiteTable } from '~/sqlite-core/table.ts'; import { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { applyMixins, getTableColumns, getTableLikeName, haveSameKeys, orderSelectedFields, type ValueOrArray, } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import { extractUsedTable } from '../utils.ts'; import { SQLiteViewBase } from '../view-base.ts'; import type { AnySQLiteSelect, CreateSQLiteSelectFromBuilderMode, GetSQLiteSetOperators, SelectedFields, SetOperatorRightSelect, SQLiteCreateSetOperatorFn, SQLiteSelectConfig, SQLiteSelectCrossJoinFn, SQLiteSelectDynamic, SQLiteSelectExecute, SQLiteSelectHKT, SQLiteSelectHKTBase, SQLiteSelectJoinFn, SQLiteSelectPrepare, SQLiteSelectWithout, SQLiteSetOperatorExcludedMethods, SQLiteSetOperatorWithResult, } from './select.types.ts'; export class SQLiteSelectBuilder< TSelection extends SelectedFields | undefined, TResultType extends 'sync' | 'async', TRunResult, TBuilderMode extends 'db' | 'qb' = 'db', > { static readonly [entityKind]: string = 'SQLiteSelectBuilder'; private fields: TSelection; private session: SQLiteSession | undefined; private dialect: SQLiteDialect; private withList: Subquery[] | undefined; private distinct: boolean | undefined; constructor( config: { fields: TSelection; session: SQLiteSession | undefined; dialect: SQLiteDialect; withList?: Subquery[]; distinct?: boolean; }, ) { this.fields = config.fields; this.session = config.session; this.dialect = config.dialect; this.withList = config.withList; this.distinct = config.distinct; } from( source: TFrom, ): CreateSQLiteSelectFromBuilderMode< TBuilderMode, GetSelectTableName, TResultType, TRunResult, TSelection extends undefined ? GetSelectTableSelection : TSelection, TSelection extends undefined ? 'single' : 'partial' > { const isPartialSelect = !!this.fields; let fields: SelectedFields; if (this.fields) { fields = this.fields; } else if (is(source, Subquery)) { // This is required to use the proxy handler to get the correct field values from the subquery fields = Object.fromEntries( Object.keys(source._.selectedFields).map(( key, ) => [key, source[key as unknown as keyof typeof source] as unknown as SelectedFields[string]]), ); } else if (is(source, SQLiteViewBase)) { fields = source[ViewBaseConfig].selectedFields as SelectedFields; } else if (is(source, SQL)) { fields = {}; } else { fields = getTableColumns(source); } return new SQLiteSelectBase({ table: source, fields, isPartialSelect, session: this.session, dialect: this.dialect, withList: this.withList, distinct: this.distinct, }) as any; } } export abstract class SQLiteSelectQueryBuilderBase< THKT extends SQLiteSelectHKTBase, TTableName extends string | undefined, TResultType extends 'sync' | 'async', TRunResult, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends TypedQueryBuilder { static override readonly [entityKind]: string = 'SQLiteSelectQueryBuilder'; override readonly _: { readonly dialect: 'sqlite'; readonly hkt: THKT; readonly tableName: TTableName; readonly resultType: TResultType; readonly runResult: TRunResult; readonly selection: TSelection; readonly selectMode: TSelectMode; readonly nullabilityMap: TNullabilityMap; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TResult; readonly selectedFields: TSelectedFields; readonly config: SQLiteSelectConfig; }; /** @internal */ config: SQLiteSelectConfig; protected joinsNotNullableMap: Record; private tableName: string | undefined; private isPartialSelect: boolean; protected session: SQLiteSession | undefined; protected dialect: SQLiteDialect; protected cacheConfig?: WithCacheConfig = undefined; protected usedTables: Set = new Set(); constructor( { table, fields, isPartialSelect, session, dialect, withList, distinct }: { table: SQLiteSelectConfig['table']; fields: SQLiteSelectConfig['fields']; isPartialSelect: boolean; session: SQLiteSession | undefined; dialect: SQLiteDialect; withList: Subquery[] | undefined; distinct: boolean | undefined; }, ) { super(); this.config = { withList, table, fields: { ...fields }, distinct, setOperators: [], }; this.isPartialSelect = isPartialSelect; this.session = session; this.dialect = dialect; this._ = { selectedFields: fields as TSelectedFields, config: this.config, } as this['_']; this.tableName = getTableLikeName(table); this.joinsNotNullableMap = typeof this.tableName === 'string' ? { [this.tableName]: true } : {}; for (const item of extractUsedTable(table)) this.usedTables.add(item); } /** @internal */ getUsedTables() { return [...this.usedTables]; } private createJoin( joinType: TJoinType, ): 'cross' extends TJoinType ? SQLiteSelectCrossJoinFn : SQLiteSelectJoinFn { return ( table: SQLiteTable | Subquery | SQLiteViewBase | SQL, on?: ((aliases: TSelection) => SQL | undefined) | SQL | undefined, ) => { const baseTableName = this.tableName; const tableName = getTableLikeName(table); // store all tables used in a query for (const item of extractUsedTable(table)) this.usedTables.add(item); if (typeof tableName === 'string' && this.config.joins?.some((join) => join.alias === tableName)) { throw new Error(`Alias "${tableName}" is already used in this query`); } if (!this.isPartialSelect) { // If this is the first join and this is not a partial select and we're not selecting from raw SQL, "move" the fields from the main table to the nested object if (Object.keys(this.joinsNotNullableMap).length === 1 && typeof baseTableName === 'string') { this.config.fields = { [baseTableName]: this.config.fields, }; } if (typeof tableName === 'string' && !is(table, SQL)) { const selection = is(table, Subquery) ? table._.selectedFields : is(table, View) ? table[ViewBaseConfig].selectedFields : table[Table.Symbol.Columns]; this.config.fields[tableName] = selection; } } if (typeof on === 'function') { on = on( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } if (!this.config.joins) { this.config.joins = []; } this.config.joins.push({ on, table, joinType, alias: tableName }); if (typeof tableName === 'string') { switch (joinType) { case 'left': { this.joinsNotNullableMap[tableName] = false; break; } case 'right': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = true; break; } case 'cross': case 'inner': { this.joinsNotNullableMap[tableName] = true; break; } case 'full': { this.joinsNotNullableMap = Object.fromEntries( Object.entries(this.joinsNotNullableMap).map(([key]) => [key, false]), ); this.joinsNotNullableMap[tableName] = false; break; } } } return this as any; }; } /** * Executes a `left join` operation by adding another table to the current query. * * Calling this method associates each row of the table with the corresponding row from the joined table, if a match is found. If no matching row exists, it sets all columns of the joined table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#left-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User; pets: Pet | null; }[] = await db.select() * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .leftJoin(pets, eq(users.id, pets.ownerId)) * ``` */ leftJoin = this.createJoin('left'); /** * Executes a `right join` operation by adding another table to the current query. * * Calling this method associates each row of the joined table with the corresponding row from the main table, if a match is found. If no matching row exists, it sets all columns of the main table to null. * * See docs: {@link https://orm.drizzle.team/docs/joins#right-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User | null; pets: Pet; }[] = await db.select() * .from(users) * .rightJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number | null; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .rightJoin(pets, eq(users.id, pets.ownerId)) * ``` */ rightJoin = this.createJoin('right'); /** * Executes an `inner join` operation, creating a new table by combining rows from two tables that have matching values. * * Calling this method retrieves rows that have corresponding entries in both joined tables. Rows without matching entries in either table are excluded, resulting in a table that includes only matching pairs. * * See docs: {@link https://orm.drizzle.team/docs/joins#inner-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() * .from(users) * .innerJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .innerJoin(pets, eq(users.id, pets.ownerId)) * ``` */ innerJoin = this.createJoin('inner'); /** * Executes a `full join` operation by combining rows from two tables into a new table. * * Calling this method retrieves all rows from both main and joined tables, merging rows with matching values and filling in `null` for non-matching columns. * * See docs: {@link https://orm.drizzle.team/docs/joins#full-join} * * @param table the table to join. * @param on the `on` clause. * * @example * * ```ts * // Select all users and their pets * const usersWithPets: { user: User | null; pets: Pet | null; }[] = await db.select() * .from(users) * .fullJoin(pets, eq(users.id, pets.ownerId)) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number | null; petId: number | null; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .fullJoin(pets, eq(users.id, pets.ownerId)) * ``` */ fullJoin = this.createJoin('full'); /** * Executes a `cross join` operation by combining rows from two tables into a new table. * * Calling this method retrieves all rows from both main and joined tables, merging all rows from each table. * * See docs: {@link https://orm.drizzle.team/docs/joins#cross-join} * * @param table the table to join. * * @example * * ```ts * // Select all users, each user with every pet * const usersWithPets: { user: User; pets: Pet; }[] = await db.select() * .from(users) * .crossJoin(pets) * * // Select userId and petId * const usersIdsAndPetIds: { userId: number; petId: number; }[] = await db.select({ * userId: users.id, * petId: pets.id, * }) * .from(users) * .crossJoin(pets) * ``` */ crossJoin = this.createJoin('cross'); private createSetOperator( type: SetOperator, isAll: boolean, ): >( rightSelection: | ((setOperators: GetSQLiteSetOperators) => SetOperatorRightSelect) | SetOperatorRightSelect, ) => SQLiteSelectWithout< this, TDynamic, SQLiteSetOperatorExcludedMethods, true > { return (rightSelection) => { const rightSelect = (typeof rightSelection === 'function' ? rightSelection(getSQLiteSetOperators()) : rightSelection) as TypedQueryBuilder< any, TResult >; if (!haveSameKeys(this.getSelectedFields(), rightSelect.getSelectedFields())) { throw new Error( 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', ); } this.config.setOperators.push({ type, isAll, rightSelect }); return this as any; }; } /** * Adds `union` set operator to the query. * * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} * * @example * * ```ts * // Select all unique names from customers and users tables * await db.select({ name: users.name }) * .from(users) * .union( * db.select({ name: customers.name }).from(customers) * ); * // or * import { union } from 'drizzle-orm/sqlite-core' * * await union( * db.select({ name: users.name }).from(users), * db.select({ name: customers.name }).from(customers) * ); * ``` */ union = this.createSetOperator('union', false); /** * Adds `union all` set operator to the query. * * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} * * @example * * ```ts * // Select all transaction ids from both online and in-store sales * await db.select({ transaction: onlineSales.transactionId }) * .from(onlineSales) * .unionAll( * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * // or * import { unionAll } from 'drizzle-orm/sqlite-core' * * await unionAll( * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * ``` */ unionAll = this.createSetOperator('union', true); /** * Adds `intersect` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} * * @example * * ```ts * // Select course names that are offered in both departments A and B * await db.select({ courseName: depA.courseName }) * .from(depA) * .intersect( * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * import { intersect } from 'drizzle-orm/sqlite-core' * * await intersect( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ intersect = this.createSetOperator('intersect', false); /** * Adds `except` set operator to the query. * * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} * * @example * * ```ts * // Select all courses offered in department A but not in department B * await db.select({ courseName: depA.courseName }) * .from(depA) * .except( * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * import { except } from 'drizzle-orm/sqlite-core' * * await except( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ except = this.createSetOperator('except', false); /** @internal */ addSetOperators(setOperators: SQLiteSelectConfig['setOperators']): SQLiteSelectWithout< this, TDynamic, SQLiteSetOperatorExcludedMethods, true > { this.config.setOperators.push(...setOperators); return this as any; } /** * Adds a `where` clause to the query. * * Calling this method will select only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/select#filtering} * * @param where the `where` clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be selected. * * ```ts * // Select all cars with green color * await db.select().from(cars).where(eq(cars.color, 'green')); * // or * await db.select().from(cars).where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Select all BMW cars with a green color * await db.select().from(cars).where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Select all cars with the green or blue color * await db.select().from(cars).where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where( where: ((aliases: TSelection) => SQL | undefined) | SQL | undefined, ): SQLiteSelectWithout { if (typeof where === 'function') { where = where( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } this.config.where = where; return this as any; } /** * Adds a `having` clause to the query. * * Calling this method will select only those rows that fulfill a specified condition. It is typically used with aggregate functions to filter the aggregated data based on a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} * * @param having the `having` clause. * * @example * * ```ts * // Select all brands with more than one car * await db.select({ * brand: cars.brand, * count: sql`cast(count(${cars.id}) as int)`, * }) * .from(cars) * .groupBy(cars.brand) * .having(({ count }) => gt(count, 1)); * ``` */ having( having: ((aliases: this['_']['selection']) => SQL | undefined) | SQL | undefined, ): SQLiteSelectWithout { if (typeof having === 'function') { having = having( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as TSelection, ); } this.config.having = having; return this as any; } /** * Adds a `group by` clause to the query. * * Calling this method will group rows that have the same values into summary rows, often used for aggregation purposes. * * See docs: {@link https://orm.drizzle.team/docs/select#aggregations} * * @example * * ```ts * // Group and count people by their last names * await db.select({ * lastName: people.lastName, * count: sql`cast(count(*) as int)` * }) * .from(people) * .groupBy(people.lastName); * ``` */ groupBy( builder: (aliases: this['_']['selection']) => ValueOrArray, ): SQLiteSelectWithout; groupBy(...columns: (SQLiteColumn | SQL)[]): SQLiteSelectWithout; groupBy( ...columns: | [(aliases: this['_']['selection']) => ValueOrArray] | (SQLiteColumn | SQL | SQL.Aliased)[] ): SQLiteSelectWithout { if (typeof columns[0] === 'function') { const groupBy = columns[0]( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as TSelection, ); this.config.groupBy = Array.isArray(groupBy) ? groupBy : [groupBy]; } else { this.config.groupBy = columns as (SQLiteColumn | SQL | SQL.Aliased)[]; } return this as any; } /** * Adds an `order by` clause to the query. * * Calling this method will sort the result-set in ascending or descending order. By default, the sort order is ascending. * * See docs: {@link https://orm.drizzle.team/docs/select#order-by} * * @example * * ``` * // Select cars ordered by year * await db.select().from(cars).orderBy(cars.year); * ``` * * You can specify whether results are in ascending or descending order with the `asc()` and `desc()` operators. * * ```ts * // Select cars ordered by year in descending order * await db.select().from(cars).orderBy(desc(cars.year)); * * // Select cars ordered by year and price * await db.select().from(cars).orderBy(asc(cars.year), desc(cars.price)); * ``` */ orderBy( builder: (aliases: this['_']['selection']) => ValueOrArray, ): SQLiteSelectWithout; orderBy(...columns: (SQLiteColumn | SQL)[]): SQLiteSelectWithout; orderBy( ...columns: | [(aliases: this['_']['selection']) => ValueOrArray] | (SQLiteColumn | SQL | SQL.Aliased)[] ): SQLiteSelectWithout { if (typeof columns[0] === 'function') { const orderBy = columns[0]( new Proxy( this.config.fields, new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as TSelection, ); const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.orderBy = orderByArray; } else { this.config.orderBy = orderByArray; } } else { const orderByArray = columns as (SQLiteColumn | SQL | SQL.Aliased)[]; if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.orderBy = orderByArray; } else { this.config.orderBy = orderByArray; } } return this as any; } /** * Adds a `limit` clause to the query. * * Calling this method will set the maximum number of rows that will be returned by this query. * * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} * * @param limit the `limit` clause. * * @example * * ```ts * // Get the first 10 people from this query. * await db.select().from(people).limit(10); * ``` */ limit(limit: number | Placeholder): SQLiteSelectWithout { if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.limit = limit; } else { this.config.limit = limit; } return this as any; } /** * Adds an `offset` clause to the query. * * Calling this method will skip a number of rows when returning results from this query. * * See docs: {@link https://orm.drizzle.team/docs/select#limit--offset} * * @param offset the `offset` clause. * * @example * * ```ts * // Get the 10th-20th people from this query. * await db.select().from(people).offset(10).limit(10); * ``` */ offset(offset: number | Placeholder): SQLiteSelectWithout { if (this.config.setOperators.length > 0) { this.config.setOperators.at(-1)!.offset = offset; } else { this.config.offset = offset; } return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildSelectQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } as( alias: TAlias, ): SubqueryWithSelection { const usedTables: string[] = []; usedTables.push(...extractUsedTable(this.config.table)); if (this.config.joins) { for (const it of this.config.joins) usedTables.push(...extractUsedTable(it.table)); } return new Proxy( new Subquery(this.getSQL(), this.config.fields, alias, false, [...new Set(usedTables)]), new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as SubqueryWithSelection; } /** @internal */ override getSelectedFields(): this['_']['selectedFields'] { return new Proxy( this.config.fields, new SelectionProxyHandler({ alias: this.tableName, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), ) as this['_']['selectedFields']; } $dynamic(): SQLiteSelectDynamic { return this; } } // eslint-disable-next-line @typescript-eslint/no-empty-interface export interface SQLiteSelectBase< TTableName extends string | undefined, TResultType extends 'sync' | 'async', TRunResult, TSelection extends ColumnsSelection, TSelectMode extends SelectMode = 'single', TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends SQLiteSelectQueryBuilderBase< SQLiteSelectHKT, TTableName, TResultType, TRunResult, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, QueryPromise {} export class SQLiteSelectBase< TTableName extends string | undefined, TResultType extends 'sync' | 'async', TRunResult, TSelection, TSelectMode extends SelectMode = 'single', TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > extends SQLiteSelectQueryBuilderBase< SQLiteSelectHKT, TTableName, TResultType, TRunResult, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields > implements RunnableQuery, SQLWrapper { static override readonly [entityKind]: string = 'SQLiteSelect'; /** @internal */ _prepare(isOneTimeQuery = true): SQLiteSelectPrepare { if (!this.session) { throw new Error('Cannot execute a query on a query builder. Please use a database instance instead.'); } const fieldsList = orderSelectedFields(this.config.fields); const query = this.session[isOneTimeQuery ? 'prepareOneTimeQuery' : 'prepareQuery']( this.dialect.sqlToQuery(this.getSQL()), fieldsList, 'all', true, undefined, { type: 'select', tables: [...this.usedTables], }, this.cacheConfig, ); query.joinsNotNullableMap = this.joinsNotNullableMap; return query as ReturnType; } $withCache(config?: { config?: CacheConfig; tag?: string; autoInvalidate?: boolean } | false) { this.cacheConfig = config === undefined ? { config: {}, enable: true, autoInvalidate: true } : config === false ? { enable: false } : { enable: true, autoInvalidate: true, ...config }; return this; } prepare(): SQLiteSelectPrepare { return this._prepare(false); } run: ReturnType['run'] = (placeholderValues) => { return this._prepare().run(placeholderValues); }; all: ReturnType['all'] = (placeholderValues) => { return this._prepare().all(placeholderValues); }; get: ReturnType['get'] = (placeholderValues) => { return this._prepare().get(placeholderValues); }; values: ReturnType['values'] = (placeholderValues) => { return this._prepare().values(placeholderValues); }; async execute(): Promise> { return this.all() as SQLiteSelectExecute; } } applyMixins(SQLiteSelectBase, [QueryPromise]); function createSetOperator(type: SetOperator, isAll: boolean): SQLiteCreateSetOperatorFn { return (leftSelect, rightSelect, ...restSelects) => { const setOperators = [rightSelect, ...restSelects].map((select) => ({ type, isAll, rightSelect: select as AnySQLiteSelect, })); for (const setOperator of setOperators) { if (!haveSameKeys((leftSelect as any).getSelectedFields(), setOperator.rightSelect.getSelectedFields())) { throw new Error( 'Set operator error (union / intersect / except): selected fields are not the same or are in a different order', ); } } return (leftSelect as AnySQLiteSelect).addSetOperators(setOperators) as any; }; } const getSQLiteSetOperators = () => ({ union, unionAll, intersect, except, }); /** * Adds `union` set operator to the query. * * Calling this method will combine the result sets of the `select` statements and remove any duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union} * * @example * * ```ts * // Select all unique names from customers and users tables * import { union } from 'drizzle-orm/sqlite-core' * * await union( * db.select({ name: users.name }).from(users), * db.select({ name: customers.name }).from(customers) * ); * // or * await db.select({ name: users.name }) * .from(users) * .union( * db.select({ name: customers.name }).from(customers) * ); * ``` */ export const union = createSetOperator('union', false); /** * Adds `union all` set operator to the query. * * Calling this method will combine the result-set of the `select` statements and keep all duplicate rows that appear across them. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#union-all} * * @example * * ```ts * // Select all transaction ids from both online and in-store sales * import { unionAll } from 'drizzle-orm/sqlite-core' * * await unionAll( * db.select({ transaction: onlineSales.transactionId }).from(onlineSales), * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * // or * await db.select({ transaction: onlineSales.transactionId }) * .from(onlineSales) * .unionAll( * db.select({ transaction: inStoreSales.transactionId }).from(inStoreSales) * ); * ``` */ export const unionAll = createSetOperator('union', true); /** * Adds `intersect` set operator to the query. * * Calling this method will retain only the rows that are present in both result sets and eliminate duplicates. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#intersect} * * @example * * ```ts * // Select course names that are offered in both departments A and B * import { intersect } from 'drizzle-orm/sqlite-core' * * await intersect( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * await db.select({ courseName: depA.courseName }) * .from(depA) * .intersect( * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ export const intersect = createSetOperator('intersect', false); /** * Adds `except` set operator to the query. * * Calling this method will retrieve all unique rows from the left query, except for the rows that are present in the result set of the right query. * * See docs: {@link https://orm.drizzle.team/docs/set-operations#except} * * @example * * ```ts * // Select all courses offered in department A but not in department B * import { except } from 'drizzle-orm/sqlite-core' * * await except( * db.select({ courseName: depA.courseName }).from(depA), * db.select({ courseName: depB.courseName }).from(depB) * ); * // or * await db.select({ courseName: depA.courseName }) * .from(depA) * .except( * db.select({ courseName: depB.courseName }).from(depB) * ); * ``` */ export const except = createSetOperator('except', false); ================================================ FILE: drizzle-orm/src/sqlite-core/query-builders/select.types.ts ================================================ import type { ColumnsSelection, Placeholder, SQL, View } from '~/sql/sql.ts'; import type { SQLiteColumn } from '~/sqlite-core/columns/index.ts'; import type { SQLiteTable, SQLiteTableWithColumns } from '~/sqlite-core/table.ts'; import type { Assume, ValidateShape } from '~/utils.ts'; import type { SelectedFields as SelectFieldsBase, SelectedFieldsFlat as SelectFieldsFlatBase, SelectedFieldsOrdered as SelectFieldsOrderedBase, } from '~/operations.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AppendToNullabilityMap, AppendToResult, BuildSubquerySelection, GetSelectTableName, JoinNullability, JoinType, MapColumnsToTableAlias, SelectMode, SelectResult, SetOperator, } from '~/query-builders/select.types.ts'; import type { Subquery } from '~/subquery.ts'; import type { Table, UpdateTableConfig } from '~/table.ts'; import type { SQLitePreparedQuery } from '../session.ts'; import type { SQLiteViewBase } from '../view-base.ts'; import type { SQLiteViewWithSelection } from '../view.ts'; import type { SQLiteSelectBase, SQLiteSelectQueryBuilderBase } from './select.ts'; export interface SQLiteSelectJoinConfig { on: SQL | undefined; table: SQLiteTable | Subquery | SQLiteViewBase | SQL; alias: string | undefined; joinType: JoinType; } export type BuildAliasTable = TTable extends Table ? SQLiteTableWithColumns< UpdateTableConfig; }> > : TTable extends View ? SQLiteViewWithSelection< TAlias, TTable['_']['existing'], MapColumnsToTableAlias > : never; export interface SQLiteSelectConfig { withList?: Subquery[]; fields: Record; fieldsFlat?: SelectedFieldsOrdered; where?: SQL; having?: SQL; table: SQLiteTable | Subquery | SQLiteViewBase | SQL; limit?: number | Placeholder; offset?: number | Placeholder; joins?: SQLiteSelectJoinConfig[]; orderBy?: (SQLiteColumn | SQL | SQL.Aliased)[]; groupBy?: (SQLiteColumn | SQL | SQL.Aliased)[]; distinct?: boolean; setOperators: { rightSelect: TypedQueryBuilder; type: SetOperator; isAll: boolean; orderBy?: (SQLiteColumn | SQL | SQL.Aliased)[]; limit?: number | Placeholder; offset?: number | Placeholder; }[]; } export type SQLiteSelectJoin< T extends AnySQLiteSelectQueryBuilder, TDynamic extends boolean, TJoinType extends JoinType, TJoinedTable extends SQLiteTable | Subquery | SQLiteViewBase | SQL, TJoinedName extends GetSelectTableName = GetSelectTableName, > = T extends any ? SQLiteSelectWithout< SQLiteSelectKind< T['_']['hkt'], T['_']['tableName'], T['_']['resultType'], T['_']['runResult'], AppendToResult< T['_']['tableName'], T['_']['selection'], TJoinedName, TJoinedTable extends SQLiteTable ? TJoinedTable['_']['columns'] : TJoinedTable extends Subquery | View ? Assume : never, T['_']['selectMode'] >, T['_']['selectMode'] extends 'partial' ? T['_']['selectMode'] : 'multiple', AppendToNullabilityMap, T['_']['dynamic'], T['_']['excludedMethods'] >, TDynamic, T['_']['excludedMethods'] > : never; export type SQLiteSelectJoinFn< T extends AnySQLiteSelectQueryBuilder, TDynamic extends boolean, TJoinType extends JoinType, > = < TJoinedTable extends SQLiteTable | Subquery | SQLiteViewBase | SQL, TJoinedName extends GetSelectTableName = GetSelectTableName, >( table: TJoinedTable, on: ((aliases: T['_']['selection']) => SQL | undefined) | SQL | undefined, ) => SQLiteSelectJoin; export type SQLiteSelectCrossJoinFn< T extends AnySQLiteSelectQueryBuilder, TDynamic extends boolean, > = < TJoinedTable extends SQLiteTable | Subquery | SQLiteViewBase | SQL, TJoinedName extends GetSelectTableName = GetSelectTableName, >(table: TJoinedTable) => SQLiteSelectJoin; export type SelectedFieldsFlat = SelectFieldsFlatBase; export type SelectedFields = SelectFieldsBase; export type SelectedFieldsOrdered = SelectFieldsOrderedBase; export interface SQLiteSelectHKTBase { tableName: string | undefined; resultType: 'sync' | 'async'; runResult: unknown; selection: unknown; selectMode: SelectMode; nullabilityMap: unknown; dynamic: boolean; excludedMethods: string; result: unknown; selectedFields: unknown; _type: unknown; } export type SQLiteSelectKind< T extends SQLiteSelectHKTBase, TTableName extends string | undefined, TResultType extends 'sync' | 'async', TRunResult, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, TNullabilityMap extends Record, TDynamic extends boolean, TExcludedMethods extends string, TResult = SelectResult[], TSelectedFields = BuildSubquerySelection, > = (T & { tableName: TTableName; resultType: TResultType; runResult: TRunResult; selection: TSelection; selectMode: TSelectMode; nullabilityMap: TNullabilityMap; dynamic: TDynamic; excludedMethods: TExcludedMethods; result: TResult; selectedFields: TSelectedFields; })['_type']; export interface SQLiteSelectQueryBuilderHKT extends SQLiteSelectHKTBase { _type: SQLiteSelectQueryBuilderBase< SQLiteSelectQueryBuilderHKT, this['tableName'], this['resultType'], this['runResult'], Assume, this['selectMode'], Assume>, this['dynamic'], this['excludedMethods'], Assume, Assume >; } export interface SQLiteSelectHKT extends SQLiteSelectHKTBase { _type: SQLiteSelectBase< this['tableName'], this['resultType'], this['runResult'], Assume, this['selectMode'], Assume>, this['dynamic'], this['excludedMethods'], Assume, Assume >; } export type SQLiteSetOperatorExcludedMethods = | 'config' | 'leftJoin' | 'rightJoin' | 'innerJoin' | 'fullJoin' | 'where' | 'having' | 'groupBy'; export type CreateSQLiteSelectFromBuilderMode< TBuilderMode extends 'db' | 'qb', TTableName extends string | undefined, TResultType extends 'sync' | 'async', TRunResult, TSelection extends ColumnsSelection, TSelectMode extends SelectMode, > = TBuilderMode extends 'db' ? SQLiteSelectBase< TTableName, TResultType, TRunResult, TSelection, TSelectMode > : SQLiteSelectQueryBuilderBase< SQLiteSelectQueryBuilderHKT, TTableName, TResultType, TRunResult, TSelection, TSelectMode >; export type SQLiteSelectWithout< T extends AnySQLiteSelectQueryBuilder, TDynamic extends boolean, K extends keyof T & string, TResetExcluded extends boolean = false, > = TDynamic extends true ? T : Omit< SQLiteSelectKind< T['_']['hkt'], T['_']['tableName'], T['_']['resultType'], T['_']['runResult'], T['_']['selection'], T['_']['selectMode'], T['_']['nullabilityMap'], TDynamic, TResetExcluded extends true ? K : T['_']['excludedMethods'] | K, T['_']['result'], T['_']['selectedFields'] >, TResetExcluded extends true ? K : T['_']['excludedMethods'] | K >; export type SQLiteSelectExecute = T['_']['result']; export type SQLiteSelectPrepare = SQLitePreparedQuery< { type: T['_']['resultType']; run: T['_']['runResult']; all: T['_']['result']; get: T['_']['result'][number] | undefined; values: any[][]; execute: SQLiteSelectExecute; } >; export type SQLiteSelectDynamic = SQLiteSelectKind< T['_']['hkt'], T['_']['tableName'], T['_']['resultType'], T['_']['runResult'], T['_']['selection'], T['_']['selectMode'], T['_']['nullabilityMap'], true, never, T['_']['result'], T['_']['selectedFields'] >; export type SQLiteSelectQueryBuilder< THKT extends SQLiteSelectHKTBase = SQLiteSelectQueryBuilderHKT, TTableName extends string | undefined = string | undefined, TResultType extends 'sync' | 'async' = 'sync' | 'async', TRunResult = unknown, TSelection extends ColumnsSelection = ColumnsSelection, TSelectMode extends SelectMode = SelectMode, TNullabilityMap extends Record = Record, TResult extends any[] = unknown[], TSelectedFields extends ColumnsSelection = ColumnsSelection, > = SQLiteSelectQueryBuilderBase< THKT, TTableName, TResultType, TRunResult, TSelection, TSelectMode, TNullabilityMap, true, never, TResult, TSelectedFields >; export type AnySQLiteSelectQueryBuilder = SQLiteSelectQueryBuilderBase< any, any, any, any, any, any, any, any, any, any, any >; export type AnySQLiteSetOperatorInterface = SQLiteSetOperatorInterface; export interface SQLiteSetOperatorInterface< TTableName extends string | undefined, TResultType extends 'sync' | 'async', TRunResult, TSelection extends ColumnsSelection, TSelectMode extends SelectMode = 'single', TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, > { _: { readonly hkt: SQLiteSelectHKTBase; readonly tableName: TTableName; readonly resultType: TResultType; readonly runResult: TRunResult; readonly selection: TSelection; readonly selectMode: TSelectMode; readonly nullabilityMap: TNullabilityMap; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TResult; readonly selectedFields: TSelectedFields; }; } export type SQLiteSetOperatorWithResult = SQLiteSetOperatorInterface< any, any, any, any, any, any, any, any, TResult, any >; export type SQLiteSelect< TTableName extends string | undefined = string | undefined, TResultType extends 'sync' | 'async' = 'sync' | 'async', TRunResult = unknown, TSelection extends ColumnsSelection = Record, TSelectMode extends SelectMode = SelectMode, TNullabilityMap extends Record = Record, > = SQLiteSelectBase; export type AnySQLiteSelect = SQLiteSelectBase; export type SQLiteSetOperator< TTableName extends string | undefined = string | undefined, TResultType extends 'sync' | 'async' = 'sync' | 'async', TRunResult = unknown, TSelection extends ColumnsSelection = Record, TSelectMode extends SelectMode = SelectMode, TNullabilityMap extends Record = Record, > = SQLiteSelectBase< TTableName, TResultType, TRunResult, TSelection, TSelectMode, TNullabilityMap, true, SQLiteSetOperatorExcludedMethods >; export type SetOperatorRightSelect< TValue extends SQLiteSetOperatorWithResult, TResult extends any[], > = TValue extends SQLiteSetOperatorInterface ? ValidateShape< TValueResult[number], TResult[number], TypedQueryBuilder > : TValue; export type SetOperatorRestSelect< TValue extends readonly SQLiteSetOperatorWithResult[], TResult extends any[], > = TValue extends [infer First, ...infer Rest] ? First extends SQLiteSetOperatorInterface ? Rest extends AnySQLiteSetOperatorInterface[] ? [ ValidateShape>, ...SetOperatorRestSelect, ] : ValidateShape[]> : never : TValue; export type SQLiteCreateSetOperatorFn = < TTableName extends string | undefined, TResultType extends 'sync' | 'async', TRunResult, TSelection extends ColumnsSelection, TValue extends SQLiteSetOperatorWithResult, TRest extends SQLiteSetOperatorWithResult[], TSelectMode extends SelectMode = 'single', TNullabilityMap extends Record = TTableName extends string ? Record : {}, TDynamic extends boolean = false, TExcludedMethods extends string = never, TResult extends any[] = SelectResult[], TSelectedFields extends ColumnsSelection = BuildSubquerySelection, >( leftSelect: SQLiteSetOperatorInterface< TTableName, TResultType, TRunResult, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, rightSelect: SetOperatorRightSelect, ...restSelects: SetOperatorRestSelect ) => SQLiteSelectWithout< SQLiteSelectBase< TTableName, TResultType, TRunResult, TSelection, TSelectMode, TNullabilityMap, TDynamic, TExcludedMethods, TResult, TSelectedFields >, false, SQLiteSetOperatorExcludedMethods, true >; export type GetSQLiteSetOperators = { union: SQLiteCreateSetOperatorFn; intersect: SQLiteCreateSetOperatorFn; except: SQLiteCreateSetOperatorFn; unionAll: SQLiteCreateSetOperatorFn; }; ================================================ FILE: drizzle-orm/src/sqlite-core/query-builders/update.ts ================================================ import type { GetColumnData } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import type { JoinType, SelectResultFields } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { Placeholder, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { SQLiteDialect } from '~/sqlite-core/dialect.ts'; import type { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts'; import { SQLiteTable } from '~/sqlite-core/table.ts'; import { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { type DrizzleTypeError, getTableLikeName, mapUpdateSet, orderSelectedFields, type UpdateSet, type ValueOrArray, } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { SQLiteColumn } from '../columns/common.ts'; import { extractUsedTable } from '../utils.ts'; import { SQLiteViewBase } from '../view-base.ts'; import type { SelectedFields, SelectedFieldsOrdered, SQLiteSelectJoinConfig } from './select.types.ts'; export interface SQLiteUpdateConfig { where?: SQL | undefined; limit?: number | Placeholder; orderBy?: (SQLiteColumn | SQL | SQL.Aliased)[]; set: UpdateSet; table: SQLiteTable; from?: SQLiteTable | Subquery | SQLiteViewBase | SQL; joins: SQLiteSelectJoinConfig[]; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } export type SQLiteUpdateSetSource = & { [Key in keyof TTable['$inferInsert']]?: | GetColumnData | SQL | SQLiteColumn | undefined; } & {}; export class SQLiteUpdateBuilder< TTable extends SQLiteTable, TResultType extends 'sync' | 'async', TRunResult, > { static readonly [entityKind]: string = 'SQLiteUpdateBuilder'; declare readonly _: { readonly table: TTable; }; constructor( protected table: TTable, protected session: SQLiteSession, protected dialect: SQLiteDialect, private withList?: Subquery[], ) {} set( values: SQLiteUpdateSetSource, ): SQLiteUpdateWithout< SQLiteUpdateBase, false, 'leftJoin' | 'rightJoin' | 'innerJoin' | 'fullJoin' > { return new SQLiteUpdateBase( this.table, mapUpdateSet(this.table, values), this.session, this.dialect, this.withList, ) as any; } } export type SQLiteUpdateWithout< T extends AnySQLiteUpdate, TDynamic extends boolean, K extends keyof T & string, > = TDynamic extends true ? T : Omit< SQLiteUpdateBase< T['_']['table'], T['_']['resultType'], T['_']['runResult'], T['_']['from'], T['_']['returning'], TDynamic, T['_']['excludedMethods'] | K >, T['_']['excludedMethods'] | K >; export type SQLiteUpdateWithJoins< T extends AnySQLiteUpdate, TDynamic extends boolean, TFrom extends SQLiteTable | Subquery | SQLiteViewBase | SQL, > = TDynamic extends true ? T : Omit< SQLiteUpdateBase< T['_']['table'], T['_']['resultType'], T['_']['runResult'], TFrom, T['_']['returning'], TDynamic, Exclude >, Exclude >; export type SQLiteUpdateReturningAll = SQLiteUpdateWithout< SQLiteUpdateBase< T['_']['table'], T['_']['resultType'], T['_']['runResult'], T['_']['from'], T['_']['table']['$inferSelect'], TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type SQLiteUpdateReturning< T extends AnySQLiteUpdate, TDynamic extends boolean, TSelectedFields extends SelectedFields, > = SQLiteUpdateWithout< SQLiteUpdateBase< T['_']['table'], T['_']['resultType'], T['_']['runResult'], T['_']['from'], SelectResultFields, TDynamic, T['_']['excludedMethods'] >, TDynamic, 'returning' >; export type SQLiteUpdateExecute = T['_']['returning'] extends undefined ? T['_']['runResult'] : T['_']['returning'][]; export type SQLiteUpdatePrepare = SQLitePreparedQuery< { type: T['_']['resultType']; run: T['_']['runResult']; all: T['_']['returning'] extends undefined ? DrizzleTypeError<'.all() cannot be used without .returning()'> : T['_']['returning'][]; get: T['_']['returning'] extends undefined ? DrizzleTypeError<'.get() cannot be used without .returning()'> : T['_']['returning']; values: T['_']['returning'] extends undefined ? DrizzleTypeError<'.values() cannot be used without .returning()'> : any[][]; execute: SQLiteUpdateExecute; } >; export type SQLiteUpdateJoinFn< T extends AnySQLiteUpdate, > = < TJoinedTable extends SQLiteTable | Subquery | SQLiteViewBase | SQL, >( table: TJoinedTable, on: | ( ( updateTable: T['_']['table']['_']['columns'], from: T['_']['from'] extends SQLiteTable ? T['_']['from']['_']['columns'] : T['_']['from'] extends Subquery | SQLiteViewBase ? T['_']['from']['_']['selectedFields'] : never, ) => SQL | undefined ) | SQL | undefined, ) => T; export type SQLiteUpdateDynamic = SQLiteUpdate< T['_']['table'], T['_']['resultType'], T['_']['runResult'], T['_']['returning'] >; export type SQLiteUpdate< TTable extends SQLiteTable = SQLiteTable, TResultType extends 'sync' | 'async' = 'sync' | 'async', TRunResult = any, TFrom extends SQLiteTable | Subquery | SQLiteViewBase | SQL | undefined = undefined, TReturning extends Record | undefined = Record | undefined, > = SQLiteUpdateBase; export type AnySQLiteUpdate = SQLiteUpdateBase; export interface SQLiteUpdateBase< TTable extends SQLiteTable = SQLiteTable, TResultType extends 'sync' | 'async' = 'sync' | 'async', TRunResult = unknown, TFrom extends SQLiteTable | Subquery | SQLiteViewBase | SQL | undefined = undefined, TReturning = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends SQLWrapper, QueryPromise { readonly _: { readonly dialect: 'sqlite'; readonly table: TTable; readonly resultType: TResultType; readonly runResult: TRunResult; readonly from: TFrom; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; readonly result: TReturning extends undefined ? TRunResult : TReturning[]; }; } export class SQLiteUpdateBase< TTable extends SQLiteTable = SQLiteTable, // eslint-disable-next-line @typescript-eslint/no-unused-vars TResultType extends 'sync' | 'async' = 'sync' | 'async', TRunResult = unknown, TFrom extends SQLiteTable | Subquery | SQLiteViewBase | SQL | undefined = undefined, TReturning = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise implements RunnableQuery, SQLWrapper { static override readonly [entityKind]: string = 'SQLiteUpdate'; /** @internal */ config: SQLiteUpdateConfig; constructor( table: TTable, set: UpdateSet, private session: SQLiteSession, private dialect: SQLiteDialect, withList?: Subquery[], ) { super(); this.config = { set, table, withList, joins: [] }; } from( source: TFrom, ): SQLiteUpdateWithJoins { this.config.from = source; return this as any; } private createJoin( joinType: TJoinType, ): SQLiteUpdateJoinFn { return (( table: SQLiteTable | Subquery | SQLiteViewBase | SQL, on: ((updateTable: TTable, from: TFrom) => SQL | undefined) | SQL | undefined, ) => { const tableName = getTableLikeName(table); if (typeof tableName === 'string' && this.config.joins.some((join) => join.alias === tableName)) { throw new Error(`Alias "${tableName}" is already used in this query`); } if (typeof on === 'function') { const from = this.config.from ? is(table, SQLiteTable) ? table[Table.Symbol.Columns] : is(table, Subquery) ? table._.selectedFields : is(table, SQLiteViewBase) ? table[ViewBaseConfig].selectedFields : undefined : undefined; on = on( new Proxy( this.config.table[Table.Symbol.Columns], new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as any, from && new Proxy( from, new SelectionProxyHandler({ sqlAliasedBehavior: 'sql', sqlBehavior: 'sql' }), ) as any, ); } this.config.joins.push({ on, table, joinType, alias: tableName }); return this as any; }) as any; } leftJoin = this.createJoin('left'); rightJoin = this.createJoin('right'); innerJoin = this.createJoin('inner'); fullJoin = this.createJoin('full'); /** * Adds a 'where' clause to the query. * * Calling this method will update only those rows that fulfill a specified condition. * * See docs: {@link https://orm.drizzle.team/docs/update} * * @param where the 'where' clause. * * @example * You can use conditional operators and `sql function` to filter the rows to be updated. * * ```ts * // Update all cars with green color * db.update(cars).set({ color: 'red' }) * .where(eq(cars.color, 'green')); * // or * db.update(cars).set({ color: 'red' }) * .where(sql`${cars.color} = 'green'`) * ``` * * You can logically combine conditional operators with `and()` and `or()` operators: * * ```ts * // Update all BMW cars with a green color * db.update(cars).set({ color: 'red' }) * .where(and(eq(cars.color, 'green'), eq(cars.brand, 'BMW'))); * * // Update all cars with the green or blue color * db.update(cars).set({ color: 'red' }) * .where(or(eq(cars.color, 'green'), eq(cars.color, 'blue'))); * ``` */ where(where: SQL | undefined): SQLiteUpdateWithout { this.config.where = where; return this as any; } orderBy( builder: (updateTable: TTable) => ValueOrArray, ): SQLiteUpdateWithout; orderBy(...columns: (SQLiteColumn | SQL | SQL.Aliased)[]): SQLiteUpdateWithout; orderBy( ...columns: | [(updateTable: TTable) => ValueOrArray] | (SQLiteColumn | SQL | SQL.Aliased)[] ): SQLiteUpdateWithout { if (typeof columns[0] === 'function') { const orderBy = columns[0]( new Proxy( this.config.table[Table.Symbol.Columns], new SelectionProxyHandler({ sqlAliasedBehavior: 'alias', sqlBehavior: 'sql' }), ) as any, ); const orderByArray = Array.isArray(orderBy) ? orderBy : [orderBy]; this.config.orderBy = orderByArray; } else { const orderByArray = columns as (SQLiteColumn | SQL | SQL.Aliased)[]; this.config.orderBy = orderByArray; } return this as any; } limit(limit: number | Placeholder): SQLiteUpdateWithout { this.config.limit = limit; return this as any; } /** * Adds a `returning` clause to the query. * * Calling this method will return the specified fields of the updated rows. If no fields are specified, all fields will be returned. * * See docs: {@link https://orm.drizzle.team/docs/update#update-with-returning} * * @example * ```ts * // Update all cars with the green color and return all fields * const updatedCars: Car[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.color, 'green')) * .returning(); * * // Update all cars with the green color and return only their id and brand fields * const updatedCarsIdsAndBrands: { id: number, brand: string }[] = await db.update(cars) * .set({ color: 'red' }) * .where(eq(cars.color, 'green')) * .returning({ id: cars.id, brand: cars.brand }); * ``` */ returning(): SQLiteUpdateReturningAll; returning( fields: TSelectedFields, ): SQLiteUpdateReturning; returning( fields: SelectedFields = this.config.table[SQLiteTable.Symbol.Columns], ): SQLiteUpdateWithout { this.config.returning = orderSelectedFields(fields); return this as any; } /** @internal */ getSQL(): SQL { return this.dialect.buildUpdateQuery(this.config); } toSQL(): Query { const { typings: _typings, ...rest } = this.dialect.sqlToQuery(this.getSQL()); return rest; } /** @internal */ _prepare(isOneTimeQuery = true): SQLiteUpdatePrepare { return this.session[isOneTimeQuery ? 'prepareOneTimeQuery' : 'prepareQuery']( this.dialect.sqlToQuery(this.getSQL()), this.config.returning, this.config.returning ? 'all' : 'run', true, undefined, { type: 'insert', tables: extractUsedTable(this.config.table), }, ) as SQLiteUpdatePrepare; } prepare(): SQLiteUpdatePrepare { return this._prepare(false); } run: ReturnType['run'] = (placeholderValues) => { return this._prepare().run(placeholderValues); }; all: ReturnType['all'] = (placeholderValues) => { return this._prepare().all(placeholderValues); }; get: ReturnType['get'] = (placeholderValues) => { return this._prepare().get(placeholderValues); }; values: ReturnType['values'] = (placeholderValues) => { return this._prepare().values(placeholderValues); }; override async execute(): Promise> { return (this.config.returning ? this.all() : this.run()) as SQLiteUpdateExecute; } $dynamic(): SQLiteUpdateDynamic { return this as any; } } ================================================ FILE: drizzle-orm/src/sqlite-core/session.ts ================================================ import { type Cache, hashQuery, NoopCache } from '~/cache/core/cache.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind, is } from '~/entity.ts'; import { DrizzleError, DrizzleQueryError, TransactionRollbackError } from '~/errors.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { TablesRelationalConfig } from '~/relations.ts'; import type { PreparedQuery } from '~/session.ts'; import type { Query, SQL } from '~/sql/sql.ts'; import type { SQLiteAsyncDialect, SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; import { BaseSQLiteDatabase } from './db.ts'; import type { SQLiteRaw } from './query-builders/raw.ts'; import type { SelectedFieldsOrdered } from './query-builders/select.types.ts'; export interface PreparedQueryConfig { type: 'sync' | 'async'; run: unknown; all: unknown; get: unknown; values: unknown; execute: unknown; } export class ExecuteResultSync extends QueryPromise { static override readonly [entityKind]: string = 'ExecuteResultSync'; constructor(private resultCb: () => T) { super(); } override async execute(): Promise { return this.resultCb(); } sync(): T { return this.resultCb(); } } export type ExecuteResult = TType extends 'async' ? Promise : ExecuteResultSync; export abstract class SQLitePreparedQuery implements PreparedQuery { static readonly [entityKind]: string = 'PreparedQuery'; /** @internal */ joinsNotNullableMap?: Record; constructor( private mode: 'sync' | 'async', private executeMethod: SQLiteExecuteMethod, protected query: Query, private cache?: Cache, // per query related metadata private queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, // config that was passed through $withCache private cacheConfig?: WithCacheConfig, ) { // it means that no $withCache options were passed and it should be just enabled if (cache && cache.strategy() === 'all' && cacheConfig === undefined) { this.cacheConfig = { enable: true, autoInvalidate: true }; } if (!this.cacheConfig?.enable) { this.cacheConfig = undefined; } } /** @internal */ protected async queryWithCache( queryString: string, params: any[], query: () => Promise, ): Promise { if (this.cache === undefined || is(this.cache, NoopCache) || this.queryMetadata === undefined) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // don't do any mutations, if globally is false if (this.cacheConfig && !this.cacheConfig.enable) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // For mutate queries, we should query the database, wait for a response, and then perform invalidation if ( ( this.queryMetadata.type === 'insert' || this.queryMetadata.type === 'update' || this.queryMetadata.type === 'delete' ) && this.queryMetadata.tables.length > 0 ) { try { const [res] = await Promise.all([ query(), this.cache.onMutate({ tables: this.queryMetadata.tables }), ]); return res; } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } // don't do any reads if globally disabled if (!this.cacheConfig) { try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } if (this.queryMetadata.type === 'select') { const fromCache = await this.cache.get( this.cacheConfig.tag ?? await hashQuery(queryString, params), this.queryMetadata.tables, this.cacheConfig.tag !== undefined, this.cacheConfig.autoInvalidate, ); if (fromCache === undefined) { let result; try { result = await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } // put actual key await this.cache.put( this.cacheConfig.tag ?? await hashQuery(queryString, params), result, // make sure we send tables that were used in a query only if user wants to invalidate it on each write this.cacheConfig.autoInvalidate ? this.queryMetadata.tables : [], this.cacheConfig.tag !== undefined, this.cacheConfig.config, ); // put flag if we should invalidate or not return result; } return fromCache as unknown as T; } try { return await query(); } catch (e) { throw new DrizzleQueryError(queryString, params, e as Error); } } getQuery(): Query { return this.query; } abstract run(placeholderValues?: Record): Result; mapRunResult(result: unknown, _isFromBatch?: boolean): unknown { return result; } abstract all(placeholderValues?: Record): Result; mapAllResult(_result: unknown, _isFromBatch?: boolean): unknown { throw new Error('Not implemented'); } abstract get(placeholderValues?: Record): Result; mapGetResult(_result: unknown, _isFromBatch?: boolean): unknown { throw new Error('Not implemented'); } abstract values(placeholderValues?: Record): Result; execute(placeholderValues?: Record): ExecuteResult { if (this.mode === 'async') { return this[this.executeMethod](placeholderValues) as ExecuteResult; } return new ExecuteResultSync(() => this[this.executeMethod](placeholderValues)); } mapResult(response: unknown, isFromBatch?: boolean) { switch (this.executeMethod) { case 'run': { return this.mapRunResult(response, isFromBatch); } case 'all': { return this.mapAllResult(response, isFromBatch); } case 'get': { return this.mapGetResult(response, isFromBatch); } } } /** @internal */ abstract isResponseInArrayMode(): boolean; } export interface SQLiteTransactionConfig { behavior?: 'deferred' | 'immediate' | 'exclusive'; } export type SQLiteExecuteMethod = 'run' | 'all' | 'get'; export abstract class SQLiteSession< TResultKind extends 'sync' | 'async', TRunResult, TFullSchema extends Record, TSchema extends TablesRelationalConfig, > { static readonly [entityKind]: string = 'SQLiteSession'; constructor( /** @internal */ readonly dialect: { sync: SQLiteSyncDialect; async: SQLiteAsyncDialect }[TResultKind], ) {} abstract prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][], mapColumnValue?: (value: unknown) => unknown) => unknown, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): SQLitePreparedQuery; prepareOneTimeQuery( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][], mapColumnValue?: (value: unknown) => unknown) => unknown, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): SQLitePreparedQuery { return this.prepareQuery( query, fields, executeMethod, isResponseInArrayMode, customResultMapper, queryMetadata, cacheConfig, ); } abstract transaction( transaction: (tx: SQLiteTransaction) => Result, config?: SQLiteTransactionConfig, ): Result; run(query: SQL): Result { const staticQuery = this.dialect.sqlToQuery(query); try { return this.prepareOneTimeQuery(staticQuery, undefined, 'run', false).run() as Result; } catch (err) { throw new DrizzleError({ cause: err, message: `Failed to run the query '${staticQuery.sql}'` }); } } /** @internal */ extractRawRunValueFromBatchResult(result: unknown) { return result; } all(query: SQL): Result { return this.prepareOneTimeQuery(this.dialect.sqlToQuery(query), undefined, 'run', false).all() as Result< TResultKind, T[] >; } /** @internal */ extractRawAllValueFromBatchResult(_result: unknown): unknown { throw new Error('Not implemented'); } get(query: SQL): Result { return this.prepareOneTimeQuery(this.dialect.sqlToQuery(query), undefined, 'run', false).get() as Result< TResultKind, T >; } /** @internal */ extractRawGetValueFromBatchResult(_result: unknown): unknown { throw new Error('Not implemented'); } values( query: SQL, ): Result { return this.prepareOneTimeQuery(this.dialect.sqlToQuery(query), undefined, 'run', false).values() as Result< TResultKind, T[] >; } async count(sql: SQL) { const result = await this.values(sql) as [[number]]; return result[0][0]; } /** @internal */ extractRawValuesValueFromBatchResult(_result: unknown): unknown { throw new Error('Not implemented'); } } export type Result = { sync: TResult; async: Promise }[TKind]; export type DBResult = { sync: TResult; async: SQLiteRaw }[TKind]; export abstract class SQLiteTransaction< TResultType extends 'sync' | 'async', TRunResult, TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends BaseSQLiteDatabase { static override readonly [entityKind]: string = 'SQLiteTransaction'; constructor( resultType: TResultType, dialect: { sync: SQLiteSyncDialect; async: SQLiteAsyncDialect }[TResultType], session: SQLiteSession, protected schema: { fullSchema: Record; schema: TSchema; tableNamesMap: Record; } | undefined, protected readonly nestedIndex = 0, ) { super(resultType, dialect, session, schema); } rollback(): never { throw new TransactionRollbackError(); } } ================================================ FILE: drizzle-orm/src/sqlite-core/subquery.ts ================================================ import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import type { Subquery, WithSubquery, WithSubqueryWithoutSelection } from '~/subquery.ts'; import type { QueryBuilder } from './query-builders/query-builder.ts'; export type SubqueryWithSelection = & Subquery> & AddAliasToSelection; export type WithSubqueryWithSelection = & WithSubquery> & AddAliasToSelection; export interface WithBuilder { (alias: TAlias): { as: { ( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): WithSubqueryWithSelection; ( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): WithSubqueryWithoutSelection; }; }; (alias: TAlias, selection: TSelection): { as: (qb: SQL | ((qb: QueryBuilder) => SQL)) => WithSubqueryWithSelection; }; } ================================================ FILE: drizzle-orm/src/sqlite-core/table.ts ================================================ import type { BuildColumns, BuildExtraConfigColumns } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import { Table, type TableConfig as TableConfigBase, type UpdateTableConfig } from '~/table.ts'; import type { CheckBuilder } from './checks.ts'; import { getSQLiteColumnBuilders, type SQLiteColumnBuilders } from './columns/all.ts'; import type { SQLiteColumn, SQLiteColumnBuilder, SQLiteColumnBuilderBase } from './columns/common.ts'; import type { ForeignKey, ForeignKeyBuilder } from './foreign-keys.ts'; import type { IndexBuilder } from './indexes.ts'; import type { PrimaryKeyBuilder } from './primary-keys.ts'; import type { UniqueConstraintBuilder } from './unique-constraint.ts'; export type SQLiteTableExtraConfigValue = | IndexBuilder | CheckBuilder | ForeignKeyBuilder | PrimaryKeyBuilder | UniqueConstraintBuilder; export type SQLiteTableExtraConfig = Record< string, SQLiteTableExtraConfigValue >; export type TableConfig = TableConfigBase>; /** @internal */ export const InlineForeignKeys = Symbol.for('drizzle:SQLiteInlineForeignKeys'); export class SQLiteTable extends Table { static override readonly [entityKind]: string = 'SQLiteTable'; /** @internal */ static override readonly Symbol = Object.assign({}, Table.Symbol, { InlineForeignKeys: InlineForeignKeys as typeof InlineForeignKeys, }); /** @internal */ override [Table.Symbol.Columns]!: NonNullable; /** @internal */ [InlineForeignKeys]: ForeignKey[] = []; /** @internal */ override [Table.Symbol.ExtraConfigBuilder]: | ((self: Record) => SQLiteTableExtraConfig) | undefined = undefined; } export type AnySQLiteTable = {}> = SQLiteTable< UpdateTableConfig >; export type SQLiteTableWithColumns = & SQLiteTable & { [Key in keyof T['columns']]: T['columns'][Key]; }; export interface SQLiteTableFn { < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap, extraConfig?: ( self: BuildColumns, ) => SQLiteTableExtraConfigValue[], ): SQLiteTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'sqlite'; }>; < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: (columnTypes: SQLiteColumnBuilders) => TColumnsMap, extraConfig?: (self: BuildColumns) => SQLiteTableExtraConfigValue[], ): SQLiteTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'sqlite'; }>; /** * @deprecated The third parameter of sqliteTable is changing and will only accept an array instead of an object * * @example * Deprecated version: * ```ts * export const users = sqliteTable("users", { * id: int(), * }, (t) => ({ * idx: index('custom_name').on(t.id) * })); * ``` * * New API: * ```ts * export const users = sqliteTable("users", { * id: int(), * }, (t) => [ * index('custom_name').on(t.id) * ]); * ``` */ < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: TColumnsMap, extraConfig?: (self: BuildColumns) => SQLiteTableExtraConfig, ): SQLiteTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'sqlite'; }>; /** * @deprecated The third parameter of sqliteTable is changing and will only accept an array instead of an object * * @example * Deprecated version: * ```ts * export const users = sqliteTable("users", { * id: int(), * }, (t) => ({ * idx: index('custom_name').on(t.id) * })); * ``` * * New API: * ```ts * export const users = sqliteTable("users", { * id: int(), * }, (t) => [ * index('custom_name').on(t.id) * ]); * ``` */ < TTableName extends string, TColumnsMap extends Record, >( name: TTableName, columns: (columnTypes: SQLiteColumnBuilders) => TColumnsMap, extraConfig?: (self: BuildColumns) => SQLiteTableExtraConfig, ): SQLiteTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'sqlite'; }>; } function sqliteTableBase< TTableName extends string, TColumnsMap extends Record, TSchema extends string | undefined, >( name: TTableName, columns: TColumnsMap | ((columnTypes: SQLiteColumnBuilders) => TColumnsMap), extraConfig: | (( self: BuildColumns, ) => SQLiteTableExtraConfig | SQLiteTableExtraConfigValue[]) | undefined, schema?: TSchema, baseName = name, ): SQLiteTableWithColumns<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'sqlite'; }> { const rawTable = new SQLiteTable<{ name: TTableName; schema: TSchema; columns: BuildColumns; dialect: 'sqlite'; }>(name, schema, baseName); const parsedColumns: TColumnsMap = typeof columns === 'function' ? columns(getSQLiteColumnBuilders()) : columns; const builtColumns = Object.fromEntries( Object.entries(parsedColumns).map(([name, colBuilderBase]) => { const colBuilder = colBuilderBase as SQLiteColumnBuilder; colBuilder.setName(name); const column = colBuilder.build(rawTable); rawTable[InlineForeignKeys].push(...colBuilder.buildForeignKeys(column, rawTable)); return [name, column]; }), ) as unknown as BuildColumns; const table = Object.assign(rawTable, builtColumns); table[Table.Symbol.Columns] = builtColumns; table[Table.Symbol.ExtraConfigColumns] = builtColumns as unknown as BuildExtraConfigColumns< TTableName, TColumnsMap, 'sqlite' >; if (extraConfig) { table[SQLiteTable.Symbol.ExtraConfigBuilder] = extraConfig as ( self: Record, ) => SQLiteTableExtraConfig; } return table; } export const sqliteTable: SQLiteTableFn = (name, columns, extraConfig) => { return sqliteTableBase(name, columns, extraConfig); }; export function sqliteTableCreator(customizeTableName: (name: string) => string): SQLiteTableFn { return (name, columns, extraConfig) => { return sqliteTableBase(customizeTableName(name) as typeof name, columns, extraConfig, undefined, name); }; } ================================================ FILE: drizzle-orm/src/sqlite-core/unique-constraint.ts ================================================ import { entityKind } from '~/entity.ts'; import { TableName } from '~/table.utils.ts'; import type { SQLiteColumn } from './columns/common.ts'; import type { SQLiteTable } from './table.ts'; export function uniqueKeyName(table: SQLiteTable, columns: string[]) { return `${table[TableName]}_${columns.join('_')}_unique`; } export function unique(name?: string): UniqueOnConstraintBuilder { return new UniqueOnConstraintBuilder(name); } export class UniqueConstraintBuilder { static readonly [entityKind]: string = 'SQLiteUniqueConstraintBuilder'; /** @internal */ columns: SQLiteColumn[]; constructor( columns: SQLiteColumn[], private name?: string, ) { this.columns = columns; } /** @internal */ build(table: SQLiteTable): UniqueConstraint { return new UniqueConstraint(table, this.columns, this.name); } } export class UniqueOnConstraintBuilder { static readonly [entityKind]: string = 'SQLiteUniqueOnConstraintBuilder'; /** @internal */ name?: string; constructor( name?: string, ) { this.name = name; } on(...columns: [SQLiteColumn, ...SQLiteColumn[]]) { return new UniqueConstraintBuilder(columns, this.name); } } export class UniqueConstraint { static readonly [entityKind]: string = 'SQLiteUniqueConstraint'; readonly columns: SQLiteColumn[]; readonly name?: string; constructor(readonly table: SQLiteTable, columns: SQLiteColumn[], name?: string) { this.columns = columns; this.name = name ?? uniqueKeyName(this.table, this.columns.map((column) => column.name)); } getName() { return this.name; } } ================================================ FILE: drizzle-orm/src/sqlite-core/utils.ts ================================================ import { is } from '~/entity.ts'; import { SQL } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; import { Table } from '~/table.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { Check } from './checks.ts'; import { CheckBuilder } from './checks.ts'; import type { ForeignKey } from './foreign-keys.ts'; import { ForeignKeyBuilder } from './foreign-keys.ts'; import type { Index } from './indexes.ts'; import { IndexBuilder } from './indexes.ts'; import type { PrimaryKey } from './primary-keys.ts'; import { PrimaryKeyBuilder } from './primary-keys.ts'; import { SQLiteTable } from './table.ts'; import { type UniqueConstraint, UniqueConstraintBuilder } from './unique-constraint.ts'; import type { SQLiteViewBase } from './view-base.ts'; import type { SQLiteView } from './view.ts'; export function getTableConfig(table: TTable) { const columns = Object.values(table[SQLiteTable.Symbol.Columns]); const indexes: Index[] = []; const checks: Check[] = []; const primaryKeys: PrimaryKey[] = []; const uniqueConstraints: UniqueConstraint[] = []; const foreignKeys: ForeignKey[] = Object.values(table[SQLiteTable.Symbol.InlineForeignKeys]); const name = table[Table.Symbol.Name]; const extraConfigBuilder = table[SQLiteTable.Symbol.ExtraConfigBuilder]; if (extraConfigBuilder !== undefined) { const extraConfig = extraConfigBuilder(table[SQLiteTable.Symbol.Columns]); const extraValues = Array.isArray(extraConfig) ? extraConfig.flat(1) as any[] : Object.values(extraConfig); for (const builder of Object.values(extraValues)) { if (is(builder, IndexBuilder)) { indexes.push(builder.build(table)); } else if (is(builder, CheckBuilder)) { checks.push(builder.build(table)); } else if (is(builder, UniqueConstraintBuilder)) { uniqueConstraints.push(builder.build(table)); } else if (is(builder, PrimaryKeyBuilder)) { primaryKeys.push(builder.build(table)); } else if (is(builder, ForeignKeyBuilder)) { foreignKeys.push(builder.build(table)); } } } return { columns, indexes, foreignKeys, checks, primaryKeys, uniqueConstraints, name, }; } export function extractUsedTable(table: SQLiteTable | Subquery | SQLiteViewBase | SQL): string[] { if (is(table, SQLiteTable)) { return [`${table[Table.Symbol.BaseName]}`]; } if (is(table, Subquery)) { return table._.usedTables ?? []; } if (is(table, SQL)) { return table.usedTables ?? []; } return []; } export type OnConflict = 'rollback' | 'abort' | 'fail' | 'ignore' | 'replace'; export function getViewConfig< TName extends string = string, TExisting extends boolean = boolean, >(view: SQLiteView) { return { ...view[ViewBaseConfig], // ...view[SQLiteViewConfig], }; } ================================================ FILE: drizzle-orm/src/sqlite-core/view-base.ts ================================================ import { entityKind } from '~/entity.ts'; import type { ColumnsSelection } from '~/sql/sql.ts'; import { View } from '~/sql/sql.ts'; export abstract class SQLiteViewBase< TName extends string = string, TExisting extends boolean = boolean, TSelection extends ColumnsSelection = ColumnsSelection, > extends View { static override readonly [entityKind]: string = 'SQLiteViewBase'; declare _: View['_'] & { viewBrand: 'SQLiteView'; }; } ================================================ FILE: drizzle-orm/src/sqlite-core/view-common.ts ================================================ export const SQLiteViewConfig = Symbol.for('drizzle:SQLiteViewConfig'); ================================================ FILE: drizzle-orm/src/sqlite-core/view.ts ================================================ import type { BuildColumns } from '~/column-builder.ts'; import { entityKind } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import { getTableColumns } from '~/utils.ts'; import type { SQLiteColumn, SQLiteColumnBuilderBase } from './columns/common.ts'; import { QueryBuilder } from './query-builders/query-builder.ts'; import { sqliteTable } from './table.ts'; import { SQLiteViewBase } from './view-base.ts'; export interface ViewBuilderConfig { algorithm?: 'undefined' | 'merge' | 'temptable'; definer?: string; sqlSecurity?: 'definer' | 'invoker'; withCheckOption?: 'cascaded' | 'local'; } export class ViewBuilderCore< TConfig extends { name: string; columns?: unknown }, > { static readonly [entityKind]: string = 'SQLiteViewBuilderCore'; declare readonly _: { readonly name: TConfig['name']; readonly columns: TConfig['columns']; }; constructor( protected name: TConfig['name'], ) {} protected config: ViewBuilderConfig = {}; } export class ViewBuilder extends ViewBuilderCore<{ name: TName }> { static override readonly [entityKind]: string = 'SQLiteViewBuilder'; as( qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), ): SQLiteViewWithSelection> { if (typeof qb === 'function') { qb = qb(new QueryBuilder()); } const selectionProxy = new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }); // const aliasedSelectedFields = new Proxy(qb.getSelectedFields(), selectionProxy); const aliasedSelectedFields = qb.getSelectedFields(); return new Proxy( new SQLiteView({ // sqliteConfig: this.config, config: { name: this.name, schema: undefined, selectedFields: aliasedSelectedFields, query: qb.getSQL().inlineParams(), }, }), selectionProxy as any, ) as SQLiteViewWithSelection>; } } export class ManualViewBuilder< TName extends string = string, TColumns extends Record = Record, > extends ViewBuilderCore< { name: TName; columns: TColumns } > { static override readonly [entityKind]: string = 'SQLiteManualViewBuilder'; private columns: Record; constructor( name: TName, columns: TColumns, ) { super(name); this.columns = getTableColumns(sqliteTable(name, columns)) as BuildColumns; } existing(): SQLiteViewWithSelection> { return new Proxy( new SQLiteView({ config: { name: this.name, schema: undefined, selectedFields: this.columns, query: undefined, }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as SQLiteViewWithSelection>; } as(query: SQL): SQLiteViewWithSelection> { return new Proxy( new SQLiteView({ config: { name: this.name, schema: undefined, selectedFields: this.columns, query: query.inlineParams(), }, }), new SelectionProxyHandler({ alias: this.name, sqlBehavior: 'error', sqlAliasedBehavior: 'alias', replaceOriginalName: true, }), ) as SQLiteViewWithSelection>; } } export class SQLiteView< TName extends string = string, TExisting extends boolean = boolean, TSelection extends ColumnsSelection = ColumnsSelection, > extends SQLiteViewBase { static override readonly [entityKind]: string = 'SQLiteView'; constructor({ config }: { config: { name: TName; schema: string | undefined; selectedFields: ColumnsSelection; query: SQL | undefined; }; }) { super(config); } } export type SQLiteViewWithSelection< TName extends string, TExisting extends boolean, TSelection extends ColumnsSelection, > = SQLiteView & TSelection; export function sqliteView(name: TName): ViewBuilder; export function sqliteView>( name: TName, columns: TColumns, ): ManualViewBuilder; export function sqliteView( name: string, selection?: Record, ): ViewBuilder | ManualViewBuilder { if (selection) { return new ManualViewBuilder(name, selection); } return new ViewBuilder(name); } export const view = sqliteView; ================================================ FILE: drizzle-orm/src/sqlite-proxy/driver.ts ================================================ import type { BatchItem, BatchResponse } from '~/batch.ts'; import { entityKind } from '~/entity.ts'; import { DefaultLogger } from '~/logger.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig } from '~/relations.ts'; import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { BaseSQLiteDatabase } from '~/sqlite-core/db.ts'; import { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts'; import type { DrizzleConfig } from '~/utils.ts'; import { SQLiteRemoteSession } from './session.ts'; export interface SqliteRemoteResult { rows?: T[]; } export class SqliteRemoteDatabase< TSchema extends Record = Record, > extends BaseSQLiteDatabase<'async', SqliteRemoteResult, TSchema> { static override readonly [entityKind]: string = 'SqliteRemoteDatabase'; /** @internal */ declare readonly session: SQLiteRemoteSession>; async batch, T extends Readonly<[U, ...U[]]>>( batch: T, ): Promise> { return this.session.batch(batch) as Promise>; } } export type AsyncRemoteCallback = ( sql: string, params: any[], method: 'run' | 'all' | 'values' | 'get', ) => Promise<{ rows: any[] }>; export type AsyncBatchRemoteCallback = (batch: { sql: string; params: any[]; method: 'run' | 'all' | 'values' | 'get'; }[]) => Promise<{ rows: any[] }[]>; export type RemoteCallback = AsyncRemoteCallback; export function drizzle = Record>( callback: RemoteCallback, config?: DrizzleConfig, ): SqliteRemoteDatabase; export function drizzle = Record>( callback: RemoteCallback, batchCallback?: AsyncBatchRemoteCallback, config?: DrizzleConfig, ): SqliteRemoteDatabase; export function drizzle = Record>( callback: RemoteCallback, batchCallback?: AsyncBatchRemoteCallback | DrizzleConfig, config?: DrizzleConfig, ): SqliteRemoteDatabase { const dialect = new SQLiteAsyncDialect({ casing: config?.casing }); let logger; let cache; let _batchCallback: AsyncBatchRemoteCallback | undefined; let _config: DrizzleConfig = {}; if (batchCallback) { if (typeof batchCallback === 'function') { _batchCallback = batchCallback as AsyncBatchRemoteCallback; _config = config ?? {}; } else { _batchCallback = undefined; _config = batchCallback as DrizzleConfig; } if (_config.logger === true) { logger = new DefaultLogger(); } else if (_config.logger !== false) { logger = _config.logger; cache = _config.cache; } } let schema: RelationalSchemaConfig | undefined; if (_config.schema) { const tablesConfig = extractTablesRelationalConfig( _config.schema, createTableRelationsHelpers, ); schema = { fullSchema: _config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new SQLiteRemoteSession(callback, dialect, schema, _batchCallback, { logger, cache }); const db = new SqliteRemoteDatabase('async', dialect, session, schema) as SqliteRemoteDatabase; ( db).$cache = cache; if (( db).$cache) { ( db).$cache['invalidate'] = cache?.onMutate; } return db; } ================================================ FILE: drizzle-orm/src/sqlite-proxy/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/sqlite-proxy/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; import type { SqliteRemoteDatabase } from './driver.ts'; export type ProxyMigrator = (migrationQueries: string[]) => Promise; export async function migrate>( db: SqliteRemoteDatabase, callback: ProxyMigrator, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); const migrationsTable = typeof config === 'string' ? '__drizzle_migrations' : config.migrationsTable ?? '__drizzle_migrations'; const migrationTableCreate = sql` CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} ( id SERIAL PRIMARY KEY, hash text NOT NULL, created_at numeric ) `; await db.run(migrationTableCreate); const dbMigrations = await db.values<[number, string, string]>( sql`SELECT id, hash, created_at FROM ${sql.identifier(migrationsTable)} ORDER BY created_at DESC LIMIT 1`, ); const lastDbMigration = dbMigrations[0] ?? undefined; const queriesToRun: string[] = []; for (const migration of migrations) { if ( !lastDbMigration || Number(lastDbMigration[2])! < migration.folderMillis ) { queriesToRun.push( ...migration.sql, `INSERT INTO \`${migrationsTable}\` ("hash", "created_at") VALUES('${migration.hash}', '${migration.folderMillis}')`, ); } } await callback(queriesToRun); } ================================================ FILE: drizzle-orm/src/sqlite-proxy/session.ts ================================================ import type { BatchItem } from '~/batch.ts'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import type { PreparedQuery } from '~/session.ts'; import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; import type { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts'; import { SQLiteTransaction } from '~/sqlite-core/index.ts'; import type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts'; import type { PreparedQueryConfig as PreparedQueryConfigBase, SQLiteExecuteMethod, SQLiteTransactionConfig, } from '~/sqlite-core/session.ts'; import { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts'; import { mapResultRow } from '~/utils.ts'; import type { AsyncBatchRemoteCallback, AsyncRemoteCallback, RemoteCallback, SqliteRemoteResult } from './driver.ts'; export interface SQLiteRemoteSessionOptions { logger?: Logger; cache?: Cache; } export type PreparedQueryConfig = Omit; export class SQLiteRemoteSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteSession<'async', SqliteRemoteResult, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'SQLiteRemoteSession'; private logger: Logger; private cache: Cache; constructor( private client: RemoteCallback, dialect: SQLiteAsyncDialect, private schema: RelationalSchemaConfig | undefined, private batchCLient?: AsyncBatchRemoteCallback, options: SQLiteRemoteSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery>( query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => unknown, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): RemotePreparedQuery { return new RemotePreparedQuery( this.client, query, this.logger, this.cache, queryMetadata, cacheConfig, fields, executeMethod, isResponseInArrayMode, customResultMapper, ); } async batch[] | readonly BatchItem<'sqlite'>[]>(queries: T) { const preparedQueries: PreparedQuery[] = []; const builtQueries: { sql: string; params: any[]; method: 'run' | 'all' | 'values' | 'get' }[] = []; for (const query of queries) { const preparedQuery = query._prepare(); const builtQuery = (preparedQuery as RemotePreparedQuery).getQuery(); preparedQueries.push(preparedQuery); builtQueries.push({ sql: builtQuery.sql, params: builtQuery.params, method: builtQuery.method }); } const batchResults = await (this.batchCLient as AsyncBatchRemoteCallback)(builtQueries); return batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true)); } override async transaction( transaction: (tx: SQLiteProxyTransaction) => Promise, config?: SQLiteTransactionConfig, ): Promise { const tx = new SQLiteProxyTransaction('async', this.dialect, this, this.schema); await this.run(sql.raw(`begin${config?.behavior ? ' ' + config.behavior : ''}`)); try { const result = await transaction(tx); await this.run(sql`commit`); return result; } catch (err) { await this.run(sql`rollback`); throw err; } } override extractRawAllValueFromBatchResult(result: unknown): unknown { return (result as SqliteRemoteResult).rows; } override extractRawGetValueFromBatchResult(result: unknown): unknown { return (result as SqliteRemoteResult).rows![0]; } override extractRawValuesValueFromBatchResult(result: unknown): unknown { return (result as SqliteRemoteResult).rows; } } export class SQLiteProxyTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends SQLiteTransaction<'async', SqliteRemoteResult, TFullSchema, TSchema> { static override readonly [entityKind]: string = 'SQLiteProxyTransaction'; override async transaction( transaction: (tx: SQLiteProxyTransaction) => Promise, ): Promise { const savepointName = `sp${this.nestedIndex}`; const tx = new SQLiteProxyTransaction('async', this.dialect, this.session, this.schema, this.nestedIndex + 1); await this.session.run(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await this.session.run(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { await this.session.run(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export class RemotePreparedQuery extends SQLitePreparedQuery< { type: 'async'; run: SqliteRemoteResult; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] } > { static override readonly [entityKind]: string = 'SQLiteProxyPreparedQuery'; private method: SQLiteExecuteMethod; constructor( private client: RemoteCallback, query: Query, private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, private _isResponseInArrayMode: boolean, /** @internal */ public customResultMapper?: ( rows: unknown[][], mapColumnValue?: (value: unknown) => unknown, ) => unknown, ) { super('async', executeMethod, query, cache, queryMetadata, cacheConfig); this.customResultMapper = customResultMapper; this.method = executeMethod; } override getQuery(): Query & { method: SQLiteExecuteMethod } { return { ...this.query, method: this.method }; } async run(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return await this.queryWithCache(this.query.sql, params, async () => { return await (this.client as AsyncRemoteCallback)(this.query.sql, params, 'run'); }); } override mapAllResult(rows: unknown, isFromBatch?: boolean): unknown { if (isFromBatch) { rows = (rows as SqliteRemoteResult).rows; } if (!this.fields && !this.customResultMapper) { return rows; } if (this.customResultMapper) { return this.customResultMapper(rows as unknown[][]) as T['all']; } return (rows as unknown[][]).map((row) => { return mapResultRow( this.fields!, row, this.joinsNotNullableMap, ); }); } async all(placeholderValues?: Record): Promise { const { query, logger, client } = this; const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); const { rows } = await this.queryWithCache(query.sql, params, async () => { return await (client as AsyncRemoteCallback)(query.sql, params, 'all'); }); return this.mapAllResult(rows); } async get(placeholderValues?: Record): Promise { const { query, logger, client } = this; const params = fillPlaceholders(query.params, placeholderValues ?? {}); logger.logQuery(query.sql, params); const clientResult = await this.queryWithCache(query.sql, params, async () => { return await (client as AsyncRemoteCallback)(query.sql, params, 'get'); }); return this.mapGetResult(clientResult.rows); } override mapGetResult(rows: unknown, isFromBatch?: boolean): unknown { if (isFromBatch) { rows = (rows as SqliteRemoteResult).rows; } const row = rows as unknown[]; if (!this.fields && !this.customResultMapper) { return row; } if (!row) { return undefined; } if (this.customResultMapper) { return this.customResultMapper([rows] as unknown[][]) as T['get']; } return mapResultRow( this.fields!, row, this.joinsNotNullableMap, ); } async values(placeholderValues?: Record): Promise { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); const clientResult = await this.queryWithCache(this.query.sql, params, async () => { return await (this.client as AsyncRemoteCallback)(this.query.sql, params, 'values'); }); return clientResult.rows as T[]; } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } ================================================ FILE: drizzle-orm/src/subquery.ts ================================================ import { entityKind } from './entity.ts'; import type { SQL, SQLWrapper } from './sql/sql.ts'; export interface Subquery< // eslint-disable-next-line @typescript-eslint/no-unused-vars TAlias extends string = string, // eslint-disable-next-line @typescript-eslint/no-unused-vars TSelectedFields extends Record = Record, > extends SQLWrapper { // SQLWrapper runtime implementation is defined in 'sql/sql.ts' } export class Subquery< TAlias extends string = string, TSelectedFields extends Record = Record, > implements SQLWrapper { static readonly [entityKind]: string = 'Subquery'; declare _: { brand: 'Subquery'; sql: SQL; selectedFields: TSelectedFields; alias: TAlias; isWith: boolean; usedTables?: string[]; }; constructor(sql: SQL, fields: TSelectedFields, alias: string, isWith = false, usedTables: string[] = []) { this._ = { brand: 'Subquery', sql, selectedFields: fields as TSelectedFields, alias: alias as TAlias, isWith, usedTables, }; } // getSQL(): SQL { // return new SQL([this]); // } } export class WithSubquery< TAlias extends string = string, TSelection extends Record = Record, > extends Subquery { static override readonly [entityKind]: string = 'WithSubquery'; } export type WithSubqueryWithoutSelection = WithSubquery; ================================================ FILE: drizzle-orm/src/supabase/index.ts ================================================ export * from './rls.ts'; ================================================ FILE: drizzle-orm/src/supabase/rls.ts ================================================ import { bigserial, pgSchema, text, timestamp, uuid, varchar } from '~/pg-core/index.ts'; import { pgRole } from '~/pg-core/roles.ts'; import { sql } from '~/sql/sql.ts'; export const anonRole = pgRole('anon').existing(); export const authenticatedRole = pgRole('authenticated').existing(); export const serviceRole = pgRole('service_role').existing(); export const postgresRole = pgRole('postgres_role').existing(); export const supabaseAuthAdminRole = pgRole('supabase_auth_admin').existing(); /* ------------------------------ auth schema; ------------------------------ */ const auth = pgSchema('auth'); export const authUsers = auth.table('users', { id: uuid().primaryKey().notNull(), email: varchar({ length: 255 }), phone: text().unique(), emailConfirmedAt: timestamp('email_confirmed_at', { withTimezone: true }), phoneConfirmedAt: timestamp('phone_confirmed_at', { withTimezone: true }), lastSignInAt: timestamp('last_sign_in_at', { withTimezone: true }), createdAt: timestamp('created_at', { withTimezone: true }), updatedAt: timestamp('updated_at', { withTimezone: true }), }); /* ------------------------------ realtime schema; ------------------------------- */ const realtime = pgSchema('realtime'); export const realtimeMessages = realtime.table( 'messages', { id: bigserial({ mode: 'bigint' }).primaryKey(), topic: text().notNull(), extension: text({ enum: ['presence', 'broadcast', 'postgres_changes'], }).notNull(), }, ); export const authUid = sql`(select auth.uid())`; export const realtimeTopic = sql`realtime.topic()`; ================================================ FILE: drizzle-orm/src/table.ts ================================================ import type { Column, GetColumnData } from './column.ts'; import { entityKind } from './entity.ts'; import type { OptionalKeyOnly, RequiredKeyOnly } from './operations.ts'; import type { SQLWrapper } from './sql/sql.ts'; import { TableName } from './table.utils.ts'; import type { Simplify, Update } from './utils.ts'; export interface TableConfig> { name: string; schema: string | undefined; columns: Record; dialect: string; } export type UpdateTableConfig> = Required< Update >; /** @internal */ export const Schema = Symbol.for('drizzle:Schema'); /** @internal */ export const Columns = Symbol.for('drizzle:Columns'); /** @internal */ export const ExtraConfigColumns = Symbol.for('drizzle:ExtraConfigColumns'); /** @internal */ export const OriginalName = Symbol.for('drizzle:OriginalName'); /** @internal */ export const BaseName = Symbol.for('drizzle:BaseName'); /** @internal */ export const IsAlias = Symbol.for('drizzle:IsAlias'); /** @internal */ export const ExtraConfigBuilder = Symbol.for('drizzle:ExtraConfigBuilder'); const IsDrizzleTable = Symbol.for('drizzle:IsDrizzleTable'); export interface Table< // eslint-disable-next-line @typescript-eslint/no-unused-vars T extends TableConfig = TableConfig, > extends SQLWrapper { // SQLWrapper runtime implementation is defined in 'sql/sql.ts' } export class Table implements SQLWrapper { static readonly [entityKind]: string = 'Table'; declare readonly _: { readonly brand: 'Table'; readonly config: T; readonly name: T['name']; readonly schema: T['schema']; readonly columns: T['columns']; readonly inferSelect: InferSelectModel>; readonly inferInsert: InferInsertModel>; }; declare readonly $inferSelect: InferSelectModel>; declare readonly $inferInsert: InferInsertModel>; /** @internal */ static readonly Symbol = { Name: TableName as typeof TableName, Schema: Schema as typeof Schema, OriginalName: OriginalName as typeof OriginalName, Columns: Columns as typeof Columns, ExtraConfigColumns: ExtraConfigColumns as typeof ExtraConfigColumns, BaseName: BaseName as typeof BaseName, IsAlias: IsAlias as typeof IsAlias, ExtraConfigBuilder: ExtraConfigBuilder as typeof ExtraConfigBuilder, }; /** * @internal * Can be changed if the table is aliased. */ [TableName]: string; /** * @internal * Used to store the original name of the table, before any aliasing. */ [OriginalName]: string; /** @internal */ [Schema]: string | undefined; /** @internal */ [Columns]!: T['columns']; /** @internal */ [ExtraConfigColumns]!: Record; /** * @internal * Used to store the table name before the transformation via the `tableCreator` functions. */ [BaseName]: string; /** @internal */ [IsAlias] = false; /** @internal */ [IsDrizzleTable] = true; /** @internal */ [ExtraConfigBuilder]: ((self: any) => Record | unknown[]) | undefined = undefined; constructor(name: string, schema: string | undefined, baseName: string) { this[TableName] = this[OriginalName] = name; this[Schema] = schema; this[BaseName] = baseName; } } export function isTable(table: unknown): table is Table { return typeof table === 'object' && table !== null && IsDrizzleTable in table; } /** * Any table with a specified boundary. * * @example ```ts // Any table with a specific name type AnyUsersTable = AnyTable<{ name: 'users' }>; ``` * * To describe any table with any config, simply use `Table` without any type arguments, like this: * ```ts function needsTable(table: Table) { ... } ``` */ export type AnyTable> = Table>; export function getTableName(table: T): T['_']['name'] { return table[TableName]; } export function getTableUniqueName(table: T): `${T['_']['schema']}.${T['_']['name']}` { return `${table[Schema] ?? 'public'}.${table[TableName]}`; } export type MapColumnName = TDBColumNames extends true ? TColumn['_']['name'] : TName; export type InferModelFromColumns< TColumns extends Record, TInferMode extends 'select' | 'insert' = 'select', TConfig extends { dbColumnNames: boolean; override?: boolean } = { dbColumnNames: false; override: false }, > = Simplify< TInferMode extends 'insert' ? & { [ Key in keyof TColumns & string as RequiredKeyOnly< MapColumnName, TColumns[Key] > ]: GetColumnData; } & { [ Key in keyof TColumns & string as OptionalKeyOnly< MapColumnName, TColumns[Key], TConfig['override'] > ]?: GetColumnData | undefined; } : { [ Key in keyof TColumns & string as MapColumnName< Key, TColumns[Key], TConfig['dbColumnNames'] > ]: GetColumnData; } >; /** @deprecated Use one of the alternatives: {@link InferSelectModel} / {@link InferInsertModel}, or `table.$inferSelect` / `table.$inferInsert` */ export type InferModel< TTable extends Table, TInferMode extends 'select' | 'insert' = 'select', TConfig extends { dbColumnNames: boolean } = { dbColumnNames: false }, > = InferModelFromColumns; export type InferSelectModel< TTable extends Table, TConfig extends { dbColumnNames: boolean } = { dbColumnNames: false }, > = InferModelFromColumns; export type InferInsertModel< TTable extends Table, TConfig extends { dbColumnNames: boolean; override?: boolean } = { dbColumnNames: false; override: false }, > = InferModelFromColumns; export type InferEnum = T extends { enumValues: readonly (infer U)[] } ? U : never; ================================================ FILE: drizzle-orm/src/table.utils.ts ================================================ /** @internal */ export const TableName = Symbol.for('drizzle:Name'); ================================================ FILE: drizzle-orm/src/tidb-serverless/driver.ts ================================================ import { type Config, connect, type Connection } from '@tidbcloud/serverless'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { MySqlDatabase } from '~/mysql-core/db.ts'; import { MySqlDialect } from '~/mysql-core/dialect.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import type { TiDBServerlessPreparedQueryHKT, TiDBServerlessQueryResultHKT } from './session.ts'; import { TiDBServerlessSession } from './session.ts'; export interface TiDBServerlessSDriverOptions { logger?: Logger; cache?: Cache; } export class TiDBServerlessDatabase< TSchema extends Record = Record, > extends MySqlDatabase { static override readonly [entityKind]: string = 'TiDBServerlessDatabase'; } function construct = Record>( client: Connection, config: DrizzleConfig = {}, ): TiDBServerlessDatabase & { $client: Connection; } { const dialect = new MySqlDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const session = new TiDBServerlessSession(client, dialect, undefined, schema, { logger, cache: config.cache }); const db = new TiDBServerlessDatabase(dialect, session, schema as any, 'default') as TiDBServerlessDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends Connection = Connection, >( ...params: [ TClient | string, ] | [ TClient | string, DrizzleConfig, ] | [ & ({ connection: string | Config; } | { client: TClient; }) & DrizzleConfig, ] ): TiDBServerlessDatabase & { $client: TClient; } { if (typeof params[0] === 'string') { const instance = connect({ url: params[0], }); return construct(instance, params[1]) as any; } if (isConfig(params[0])) { const { connection, client, ...drizzleConfig } = params[0] as & { connection?: Config | string; client?: TClient } & DrizzleConfig; if (client) return construct(client, drizzleConfig) as any; const instance = typeof connection === 'string' ? connect({ url: connection, }) : connect(connection!); return construct(instance, drizzleConfig) as any; } return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): TiDBServerlessDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/tidb-serverless/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/tidb-serverless/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { TiDBServerlessDatabase } from './driver.ts'; export async function migrate>( db: TiDBServerlessDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/tidb-serverless/session.ts ================================================ import type { Connection, ExecuteOptions, FullResult, Tx } from '@tidbcloud/serverless'; import { type Cache, NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { Column } from '~/column.ts'; import { entityKind, is } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { MySqlDialect } from '~/mysql-core/dialect.ts'; import type { SelectedFieldsOrdered } from '~/mysql-core/query-builders/select.types.ts'; import { MySqlPreparedQuery, type MySqlPreparedQueryConfig, type MySqlPreparedQueryHKT, type MySqlQueryResultHKT, MySqlSession, MySqlTransaction, } from '~/mysql-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; const executeRawConfig = { fullResult: true } satisfies ExecuteOptions; const queryConfig = { arrayMode: true } satisfies ExecuteOptions; export class TiDBServerlessPreparedQuery extends MySqlPreparedQuery { static override readonly [entityKind]: string = 'TiDBPreparedQuery'; constructor( private client: Tx | Connection, private queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, private customResultMapper?: (rows: unknown[][]) => T['execute'], // Keys that were used in $default and the value that was generated for them private generatedIds?: Record[], // Keys that should be returned, it has the column with all properries + key from object private returningIds?: SelectedFieldsOrdered, ) { super(cache, queryMetadata, cacheConfig); } async execute(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.queryString, params); const { fields, client, queryString, joinsNotNullableMap, customResultMapper, returningIds, generatedIds } = this; if (!fields && !customResultMapper) { const res = await this.queryWithCache(queryString, params, async () => { return await client.execute(queryString, params, executeRawConfig) as FullResult; }); const insertId = res.lastInsertId ?? 0; const affectedRows = res.rowsAffected ?? 0; // for each row, I need to check keys from if (returningIds) { const returningResponse = []; let j = 0; for (let i = insertId; i < insertId + affectedRows; i++) { for (const column of returningIds) { const key = returningIds[0]!.path[0]!; if (is(column.field, Column)) { // @ts-ignore if (column.field.primary && column.field.autoIncrement) { returningResponse.push({ [key]: i }); } if (column.field.defaultFn && generatedIds) { // generatedIds[rowIdx][key] returningResponse.push({ [key]: generatedIds[j]![key] }); } } } j++; } return returningResponse; } return res; } const rows = await this.queryWithCache(queryString, params, async () => { return await client.execute(queryString, params, queryConfig) as unknown[][]; }); if (customResultMapper) { return customResultMapper(rows); } return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } override iterator(_placeholderValues?: Record): AsyncGenerator { throw new Error('Streaming is not supported by the TiDB Cloud Serverless driver'); } } export interface TiDBServerlessSessionOptions { logger?: Logger; cache?: Cache; } export class TiDBServerlessSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlSession { static override readonly [entityKind]: string = 'TiDBServerlessSession'; private logger: Logger; private client: Tx | Connection; private cache: Cache; constructor( private baseClient: Connection, dialect: MySqlDialect, tx: Tx | undefined, private schema: RelationalSchemaConfig | undefined, private options: TiDBServerlessSessionOptions = {}, ) { super(dialect); this.client = tx ?? baseClient; this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, customResultMapper?: (rows: unknown[][]) => T['execute'], generatedIds?: Record[], returningIds?: SelectedFieldsOrdered, queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): MySqlPreparedQuery { return new TiDBServerlessPreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, customResultMapper, generatedIds, returningIds, ); } override all(query: SQL): Promise { const querySql = this.dialect.sqlToQuery(query); this.logger.logQuery(querySql.sql, querySql.params); return this.client.execute(querySql.sql, querySql.params) as Promise; } override async count(sql: SQL): Promise { const res = await this.execute<{ rows: [{ count: string }] }>(sql); return Number( res['rows'][0]['count'], ); } override async transaction( transaction: (tx: TiDBServerlessTransaction) => Promise, ): Promise { const nativeTx = await this.baseClient.begin(); try { const session = new TiDBServerlessSession(this.baseClient, this.dialect, nativeTx, this.schema, this.options); const tx = new TiDBServerlessTransaction( this.dialect, session as MySqlSession, this.schema, ); const result = await transaction(tx); await nativeTx.commit(); return result; } catch (err) { await nativeTx.rollback(); throw err; } } } export class TiDBServerlessTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends MySqlTransaction { static override readonly [entityKind]: string = 'TiDBServerlessTransaction'; constructor( dialect: MySqlDialect, session: MySqlSession, schema: RelationalSchemaConfig | undefined, nestedIndex = 0, ) { super(dialect, session, schema, nestedIndex, 'default'); } override async transaction( transaction: (tx: TiDBServerlessTransaction) => Promise, ): Promise { const savepointName = `sp${this.nestedIndex + 1}`; const tx = new TiDBServerlessTransaction( this.dialect, this.session, this.schema, this.nestedIndex + 1, ); await tx.execute(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await tx.execute(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export interface TiDBServerlessQueryResultHKT extends MySqlQueryResultHKT { type: FullResult; } export interface TiDBServerlessPreparedQueryHKT extends MySqlPreparedQueryHKT { type: TiDBServerlessPreparedQuery>; } ================================================ FILE: drizzle-orm/src/tracing-utils.ts ================================================ export function iife(fn: (...args: T) => U, ...args: T): U { return fn(...args); } ================================================ FILE: drizzle-orm/src/tracing.ts ================================================ import type { Span, Tracer } from '@opentelemetry/api'; import { iife } from '~/tracing-utils.ts'; import { npmVersion } from '~/version.ts'; let otel: typeof import('@opentelemetry/api') | undefined; let rawTracer: Tracer | undefined; // try { // otel = await import('@opentelemetry/api'); // } catch (err: any) { // if (err.code !== 'MODULE_NOT_FOUND' && err.code !== 'ERR_MODULE_NOT_FOUND') { // throw err; // } // } type SpanName = | 'drizzle.operation' | 'drizzle.prepareQuery' | 'drizzle.buildSQL' | 'drizzle.execute' | 'drizzle.driver.execute' | 'drizzle.mapResponse'; /** @internal */ export const tracer = { startActiveSpan unknown>(name: SpanName, fn: F): ReturnType { if (!otel) { return fn() as ReturnType; } if (!rawTracer) { rawTracer = otel.trace.getTracer('drizzle-orm', npmVersion); } return iife( (otel, rawTracer) => rawTracer.startActiveSpan( name, ((span: Span) => { try { return fn(span); } catch (e) { span.setStatus({ code: otel.SpanStatusCode.ERROR, message: e instanceof Error ? e.message : 'Unknown error', // eslint-disable-line no-instanceof/no-instanceof }); throw e; } finally { span.end(); } }) as F, ), otel, rawTracer, ); }, }; ================================================ FILE: drizzle-orm/src/utils.ts ================================================ import type { Cache } from './cache/core/cache.ts'; import type { AnyColumn } from './column.ts'; import { Column } from './column.ts'; import { is } from './entity.ts'; import type { Logger } from './logger.ts'; import type { SelectedFieldsOrdered } from './operations.ts'; import type { TableLike } from './query-builders/select.types.ts'; import { Param, SQL, View } from './sql/sql.ts'; import type { DriverValueDecoder } from './sql/sql.ts'; import { Subquery } from './subquery.ts'; import { getTableName, Table } from './table.ts'; import { ViewBaseConfig } from './view-common.ts'; /** @internal */ export function mapResultRow( columns: SelectedFieldsOrdered, row: unknown[], joinsNotNullableMap: Record | undefined, ): TResult { // Key -> nested object key, value -> table name if all fields in the nested object are from the same table, false otherwise const nullifyMap: Record = {}; const result = columns.reduce>( (result, { path, field }, columnIndex) => { let decoder: DriverValueDecoder; if (is(field, Column)) { decoder = field; } else if (is(field, SQL)) { decoder = field.decoder; } else if (is(field, Subquery)) { decoder = field._.sql.decoder; } else { decoder = field.sql.decoder; } let node = result; for (const [pathChunkIndex, pathChunk] of path.entries()) { if (pathChunkIndex < path.length - 1) { if (!(pathChunk in node)) { node[pathChunk] = {}; } node = node[pathChunk]; } else { const rawValue = row[columnIndex]!; const value = node[pathChunk] = rawValue === null ? null : decoder.mapFromDriverValue(rawValue); if (joinsNotNullableMap && is(field, Column) && path.length === 2) { const objectName = path[0]!; if (!(objectName in nullifyMap)) { nullifyMap[objectName] = value === null ? getTableName(field.table) : false; } else if ( typeof nullifyMap[objectName] === 'string' && nullifyMap[objectName] !== getTableName(field.table) ) { nullifyMap[objectName] = false; } } } } return result; }, {}, ); // Nullify all nested objects from nullifyMap that are nullable if (joinsNotNullableMap && Object.keys(nullifyMap).length > 0) { for (const [objectName, tableName] of Object.entries(nullifyMap)) { if (typeof tableName === 'string' && !joinsNotNullableMap[tableName]) { result[objectName] = null; } } } return result as TResult; } /** @internal */ export function orderSelectedFields( fields: Record, pathPrefix?: string[], ): SelectedFieldsOrdered { return Object.entries(fields).reduce>((result, [name, field]) => { if (typeof name !== 'string') { return result; } const newPath = pathPrefix ? [...pathPrefix, name] : [name]; if (is(field, Column) || is(field, SQL) || is(field, SQL.Aliased) || is(field, Subquery)) { result.push({ path: newPath, field }); } else if (is(field, Table)) { result.push(...orderSelectedFields(field[Table.Symbol.Columns], newPath)); } else { result.push(...orderSelectedFields(field as Record, newPath)); } return result; }, []) as SelectedFieldsOrdered; } export function haveSameKeys(left: Record, right: Record) { const leftKeys = Object.keys(left); const rightKeys = Object.keys(right); if (leftKeys.length !== rightKeys.length) { return false; } for (const [index, key] of leftKeys.entries()) { if (key !== rightKeys[index]) { return false; } } return true; } /** @internal */ export function mapUpdateSet(table: Table, values: Record): UpdateSet { const entries: [string, UpdateSet[string]][] = Object.entries(values) .filter(([, value]) => value !== undefined) .map(([key, value]) => { // eslint-disable-next-line unicorn/prefer-ternary if (is(value, SQL) || is(value, Column)) { return [key, value]; } else { return [key, new Param(value, table[Table.Symbol.Columns][key])]; } }); if (entries.length === 0) { throw new Error('No values to set'); } return Object.fromEntries(entries); } export type UpdateSet = Record; export type OneOrMany = T | T[]; export type Update = & { [K in Exclude]: T[K]; } & TUpdate; export type Simplify = & { // @ts-ignore - "Type parameter 'K' has a circular constraint", not sure why [K in keyof T]: T[K]; } & {}; export type Not = T extends true ? false : true; export type IsNever = [T] extends [never] ? true : false; export type IsUnion = (T extends any ? (U extends T ? false : true) : never) extends false ? false : true; export type SingleKeyObject = IsNever extends true ? never : IsUnion extends true ? DrizzleTypeError : T; export type FromSingleKeyObject = IsNever extends true ? never : IsUnion extends true ? DrizzleTypeError : Result; export type SimplifyMappedType = [T] extends [unknown] ? T : never; export type ShallowRecord = SimplifyMappedType<{ [P in K]: T }>; export type Assume = T extends U ? T : U; export type Equal = (() => T extends X ? 1 : 2) extends (() => T extends Y ? 1 : 2) ? true : false; export interface DrizzleTypeError { $drizzleTypeError: T; } export type ValueOrArray = T | T[]; /** @internal */ export function applyMixins(baseClass: any, extendedClasses: any[]) { for (const extendedClass of extendedClasses) { for (const name of Object.getOwnPropertyNames(extendedClass.prototype)) { if (name === 'constructor') continue; Object.defineProperty( baseClass.prototype, name, Object.getOwnPropertyDescriptor(extendedClass.prototype, name) || Object.create(null), ); } } } export type Or = T1 extends true ? true : T2 extends true ? true : false; export type IfThenElse = If extends true ? Then : Else; export type PromiseOf = T extends Promise ? U : T; export type Writable = { -readonly [P in keyof T]: T[P]; }; export type NonArray = T extends any[] ? never : T; export function getTableColumns(table: T): T['_']['columns'] { return table[Table.Symbol.Columns]; } export function getViewSelectedFields(view: T): T['_']['selectedFields'] { return view[ViewBaseConfig].selectedFields; } /** @internal */ export function getTableLikeName(table: TableLike): string | undefined { return is(table, Subquery) ? table._.alias : is(table, View) ? table[ViewBaseConfig].name : is(table, SQL) ? undefined : table[Table.Symbol.IsAlias] ? table[Table.Symbol.Name] : table[Table.Symbol.BaseName]; } export type ColumnsWithTable< TTableName extends string, TForeignTableName extends string, TColumns extends AnyColumn<{ tableName: TTableName }>[], > = { [Key in keyof TColumns]: AnyColumn<{ tableName: TForeignTableName }> }; export type Casing = 'snake_case' | 'camelCase'; export interface DrizzleConfig = Record> { logger?: boolean | Logger; schema?: TSchema; casing?: Casing; cache?: Cache; } export type ValidateShape = T extends ValidShape ? Exclude extends never ? TResult : DrizzleTypeError< `Invalid key(s): ${Exclude<(keyof T) & (string | number | bigint | boolean | null | undefined), keyof ValidShape>}` > : never; export type KnownKeysOnly = { [K in keyof T]: K extends keyof U ? T[K] : never; }; export type IsAny = 0 extends (1 & T) ? true : false; /** @internal */ export function getColumnNameAndConfig< TConfig extends Record | undefined, >(a: string | TConfig | undefined, b: TConfig | undefined) { return { name: typeof a === 'string' && a.length > 0 ? a : '' as string, config: typeof a === 'object' ? a : b as TConfig, }; } export type IfNotImported = unknown extends T ? Y : N; export type ImportTypeError = `Please install \`${TPackageName}\` to allow Drizzle ORM to connect to the database`; export type RequireAtLeastOne = Keys extends any ? Required> & Partial> : never; type ExpectedConfigShape = { logger?: boolean | { logQuery(query: string, params: unknown[]): void; }; schema?: Record; casing?: 'snake_case' | 'camelCase'; }; // If this errors, you must update config shape checker function with new config specs const _: DrizzleConfig = {} as ExpectedConfigShape; const __: ExpectedConfigShape = {} as DrizzleConfig; export function isConfig(data: any): boolean { if (typeof data !== 'object' || data === null) return false; if (data.constructor.name !== 'Object') return false; if ('logger' in data) { const type = typeof data['logger']; if ( type !== 'boolean' && (type !== 'object' || typeof data['logger']['logQuery'] !== 'function') && type !== 'undefined' ) return false; return true; } if ('schema' in data) { const type = typeof data['schema']; if (type !== 'object' && type !== 'undefined') return false; return true; } if ('casing' in data) { const type = typeof data['casing']; if (type !== 'string' && type !== 'undefined') return false; return true; } if ('mode' in data) { if (data['mode'] !== 'default' || data['mode'] !== 'planetscale' || data['mode'] !== undefined) return false; return true; } if ('connection' in data) { const type = typeof data['connection']; if (type !== 'string' && type !== 'object' && type !== 'undefined') return false; return true; } if ('client' in data) { const type = typeof data['client']; if (type !== 'object' && type !== 'function' && type !== 'undefined') return false; return true; } if (Object.keys(data).length === 0) return true; return false; } export type NeonAuthToken = string | (() => string | Promise); export const textDecoder = typeof TextDecoder === 'undefined' ? null : new TextDecoder(); ================================================ FILE: drizzle-orm/src/vercel-postgres/driver.ts ================================================ import { sql } from '@vercel/postgres'; import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/index.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig, type RelationalSchemaConfig, type TablesRelationalConfig, } from '~/relations.ts'; import { type DrizzleConfig, isConfig } from '~/utils.ts'; import { type VercelPgClient, type VercelPgQueryResultHKT, VercelPgSession } from './session.ts'; export interface VercelPgDriverOptions { logger?: Logger; cache?: Cache; } export class VercelPgDriver { static readonly [entityKind]: string = 'VercelPgDriver'; constructor( private client: VercelPgClient, private dialect: PgDialect, private options: VercelPgDriverOptions = {}, ) { } createSession( schema: RelationalSchemaConfig | undefined, ): VercelPgSession, TablesRelationalConfig> { return new VercelPgSession(this.client, this.dialect, schema, { logger: this.options.logger, cache: this.options.cache, }); } } export class VercelPgDatabase< TSchema extends Record = Record, > extends PgDatabase { static override readonly [entityKind]: string = 'VercelPgDatabase'; } function construct = Record>( client: VercelPgClient, config: DrizzleConfig = {}, ): VercelPgDatabase & { $client: VercelPgClient; } { const dialect = new PgDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig( config.schema, createTableRelationsHelpers, ); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const driver = new VercelPgDriver(client, dialect, { logger, cache: config.cache }); const session = driver.createSession(schema); const db = new VercelPgDatabase(dialect, session, schema as any) as VercelPgDatabase; ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } export function drizzle< TSchema extends Record = Record, TClient extends VercelPgClient = typeof sql, >( ...params: [] | [ TClient, ] | [ TClient, DrizzleConfig, ] | [ ( & DrizzleConfig & ({ client?: TClient; }) ), ] ): VercelPgDatabase & { $client: VercelPgClient extends TClient ? typeof sql : TClient; } { if (isConfig(params[0])) { const { client, ...drizzleConfig } = params[0] as ({ client?: TClient } & DrizzleConfig); return construct(client ?? sql, drizzleConfig) as any; } return construct((params[0] ?? sql) as TClient, params[1] as DrizzleConfig | undefined) as any; } export namespace drizzle { export function mock = Record>( config?: DrizzleConfig, ): VercelPgDatabase & { $client: '$client is not available on drizzle.mock()'; } { return construct({} as any, config) as any; } } ================================================ FILE: drizzle-orm/src/vercel-postgres/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/vercel-postgres/migrator.ts ================================================ import type { MigrationConfig } from '~/migrator.ts'; import { readMigrationFiles } from '~/migrator.ts'; import type { VercelPgDatabase } from './driver.ts'; export async function migrate>( db: VercelPgDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); await db.dialect.migrate(migrations, db.session, config); } ================================================ FILE: drizzle-orm/src/vercel-postgres/session.ts ================================================ import { type QueryArrayConfig, type QueryConfig, type QueryResult, type QueryResultRow, types, type VercelClient, VercelPool, type VercelPoolClient, } from '@vercel/postgres'; import type { Cache } from '~/cache/core/cache.ts'; import { NoopCache } from '~/cache/core/cache.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import { type Logger, NoopLogger } from '~/logger.ts'; import { type PgDialect, PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; export type VercelPgClient = VercelPool | VercelClient | VercelPoolClient; export class VercelPgPreparedQuery extends PgPreparedQuery { static override readonly [entityKind]: string = 'VercelPgPreparedQuery'; private rawQuery: QueryConfig; private queryConfig: QueryArrayConfig; constructor( private client: VercelPgClient, queryString: string, private params: unknown[], private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, name: string | undefined, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => T['execute'], ) { super({ sql: queryString, params }, cache, queryMetadata, cacheConfig); this.rawQuery = { name, text: queryString, types: { // @ts-ignore getTypeParser: (typeId, format) => { if (typeId === types.builtins.TIMESTAMPTZ) { return (val: any) => val; } if (typeId === types.builtins.TIMESTAMP) { return (val: any) => val; } if (typeId === types.builtins.DATE) { return (val: any) => val; } if (typeId === types.builtins.INTERVAL) { return (val: any) => val; } // numeric[] if (typeId === 1231 as any) { return (val: any) => val; } // timestamp[] if (typeId === 1115 as any) { return (val: any) => val; } // timestamp with timezone[] if (typeId === 1185 as any) { return (val: any) => val; } // interval[] if (typeId === 1187 as any) { return (val: any) => val; } // date[] if (typeId === 1182 as any) { return (val: any) => val; } // @ts-ignore return types.getTypeParser(typeId, format); }, }, }; this.queryConfig = { name, text: queryString, rowMode: 'array', types: { // @ts-ignore getTypeParser: (typeId, format) => { if (typeId === types.builtins.TIMESTAMPTZ) { return (val: any) => val; } if (typeId === types.builtins.TIMESTAMP) { return (val: any) => val; } if (typeId === types.builtins.DATE) { return (val: any) => val; } if (typeId === types.builtins.INTERVAL) { return (val: any) => val; } // numeric[] if (typeId === 1231 as any) { return (val: any) => val; } // timestamp[] if (typeId === 1115 as any) { return (val: any) => val; } // timestamp with timezone[] if (typeId === 1185 as any) { return (val: any) => val; } // interval[] if (typeId === 1187 as any) { return (val: any) => val; } // date[] if (typeId === 1182 as any) { return (val: any) => val; } // @ts-ignore return types.getTypeParser(typeId, format); }, }, }; } async execute(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.rawQuery.text, params); const { fields, rawQuery, client, queryConfig: query, joinsNotNullableMap, customResultMapper } = this; if (!fields && !customResultMapper) { return this.queryWithCache(rawQuery.text, params, async () => { return await client.query(rawQuery, params); }); } const { rows } = await this.queryWithCache(query.text, params, async () => { return await client.query(query, params); }); if (customResultMapper) { return customResultMapper(rows); } return rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); } all(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.rawQuery.text, params); return this.queryWithCache(this.rawQuery.text, params, async () => { return await this.client.query(this.rawQuery, params); }).then((result) => result.rows); } values(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.params, placeholderValues); this.logger.logQuery(this.rawQuery.text, params); return this.queryWithCache(this.queryConfig.text, params, async () => { return await this.client.query(this.queryConfig, params); }).then((result) => result.rows); } /** @internal */ isResponseInArrayMode(): boolean { return this._isResponseInArrayMode; } } export interface VercelPgSessionOptions { logger?: Logger; cache?: Cache; } export class VercelPgSession< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgSession { static override readonly [entityKind]: string = 'VercelPgSession'; private logger: Logger; private cache: Cache; constructor( private client: VercelPgClient, dialect: PgDialect, private schema: RelationalSchemaConfig | undefined, private options: VercelPgSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PgPreparedQuery { return new VercelPgPreparedQuery( this.client, query.sql, query.params, this.logger, this.cache, queryMetadata, cacheConfig, fields, name, isResponseInArrayMode, customResultMapper, ); } async query(query: string, params: unknown[]): Promise { this.logger.logQuery(query, params); const result = await this.client.query({ rowMode: 'array', text: query, values: params, }); return result; } async queryObjects( query: string, params: unknown[], ): Promise> { return this.client.query(query, params); } override async count(sql: SQL): Promise { const result = await this.execute(sql); return Number((result as any)['rows'][0]['count']); } override async transaction( transaction: (tx: VercelPgTransaction) => Promise, config?: PgTransactionConfig | undefined, ): Promise { const session = this.client instanceof VercelPool // eslint-disable-line no-instanceof/no-instanceof ? new VercelPgSession(await this.client.connect(), this.dialect, this.schema, this.options) : this; const tx = new VercelPgTransaction(this.dialect, session, this.schema); await tx.execute(sql`begin${config ? sql` ${tx.getTransactionConfigSQL(config)}` : undefined}`); try { const result = await transaction(tx); await tx.execute(sql`commit`); return result; } catch (error) { await tx.execute(sql`rollback`); throw error; } finally { if (this.client instanceof VercelPool) { // eslint-disable-line no-instanceof/no-instanceof (session.client as VercelPoolClient).release(); } } } } export class VercelPgTransaction< TFullSchema extends Record, TSchema extends TablesRelationalConfig, > extends PgTransaction { static override readonly [entityKind]: string = 'VercelPgTransaction'; override async transaction( transaction: (tx: VercelPgTransaction) => Promise, ): Promise { const savepointName = `sp${this.nestedIndex + 1}`; const tx = new VercelPgTransaction( this.dialect, this.session, this.schema, this.nestedIndex + 1, ); await tx.execute(sql.raw(`savepoint ${savepointName}`)); try { const result = await transaction(tx); await tx.execute(sql.raw(`release savepoint ${savepointName}`)); return result; } catch (err) { await tx.execute(sql.raw(`rollback to savepoint ${savepointName}`)); throw err; } } } export interface VercelPgQueryResultHKT extends PgQueryResultHKT { type: QueryResult>; } ================================================ FILE: drizzle-orm/src/version.ts ================================================ // @ts-ignore - imported using Rollup json plugin export { version as npmVersion } from '../package.json'; // In version 7, we changed the PostgreSQL indexes API export const compatibilityVersion = 10; ================================================ FILE: drizzle-orm/src/view-common.ts ================================================ export const ViewBaseConfig = Symbol.for('drizzle:ViewBaseConfig'); ================================================ FILE: drizzle-orm/src/xata-http/driver.ts ================================================ import type { Cache } from '~/cache/core/cache.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { DefaultLogger } from '~/logger.ts'; import { PgDatabase } from '~/pg-core/db.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import type { ExtractTablesWithRelations, RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig } from '~/relations.ts'; import type { DrizzleConfig } from '~/utils.ts'; import type { XataHttpClient, XataHttpQueryResultHKT } from './session.ts'; import { XataHttpSession } from './session.ts'; export interface XataDriverOptions { logger?: Logger; cache?: Cache; } export class XataHttpDriver { static readonly [entityKind]: string = 'XataDriver'; constructor( private client: XataHttpClient, private dialect: PgDialect, private options: XataDriverOptions = {}, ) { this.initMappers(); } createSession( schema: RelationalSchemaConfig | undefined, ): XataHttpSession, TablesRelationalConfig> { return new XataHttpSession(this.client, this.dialect, schema, { logger: this.options.logger, cache: this.options.cache, }); } initMappers() { // TODO: Add custom type parsers } } export class XataHttpDatabase = Record> extends PgDatabase { static override readonly [entityKind]: string = 'XataHttpDatabase'; /** @internal */ declare readonly session: XataHttpSession>; } export function drizzle = Record>( client: XataHttpClient, config: DrizzleConfig = {}, ): XataHttpDatabase & { $client: XataHttpClient; } { const dialect = new PgDialect({ casing: config.casing }); let logger; if (config.logger === true) { logger = new DefaultLogger(); } else if (config.logger !== false) { logger = config.logger; } let schema: RelationalSchemaConfig | undefined; if (config.schema) { const tablesConfig = extractTablesRelationalConfig(config.schema, createTableRelationsHelpers); schema = { fullSchema: config.schema, schema: tablesConfig.tables, tableNamesMap: tablesConfig.tableNamesMap, }; } const driver = new XataHttpDriver(client, dialect, { logger, cache: config.cache }); const session = driver.createSession(schema); const db = new XataHttpDatabase( dialect, session, schema as RelationalSchemaConfig> | undefined, ); ( db).$client = client; ( db).$cache = config.cache; if (( db).$cache) { ( db).$cache['invalidate'] = config.cache?.onMutate; } return db as any; } ================================================ FILE: drizzle-orm/src/xata-http/index.ts ================================================ export * from './driver.ts'; export * from './session.ts'; ================================================ FILE: drizzle-orm/src/xata-http/migrator.ts ================================================ import { readMigrationFiles } from '~/migrator.ts'; import { sql } from '~/sql/sql.ts'; import type { XataHttpDatabase } from './driver.ts'; export interface MigrationConfig { migrationsFolder: string; migrationsTable?: string; } /** * This function reads migrationFolder and execute each unapplied migration and mark it as executed in database * * NOTE: The Xata HTTP driver does not support transactions. This means that if any part of a migration fails, * no rollback will be executed. Currently, you will need to handle unsuccessful migration yourself. * @param db - drizzle db instance * @param config - path to migration folder generated by drizzle-kit */ export async function migrate>( db: XataHttpDatabase, config: MigrationConfig, ) { const migrations = readMigrationFiles(config); const migrationsTable = config.migrationsTable ?? '__drizzle_migrations'; const migrationTableCreate = sql` CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} ( id SERIAL PRIMARY KEY, hash text NOT NULL, created_at bigint ) `; await db.session.execute(migrationTableCreate); const dbMigrations = await db.session.all<{ id: number; hash: string; created_at: string; }>( sql`select id, hash, created_at from ${sql.identifier(migrationsTable)} order by created_at desc limit 1`, ); const lastDbMigration = dbMigrations[0]; for await (const migration of migrations) { if (!lastDbMigration || Number(lastDbMigration.created_at) < migration.folderMillis) { for (const stmt of migration.sql) { await db.session.execute(sql.raw(stmt)); } await db.session.execute( sql`insert into ${ sql.identifier(migrationsTable) } ("hash", "created_at") values(${migration.hash}, ${migration.folderMillis})`, ); } } } ================================================ FILE: drizzle-orm/src/xata-http/session.ts ================================================ import type { SQLPluginResult, SQLQueryResult } from '@xata.io/client'; import type { Cache } from '~/cache/core/index.ts'; import { NoopCache } from '~/cache/core/index.ts'; import type { WithCacheConfig } from '~/cache/core/types.ts'; import { entityKind } from '~/entity.ts'; import type { Logger } from '~/logger.ts'; import { NoopLogger } from '~/logger.ts'; import type { PgDialect } from '~/pg-core/dialect.ts'; import { PgTransaction } from '~/pg-core/index.ts'; import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; import { fillPlaceholders, type Query } from '~/sql/sql.ts'; import { mapResultRow } from '~/utils.ts'; export type XataHttpClient = { sql: SQLPluginResult; }; export interface QueryResults { rowCount: number; rows: ArrayMode extends 'array' ? any[][] : Record[]; rowAsArray: ArrayMode extends 'array' ? true : false; } export class XataHttpPreparedQuery extends PgPreparedQuery { static override readonly [entityKind]: string = 'XataHttpPreparedQuery'; constructor( private client: XataHttpClient, query: Query, private logger: Logger, cache: Cache, queryMetadata: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; } | undefined, cacheConfig: WithCacheConfig | undefined, private fields: SelectedFieldsOrdered | undefined, private _isResponseInArrayMode: boolean, private customResultMapper?: (rows: unknown[][]) => T['execute'], ) { super(query, cache, queryMetadata, cacheConfig); } async execute(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.query.params, placeholderValues); this.logger.logQuery(this.query.sql, params); const { fields, client, query, customResultMapper, joinsNotNullableMap } = this; if (!fields && !customResultMapper) { return this.queryWithCache(query.sql, params, async () => { return await client.sql>({ statement: query.sql, params }); }); } const { rows, warning } = await this.queryWithCache(query.sql, params, async () => { return await client.sql({ statement: query.sql, params, responseType: 'array' }); }); if (warning) console.warn(warning); return customResultMapper ? customResultMapper(rows as unknown[][]) : rows.map((row) => mapResultRow(fields!, row as unknown[], joinsNotNullableMap)); } all(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.query.params, placeholderValues); this.logger.logQuery(this.query.sql, params); return this.queryWithCache(this.query.sql, params, async () => { return this.client.sql({ statement: this.query.sql, params, responseType: 'array' }); }).then((result) => result.rows); } values(placeholderValues: Record | undefined = {}): Promise { const params = fillPlaceholders(this.query.params, placeholderValues); this.logger.logQuery(this.query.sql, params); return this.queryWithCache(this.query.sql, params, async () => { return this.client.sql({ statement: this.query.sql, params }); }).then((result) => result.records); } /** @internal */ isResponseInArrayMode() { return this._isResponseInArrayMode; } } export interface XataHttpSessionOptions { logger?: Logger; cache?: Cache; } export class XataHttpSession, TSchema extends TablesRelationalConfig> extends PgSession< XataHttpQueryResultHKT, TFullSchema, TSchema > { static override readonly [entityKind]: string = 'XataHttpSession'; private logger: Logger; private cache: Cache; constructor( private client: XataHttpClient, dialect: PgDialect, private schema: RelationalSchemaConfig | undefined, private options: XataHttpSessionOptions = {}, ) { super(dialect); this.logger = options.logger ?? new NoopLogger(); this.cache = options.cache ?? new NoopCache(); } prepareQuery( query: Query, fields: SelectedFieldsOrdered | undefined, name: string | undefined, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => T['execute'], queryMetadata?: { type: 'select' | 'update' | 'delete' | 'insert'; tables: string[]; }, cacheConfig?: WithCacheConfig, ): PgPreparedQuery { return new XataHttpPreparedQuery( this.client, query, this.logger, this.cache, queryMetadata, cacheConfig, fields, isResponseInArrayMode, customResultMapper, ); } async query(query: string, params: unknown[]): Promise> { this.logger.logQuery(query, params); const result = await this.client.sql({ statement: query, params, responseType: 'array' }); return { rowCount: result.rows.length, rows: result.rows, rowAsArray: true, }; } async queryObjects(query: string, params: unknown[]): Promise> { const result = await this.client.sql>({ statement: query, params }); return { rowCount: result.records.length, rows: result.records, rowAsArray: false, }; } override async transaction( _transaction: (tx: XataTransaction) => Promise, // eslint-disable-next-line @typescript-eslint/no-unused-vars _config: PgTransactionConfig = {}, ): Promise { throw new Error('No transactions support in Xata Http driver'); } } export class XataTransaction, TSchema extends TablesRelationalConfig> extends PgTransaction< XataHttpQueryResultHKT, TFullSchema, TSchema > { static override readonly [entityKind]: string = 'XataHttpTransaction'; override async transaction(_transaction: (tx: XataTransaction) => Promise): Promise { throw new Error('No transactions support in Xata Http driver'); } } export interface XataHttpQueryResultHKT extends PgQueryResultHKT { type: SQLQueryResult; } ================================================ FILE: drizzle-orm/tests/casing/casing.test.ts ================================================ import { describe, it } from 'vitest'; import { toCamelCase, toSnakeCase } from '~/casing'; describe.concurrent('casing', () => { it('transforms to snake case', ({ expect }) => { expect(toSnakeCase('drizzleKit')).toEqual('drizzle_kit'); }); it('transforms an uppercase acronym/abbreviation to snake case', ({ expect }) => { expect(toSnakeCase('drizzleORM')).toEqual('drizzle_orm'); }); it('transforms a camel case acronym/abbreviation to snake case', ({ expect }) => { expect(toSnakeCase('drizzleOrm')).toEqual('drizzle_orm'); }); it('transforms an uppercase acronym/abbreviation followed by a word to snake case', ({ expect }) => { expect(toSnakeCase('drizzleORMAndKit')).toEqual('drizzle_orm_and_kit'); }); it('transforms a camel case acronym/abbreviation followed by a word to snake case', ({ expect }) => { expect(toSnakeCase('drizzleOrmAndKit')).toEqual('drizzle_orm_and_kit'); }); it('transforms to camel case 1', ({ expect }) => { expect(toCamelCase('drizzle_kit')).toEqual('drizzleKit'); }); }); ================================================ FILE: drizzle-orm/tests/casing/mysql-to-camel.test.ts ================================================ import { Client } from '@planetscale/database'; import { connect } from '@tidbcloud/serverless'; import { beforeEach, describe, it } from 'vitest'; import { alias, boolean, int, mysqlSchema, mysqlTable, serial, text, union } from '~/mysql-core'; import { drizzle as planetscale } from '~/planetscale-serverless'; import { relations } from '~/relations'; import { asc, eq, sql } from '~/sql'; import { drizzle as mysql } from '~/tidb-serverless'; const testSchema = mysqlSchema('test'); const users = mysqlTable('users', { id: serial().primaryKey(), first_name: text().notNull(), last_name: text().notNull(), // Test that custom aliases remain age: int('AGE'), }); const usersRelations = relations(users, ({ one }) => ({ developers: one(developers), })); const developers = testSchema.table('developers', { user_id: serial().primaryKey().references(() => users.id), uses_drizzle_orm: boolean().notNull(), }); const developersRelations = relations(developers, ({ one }) => ({ user: one(users, { fields: [developers.user_id], references: [users.id], }), })); const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; const db = mysql(connect({}), { schema, casing: 'camelCase' }); const ps = planetscale(new Client({}), { schema, casing: 'camelCase' }); const usersCache = { 'public.users.id': 'id', 'public.users.first_name': 'firstName', 'public.users.last_name': 'lastName', 'public.users.AGE': 'age', }; const developersCache = { 'test.developers.user_id': 'userId', 'test.developers.uses_drizzle_orm': 'usesDrizzleOrm', }; const cache = { ...usersCache, ...developersCache, }; const fullName = sql`${users.first_name} || ' ' || ${users.last_name}`.as('name'); describe('mysql to snake case', () => { beforeEach(() => { ps.dialect.casing.clearCache(); db.dialect.casing.clearCache(); }); it('select', ({ expect }) => { const query = db .select({ name: fullName, age: users.age }) .from(users) .leftJoin(developers, eq(users.id, developers.user_id)) .orderBy(asc(users.first_name)); expect(query.toSQL()).toEqual({ sql: "select `users`.`firstName` || ' ' || `users`.`lastName` as `name`, `users`.`AGE` from `users` left join `test`.`developers` on `users`.`id` = `test`.`developers`.`userId` order by `users`.`firstName` asc", params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('select (with alias)', ({ expect }) => { const query = db .select({ firstName: users.first_name }) .from(users) .leftJoin(devs, eq(users.id, devs.user_id)); expect(query.toSQL()).toEqual({ sql: 'select `users`.`firstName` from `users` left join `test`.`developers` `devs` on `users`.`id` = `devs`.`userId`', params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('with CTE', ({ expect }) => { const cte = db.$with('cte').as(db.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: "with `cte` as (select `firstName` || ' ' || `lastName` as `name` from `users`) select `name` from `cte`", params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('with CTE (with query builder)', ({ expect }) => { const cte = db.$with('cte').as((qb) => qb.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: "with `cte` as (select `firstName` || ' ' || `lastName` as `name` from `users`) select `name` from `cte`", params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator', ({ expect }) => { const query = db .select({ firstName: users.first_name }) .from(users) .union(db.select({ firstName: users.first_name }).from(users)); expect(query.toSQL()).toEqual({ sql: '(select `firstName` from `users`) union (select `firstName` from `users`)', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator (function)', ({ expect }) => { const query = union( db.select({ firstName: users.first_name }).from(users), db.select({ firstName: users.first_name }).from(users), ); expect(query.toSQL()).toEqual({ sql: '(select `firstName` from `users`) union (select `firstName` from `users`)', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('query (find first)', ({ expect }) => { const query = db.query.users.findFirst({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { uses_drizzle_orm: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: "select `users`.`id`, `users`.`AGE`, `users`.`firstName` || ' ' || `users`.`lastName` as `name`, `users_developers`.`data` as `developers` from `users` `users` left join lateral (select json_array(`users_developers`.`usesDrizzleOrm`) as `data` from (select * from `test`.`developers` `users_developers` where `users_developers`.`userId` = `users`.`id` limit ?) `users_developers`) `users_developers` on true where `users`.`id` = ? limit ?", params: [1, 1, 1], typings: ['none', 'none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('query (find first, planetscale)', ({ expect }) => { const query = ps.query.users.findFirst({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { uses_drizzle_orm: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: "select `id`, `AGE`, `firstName` || ' ' || `lastName` as `name`, (select json_array(`usesDrizzleOrm`) from (select * from `test`.`developers` `users_developers` where `users_developers`.`userId` = `users`.`id` limit ?) `users_developers`) as `developers` from `users` `users` where `users`.`id` = ? limit ?", params: [1, 1, 1], typings: ['none', 'none', 'none'], }); expect(ps.dialect.casing.cache).toEqual(cache); }); it('query (find many)', ({ expect }) => { const query = db.query.users.findMany({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { uses_drizzle_orm: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: "select `users`.`id`, `users`.`AGE`, `users`.`firstName` || ' ' || `users`.`lastName` as `name`, `users_developers`.`data` as `developers` from `users` `users` left join lateral (select json_array(`users_developers`.`usesDrizzleOrm`) as `data` from (select * from `test`.`developers` `users_developers` where `users_developers`.`userId` = `users`.`id` limit ?) `users_developers`) `users_developers` on true where `users`.`id` = ?", params: [1, 1], typings: ['none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('query (find many, planetscale)', ({ expect }) => { const query = ps.query.users.findMany({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { uses_drizzle_orm: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: "select `id`, `AGE`, `firstName` || ' ' || `lastName` as `name`, (select json_array(`usesDrizzleOrm`) from (select * from `test`.`developers` `users_developers` where `users_developers`.`userId` = `users`.`id` limit ?) `users_developers`) as `developers` from `users` `users` where `users`.`id` = ?", params: [1, 1], typings: ['none', 'none'], }); expect(ps.dialect.casing.cache).toEqual(cache); }); it('insert', ({ expect }) => { const query = db .insert(users) .values({ first_name: 'John', last_name: 'Doe', age: 30 }); expect(query.toSQL()).toEqual({ sql: 'insert into `users` (`id`, `firstName`, `lastName`, `AGE`) values (default, ?, ?, ?)', params: ['John', 'Doe', 30], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('insert (on duplicate key update)', ({ expect }) => { const query = db .insert(users) .values({ first_name: 'John', last_name: 'Doe', age: 30 }) .onDuplicateKeyUpdate({ set: { age: 31 } }); expect(query.toSQL()).toEqual({ sql: 'insert into `users` (`id`, `firstName`, `lastName`, `AGE`) values (default, ?, ?, ?) on duplicate key update `AGE` = ?', params: ['John', 'Doe', 30, 31], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('update', ({ expect }) => { const query = db .update(users) .set({ first_name: 'John', last_name: 'Doe', age: 30 }) .where(eq(users.id, 1)); expect(query.toSQL()).toEqual({ sql: 'update `users` set `firstName` = ?, `lastName` = ?, `AGE` = ? where `users`.`id` = ?', params: ['John', 'Doe', 30, 1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('delete', ({ expect }) => { const query = db .delete(users) .where(eq(users.id, 1)); expect(query.toSQL()).toEqual({ sql: 'delete from `users` where `users`.`id` = ?', params: [1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); }); ================================================ FILE: drizzle-orm/tests/casing/mysql-to-snake.test.ts ================================================ import { Client } from '@planetscale/database'; import { connect } from '@tidbcloud/serverless'; import { beforeEach, describe, it } from 'vitest'; import { alias, boolean, int, mysqlSchema, mysqlTable, serial, text, union } from '~/mysql-core'; import { drizzle as planetscale } from '~/planetscale-serverless'; import { relations } from '~/relations'; import { asc, eq, sql } from '~/sql'; import { drizzle as mysql } from '~/tidb-serverless'; const testSchema = mysqlSchema('test'); const users = mysqlTable('users', { id: serial().primaryKey(), firstName: text().notNull(), lastName: text().notNull(), // Test that custom aliases remain age: int('AGE'), }); const usersRelations = relations(users, ({ one }) => ({ developers: one(developers), })); const developers = testSchema.table('developers', { userId: serial().primaryKey().references(() => users.id), usesDrizzleORM: boolean().notNull(), }); const developersRelations = relations(developers, ({ one }) => ({ user: one(users, { fields: [developers.userId], references: [users.id], }), })); const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; const db = mysql(connect({}), { schema, casing: 'snake_case' }); const ps = planetscale(new Client({}), { schema, casing: 'snake_case' }); const usersCache = { 'public.users.id': 'id', 'public.users.firstName': 'first_name', 'public.users.lastName': 'last_name', 'public.users.AGE': 'age', }; const developersCache = { 'test.developers.userId': 'user_id', 'test.developers.usesDrizzleORM': 'uses_drizzle_orm', }; const cache = { ...usersCache, ...developersCache, }; const fullName = sql`${users.firstName} || ' ' || ${users.lastName}`.as('name'); describe('mysql to snake case', () => { beforeEach(() => { db.dialect.casing.clearCache(); ps.dialect.casing.clearCache(); }); it('select', ({ expect }) => { const query = db .select({ name: fullName, age: users.age }) .from(users) .leftJoin(developers, eq(users.id, developers.userId)) .orderBy(asc(users.firstName)); expect(query.toSQL()).toEqual({ sql: "select `users`.`first_name` || ' ' || `users`.`last_name` as `name`, `users`.`AGE` from `users` left join `test`.`developers` on `users`.`id` = `test`.`developers`.`user_id` order by `users`.`first_name` asc", params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('select (with alias)', ({ expect }) => { const query = db .select({ firstName: users.firstName }) .from(users) .leftJoin(devs, eq(users.id, devs.userId)); expect(query.toSQL()).toEqual({ sql: 'select `users`.`first_name` from `users` left join `test`.`developers` `devs` on `users`.`id` = `devs`.`user_id`', params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('with CTE', ({ expect }) => { const cte = db.$with('cte').as(db.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: "with `cte` as (select `first_name` || ' ' || `last_name` as `name` from `users`) select `name` from `cte`", params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('with CTE (with query builder)', ({ expect }) => { const cte = db.$with('cte').as((qb) => qb.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: "with `cte` as (select `first_name` || ' ' || `last_name` as `name` from `users`) select `name` from `cte`", params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator', ({ expect }) => { const query = db .select({ firstName: users.firstName }) .from(users) .union(db.select({ firstName: users.firstName }).from(users)); expect(query.toSQL()).toEqual({ sql: '(select `first_name` from `users`) union (select `first_name` from `users`)', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator (function)', ({ expect }) => { const query = union( db.select({ firstName: users.firstName }).from(users), db.select({ firstName: users.firstName }).from(users), ); expect(query.toSQL()).toEqual({ sql: '(select `first_name` from `users`) union (select `first_name` from `users`)', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('query (find first)', ({ expect }) => { const query = db.query.users.findFirst({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { usesDrizzleORM: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: "select `users`.`id`, `users`.`AGE`, `users`.`first_name` || ' ' || `users`.`last_name` as `name`, `users_developers`.`data` as `developers` from `users` `users` left join lateral (select json_array(`users_developers`.`uses_drizzle_orm`) as `data` from (select * from `test`.`developers` `users_developers` where `users_developers`.`user_id` = `users`.`id` limit ?) `users_developers`) `users_developers` on true where `users`.`id` = ? limit ?", params: [1, 1, 1], typings: ['none', 'none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('query (find first, planetscale)', ({ expect }) => { const query = ps.query.users.findFirst({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { usesDrizzleORM: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: "select `id`, `AGE`, `first_name` || ' ' || `last_name` as `name`, (select json_array(`uses_drizzle_orm`) from (select * from `test`.`developers` `users_developers` where `users_developers`.`user_id` = `users`.`id` limit ?) `users_developers`) as `developers` from `users` `users` where `users`.`id` = ? limit ?", params: [1, 1, 1], typings: ['none', 'none', 'none'], }); expect(ps.dialect.casing.cache).toEqual(cache); }); it('query (find many)', ({ expect }) => { const query = db.query.users.findMany({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { usesDrizzleORM: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: "select `users`.`id`, `users`.`AGE`, `users`.`first_name` || ' ' || `users`.`last_name` as `name`, `users_developers`.`data` as `developers` from `users` `users` left join lateral (select json_array(`users_developers`.`uses_drizzle_orm`) as `data` from (select * from `test`.`developers` `users_developers` where `users_developers`.`user_id` = `users`.`id` limit ?) `users_developers`) `users_developers` on true where `users`.`id` = ?", params: [1, 1], typings: ['none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('query (find many, planetscale)', ({ expect }) => { const query = ps.query.users.findMany({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { usesDrizzleORM: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: "select `id`, `AGE`, `first_name` || ' ' || `last_name` as `name`, (select json_array(`uses_drizzle_orm`) from (select * from `test`.`developers` `users_developers` where `users_developers`.`user_id` = `users`.`id` limit ?) `users_developers`) as `developers` from `users` `users` where `users`.`id` = ?", params: [1, 1], typings: ['none', 'none'], }); expect(ps.dialect.casing.cache).toEqual(cache); }); it('insert', ({ expect }) => { const query = db .insert(users) .values({ firstName: 'John', lastName: 'Doe', age: 30 }); expect(query.toSQL()).toEqual({ sql: 'insert into `users` (`id`, `first_name`, `last_name`, `AGE`) values (default, ?, ?, ?)', params: ['John', 'Doe', 30], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('insert (on duplicate key update)', ({ expect }) => { const query = db .insert(users) .values({ firstName: 'John', lastName: 'Doe', age: 30 }) .onDuplicateKeyUpdate({ set: { age: 31 } }); expect(query.toSQL()).toEqual({ sql: 'insert into `users` (`id`, `first_name`, `last_name`, `AGE`) values (default, ?, ?, ?) on duplicate key update `AGE` = ?', params: ['John', 'Doe', 30, 31], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('update', ({ expect }) => { const query = db .update(users) .set({ firstName: 'John', lastName: 'Doe', age: 30 }) .where(eq(users.id, 1)); expect(query.toSQL()).toEqual({ sql: 'update `users` set `first_name` = ?, `last_name` = ?, `AGE` = ? where `users`.`id` = ?', params: ['John', 'Doe', 30, 1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('delete', ({ expect }) => { const query = db .delete(users) .where(eq(users.id, 1)); expect(query.toSQL()).toEqual({ sql: 'delete from `users` where `users`.`id` = ?', params: [1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); }); ================================================ FILE: drizzle-orm/tests/casing/pg-to-camel.test.ts ================================================ import postgres from 'postgres'; import { beforeEach, describe, it } from 'vitest'; import { alias, boolean, integer, pgSchema, pgTable, serial, text, union } from '~/pg-core'; import { drizzle } from '~/postgres-js'; import { relations } from '~/relations'; import { asc, eq, sql } from '~/sql'; const testSchema = pgSchema('test'); const users = pgTable('users', { id: serial().primaryKey(), first_name: text().notNull(), last_name: text().notNull(), // Test that custom aliases remain age: integer('AGE'), }); const usersRelations = relations(users, ({ one }) => ({ developers: one(developers), })); const developers = testSchema.table('developers', { user_id: serial().primaryKey().references(() => users.id), uses_drizzle_orm: boolean().notNull(), }); const developersRelations = relations(developers, ({ one }) => ({ user: one(users, { fields: [developers.user_id], references: [users.id], }), })); const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; const db = drizzle(postgres(''), { schema, casing: 'camelCase' }); const usersCache = { 'public.users.id': 'id', 'public.users.first_name': 'firstName', 'public.users.last_name': 'lastName', 'public.users.AGE': 'age', }; const developersCache = { 'test.developers.user_id': 'userId', 'test.developers.uses_drizzle_orm': 'usesDrizzleOrm', }; const cache = { ...usersCache, ...developersCache, }; const fullName = sql`${users.first_name} || ' ' || ${users.last_name}`.as('name'); describe('postgres to camel case', () => { beforeEach(() => { db.dialect.casing.clearCache(); }); it('select', ({ expect }) => { const query = db .select({ name: fullName, age: users.age }) .from(users) .leftJoin(developers, eq(users.id, developers.user_id)) .orderBy(asc(users.first_name)); expect(query.toSQL()).toEqual({ sql: 'select "users"."firstName" || \' \' || "users"."lastName" as "name", "users"."AGE" from "users" left join "test"."developers" on "users"."id" = "test"."developers"."userId" order by "users"."firstName" asc', params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('select (with alias)', ({ expect }) => { const query = db .select({ first_name: users.first_name }) .from(users) .leftJoin(devs, eq(users.id, devs.user_id)); expect(query.toSQL()).toEqual({ sql: 'select "users"."firstName" from "users" left join "test"."developers" "devs" on "users"."id" = "devs"."userId"', params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('with CTE', ({ expect }) => { const cte = db.$with('cte').as(db.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: 'with "cte" as (select "firstName" || \' \' || "lastName" as "name" from "users") select "name" from "cte"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('with CTE (with query builder)', ({ expect }) => { const cte = db.$with('cte').as((qb) => qb.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: 'with "cte" as (select "firstName" || \' \' || "lastName" as "name" from "users") select "name" from "cte"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator', ({ expect }) => { const query = db .select({ first_name: users.first_name }) .from(users) .union(db.select({ first_name: users.first_name }).from(users)); expect(query.toSQL()).toEqual({ sql: '(select "firstName" from "users") union (select "firstName" from "users")', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator (function)', ({ expect }) => { const query = union( db.select({ first_name: users.first_name }).from(users), db.select({ first_name: users.first_name }).from(users), ); expect(query.toSQL()).toEqual({ sql: '(select "firstName" from "users") union (select "firstName" from "users")', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('query (find first)', ({ expect }) => { const query = db.query.users.findFirst({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { uses_drizzle_orm: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: 'select "users"."id", "users"."AGE", "users"."firstName" || \' \' || "users"."lastName" as "name", "users_developers"."data" as "developers" from "users" "users" left join lateral (select json_build_array("users_developers"."usesDrizzleOrm") as "data" from (select * from "test"."developers" "users_developers" where "users_developers"."userId" = "users"."id" limit $1) "users_developers") "users_developers" on true where "users"."id" = $2 limit $3', params: [1, 1, 1], typings: ['none', 'none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('query (find many)', ({ expect }) => { const query = db.query.users.findMany({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { uses_drizzle_orm: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: 'select "users"."id", "users"."AGE", "users"."firstName" || \' \' || "users"."lastName" as "name", "users_developers"."data" as "developers" from "users" "users" left join lateral (select json_build_array("users_developers"."usesDrizzleOrm") as "data" from (select * from "test"."developers" "users_developers" where "users_developers"."userId" = "users"."id" limit $1) "users_developers") "users_developers" on true where "users"."id" = $2', params: [1, 1], typings: ['none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('insert (on conflict do nothing)', ({ expect }) => { const query = db .insert(users) .values({ first_name: 'John', last_name: 'Doe', age: 30 }) .onConflictDoNothing({ target: users.first_name }) .returning({ first_name: users.first_name, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'insert into "users" ("id", "firstName", "lastName", "AGE") values (default, $1, $2, $3) on conflict ("firstName") do nothing returning "firstName", "AGE"', params: ['John', 'Doe', 30], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('insert (on conflict do update)', ({ expect }) => { const query = db .insert(users) .values({ first_name: 'John', last_name: 'Doe', age: 30 }) .onConflictDoUpdate({ target: users.first_name, set: { age: 31 } }) .returning({ first_name: users.first_name, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'insert into "users" ("id", "firstName", "lastName", "AGE") values (default, $1, $2, $3) on conflict ("firstName") do update set "AGE" = $4 returning "firstName", "AGE"', params: ['John', 'Doe', 30, 31], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('update', ({ expect }) => { const query = db .update(users) .set({ first_name: 'John', last_name: 'Doe', age: 30 }) .where(eq(users.id, 1)) .returning({ first_name: users.first_name, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'update "users" set "firstName" = $1, "lastName" = $2, "AGE" = $3 where "users"."id" = $4 returning "firstName", "AGE"', params: ['John', 'Doe', 30, 1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('delete', ({ expect }) => { const query = db .delete(users) .where(eq(users.id, 1)) .returning({ first_name: users.first_name, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'delete from "users" where "users"."id" = $1 returning "firstName", "AGE"', params: [1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); }); ================================================ FILE: drizzle-orm/tests/casing/pg-to-snake.test.ts ================================================ import postgres from 'postgres'; import { beforeEach, describe, it } from 'vitest'; import { alias, boolean, integer, pgSchema, pgTable, serial, text, union } from '~/pg-core'; import { drizzle } from '~/postgres-js'; import { relations } from '~/relations'; import { asc, eq, sql } from '~/sql'; const testSchema = pgSchema('test'); const users = pgTable('users', { id: serial().primaryKey(), firstName: text().notNull(), lastName: text().notNull(), // Test that custom aliases remain age: integer('AGE'), }); const usersRelations = relations(users, ({ one }) => ({ developers: one(developers), })); const developers = testSchema.table('developers', { userId: serial().primaryKey().references(() => users.id), usesDrizzleORM: boolean().notNull(), }); const developersRelations = relations(developers, ({ one }) => ({ user: one(users, { fields: [developers.userId], references: [users.id], }), })); const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; const db = drizzle(postgres(''), { schema, casing: 'snake_case' }); const usersCache = { 'public.users.id': 'id', 'public.users.firstName': 'first_name', 'public.users.lastName': 'last_name', 'public.users.AGE': 'age', }; const developersCache = { 'test.developers.userId': 'user_id', 'test.developers.usesDrizzleORM': 'uses_drizzle_orm', }; const cache = { ...usersCache, ...developersCache, }; const fullName = sql`${users.firstName} || ' ' || ${users.lastName}`.as('name'); describe('postgres to snake case', () => { beforeEach(() => { db.dialect.casing.clearCache(); }); it('select', ({ expect }) => { const query = db .select({ name: fullName, age: users.age }) .from(users) .leftJoin(developers, eq(users.id, developers.userId)) .orderBy(asc(users.firstName)); expect(query.toSQL()).toEqual({ sql: 'select "users"."first_name" || \' \' || "users"."last_name" as "name", "users"."AGE" from "users" left join "test"."developers" on "users"."id" = "test"."developers"."user_id" order by "users"."first_name" asc', params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('select (with alias)', ({ expect }) => { const query = db .select({ firstName: users.firstName }) .from(users) .leftJoin(devs, eq(users.id, devs.userId)); expect(query.toSQL()).toEqual({ sql: 'select "users"."first_name" from "users" left join "test"."developers" "devs" on "users"."id" = "devs"."user_id"', params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('with CTE', ({ expect }) => { const cte = db.$with('cte').as(db.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: 'with "cte" as (select "first_name" || \' \' || "last_name" as "name" from "users") select "name" from "cte"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('with CTE (with query builder)', ({ expect }) => { const cte = db.$with('cte').as((qb) => qb.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: 'with "cte" as (select "first_name" || \' \' || "last_name" as "name" from "users") select "name" from "cte"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator', ({ expect }) => { const query = db .select({ firstName: users.firstName }) .from(users) .union(db.select({ firstName: users.firstName }).from(users)); expect(query.toSQL()).toEqual({ sql: '(select "first_name" from "users") union (select "first_name" from "users")', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator (function)', ({ expect }) => { const query = union( db.select({ firstName: users.firstName }).from(users), db.select({ firstName: users.firstName }).from(users), ); expect(query.toSQL()).toEqual({ sql: '(select "first_name" from "users") union (select "first_name" from "users")', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('query (find first)', ({ expect }) => { const query = db.query.users.findFirst({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { usesDrizzleORM: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: 'select "users"."id", "users"."AGE", "users"."first_name" || \' \' || "users"."last_name" as "name", "users_developers"."data" as "developers" from "users" "users" left join lateral (select json_build_array("users_developers"."uses_drizzle_orm") as "data" from (select * from "test"."developers" "users_developers" where "users_developers"."user_id" = "users"."id" limit $1) "users_developers") "users_developers" on true where "users"."id" = $2 limit $3', params: [1, 1, 1], typings: ['none', 'none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('query (find many)', ({ expect }) => { const query = db.query.users.findMany({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { usesDrizzleORM: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: 'select "users"."id", "users"."AGE", "users"."first_name" || \' \' || "users"."last_name" as "name", "users_developers"."data" as "developers" from "users" "users" left join lateral (select json_build_array("users_developers"."uses_drizzle_orm") as "data" from (select * from "test"."developers" "users_developers" where "users_developers"."user_id" = "users"."id" limit $1) "users_developers") "users_developers" on true where "users"."id" = $2', params: [1, 1], typings: ['none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('insert (on conflict do nothing)', ({ expect }) => { const query = db .insert(users) .values({ firstName: 'John', lastName: 'Doe', age: 30 }) .onConflictDoNothing({ target: users.firstName }) .returning({ firstName: users.firstName, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'insert into "users" ("id", "first_name", "last_name", "AGE") values (default, $1, $2, $3) on conflict ("first_name") do nothing returning "first_name", "AGE"', params: ['John', 'Doe', 30], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('insert (on conflict do update)', ({ expect }) => { const query = db .insert(users) .values({ firstName: 'John', lastName: 'Doe', age: 30 }) .onConflictDoUpdate({ target: users.firstName, set: { age: 31 } }) .returning({ firstName: users.firstName, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'insert into "users" ("id", "first_name", "last_name", "AGE") values (default, $1, $2, $3) on conflict ("first_name") do update set "AGE" = $4 returning "first_name", "AGE"', params: ['John', 'Doe', 30, 31], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('update', ({ expect }) => { const query = db .update(users) .set({ firstName: 'John', lastName: 'Doe', age: 30 }) .where(eq(users.id, 1)) .returning({ firstName: users.firstName, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'update "users" set "first_name" = $1, "last_name" = $2, "AGE" = $3 where "users"."id" = $4 returning "first_name", "AGE"', params: ['John', 'Doe', 30, 1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('delete', ({ expect }) => { const query = db .delete(users) .where(eq(users.id, 1)) .returning({ firstName: users.firstName, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'delete from "users" where "users"."id" = $1 returning "first_name", "AGE"', params: [1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); }); ================================================ FILE: drizzle-orm/tests/casing/sqlite-to-camel.test.ts ================================================ import Database from 'better-sqlite3'; import { beforeEach, describe, it } from 'vitest'; import { drizzle } from '~/better-sqlite3'; import { relations } from '~/relations'; import { asc, eq, sql } from '~/sql'; import { alias, integer, sqliteTable, text, union } from '~/sqlite-core'; const users = sqliteTable('users', { id: integer().primaryKey({ autoIncrement: true }), first_name: text().notNull(), last_name: text().notNull(), // Test that custom aliases remain age: integer('AGE'), }); const usersRelations = relations(users, ({ one }) => ({ developers: one(developers), })); const developers = sqliteTable('developers', { user_id: integer().primaryKey().references(() => users.id), uses_drizzle_orm: integer({ mode: 'boolean' }).notNull(), }); const developersRelations = relations(developers, ({ one }) => ({ user: one(users, { fields: [developers.user_id], references: [users.id], }), })); const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; const db = drizzle(new Database(':memory:'), { schema, casing: 'camelCase' }); const usersCache = { 'public.users.id': 'id', 'public.users.first_name': 'firstName', 'public.users.last_name': 'lastName', 'public.users.AGE': 'age', }; const developersCache = { 'public.developers.user_id': 'userId', 'public.developers.uses_drizzle_orm': 'usesDrizzleOrm', }; const cache = { ...usersCache, ...developersCache, }; const fullName = sql`${users.first_name} || ' ' || ${users.last_name}`.as('name'); describe('sqlite to camel case', () => { beforeEach(() => { db.dialect.casing.clearCache(); }); it('select', ({ expect }) => { const query = db .select({ name: fullName, age: users.age }) .from(users) .leftJoin(developers, eq(users.id, developers.user_id)) .orderBy(asc(users.first_name)); expect(query.toSQL()).toEqual({ sql: 'select "users"."firstName" || \' \' || "users"."lastName" as "name", "users"."AGE" from "users" left join "developers" on "users"."id" = "developers"."userId" order by "users"."firstName" asc', params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('select (with alias)', ({ expect }) => { const query = db .select({ first_name: users.first_name }) .from(users) .leftJoin(devs, eq(users.id, devs.user_id)); expect(query.toSQL()).toEqual({ sql: 'select "users"."firstName" from "users" left join "developers" "devs" on "users"."id" = "devs"."userId"', params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('with CTE', ({ expect }) => { const cte = db.$with('cte').as(db.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: 'with "cte" as (select "firstName" || \' \' || "lastName" as "name" from "users") select "name" from "cte"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('with CTE (with query builder)', ({ expect }) => { const cte = db.$with('cte').as((qb) => qb.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: 'with "cte" as (select "firstName" || \' \' || "lastName" as "name" from "users") select "name" from "cte"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator', ({ expect }) => { const query = db .select({ first_name: users.first_name }) .from(users) .union(db.select({ first_name: users.first_name }).from(users)); expect(query.toSQL()).toEqual({ sql: 'select "firstName" from "users" union select "firstName" from "users"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator (function)', ({ expect }) => { const query = union( db.select({ first_name: users.first_name }).from(users), db.select({ first_name: users.first_name }).from(users), ); expect(query.toSQL()).toEqual({ sql: 'select "firstName" from "users" union select "firstName" from "users"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('query (find first)', ({ expect }) => { const query = db.query.users.findFirst({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { uses_drizzle_orm: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: 'select "id", "AGE", "firstName" || \' \' || "lastName" as "name", (select json_array("usesDrizzleOrm") as "data" from (select * from "developers" "users_developers" where "users_developers"."userId" = "users"."id" limit ?) "users_developers") as "developers" from "users" "users" where "users"."id" = ? limit ?', params: [1, 1, 1], typings: ['none', 'none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('query (find many)', ({ expect }) => { const query = db.query.users.findMany({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { uses_drizzle_orm: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: 'select "id", "AGE", "firstName" || \' \' || "lastName" as "name", (select json_array("usesDrizzleOrm") as "data" from (select * from "developers" "users_developers" where "users_developers"."userId" = "users"."id" limit ?) "users_developers") as "developers" from "users" "users" where "users"."id" = ?', params: [1, 1], typings: ['none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('insert (on conflict do nothing)', ({ expect }) => { const query = db .insert(users) .values({ first_name: 'John', last_name: 'Doe', age: 30 }) .onConflictDoNothing({ target: users.first_name }) .returning({ first_name: users.first_name, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'insert into "users" ("id", "firstName", "lastName", "AGE") values (null, ?, ?, ?) on conflict ("users"."firstName") do nothing returning "firstName", "AGE"', params: ['John', 'Doe', 30], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('insert (on conflict do update)', ({ expect }) => { const query = db .insert(users) .values({ first_name: 'John', last_name: 'Doe', age: 30 }) .onConflictDoUpdate({ target: users.first_name, set: { age: 31 } }) .returning({ first_name: users.first_name, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'insert into "users" ("id", "firstName", "lastName", "AGE") values (null, ?, ?, ?) on conflict ("users"."firstName") do update set "AGE" = ? returning "firstName", "AGE"', params: ['John', 'Doe', 30, 31], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('update', ({ expect }) => { const query = db .update(users) .set({ first_name: 'John', last_name: 'Doe', age: 30 }) .where(eq(users.id, 1)) .returning({ first_name: users.first_name, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'update "users" set "firstName" = ?, "lastName" = ?, "AGE" = ? where "users"."id" = ? returning "firstName", "AGE"', params: ['John', 'Doe', 30, 1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('delete', ({ expect }) => { const query = db .delete(users) .where(eq(users.id, 1)) .returning({ first_name: users.first_name, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'delete from "users" where "users"."id" = ? returning "firstName", "AGE"', params: [1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); }); ================================================ FILE: drizzle-orm/tests/casing/sqlite-to-snake.test.ts ================================================ import Database from 'better-sqlite3'; import { beforeEach, describe, it } from 'vitest'; import { drizzle } from '~/better-sqlite3'; import { relations } from '~/relations'; import { asc, eq, sql } from '~/sql'; import { alias, integer, sqliteTable, text, union } from '~/sqlite-core'; const users = sqliteTable('users', { id: integer().primaryKey({ autoIncrement: true }), firstName: text().notNull(), lastName: text().notNull(), // Test that custom aliases remain age: integer('AGE'), }); const usersRelations = relations(users, ({ one }) => ({ developers: one(developers), })); const developers = sqliteTable('developers', { userId: integer().primaryKey().references(() => users.id), usesDrizzleORM: integer({ mode: 'boolean' }).notNull(), }); const developersRelations = relations(developers, ({ one }) => ({ user: one(users, { fields: [developers.userId], references: [users.id], }), })); const devs = alias(developers, 'devs'); const schema = { users, usersRelations, developers, developersRelations }; const db = drizzle(new Database(':memory:'), { schema, casing: 'snake_case' }); const usersCache = { 'public.users.id': 'id', 'public.users.firstName': 'first_name', 'public.users.lastName': 'last_name', 'public.users.AGE': 'age', }; const developersCache = { 'public.developers.userId': 'user_id', 'public.developers.usesDrizzleORM': 'uses_drizzle_orm', }; const cache = { ...usersCache, ...developersCache, }; const fullName = sql`${users.firstName} || ' ' || ${users.lastName}`.as('name'); describe('sqlite to camel case', () => { beforeEach(() => { db.dialect.casing.clearCache(); }); it('select', ({ expect }) => { const query = db .select({ name: fullName, age: users.age }) .from(users) .leftJoin(developers, eq(users.id, developers.userId)) .orderBy(asc(users.firstName)); expect(query.toSQL()).toEqual({ sql: 'select "users"."first_name" || \' \' || "users"."last_name" as "name", "users"."AGE" from "users" left join "developers" on "users"."id" = "developers"."user_id" order by "users"."first_name" asc', params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('select (with alias)', ({ expect }) => { const query = db .select({ firstName: users.firstName }) .from(users) .leftJoin(devs, eq(users.id, devs.userId)); expect(query.toSQL()).toEqual({ sql: 'select "users"."first_name" from "users" left join "developers" "devs" on "users"."id" = "devs"."user_id"', params: [], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('with CTE', ({ expect }) => { const cte = db.$with('cte').as(db.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: 'with "cte" as (select "first_name" || \' \' || "last_name" as "name" from "users") select "name" from "cte"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('with CTE (with query builder)', ({ expect }) => { const cte = db.$with('cte').as((qb) => qb.select({ name: fullName }).from(users)); const query = db.with(cte).select().from(cte); expect(query.toSQL()).toEqual({ sql: 'with "cte" as (select "first_name" || \' \' || "last_name" as "name" from "users") select "name" from "cte"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator', ({ expect }) => { const query = db .select({ firstName: users.firstName }) .from(users) .union(db.select({ firstName: users.firstName }).from(users)); expect(query.toSQL()).toEqual({ sql: 'select "first_name" from "users" union select "first_name" from "users"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('set operator (function)', ({ expect }) => { const query = union( db.select({ firstName: users.firstName }).from(users), db.select({ firstName: users.firstName }).from(users), ); expect(query.toSQL()).toEqual({ sql: 'select "first_name" from "users" union select "first_name" from "users"', params: [], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('query (find first)', ({ expect }) => { const query = db.query.users.findFirst({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { usesDrizzleORM: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: 'select "id", "AGE", "first_name" || \' \' || "last_name" as "name", (select json_array("uses_drizzle_orm") as "data" from (select * from "developers" "users_developers" where "users_developers"."user_id" = "users"."id" limit ?) "users_developers") as "developers" from "users" "users" where "users"."id" = ? limit ?', params: [1, 1, 1], typings: ['none', 'none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('query (find many)', ({ expect }) => { const query = db.query.users.findMany({ columns: { id: true, age: true, }, extras: { fullName, }, where: eq(users.id, 1), with: { developers: { columns: { usesDrizzleORM: true, }, }, }, }); expect(query.toSQL()).toEqual({ sql: 'select "id", "AGE", "first_name" || \' \' || "last_name" as "name", (select json_array("uses_drizzle_orm") as "data" from (select * from "developers" "users_developers" where "users_developers"."user_id" = "users"."id" limit ?) "users_developers") as "developers" from "users" "users" where "users"."id" = ?', params: [1, 1], typings: ['none', 'none'], }); expect(db.dialect.casing.cache).toEqual(cache); }); it('insert (on conflict do nothing)', ({ expect }) => { const query = db .insert(users) .values({ firstName: 'John', lastName: 'Doe', age: 30 }) .onConflictDoNothing({ target: users.firstName }) .returning({ firstName: users.firstName, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'insert into "users" ("id", "first_name", "last_name", "AGE") values (null, ?, ?, ?) on conflict ("users"."first_name") do nothing returning "first_name", "AGE"', params: ['John', 'Doe', 30], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('insert (on conflict do update)', ({ expect }) => { const query = db .insert(users) .values({ firstName: 'John', lastName: 'Doe', age: 30 }) .onConflictDoUpdate({ target: users.firstName, set: { age: 31 } }) .returning({ firstName: users.firstName, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'insert into "users" ("id", "first_name", "last_name", "AGE") values (null, ?, ?, ?) on conflict ("users"."first_name") do update set "AGE" = ? returning "first_name", "AGE"', params: ['John', 'Doe', 30, 31], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('update', ({ expect }) => { const query = db .update(users) .set({ firstName: 'John', lastName: 'Doe', age: 30 }) .where(eq(users.id, 1)) .returning({ firstName: users.firstName, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'update "users" set "first_name" = ?, "last_name" = ?, "AGE" = ? where "users"."id" = ? returning "first_name", "AGE"', params: ['John', 'Doe', 30, 1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); it('delete', ({ expect }) => { const query = db .delete(users) .where(eq(users.id, 1)) .returning({ first_name: users.firstName, age: users.age }); expect(query.toSQL()).toEqual({ sql: 'delete from "users" where "users"."id" = ? returning "first_name", "AGE"', params: [1], }); expect(db.dialect.casing.cache).toEqual(usersCache); }); }); ================================================ FILE: drizzle-orm/tests/exports.test.ts ================================================ import { globSync } from 'glob'; import { Project } from 'ts-morph'; import { assert, test } from 'vitest'; const project = new Project({ tsConfigFilePath: './tsconfig.build.json' }); const filesList = globSync('src/**/*.ts'); for (const filePath of filesList) { test(filePath, () => { const conflicts: { name: string; files: [string, string] }[] = []; const exports = new Map(); const sourceFile = project.getSourceFileOrThrow(filePath); for (const decl of sourceFile.getExportDeclarations()) { const moduleSpecifier = decl.getModuleSpecifierValue(); if (!moduleSpecifier || !moduleSpecifier.endsWith('.ts')) { continue; } const exportSourcePath = decl.getModuleSpecifierSourceFile()!.getFilePath(); const exported = project.getSourceFileOrThrow(exportSourcePath); for (const symbol of exported.getExportSymbols()) { const name = symbol.getName(); const from = exports.get(name); if (from) { conflicts.push({ name, files: [from, moduleSpecifier], }); } else { exports.set(name, moduleSpecifier); } } } if (conflicts.length) { assert.fail( conflicts.map(({ name, files }) => `\n- ${name} is exported from ${files.join(' and ')}`).join('\n'), ); } }); } ================================================ FILE: drizzle-orm/tests/is.test.ts ================================================ import { describe, test } from 'vitest'; import { Column, is } from '~/index.ts'; import { PgArray, PgColumn, PgSerial, pgTable, serial } from '~/pg-core/index.ts'; const pgExampleTable = pgTable('test', { a: serial('a').array(), }); describe.concurrent('is', () => { test('Column', ({ expect }) => { expect(is(pgExampleTable.a, Column)).toBe(true); expect(is(pgExampleTable.a, PgColumn)).toBe(true); expect(is(pgExampleTable.a, PgArray)).toBe(true); expect(is(pgExampleTable.a, PgSerial)).toBe(false); }); }); ================================================ FILE: drizzle-orm/tests/makePgArray.test.ts ================================================ import { describe, it } from 'vitest'; import { customType, pgTable } from '~/pg-core/index.ts'; const anyColumn = customType<{ data: any }>({ dataType() { return 'any'; }, }); const table = pgTable('test', { a: anyColumn('a').array(), b: anyColumn('a').array().array(), }); describe.concurrent('makePgArray', () => { it('parses simple 1D array', ({ expect }) => { const input = ['1', '2', '3']; const output = table.a.mapToDriverValue(input); expect(output).toEqual('{"1","2","3"}'); }); it('parses simple 2D array', ({ expect }) => { const input = [ ['1', '2', '3'], ['4', '5', '6'], ['7', '8', '9'], ]; const output = table.b.mapToDriverValue(input); expect(output).toEqual('{{"1","2","3"},{"4","5","6"},{"7","8","9"}}'); }); it('parses array with quoted values', ({ expect }) => { const input = ['1', '2,3', '4']; const output = table.a.mapToDriverValue(input); expect(output).toEqual('{"1","2,3","4"}'); }); it('parses array with nested quoted values', ({ expect }) => { const input = [ ['1', '2,3', '4'], ['5', '6,7', '8'], ]; const output = table.b.mapToDriverValue(input); expect(output).toEqual('{{"1","2,3","4"},{"5","6,7","8"}}'); }); it('parses array with empty values', ({ expect }) => { const input = ['1', '', '3']; const output = table.a.mapToDriverValue(input); expect(output).toEqual('{"1","","3"}'); }); it('parses array with empty nested values', ({ expect }) => { const input = [ ['1', '2', '3'], ['', '5', '6'], ['7', '8', '9'], ]; const output = table.b.mapToDriverValue(input); expect(output).toEqual('{{"1","2","3"},{"","5","6"},{"7","8","9"}}'); }); it('parses empty array', ({ expect }) => { const input: string[] = []; const output = table.a.mapToDriverValue(input); expect(output).toEqual('{}'); }); it('parses empty nested array', ({ expect }) => { const input = [[]]; const output = table.b.mapToDriverValue(input); expect(output).toEqual('{{}}'); }); it('parses single-level array with strings', ({ expect }) => { const input = ['one', 'two', 'three']; const output = table.a.mapToDriverValue(input); expect(output).toEqual('{"one","two","three"}'); }); it('parses single-level array with mixed values', ({ expect }) => { const input = ['1', 'two', '3']; const output = table.a.mapToDriverValue(input); expect(output).toEqual('{"1","two","3"}'); }); it('parses single-level array with commas inside quotes', ({ expect }) => { const input = ['1', 'two, three', '4']; const output = table.a.mapToDriverValue(input); expect(output).toEqual('{"1","two, three","4"}'); }); it('parses single-level array with escaped quotes inside quotes', ({ expect }) => { const input = ['1', 'two "three", four', '5']; const output = table.a.mapToDriverValue(input); expect(output).toEqual('{"1","two \\"three\\", four","5"}'); }); it('parses two-dimensional array with strings', ({ expect }) => { const input = [ ['one', 'two', 'three'], ['four', 'five', 'six'], ['seven', 'eight', 'nine'], ]; const output = table.b.mapToDriverValue(input); expect(output).toEqual('{{"one","two","three"},{"four","five","six"},{"seven","eight","nine"}}'); }); it('parses two-dimensional array with mixed values and escaped quotes', ({ expect }) => { const input = [ ['1', 'two "and a half", three', '3'], ['four', 'five "and a half", six', '6'], ['seven', 'eight', 'nine'], ]; const output = table.b.mapToDriverValue(input); expect(output).toEqual( '{{"1","two \\"and a half\\", three","3"},{"four","five \\"and a half\\", six","6"},{"seven","eight","nine"}}', ); }); it('parses an array with null values', ({ expect }) => { const input = ['1', null, '3']; const output = table.a.mapToDriverValue(input); expect(output).toEqual('{"1",null,"3"}'); }); it('parses an array with null values in nested arrays', ({ expect }) => { const input = [ ['1', '2', '3'], [null, '5', '6'], ['7', '8', '9'], ]; const output = table.b.mapToDriverValue(input); expect(output).toEqual('{{"1","2","3"},{null,"5","6"},{"7","8","9"}}'); }); it('parses string array with empty strings', ({ expect }) => { const input = ['1', '', '3']; const output = table.a.mapToDriverValue(input); expect(output).toEqual('{"1","","3"}'); }); it('parses string array with backlash strings', ({ expect }) => { const input = ['1', '\n', '3\\']; const output = table.a.mapToDriverValue(input); expect(output).toEqual('{"1","\n","3\\\\"}'); }); }); ================================================ FILE: drizzle-orm/tests/parsePgArray.test.ts ================================================ import { describe, it } from 'vitest'; import { customType, pgTable } from '~/pg-core/index.ts'; const anyColumn = customType<{ data: any }>({ dataType() { return 'any'; }, }); const table = pgTable('test', { a: anyColumn('a').array(), b: anyColumn('a').array().array(), }); describe.concurrent('parsePgArray', () => { it('parses simple 1D array', ({ expect }) => { const input = '{1,2,3}'; const output = table.a.mapFromDriverValue(input); expect(output).toEqual(['1', '2', '3']); }); it('parses simple 2D array', ({ expect }) => { const input = '{{1,2,3},{4,5,6},{7,8,9}}'; const output = table.b.mapFromDriverValue(input); expect(output).toEqual([ ['1', '2', '3'], ['4', '5', '6'], ['7', '8', '9'], ]); }); it('parses array with quoted values', ({ expect }) => { const input = '{1,"2,3",4}'; const output = table.a.mapFromDriverValue(input); expect(output).toEqual(['1', '2,3', '4']); }); it('parses array with nested quoted values', ({ expect }) => { const input = '{{1,"2,3",4},{5,"6,7",8}}'; const output = table.b.mapFromDriverValue(input); expect(output).toEqual([ ['1', '2,3', '4'], ['5', '6,7', '8'], ]); }); it('parses array with empty values', ({ expect }) => { const input = '{1,"",3}'; const output = table.a.mapFromDriverValue(input); expect(output).toEqual(['1', '', '3']); }); it('parses array with empty nested values', ({ expect }) => { const input = '{{1,2,3},{,5,6},{7,8,9}}'; const output = table.b.mapFromDriverValue(input); expect(output).toEqual([ ['1', '2', '3'], ['', '5', '6'], ['7', '8', '9'], ]); }); it('parses empty array', ({ expect }) => { const input = '{}'; const output = table.a.mapFromDriverValue(input); expect(output).toEqual([]); }); it('parses empty nested array', ({ expect }) => { const input = '{{}}'; const output = table.b.mapFromDriverValue(input); expect(output).toEqual([[]]); }); it('parses single-level array with strings', ({ expect }) => { const input = '{"one","two","three"}'; const output = table.a.mapFromDriverValue(input); expect(output).toEqual(['one', 'two', 'three']); }); it('parses single-level array with mixed values', ({ expect }) => { const input = '{1,"two",3}'; const output = table.a.mapFromDriverValue(input); expect(output).toEqual(['1', 'two', '3']); }); it('parses single-level array with commas inside quotes', ({ expect }) => { const input = '{1,"two, three",4}'; const output = table.a.mapFromDriverValue(input); expect(output).toEqual(['1', 'two, three', '4']); }); it('parses single-level array with escaped quotes inside quotes', ({ expect }) => { const input = '{1,"two \\"three\\", four",5}'; const output = table.a.mapFromDriverValue(input); expect(output).toEqual(['1', 'two "three", four', '5']); }); it('parses two-dimensional array with strings', ({ expect }) => { const input = '{{"one","two",three},{"four",five,"six"},{seven,eight,"nine"}}'; const output = table.b.mapFromDriverValue(input); expect(output).toEqual([ ['one', 'two', 'three'], ['four', 'five', 'six'], ['seven', 'eight', 'nine'], ]); }); it('parses two-dimensional array with mixed values and escaped quotes', ({ expect }) => { const input = '{{1,"two \\"and a half\\", three",3},{"four","five \\"and a half\\", six",6},{"seven","eight","nine"}}'; const output = table.b.mapFromDriverValue(input); expect(output).toEqual([ ['1', 'two "and a half", three', '3'], ['four', 'five "and a half", six', '6'], ['seven', 'eight', 'nine'], ]); }); }); ================================================ FILE: drizzle-orm/tests/relation.test.ts ================================================ import { expect, test } from 'vitest'; import { pgSchema, pgTable } from '~/pg-core/index.ts'; import { createTableRelationsHelpers, extractTablesRelationalConfig } from '~/relations.ts'; test('tables with same name in different schemas', () => { const folder = pgSchema('folder'); const schema = { folder: { usersInFolder: folder.table('users', {}), }, public: { users: pgTable('users', {}), }, }; const relationalSchema = { ...Object.fromEntries( Object.entries(schema) .flatMap(([key, val]) => { // have unique keys across schemas const mappedTableEntries = Object.entries(val).map((tableEntry) => { return [`__${key}__.${tableEntry[0]}`, tableEntry[1]]; }); return mappedTableEntries; }), ), }; const relationsConfig = extractTablesRelationalConfig( relationalSchema, createTableRelationsHelpers, ); expect(Object.keys(relationsConfig)).toHaveLength(2); }); ================================================ FILE: drizzle-orm/tests/tsconfig.json ================================================ { "extends": "../tsconfig.build.json", "compilerOptions": { "rootDir": ".." }, "include": [".", "../src"] } ================================================ FILE: drizzle-orm/tests/type-hints.test.ts ================================================ import { RDSDataClient } from '@aws-sdk/client-rds-data'; import crypto from 'crypto'; import { expect, test } from 'vitest'; import { drizzle } from '~/aws-data-api/pg'; import { customType, json, PgDialect, pgTable, text, timestamp, uuid, varchar } from '~/pg-core'; import { sql } from '~/sql/sql'; const db = drizzle(new RDSDataClient(), { database: '', resourceArn: '', secretArn: '', }); test('type hints - case #1', () => { const t = pgTable('t', { id: varchar('id', { length: 255 }).primaryKey(), workspaceID: varchar('workspace_id', { length: 255 }).notNull(), description: text('description').notNull(), enrichment: json('enrichment').notNull(), category: text('category'), tags: text('tags').array().notNull(), counterpartyName: text('counterparty_name'), timePlaced: timestamp('time_placed').notNull(), timeSynced: timestamp('time_synced').notNull(), }); const q = db.insert(t).values({ id: 'id', tags: [], workspaceID: 'workspaceID', enrichment: {}, category: 'category', description: 'description', timePlaced: new Date(), timeSynced: sql`CURRENT_TIMESTAMP(6)`, counterpartyName: 'counterpartyName', }); const query = new PgDialect().sqlToQuery(q.getSQL()); expect(query.typings).toEqual(['none', 'none', 'none', 'json', 'none', 'none', 'none', 'timestamp']); }); test('type hints - case #2', () => { const prefixedUlid = ( name: string, opts: { prefix: Prefix }, ) => customType<{ data: PrefixedUlid; driverData: string }>({ dataType: () => 'uuid', toDriver: (value) => { return value as string; }, fromDriver: (value) => { return `${opts.prefix}_${value}` as PrefixedUlid; }, })(name); const calendars = pgTable('calendars', { id: uuid('id').primaryKey().default(sql`gen_random_uuid()`), orgMembershipId: prefixedUlid('om_id', { prefix: 'om' }).notNull(), platform: text('platform').notNull(), externalId: text('external_id').notNull(), externalData: json('external_data').notNull(), updatedAt: timestamp('updated_at').notNull().default(sql`now()`), createdAt: timestamp('created_at').notNull().default(sql`now()`), }); const q = db .insert(calendars) .values({ id: crypto.randomUUID(), orgMembershipId: 'om_id', platform: 'platform', externalId: 'externalId', externalData: {}, }) .returning(); const query = new PgDialect().sqlToQuery(q.getSQL()); expect(query.typings).toEqual(['uuid', 'none', 'none', 'none', 'json']); }); ================================================ FILE: drizzle-orm/tsconfig.build.json ================================================ { "extends": "./tsconfig.json", "include": ["src"] } ================================================ FILE: drizzle-orm/tsconfig.dts.json ================================================ { "extends": "./tsconfig.build.json", "compilerOptions": { "composite": false, "rootDir": "src", "outDir": "dist-dts", "declaration": true, "noEmit": false, "emitDeclarationOnly": true, "incremental": false }, "include": ["src"] } ================================================ FILE: drizzle-orm/tsconfig.json ================================================ { "extends": "../tsconfig.json", "compilerOptions": { "baseUrl": ".", "paths": { "~/*": ["src/*"] }, "declaration": true, "outDir": "dist", "noEmit": true }, "include": ["src", "scripts"] } ================================================ FILE: drizzle-orm/tsup.config.ts ================================================ import { globSync } from 'glob'; import { defineConfig } from 'tsup'; const entries = globSync('src/**/*.ts'); export default defineConfig({ entry: entries, outDir: 'dist.new', format: ['cjs', 'esm'], bundle: false, splitting: false, sourcemap: true, outExtension({ format }) { return { js: format === 'cjs' ? '.cjs' : '.js', }; }, tsconfig: 'tsconfig.build.json', }); ================================================ FILE: drizzle-orm/type-tests/common/aliased-table.ts ================================================ import { type Equal, Expect } from 'type-tests/utils.ts'; import { eq } from '~/index.ts'; import { drizzle as sqlited } from '~/libsql/index.ts'; import { alias as mysqlAliasFn } from '~/mysql-core/alias.ts'; import { mysqlView } from '~/mysql-core/view.ts'; import { drizzle as mysqld } from '~/mysql2/index.ts'; import { alias as pgAliasFn } from '~/pg-core/alias.ts'; import { pgView } from '~/pg-core/view.ts'; import { drizzle as pgd } from '~/postgres-js/index.ts'; import { alias as sqliteAliasFn } from '~/sqlite-core/alias.ts'; import { sqliteView } from '~/sqlite-core/view.ts'; import { users as mysqlUsers } from '../mysql/tables.ts'; import { users as pgUsers } from '../pg/tables.ts'; import { users as sqliteUsers } from '../sqlite/tables.ts'; const pg = pgd.mock(); const sqlite = sqlited.mock(); const mysql = mysqld.mock(); const pgvUsers = pgView('users_view').as((qb) => qb.select().from(pgUsers)); const sqlitevUsers = sqliteView('users_view').as((qb) => qb.select().from(sqliteUsers)); const mysqlvUsers = mysqlView('users_view').as((qb) => qb.select().from(mysqlUsers)); const pgAlias = pgAliasFn(pgUsers, 'usersAlias'); const sqliteAlias = sqliteAliasFn(sqliteUsers, 'usersAlias'); const mysqlAlias = mysqlAliasFn(mysqlUsers, 'usersAlias'); const pgvAlias = pgAliasFn(pgvUsers, 'usersvAlias'); const sqlitevAlias = sqliteAliasFn(sqlitevUsers, 'usersvAlias'); const mysqlvAlias = mysqlAliasFn(mysqlvUsers, 'usersvAlias'); const pgRes = await pg.select().from(pgUsers).leftJoin(pgAlias, eq(pgAlias.id, pgUsers.id)); const sqliteRes = await sqlite.select().from(sqliteUsers).leftJoin(sqliteAlias, eq(sqliteAlias.id, sqliteUsers.id)); const mysqlRes = await mysql.select().from(mysqlUsers).leftJoin(mysqlAlias, eq(mysqlAlias.id, mysqlUsers.id)); const pgvRes = await pg.select().from(pgUsers).leftJoin(pgvAlias, eq(pgvAlias.id, pgUsers.id)); const sqlitevRes = await sqlite.select().from(sqliteUsers).leftJoin(sqlitevAlias, eq(sqlitevAlias.id, sqliteUsers.id)); const mysqlvRes = await mysql.select().from(mysqlUsers).leftJoin(mysqlvAlias, eq(mysqlvAlias.id, mysqlUsers.id)); Expect< Equal >; Expect< Equal >; Expect< Equal >; Expect< Equal >; Expect< Equal >; Expect< Equal >; ================================================ FILE: drizzle-orm/type-tests/geldb/1-to-1-fk.ts ================================================ import { type GelColumn, integer } from '~/gel-core/columns/index.ts'; import { gelTable } from '~/gel-core/table.ts'; { const test1 = gelTable('test1_table', { id: integer('id').primaryKey(), test2Id: integer('test2_id').references(() => test2.id), }); const test1Id = integer('test1_id').references(() => test1.id); const test2 = gelTable('test2_table', { id: integer('id').primaryKey(), test1Id, }); } { const test1 = gelTable('test1_table', { id: integer('id').primaryKey(), test2Id: integer('test2_id').references((): GelColumn => test2.id), }); const test2 = gelTable('test2_table', { id: integer('id').primaryKey(), test1Id: integer('test1_id').references(() => test1.id), }); } ================================================ FILE: drizzle-orm/type-tests/geldb/array.ts ================================================ import { type Equal, Expect } from 'type-tests/utils.ts'; import type { Column } from '~/column.ts'; import { gelTable, integer } from '~/gel-core/index.ts'; { const table = gelTable('table', { a: integer('a').array().notNull(), }); Expect< Equal< Column< { name: 'a'; tableName: 'table'; dataType: 'number'; columnType: 'GelInteger'; data: number; driverParam: number; notNull: false; hasDefault: false; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }, {}, {} >, typeof table['a']['_']['baseColumn'] > >; } ================================================ FILE: drizzle-orm/type-tests/geldb/count.ts ================================================ import { Expect } from 'type-tests/utils.ts'; import { gelTable, integer, text } from '~/gel-core/index.ts'; import { and, gt, ne } from '~/sql/expressions/index.ts'; import type { Equal } from '~/utils.ts'; import { db } from './db.ts'; const names = gelTable('names', { id: integer('id').primaryKey(), name: text('name'), authorId: integer('author_id'), }); const separate = await db.$count(names); const separateFilters = await db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))); const embedded = await db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: db.$count(names).as('count1'), }) .from(names); const embeddedFilters = await db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))).as('count1'), }) .from(names); Expect>; Expect>; Expect< Equal< { id: number; name: string | null; authorId: number | null; count1: number; }[], typeof embedded > >; Expect< Equal< { id: number; name: string | null; authorId: number | null; count1: number; }[], typeof embeddedFilters > >; ================================================ FILE: drizzle-orm/type-tests/geldb/db-rel.ts ================================================ import * as gel from 'gel'; import { type Equal, Expect } from 'type-tests/utils.ts'; import { drizzle } from '~/gel/index.ts'; import { sql } from '~/sql/sql.ts'; import * as schema from './tables-rel.ts'; const db = drizzle(gel.createClient(), { schema }); { const result = await db.query.users.findMany({ where: (users, { sql }) => sql`char_length(${users.name} > 1)`, limit: sql.placeholder('l'), orderBy: (users, { asc, desc }) => [asc(users.name), desc(users.id)], with: { posts: { where: (posts, { sql }) => sql`char_length(${posts.title} > 1)`, limit: sql.placeholder('l'), columns: { id: false, title: undefined, }, with: { author: true, comments: { where: (comments, { sql }) => sql`char_length(${comments.text} > 1)`, limit: sql.placeholder('l'), columns: { text: true, }, with: { author: { columns: { id: undefined, }, with: { city: { with: { users: true, }, }, }, }, }, }, }, }, }, }); Expect< Equal<{ id: number; name: string; cityId: number; homeCityId: number | null; createdAt: Date; posts: { title: string; authorId: number | null; comments: { text: string; author: { city: { id: number; name: string; users: { id: number; name: string; cityId: number; homeCityId: number | null; createdAt: Date; }[]; }; } | null; }[]; author: { id: number; name: string; cityId: number; homeCityId: number | null; createdAt: Date; } | null; }[]; }[], typeof result> >; } { const result = await db.query.users.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { authorId: true, }, extras: { lower: sql`lower(${schema.posts.title})`.as('lower_name'), }, }, }, }); Expect< Equal< { id: number; name: string; posts: { authorId: number | null; lower: string; }[]; }[], typeof result > >; } ================================================ FILE: drizzle-orm/type-tests/geldb/db.ts ================================================ import * as gel from 'gel'; import { drizzle } from '~/gel/index.ts'; export const db = drizzle(gel.createClient()); ================================================ FILE: drizzle-orm/type-tests/geldb/delete.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import type { GelDelete } from '~/gel-core/index.ts'; import { eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { users } from './tables.ts'; const deleteAll = await db.delete(users); Expect>; const deleteAllStmt = db.delete(users).prepare('deleteAllStmt'); const deleteAllPrepared = await deleteAllStmt.execute(); Expect>; const deleteWhere = await db.delete(users).where(eq(users.id, 1)); Expect>; const deleteWhereStmt = db.delete(users).where(eq(users.id, 1)).prepare('deleteWhereStmt'); const deleteWherePrepared = await deleteWhereStmt.execute(); Expect>; const deleteReturningAll = await db.delete(users).returning(); Expect>; const deleteReturningAllStmt = db.delete(users).returning().prepare('deleteReturningAllStmt'); const deleteReturningAllPrepared = await deleteReturningAllStmt.execute(); Expect>; const deleteReturningPartial = await db.delete(users).returning({ myId: users.id, myHomeCity: users.homeCity, }); Expect>; const deleteReturningPartialStmt = db .delete(users) .returning({ myId: users.id, myHomeCity: users.homeCity, }) .prepare('deleteReturningPartialStmt'); const deleteReturningPartialPrepared = await deleteReturningPartialStmt.execute(); Expect>; { function dynamic(qb: T) { return qb.where(sql``).returning(); } const qbBase = db.delete(users).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { function withReturning(qb: T) { return qb.returning(); } const qbBase = db.delete(users).$dynamic(); const qb = withReturning(qbBase); const result = await qb; Expect>; } { db.delete(users) .where(sql``) // @ts-expect-error method was already called .where(sql``); db.delete(users) .returning() // @ts-expect-error method was already called .returning(); } ================================================ FILE: drizzle-orm/type-tests/geldb/generated-columns.ts ================================================ import { type Equal, Expect } from 'type-tests/utils'; import { gelTable, integer, text } from '~/gel-core'; import { type InferInsertModel, type InferSelectModel, sql } from '~/index'; import { drizzle } from '~/node-postgres'; import { db } from './db'; const users = gelTable( 'users', { id: integer('id').primaryKey(), firstName: text('first_name'), lastName: text('last_name'), email: text('email').notNull(), fullName: text('full_name').generatedAlwaysAs(sql`concat_ws(first_name, ' ', last_name)`).notNull(), upperName: text('upper_name').generatedAlwaysAs( sql` case when first_name is null then null else upper(first_name) end `, ), }, ); { type User = typeof users.$inferSelect; type NewUser = typeof users.$inferInsert; Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string; upperName: string | null; }, User > >(); Expect< Equal< { email: string; id: number; firstName?: string | null | undefined; lastName?: string | null | undefined; }, NewUser > >(); } { type User = InferSelectModel; type NewUser = InferInsertModel; Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string; upperName: string | null; }, User > >(); Expect< Equal< { email: string; id: number; firstName?: string | null | undefined; lastName?: string | null | undefined; }, NewUser > >(); } { const dbUsers = await db.select().from(users); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string; upperName: string | null; }[], typeof dbUsers > >(); } { const db = drizzle({} as any, { schema: { users } }); const dbUser = await db.query.users.findFirst(); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string; upperName: string | null; } | undefined, typeof dbUser > >(); } { const db = drizzle({} as any, { schema: { users } }); const dbUser = await db.query.users.findMany(); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string; upperName: string | null; }[], typeof dbUser > >(); } { // @ts-expect-error - Can't use the fullName because it's a generated column await db.insert(users).values({ firstName: 'test', lastName: 'test', email: 'test', fullName: 'test', }); } { await db.update(users).set({ firstName: 'test', lastName: 'test', email: 'test', // @ts-expect-error - Can't use the fullName because it's a generated column fullName: 'test', }); } const users2 = gelTable( 'users', { id: integer('id').generatedByDefaultAsIdentity(), id2: integer('id').generatedAlwaysAsIdentity(), }, ); { type User = typeof users2.$inferSelect; type NewUser = typeof users2.$inferInsert; Expect< Equal< { id: number; id2: number; }, User > >(); Expect< Equal< { id?: number | undefined; }, NewUser > >(); } const usersSeq = gelTable( 'users', { id: integer('id').generatedByDefaultAsIdentity(), id2: integer('id').generatedAlwaysAsIdentity(), }, ); { type User = typeof usersSeq.$inferSelect; type NewUser = typeof usersSeq.$inferInsert; Expect< Equal< { id: number; id2: number; }, User > >(); Expect< Equal< { id?: number | undefined; }, NewUser > >(); } ================================================ FILE: drizzle-orm/type-tests/geldb/insert.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { boolean, gelTable, integer, QueryBuilder, text } from '~/gel-core/index.ts'; import type { GelInsert } from '~/gel-core/query-builders/insert.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { identityColumnsTable, users } from './tables.ts'; const insert = await db .insert(users) .values({ id: 1, homeCity: 1, class: 'A', createdAt: new Date(), uuid: '', age1: 1, arrayCol: [''], }); Expect>; const insertStmt = db .insert(users) .values({ id: 1, homeCity: 1, class: 'A', createdAt: new Date(), uuid: '', age1: 1, arrayCol: [''], }) .prepare('insertStmt'); const insertPrepared = await insertStmt.execute(); Expect>; const insertSql = await db.insert(users).values({ id: 1, homeCity: sql`123`, class: 'A', age1: 1, createdAt: new Date(), uuid: '', arrayCol: [''], }); Expect>; const insertSqlStmt = db .insert(users) .values({ id: 1, homeCity: sql`123`, class: 'A', age1: 1, createdAt: new Date(), uuid: '', arrayCol: [''], }) .prepare('insertSqlStmt'); const insertSqlPrepared = await insertSqlStmt.execute(); Expect>; const insertReturning = await db .insert(users) .values({ id: 1, homeCity: 1, class: 'A', age1: 1, createdAt: new Date(), uuid: '', arrayCol: [''], }) .returning(); Expect>; const insertReturningStmt = db .insert(users) .values({ id: 1, homeCity: 1, class: 'A', createdAt: new Date(), uuid: '', age1: 1, arrayCol: [''], }) .returning() .prepare('insertReturningStmt'); const insertReturningPrepared = await insertReturningStmt.execute(); Expect>; const insertReturningPartial = await db .insert(users) .values({ id: 1, homeCity: 1, createdAt: new Date(), uuid: '', class: 'A', age1: 1, arrayCol: [''], }) .returning({ id: users.id, homeCity: users.homeCity, mySubclass: users.subClass, }); Expect< Equal<{ id: number; homeCity: number; mySubclass: string | null; }[], typeof insertReturningPartial> >; const insertReturningPartialStmt = db .insert(users) .values({ id: 1, homeCity: 1, class: 'A', createdAt: new Date(), uuid: '', age1: 1, arrayCol: [''], }) .returning({ id: users.id, homeCity: users.homeCity, mySubclass: users.subClass, }) .prepare('insertReturningPartialStmt'); const insertReturningPartialPrepared = await insertReturningPartialStmt.execute(); Expect< Equal<{ id: number; homeCity: number; mySubclass: string | null; }[], typeof insertReturningPartialPrepared> >; const insertReturningSql = await db .insert(users) .values({ id: 1, homeCity: 1, class: 'A', age1: sql`2 + 2`, createdAt: new Date(), uuid: '', arrayCol: [''], }) .returning({ id: users.id, homeCity: users.homeCity, subclassLower: sql`lower(${users.subClass})`, classLower: sql`lower(${users.class})`, }); Expect< Equal<{ id: number; homeCity: number; subclassLower: unknown; classLower: string; }[], typeof insertReturningSql> >; const insertReturningSqlStmt = db .insert(users) .values({ id: 1, homeCity: 1, class: 'A', createdAt: new Date(), uuid: '', age1: sql`2 + 2`, arrayCol: [''], }) .returning({ id: users.id, homeCity: users.homeCity, subclassLower: sql`lower(${users.subClass})`, classLower: sql`lower(${users.class})`, }) .prepare('insertReturningSqlStmt'); const insertReturningSqlPrepared = await insertReturningSqlStmt.execute(); Expect< Equal<{ id: number; homeCity: number; subclassLower: unknown; classLower: string; }[], typeof insertReturningSqlPrepared> >; { function dynamic(qb: T) { return qb.returning(); } const qbBase = db.insert(users).values({ id: 1, age1: 0, class: 'A', homeCity: 0, arrayCol: [], createdAt: new Date(), uuid: '', }).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { function withReturning(qb: T) { return qb.returning(); } const qbBase = db.insert(users).values({ id: 1, age1: 0, class: 'A', homeCity: 0, arrayCol: [], createdAt: new Date(), uuid: '', }).$dynamic(); const qb = withReturning(qbBase); const result = await qb; Expect>; } { db .insert(users) .values({ id: 1, age1: 0, class: 'A', homeCity: 0, arrayCol: [], createdAt: new Date(), uuid: '' }) .returning() // @ts-expect-error method was already called .returning(); } { const users1 = gelTable('users1', { id: integer('id').primaryKey(), name: text('name').notNull(), admin: boolean('admin').notNull().default(false), }); const users2 = gelTable('users2', { id: integer('id').primaryKey(), firstName: text('first_name').notNull(), lastName: text('last_name').notNull(), admin: boolean('admin').notNull().default(false), phoneNumber: text('phone_number'), }); const qb = new QueryBuilder(); db.insert(users1).select(sql`select * from users1`); db.insert(users1).select(() => sql`select * from users1`); db .insert(users1) .select( qb.select({ id: users2.id, name: users2.firstName, admin: users2.admin, }).from(users2), ); db .insert(users1) .select( qb.select({ id: users2.id, name: users2.firstName, admin: users2.admin, }).from(users2).where(sql``), ); db .insert(users2) .select( qb.select({ id: users2.id, firstName: users2.firstName, lastName: users2.lastName, admin: users2.admin, }).from(users2), ); db .insert(users1) .select( qb.select({ id: users2.id, name: sql`${users2.firstName} || ' ' || ${users2.lastName}`.as('name'), admin: users2.admin, }).from(users2), ); db .insert(users1) .select( // @ts-expect-error name is undefined qb.select({ admin: users1.admin }).from(users1), ); db.insert(users1).select(db.select().from(users1)); db.insert(users1).select(() => db.select().from(users1)); db.insert(users1).select((qb) => qb.select().from(users1)); // @ts-expect-error tables have different keys db.insert(users1).select(db.select().from(users2)); // @ts-expect-error tables have different keys db.insert(users1).select(() => db.select().from(users2)); } { db.insert(identityColumnsTable).values([ { byDefaultAsIdentity: 4, name: 'fdf' }, ]); // @ts-expect-error db.insert(identityColumnsTable).values([ { alwaysAsIdentity: 2 }, ]); db.insert(identityColumnsTable).overridingSystemValue().values([ { alwaysAsIdentity: 2 }, ]); // @ts-expect-error db.insert(identityColumnsTable).values([ { generatedCol: 2 }, ]); } ================================================ FILE: drizzle-orm/type-tests/geldb/no-strict-null-checks/test.ts ================================================ import { drizzle } from '~/gel'; import { gelTable, integer, text } from '~/gel-core'; export const test = gelTable( 'test', { id: text('id') .primaryKey() .generatedAlwaysAs('genstr'), intId: integer('int_id') .primaryKey() .generatedAlwaysAsIdentity(), int2Id: integer('int2_id').generatedByDefaultAsIdentity(), name: text('name').$defaultFn(() => '' as string), title: text('title').notNull(), description: text('description'), dbdef: text('dbdef').default('dbdefval'), }, ); const db = drizzle.mock(); db.update(test) .set({ // @ts-expect-error id: '1', name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.update(test) .set({ // @ts-expect-error intId: 1, name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.update(test) .set({ int2Id: 1, name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.update(test) .set({ name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ // @ts-expect-error id: '1', name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ // @ts-expect-error intId: 1, name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ int2Id: 1, name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ title: 'title', description: 'desc', }); db.insert(test).values({ title: 'title', }); ================================================ FILE: drizzle-orm/type-tests/geldb/no-strict-null-checks/tsconfig.json ================================================ { "extends": "../../tsconfig.json", "compilerOptions": { "noEmit": true, "strictNullChecks": false, "strictPropertyInitialization": false, "exactOptionalPropertyTypes": false }, "include": ["./test.ts"] } ================================================ FILE: drizzle-orm/type-tests/geldb/other.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { eq, inArray } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { users } from './tables.ts'; const rawQuery = await db.execute( sql`select ${users.id}, ${users.class} from ${users} where ${inArray(users.id, [1, 2, 3])} and ${ eq(users.class, 'A') }`, ); Expect[], typeof rawQuery>>; ================================================ FILE: drizzle-orm/type-tests/geldb/select.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { alias } from '~/gel-core/alias.ts'; import { boolean, // gelMaterializedView, type GelSelect, type GelSelectQueryBuilder, gelTable, // gelView, integer, QueryBuilder, text, } from '~/gel-core/index.ts'; import { and, arrayContained, arrayContains, arrayOverlaps, between, eq, exists, gt, gte, ilike, inArray, isNotNull, isNull, like, lt, lte, ne, not, notBetween, notExists, notIlike, notInArray, notLike, or, } from '~/sql/expressions/index.ts'; import { type SQL, sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { cities, classes, users } from './tables.ts'; const city = alias(cities, 'city'); const city1 = alias(cities, 'city1'); const leftJoinFull = await db.select().from(users).leftJoin(city, eq(users.id, city.id)); { await db.select().from(users).leftJoin(city, eq(users.id, city.id)); } Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof cities.$inferSelect | null; }[], typeof leftJoinFull > >; const rightJoinFull = await db.select().from(users).rightJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect | null; city: typeof city.$inferSelect; }[], typeof rightJoinFull > >; const innerJoinFull = await db.select().from(users).innerJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof city.$inferSelect; }[], typeof innerJoinFull > >; const fullJoinFull = await db.select().from(users).fullJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect | null; city: typeof city.$inferSelect | null; }[], typeof fullJoinFull > >; const crossJoinFull = await db.select().from(users).crossJoin(city); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof city.$inferSelect; }[], typeof crossJoinFull > >; const leftJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .leftJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number; userText: string | null; cityId: number | null; cityName: string | null; }[], typeof leftJoinFlat> >; const rightJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .rightJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number | null; userText: string | null; cityId: number; cityName: string; }[], typeof rightJoinFlat> >; const innerJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .innerJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number; userText: string | null; cityId: number; cityName: string; }[], typeof innerJoinFlat> >; const fullJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .fullJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number | null; userText: string | null; cityId: number | null; cityName: string | null; }[], typeof fullJoinFlat> >; const crossJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .crossJoin(city); Expect< Equal<{ userId: number; userText: string | null; cityId: number; cityName: string; }[], typeof crossJoinFlat> >; const leftJoinMixed = await db .select({ id: users.id, text: users.text, textUpper: sql`upper(${users.text})`, idComplex: sql`${users.id}::text || ${city.id}::text`, city: { id: city.id, name: city.name, }, }) .from(users) .leftJoin(city, eq(users.id, city.id)); Expect< Equal< { id: number; text: string | null; textUpper: string | null; idComplex: string | null; city: { id: number; name: string; } | null; }[], typeof leftJoinMixed > >; const leftJoinMixed2 = await db .select({ id: users.id, text: users.text, foo: { bar: users.uuid, baz: cities.id, }, }) .from(users) .leftJoin(cities, eq(users.id, cities.id)); Expect< Equal< { id: number; text: string | null; foo: { bar: string; baz: number | null; }; }[], typeof leftJoinMixed2 > >; const join1 = await db .select({ user: { id: users.id, text: users.text, }, city: { id: city.id, name: city.name, nameUpper: sql`upper(${city.name})`, }, }) .from(users) .leftJoin(city, eq(users.id, city.id)); Expect< Equal<{ user: { id: number; text: string | null; }; city: { id: number; name: string; nameUpper: string; } | null; }[], typeof join1> >; const join = await db .select({ users, cities, city, city1: { id: city1.id, }, }) .from(users) .leftJoin(cities, eq(users.id, cities.id)) .rightJoin(city, eq(city.id, users.id)) .rightJoin(city1, eq(city1.id, users.id)); Expect< Equal< { users: { id: number; uuid: string; homeCity: number; currentCity: number | null; class: string; subClass: string | null; text: string | null; age1: number; createdAt: Date; arrayCol: string[]; } | null; cities: { id: number; name: string; population: number | null; } | null; city: { id: number; name: string; population: number | null; } | null; city1: { id: number; }; }[], typeof join > >; const join2 = await db .select({ user: { id: users.id, }, city: { id: cities.id, }, }) .from(users) .fullJoin(cities, eq(users.id, cities.id)); Expect< Equal< { user: { id: number; } | null; city: { id: number; } | null; }[], typeof join2 > >; const join3 = await db .select({ user: { id: users.id, }, city: { id: cities.id, }, class: { id: classes.id, }, }) .from(users) .fullJoin(cities, eq(users.id, cities.id)) .rightJoin(classes, eq(users.id, classes.id)); Expect< Equal< { user: { id: number; } | null; city: { id: number; } | null; class: { id: number; }; }[], typeof join3 > >; db.select() .from(users) .where(exists(db.select().from(cities).where(eq(users.homeCity, cities.id)))); function mapFunkyFuncResult(valueFromDriver: unknown) { return { foo: (valueFromDriver as Record)['foo'], }; } const age = 1; const allOperators = await db .select({ col2: sql`5 - ${users.id} + 1`, // unknown col3: sql`${users.id} + 1`, // number col33: sql`${users.id} + 1`.mapWith(users.id), // number col34: sql`${users.id} + 1`.mapWith(mapFunkyFuncResult), // number col4: sql`one_or_another(${users.id}, ${users.class})`, // string | number col5: sql`true`, // unknown col6: sql`true`, // boolean col7: sql`random()`, // number col8: sql`some_funky_func(${users.id})`.mapWith(mapFunkyFuncResult), // { foo: string } col9: sql`greatest(${users.createdAt}, ${sql.param(new Date(), users.createdAt)})`, // unknown col10: sql`date_or_false(${users.createdAt}, ${ sql.param( new Date(), users.createdAt, ) })`, // Date | boolean col11: sql`${users.age1} + ${age}`, // unknown col12: sql`${users.age1} + ${sql.param(age, users.age1)}`, // unknown col13: sql`lower(${users.class})`, // unknown col14: sql`length(${users.class})`, // number count: sql`count(*)::int`, // number }) .from(users) .where( and( eq(users.id, 1), ne(users.id, 1), or(eq(users.id, 1), ne(users.id, 1)), not(eq(users.id, 1)), gt(users.id, 1), gte(users.id, 1), lt(users.id, 1), lte(users.id, 1), inArray(users.id, [1, 2, 3]), inArray(users.id, db.select({ id: users.id }).from(users)), inArray(users.id, sql`select id from ${users}`), notInArray(users.id, [1, 2, 3]), notInArray(users.id, db.select({ id: users.id }).from(users)), notInArray(users.id, sql`select id from ${users}`), isNull(users.subClass), isNotNull(users.id), exists(db.select({ id: users.id }).from(users)), exists(sql`select id from ${users}`), notExists(db.select({ id: users.id }).from(users)), notExists(sql`select id from ${users}`), between(users.id, 1, 2), notBetween(users.id, 1, 2), like(users.id, '%1%'), notLike(users.id, '%1%'), ilike(users.id, '%1%'), notIlike(users.id, '%1%'), arrayContains(users.arrayCol, ['abc']), arrayContains(users.arrayCol, db.select({ arrayCol: users.arrayCol }).from(users)), arrayContains(users.arrayCol, sql`select array_col from ${users}`), arrayContained(users.arrayCol, ['abc']), arrayContained(users.arrayCol, db.select({ arrayCol: users.arrayCol }).from(users)), arrayContained(users.arrayCol, sql`select array_col from ${users}`), arrayOverlaps(users.arrayCol, ['abc']), arrayOverlaps(users.arrayCol, db.select({ arrayCol: users.arrayCol }).from(users)), arrayOverlaps(users.arrayCol, sql`select array_col from ${users}`), ), ); Expect< Equal<{ col2: unknown; col3: number; col33: number; col34: { foo: any }; col4: string | number; col5: unknown; col6: boolean; col7: number; col8: { foo: any; }; col9: unknown; col10: boolean | Date; col11: unknown; col12: unknown; col13: unknown; col14: number; count: number; }[], typeof allOperators> >; const textSelect = await db .select({ t: users.text, }) .from(users); Expect>; const homeCity = alias(cities, 'homeCity'); const c = alias(classes, 'c'); const otherClass = alias(classes, 'otherClass'); const anotherClass = alias(classes, 'anotherClass'); const friend = alias(users, 'friend'); const currentCity = alias(cities, 'currentCity'); const subscriber = alias(users, 'subscriber'); const closestCity = alias(cities, 'closestCity'); const closestCity2 = alias(cities, 'closestCity2'); const closestCity3 = alias(cities, 'closestCity3'); const closestCity4 = alias(cities, 'closestCity4'); const closestCity5 = alias(cities, 'closestCity5'); const closestCity6 = alias(cities, 'closestCity6'); const closestCity7 = alias(cities, 'closestCity7'); const megaJoin = await db .select({ user: { id: users.id, maxAge: sql`max(${users.age1})`, }, city: { id: cities.id, }, homeCity, c, otherClass, anotherClass, friend, currentCity, subscriber, closestCity, }) .from(users) .innerJoin(cities, sql`${users.id} = ${cities.id}`) .innerJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) .innerJoin(c, eq(c.id, users.class)) .innerJoin(otherClass, sql`${c.id} = ${otherClass.id}`) .innerJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) .innerJoin(friend, sql`${users.id} = ${friend.id}`) .innerJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) .innerJoin(subscriber, sql`${users.class} = ${subscriber.id}`) .innerJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) .limit(1) .offset(1); Expect< Equal< { user: { id: number; maxAge: unknown; }; city: { id: number; }; homeCity: { id: number; name: string; population: number | null; }; c: { id: number; class: string | null; subClass: string; }; otherClass: { id: number; class: string | null; subClass: string; }; anotherClass: { id: number; class: string | null; subClass: string; }; friend: { id: number; uuid: string; homeCity: number; currentCity: number | null; class: string; subClass: string | null; text: string | null; age1: number; createdAt: Date; arrayCol: string[]; }; currentCity: { id: number; name: string; population: number | null; }; subscriber: { id: number; uuid: string; homeCity: number; currentCity: number | null; class: string; subClass: string | null; text: string | null; age1: number; createdAt: Date; arrayCol: string[]; }; closestCity: { id: number; name: string; population: number | null; }; }[], typeof megaJoin > >; const megaLeftJoin = await db .select({ user: { id: users.id, maxAge: sql`max(${users.age1})`, }, city: { id: cities.id, }, homeCity, c, otherClass, anotherClass, friend, currentCity, subscriber, closestCity, closestCity2, closestCity3, closestCity4, closestCity5, closestCity6, closestCity7, }) .from(users) .leftJoin(cities, sql`${users.id} = ${cities.id}`) .leftJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) .leftJoin(c, eq(c.id, users.class)) .leftJoin(otherClass, sql`${c.id} = ${otherClass.id}`) .leftJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) .leftJoin(friend, sql`${users.id} = ${friend.id}`) .leftJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) .leftJoin(subscriber, sql`${users.class} = ${subscriber.id}`) .leftJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity2, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity3, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity4, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity5, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity6, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity7, sql`${users.currentCity} = ${closestCity.id}`) .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) .limit(1) .offset(1); Expect< Equal< { user: { id: number; maxAge: unknown; }; city: { id: number; } | null; homeCity: { id: number; name: string; population: number | null; } | null; c: { id: number; class: string | null; subClass: string; } | null; otherClass: { id: number; class: string | null; subClass: string; } | null; anotherClass: { id: number; class: string | null; subClass: string; } | null; friend: { id: number; uuid: string; homeCity: number; currentCity: number | null; class: string; subClass: string | null; text: string | null; age1: number; createdAt: Date; arrayCol: string[]; } | null; currentCity: { id: number; name: string; population: number | null; } | null; subscriber: { id: number; uuid: string; homeCity: number; currentCity: number | null; class: string; subClass: string | null; text: string | null; age1: number; createdAt: Date; arrayCol: string[]; } | null; closestCity: { id: number; name: string; population: number | null; } | null; closestCity2: { id: number; name: string; population: number | null; } | null; closestCity3: { id: number; name: string; population: number | null; } | null; closestCity4: { id: number; name: string; population: number | null; } | null; closestCity5: { id: number; name: string; population: number | null; } | null; closestCity6: { id: number; name: string; population: number | null; } | null; closestCity7: { id: number; name: string; population: number | null; } | null; }[], typeof megaLeftJoin > >; await db .select({ user: { id: users.id, maxAge: sql`max(${users.age1})`, }, city: { id: cities.id, }, homeCity, c, otherClass, anotherClass, friend, currentCity, subscriber, closestCity, closestCity2, closestCity3, closestCity4, closestCity5, closestCity6, closestCity7, }) .from(users) .fullJoin(cities, sql`${users.id} = ${cities.id}`) .fullJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) .fullJoin(c, eq(c.id, users.class)) .fullJoin(otherClass, sql`${c.id} = ${otherClass.id}`) .fullJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) .fullJoin(friend, sql`${users.id} = ${friend.id}`) .fullJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) .fullJoin(subscriber, sql`${users.class} = ${subscriber.id}`) .fullJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity2, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity3, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity4, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity5, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity6, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity7, sql`${users.currentCity} = ${closestCity.id}`) .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) .limit(1) .offset(1); const friends = alias(users, 'friends'); const join4 = await db .select({ user: { id: users.id, }, city: { id: cities.id, }, class: classes, friend: friends, }) .from(users) .innerJoin(cities, sql`${users.id} = ${cities.id}`) .innerJoin(classes, sql`${cities.id} = ${classes.id}`) .innerJoin(friends, sql`${friends.id} = ${users.id}`) .where(sql`${users.age1} > 0`); Expect< Equal<{ user: { id: number; }; city: { id: number; }; class: { id: number; class: string | null; subClass: string; }; friend: { id: number; uuid: string; homeCity: number; currentCity: number | null; class: string; subClass: string | null; text: string | null; age1: number; createdAt: Date; arrayCol: string[]; }; }[], typeof join4> >; { const authenticated = false as boolean; const result = await db .select({ id: users.id, ...(authenticated ? { city: users.homeCity } : {}), }) .from(users); Expect< Equal< { id: number; city?: number; }[], typeof result > >; } await db .select() .from(users) .for('update'); await db .select() .from(users) .for('no key update', { of: users }); await db .select() .from(users) .for('no key update', { of: users, skipLocked: true }); await db .select() .from(users) .for('share', { of: users, noWait: true }); await db .select() .from(users) // @ts-expect-error - can't use both skipLocked and noWait .for('share', { of: users, noWait: true, skipLocked: true }); await db .select({ id: cities.id, name: sql`upper(${cities.name})`.as('name'), usersCount: sql`count(${users.id})`.as('users'), }) .from(cities) .leftJoin(users, eq(users.homeCity, cities.id)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(cities.id) .having(({ usersCount }) => sql`${usersCount} > 0`); { db .select() .from(users) .where(eq(users.id, 1)); db .select() .from(users) .where(eq(users.id, 1)) // @ts-expect-error - can't use where twice .where(eq(users.id, 1)); db .select() .from(users) .where(eq(users.id, 1)) .limit(10) // @ts-expect-error - can't use where twice .where(eq(users.id, 1)); } { function withFriends(qb: T) { const friends = alias(users, 'friends'); const friends2 = alias(users, 'friends2'); const friends3 = alias(users, 'friends3'); const friends4 = alias(users, 'friends4'); const friends5 = alias(users, 'friends5'); return qb .leftJoin(friends, sql`true`) .leftJoin(friends2, sql`true`) .leftJoin(friends3, sql`true`) .leftJoin(friends4, sql`true`) .leftJoin(friends5, sql`true`); } const qb = db.select().from(users).$dynamic(); const result = await withFriends(qb); Expect< Equal >; } { function withFriends(qb: T) { const friends = alias(users, 'friends'); const friends2 = alias(users, 'friends2'); const friends3 = alias(users, 'friends3'); const friends4 = alias(users, 'friends4'); const friends5 = alias(users, 'friends5'); return qb .leftJoin(friends, sql`true`) .leftJoin(friends2, sql`true`) .leftJoin(friends3, sql`true`) .leftJoin(friends4, sql`true`) .leftJoin(friends5, sql`true`); } const qb = db.select().from(users).$dynamic(); const result = await withFriends(qb); Expect< Equal >; } { function dynamic(qb: T) { return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); } const qb = db.select().from(users).$dynamic(); const result = await dynamic(qb); Expect>; } { // TODO: add to docs function dynamic(qb: T) { return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); } const query = new QueryBuilder().select().from(users).$dynamic(); dynamic(query); } { // TODO: add to docs function paginated(qb: T, page: number) { return qb.limit(10).offset((page - 1) * 10); } const qb = db.select().from(users).$dynamic(); const result = await paginated(qb, 1); Expect>; } { db .select() .from(users) .where(sql``) .limit(10) // @ts-expect-error method was already called .where(sql``); db .select() .from(users) .having(sql``) .limit(10) // @ts-expect-error method was already called .having(sql``); db .select() .from(users) .groupBy(sql``) .limit(10) // @ts-expect-error method was already called .groupBy(sql``); db .select() .from(users) .orderBy(sql``) .limit(10) // @ts-expect-error method was already called .orderBy(sql``); db .select() .from(users) .limit(10) .where(sql``) // @ts-expect-error method was already called .limit(10); db .select() .from(users) .offset(10) .limit(10) // @ts-expect-error method was already called .offset(10); db .select() .from(users) .for('update') .limit(10) // @ts-expect-error method was already called .for('update'); } { const users = gelTable('users', { developer: boolean('developer'), application: text('application'), }); const startIt = (whereCallback: (condition: SQL) => SQL | undefined = (c) => c) => { return db.select().from(users).where(whereCallback(eq(users.developer, true))); }; startIt((c) => and(c, eq(users.application, 'approved'))); } { const school = gelTable('school', { faculty: integer('faculty'), studentid: integer('studentid'), }); const student = gelTable('student', { id: integer('id'), email: text('email'), }); await db .select() .from(school) .where( and( eq(school.faculty, 2), eq( school.studentid, db.select({ id: student.id }).from(student).where(eq(student.email, 'foo@demo.com')), ), ), ); } { const table1 = gelTable('table1', { id: integer().primaryKey(), name: text().notNull(), }); const table2 = gelTable('table2', { id: integer().primaryKey(), age: integer().notNull(), table1Id: integer().references(() => table1.id).notNull(), }); const leftLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).leftJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`, sql`true`); Expect< Equal >; const leftLateralSubRes = await db.select().from(table1).leftJoinLateral( db.select().from(table2).as('sub'), sql`true`, ); Expect< Equal >; const sqLeftLateral = db.select().from(table2).as('sub'); const leftLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqLeftLateral.id, }, ).from(table1).leftJoinLateral( sqLeftLateral, sql`true`, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .leftJoinLateral(table2, sql`true`); const innerLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).innerJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`, sql`true`); Expect< Equal >; const innerLateralSubRes = await db.select().from(table1).innerJoinLateral( db.select().from(table2).as('sub'), sql`true`, ); Expect< Equal >; const sqInnerLateral = db.select().from(table2).as('sub'); const innerLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqLeftLateral.id, }, ).from(table1).innerJoinLateral( sqInnerLateral, sql`true`, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .innerJoinLateral(table2, sql`true`); const crossLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).crossJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`); Expect< Equal >; const crossLateralSubRes = await db.select().from(table1).crossJoinLateral( db.select().from(table2).as('sub'), ); Expect< Equal >; const sqCrossLateral = db.select().from(table2).as('sub'); const crossLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqCrossLateral.id, }, ).from(table1).crossJoinLateral( sqInnerLateral, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .crossJoinLateral(table2); } ================================================ FILE: drizzle-orm/type-tests/geldb/set-operators.ts ================================================ import { type Equal, Expect } from 'type-tests/utils.ts'; import { except, exceptAll, type GelSetOperator, intersect, intersectAll, union, unionAll } from '~/gel-core/index.ts'; import { eq } from '~/sql/expressions/index.ts'; import { desc, sql } from '~/sql/index.ts'; import { db } from './db.ts'; import { cities, classes, users } from './tables.ts'; const except2Test = await except( db.select({ userId: users.id, }) .from(users), db.select({ userId: users.id, }).from(users), ); Expect>; const exceptAll2Test = await exceptAll( db.select({ id: users.id, age1: users.age1, }) .from(users).where(sql``), db.select({ id: users.id, age1: users.age1, }).from(users), ); Expect>; const unionTest = await db .select({ id: users.id }) .from(users) .union( db .select({ id: users.id }) .from(users), ); Expect>; const unionAllTest = await db .select({ id: users.id, age: users.age1 }) .from(users) .unionAll( db.select({ id: users.id, age: users.age1 }) .from(users) .leftJoin(cities, eq(users.id, cities.id)), ); Expect>; const intersectTest = await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) .intersect(({ intersect }) => intersect( db .select({ id: users.id, homeCity: users.homeCity }) .from(users), db .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) .from(users), ) ); Expect>; const intersectAllTest = await db .select({ id: users.id, homeCity: users.class }) .from(users) .intersect( db .select({ id: users.id, homeCity: users.class }) .from(users) .leftJoin(cities, eq(users.id, cities.id)), ); Expect>; const exceptTest = await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) .except( db .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) .from(users), ); Expect>; const exceptAllTest = await db .select({ id: users.id, homeCity: users.class }) .from(users) .except( db .select({ id: users.id, homeCity: sql`${users.class}` }) .from(users), ); Expect>; const union2Test = await union(db.select().from(cities), db.select().from(cities), db.select().from(cities)); Expect>; const unionAll2Test = await unionAll( db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select().from(cities), ); Expect>; const intersect2Test = await intersect( db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), ); Expect>; const intersectAll2Test = await intersectAll( union( db.select({ id: cities.id, }).from(cities), db.select({ id: cities.id, }) .from(cities).where(sql``), ), db.select({ id: cities.id, }) .from(cities), ).orderBy(desc(cities.id)).limit(23); Expect>; const unionfull = await union(db.select().from(users), db.select().from(users)).orderBy(sql``).limit(1).offset(2); Expect< Equal<{ id: number; uuid: string; homeCity: number; currentCity: number | null; class: string; subClass: string | null; text: string | null; age1: number; createdAt: Date; arrayCol: string[]; }[], typeof unionfull> >; union(db.select().from(users), db.select().from(users)) .orderBy(sql``) // @ts-expect-error - method was already called .orderBy(sql``); union(db.select().from(users), db.select().from(users)) .offset(1) // @ts-expect-error - method was already called .offset(2); union(db.select().from(users), db.select().from(users)) .orderBy(sql``) // @ts-expect-error - method was already called .orderBy(sql``); { function dynamic(qb: T) { return qb.orderBy(sql``).limit(1).offset(2); } const qb = union(db.select().from(users), db.select().from(users)).$dynamic(); const result = await dynamic(qb); Expect>; } await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error .intersect(({ intersect }) => intersect(db.select().from(users), db.select().from(users))); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select().from(classes).union(db.select({ id: classes.id }).from(classes)); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: classes.id }).from(classes).union(db.select().from(classes).where(sql``)); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: classes.id }).from(classes).union(db.select().from(classes)); union( db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), db.select({ id: cities.id, name: cities.name }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select().from(cities), ); union( db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities), db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``).limit(3).$dynamic(), db.select({ id: cities.id, name: cities.name }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities).where(sql``), db.select({ id: sql`${cities.id}` }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities).where(sql``), ); ================================================ FILE: drizzle-orm/type-tests/geldb/subquery.ts ================================================ import { Expect } from 'type-tests/utils.ts'; import { alias, gelTable, integer, text } from '~/gel-core/index.ts'; import { and, eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import type { DrizzleTypeError, Equal } from '~/utils.ts'; import { db } from './db.ts'; const names = gelTable('names', { id: integer('id').primaryKey(), name: text('name'), authorId: integer('author_id'), }); const n1 = db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: sql`count(1)::int`.as('count1'), }) .from(names) .groupBy(names.id, names.name, names.authorId) .as('n1'); const n2 = db .select({ id: names.id, authorId: names.authorId, totalCount: sql`count(1)::int`.as('totalCount'), }) .from(names) .groupBy(names.id, names.authorId) .as('n2'); const result = await db .select({ name: n1.name, authorId: n1.authorId, count1: n1.count1, totalCount: n2.totalCount, }) .from(n1) .innerJoin(n2, and(eq(n2.id, n1.id), eq(n2.authorId, n1.authorId))); Expect< Equal< { name: string | null; authorId: number | null; count1: number; totalCount: number; }[], typeof result > >; const names2 = alias(names, 'names2'); const sq1 = db .select({ id: names.id, name: names.name, id2: names2.id, }) .from(names) .leftJoin(names2, eq(names.name, names2.name)) .as('sq1'); const res = await db.select().from(sq1); Expect< Equal< { id: number; name: string | null; id2: number | null; }[], typeof res > >; { const sq = db.select({ count: sql`count(1)::int` }).from(names).as('sq'); Expect ? true : false>; } const sqUnion = db.select().from(names).union(db.select().from(names2)).as('sqUnion'); const resUnion = await db.select().from(sqUnion); Expect< Equal<{ id: number; name: string | null; authorId: number | null; }[], typeof resUnion> >; ================================================ FILE: drizzle-orm/type-tests/geldb/tables-rel.ts ================================================ import { foreignKey, gelTable, integer, text, timestamptz } from '~/gel-core/index.ts'; import { relations } from '~/relations.ts'; export const users = gelTable('users', { id: integer('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').references(() => cities.id).notNull(), homeCityId: integer('home_city_id').references(() => cities.id), createdAt: timestamptz('created_at').notNull(), }); export const usersConfig = relations(users, ({ one, many }) => ({ city: one(cities, { relationName: 'UsersInCity', fields: [users.cityId], references: [cities.id] }), homeCity: one(cities, { fields: [users.homeCityId], references: [cities.id] }), posts: many(posts), comments: many(comments), })); export const cities = gelTable('cities', { id: integer('id').primaryKey(), name: text('name').notNull(), }); export const citiesConfig = relations(cities, ({ many }) => ({ users: many(users, { relationName: 'UsersInCity' }), })); export const posts = gelTable('posts', { id: integer('id').primaryKey(), title: text('title').notNull(), authorId: integer('author_id').references(() => users.id), }); export const postsConfig = relations(posts, ({ one, many }) => ({ author: one(users, { fields: [posts.authorId], references: [users.id] }), comments: many(comments), })); export const comments = gelTable('comments', { id: integer('id').primaryKey(), postId: integer('post_id').references(() => posts.id).notNull(), authorId: integer('author_id').references(() => users.id), text: text('text').notNull(), }); export const commentsConfig = relations(comments, ({ one }) => ({ post: one(posts, { fields: [comments.postId], references: [posts.id] }), author: one(users, { fields: [comments.authorId], references: [users.id] }), })); export const books = gelTable('books', { id: integer('id').primaryKey(), name: text('name').notNull(), }); export const booksConfig = relations(books, ({ many }) => ({ authors: many(bookAuthors), })); export const bookAuthors = gelTable('book_authors', { bookId: integer('book_id').references(() => books.id).notNull(), authorId: integer('author_id').references(() => users.id).notNull(), role: text('role').notNull(), }); export const bookAuthorsConfig = relations(bookAuthors, ({ one }) => ({ book: one(books, { fields: [bookAuthors.bookId], references: [books.id] }), author: one(users, { fields: [bookAuthors.authorId], references: [users.id] }), })); export const node = gelTable('node', { id: integer('id').primaryKey(), parentId: integer('parent_id'), leftId: integer('left_id'), rightId: integer('right_id'), }, (node) => ({ fk1: foreignKey({ columns: [node.parentId], foreignColumns: [node.id] }), fk2: foreignKey({ columns: [node.leftId], foreignColumns: [node.id] }), fk3: foreignKey({ columns: [node.rightId], foreignColumns: [node.id] }), })); export const nodeRelations = relations(node, ({ one }) => ({ parent: one(node, { fields: [node.parentId], references: [node.id] }), left: one(node, { fields: [node.leftId], references: [node.id] }), right: one(node, { fields: [node.rightId], references: [node.id] }), })); ================================================ FILE: drizzle-orm/type-tests/geldb/tables.ts ================================================ import { DateDuration, Duration, LocalDate, LocalDateTime, RelativeDuration } from 'gel'; import crypto from 'node:crypto'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { relDuration } from '~/gel-core/columns/relative-duration.ts'; import { bigint, bigintT, boolean, check, dateDuration, decimal, doublePrecision, duration, foreignKey, type GelColumn, gelTable, type GelTableWithColumns, index, integer, json, localDate, primaryKey, real, smallint, text, timestamp, timestamptz, uniqueIndex, uuid, } from '~/gel-core/index.ts'; import { gelSchema } from '~/gel-core/schema.ts'; import { eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import type { InferInsertModel, InferSelectModel } from '~/table.ts'; import type { Simplify } from '~/utils.ts'; import { db } from './db.ts'; // export const myEnum = gelEnum('my_enum', ['a', 'b', 'c']); export const identityColumnsTable = gelTable('identity_columns_table', { generatedCol: integer('generated_col').generatedAlwaysAs(1), alwaysAsIdentity: integer('always_as_identity').generatedAlwaysAsIdentity(), byDefaultAsIdentity: integer('by_default_as_identity').generatedByDefaultAsIdentity(), name: text('name'), }); Expect, typeof identityColumnsTable['$inferSelect']>>; Expect, typeof identityColumnsTable['_']['inferSelect']>>; Expect, typeof identityColumnsTable['$inferInsert']>>; Expect, typeof identityColumnsTable['_']['inferInsert']>>; Expect< Equal< InferInsertModel, Simplify > >; Expect< Equal< InferInsertModel, Simplify > >; export const users = gelTable( 'users_table', { id: integer('id').primaryKey(), uuid: uuid('uuid').notNull(), homeCity: integer('home_city') .notNull() .references(() => cities.id), currentCity: integer('current_city').references(() => cities.id), class: text('class').notNull(), subClass: text('sub_class'), text: text('text'), age1: integer('age1').notNull(), createdAt: timestamptz('created_at').notNull(), arrayCol: text('array_col').array().notNull(), }, (users) => [ uniqueIndex('usersAge1Idx').on(users.class.asc().nullsFirst(), sql``), index('usersAge2Idx').on(sql``), uniqueIndex('uniqueClass') .using('btree', users.class.desc().op('text_ops'), users.subClass.nullsLast()) .where(sql`${users.class} is not null`) .concurrently(), check('legalAge', sql`${users.age1} > 18`), foreignKey({ columns: [users.subClass], foreignColumns: [classes.subClass] }) .onUpdate('cascade') .onDelete('cascade'), foreignKey({ columns: [users.class, users.subClass], foreignColumns: [classes.class, classes.subClass], }), primaryKey({ columns: [users.age1, users.class] }), ], ); Expect, typeof users['$inferSelect']>>; Expect, typeof users['_']['inferSelect']>>; Expect, typeof users['$inferInsert']>>; Expect, typeof users['_']['inferInsert']>>; export const cities = gelTable('cities_table', { id: integer('id').primaryKey(), name: text('name').notNull(), population: integer('population').default(0), }, (cities) => ({ citiesNameIdx: index().on(cities.id), })); export const classes = gelTable('classes_table', { id: integer('id').primaryKey(), class: text('class'), subClass: text('sub_class').notNull(), }); Expect< Equal<{ id: number; class?: string | null; subClass: string; }, typeof classes.$inferInsert> >; export const salEmp = gelTable('sal_emp', { name: text('name').notNull(), payByQuarter: integer('pay_by_quarter').array().notNull(), schedule: text('schedule').array().array().notNull(), }); export const tictactoe = gelTable('tictactoe', { squares: integer('squares').array(3).array(3).notNull(), }); export const customSchema = gelSchema('custom'); export const citiesCustom = customSchema.table('cities_table', { id: integer('id').primaryKey(), name: text('name').notNull(), population: integer('population').default(0), }, (cities) => [index().on(cities.id)]); // TODO not exists // { // const newYorkers = gelView('new_yorkers', { // userId: integer('user_id').notNull(), // cityId: integer('city_id'), // }).existing(); // Expect< // Equal< // GelViewWithSelection<'new_yorkers', true, { // userId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'user_id'; // dataType: 'number'; // columnType: 'GelInteger'; // data: number; // driverParam: string | number; // hasDefault: false; // notNull: true; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // cityId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'city_id'; // dataType: 'number'; // columnType: 'GelInteger'; // notNull: false; // hasDefault: false; // data: number; // driverParam: string | number; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // }>, // typeof newYorkers // > // >; // } // { // const newYorkers = customSchema.view('new_yorkers', { // userId: integer('user_id').notNull(), // cityId: integer('city_id'), // }).existing(); // Expect< // Equal< // GelViewWithSelection<'new_yorkers', true, { // userId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'user_id'; // dataType: 'number'; // columnType: 'GelInteger'; // data: number; // driverParam: number; // hasDefault: false; // notNull: true; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // cityId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'city_id'; // dataType: 'number'; // columnType: 'GelInteger'; // notNull: false; // hasDefault: false; // data: number; // driverParam: number; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // }>, // typeof newYorkers // > // >; // } // export const newYorkers2 = gelMaterializedView('new_yorkers') // .using('btree') // .with({ // fillfactor: 90, // toastTupleTarget: 0.5, // autovacuumEnabled: true, // }) // .tablespace('custom_tablespace') // .withNoData() // .as((qb) => { // const sq = qb // .$with('sq') // .as( // qb.select({ userId: users.id, cityId: cities.id }) // .from(users) // .leftJoin(cities, eq(cities.id, users.homeCity)) // .where(sql`${users.age1} > 18`), // ); // return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); // }); // Expect< // Equal< // GelMaterializedViewWithSelection<'new_yorkers', false, { // userId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'id'; // dataType: 'number'; // columnType: 'GelSerial'; // data: number; // driverParam: number; // notNull: true; // hasDefault: true; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: true; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // cityId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'id'; // dataType: 'number'; // columnType: 'GelSerial'; // data: number; // driverParam: number; // notNull: false; // hasDefault: true; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: true; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // }>, // typeof newYorkers2 // > // >; // { // const newYorkers2 = customSchema.materializedView('new_yorkers') // .using('btree') // .with({ // fillfactor: 90, // toastTupleTarget: 0.5, // autovacuumEnabled: true, // }) // .tablespace('custom_tablespace') // .withNoData() // .as((qb) => { // const sq = qb // .$with('sq') // .as( // qb.select({ userId: users.id, cityId: cities.id }) // .from(users) // .leftJoin(cities, eq(cities.id, users.homeCity)) // .where(sql`${users.age1} > 18`), // ); // return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); // }); // Expect< // Equal< // GelMaterializedViewWithSelection<'new_yorkers', false, { // userId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'id'; // dataType: 'number'; // columnType: 'GelSerial'; // data: number; // driverParam: number; // notNull: true; // hasDefault: true; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: true; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // cityId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'id'; // dataType: 'number'; // columnType: 'GelSerial'; // data: number; // driverParam: number; // notNull: false; // hasDefault: true; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: true; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // }>, // typeof newYorkers2 // > // >; // } // { // const newYorkers2 = gelMaterializedView('new_yorkers', { // userId: integer('user_id').notNull(), // cityId: integer('city_id'), // }) // .using('btree') // .with({ // fillfactor: 90, // toastTupleTarget: 0.5, // autovacuumEnabled: true, // }) // .tablespace('custom_tablespace') // .withNoData() // .as( // sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ // eq(cities.id, users.homeCity) // } where ${gt(users.age1, 18)}`, // ); // Expect< // Equal< // GelMaterializedViewWithSelection<'new_yorkers', false, { // userId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'user_id'; // dataType: 'number'; // columnType: 'GelInteger'; // data: number; // driverParam: string | number; // hasDefault: false; // notNull: true; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // cityId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'city_id'; // dataType: 'number'; // columnType: 'GelInteger'; // notNull: false; // hasDefault: false; // data: number; // driverParam: string | number; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // }>, // typeof newYorkers2 // > // >; // } // { // const newYorkers2 = customSchema.materializedView('new_yorkers', { // userId: integer('user_id').notNull(), // cityId: integer('city_id'), // }) // .using('btree') // .with({ // fillfactor: 90, // toastTupleTarget: 0.5, // autovacuumEnabled: true, // }) // .tablespace('custom_tablespace') // .withNoData() // .as( // sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ // eq(cities.id, users.homeCity) // } where ${gt(users.age1, 18)}`, // ); // Expect< // Equal< // GelMaterializedViewWithSelection<'new_yorkers', false, { // userId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'user_id'; // dataType: 'number'; // columnType: 'GelInteger'; // data: number; // driverParam: string | number; // hasDefault: false; // notNull: true; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // cityId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'city_id'; // dataType: 'number'; // columnType: 'GelInteger'; // notNull: false; // hasDefault: false; // data: number; // driverParam: string | number; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // }>, // typeof newYorkers2 // > // >; // } // { // const newYorkers2 = gelMaterializedView('new_yorkers', { // userId: integer('user_id').notNull(), // cityId: integer('city_id'), // }).existing(); // Expect< // Equal< // GelMaterializedViewWithSelection<'new_yorkers', true, { // userId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'user_id'; // dataType: 'number'; // columnType: 'GelInteger'; // data: number; // driverParam: string | number; // hasDefault: false; // notNull: true; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // cityId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'city_id'; // dataType: 'number'; // columnType: 'GelInteger'; // notNull: false; // hasDefault: false; // data: number; // driverParam: string | number; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // }>, // typeof newYorkers2 // > // >; // } // { // const newYorkers2 = customSchema.materializedView('new_yorkers', { // userId: integer('user_id').notNull(), // cityId: integer('city_id'), // }).existing(); // Expect< // Equal< // GelMaterializedViewWithSelection<'new_yorkers', true, { // userId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'user_id'; // dataType: 'number'; // columnType: 'GelInteger'; // data: number; // driverParam: string | number; // hasDefault: false; // notNull: true; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // cityId: GelColumn<{ // tableName: 'new_yorkers'; // name: 'city_id'; // dataType: 'number'; // columnType: 'GelInteger'; // notNull: false; // hasDefault: false; // data: number; // driverParam: string | number; // enumValues: undefined; // baseColumn: never; // generated: undefined; // identity: undefined; // isPrimaryKey: false; // isAutoincrement: false; // hasRuntimeDefault: false; // }>; // }>, // typeof newYorkers2 // > // >; // } // await db.refreshMaterializedView(newYorkers2).concurrently(); // await db.refreshMaterializedView(newYorkers2).withNoData(); // await db.refreshMaterializedView(newYorkers2).concurrently().withNoData(); // await db.refreshMaterializedView(newYorkers2).withNoData().concurrently(); // await migrate(db, { // migrationsFolder: './drizzle/gel', // onMigrationError(error) { // if (['0001_drizli_klaud', '0002_beep_boop'].includes(error.migration.name)) { // return; // } // throw error; // }, // }); // TODO not sure that this should be implemented now // { // const customTextRequired = customType<{ // data: string; // driverData: string; // config: { length: number }; // configRequired: true; // }>({ // dataType(config) { // Expect>; // return `varchar(${config.length})`; // }, // toDriver(value) { // Expect>(); // return value; // }, // fromDriver(value) { // Expect>(); // return value; // }, // }); // customTextRequired('t', { length: 10 }); // customTextRequired({ length: 10 }); // // @ts-expect-error - config is required // customTextRequired('t'); // // @ts-expect-error - config is required // customTextRequired(); // } // { // const customTextOptional = customType<{ // data: string; // driverData: string; // config: { length: number }; // }>({ // dataType(config) { // Expect>; // return config ? `varchar(${config.length})` : `text`; // }, // toDriver(value) { // Expect>(); // return value; // }, // fromDriver(value) { // Expect>(); // return value; // }, // }); // customTextOptional('t', { length: 10 }); // customTextOptional('t'); // customTextOptional({ length: 10 }); // customTextOptional(); // } { const cities1 = gelTable('cities_table', { id: integer('id').primaryKey(), name: text('name').notNull().primaryKey(), role: text('role').$type<'admin' | 'user'>().default('user').notNull(), population: integer('population').default(0), }); const cities2 = gelTable('cities_table', ({ text, integer }) => ({ id: integer('id').primaryKey(), name: text('name').notNull().primaryKey(), role: text('role').$type<'admin' | 'user'>().default('user').notNull(), population: integer('population').default(0), })); type Expected = GelTableWithColumns<{ name: 'cities_table'; schema: undefined; dialect: 'gel'; columns: { id: GelColumn<{ tableName: 'cities_table'; name: 'id'; dataType: 'number'; columnType: 'GelInteger'; data: number; driverParam: number; hasDefault: false; notNull: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; name: GelColumn<{ tableName: 'cities_table'; name: 'name'; dataType: 'string'; columnType: 'GelText'; data: string; driverParam: string; hasDefault: false; enumValues: undefined; notNull: true; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; role: GelColumn< { tableName: 'cities_table'; name: 'role'; dataType: 'string'; columnType: 'GelText'; data: 'admin' | 'user'; driverParam: string; hasDefault: true; enumValues: undefined; notNull: true; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }, {}, { $type: 'admin' | 'user' } >; population: GelColumn<{ tableName: 'cities_table'; name: 'population'; dataType: 'number'; columnType: 'GelInteger'; data: number; driverParam: number; notNull: false; hasDefault: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }; }>; Expect>; Expect>; } { gelTable('test', { bigint: bigintT('bigintT').default(BigInt(10)), timestamp: timestamp('timestamp').default(new LocalDateTime(2023, 12, 3, 12, 3, 12)), timestamptz: timestamptz('timestamp2').default(new Date()), }); } { const test = gelTable('test', { col1: decimal('col1').notNull().default('10.2'), }); Expect>; } { const getUsersTable = (schemaName: TSchema) => { return gelSchema(schemaName).table('users', { id: integer('id').primaryKey(), name: text('name').notNull(), }); }; const users1 = getUsersTable('id1'); Expect>; const users2 = getUsersTable('id2'); Expect>; } { const internalStaff = gelTable('internal_staff', { userId: integer('user_id').notNull(), }); const customUser = gelTable('custom_user', { id: integer('id').notNull(), }); const ticket = gelTable('ticket', { staffId: integer('staff_id').notNull(), }); const subq = db .select() .from(internalStaff) .leftJoin( customUser, eq(internalStaff.userId, customUser.id), ).as('internal_staff'); const mainQuery = await db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); Expect< Equal<{ internal_staff: { internal_staff: { userId: number; }; custom_user: { id: number | null; }; } | null; ticket: { staffId: number; }; }[], typeof mainQuery> >; } { const test = gelTable('test', { id: text('id').$defaultFn(() => crypto.randomUUID()).primaryKey(), }); Expect< Equal<{ id?: string; }, typeof test.$inferInsert> >; } { gelTable('test', { id: integer('id').$default(() => 1), id2: integer('id').$defaultFn(() => 1), // @ts-expect-error - should be number id3: integer('id').$default(() => '1'), // @ts-expect-error - should be number id4: integer('id').$defaultFn(() => '1'), }); } { gelTable('all_columns', { sm: smallint('smallint'), smdef: smallint('smallint_def').default(10), int: integer('integer'), intdef: integer('integer_def').default(10), bigint: bigint('bigint'), bigintT: bigintT('bigintT').default(BigInt(100)), bool: boolean('boolean'), booldef: boolean('boolean_def').default(true), text: text('text'), textdef: text('textdef').default('text'), decimal: decimal('decimal'), decimaldef: decimal('decimaldef').default('100.0'), doublePrecision: doublePrecision('doublePrecision'), doublePrecisiondef: doublePrecision('doublePrecisiondef').default(100), real: real('real'), realdef: real('realdef').default(100), json: json('json').$type<{ attr: string }>(), jsondef: json('jsondef').$type<{ attr: string }>().default({ attr: 'value' }), jsonb: json('json').$type<{ attr: string }>(), jsonbdef: json('json').$type<{ attr: string }>().default({ attr: 'value' }), localDate: localDate('localDate'), localDate2: localDate('local_date_def').default(new LocalDate(2023, 12, 1)), duration: duration('duration'), durationdef: duration('durationdef').default(new Duration(12, 523, 0, 9, 0, 0, 0, 0, 0, 0)), relDuration: relDuration('relDuration'), relDurationdef: relDuration('relDurationdef').default(new RelativeDuration(12, 523, 0, 9, 0, 0, 0, 0, 0)), dateDuration: dateDuration('dateDuration'), dateDurationdef: dateDuration('relDurationdef').default(new DateDuration(12, 12, 12, 6)), timestamp: timestamp('timestamp'), timestampdef: timestamp('timestamp_def').default(new LocalDateTime(2023, 1, 1, 1, 1, 14, 0, 0, 0)), timestamptz: timestamptz('timestamp3'), timestamptz2: timestamptz('timestampdef').default(new Date()), }); } { const keysAsColumnNames = gelTable('test', { id: integer(), name: text(), }); Expect>; Expect>; } { gelTable('all_columns_without_name', { sm: smallint(), smdef: smallint().default(10), int: integer(), intdef: integer().default(10), bigint: bigint(), bigintT: bigintT().default(BigInt(100)), bool: boolean(), booldef: boolean().default(true), text: text(), textdef: text().default('text'), decimal: decimal(), decimaldef: decimal().default('100.0'), doublePrecision: doublePrecision(), doublePrecisiondef: doublePrecision().default(100), real: real(), realdef: real().default(100), json: json().$type<{ attr: string }>(), jsondef: json().$type<{ attr: string }>().default({ attr: 'value' }), jsonb: json().$type<{ attr: string }>(), jsonbdef: json().$type<{ attr: string }>().default({ attr: 'value' }), localDate: localDate(), localDate2: localDate().default(new LocalDate(2023, 12, 1)), duration: duration(), durationdef: duration().default(new Duration(12, 523, 0, 9, 0, 0, 0, 0, 0, 0)), relDuration: relDuration(), relDurationdef: relDuration().default(new RelativeDuration(12, 523, 0, 9, 0, 0, 0, 0, 0)), dateDuration: dateDuration(), dateDurationdef: dateDuration().default(new DateDuration(12, 12, 12, 6)), timestamp: timestamp(), timestampdef: timestamp().default(new LocalDateTime(2023, 1, 1, 1, 1, 14, 0, 0, 0)), timestamptz: timestamptz(), timestamptz2: timestamptz().default(new Date()), }); } ================================================ FILE: drizzle-orm/type-tests/geldb/update.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import type { GelUpdate } from '~/gel-core/index.ts'; import { eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import type { Simplify } from '~/utils.ts'; import { db } from './db.ts'; import { cities, salEmp, users } from './tables.ts'; const update = await db.update(users) .set({ text: 'John', age1: 30, }) .where(eq(users.id, 1)); Expect>; const updateStmt = db.update(users) .set({ text: 'John', age1: 30, }) .where(eq(users.id, 1)) .prepare('updateStmt'); const updatePrepared = await updateStmt.execute(); Expect>; const updateReturning = await db.update(users) .set({ text: 'John', age1: 30, }) .where(eq(users.id, 1)) .returning({ text: users.text, }); Expect>; const updateReturningStmt = db.update(users) .set({ text: 'John', age1: 30, }) .where(eq(users.id, 1)) .returning({ text: users.text, }) .prepare('updateReturningStmt'); const updateReturningPrepared = await updateReturningStmt.execute(); Expect>; { function dynamic(qb: T) { return qb.where(sql``).returning(); } const qbBase = db.update(users).set({}).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { function withReturning(qb: T) { return qb.returning(); } const qbBase = db.update(users).set({}).$dynamic(); const qb = withReturning(qbBase); const result = await qb; Expect>; } { db .update(users) .set({}) .returning() // @ts-expect-error method was already called .returning(); db .update(users) .set({}) .where(sql``) // @ts-expect-error method was already called .where(sql``); } { db .update(users) .set({}) .from(sql``) .leftJoin(sql``, (table, from) => { Expect>; Expect>; return sql``; }); db .update(users) .set({}) .from(cities) .leftJoin(sql``, (table, from) => { Expect>; Expect>; return sql``; }); const citiesSq = db.$with('cities_sq').as(db.select({ id: cities.id }).from(cities)); db .with(citiesSq) .update(users) .set({}) .from(citiesSq) .leftJoin(sql``, (table, from) => { Expect>; Expect>; return sql``; }); db .with(citiesSq) .update(users) .set({ homeCity: citiesSq.id, }) .from(citiesSq); } { const result = await db.update(users).set({}).from(cities).returning(); Expect< Equal[], typeof result> >; } { const result1 = await db.update(users).set({}).from(cities).leftJoin(salEmp, sql``).returning(); Expect< Equal[], typeof result1> >; const result2 = await db.update(users).set({}).from(cities).rightJoin(salEmp, sql``).returning(); Expect< Equal[], typeof result2> >; const result3 = await db.update(users).set({}).from(cities).innerJoin(salEmp, sql``).returning(); Expect< Equal[], typeof result3> >; const result4 = await db.update(users).set({}).from(cities).fullJoin(salEmp, sql``).returning(); Expect< Equal[], typeof result4> >; } { const result = await db.update(users).set({}).from(cities).returning({ id: users.id, cities: cities, cityName: cities.name, }); Expect< Equal[], typeof result> >; } { const result1 = await db.update(users).set({}).from(cities).leftJoin(salEmp, sql``).returning({ id: users.id, cities: cities, cityName: cities.name, salEmp: salEmp, salEmpName: salEmp.name, }); Expect< Equal[], typeof result1> >; const result2 = await db.update(users).set({}).from(cities).rightJoin(salEmp, sql``).returning({ id: users.id, cities: cities, cityName: cities.name, salEmp: salEmp, salEmpName: salEmp.name, }); Expect< Equal[], typeof result2> >; const result3 = await db.update(users).set({}).from(cities).innerJoin(salEmp, sql``).returning({ id: users.id, cities: cities, cityName: cities.name, salEmp: salEmp, salEmpName: salEmp.name, }); Expect< Equal[], typeof result3> >; const result4 = await db.update(users).set({}).from(cities).fullJoin(salEmp, sql``).returning({ id: users.id, cities: cities, cityName: cities.name, salEmp: salEmp, salEmpName: salEmp.name, }); Expect< Equal[], typeof result4> >; } { await db .update(users) .set({}) // @ts-expect-error can't use joins before from .fullJoin(salEmp, sql``); } ================================================ FILE: drizzle-orm/type-tests/geldb/with.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { gelTable, integer, text } from '~/gel-core/index.ts'; import { gt, inArray, like } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; const orders = gelTable('orders', { id: integer('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull(), amount: integer('amount').notNull(), quantity: integer('quantity').notNull(), generated: text('generatedText').generatedAlwaysAs(sql``), }); { const regionalSales = db .$with('regional_sales') .as((qb) => qb .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region) ); const topRegions = db .$with('top_regions') .as((qb) => qb .select({ region: orders.region, totalSales: orders.amount, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ) ); const result = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`sum(${orders.quantity})`, productSales: sql`sum(${orders.amount})`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))); Expect< Equal<{ region: string; product: string; productUnits: number; productSales: number; }[], typeof result> >; const allOrdersWith = db.$with('all_orders_with').as(db.select().from(orders)); const allFromWith = await db.with(allOrdersWith).select().from(allOrdersWith); Expect< Equal<{ id: number; region: string; product: string; amount: number; quantity: number; generated: string | null; }[], typeof allFromWith> >; const regionalSalesWith = db.$with('regional_sales_with').as(db.select().from(regionalSales)); db.with(regionalSalesWith).select().from(regionalSalesWith).where(like(regionalSalesWith.totalSales, 'abc')); } ================================================ FILE: drizzle-orm/type-tests/knex/index.ts ================================================ import { knex } from 'knex'; import { type Equal, Expect } from 'type-tests/utils.ts'; import { pgTable, serial, text } from '~/pg-core/index.ts'; import type { PromiseOf } from '~/utils.ts'; import '~/knex'; const test = pgTable('test', { id: serial('id').primaryKey(), name: text('name').notNull(), }); declare module 'knex/types/tables.ts' { interface Tables { test: Knexify; } } const db = knex({}); { const res = db('test').select(); Expect, typeof test.$inferSelect[]>>; } ================================================ FILE: drizzle-orm/type-tests/kysely/index.ts ================================================ import { Kysely, PostgresDialect } from 'kysely'; import pg from 'pg'; import { type Equal, Expect } from 'type-tests/utils.ts'; import type { Kyselify } from '~/kysely/index.ts'; import { char, mysqlTable, timestamp as mysqlTimestamp, varchar as mysqlVarchar } from '~/mysql-core/index.ts'; import { integer, pgEnum, pgTable, serial, text, timestamp, varchar } from '~/pg-core/index.ts'; import type { PromiseOf } from '~/utils.ts'; const { Pool } = pg; const test = pgTable('test', { id: serial('id').primaryKey(), name: text('name').notNull(), }); interface Database { test: Kyselify; } const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool(), }), }); const result = db.selectFrom('test').selectAll().execute(); Expect, typeof test.$inferSelect[]>>(); { const units = mysqlTable('units', { id: char('id', { length: 16 }).primaryKey(), name: mysqlVarchar('name', { length: 255 }).notNull(), abbreviation: mysqlVarchar('abbreviation', { length: 10 }).notNull(), created_at: mysqlTimestamp('created_at').defaultNow().notNull(), updated_at: mysqlTimestamp('updated_at').defaultNow().notNull().onUpdateNow(), }); type UnitModel = typeof units; interface Database { units: Kyselify; } const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool(), }), }); await db .insertInto('units') .values({ id: 'my-unique-id', abbreviation: 'foo', name: 'bar', }) .execute(); } { const uploadStateEnum = pgEnum('upload_state', ['uploading', 'uploaded', 'failed']); const uploadTypeEnum = pgEnum('upload_type', ['image', 'video']); const uploads = pgTable('uploads', { id: varchar('id', { length: 100 }).primaryKey(), state: uploadStateEnum('state').notNull().default('uploading'), type: uploadTypeEnum('type').notNull(), fileName: varchar('file_name', { length: 100 }).notNull(), fileType: varchar('file_type', { length: 100 }).notNull(), fileSize: integer('file_size').notNull(), createdAt: timestamp('created_at').notNull().defaultNow(), uploadedAt: timestamp('uploaded_at'), }); interface Database { uploads: Kyselify; } const db = new Kysely({ dialect: new PostgresDialect({ pool: new Pool(), }), }); await db .insertInto('uploads') .values({ id: '1', file_name: 'fileName', file_type: 'contentType', type: 'image', file_size: 1, }) .returning('id') .executeTakeFirst(); } ================================================ FILE: drizzle-orm/type-tests/mysql/1-to-1-fk.ts ================================================ import { int, serial } from '~/mysql-core/columns/index.ts'; import { mysqlTable } from '~/mysql-core/table.ts'; const test1 = mysqlTable('test1_table', { id: serial('id').primaryKey(), test2Id: int('test2_id').references(() => test2.id), }); const test1Id = int('test1_id').references(() => test1.id); const test2 = mysqlTable('test2_table', { id: serial('id').primaryKey(), test1Id, }); ================================================ FILE: drizzle-orm/type-tests/mysql/1000columns.ts ================================================ import { bigint, double, mysqlTable, varchar } from '~/mysql-core/index.ts'; mysqlTable('test', { col0: double('col1').primaryKey().autoincrement().default(0), col1: double('col1').primaryKey().autoincrement().default(0), col2: double('col1').primaryKey().autoincrement().default(0), col3: double('col1').primaryKey().autoincrement().default(0), col4: double('col1').primaryKey().autoincrement().default(0), col5: double('col1').primaryKey().autoincrement().default(0), col6: double('col1').primaryKey().autoincrement().default(0), col8: double('col1').primaryKey().autoincrement().default(0), col9: double('col1').primaryKey().autoincrement().default(0), col10: double('col1').primaryKey().autoincrement().default(0), col11: double('col1').primaryKey().autoincrement().default(0), col12: double('col1').primaryKey().autoincrement().default(0), col13: double('col1').primaryKey().autoincrement().default(0), col14: double('col1').primaryKey().autoincrement().default(0), col15: double('col1').primaryKey().autoincrement().default(0), col16: double('col1').primaryKey().autoincrement().default(0), col18: double('col1').primaryKey().autoincrement().default(0), col19: double('col1').primaryKey().autoincrement().default(0), col20: double('col1').primaryKey().autoincrement().default(0), col21: double('col1').primaryKey().autoincrement().default(0), col22: double('col1').primaryKey().autoincrement().default(0), col23: double('col1').primaryKey().autoincrement().default(0), col24: double('col1').primaryKey().autoincrement().default(0), col25: double('col1').primaryKey().autoincrement().default(0), col26: double('col1').primaryKey().autoincrement().default(0), col28: double('col1').primaryKey().autoincrement().default(0), col29: double('col1').primaryKey().autoincrement().default(0), col30: double('col1').primaryKey().autoincrement().default(0), col31: double('col1').primaryKey().autoincrement().default(0), col32: double('col1').primaryKey().autoincrement().default(0), col33: double('col1').primaryKey().autoincrement().default(0), col34: double('col1').primaryKey().autoincrement().default(0), col35: double('col1').primaryKey().autoincrement().default(0), col36: double('col1').primaryKey().autoincrement().default(0), col38: double('col1').primaryKey().autoincrement().default(0), col39: double('col1').primaryKey().autoincrement().default(0), col40: double('col1').primaryKey().autoincrement().default(0), col41: double('col1').primaryKey().autoincrement().default(0), col42: double('col1').primaryKey().autoincrement().default(0), col43: double('col1').primaryKey().autoincrement().default(0), col44: double('col1').primaryKey().autoincrement().default(0), col45: double('col1').primaryKey().autoincrement().default(0), col46: double('col1').primaryKey().autoincrement().default(0), col48: double('col1').primaryKey().autoincrement().default(0), col49: double('col1').primaryKey().autoincrement().default(0), col50: double('col1').primaryKey().autoincrement().default(0), col51: double('col1').primaryKey().autoincrement().default(0), col52: double('col1').primaryKey().autoincrement().default(0), col53: double('col1').primaryKey().autoincrement().default(0), col54: double('col1').primaryKey().autoincrement().default(0), col55: double('col1').primaryKey().autoincrement().default(0), col56: double('col1').primaryKey().autoincrement().default(0), col58: double('col1').primaryKey().autoincrement().default(0), col59: double('col1').primaryKey().autoincrement().default(0), col60: double('col1').primaryKey().autoincrement().default(0), col61: double('col1').primaryKey().autoincrement().default(0), col62: double('col1').primaryKey().autoincrement().default(0), col63: double('col1').primaryKey().autoincrement().default(0), col64: double('col1').primaryKey().autoincrement().default(0), col65: double('col1').primaryKey().autoincrement().default(0), col66: double('col1').primaryKey().autoincrement().default(0), col68: double('col1').primaryKey().autoincrement().default(0), col69: double('col1').primaryKey().autoincrement().default(0), col70: double('col1').primaryKey().autoincrement().default(0), col71: double('col1').primaryKey().autoincrement().default(0), col72: double('col1').primaryKey().autoincrement().default(0), col73: double('col1').primaryKey().autoincrement().default(0), col74: double('col1').primaryKey().autoincrement().default(0), col75: double('col1').primaryKey().autoincrement().default(0), col76: double('col1').primaryKey().autoincrement().default(0), col78: double('col1').primaryKey().autoincrement().default(0), col79: double('col1').primaryKey().autoincrement().default(0), col80: double('col1').primaryKey().autoincrement().default(0), col81: double('col1').primaryKey().autoincrement().default(0), col82: double('col1').primaryKey().autoincrement().default(0), col83: double('col1').primaryKey().autoincrement().default(0), col84: double('col1').primaryKey().autoincrement().default(0), col85: double('col1').primaryKey().autoincrement().default(0), col86: double('col1').primaryKey().autoincrement().default(0), col88: double('col1').primaryKey().autoincrement().default(0), col89: double('col1').primaryKey().autoincrement().default(0), col90: double('col1').primaryKey().autoincrement().default(0), col91: double('col1').primaryKey().autoincrement().default(0), col92: double('col1').primaryKey().autoincrement().default(0), col93: double('col1').primaryKey().autoincrement().default(0), col94: double('col1').primaryKey().autoincrement().default(0), col95: double('col1').primaryKey().autoincrement().default(0), col96: double('col1').primaryKey().autoincrement().default(0), col98: double('col1').primaryKey().autoincrement().default(0), col99: double('col1').primaryKey().autoincrement().default(0), col100: double('col1').primaryKey().autoincrement().default(0), col101: double('col1').primaryKey().autoincrement().default(0), col102: double('col1').primaryKey().autoincrement().default(0), col103: double('col1').primaryKey().autoincrement().default(0), col104: double('col1').primaryKey().autoincrement().default(0), col105: double('col1').primaryKey().autoincrement().default(0), col106: double('col1').primaryKey().autoincrement().default(0), col108: double('col1').primaryKey().autoincrement().default(0), col109: double('col1').primaryKey().autoincrement().default(0), col110: double('col11').primaryKey().autoincrement().default(0), col111: double('col11').primaryKey().autoincrement().default(0), col112: double('col11').primaryKey().autoincrement().default(0), col113: double('col11').primaryKey().autoincrement().default(0), col114: double('col11').primaryKey().autoincrement().default(0), col115: double('col11').primaryKey().autoincrement().default(0), col116: double('col11').primaryKey().autoincrement().default(0), col118: double('col11').primaryKey().autoincrement().default(0), col119: double('col11').primaryKey().autoincrement().default(0), col120: double('col11').primaryKey().autoincrement().default(0), col121: double('col11').primaryKey().autoincrement().default(0), col122: double('col11').primaryKey().autoincrement().default(0), col123: double('col11').primaryKey().autoincrement().default(0), col124: double('col11').primaryKey().autoincrement().default(0), col125: double('col11').primaryKey().autoincrement().default(0), col126: double('col11').primaryKey().autoincrement().default(0), col128: double('col11').primaryKey().autoincrement().default(0), col129: double('col11').primaryKey().autoincrement().default(0), col130: double('col11').primaryKey().autoincrement().default(0), col131: double('col11').primaryKey().autoincrement().default(0), col132: double('col11').primaryKey().autoincrement().default(0), col133: double('col11').primaryKey().autoincrement().default(0), col134: double('col11').primaryKey().autoincrement().default(0), col135: double('col11').primaryKey().autoincrement().default(0), col136: double('col11').primaryKey().autoincrement().default(0), col138: double('col11').primaryKey().autoincrement().default(0), col139: double('col11').primaryKey().autoincrement().default(0), col140: double('col11').primaryKey().autoincrement().default(0), col141: double('col11').primaryKey().autoincrement().default(0), col142: double('col11').primaryKey().autoincrement().default(0), col143: double('col11').primaryKey().autoincrement().default(0), col144: double('col11').primaryKey().autoincrement().default(0), col145: double('col11').primaryKey().autoincrement().default(0), col146: double('col11').primaryKey().autoincrement().default(0), col148: double('col11').primaryKey().autoincrement().default(0), col149: double('col11').primaryKey().autoincrement().default(0), col150: double('col11').primaryKey().autoincrement().default(0), col151: double('col11').primaryKey().autoincrement().default(0), col152: double('col11').primaryKey().autoincrement().default(0), col153: double('col11').primaryKey().autoincrement().default(0), col154: double('col11').primaryKey().autoincrement().default(0), col155: double('col11').primaryKey().autoincrement().default(0), col156: double('col11').primaryKey().autoincrement().default(0), col158: double('col11').primaryKey().autoincrement().default(0), col159: double('col11').primaryKey().autoincrement().default(0), col160: double('col11').primaryKey().autoincrement().default(0), col161: double('col11').primaryKey().autoincrement().default(0), col162: double('col11').primaryKey().autoincrement().default(0), col163: double('col11').primaryKey().autoincrement().default(0), col164: double('col11').primaryKey().autoincrement().default(0), col165: double('col11').primaryKey().autoincrement().default(0), col166: double('col11').primaryKey().autoincrement().default(0), col168: double('col11').primaryKey().autoincrement().default(0), col169: double('col11').primaryKey().autoincrement().default(0), col170: double('col11').primaryKey().autoincrement().default(0), col171: double('col11').primaryKey().autoincrement().default(0), col172: double('col11').primaryKey().autoincrement().default(0), col173: double('col11').primaryKey().autoincrement().default(0), col174: double('col11').primaryKey().autoincrement().default(0), col175: double('col11').primaryKey().autoincrement().default(0), col176: double('col11').primaryKey().autoincrement().default(0), col178: double('col11').primaryKey().autoincrement().default(0), col179: double('col11').primaryKey().autoincrement().default(0), col180: double('col11').primaryKey().autoincrement().default(0), col181: double('col11').primaryKey().autoincrement().default(0), col182: double('col11').primaryKey().autoincrement().default(0), col183: double('col11').primaryKey().autoincrement().default(0), col184: double('col11').primaryKey().autoincrement().default(0), col185: double('col11').primaryKey().autoincrement().default(0), col186: double('col11').primaryKey().autoincrement().default(0), col188: double('col11').primaryKey().autoincrement().default(0), col189: double('col11').primaryKey().autoincrement().default(0), col190: double('col11').primaryKey().autoincrement().default(0), col191: double('col11').primaryKey().autoincrement().default(0), col192: double('col11').primaryKey().autoincrement().default(0), col193: double('col11').primaryKey().autoincrement().default(0), col194: double('col11').primaryKey().autoincrement().default(0), col195: double('col11').primaryKey().autoincrement().default(0), col196: double('col11').primaryKey().autoincrement().default(0), col198: double('col11').primaryKey().autoincrement().default(0), col199: double('col11').primaryKey().autoincrement().default(0), col200: double('col2').primaryKey().autoincrement().default(0), col201: double('col2').primaryKey().autoincrement().default(0), col202: double('col2').primaryKey().autoincrement().default(0), col203: double('col2').primaryKey().autoincrement().default(0), col204: double('col2').primaryKey().autoincrement().default(0), col205: double('col2').primaryKey().autoincrement().default(0), col206: double('col2').primaryKey().autoincrement().default(0), col208: double('col2').primaryKey().autoincrement().default(0), col209: double('col2').primaryKey().autoincrement().default(0), col210: double('col21').primaryKey().autoincrement().default(0), col211: double('col21').primaryKey().autoincrement().default(0), col212: double('col21').primaryKey().autoincrement().default(0), col213: double('col21').primaryKey().autoincrement().default(0), col214: double('col21').primaryKey().autoincrement().default(0), col215: double('col21').primaryKey().autoincrement().default(0), col216: double('col21').primaryKey().autoincrement().default(0), col218: double('col21').primaryKey().autoincrement().default(0), col219: double('col21').primaryKey().autoincrement().default(0), col220: double('col21').primaryKey().autoincrement().default(0), col221: double('col21').primaryKey().autoincrement().default(0), col222: double('col21').primaryKey().autoincrement().default(0), col223: double('col21').primaryKey().autoincrement().default(0), col224: double('col21').primaryKey().autoincrement().default(0), col225: double('col21').primaryKey().autoincrement().default(0), col226: double('col21').primaryKey().autoincrement().default(0), col228: double('col21').primaryKey().autoincrement().default(0), col229: double('col21').primaryKey().autoincrement().default(0), col230: double('col21').primaryKey().autoincrement().default(0), col231: double('col21').primaryKey().autoincrement().default(0), col232: double('col21').primaryKey().autoincrement().default(0), col233: double('col21').primaryKey().autoincrement().default(0), col234: double('col21').primaryKey().autoincrement().default(0), col235: double('col21').primaryKey().autoincrement().default(0), col236: double('col21').primaryKey().autoincrement().default(0), col238: double('col21').primaryKey().autoincrement().default(0), col239: double('col21').primaryKey().autoincrement().default(0), col240: double('col21').primaryKey().autoincrement().default(0), col241: double('col21').primaryKey().autoincrement().default(0), col242: double('col21').primaryKey().autoincrement().default(0), col243: double('col21').primaryKey().autoincrement().default(0), col244: double('col21').primaryKey().autoincrement().default(0), col245: double('col21').primaryKey().autoincrement().default(0), col246: double('col21').primaryKey().autoincrement().default(0), col248: double('col21').primaryKey().autoincrement().default(0), col249: double('col21').primaryKey().autoincrement().default(0), col250: double('col21').primaryKey().autoincrement().default(0), col251: double('col21').primaryKey().autoincrement().default(0), col252: double('col21').primaryKey().autoincrement().default(0), col253: double('col21').primaryKey().autoincrement().default(0), col254: double('col21').primaryKey().autoincrement().default(0), col255: double('col21').primaryKey().autoincrement().default(0), col256: double('col21').primaryKey().autoincrement().default(0), col258: double('col21').primaryKey().autoincrement().default(0), col259: double('col21').primaryKey().autoincrement().default(0), col260: double('col21').primaryKey().autoincrement().default(0), col261: double('col21').primaryKey().autoincrement().default(0), col262: double('col21').primaryKey().autoincrement().default(0), col263: double('col21').primaryKey().autoincrement().default(0), col264: double('col21').primaryKey().autoincrement().default(0), col265: double('col21').primaryKey().autoincrement().default(0), col266: double('col21').primaryKey().autoincrement().default(0), col268: double('col21').primaryKey().autoincrement().default(0), col269: double('col21').primaryKey().autoincrement().default(0), col270: double('col21').primaryKey().autoincrement().default(0), col271: double('col21').primaryKey().autoincrement().default(0), col272: double('col21').primaryKey().autoincrement().default(0), col273: double('col21').primaryKey().autoincrement().default(0), col274: double('col21').primaryKey().autoincrement().default(0), col275: double('col21').primaryKey().autoincrement().default(0), col276: double('col21').primaryKey().autoincrement().default(0), col278: double('col21').primaryKey().autoincrement().default(0), col279: double('col21').primaryKey().autoincrement().default(0), col280: double('col21').primaryKey().autoincrement().default(0), col281: double('col21').primaryKey().autoincrement().default(0), col282: double('col21').primaryKey().autoincrement().default(0), col283: double('col21').primaryKey().autoincrement().default(0), col284: double('col21').primaryKey().autoincrement().default(0), col285: double('col21').primaryKey().autoincrement().default(0), col286: double('col21').primaryKey().autoincrement().default(0), col288: double('col21').primaryKey().autoincrement().default(0), col289: double('col21').primaryKey().autoincrement().default(0), col290: double('col21').primaryKey().autoincrement().default(0), col291: double('col21').primaryKey().autoincrement().default(0), col292: double('col21').primaryKey().autoincrement().default(0), col293: double('col21').primaryKey().autoincrement().default(0), col294: double('col21').primaryKey().autoincrement().default(0), col295: double('col21').primaryKey().autoincrement().default(0), col296: double('col21').primaryKey().autoincrement().default(0), col298: double('col21').primaryKey().autoincrement().default(0), col299: double('col21').primaryKey().autoincrement().default(0), col300: double('col3').primaryKey().autoincrement().default(0), col301: double('col3').primaryKey().autoincrement().default(0), col302: double('col3').primaryKey().autoincrement().default(0), col303: double('col3').primaryKey().autoincrement().default(0), col304: double('col3').primaryKey().autoincrement().default(0), col305: double('col3').primaryKey().autoincrement().default(0), col306: double('col3').primaryKey().autoincrement().default(0), col308: double('col3').primaryKey().autoincrement().default(0), col309: double('col3').primaryKey().autoincrement().default(0), col310: double('col31').primaryKey().autoincrement().default(0), col311: double('col31').primaryKey().autoincrement().default(0), col312: double('col31').primaryKey().autoincrement().default(0), col313: double('col31').primaryKey().autoincrement().default(0), col314: double('col31').primaryKey().autoincrement().default(0), col315: double('col31').primaryKey().autoincrement().default(0), col316: double('col31').primaryKey().autoincrement().default(0), col318: double('col31').primaryKey().autoincrement().default(0), col319: double('col31').primaryKey().autoincrement().default(0), col320: double('col31').primaryKey().autoincrement().default(0), col321: double('col31').primaryKey().autoincrement().default(0), col322: double('col31').primaryKey().autoincrement().default(0), col323: double('col31').primaryKey().autoincrement().default(0), col324: double('col31').primaryKey().autoincrement().default(0), col325: double('col31').primaryKey().autoincrement().default(0), col326: double('col31').primaryKey().autoincrement().default(0), col328: double('col31').primaryKey().autoincrement().default(0), col329: double('col31').primaryKey().autoincrement().default(0), col330: double('col31').primaryKey().autoincrement().default(0), col331: double('col31').primaryKey().autoincrement().default(0), col332: double('col31').primaryKey().autoincrement().default(0), col333: double('col31').primaryKey().autoincrement().default(0), col334: double('col31').primaryKey().autoincrement().default(0), col335: double('col31').primaryKey().autoincrement().default(0), col336: double('col31').primaryKey().autoincrement().default(0), col338: double('col31').primaryKey().autoincrement().default(0), col339: double('col31').primaryKey().autoincrement().default(0), col340: double('col31').primaryKey().autoincrement().default(0), col341: double('col31').primaryKey().autoincrement().default(0), col342: double('col31').primaryKey().autoincrement().default(0), col343: double('col31').primaryKey().autoincrement().default(0), col344: double('col31').primaryKey().autoincrement().default(0), col345: double('col31').primaryKey().autoincrement().default(0), col346: double('col31').primaryKey().autoincrement().default(0), col348: double('col31').primaryKey().autoincrement().default(0), col349: double('col31').primaryKey().autoincrement().default(0), col350: double('col31').primaryKey().autoincrement().default(0), col351: double('col31').primaryKey().autoincrement().default(0), col352: double('col31').primaryKey().autoincrement().default(0), col353: double('col31').primaryKey().autoincrement().default(0), col354: double('col31').primaryKey().autoincrement().default(0), col355: double('col31').primaryKey().autoincrement().default(0), col356: double('col31').primaryKey().autoincrement().default(0), col358: double('col31').primaryKey().autoincrement().default(0), col359: double('col31').primaryKey().autoincrement().default(0), col360: double('col31').primaryKey().autoincrement().default(0), col361: double('col31').primaryKey().autoincrement().default(0), col362: double('col31').primaryKey().autoincrement().default(0), col363: double('col31').primaryKey().autoincrement().default(0), col364: double('col31').primaryKey().autoincrement().default(0), col365: double('col31').primaryKey().autoincrement().default(0), col366: double('col31').primaryKey().autoincrement().default(0), col368: double('col31').primaryKey().autoincrement().default(0), col369: double('col31').primaryKey().autoincrement().default(0), col370: double('col31').primaryKey().autoincrement().default(0), col371: double('col31').primaryKey().autoincrement().default(0), col372: double('col31').primaryKey().autoincrement().default(0), col373: double('col31').primaryKey().autoincrement().default(0), col374: double('col31').primaryKey().autoincrement().default(0), col375: double('col31').primaryKey().autoincrement().default(0), col376: double('col31').primaryKey().autoincrement().default(0), col378: double('col31').primaryKey().autoincrement().default(0), col379: double('col31').primaryKey().autoincrement().default(0), col380: double('col31').primaryKey().autoincrement().default(0), col381: double('col31').primaryKey().autoincrement().default(0), col382: double('col31').primaryKey().autoincrement().default(0), col383: double('col31').primaryKey().autoincrement().default(0), col384: double('col31').primaryKey().autoincrement().default(0), col385: double('col31').primaryKey().autoincrement().default(0), col386: double('col31').primaryKey().autoincrement().default(0), col388: double('col31').primaryKey().autoincrement().default(0), col389: double('col31').primaryKey().autoincrement().default(0), col390: double('col31').primaryKey().autoincrement().default(0), col391: double('col31').primaryKey().autoincrement().default(0), col392: double('col31').primaryKey().autoincrement().default(0), col393: double('col31').primaryKey().autoincrement().default(0), col394: double('col31').primaryKey().autoincrement().default(0), col395: double('col31').primaryKey().autoincrement().default(0), col396: double('col31').primaryKey().autoincrement().default(0), col398: double('col31').primaryKey().autoincrement().default(0), col399: double('col31').primaryKey().autoincrement().default(0), col400: double('col4').primaryKey().autoincrement().default(0), col401: double('col4').primaryKey().autoincrement().default(0), col402: double('col4').primaryKey().autoincrement().default(0), col403: double('col4').primaryKey().autoincrement().default(0), col404: double('col4').primaryKey().autoincrement().default(0), col405: double('col4').primaryKey().autoincrement().default(0), col406: double('col4').primaryKey().autoincrement().default(0), col408: double('col4').primaryKey().autoincrement().default(0), col409: double('col4').primaryKey().autoincrement().default(0), col410: double('col41').primaryKey().autoincrement().default(0), col411: double('col41').primaryKey().autoincrement().default(0), col412: double('col41').primaryKey().autoincrement().default(0), col413: double('col41').primaryKey().autoincrement().default(0), col414: double('col41').primaryKey().autoincrement().default(0), col415: double('col41').primaryKey().autoincrement().default(0), col416: double('col41').primaryKey().autoincrement().default(0), col418: double('col41').primaryKey().autoincrement().default(0), col419: double('col41').primaryKey().autoincrement().default(0), col420: double('col41').primaryKey().autoincrement().default(0), col421: double('col41').primaryKey().autoincrement().default(0), col422: double('col41').primaryKey().autoincrement().default(0), col423: double('col41').primaryKey().autoincrement().default(0), col424: double('col41').primaryKey().autoincrement().default(0), col425: double('col41').primaryKey().autoincrement().default(0), col426: double('col41').primaryKey().autoincrement().default(0), col428: double('col41').primaryKey().autoincrement().default(0), col429: double('col41').primaryKey().autoincrement().default(0), col430: double('col41').primaryKey().autoincrement().default(0), col431: double('col41').primaryKey().autoincrement().default(0), col432: double('col41').primaryKey().autoincrement().default(0), col433: double('col41').primaryKey().autoincrement().default(0), col434: double('col41').primaryKey().autoincrement().default(0), col435: double('col41').primaryKey().autoincrement().default(0), col436: double('col41').primaryKey().autoincrement().default(0), col438: double('col41').primaryKey().autoincrement().default(0), col439: double('col41').primaryKey().autoincrement().default(0), col440: double('col41').primaryKey().autoincrement().default(0), col441: double('col41').primaryKey().autoincrement().default(0), col442: double('col41').primaryKey().autoincrement().default(0), col443: double('col41').primaryKey().autoincrement().default(0), col444: double('col41').primaryKey().autoincrement().default(0), col445: double('col41').primaryKey().autoincrement().default(0), col446: double('col41').primaryKey().autoincrement().default(0), col448: double('col41').primaryKey().autoincrement().default(0), col449: double('col41').primaryKey().autoincrement().default(0), col450: double('col41').primaryKey().autoincrement().default(0), col451: double('col41').primaryKey().autoincrement().default(0), col452: double('col41').primaryKey().autoincrement().default(0), col453: double('col41').primaryKey().autoincrement().default(0), col454: double('col41').primaryKey().autoincrement().default(0), col455: double('col41').primaryKey().autoincrement().default(0), col456: double('col41').primaryKey().autoincrement().default(0), col458: double('col41').primaryKey().autoincrement().default(0), col459: double('col41').primaryKey().autoincrement().default(0), col460: double('col41').primaryKey().autoincrement().default(0), col461: double('col41').primaryKey().autoincrement().default(0), col462: double('col41').primaryKey().autoincrement().default(0), col463: double('col41').primaryKey().autoincrement().default(0), col464: double('col41').primaryKey().autoincrement().default(0), col465: double('col41').primaryKey().autoincrement().default(0), col466: double('col41').primaryKey().autoincrement().default(0), col468: double('col41').primaryKey().autoincrement().default(0), col469: double('col41').primaryKey().autoincrement().default(0), col470: double('col41').primaryKey().autoincrement().default(0), col471: double('col41').primaryKey().autoincrement().default(0), col472: double('col41').primaryKey().autoincrement().default(0), col473: double('col41').primaryKey().autoincrement().default(0), col474: double('col41').primaryKey().autoincrement().default(0), col475: double('col41').primaryKey().autoincrement().default(0), col476: double('col41').primaryKey().autoincrement().default(0), col478: double('col41').primaryKey().autoincrement().default(0), col479: double('col41').primaryKey().autoincrement().default(0), col480: double('col41').primaryKey().autoincrement().default(0), col481: double('col41').primaryKey().autoincrement().default(0), col482: double('col41').primaryKey().autoincrement().default(0), col483: double('col41').primaryKey().autoincrement().default(0), col484: double('col41').primaryKey().autoincrement().default(0), col485: double('col41').primaryKey().autoincrement().default(0), col486: double('col41').primaryKey().autoincrement().default(0), col488: double('col41').primaryKey().autoincrement().default(0), col489: double('col41').primaryKey().autoincrement().default(0), col490: double('col41').primaryKey().autoincrement().default(0), col491: double('col41').primaryKey().autoincrement().default(0), col492: double('col41').primaryKey().autoincrement().default(0), col493: double('col41').primaryKey().autoincrement().default(0), col494: double('col41').primaryKey().autoincrement().default(0), col495: double('col41').primaryKey().autoincrement().default(0), col496: double('col41').primaryKey().autoincrement().default(0), col498: double('col41').primaryKey().autoincrement().default(0), col499: double('col41').primaryKey().autoincrement().default(0), col500: double('col5').primaryKey().autoincrement().default(0), col501: double('col5').primaryKey().autoincrement().default(0), col502: double('col5').primaryKey().autoincrement().default(0), col503: double('col5').primaryKey().autoincrement().default(0), col504: double('col5').primaryKey().autoincrement().default(0), col505: double('col5').primaryKey().autoincrement().default(0), col506: double('col5').primaryKey().autoincrement().default(0), col508: double('col5').primaryKey().autoincrement().default(0), col509: double('col5').primaryKey().autoincrement().default(0), col510: double('col51').primaryKey().autoincrement().default(0), col511: double('col51').primaryKey().autoincrement().default(0), col512: double('col51').primaryKey().autoincrement().default(0), col513: double('col51').primaryKey().autoincrement().default(0), col514: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col515: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col516: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col518: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col519: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col520: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col521: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col522: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col523: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col524: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col525: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col526: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col528: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col529: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col530: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col531: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col532: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col533: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col534: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col535: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col536: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col538: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col539: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col540: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col541: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col542: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col543: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col544: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col545: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col546: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col548: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col549: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col550: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col551: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col552: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col553: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col554: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col555: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col556: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col558: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col559: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col560: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col561: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col562: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col563: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col564: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col565: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col566: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col568: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col569: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col570: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col571: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col572: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col573: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col574: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col575: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col576: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col578: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col579: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col580: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col581: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col582: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col583: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col584: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col585: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col586: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col588: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col589: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col590: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col591: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col592: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col593: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col594: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col595: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col596: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col598: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col599: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col600: bigint('col6', { mode: 'number' }).primaryKey().autoincrement().default(0), col601: double('col6').primaryKey().autoincrement().default(0), col602: double('col6').primaryKey().autoincrement().default(0), col603: double('col6').primaryKey().autoincrement().default(0), col604: double('col6').primaryKey().autoincrement().default(0), col605: double('col6').primaryKey().autoincrement().default(0), col606: double('col6').primaryKey().autoincrement().default(0), col608: double('col6').primaryKey().autoincrement().default(0), col609: double('col6').primaryKey().autoincrement().default(0), col610: double('col61').primaryKey().autoincrement().default(0), col611: double('col61').primaryKey().autoincrement().default(0), col612: double('col61').primaryKey().autoincrement().default(0), col613: double('col61').primaryKey().autoincrement().default(0), col614: double('col61').primaryKey().autoincrement().default(0), col615: double('col61').primaryKey().autoincrement().default(0), col616: double('col61').primaryKey().autoincrement().default(0), col618: double('col61').primaryKey().autoincrement().default(0), col619: double('col61').primaryKey().autoincrement().default(0), col620: double('col61').primaryKey().autoincrement().default(0), col621: double('col61').primaryKey().autoincrement().default(0), col622: double('col61').primaryKey().autoincrement().default(0), col623: double('col61').primaryKey().autoincrement().default(0), col624: double('col61').primaryKey().autoincrement().default(0), col625: double('col61').primaryKey().autoincrement().default(0), col626: double('col61').primaryKey().autoincrement().default(0), col628: double('col61').primaryKey().autoincrement().default(0), col629: double('col61').primaryKey().autoincrement().default(0), col630: double('col61').primaryKey().autoincrement().default(0), col631: double('col61').primaryKey().autoincrement().default(0), col632: double('col61').primaryKey().autoincrement().default(0), col633: double('col61').primaryKey().autoincrement().default(0), col634: double('col61').primaryKey().autoincrement().default(0), col635: double('col61').primaryKey().autoincrement().default(0), col636: double('col61').primaryKey().autoincrement().default(0), col638: double('col61').primaryKey().autoincrement().default(0), col639: double('col61').primaryKey().autoincrement().default(0), col640: double('col61').primaryKey().autoincrement().default(0), col641: double('col61').primaryKey().autoincrement().default(0), col642: double('col61').primaryKey().autoincrement().default(0), col643: double('col61').primaryKey().autoincrement().default(0), col644: double('col61').primaryKey().autoincrement().default(0), col645: double('col61').primaryKey().autoincrement().default(0), col646: double('col61').primaryKey().autoincrement().default(0), col648: double('col61').primaryKey().autoincrement().default(0), col649: double('col61').primaryKey().autoincrement().default(0), col650: double('col61').primaryKey().autoincrement().default(0), col651: double('col61').primaryKey().autoincrement().default(0), col652: double('col61').primaryKey().autoincrement().default(0), col653: double('col61').primaryKey().autoincrement().default(0), col654: double('col61').primaryKey().autoincrement().default(0), col655: double('col61').primaryKey().autoincrement().default(0), col656: double('col61').primaryKey().autoincrement().default(0), col658: double('col61').primaryKey().autoincrement().default(0), col659: double('col61').primaryKey().autoincrement().default(0), col660: double('col61').primaryKey().autoincrement().default(0), col661: double('col61').primaryKey().autoincrement().default(0), col662: double('col61').primaryKey().autoincrement().default(0), col663: double('col61').primaryKey().autoincrement().default(0), col664: double('col61').primaryKey().autoincrement().default(0), col665: double('col61').primaryKey().autoincrement().default(0), col666: double('col61').primaryKey().autoincrement().default(0), col668: double('col61').primaryKey().autoincrement().default(0), col669: double('col61').primaryKey().autoincrement().default(0), col670: double('col61').primaryKey().autoincrement().default(0), col671: double('col61').primaryKey().autoincrement().default(0), col672: double('col61').primaryKey().autoincrement().default(0), col673: double('col61').primaryKey().autoincrement().default(0), col674: double('col61').primaryKey().autoincrement().default(0), col675: double('col61').primaryKey().autoincrement().default(0), col676: double('col61').primaryKey().autoincrement().default(0), col678: double('col61').primaryKey().autoincrement().default(0), col679: double('col61').primaryKey().autoincrement().default(0), col680: double('col61').primaryKey().autoincrement().default(0), col681: double('col61').primaryKey().autoincrement().default(0), col682: double('col61').primaryKey().autoincrement().default(0), col683: double('col61').primaryKey().autoincrement().default(0), col684: double('col61').primaryKey().autoincrement().default(0), col685: double('col61').primaryKey().autoincrement().default(0), col686: double('col61').primaryKey().autoincrement().default(0), col688: double('col61').primaryKey().autoincrement().default(0), col689: double('col61').primaryKey().autoincrement().default(0), col690: double('col61').primaryKey().autoincrement().default(0), col691: double('col61').primaryKey().autoincrement().default(0), col692: double('col61').primaryKey().autoincrement().default(0), col693: double('col61').primaryKey().autoincrement().default(0), col694: double('col61').primaryKey().autoincrement().default(0), col695: double('col61').primaryKey().autoincrement().default(0), col696: double('col61').primaryKey().autoincrement().default(0), col698: double('col61').primaryKey().autoincrement().default(0), col699: double('col61').primaryKey().autoincrement().default(0), col700: double('col7').primaryKey().autoincrement().default(0), col701: double('col7').primaryKey().autoincrement().default(0), col702: double('col7').primaryKey().autoincrement().default(0), col703: double('col7').primaryKey().autoincrement().default(0), col704: double('col7').primaryKey().autoincrement().default(0), col705: double('col7').primaryKey().autoincrement().default(0), col706: double('col7').primaryKey().autoincrement().default(0), col708: double('col7').primaryKey().autoincrement().default(0), col709: double('col7').primaryKey().autoincrement().default(0), col710: double('col71').primaryKey().autoincrement().default(0), col711: double('col71').primaryKey().autoincrement().default(0), col712: double('col71').primaryKey().autoincrement().default(0), col713: double('col71').primaryKey().autoincrement().default(0), col714: double('col71').primaryKey().autoincrement().default(0), col715: double('col71').primaryKey().autoincrement().default(0), col716: double('col71').primaryKey().autoincrement().default(0), col718: double('col71').primaryKey().autoincrement().default(0), col719: double('col71').primaryKey().autoincrement().default(0), col720: double('col71').primaryKey().autoincrement().default(0), col721: double('col71').primaryKey().autoincrement().default(0), col722: double('col71').primaryKey().autoincrement().default(0), col723: double('col71').primaryKey().autoincrement().default(0), col724: double('col71').primaryKey().autoincrement().default(0), col725: double('col71').primaryKey().autoincrement().default(0), col726: double('col71').primaryKey().autoincrement().default(0), col728: double('col71').primaryKey().autoincrement().default(0), col729: double('col71').primaryKey().autoincrement().default(0), col730: double('col71').primaryKey().autoincrement().default(0), col731: double('col71').primaryKey().autoincrement().default(0), col732: double('col71').primaryKey().autoincrement().default(0), col733: double('col71').primaryKey().autoincrement().default(0), col734: double('col71').primaryKey().autoincrement().default(0), col735: double('col71').primaryKey().autoincrement().default(0), col736: double('col71').primaryKey().autoincrement().default(0), col738: double('col71').primaryKey().autoincrement().default(0), col739: double('col71').primaryKey().autoincrement().default(0), col740: double('col71').primaryKey().autoincrement().default(0), col741: double('col71').primaryKey().autoincrement().default(0), col742: double('col71').primaryKey().autoincrement().default(0), col743: double('col71').primaryKey().autoincrement().default(0), col744: double('col71').primaryKey().autoincrement().default(0), col745: double('col71').primaryKey().autoincrement().default(0), col746: double('col71').primaryKey().autoincrement().default(0), col748: double('col71').primaryKey().autoincrement().default(0), col749: double('col71').primaryKey().autoincrement().default(0), col750: double('col71').primaryKey().autoincrement().default(0), col751: double('col71').primaryKey().autoincrement().default(0), col752: double('col71').primaryKey().autoincrement().default(0), col753: double('col71').primaryKey().autoincrement().default(0), col754: double('col71').primaryKey().autoincrement().default(0), col755: double('col71').primaryKey().autoincrement().default(0), col756: double('col71').primaryKey().autoincrement().default(0), col758: double('col71').primaryKey().autoincrement().default(0), col759: double('col71').primaryKey().autoincrement().default(0), col760: double('col71').primaryKey().autoincrement().default(0), col761: double('col71').primaryKey().autoincrement().default(0), col762: double('col71').primaryKey().autoincrement().default(0), col763: double('col71').primaryKey().autoincrement().default(0), col764: double('col71').primaryKey().autoincrement().default(0), col765: double('col71').primaryKey().autoincrement().default(0), col766: double('col71').primaryKey().autoincrement().default(0), col768: double('col71').primaryKey().autoincrement().default(0), col769: double('col71').primaryKey().autoincrement().default(0), col770: double('col71').primaryKey().autoincrement().default(0), col771: double('col71').primaryKey().autoincrement().default(0), col772: double('col71').primaryKey().autoincrement().default(0), col773: double('col71').primaryKey().autoincrement().default(0), col774: double('col71').primaryKey().autoincrement().default(0), col775: double('col71').primaryKey().autoincrement().default(0), col776: double('col71').primaryKey().autoincrement().default(0), col778: double('col71').primaryKey().autoincrement().default(0), col779: double('col71').primaryKey().autoincrement().default(0), col780: double('col71').primaryKey().autoincrement().default(0), col781: double('col71').primaryKey().autoincrement().default(0), col782: double('col71').primaryKey().autoincrement().default(0), col783: double('col71').primaryKey().autoincrement().default(0), col784: double('col71').primaryKey().autoincrement().default(0), col785: double('col71').primaryKey().autoincrement().default(0), col786: double('col71').primaryKey().autoincrement().default(0), col788: double('col71').primaryKey().autoincrement().default(0), col789: double('col71').primaryKey().autoincrement().default(0), col790: double('col71').primaryKey().autoincrement().default(0), col791: double('col71').primaryKey().autoincrement().default(0), col792: double('col71').primaryKey().autoincrement().default(0), col793: double('col71').primaryKey().autoincrement().default(0), col794: double('col71').primaryKey().autoincrement().default(0), col795: double('col71').primaryKey().autoincrement().default(0), col796: double('col71').primaryKey().autoincrement().default(0), col798: double('col71').primaryKey().autoincrement().default(0), col799: double('col71').primaryKey().autoincrement().default(0), col800: double('col8').primaryKey().autoincrement().default(0), col801: double('col8').primaryKey().autoincrement().default(0), col802: double('col8').primaryKey().autoincrement().default(0), col803: double('col8').primaryKey().autoincrement().default(0), col804: double('col8').primaryKey().autoincrement().default(0), col805: double('col8').primaryKey().autoincrement().default(0), col806: double('col8').primaryKey().autoincrement().default(0), col808: double('col8').primaryKey().autoincrement().default(0), col809: double('col8').primaryKey().autoincrement().default(0), col810: double('col81').primaryKey().autoincrement().default(0), col811: double('col81').primaryKey().autoincrement().default(0), col812: double('col81').primaryKey().autoincrement().default(0), col813: double('col81').primaryKey().autoincrement().default(0), col814: double('col81').primaryKey().autoincrement().default(0), col815: double('col81').primaryKey().autoincrement().default(0), col816: double('col81').primaryKey().autoincrement().default(0), col818: double('col81').primaryKey().autoincrement().default(0), col819: double('col81').primaryKey().autoincrement().default(0), col820: double('col81').primaryKey().autoincrement().default(0), col821: double('col81').primaryKey().autoincrement().default(0), col822: double('col81').primaryKey().autoincrement().default(0), col823: double('col81').primaryKey().autoincrement().default(0), col824: double('col81').primaryKey().autoincrement().default(0), col825: double('col81').primaryKey().autoincrement().default(0), col826: double('col81').primaryKey().autoincrement().default(0), col828: double('col81').primaryKey().autoincrement().default(0), col829: double('col81').primaryKey().autoincrement().default(0), col830: double('col81').primaryKey().autoincrement().default(0), col831: double('col81').primaryKey().autoincrement().default(0), col832: double('col81').primaryKey().autoincrement().default(0), col833: double('col81').primaryKey().autoincrement().default(0), col834: double('col81').primaryKey().autoincrement().default(0), col835: double('col81').primaryKey().autoincrement().default(0), col836: double('col81').primaryKey().autoincrement().default(0), col838: double('col81').primaryKey().autoincrement().default(0), col839: double('col81').primaryKey().autoincrement().default(0), col840: double('col81').primaryKey().autoincrement().default(0), col841: double('col81').primaryKey().autoincrement().default(0), col842: double('col81').primaryKey().autoincrement().default(0), col843: double('col81').primaryKey().autoincrement().default(0), col844: double('col81').primaryKey().autoincrement().default(0), col845: double('col81').primaryKey().autoincrement().default(0), col846: double('col81').primaryKey().autoincrement().default(0), col848: double('col81').primaryKey().autoincrement().default(0), col849: double('col81').primaryKey().autoincrement().default(0), col850: double('col81').primaryKey().autoincrement().default(0), col851: double('col81').primaryKey().autoincrement().default(0), col852: double('col81').primaryKey().autoincrement().default(0), col853: double('col81').primaryKey().autoincrement().default(0), col854: double('col81').primaryKey().autoincrement().default(0), col855: double('col81').primaryKey().autoincrement().default(0), col856: double('col81').primaryKey().autoincrement().default(0), col858: double('col81').primaryKey().autoincrement().default(0), col859: double('col81').primaryKey().autoincrement().default(0), col860: double('col81').primaryKey().autoincrement().default(0), col861: double('col81').primaryKey().autoincrement().default(0), col862: double('col81').primaryKey().autoincrement().default(0), col863: double('col81').primaryKey().autoincrement().default(0), col864: double('col81').primaryKey().autoincrement().default(0), col865: double('col81').primaryKey().autoincrement().default(0), col866: double('col81').primaryKey().autoincrement().default(0), col868: double('col81').primaryKey().autoincrement().default(0), col869: double('col81').primaryKey().autoincrement().default(0), col870: double('col81').primaryKey().autoincrement().default(0), col871: double('col81').primaryKey().autoincrement().default(0), col872: double('col81').primaryKey().autoincrement().default(0), col873: double('col81').primaryKey().autoincrement().default(0), col874: double('col81').primaryKey().autoincrement().default(0), col875: double('col81').primaryKey().autoincrement().default(0), col876: double('col81').primaryKey().autoincrement().default(0), col878: double('col81').primaryKey().autoincrement().default(0), col879: double('col81').primaryKey().autoincrement().default(0), col880: double('col81').primaryKey().autoincrement().default(0), col881: double('col81').primaryKey().autoincrement().default(0), col882: double('col81').primaryKey().autoincrement().default(0), col883: double('col81').primaryKey().autoincrement().default(0), col884: double('col81').primaryKey().autoincrement().default(0), col885: double('col81').primaryKey().autoincrement().default(0), col886: double('col81').primaryKey().autoincrement().default(0), col888: double('col81').primaryKey().autoincrement().default(0), col889: double('col81').primaryKey().autoincrement().default(0), col890: double('col81').primaryKey().autoincrement().default(0), col891: double('col81').primaryKey().autoincrement().default(0), col892: double('col81').primaryKey().autoincrement().default(0), col893: double('col81').primaryKey().autoincrement().default(0), col894: double('col81').primaryKey().autoincrement().default(0), col895: double('col81').primaryKey().autoincrement().default(0), col896: double('col81').primaryKey().autoincrement().default(0), col898: double('col81').primaryKey().autoincrement().default(0), col899: double('col81').primaryKey().autoincrement().default(0), col900: double('col9').primaryKey().autoincrement().default(0), col901: double('col9').primaryKey().autoincrement().default(0), col902: double('col9').primaryKey().autoincrement().default(0), col903: double('col9').primaryKey().autoincrement().default(0), col904: double('col9').primaryKey().autoincrement().default(0), col905: double('col9').primaryKey().autoincrement().default(0), col906: double('col9').primaryKey().autoincrement().default(0), col908: double('col9').primaryKey().autoincrement().default(0), col909: double('col9').primaryKey().autoincrement().default(0), col910: double('col91').primaryKey().autoincrement().default(0), col911: double('col91').primaryKey().autoincrement().default(0), col912: double('col91').primaryKey().autoincrement().default(0), col913: double('col91').primaryKey().autoincrement().default(0), col914: double('col91').primaryKey().autoincrement().default(0), col915: double('col91').primaryKey().autoincrement().default(0), col916: double('col91').primaryKey().autoincrement().default(0), col918: double('col91').primaryKey().autoincrement().default(0), col919: double('col91').primaryKey().autoincrement().default(0), col920: double('col91').primaryKey().autoincrement().default(0), col921: double('col91').primaryKey().autoincrement().default(0), col922: double('col91').primaryKey().autoincrement().default(0), col923: double('col91').primaryKey().autoincrement().default(0), col924: double('col91').primaryKey().autoincrement().default(0), col925: double('col91').primaryKey().autoincrement().default(0), col926: double('col91').primaryKey().autoincrement().default(0), col928: double('col91').primaryKey().autoincrement().default(0), col929: double('col91').primaryKey().autoincrement().default(0), col930: double('col91').primaryKey().autoincrement().default(0), col931: double('col91').primaryKey().autoincrement().default(0), col932: double('col91').primaryKey().autoincrement().default(0), col933: double('col91').primaryKey().autoincrement().default(0), col934: double('col91').primaryKey().autoincrement().default(0), col935: double('col91').primaryKey().autoincrement().default(0), col936: double('col91').primaryKey().autoincrement().default(0), col938: double('col91').primaryKey().autoincrement().default(0), col939: double('col91').primaryKey().autoincrement().default(0), col940: double('col91').primaryKey().autoincrement().default(0), col941: double('col91').primaryKey().autoincrement().default(0), col942: double('col91').primaryKey().autoincrement().default(0), col943: double('col91').primaryKey().autoincrement().default(0), col944: varchar('col91', { length: 200 }).primaryKey().default('0'), col945: varchar('col91', { length: 200 }).primaryKey().default('0'), col946: varchar('col91', { length: 200 }).primaryKey().default('0'), col948: varchar('col91', { length: 200 }).primaryKey().default('0'), col949: varchar('col91', { length: 200 }).primaryKey().default('0'), col950: varchar('col91', { length: 200 }).primaryKey().default('0'), col951: varchar('col91', { length: 200 }).primaryKey().default('0'), col952: varchar('col91', { length: 200 }).primaryKey().default('0'), col953: varchar('col91', { length: 200 }).primaryKey().default('0'), col954: varchar('col91', { length: 200 }).primaryKey().default('0'), col955: varchar('col91', { length: 200 }).primaryKey().default('0'), col956: varchar('col91', { length: 200 }).primaryKey().default('0'), col958: varchar('col91', { length: 200 }).primaryKey().default('0'), col959: varchar('col91', { length: 200 }).primaryKey().default('0'), col960: varchar('col91', { length: 200 }).primaryKey().default('0'), col961: varchar('col91', { length: 200 }).primaryKey().default('0'), col962: varchar('col91', { length: 200 }).primaryKey().default('0'), col963: varchar('col91', { length: 200 }).primaryKey().default('0'), col964: varchar('col91', { length: 200 }).primaryKey().default('0'), col965: varchar('col91', { length: 200 }).primaryKey().default('0'), col966: varchar('col91', { length: 200 }).primaryKey().default('0'), col968: varchar('col91', { length: 200 }).primaryKey().default('0'), col969: varchar('col91', { length: 200 }).primaryKey().default('0'), col970: varchar('col91', { length: 200 }).primaryKey().default('0'), col971: varchar('col91', { length: 200 }).primaryKey().default('0'), col972: varchar('col91', { length: 200 }).primaryKey().default('0'), col973: varchar('col91', { length: 200 }).primaryKey().default('0'), col974: varchar('col91', { length: 200 }).primaryKey().default('0'), col975: varchar('col91', { length: 200 }).primaryKey().default('0'), col976: varchar('col91', { length: 200 }).primaryKey().default('0'), col978: varchar('col91', { length: 200 }).primaryKey().default('0'), col979: varchar('col91', { length: 200 }).primaryKey().default('0'), col980: varchar('col91', { length: 200 }).primaryKey().default('0'), col981: varchar('col91', { length: 200 }).primaryKey().default('0'), col982: varchar('col91', { length: 200 }).primaryKey().default('0'), col983: varchar('col91', { length: 200 }).primaryKey().default('0'), col984: varchar('col91', { length: 200 }).primaryKey().default('0'), col985: varchar('col91', { length: 200 }).primaryKey().default('0'), col986: varchar('col91', { length: 200 }).primaryKey().default('0'), col988: varchar('col91', { length: 200 }).primaryKey().default('0'), col989: varchar('col91', { length: 200 }).primaryKey().default('0'), col990: varchar('col91', { length: 200 }).primaryKey().default('0'), col991: varchar('col91', { length: 200 }).primaryKey().default('0'), col992: varchar('col91', { length: 200 }).primaryKey().default('0'), col993: varchar('col91', { length: 200 }).primaryKey().default('0'), col994: varchar('col91', { length: 200 }).primaryKey().default('0'), col995: varchar('col91', { length: 200 }).primaryKey().default('0'), col996: varchar('col91', { length: 200 }).primaryKey().default('0'), col998: varchar('col91', { length: 200 }).primaryKey().default('0'), col999: varchar('col91', { length: 200 }).primaryKey().default('0'), }); ================================================ FILE: drizzle-orm/type-tests/mysql/count.ts ================================================ import { Expect } from 'type-tests/utils.ts'; import { int, mysqlTable, serial, text } from '~/mysql-core/index.ts'; import { and, gt, ne } from '~/sql/expressions/index.ts'; import type { Equal } from '~/utils.ts'; import { db } from './db.ts'; const names = mysqlTable('names', { id: serial('id').primaryKey(), name: text('name'), authorId: int('author_id'), }); const separate = await db.$count(names); const separateFilters = await db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))); const embedded = await db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: db.$count(names).as('count1'), }) .from(names); const embeddedFilters = await db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))).as('count1'), }) .from(names); Expect>; Expect>; Expect< Equal< { id: number; name: string | null; authorId: number | null; count1: number; }[], typeof embedded > >; Expect< Equal< { id: number; name: string | null; authorId: number | null; count1: number; }[], typeof embeddedFilters > >; ================================================ FILE: drizzle-orm/type-tests/mysql/db-rel.ts ================================================ import pg from 'pg'; import { type Equal, Expect } from 'type-tests/utils.ts'; import { drizzle } from '~/node-postgres/index.ts'; import { placeholder, sql } from '~/sql/sql.ts'; import * as schema from './tables-rel.ts'; const { Pool } = pg; const pdb = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'] }); const db = drizzle(pdb, { schema }); { const result = await db.query.users.findMany({ where: (users, { sql }) => sql`char_length(${users.name} > 1)`, limit: placeholder('l'), orderBy: (users, { asc, desc }) => [asc(users.name), desc(users.id)], with: { posts: { where: (posts, { sql }) => sql`char_length(${posts.title} > 1)`, limit: placeholder('l'), columns: { id: false, }, with: { author: true, comments: { where: (comments, { sql }) => sql`char_length(${comments.text} > 1)`, limit: placeholder('l'), columns: { text: true, }, with: { author: { columns: {}, with: { city: { with: { users: true, }, }, }, }, }, }, }, }, }, }); Expect< Equal<{ id: number; name: string; cityId: number; homeCityId: number | null; createdAt: Date; posts: { title: string; authorId: number | null; comments: { text: string; author: { city: { id: number; name: string; users: { id: number; name: string; cityId: number; homeCityId: number | null; createdAt: Date; }[]; }; } | null; }[]; author: { id: number; name: string; cityId: number; homeCityId: number | null; createdAt: Date; } | null; }[]; }[], typeof result> >; } { const result = await db.query.users.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { authorId: true, }, extras: { lower: sql`lower(${schema.posts.title})`.as('lower_name'), }, }, }, }); Expect< Equal< { id: number; name: string; posts: { authorId: number | null; lower: string; }[]; }[], typeof result > >; } ================================================ FILE: drizzle-orm/type-tests/mysql/db.ts ================================================ import { createPool } from 'mysql2/promise'; import { drizzle } from '~/mysql2/index.ts'; const pool = createPool({}); export const db = drizzle(pool); { drizzle(pool); // @ts-expect-error - missing mode drizzle(pool, { schema: {} }); drizzle(pool, { schema: {}, mode: 'default' }); drizzle(pool, { schema: {}, mode: 'planetscale' }); drizzle(pool, { mode: 'default' }); drizzle(pool, { mode: 'planetscale' }); } ================================================ FILE: drizzle-orm/type-tests/mysql/delete.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import type { MySqlDelete } from '~/mysql-core/index.ts'; import type { MySqlRawQueryResult } from '~/mysql2/index.ts'; import { eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { users } from './tables.ts'; const deleteAll = await db.delete(users); Expect>; const deleteAllStmt = db.delete(users).prepare(); const deleteAllPrepared = await deleteAllStmt.execute(); Expect>; const deleteWhere = await db.delete(users).where(eq(users.id, 1)); Expect>; const deleteWhereStmt = db.delete(users).where(eq(users.id, 1)).prepare(); const deleteWherePrepared = await deleteWhereStmt.execute(); Expect>; const deleteReturningAll = await db.delete(users); Expect>; const deleteReturningAllStmt = db.delete(users).prepare(); const deleteReturningAllPrepared = await deleteReturningAllStmt.execute(); Expect>; const deleteReturningPartial = await db.delete(users); Expect>; const deleteReturningPartialStmt = db.delete(users).prepare(); const deleteReturningPartialPrepared = await deleteReturningPartialStmt.execute(); Expect>; { function dynamic(qb: T) { return qb.where(sql``); } const qbBase = db.delete(users).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { db .delete(users) .where(sql``) // @ts-expect-error method was already called .where(sql``); db .delete(users) .$dynamic() .where(sql``) .where(sql``); } { db.delete(users).where(sql``).limit(1).orderBy(sql``); } ================================================ FILE: drizzle-orm/type-tests/mysql/generated-columns.ts ================================================ import { type Equal, Expect } from 'type-tests/utils'; import { type InferInsertModel, type InferSelectModel, sql } from '~/index'; import { mysqlTable, serial, text, varchar } from '~/mysql-core'; import { drizzle } from '~/mysql2'; import { db } from './db'; const users = mysqlTable( 'users', { id: serial('id').primaryKey(), firstName: varchar('first_name', { length: 255 }), lastName: varchar('last_name', { length: 255 }), email: text('email').notNull(), fullName: text('full_name').generatedAlwaysAs(sql`concat_ws(first_name, ' ', last_name)`), upperName: text('upper_name').generatedAlwaysAs( sql` case when first_name is null then null else upper(first_name) end `, ).$type(), // There is no way for drizzle to detect nullability in these cases. This is how the user can work around it }, ); { type User = typeof users.$inferSelect; type NewUser = typeof users.$inferInsert; Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string | null; upperName: string | null; }, User > >(); Expect< Equal< { email: string; id?: number | undefined; firstName?: string | null | undefined; lastName?: string | null | undefined; }, NewUser > >(); } { type User = InferSelectModel; type NewUser = InferInsertModel; Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string | null; upperName: string | null; }, User > >(); Expect< Equal< { email: string; id?: number | undefined; firstName?: string | null | undefined; lastName?: string | null | undefined; }, NewUser > >(); } { const dbUsers = await db.select().from(users); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string | null; upperName: string | null; }[], typeof dbUsers > >(); } { const db = drizzle({} as any, { schema: { users }, mode: 'default' }); const dbUser = await db.query.users.findFirst(); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string | null; upperName: string | null; } | undefined, typeof dbUser > >(); } { const db = drizzle({} as any, { schema: { users }, mode: 'default' }); const dbUser = await db.query.users.findMany(); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string | null; upperName: string | null; }[], typeof dbUser > >(); } { // @ts-expect-error - Can't use the fullName because it's a generated column await db.insert(users).values({ firstName: 'test', lastName: 'test', email: 'test', fullName: 'test', }); } { await db.update(users).set({ firstName: 'test', lastName: 'test', email: 'test', // @ts-expect-error - Can't use the fullName because it's a generated column fullName: 'test', }); } ================================================ FILE: drizzle-orm/type-tests/mysql/insert.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { boolean, int, mysqlTable, QueryBuilder, serial, text } from '~/mysql-core/index.ts'; import type { MySqlInsert } from '~/mysql-core/index.ts'; import type { MySqlRawQueryResult } from '~/mysql2/index.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { users } from './tables.ts'; const mysqlInsertReturning = await db.insert(users).values({ // ^? homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }).$returningId(); Expect>; const insert = await db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }); Expect>; const insertStmt = db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }).prepare(); const insertPrepared = await insertStmt.execute(); Expect>; const insertSql = await db.insert(users).values({ homeCity: sql`123`, class: 'A', age1: 1, enumCol: sql`foobar`, }); Expect>; const insertSqlStmt = db.insert(users).values({ homeCity: sql`123`, class: 'A', age1: 1, enumCol: sql`foobar`, }).prepare(); const insertSqlPrepared = await insertSqlStmt.execute(); Expect>; const insertReturning = await db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }); Expect>; const insertReturningStmt = db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }).prepare(); const insertReturningPrepared = await insertReturningStmt.execute(); Expect>; const insertReturningPartial = await db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }); Expect>; const insertReturningPartialStmt = db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }).prepare(); const insertReturningPartialPrepared = await insertReturningPartialStmt.execute(); Expect>; const insertReturningSql = await db.insert(users).values({ homeCity: 1, class: 'A', age1: sql`2 + 2`, enumCol: 'a', }); Expect>; const insertReturningSqlStmt = db.insert(users).values({ homeCity: 1, class: 'A', age1: sql`2 + 2`, enumCol: 'a', }).prepare(); const insertReturningSqlPrepared = await insertReturningSqlStmt.execute(); Expect>; { const users = mysqlTable('users', { id: int('id').autoincrement().primaryKey(), name: text('name').notNull(), age: int('age'), occupation: text('occupation'), }); await db.insert(users).values({ name: 'John Wick', age: 58, occupation: 'housekeeper' }); } { function dynamic(qb: T) { return qb.onDuplicateKeyUpdate({ set: {} }); } const qbBase = db.insert(users).values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0 }).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { db .insert(users) .values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0 }) .onDuplicateKeyUpdate({ set: {} }) // @ts-expect-error method was already called .onDuplicateKeyUpdate({ set: {} }); } { const users1 = mysqlTable('users1', { id: serial('id').primaryKey(), name: text('name').notNull(), admin: boolean('admin').notNull().default(false), }); const users2 = mysqlTable('users2', { id: serial('id').primaryKey(), firstName: text('first_name').notNull(), lastName: text('last_name').notNull(), admin: boolean('admin').notNull().default(false), phoneNumber: text('phone_number'), }); const qb = new QueryBuilder(); db.insert(users1).select(sql`select * from users1`); db.insert(users1).select(() => sql`select * from users1`); db .insert(users1) .select( qb.select({ name: users2.firstName, admin: users2.admin, }).from(users2), ); db .insert(users1) .select( qb.select({ name: users2.firstName, admin: users2.admin, }).from(users2).where(sql``), ); db .insert(users2) .select( qb.select({ firstName: users2.firstName, lastName: users2.lastName, admin: users2.admin, }).from(users2), ); db .insert(users1) .select( qb.select({ name: sql`${users2.firstName} || ' ' || ${users2.lastName}`.as('name'), admin: users2.admin, }).from(users2), ); db .insert(users1) .select( // @ts-expect-error name is undefined qb.select({ admin: users1.admin }).from(users1), ); db.insert(users1).select(db.select().from(users1)); db.insert(users1).select(() => db.select().from(users1)); db.insert(users1).select((qb) => qb.select().from(users1)); // @ts-expect-error tables have different keys db.insert(users1).select(db.select().from(users2)); // @ts-expect-error tables have different keys db.insert(users1).select(() => db.select().from(users2)); } ================================================ FILE: drizzle-orm/type-tests/mysql/no-strict-null-checks/test.ts ================================================ import { mysqlTable, text } from '~/mysql-core'; import { drizzle } from '~/mysql2'; export const test = mysqlTable( 'test', { id: text('id') .primaryKey() .generatedAlwaysAs('genstr'), name: text('name').$defaultFn(() => '' as string), title: text('title').notNull(), description: text('description'), dbdef: text('dbdef').default('dbdefval'), }, ); const db = drizzle.mock(); db.update(test) .set({ // @ts-expect-error id: '1', name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.update(test) .set({ name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ // @ts-expect-error id: '1', name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ title: 'title', description: 'desc', }); db.insert(test).values({ title: 'title', }); ================================================ FILE: drizzle-orm/type-tests/mysql/no-strict-null-checks/tsconfig.json ================================================ { "extends": "../../tsconfig.json", "compilerOptions": { "noEmit": true, "strictNullChecks": false, "strictPropertyInitialization": false, "exactOptionalPropertyTypes": false }, "include": ["./test.ts"] } ================================================ FILE: drizzle-orm/type-tests/mysql/select.ts ================================================ import { alias } from '~/mysql-core/alias.ts'; import { and, between, eq, exists, gt, gte, ilike, inArray, isNotNull, isNull, like, lt, lte, ne, not, notBetween, notExists, notIlike, notInArray, notLike, or, } from '~/sql/expressions/index.ts'; import { type InferSelectViewModel, param, sql } from '~/sql/sql.ts'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { index, int, type MySqlSelect, type MySqlSelectQueryBuilder, mysqlTable, mysqlView, QueryBuilder, text, } from '~/mysql-core/index.ts'; import { db } from './db.ts'; import { cities, classes, newYorkers, users } from './tables.ts'; const city = alias(cities, 'city'); const city1 = alias(cities, 'city1'); const leftJoinFull = await db.select().from(users).leftJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof cities.$inferSelect | null; }[], typeof leftJoinFull > >; const rightJoinFull = await db.select().from(users).rightJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect | null; city: typeof city.$inferSelect; }[], typeof rightJoinFull > >; const innerJoinFull = await db.select().from(users).innerJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof city.$inferSelect; }[], typeof innerJoinFull > >; const crossJoinFull = await db.select().from(users).crossJoin(city); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof city.$inferSelect; }[], typeof crossJoinFull > >; const leftJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .leftJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number; userText: string | null; cityId: number | null; cityName: string | null; }[], typeof leftJoinFlat> >; const rightJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .rightJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number | null; userText: string | null; cityId: number; cityName: string; }[], typeof rightJoinFlat> >; const innerJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .innerJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number; userText: string | null; cityId: number; cityName: string; }[], typeof innerJoinFlat> >; const crossJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .crossJoin(city); Expect< Equal<{ userId: number; userText: string | null; cityId: number; cityName: string; }[], typeof crossJoinFlat> >; const leftJoinMixed = await db .select({ id: users.id, text: users.text, textUpper: sql`upper(${users.text})`, idComplex: sql`${users.id}::text || ${city.id}::text`, city: { id: city.id, name: city.name, }, }) .from(users) .leftJoin(city, eq(users.id, city.id)); Expect< Equal< { id: number; text: string | null; textUpper: string | null; idComplex: string | null; city: { id: number; name: string; } | null; }[], typeof leftJoinMixed > >; const leftJoinMixed2 = await db .select({ id: users.id, text: users.text, foo: { bar: users.id, baz: cities.id, }, }) .from(users) .leftJoin(cities, eq(users.id, cities.id)); Expect< Equal< { id: number; text: string | null; foo: { bar: number; baz: number | null; }; }[], typeof leftJoinMixed2 > >; const join = await db .select({ users, cities, city, city1: { id: city1.id, }, }) .from(users) .leftJoin(cities, eq(users.id, cities.id)) .rightJoin(city, eq(city.id, users.id)) .rightJoin(city1, eq(city1.id, users.id)); Expect< Equal< { users: { id: number; text: string | null; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; } | null; cities: { id: number; name: string; population: number | null; } | null; city: { id: number; name: string; population: number | null; } | null; city1: { id: number; }; }[], typeof join > >; db .select() .from(users) .where(exists(db.select().from(cities).where(eq(users.homeCity, cities.id)))); function mapFunkyFuncResult(valueFromDriver: unknown) { return { foo: (valueFromDriver as Record)['foo'], }; } const age = 1; const allOperators = await db .select({ col2: sql`5 - ${users.id} + 1`, // unknown col3: sql`${users.id} + 1`, // number col33: sql`${users.id} + 1`.mapWith(users.id), // number col34: sql`${users.id} + 1`.mapWith(mapFunkyFuncResult), // number col4: sql`one_or_another(${users.id}, ${users.class})`, // string | number col5: sql`true`, // unknown col6: sql`true`, // boolean col7: sql`random()`, // number col8: sql`some_funky_func(${users.id})`.mapWith(mapFunkyFuncResult), // { foo: string } col9: sql`greatest(${users.createdAt}, ${param(new Date(), users.createdAt)})`, // unknown col10: sql`date_or_false(${users.createdAt}, ${param(new Date(), users.createdAt)})`, // Date | boolean col11: sql`${users.age1} + ${age}`, // unknown col12: sql`${users.age1} + ${param(age, users.age1)}`, // unknown col13: sql`lower(${users.class})`, // unknown col14: sql`length(${users.class})`, // number count: sql`count(*)::int`, // number }) .from(users) .where(and( eq(users.id, 1), ne(users.id, 1), or(eq(users.id, 1), ne(users.id, 1)), not(eq(users.id, 1)), gt(users.id, 1), gte(users.id, 1), lt(users.id, 1), lte(users.id, 1), inArray(users.id, [1, 2, 3]), inArray(users.id, db.select({ id: users.id }).from(users)), inArray(users.id, sql`select id from ${users}`), notInArray(users.id, [1, 2, 3]), notInArray(users.id, db.select({ id: users.id }).from(users)), notInArray(users.id, sql`select id from ${users}`), isNull(users.subClass), isNotNull(users.id), exists(db.select({ id: users.id }).from(users)), exists(sql`select id from ${users}`), notExists(db.select({ id: users.id }).from(users)), notExists(sql`select id from ${users}`), between(users.id, 1, 2), notBetween(users.id, 1, 2), like(users.id, '%1%'), notLike(users.id, '%1%'), ilike(users.id, '%1%'), notIlike(users.id, '%1%'), )); Expect< Equal<{ col2: unknown; col3: number; col33: number; col34: { foo: any }; col4: string | number; col5: unknown; col6: boolean; col7: number; col8: { foo: any; }; col9: unknown; col10: boolean | Date; col11: unknown; col12: unknown; col13: unknown; col14: number; count: number; }[], typeof allOperators> >; const textSelect = await db .select({ t: users.text, }) .from(users); Expect>; const homeCity = alias(cities, 'homeCity'); const c = alias(classes, 'c'); const otherClass = alias(classes, 'otherClass'); const anotherClass = alias(classes, 'anotherClass'); const friend = alias(users, 'friend'); const currentCity = alias(cities, 'currentCity'); const subscriber = alias(users, 'subscriber'); const closestCity = alias(cities, 'closestCity'); const megaJoin = await db .select({ user: { id: users.id, maxAge: sql`max(${users.age1})`, }, city: { id: cities.id, }, homeCity, c, otherClass, anotherClass, friend, currentCity, subscriber, closestCity, }) .from(users) .innerJoin(cities, sql`${users.id} = ${cities.id}`) .innerJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) .innerJoin(c, eq(c.id, users.class)) .innerJoin(otherClass, sql`${c.id} = ${otherClass.id}`) .innerJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) .innerJoin(friend, sql`${users.id} = ${friend.id}`) .innerJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) .innerJoin(subscriber, sql`${users.class} = ${subscriber.id}`) .innerJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) .limit(1) .offset(1); Expect< Equal< { user: { id: number; maxAge: unknown; }; city: { id: number; }; homeCity: { id: number; name: string; population: number | null; }; c: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; otherClass: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; anotherClass: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; friend: { id: number; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }; currentCity: { id: number; name: string; population: number | null; }; subscriber: { id: number; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }; closestCity: { id: number; name: string; population: number | null; }; }[], typeof megaJoin > >; const friends = alias(users, 'friends'); const join4 = await db .select({ user: { id: users.id, }, city: { id: cities.id, }, class: classes, friend: friends, }) .from(users) .innerJoin(cities, sql`${users.id} = ${cities.id}`) .innerJoin(classes, sql`${cities.id} = ${classes.id}`) .innerJoin(friends, sql`${friends.id} = ${users.id}`) .where(sql`${users.age1} > 0`); Expect< Equal<{ user: { id: number; }; city: { id: number; }; class: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; friend: { id: number; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }; }[], typeof join4> >; { const authenticated = false as boolean; const result = await db .select({ id: users.id, ...(authenticated ? { city: users.homeCity } : {}), }) .from(users); Expect< Equal< { id: number; city?: number; }[], typeof result > >; } await db.select().from(users).for('update'); await db.select().from(users).for('share', { skipLocked: true }); await db.select().from(users).for('update', { noWait: true }); await db .select() .from(users) // @ts-expect-error - can't use both skipLocked and noWait .for('share', { noWait: true, skipLocked: true }); { const result = await db.select().from(newYorkers); Expect< Equal< { userId: number; cityId: number | null; }[], typeof result > >; } { const result = await db.select({ userId: newYorkers.userId }).from(newYorkers); Expect< Equal< { userId: number; }[], typeof result > >; } { const query = db.select().from(users).prepare().iterator(); for await (const row of query) { Expect>(); } } { db .select() .from(users) .where(eq(users.id, 1)); db .select() .from(users) .where(eq(users.id, 1)) // @ts-expect-error - can't use where twice .where(eq(users.id, 1)); db .select() .from(users) .where(eq(users.id, 1)) .limit(10) // @ts-expect-error - can't use where twice .where(eq(users.id, 1)); } { function withFriends(qb: T) { const friends = alias(users, 'friends'); const friends2 = alias(users, 'friends2'); const friends3 = alias(users, 'friends3'); const friends4 = alias(users, 'friends4'); const friends5 = alias(users, 'friends5'); return qb .leftJoin(friends, sql`true`) .leftJoin(friends2, sql`true`) .leftJoin(friends3, sql`true`) .leftJoin(friends4, sql`true`) .leftJoin(friends5, sql`true`); } const qb = db.select().from(users).$dynamic(); const result = await withFriends(qb); Expect< Equal >; } { function withFriends(qb: T) { const friends = alias(users, 'friends'); const friends2 = alias(users, 'friends2'); const friends3 = alias(users, 'friends3'); const friends4 = alias(users, 'friends4'); const friends5 = alias(users, 'friends5'); return qb .leftJoin(friends, sql`true`) .leftJoin(friends2, sql`true`) .leftJoin(friends3, sql`true`) .leftJoin(friends4, sql`true`) .leftJoin(friends5, sql`true`); } const qb = db.select().from(users).$dynamic(); const result = await withFriends(qb); Expect< Equal >; } { function dynamic(qb: T) { return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); } const qb = db.select().from(users).$dynamic(); const result = await dynamic(qb); Expect>; } { // TODO: add to docs function dynamic(qb: T) { return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); } const query = new QueryBuilder().select().from(users).$dynamic(); dynamic(query); } { // TODO: add to docs function paginated(qb: T, page: number) { return qb.limit(10).offset((page - 1) * 10); } const qb = db.select().from(users).$dynamic(); const result = await paginated(qb, 1); Expect>; } { db .select() .from(users) .where(sql``) .limit(10) // @ts-expect-error method was already called .where(sql``); db .select() .from(users) .having(sql``) .limit(10) // @ts-expect-error method was already called .having(sql``); db .select() .from(users) .groupBy(sql``) .limit(10) // @ts-expect-error method was already called .groupBy(sql``); db .select() .from(users) .orderBy(sql``) .limit(10) // @ts-expect-error method was already called .orderBy(sql``); db .select() .from(users) .limit(10) .where(sql``) // @ts-expect-error method was already called .limit(10); db .select() .from(users) .offset(10) .limit(10) // @ts-expect-error method was already called .offset(10); db .select() .from(users) .for('update') .limit(10) // @ts-expect-error method was already called .for('update'); } { const table1 = mysqlTable('table1', { id: int().primaryKey(), name: text().notNull(), }); const table2 = mysqlTable('table2', { id: int().primaryKey(), age: int().notNull(), }); const table3 = mysqlTable('table3', { id: int().primaryKey(), phone: text().notNull(), }); const view = mysqlView('view').as((qb) => qb.select({ table: table1, column: table2.age, nested: { column: table3.phone, }, }).from(table1).innerJoin(table2, sql``).leftJoin(table3, sql``) ); const result = await db.select().from(view); Expect< Equal >; Expect>; Expect[]>>; } { const table1 = mysqlTable('table1', { id: int().primaryKey(), name: text().notNull(), }, () => [table1NameIndex]); const table1NameIndex = index('table1_name_index').on(table1.name); const table2 = mysqlTable('table2', { id: int().primaryKey(), age: int().notNull(), table1Id: int().references(() => table1.id).notNull(), }, () => [table2AgeIndex, table2Table1Index]); const table2AgeIndex = index('table2_name_index').on(table2.age); const table2Table1Index = index('table2_table1_index').on(table2.table1Id); const view = mysqlView('view').as((qb) => qb.select().from(table2)); const sq = db.select().from(table2, { useIndex: ['posts_text_index'] }).as('sq'); await db.select().from(table1, { useIndex: table1NameIndex, forceIndex: table1NameIndex, ignoreIndex: table1NameIndex, }); await db.select().from(table1, { useIndex: [table1NameIndex], forceIndex: [table1NameIndex], ignoreIndex: [table1NameIndex], }); await db.select().from(table1, { useIndex: table1NameIndex, // @ts-expect-error table1NameIndex, forceIndex: table1NameIndex, ignoreIndex: table1NameIndex, }); // @ts-expect-error await db.select().from(view, { useIndex: table1NameIndex, forceIndex: table1NameIndex, table1NameIndex, ignoreIndex: [table1NameIndex], }); // @ts-expect-error await db.select().from(sq, { useIndex: table1NameIndex, forceIndex: table1NameIndex, table1NameIndex, ignoreIndex: [table1NameIndex], }); const join1 = await db.select().from(table1) .leftJoin(table2, eq(table1.id, table2.table1Id), { useIndex: table2AgeIndex, forceIndex: table2AgeIndex, ignoreIndex: table2AgeIndex, }); Expect< Equal< { table1: { id: number; name: string; }; table2: { id: number; age: number; table1Id: number; } | null; }[], typeof join1 > >; const join2 = await db.select().from(table1) .leftJoin(table2, eq(table1.id, table2.table1Id), { useIndex: [table2AgeIndex, table2Table1Index], forceIndex: [table2AgeIndex, table2Table1Index], ignoreIndex: [table2AgeIndex, table2Table1Index], }); Expect< Equal< { table1: { id: number; name: string; }; table2: { id: number; age: number; table1Id: number; } | null; }[], typeof join2 > >; const join3 = await db.select().from(table1) .crossJoin(table2, { useIndex: [table2AgeIndex, table2Table1Index], forceIndex: [table2AgeIndex, table2Table1Index], ignoreIndex: [table2AgeIndex, table2Table1Index], }); Expect< Equal< { table1: { id: number; name: string; }; table2: { id: number; age: number; table1Id: number; }; }[], typeof join3 > >; const sqJoin1 = await db.select().from(table1, { useIndex: table1NameIndex, }) .leftJoin(sq, eq(table1.id, sq.table1Id)); Expect< Equal< { table1: { id: number; name: string; }; sq: { id: number; age: number; table1Id: number; } | null; }[], typeof sqJoin1 > >; const sqJoin2 = await db.select().from(table1, { useIndex: [table1NameIndex, table1NameIndex], }) .leftJoin(sq, eq(table1.id, sq.table1Id)); Expect< Equal< { table1: { id: number; name: string; }; sq: { id: number; age: number; table1Id: number; } | null; }[], typeof sqJoin2 > >; await db.select().from(table1) // @ts-expect-error .leftJoin(view, eq(table1.id, view.table1Id), { useIndex: table2AgeIndex, forceIndex: table2AgeIndex, table2Table1Index, ignoreIndex: [table2AgeIndex, table2Table1Index], }); await db.select().from(table1) // @ts-expect-error .leftJoin(sq, eq(table1.id, sq.table1Id), { useIndex: table2AgeIndex, forceIndex: table2AgeIndex, table2Table1Index, ignoreIndex: [table2AgeIndex, table2Table1Index], }); await db.select().from(table1) // @ts-expect-error .crossJoin(table2, eq(table1.id, table2.table1Id), { useIndex: [table2AgeIndex, table2Table1Index], forceIndex: [table2AgeIndex, table2Table1Index], ignoreIndex: [table2AgeIndex, table2Table1Index], }); await db.select().from(table1) // @ts-expect-error .crossJoin(table2, eq(table1.id, table2.table1Id)); } { const table1 = mysqlTable('table1', { id: int().primaryKey(), name: text().notNull(), }); const table2 = mysqlTable('table2', { id: int().primaryKey(), age: int().notNull(), table1Id: int().references(() => table1.id).notNull(), }); const view = mysqlView('view').as((qb) => qb.select().from(table2)); const leftLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).leftJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`, sql`true`); Expect< Equal >; const leftLateralSubRes = await db.select().from(table1).leftJoinLateral( db.select().from(table2).as('sub'), sql`true`, ); Expect< Equal >; const sqLeftLateral = db.select().from(table2).as('sub'); const leftLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqLeftLateral.id, }, ).from(table1).leftJoinLateral( sqLeftLateral, sql`true`, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .leftJoinLateral(table2, sql`true`); await db.select().from(table1) // @ts-expect-error .leftJoinLateral(view, sql`true`); const innerLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).innerJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`, sql`true`); Expect< Equal >; const innerLateralSubRes = await db.select().from(table1).innerJoinLateral( db.select().from(table2).as('sub'), sql`true`, ); Expect< Equal >; const sqInnerLateral = db.select().from(table2).as('sub'); const innerLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqLeftLateral.id, }, ).from(table1).innerJoinLateral( sqInnerLateral, sql`true`, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .innerJoinLateral(table2, sql`true`); await db.select().from(table1) // @ts-expect-error .innerJoinLateral(view, sql`true`); const crossLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).crossJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`); Expect< Equal >; const crossLateralSubRes = await db.select().from(table1).crossJoinLateral( db.select().from(table2).as('sub'), ); Expect< Equal >; const sqCrossLateral = db.select().from(table2).as('sub'); const crossLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqCrossLateral.id, }, ).from(table1).crossJoinLateral( sqInnerLateral, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .crossJoinLateral(table2); await db.select().from(table1) // @ts-expect-error .crossJoinLateral(view); } ================================================ FILE: drizzle-orm/type-tests/mysql/set-operators.ts ================================================ import { type Equal, Expect } from 'type-tests/utils.ts'; import { except, exceptAll, intersect, intersectAll, type MySqlSetOperator, union, unionAll, } from '~/mysql-core/index.ts'; import { eq } from '~/sql/expressions/index.ts'; import { desc, sql } from '~/sql/index.ts'; import { db } from './db.ts'; import { cities, classes, newYorkers, users } from './tables.ts'; const unionTest = await db .select({ id: users.id }) .from(users) .union( db .select({ id: users.id }) .from(users), ); Expect>; const unionAllTest = await db .select({ id: users.id, age: users.age1 }) .from(users) .unionAll( db.select({ id: users.id, age: users.age1 }) .from(users) .leftJoin(cities, eq(users.id, cities.id)), ); Expect>; const intersectTest = await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) .intersect(({ intersect }) => intersect( db .select({ id: users.id, homeCity: users.homeCity }) .from(users), db .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) .from(users), ) ); Expect>; const intersectAllTest = await db .select({ id: users.id, homeCity: users.class }) .from(users) .intersect( db .select({ id: users.id, homeCity: users.class }) .from(users) .leftJoin(cities, eq(users.id, cities.id)), ); Expect>; const exceptTest = await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) .except( db .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) .from(users), ); Expect>; const exceptAllTest = await db .select({ id: users.id, homeCity: users.class }) .from(users) .except( db .select({ id: users.id, homeCity: sql<'A' | 'C'>`${users.class}` }) .from(users), ); Expect>; const union2Test = await union(db.select().from(cities), db.select().from(cities), db.select().from(cities)); Expect>; const unionAll2Test = await unionAll( db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select().from(cities), ); Expect>; const intersect2Test = await intersect( db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), ); Expect>; const intersectAll2Test = await intersectAll( union( db.select({ id: cities.id, }).from(cities), db.select({ id: cities.id, }) .from(cities).where(sql``), ), db.select({ id: cities.id, }) .from(cities), ).orderBy(desc(cities.id)).limit(23); Expect>; const except2Test = await except( db.select({ userId: newYorkers.userId, }) .from(newYorkers), db.select({ userId: newYorkers.userId, }).from(newYorkers), ); Expect>; const exceptAll2Test = await exceptAll( db.select({ userId: newYorkers.userId, cityId: newYorkers.cityId, }) .from(newYorkers).where(sql``), db.select({ userId: newYorkers.userId, cityId: newYorkers.cityId, }).from(newYorkers).leftJoin(users, sql``), ); Expect>; const unionfull = await union(db.select().from(users), db.select().from(users)).orderBy(sql``).limit(1).offset(2); Expect< Equal<{ id: number; text: string | null; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }[], typeof unionfull> >; union(db.select().from(users), db.select().from(users)) .orderBy(sql``) // @ts-expect-error - method was already called .orderBy(sql``); union(db.select().from(users), db.select().from(users)) .offset(1) // @ts-expect-error - method was already called .offset(2); union(db.select().from(users), db.select().from(users)) .orderBy(sql``) // @ts-expect-error - method was already called .orderBy(sql``); { function dynamic(qb: T) { return qb.orderBy(sql``).limit(1).offset(2); } const qb = union(db.select().from(users), db.select().from(users)).$dynamic(); const result = await dynamic(qb); Expect>; } await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error .intersect(({ intersect }) => intersect(db.select().from(users), db.select().from(users))); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select().from(classes).union(db.select({ id: classes.id }).from(classes)); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: classes.id }).from(classes).union(db.select().from(classes).where(sql``)); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: classes.id }).from(classes).union(db.select().from(classes)); union( db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), db.select({ id: cities.id, name: cities.name }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select().from(cities), ); union( db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities), db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``).limit(3).$dynamic(), db.select({ id: cities.id, name: cities.name }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: newYorkers.userId }).from(newYorkers), db.select({ id: cities.id }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities).where(sql``), db.select({ id: sql`${cities.id}` }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities).where(sql``), ); ================================================ FILE: drizzle-orm/type-tests/mysql/subquery.ts ================================================ import { Expect } from 'type-tests/utils.ts'; import { alias, int, mysqlTable, serial, text } from '~/mysql-core/index.ts'; import { and, eq } from '~/sql/expressions/index.ts'; import { count } from '~/sql/functions/aggregate.ts'; import { sql } from '~/sql/sql.ts'; import type { DrizzleTypeError, Equal } from '~/utils.ts'; import { db } from './db.ts'; const users = mysqlTable('names', { id: serial('id').primaryKey(), name: text('name'), managerId: int('author_id'), }); const posts = mysqlTable('posts', { id: serial('id').primaryKey(), authorId: int('author_id'), title: text('title'), }); const n1 = db .select({ id: users.id, name: users.name, authorId: users.managerId, count1: sql`count(1)::int`.as('count1'), }) .from(users) .groupBy(users.id, users.name, users.managerId) .as('n1'); const n2 = db .select({ id: users.id, authorId: users.managerId, totalCount: sql`count(1)::int`.as('totalCount'), }) .from(users) .groupBy(users.id, users.managerId) .as('n2'); const result = await db .select({ name: n1.name, authorId: n1.authorId, count1: n1.count1, totalCount: n2.totalCount, }) .from(n1) .innerJoin(n2, and(eq(n2.id, n1.id), eq(n2.authorId, n1.authorId))); Expect< Equal< { name: string | null; authorId: number | null; count1: number; totalCount: number; }[], typeof result > >; const names2 = alias(users, 'names2'); const sq1 = db .select({ id: users.id, name: users.name, id2: names2.id, }) .from(users) .leftJoin(names2, eq(users.name, names2.name)) .as('sq1'); const res = await db.select().from(sq1); Expect< Equal< { id: number; name: string | null; id2: number | null; }[], typeof res > >; { const sq = db.select({ count: sql`count(1)::int` }).from(users).as('sq'); Expect ? true : false>; } const sqUnion = db.select().from(users).union(db.select().from(names2)).as('sqUnion'); const resUnion = await db.select().from(sqUnion); Expect< Equal<{ id: number; name: string | null; managerId: number | null; }[], typeof resUnion> >; const fromSubquery = await db.select({ count: db.select({ count: count().as('c') }).from(posts).where(eq(posts.authorId, users.id)).as('count'), }).from(users); Expect>; const fromSubquery2 = await db.select({ name: db.select({ name: users.name }).from(users).where(eq(users.id, posts.authorId)).as('name'), }).from(posts); Expect>; const errorSubquery = await db.select({ name: db.select({ name: users.name, managerId: users.managerId }).from(users).where(eq(users.id, posts.authorId)).as( 'name', ), }).from(posts); Expect }[]>>; ================================================ FILE: drizzle-orm/type-tests/mysql/tables-rel.ts ================================================ import { foreignKey, int, mysqlTable, serial, text, timestamp } from '~/mysql-core/index.ts'; import { relations } from '~/relations.ts'; export const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').references(() => cities.id).notNull(), homeCityId: int('home_city_id').references(() => cities.id), createdAt: timestamp('created_at').notNull(), }); export const usersConfig = relations(users, ({ one, many }) => ({ city: one(cities, { relationName: 'UsersInCity', fields: [users.cityId], references: [cities.id] }), homeCity: one(cities, { fields: [users.homeCityId], references: [cities.id] }), posts: many(posts), comments: many(comments), })); export const cities = mysqlTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), }); export const citiesConfig = relations(cities, ({ many }) => ({ users: many(users, { relationName: 'UsersInCity' }), })); export const posts = mysqlTable('posts', { id: serial('id').primaryKey(), title: text('title').notNull(), authorId: int('author_id').references(() => users.id), }); export const postsConfig = relations(posts, ({ one, many }) => ({ author: one(users, { fields: [posts.authorId], references: [users.id] }), comments: many(comments), })); export const comments = mysqlTable('comments', { id: serial('id').primaryKey(), postId: int('post_id').references(() => posts.id).notNull(), authorId: int('author_id').references(() => users.id), text: text('text').notNull(), }); export const commentsConfig = relations(comments, ({ one }) => ({ post: one(posts, { fields: [comments.postId], references: [posts.id] }), author: one(users, { fields: [comments.authorId], references: [users.id] }), })); export const books = mysqlTable('books', { id: serial('id').primaryKey(), name: text('name').notNull(), }); export const booksConfig = relations(books, ({ many }) => ({ authors: many(bookAuthors), })); export const bookAuthors = mysqlTable('book_authors', { bookId: int('book_id').references(() => books.id).notNull(), authorId: int('author_id').references(() => users.id).notNull(), role: text('role').notNull(), }); export const bookAuthorsConfig = relations(bookAuthors, ({ one }) => ({ book: one(books, { fields: [bookAuthors.bookId], references: [books.id] }), author: one(users, { fields: [bookAuthors.authorId], references: [users.id] }), })); export const node = mysqlTable('node', { id: serial('id').primaryKey(), parentId: int('parent_id'), leftId: int('left_id'), rightId: int('right_id'), }, (node) => ({ fk1: foreignKey({ columns: [node.parentId], foreignColumns: [node.id] }), fk2: foreignKey({ columns: [node.leftId], foreignColumns: [node.id] }), fk3: foreignKey({ columns: [node.rightId], foreignColumns: [node.id] }), })); export const nodeRelations = relations(node, ({ one }) => ({ parent: one(node, { fields: [node.parentId], references: [node.id] }), left: one(node, { fields: [node.leftId], references: [node.id] }), right: one(node, { fields: [node.rightId], references: [node.id] }), })); ================================================ FILE: drizzle-orm/type-tests/mysql/tables.ts ================================================ import * as crypto from 'node:crypto'; import { type Equal, Expect } from 'type-tests/utils.ts'; import type { BuildColumn } from '~/column-builder.ts'; import { bigint, binary, boolean, char, check, customType, date, datetime, decimal, double, float, foreignKey, index, int, json, longtext, mediumint, mediumtext, type MySqlColumn, mysqlEnum, mysqlTable, primaryKey, real, serial, smallint, text, time, timestamp, tinyint, tinytext, unique, uniqueIndex, varbinary, varchar, year, } from '~/mysql-core/index.ts'; import { mysqlSchema } from '~/mysql-core/schema.ts'; import { mysqlView, type MySqlViewWithSelection } from '~/mysql-core/view.ts'; import { eq, gt } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import type { InferSelectModel } from '~/table.ts'; import type { Simplify } from '~/utils.ts'; import { db } from './db.ts'; export const users = mysqlTable( 'users_table', { id: serial('id').primaryKey(), homeCity: int('home_city') .notNull() .references(() => cities.id), currentCity: int('current_city').references(() => cities.id), serialNullable: serial('serial1'), serialNotNull: serial('serial2').notNull(), class: text('class', { enum: ['A', 'C'] }).notNull(), subClass: text('sub_class', { enum: ['B', 'D'] }), text: text('text'), age1: int('age1').notNull(), createdAt: timestamp('created_at', { mode: 'date' }).notNull().defaultNow(), enumCol: mysqlEnum('enum_col', ['a', 'b', 'c']).notNull(), }, (users) => ({ usersAge1Idx: uniqueIndex('usersAge1Idx').on(users.class), usersAge2Idx: index('usersAge2Idx').on(users.class), uniqueClass: uniqueIndex('uniqueClass') .on(users.class, users.subClass) .lock('default') .algorithm('copy') .using(`btree`), legalAge: check('legalAge', sql`${users.age1} > 18`), usersClassFK: foreignKey({ columns: [users.subClass], foreignColumns: [classes.subClass] }), usersClassComplexFK: foreignKey({ columns: [users.class, users.subClass], foreignColumns: [classes.class, classes.subClass], }), pk: primaryKey(users.age1, users.class), }), ); export const cities = mysqlTable('cities_table', { id: serial('id').primaryKey(), name: text('name_db').notNull(), population: int('population').default(0), }, (cities) => ({ citiesNameIdx: index('citiesNameIdx').on(cities.id), })); Expect< Equal< { id: MySqlColumn< { name: 'id'; tableName: 'cities_table'; dataType: 'number'; columnType: 'MySqlSerial'; data: number; driverParam: number; notNull: true; hasDefault: true; isPrimaryKey: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isAutoincrement: true; hasRuntimeDefault: false; }, {}, {} >; name: MySqlColumn< { name: 'name_db'; tableName: 'cities_table'; dataType: 'string'; columnType: 'MySqlText'; data: string; driverParam: string; notNull: true; hasDefault: false; isPrimaryKey: false; enumValues: [string, ...string[]]; baseColumn: never; generated: undefined; identity: undefined; isAutoincrement: false; hasRuntimeDefault: false; }, {}, {} >; population: MySqlColumn< { name: 'population'; tableName: 'cities_table'; dataType: 'number'; columnType: 'MySqlInt'; data: number; driverParam: string | number; notNull: false; hasDefault: true; isPrimaryKey: false; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isAutoincrement: false; hasRuntimeDefault: false; }, {}, {} >; }, typeof cities._.columns > >; Expect< Equal<{ id: number; name_db: string; population: number | null; }, InferSelectModel> >; Expect< Equal<{ id?: number; name: string; population?: number | null; }, typeof cities.$inferInsert> >; export const customSchema = mysqlSchema('custom_schema'); export const citiesCustom = customSchema.table('cities_table', { id: serial('id').primaryKey(), name: text('name_db').notNull(), population: int('population').default(0), }, (cities) => ({ citiesNameIdx: index('citiesNameIdx').on(cities.id), })); Expect>; export const classes = mysqlTable('classes_table', ({ serial, text }) => ({ id: serial('id').primaryKey(), class: text('class', { enum: ['A', 'C'] }), subClass: text('sub_class', { enum: ['B', 'D'] }).notNull(), })); /* export const classes2 = mysqlTable('classes_table', { id: serial().primaryKey(), class: text({ enum: ['A', 'C'] }).$dbName('class_db'), subClass: text({ enum: ['B', 'D'] }).notNull(), }); */ export const newYorkers = mysqlView('new_yorkers') .algorithm('merge') .sqlSecurity('definer') .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); Expect< Equal< MySqlViewWithSelection<'new_yorkers', false, { userId: MySqlColumn<{ name: 'id'; dataType: 'number'; columnType: 'MySqlSerial'; data: number; driverParam: number; notNull: true; hasDefault: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: true; hasRuntimeDefault: false; }>; cityId: MySqlColumn<{ name: 'id'; dataType: 'number'; columnType: 'MySqlSerial'; data: number; driverParam: number; notNull: false; hasDefault: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: true; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; { const newYorkers = customSchema.view('new_yorkers') .algorithm('merge') .sqlSecurity('definer') .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); Expect< Equal< MySqlViewWithSelection<'new_yorkers', false, { userId: MySqlColumn<{ name: 'id'; dataType: 'number'; columnType: 'MySqlSerial'; data: number; driverParam: number; notNull: true; hasDefault: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: true; hasRuntimeDefault: false; }>; cityId: MySqlColumn<{ name: 'id'; dataType: 'number'; columnType: 'MySqlSerial'; data: number; driverParam: number; notNull: false; hasDefault: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: true; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = mysqlView('new_yorkers', { userId: int('user_id').notNull(), cityId: int('city_id'), }) .algorithm('merge') .sqlSecurity('definer') .as( sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ eq(cities.id, users.homeCity) } where ${gt(users.age1, 18)}`, ); Expect< Equal< MySqlViewWithSelection<'new_yorkers', false, { userId: MySqlColumn<{ name: 'user_id'; dataType: 'number'; columnType: 'MySqlInt'; data: number; driverParam: string | number; hasDefault: false; notNull: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: MySqlColumn<{ name: 'city_id'; notNull: false; hasDefault: false; dataType: 'number'; columnType: 'MySqlInt'; data: number; driverParam: string | number; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = customSchema.view('new_yorkers', { userId: int('user_id').notNull(), cityId: int('city_id'), }) .algorithm('merge') .sqlSecurity('definer') .as( sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ eq(cities.id, users.homeCity) } where ${gt(users.age1, 18)}`, ); Expect< Equal< MySqlViewWithSelection<'new_yorkers', false, { userId: MySqlColumn<{ name: 'user_id'; dataType: 'number'; columnType: 'MySqlInt'; data: number; driverParam: string | number; hasDefault: false; notNull: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: MySqlColumn<{ name: 'city_id'; notNull: false; hasDefault: false; dataType: 'number'; columnType: 'MySqlInt'; data: number; driverParam: string | number; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = mysqlView('new_yorkers', { userId: int('user_id').notNull(), cityId: int('city_id'), }).existing(); Expect< Equal< MySqlViewWithSelection<'new_yorkers', true, { userId: MySqlColumn<{ name: 'user_id'; dataType: 'number'; columnType: 'MySqlInt'; data: number; driverParam: string | number; hasDefault: false; notNull: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: MySqlColumn<{ name: 'city_id'; notNull: false; hasDefault: false; dataType: 'number'; columnType: 'MySqlInt'; data: number; driverParam: string | number; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = customSchema.view('new_yorkers', { userId: int('user_id').notNull(), cityId: int('city_id'), }).existing(); Expect< Equal< MySqlViewWithSelection<'new_yorkers', true, { userId: MySqlColumn<{ name: 'user_id'; dataType: 'number'; columnType: 'MySqlInt'; data: number; driverParam: string | number; hasDefault: false; notNull: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: MySqlColumn<{ name: 'city_id'; notNull: false; hasDefault: false; dataType: 'number'; columnType: 'MySqlInt'; data: number; driverParam: string | number; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const customText = customType<{ data: string }>({ dataType() { return 'text'; }, }); const t = customText('name').notNull(); Expect< Equal< { brand: 'Column'; name: 'name'; tableName: 'table'; dataType: 'custom'; columnType: 'MySqlCustomColumn'; data: string; driverParam: unknown; notNull: true; hasDefault: false; enumValues: undefined; baseColumn: never; dialect: 'mysql'; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }, Simplify['_']> > >; } { mysqlTable('test', { bigint: bigint('bigint', { mode: 'bigint' }), number: bigint('number', { mode: 'number' }), date: date('date').default(new Date()), date2: date('date2', { mode: 'date' }).default(new Date()), date3: date('date3', { mode: 'string' }).default('2020-01-01'), date4: date('date4', { mode: undefined }).default(new Date()), datetime: datetime('datetime').default(new Date()), datetime2: datetime('datetime2', { mode: 'date' }).default(new Date()), datetime3: datetime('datetime3', { mode: 'string' }).default('2020-01-01'), datetime4: datetime('datetime4', { mode: undefined }).default(new Date()), timestamp: timestamp('timestamp').default(new Date()), timestamp2: timestamp('timestamp2', { mode: 'date' }).default(new Date()), timestamp3: timestamp('timestamp3', { mode: 'string' }).default('2020-01-01'), timestamp4: timestamp('timestamp4', { mode: undefined }).default(new Date()), }); } { mysqlTable('test', { col1: decimal('col1').default('1'), }); } { const test = mysqlTable('test', { test1: mysqlEnum('test', ['a', 'b', 'c'] as const).notNull(), test2: mysqlEnum('test', ['a', 'b', 'c']).notNull(), test3: varchar('test', { length: 255, enum: ['a', 'b', 'c'] as const }).notNull(), test4: varchar('test', { length: 255, enum: ['a', 'b', 'c'] }).notNull(), test5: text('test', { enum: ['a', 'b', 'c'] as const }).notNull(), test6: text('test', { enum: ['a', 'b', 'c'] }).notNull(), test7: tinytext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), test8: tinytext('test', { enum: ['a', 'b', 'c'] }).notNull(), test9: mediumtext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), test10: mediumtext('test', { enum: ['a', 'b', 'c'] }).notNull(), test11: longtext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), test12: longtext('test', { enum: ['a', 'b', 'c'] }).notNull(), test13: char('test', { enum: ['a', 'b', 'c'] as const }).notNull(), test14: char('test', { enum: ['a', 'b', 'c'] }).notNull(), test15: text('test').notNull(), }); Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; } { // All types with generated columns const test = mysqlTable('test', { test1: mysqlEnum('test', ['a', 'b', 'c'] as const).generatedAlwaysAs(sql``), test2: mysqlEnum('test', ['a', 'b', 'c']).generatedAlwaysAs(sql``), test3: varchar('test', { length: 255, enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test4: varchar('test', { length: 255, enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test5: text('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test6: text('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test7: tinytext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test8: tinytext('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test9: mediumtext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test10: mediumtext('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test11: longtext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test12: longtext('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test13: char('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test14: char('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test15: text('test').generatedAlwaysAs(sql``), }); Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; } { const getUsersTable = (schemaName: TSchema) => { return mysqlSchema(schemaName).table('users', { id: int('id').primaryKey(), name: text('name').notNull(), }); }; const users1 = getUsersTable('id1'); Expect>; const users2 = getUsersTable('id2'); Expect>; } { const internalStaff = mysqlTable('internal_staff', { userId: int('user_id').notNull(), }); const customUser = mysqlTable('custom_user', { id: int('id').notNull(), }); const ticket = mysqlTable('ticket', { staffId: int('staff_id').notNull(), }); const subq = db .select() .from(internalStaff) .leftJoin( customUser, eq(internalStaff.userId, customUser.id), ).as('internal_staff'); const mainQuery = await db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); Expect< Equal<{ internal_staff: { internal_staff: { userId: number; }; custom_user: { id: number | null; }; } | null; ticket: { staffId: number; }; }[], typeof mainQuery> >; } { const newYorkers = mysqlView('new_yorkers') .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); await db.select().from(newYorkers).leftJoin(newYorkers, eq(newYorkers.userId, newYorkers.userId)); } { const test = mysqlTable('test', { id: text('id').$defaultFn(() => crypto.randomUUID()).primaryKey(), }); Expect< Equal<{ id?: string; }, typeof test.$inferInsert> >; } { mysqlTable('test', { id: int('id').$default(() => 1), id2: int('id').$defaultFn(() => 1), // @ts-expect-error - should be number id3: int('id').$default(() => '1'), // @ts-expect-error - should be number id4: int('id').$defaultFn(() => '1'), }); } { const emailLog = mysqlTable( 'email_log', { id: int('id', { unsigned: true }).autoincrement().notNull(), clientId: int('id_client', { unsigned: true }).references((): MySqlColumn => emailLog.id, { onDelete: 'set null', onUpdate: 'cascade', }), receiverEmail: varchar('receiver_email', { length: 255 }).notNull(), messageId: varchar('message_id', { length: 255 }), contextId: int('context_id', { unsigned: true }), contextType: mysqlEnum('context_type', ['test']).$type<['test']>(), action: varchar('action', { length: 80 }).$type<['test']>(), events: json('events').$type<{ t: 'test' }[]>(), createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().onUpdateNow(), }, (table) => { return { emailLogId: primaryKey({ columns: [table.id], name: 'email_log_id' }), emailLogMessageIdUnique: unique('email_log_message_id_unique').on(table.messageId), }; }, ); Expect< Equal<{ receiverEmail: string; id?: number | undefined; createdAt?: string | undefined; clientId?: number | null | undefined; messageId?: string | null | undefined; contextId?: number | null | undefined; contextType?: ['test'] | null | undefined; action?: ['test'] | null | undefined; events?: | { t: 'test'; }[] | null | undefined; updatedAt?: string | null | undefined; }, typeof emailLog.$inferInsert> >; } { const customRequiredConfig = customType<{ data: string; driverData: string; config: { length: number }; configRequired: true; }>({ dataType(config) { Expect>; return `varchar(${config.length})`; }, toDriver(value) { Expect>(); return value; }, fromDriver(value) { Expect>(); return value; }, }); customRequiredConfig('t', { length: 10 }); customRequiredConfig({ length: 10 }); // @ts-expect-error - config is required customRequiredConfig('t'); // @ts-expect-error - config is required customRequiredConfig(); } { const customOptionalConfig = customType<{ data: string; driverData: string; config: { length: number }; }>({ dataType(config) { Expect>; return config ? `varchar(${config.length})` : `text`; }, toDriver(value) { Expect>(); return value; }, fromDriver(value) { Expect>(); return value; }, }); customOptionalConfig('t', { length: 10 }); customOptionalConfig('t'); customOptionalConfig({ length: 10 }); customOptionalConfig(); } { mysqlTable('all_columns', { bigint: bigint('bigint', { mode: 'number' }), bigint2: bigint('bigint', { mode: 'number', unsigned: true }), bigintdef: bigint('bigintdef', { mode: 'number' }).default(0), binary: binary('binary'), binary1: binary('binary1', { length: 1 }), binarydef: binary('binarydef').default(''), boolean: boolean('boolean'), booleandef: boolean('booleandef').default(false), char: char('char'), char2: char('char2', { length: 1 }), char3: char('char3', { enum: ['a', 'b', 'c'] }), char4: char('char4', { length: 1, enum: ['a', 'b', 'c'] }), chardef: char('chardef').default(''), date: date('date'), date2: date('date2', { mode: 'string' }), datedef: date('datedef').default(new Date()), datetime: datetime('datetime'), datetime2: datetime('datetime2', { mode: 'string' }), datetime3: datetime('datetime3', { mode: 'string', fsp: 3 }), datetimedef: datetime('datetimedef').default(new Date()), decimal: decimal('decimal'), decimal2: decimal('decimal2', { precision: 10 }), decimal3: decimal('decimal3', { scale: 2 }), decimal4: decimal('decimal4', { precision: 10, scale: 2 }), decimaldef: decimal('decimaldef').default('0'), double: double('double'), double2: double('double2', { precision: 10 }), double3: double('double3', { scale: 2 }), double4: double('double4', { precision: 10, scale: 2 }), doubledef: double('doubledef').default(0), enum: mysqlEnum('enum', ['a', 'b', 'c']), enumdef: mysqlEnum('enumdef', ['a', 'b', 'c']).default('a'), float: float('float'), float2: float('float2', { precision: 10 }), float3: float('float3', { scale: 2 }), float4: float('float4', { precision: 10, scale: 2 }), floatdef: float('floatdef').default(0), int: int('int'), int2: int('int2', { unsigned: true }), intdef: int('intdef').default(0), json: json('json'), jsondef: json('jsondef').default({}), mediumint: mediumint('mediumint'), mediumint2: mediumint('mediumint2', { unsigned: true }), mediumintdef: mediumint('mediumintdef').default(0), real: real('real'), real2: real('real2', { precision: 10 }), real3: real('real3', { scale: 2 }), real4: real('real4', { precision: 10, scale: 2 }), realdef: real('realdef').default(0), serial: serial('serial'), serialdef: serial('serialdef').default(0), smallint: smallint('smallint'), smallint2: smallint('smallint2', { unsigned: true }), smallintdef: smallint('smallintdef').default(0), text: text('text'), text2: text('text2', { enum: ['a', 'b', 'c'] }), textdef: text('textdef').default(''), tinytext: tinytext('tinytext'), tinytext2: tinytext('tinytext2', { enum: ['a', 'b', 'c'] }), tinytextdef: tinytext('tinytextdef').default(''), mediumtext: mediumtext('mediumtext'), mediumtext2: mediumtext('mediumtext2', { enum: ['a', 'b', 'c'] }), mediumtextdef: mediumtext('mediumtextdef').default(''), longtext: longtext('longtext'), longtext2: longtext('longtext2', { enum: ['a', 'b', 'c'] }), longtextdef: longtext('longtextdef').default(''), time: time('time'), time2: time('time2', { fsp: 1 }), timedef: time('timedef').default('00:00:00'), timestamp: timestamp('timestamp'), timestamp2: timestamp('timestamp2', { mode: 'string' }), timestamp3: timestamp('timestamp3', { mode: 'string', fsp: 1 }), timestamp4: timestamp('timestamp4', { fsp: 1 }), timestampdef: timestamp('timestampdef').default(new Date()), tinyint: tinyint('tinyint'), tinyint2: tinyint('tinyint2', { unsigned: true }), tinyintdef: tinyint('tinyintdef').default(0), varbinary: varbinary('varbinary', { length: 1 }), varbinarydef: varbinary('varbinarydef', { length: 1 }).default(''), varchar: varchar('varchar', { length: 1 }), varchar2: varchar('varchar2', { length: 1, enum: ['a', 'b', 'c'] }), varchardef: varchar('varchardef', { length: 1 }).default(''), year: year('year'), yeardef: year('yeardef').default(0), }); } { const keysAsColumnNames = mysqlTable('test', { id: int(), name: text(), }); Expect>; Expect>; } { mysqlTable('all_columns_without_name', { bigint: bigint({ mode: 'number' }), bigint2: bigint({ mode: 'number', unsigned: true }), bigintdef: bigint({ mode: 'number' }).default(0), binary: binary(), binrary1: binary({ length: 1 }), binarydef: binary().default(''), boolean: boolean(), booleandef: boolean().default(false), char: char(), char2: char({ length: 1 }), char3: char({ enum: ['a', 'b', 'c'] }), char4: char({ length: 1, enum: ['a', 'b', 'c'] }), chardef: char().default(''), date: date(), date2: date({ mode: 'string' }), datedef: date('datedef').default(new Date()), datetime: datetime(), datetime2: datetime({ mode: 'string' }), datetime3: datetime({ mode: 'string', fsp: 3 }), datetimedef: datetime('datetimedef').default(new Date()), decimal: decimal(), decimal2: decimal({ precision: 10 }), decimal3: decimal({ scale: 2 }), decimal4: decimal({ precision: 10, scale: 2 }), decimaldef: decimal('decimaldef').default('0'), double: double(), double2: double({ precision: 10 }), double3: double({ scale: 2 }), double4: double({ precision: 10, scale: 2 }), doubledef: double().default(0), enum: mysqlEnum(['a', 'b', 'c']), enumdef: mysqlEnum(['a', 'b', 'c']).default('a'), float: float(), float2: float({ precision: 10 }), float3: float({ scale: 2 }), float4: float({ precision: 10, scale: 2 }), floatdef: float().default(0), int: int(), int2: int({ unsigned: true }), intdef: int().default(0), json: json(), jsondef: json().default({}), mediumint: mediumint(), mediumint2: mediumint({ unsigned: true }), mediumintdef: mediumint().default(0), real: real(), real2: real({ precision: 10 }), real3: real({ scale: 2 }), real4: real({ precision: 10, scale: 2 }), realdef: real().default(0), serial: serial(), serialdef: serial().default(0), smallint: smallint(), smallint2: smallint({ unsigned: true }), smallintdef: smallint().default(0), text: text(), text2: text({ enum: ['a', 'b', 'c'] }), textdef: text().default(''), tinytext: tinytext(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }), tinytextdef: tinytext().default(''), mediumtext: mediumtext(), mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }), mediumtextdef: mediumtext().default(''), longtext: longtext(), longtext2: longtext({ enum: ['a', 'b', 'c'] }), longtextdef: longtext().default(''), time: time(), time2: time({ fsp: 1 }), timedef: time().default('00:00:00'), timestamp: timestamp(), timestamp2: timestamp({ mode: 'string' }), timestamp3: timestamp({ mode: 'string', fsp: 1 }), timestamp4: timestamp({ fsp: 1 }), timestampdef: timestamp().default(new Date()), tinyint: tinyint(), tinyint2: tinyint({ unsigned: true }), tinyintdef: tinyint().default(0), varbinary: varbinary({ length: 1 }), varbinarydef: varbinary({ length: 1 }).default(''), varchar: varchar({ length: 1 }), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }), varchardef: varchar({ length: 1 }).default(''), year: year(), yeardef: year().default(0), }); } { enum Role { admin = 'admin', user = 'user', guest = 'guest', } enum RoleNonString { admin, user, guest, } enum RolePartiallyString { admin, user = 'user', guest = 'guest', } const table = mysqlTable('table', { enum: mysqlEnum('enum', Role), // @ts-expect-error enum1: mysqlEnum('enum1', RoleNonString), // @ts-expect-error enum2: mysqlEnum('enum2', RolePartiallyString), }); const res = await db.select({ enum: table.enum }).from(table); Expect>; } ================================================ FILE: drizzle-orm/type-tests/mysql/update.ts ================================================ import { type Equal, Expect } from 'type-tests/utils.ts'; import type { MySqlUpdate } from '~/mysql-core/index.ts'; import type { MySqlRawQueryResult } from '~/mysql2/session.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { users } from './tables.ts'; { function dynamic(qb: T) { return qb.where(sql``); } const qbBase = db.update(users).set({}).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { db .update(users) .set({}) .where(sql``) // @ts-expect-error method was already called .where(sql``); } { db.update(users).set({}).where(sql``).limit(1).orderBy(sql``); } ================================================ FILE: drizzle-orm/type-tests/mysql/with.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { int, mysqlTable, serial, text } from '~/mysql-core/index.ts'; import { gt, inArray, like } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; const orders = mysqlTable('orders', { id: serial('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull(), amount: int('amount').notNull(), quantity: int('quantity').notNull(), generated: text('generatedText').generatedAlwaysAs(sql``), }); { const regionalSales = db .$with('regional_sales') .as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = db .$with('top_regions') .as( db .select({ region: orders.region, totalSales: orders.amount, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`sum(${orders.quantity})`, productSales: sql`sum(${orders.amount})`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))); Expect< Equal<{ region: string; product: string; productUnits: number; productSales: number; }[], typeof result> >; const allOrdersWith = db.$with('all_orders_with').as(db.select().from(orders)); const allFromWith = await db.with(allOrdersWith).select().from(allOrdersWith); Expect< Equal<{ id: number; region: string; product: string; amount: number; quantity: number; generated: string | null; }[], typeof allFromWith> >; const regionalSalesWith = db.$with('regional_sales_with').as(db.select().from(regionalSales)); db.with(regionalSalesWith).select().from(regionalSalesWith).where(like(regionalSalesWith.totalSales, 'abc')); } { const providers = mysqlTable('providers', { id: serial().primaryKey(), providerName: text().notNull(), }); const sq1 = db.$with('providers_sq', { name: providers.providerName, }).as(sql`select provider_name as name from providers`); const q1 = await db.with(sq1).select().from(sq1); Expect>; const sq2 = db.$with('providers_sq', { nested: { id: providers.id, }, }).as(() => sql`select id from providers`); const q2 = await db.with(sq2).select().from(sq2); Expect>; // @ts-expect-error db.$with('providers_sq', { name: providers.providerName }).as(db.select().from(providers)); // @ts-expect-error db.$with('providers_sq', { name: providers.providerName }).as((qb) => qb.select().from(providers)); } ================================================ FILE: drizzle-orm/type-tests/pg/1-to-1-fk.ts ================================================ import { integer, type PgColumn, serial } from '~/pg-core/columns/index.ts'; import { pgTable } from '~/pg-core/table.ts'; { const test1 = pgTable('test1_table', { id: serial('id').primaryKey(), test2Id: integer('test2_id').references(() => test2.id), }); const test1Id = integer('test1_id').references(() => test1.id); const test2 = pgTable('test2_table', { id: serial('id').primaryKey(), test1Id, }); } { const test1 = pgTable('test1_table', { id: serial('id').primaryKey(), test2Id: integer('test2_id').references((): PgColumn => test2.id), }); const test2 = pgTable('test2_table', { id: serial('id').primaryKey(), test1Id: integer('test1_id').references(() => test1.id), }); } ================================================ FILE: drizzle-orm/type-tests/pg/array.ts ================================================ import { type Equal, Expect } from 'type-tests/utils.ts'; import type { Column } from '~/column.ts'; import { integer, pgTable } from '~/pg-core/index.ts'; { const table = pgTable('table', { a: integer('a').array().notNull(), }); Expect< Equal< Column< { name: 'a'; tableName: 'table'; dataType: 'number'; columnType: 'PgInteger'; data: number; driverParam: string | number; notNull: false; hasDefault: false; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }, {}, {} >, typeof table['a']['_']['baseColumn'] > >; } ================================================ FILE: drizzle-orm/type-tests/pg/count.ts ================================================ import { Expect } from 'type-tests/utils.ts'; import { integer, pgTable, serial, text } from '~/pg-core/index.ts'; import { and, gt, ne } from '~/sql/expressions/index.ts'; import type { Equal } from '~/utils.ts'; import { db } from './db.ts'; const names = pgTable('names', { id: serial('id').primaryKey(), name: text('name'), authorId: integer('author_id'), }); const separate = await db.$count(names); const separateFilters = await db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))); const embedded = await db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: db.$count(names).as('count1'), }) .from(names); const embeddedFilters = await db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))).as('count1'), }) .from(names); Expect>; Expect>; Expect< Equal< { id: number; name: string | null; authorId: number | null; count1: number; }[], typeof embedded > >; Expect< Equal< { id: number; name: string | null; authorId: number | null; count1: number; }[], typeof embeddedFilters > >; ================================================ FILE: drizzle-orm/type-tests/pg/db-rel.ts ================================================ import pg from 'pg'; import { type Equal, Expect } from 'type-tests/utils.ts'; import { drizzle } from '~/node-postgres/index.ts'; import { sql } from '~/sql/sql.ts'; import * as schema from './tables-rel.ts'; const { Pool } = pg; const pdb = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'] }); const db = drizzle(pdb, { schema }); { const result = await db.query.users.findMany({ where: (users, { sql }) => sql`char_length(${users.name} > 1)`, limit: sql.placeholder('l'), orderBy: (users, { asc, desc }) => [asc(users.name), desc(users.id)], with: { posts: { where: (posts, { sql }) => sql`char_length(${posts.title} > 1)`, limit: sql.placeholder('l'), columns: { id: false, title: undefined, }, with: { author: true, comments: { where: (comments, { sql }) => sql`char_length(${comments.text} > 1)`, limit: sql.placeholder('l'), columns: { text: true, }, with: { author: { columns: { id: undefined, }, with: { city: { with: { users: true, }, }, }, }, }, }, }, }, }, }); Expect< Equal<{ id: number; name: string; cityId: number; homeCityId: number | null; createdAt: Date; posts: { title: string; authorId: number | null; comments: { text: string; author: { city: { id: number; name: string; users: { id: number; name: string; cityId: number; homeCityId: number | null; createdAt: Date; }[]; }; } | null; }[]; author: { id: number; name: string; cityId: number; homeCityId: number | null; createdAt: Date; } | null; }[]; }[], typeof result> >; } { const result = await db.query.users.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { authorId: true, }, extras: { lower: sql`lower(${schema.posts.title})`.as('lower_name'), }, }, }, }); Expect< Equal< { id: number; name: string; posts: { authorId: number | null; lower: string; }[]; }[], typeof result > >; } ================================================ FILE: drizzle-orm/type-tests/pg/db.ts ================================================ import pg from 'pg'; import { drizzle } from '~/node-postgres/index.ts'; const { Client } = pg; export const db = drizzle(new Client()); ================================================ FILE: drizzle-orm/type-tests/pg/delete.ts ================================================ import type { QueryResult } from 'pg'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import type { PgDelete } from '~/pg-core/index.ts'; import { eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { users } from './tables.ts'; const deleteAll = await db.delete(users); Expect, typeof deleteAll>>; const deleteAllStmt = db.delete(users).prepare('deleteAllStmt'); const deleteAllPrepared = await deleteAllStmt.execute(); Expect, typeof deleteAllPrepared>>; const deleteWhere = await db.delete(users).where(eq(users.id, 1)); Expect, typeof deleteWhere>>; const deleteWhereStmt = db.delete(users).where(eq(users.id, 1)).prepare('deleteWhereStmt'); const deleteWherePrepared = await deleteWhereStmt.execute(); Expect, typeof deleteWherePrepared>>; const deleteReturningAll = await db.delete(users).returning(); Expect>; const deleteReturningAllStmt = db.delete(users).returning().prepare('deleteReturningAllStmt'); const deleteReturningAllPrepared = await deleteReturningAllStmt.execute(); Expect>; const deleteReturningPartial = await db.delete(users).returning({ myId: users.id, myHomeCity: users.homeCity, }); Expect>; const deleteReturningPartialStmt = db.delete(users).returning({ myId: users.id, myHomeCity: users.homeCity, }).prepare('deleteReturningPartialStmt'); const deleteReturningPartialPrepared = await deleteReturningPartialStmt.execute(); Expect>; { function dynamic(qb: T) { return qb.where(sql``).returning(); } const qbBase = db.delete(users).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { function withReturning(qb: T) { return qb.returning(); } const qbBase = db.delete(users).$dynamic(); const qb = withReturning(qbBase); const result = await qb; Expect>; } { db .delete(users) .where(sql``) // @ts-expect-error method was already called .where(sql``); db .delete(users) .returning() // @ts-expect-error method was already called .returning(); } ================================================ FILE: drizzle-orm/type-tests/pg/generated-columns.ts ================================================ import { type Equal, Expect } from 'type-tests/utils'; import { type InferInsertModel, type InferSelectModel, sql } from '~/index'; import { drizzle } from '~/node-postgres'; import { integer, pgTable, serial, text, varchar } from '~/pg-core'; import { db } from './db'; const users = pgTable( 'users', { id: serial('id').primaryKey(), firstName: varchar('first_name', { length: 255 }), lastName: varchar('last_name', { length: 255 }), email: text('email').notNull(), fullName: text('full_name').generatedAlwaysAs(sql`concat_ws(first_name, ' ', last_name)`).notNull(), upperName: text('upper_name').generatedAlwaysAs( sql` case when first_name is null then null else upper(first_name) end `, ), }, ); { type User = typeof users.$inferSelect; type NewUser = typeof users.$inferInsert; Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string; upperName: string | null; }, User > >(); Expect< Equal< { email: string; id?: number | undefined; firstName?: string | null | undefined; lastName?: string | null | undefined; }, NewUser > >(); } { type User = InferSelectModel; type NewUser = InferInsertModel; Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string; upperName: string | null; }, User > >(); Expect< Equal< { email: string; id?: number | undefined; firstName?: string | null | undefined; lastName?: string | null | undefined; }, NewUser > >(); } { const dbUsers = await db.select().from(users); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string; upperName: string | null; }[], typeof dbUsers > >(); } { const db = drizzle({} as any, { schema: { users } }); const dbUser = await db.query.users.findFirst(); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string; upperName: string | null; } | undefined, typeof dbUser > >(); } { const db = drizzle({} as any, { schema: { users } }); const dbUser = await db.query.users.findMany(); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string; upperName: string | null; }[], typeof dbUser > >(); } { // @ts-expect-error - Can't use the fullName because it's a generated column await db.insert(users).values({ firstName: 'test', lastName: 'test', email: 'test', fullName: 'test', }); } { await db.update(users).set({ firstName: 'test', lastName: 'test', email: 'test', // @ts-expect-error - Can't use the fullName because it's a generated column fullName: 'test', }); } const users2 = pgTable( 'users', { id: integer('id').generatedByDefaultAsIdentity(), id2: integer('id').generatedAlwaysAsIdentity(), }, ); { type User = typeof users2.$inferSelect; type NewUser = typeof users2.$inferInsert; Expect< Equal< { id: number; id2: number; }, User > >(); Expect< Equal< { id?: number | undefined; }, NewUser > >(); } const usersSeq = pgTable( 'users', { id: integer('id').generatedByDefaultAsIdentity(), id2: integer('id').generatedAlwaysAsIdentity(), }, ); { type User = typeof usersSeq.$inferSelect; type NewUser = typeof usersSeq.$inferInsert; Expect< Equal< { id: number; id2: number; }, User > >(); Expect< Equal< { id?: number | undefined; }, NewUser > >(); } ================================================ FILE: drizzle-orm/type-tests/pg/insert.ts ================================================ import type { QueryResult } from 'pg'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { boolean, pgTable, QueryBuilder, serial, text } from '~/pg-core/index.ts'; import type { PgInsert } from '~/pg-core/query-builders/insert.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { identityColumnsTable, users } from './tables.ts'; const insert = await db .insert(users) .values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', arrayCol: [''], }); Expect, typeof insert>>; const insertStmt = db .insert(users) .values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', arrayCol: [''], }) .prepare('insertStmt'); const insertPrepared = await insertStmt.execute(); Expect, typeof insertPrepared>>; const insertSql = await db.insert(users).values({ homeCity: sql`123`, class: 'A', age1: 1, enumCol: sql`foobar`, arrayCol: [''], }); Expect, typeof insertSql>>; const insertSqlStmt = db .insert(users) .values({ homeCity: sql`123`, class: 'A', age1: 1, enumCol: sql`foobar`, arrayCol: [''], }) .prepare('insertSqlStmt'); const insertSqlPrepared = await insertSqlStmt.execute(); Expect, typeof insertSqlPrepared>>; const insertReturning = await db .insert(users) .values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', arrayCol: [''], }) .returning(); Expect>; const insertReturningStmt = db .insert(users) .values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', arrayCol: [''], }) .returning() .prepare('insertReturningStmt'); const insertReturningPrepared = await insertReturningStmt.execute(); Expect>; const insertReturningPartial = await db .insert(users) .values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', arrayCol: [''], }) .returning({ id: users.id, homeCity: users.homeCity, mySubclass: users.subClass, }); Expect< Equal<{ id: number; homeCity: number; mySubclass: 'B' | 'D' | null; }[], typeof insertReturningPartial> >; const insertReturningPartialStmt = db .insert(users) .values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', arrayCol: [''], }) .returning({ id: users.id, homeCity: users.homeCity, mySubclass: users.subClass, }) .prepare('insertReturningPartialStmt'); const insertReturningPartialPrepared = await insertReturningPartialStmt.execute(); Expect< Equal<{ id: number; homeCity: number; mySubclass: 'B' | 'D' | null; }[], typeof insertReturningPartialPrepared> >; const insertReturningSql = await db .insert(users) .values({ homeCity: 1, class: 'A', age1: sql`2 + 2`, enumCol: 'a', arrayCol: [''], }) .returning({ id: users.id, homeCity: users.homeCity, subclassLower: sql`lower(${users.subClass})`, classLower: sql`lower(${users.class})`, }); Expect< Equal<{ id: number; homeCity: number; subclassLower: unknown; classLower: string; }[], typeof insertReturningSql> >; const insertReturningSqlStmt = db .insert(users) .values({ homeCity: 1, class: 'A', age1: sql`2 + 2`, enumCol: 'a', arrayCol: [''], }) .returning({ id: users.id, homeCity: users.homeCity, subclassLower: sql`lower(${users.subClass})`, classLower: sql`lower(${users.class})`, }) .prepare('insertReturningSqlStmt'); const insertReturningSqlPrepared = await insertReturningSqlStmt.execute(); Expect< Equal<{ id: number; homeCity: number; subclassLower: unknown; classLower: string; }[], typeof insertReturningSqlPrepared> >; { function dynamic(qb: T) { return qb.returning().onConflictDoNothing().onConflictDoUpdate({ set: {}, target: users.id, where: sql`` }); } const qbBase = db.insert(users).values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0, arrayCol: [] }).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { function withReturning(qb: T) { return qb.returning(); } const qbBase = db.insert(users).values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0, arrayCol: [] }).$dynamic(); const qb = withReturning(qbBase); const result = await qb; Expect>; } { db .insert(users) .values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0, arrayCol: [] }) .returning() // @ts-expect-error method was already called .returning(); } { const users1 = pgTable('users1', { id: serial('id').primaryKey(), name: text('name').notNull(), admin: boolean('admin').notNull().default(false), }); const users2 = pgTable('users2', { id: serial('id').primaryKey(), firstName: text('first_name').notNull(), lastName: text('last_name').notNull(), admin: boolean('admin').notNull().default(false), phoneNumber: text('phone_number'), }); const qb = new QueryBuilder(); db.insert(users1).select(sql`select * from users1`); db.insert(users1).select(() => sql`select * from users1`); db .insert(users1) .select( qb.select({ name: users2.firstName, admin: users2.admin, }).from(users2), ); db .insert(users1) .select( qb.select({ name: users2.firstName, admin: users2.admin, }).from(users2).where(sql``), ); db .insert(users2) .select( qb.select({ firstName: users2.firstName, lastName: users2.lastName, admin: users2.admin, }).from(users2), ); db .insert(users1) .select( qb.select({ name: sql`${users2.firstName} || ' ' || ${users2.lastName}`.as('name'), admin: users2.admin, }).from(users2), ); db .insert(users1) .select( // @ts-expect-error name is undefined qb.select({ admin: users1.admin }).from(users1), ); db.insert(users1).select(db.select().from(users1)); db.insert(users1).select(() => db.select().from(users1)); db.insert(users1).select((qb) => qb.select().from(users1)); // @ts-expect-error tables have different keys db.insert(users1).select(db.select().from(users2)); // @ts-expect-error tables have different keys db.insert(users1).select(() => db.select().from(users2)); } { db.insert(identityColumnsTable).values([ { byDefaultAsIdentity: 4, name: 'fdf' }, ]); // @ts-expect-error db.insert(identityColumnsTable).values([ { alwaysAsIdentity: 2 }, ]); db.insert(identityColumnsTable).overridingSystemValue().values([ { alwaysAsIdentity: 2 }, ]); // @ts-expect-error db.insert(identityColumnsTable).values([ { generatedCol: 2 }, ]); } ================================================ FILE: drizzle-orm/type-tests/pg/no-strict-null-checks/test.ts ================================================ import { drizzle } from '~/node-postgres'; import { integer, pgTable, text } from '~/pg-core'; export const test = pgTable( 'test', { id: text('id') .primaryKey() .generatedAlwaysAs('genstr'), intId: integer('int_id') .primaryKey() .generatedAlwaysAsIdentity(), int2Id: integer('int2_id').generatedByDefaultAsIdentity(), name: text('name').$defaultFn(() => '' as string), title: text('title').notNull(), description: text('description'), dbdef: text('dbdef').default('dbdefval'), }, ); const db = drizzle.mock(); db.update(test) .set({ // @ts-expect-error id: '1', name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.update(test) .set({ // @ts-expect-error intId: 1, name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.update(test) .set({ int2Id: 1, name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.update(test) .set({ name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ // @ts-expect-error id: '1', name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ // @ts-expect-error intId: 1, name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ int2Id: 1, name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ title: 'title', description: 'desc', }); db.insert(test).values({ title: 'title', }); ================================================ FILE: drizzle-orm/type-tests/pg/no-strict-null-checks/tsconfig.json ================================================ { "extends": "../../tsconfig.json", "compilerOptions": { "noEmit": true, "strictNullChecks": false, "strictPropertyInitialization": false, "exactOptionalPropertyTypes": false }, "include": ["./test.ts"] } ================================================ FILE: drizzle-orm/type-tests/pg/other.ts ================================================ import type { QueryResult } from 'pg'; import { eq, inArray } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { db } from './db.ts'; import { users } from './tables.ts'; const rawQuery = await db.execute( sql`select ${users.id}, ${users.class} from ${users} where ${inArray(users.id, [1, 2, 3])} and ${ eq(users.class, 'A') }`, ); Expect>, typeof rawQuery>>; ================================================ FILE: drizzle-orm/type-tests/pg/select.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { alias } from '~/pg-core/alias.ts'; import { boolean, integer, pgMaterializedView, type PgSelect, type PgSelectQueryBuilder, pgTable, pgView, QueryBuilder, text, } from '~/pg-core/index.ts'; import { and, arrayContained, arrayContains, arrayOverlaps, between, eq, exists, gt, gte, ilike, inArray, isNotNull, isNull, like, lt, lte, ne, not, notBetween, notExists, notIlike, notInArray, notLike, or, } from '~/sql/expressions/index.ts'; import { type InferSelectViewModel, type SQL, sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { cities, classes, newYorkers, newYorkers2, users } from './tables.ts'; const city = alias(cities, 'city'); const city1 = alias(cities, 'city1'); const leftJoinFull = await db.select().from(users).leftJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof cities.$inferSelect | null; }[], typeof leftJoinFull > >; const rightJoinFull = await db.select().from(users).rightJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect | null; city: typeof city.$inferSelect; }[], typeof rightJoinFull > >; const innerJoinFull = await db.select().from(users).innerJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof city.$inferSelect; }[], typeof innerJoinFull > >; const fullJoinFull = await db.select().from(users).fullJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect | null; city: typeof city.$inferSelect | null; }[], typeof fullJoinFull > >; const crossJoinFull = await db.select().from(users).crossJoin(city); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof city.$inferSelect; }[], typeof crossJoinFull > >; const leftJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .leftJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number; userText: string | null; cityId: number | null; cityName: string | null; }[], typeof leftJoinFlat> >; const rightJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .rightJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number | null; userText: string | null; cityId: number; cityName: string; }[], typeof rightJoinFlat> >; const innerJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .innerJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number; userText: string | null; cityId: number; cityName: string; }[], typeof innerJoinFlat> >; const fullJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .fullJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number | null; userText: string | null; cityId: number | null; cityName: string | null; }[], typeof fullJoinFlat> >; const crossJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .crossJoin(city); Expect< Equal<{ userId: number; userText: string | null; cityId: number; cityName: string; }[], typeof crossJoinFlat> >; const leftJoinMixed = await db .select({ id: users.id, text: users.text, textUpper: sql`upper(${users.text})`, idComplex: sql`${users.id}::text || ${city.id}::text`, city: { id: city.id, name: city.name, }, }) .from(users) .leftJoin(city, eq(users.id, city.id)); Expect< Equal< { id: number; text: string | null; textUpper: string | null; idComplex: string | null; city: { id: number; name: string; } | null; }[], typeof leftJoinMixed > >; const leftJoinMixed2 = await db .select({ id: users.id, text: users.text, foo: { bar: users.uuid, baz: cities.id, }, }) .from(users) .leftJoin(cities, eq(users.id, cities.id)); Expect< Equal< { id: number; text: string | null; foo: { bar: string; baz: number | null; }; }[], typeof leftJoinMixed2 > >; const join1 = await db .select({ user: { id: users.id, text: users.text, }, city: { id: city.id, name: city.name, nameUpper: sql`upper(${city.name})`, }, }) .from(users) .leftJoin(city, eq(users.id, city.id)); Expect< Equal<{ user: { id: number; text: string | null; }; city: { id: number; name: string; nameUpper: string; } | null; }[], typeof join1> >; const join = await db .select({ users, cities, city, city1: { id: city1.id, }, }) .from(users) .leftJoin(cities, eq(users.id, cities.id)) .rightJoin(city, eq(city.id, users.id)) .rightJoin(city1, eq(city1.id, users.id)); Expect< Equal< { users: { id: number; uuid: string; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; arrayCol: string[]; } | null; cities: { id: number; name: string; population: number | null; } | null; city: { id: number; name: string; population: number | null; } | null; city1: { id: number; }; }[], typeof join > >; const join2 = await db .select({ user: { id: users.id, }, city: { id: cities.id, }, }) .from(users) .fullJoin(cities, eq(users.id, cities.id)); Expect< Equal< { user: { id: number; } | null; city: { id: number; } | null; }[], typeof join2 > >; const join3 = await db .select({ user: { id: users.id, }, city: { id: cities.id, }, class: { id: classes.id, }, }) .from(users) .fullJoin(cities, eq(users.id, cities.id)) .rightJoin(classes, eq(users.id, classes.id)); Expect< Equal< { user: { id: number; } | null; city: { id: number; } | null; class: { id: number; }; }[], typeof join3 > >; db.select() .from(users) .where(exists(db.select().from(cities).where(eq(users.homeCity, cities.id)))); function mapFunkyFuncResult(valueFromDriver: unknown) { return { foo: (valueFromDriver as Record)['foo'], }; } const age = 1; const allOperators = await db .select({ col2: sql`5 - ${users.id} + 1`, // unknown col3: sql`${users.id} + 1`, // number col33: sql`${users.id} + 1`.mapWith(users.id), // number col34: sql`${users.id} + 1`.mapWith(mapFunkyFuncResult), // number col4: sql`one_or_another(${users.id}, ${users.class})`, // string | number col5: sql`true`, // unknown col6: sql`true`, // boolean col7: sql`random()`, // number col8: sql`some_funky_func(${users.id})`.mapWith(mapFunkyFuncResult), // { foo: string } col9: sql`greatest(${users.createdAt}, ${sql.param(new Date(), users.createdAt)})`, // unknown col10: sql`date_or_false(${users.createdAt}, ${ sql.param( new Date(), users.createdAt, ) })`, // Date | boolean col11: sql`${users.age1} + ${age}`, // unknown col12: sql`${users.age1} + ${sql.param(age, users.age1)}`, // unknown col13: sql`lower(${users.class})`, // unknown col14: sql`length(${users.class})`, // number count: sql`count(*)::int`, // number }) .from(users) .where( and( eq(users.id, 1), ne(users.id, 1), or(eq(users.id, 1), ne(users.id, 1)), not(eq(users.id, 1)), gt(users.id, 1), gte(users.id, 1), lt(users.id, 1), lte(users.id, 1), inArray(users.id, [1, 2, 3]), inArray(users.id, db.select({ id: users.id }).from(users)), inArray(users.id, sql`select id from ${users}`), notInArray(users.id, [1, 2, 3]), notInArray(users.id, db.select({ id: users.id }).from(users)), notInArray(users.id, sql`select id from ${users}`), isNull(users.subClass), isNotNull(users.id), exists(db.select({ id: users.id }).from(users)), exists(sql`select id from ${users}`), notExists(db.select({ id: users.id }).from(users)), notExists(sql`select id from ${users}`), between(users.id, 1, 2), notBetween(users.id, 1, 2), like(users.id, '%1%'), notLike(users.id, '%1%'), ilike(users.id, '%1%'), notIlike(users.id, '%1%'), arrayContains(users.arrayCol, ['abc']), arrayContains(users.arrayCol, db.select({ arrayCol: users.arrayCol }).from(users)), arrayContains(users.arrayCol, sql`select array_col from ${users}`), arrayContained(users.arrayCol, ['abc']), arrayContained(users.arrayCol, db.select({ arrayCol: users.arrayCol }).from(users)), arrayContained(users.arrayCol, sql`select array_col from ${users}`), arrayOverlaps(users.arrayCol, ['abc']), arrayOverlaps(users.arrayCol, db.select({ arrayCol: users.arrayCol }).from(users)), arrayOverlaps(users.arrayCol, sql`select array_col from ${users}`), ), ); Expect< Equal<{ col2: unknown; col3: number; col33: number; col34: { foo: any }; col4: string | number; col5: unknown; col6: boolean; col7: number; col8: { foo: any; }; col9: unknown; col10: boolean | Date; col11: unknown; col12: unknown; col13: unknown; col14: number; count: number; }[], typeof allOperators> >; const textSelect = await db .select({ t: users.text, }) .from(users); Expect>; const homeCity = alias(cities, 'homeCity'); const c = alias(classes, 'c'); const otherClass = alias(classes, 'otherClass'); const anotherClass = alias(classes, 'anotherClass'); const friend = alias(users, 'friend'); const currentCity = alias(cities, 'currentCity'); const subscriber = alias(users, 'subscriber'); const closestCity = alias(cities, 'closestCity'); const closestCity2 = alias(cities, 'closestCity2'); const closestCity3 = alias(cities, 'closestCity3'); const closestCity4 = alias(cities, 'closestCity4'); const closestCity5 = alias(cities, 'closestCity5'); const closestCity6 = alias(cities, 'closestCity6'); const closestCity7 = alias(cities, 'closestCity7'); const megaJoin = await db .select({ user: { id: users.id, maxAge: sql`max(${users.age1})`, }, city: { id: cities.id, }, homeCity, c, otherClass, anotherClass, friend, currentCity, subscriber, closestCity, }) .from(users) .innerJoin(cities, sql`${users.id} = ${cities.id}`) .innerJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) .innerJoin(c, eq(c.id, users.class)) .innerJoin(otherClass, sql`${c.id} = ${otherClass.id}`) .innerJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) .innerJoin(friend, sql`${users.id} = ${friend.id}`) .innerJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) .innerJoin(subscriber, sql`${users.class} = ${subscriber.id}`) .innerJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) .limit(1) .offset(1); Expect< Equal< { user: { id: number; maxAge: unknown; }; city: { id: number; }; homeCity: { id: number; name: string; population: number | null; }; c: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; otherClass: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; anotherClass: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; friend: { id: number; uuid: string; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; arrayCol: string[]; }; currentCity: { id: number; name: string; population: number | null; }; subscriber: { id: number; uuid: string; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; arrayCol: string[]; }; closestCity: { id: number; name: string; population: number | null; }; }[], typeof megaJoin > >; const megaLeftJoin = await db .select({ user: { id: users.id, maxAge: sql`max(${users.age1})`, }, city: { id: cities.id, }, homeCity, c, otherClass, anotherClass, friend, currentCity, subscriber, closestCity, closestCity2, closestCity3, closestCity4, closestCity5, closestCity6, closestCity7, }) .from(users) .leftJoin(cities, sql`${users.id} = ${cities.id}`) .leftJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) .leftJoin(c, eq(c.id, users.class)) .leftJoin(otherClass, sql`${c.id} = ${otherClass.id}`) .leftJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) .leftJoin(friend, sql`${users.id} = ${friend.id}`) .leftJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) .leftJoin(subscriber, sql`${users.class} = ${subscriber.id}`) .leftJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity2, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity3, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity4, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity5, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity6, sql`${users.currentCity} = ${closestCity.id}`) .leftJoin(closestCity7, sql`${users.currentCity} = ${closestCity.id}`) .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) .limit(1) .offset(1); Expect< Equal< { user: { id: number; maxAge: unknown; }; city: { id: number; } | null; homeCity: { id: number; name: string; population: number | null; } | null; c: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; } | null; otherClass: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; } | null; anotherClass: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; } | null; friend: { id: number; uuid: string; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; arrayCol: string[]; } | null; currentCity: { id: number; name: string; population: number | null; } | null; subscriber: { id: number; uuid: string; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; arrayCol: string[]; } | null; closestCity: { id: number; name: string; population: number | null; } | null; closestCity2: { id: number; name: string; population: number | null; } | null; closestCity3: { id: number; name: string; population: number | null; } | null; closestCity4: { id: number; name: string; population: number | null; } | null; closestCity5: { id: number; name: string; population: number | null; } | null; closestCity6: { id: number; name: string; population: number | null; } | null; closestCity7: { id: number; name: string; population: number | null; } | null; }[], typeof megaLeftJoin > >; await db .select({ user: { id: users.id, maxAge: sql`max(${users.age1})`, }, city: { id: cities.id, }, homeCity, c, otherClass, anotherClass, friend, currentCity, subscriber, closestCity, closestCity2, closestCity3, closestCity4, closestCity5, closestCity6, closestCity7, }) .from(users) .fullJoin(cities, sql`${users.id} = ${cities.id}`) .fullJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) .fullJoin(c, eq(c.id, users.class)) .fullJoin(otherClass, sql`${c.id} = ${otherClass.id}`) .fullJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) .fullJoin(friend, sql`${users.id} = ${friend.id}`) .fullJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) .fullJoin(subscriber, sql`${users.class} = ${subscriber.id}`) .fullJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity2, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity3, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity4, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity5, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity6, sql`${users.currentCity} = ${closestCity.id}`) .fullJoin(closestCity7, sql`${users.currentCity} = ${closestCity.id}`) .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) .limit(1) .offset(1); const friends = alias(users, 'friends'); const join4 = await db .select({ user: { id: users.id, }, city: { id: cities.id, }, class: classes, friend: friends, }) .from(users) .innerJoin(cities, sql`${users.id} = ${cities.id}`) .innerJoin(classes, sql`${cities.id} = ${classes.id}`) .innerJoin(friends, sql`${friends.id} = ${users.id}`) .where(sql`${users.age1} > 0`); Expect< Equal<{ user: { id: number; }; city: { id: number; }; class: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; friend: { id: number; uuid: string; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; arrayCol: string[]; }; }[], typeof join4> >; { const authenticated = false as boolean; const result = await db .select({ id: users.id, ...(authenticated ? { city: users.homeCity } : {}), }) .from(users); Expect< Equal< { id: number; city?: number; }[], typeof result > >; } await db .select() .from(users) .for('update'); await db .select() .from(users) .for('no key update', { of: users }); await db .select() .from(users) .for('no key update', { of: users, skipLocked: true }); await db .select() .from(users) .for('share', { of: users, noWait: true }); await db .select() .from(users) // @ts-expect-error - can't use both skipLocked and noWait .for('share', { of: users, noWait: true, skipLocked: true }); await db .select({ id: cities.id, name: sql`upper(${cities.name})`.as('name'), usersCount: sql`count(${users.id})`.as('users'), }) .from(cities) .leftJoin(users, eq(users.homeCity, cities.id)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(cities.id) .having(({ usersCount }) => sql`${usersCount} > 0`); { const result = await db.select().from(newYorkers); Expect< Equal< { userId: number; cityId: number | null; }[], typeof result > >; } { const result = await db.select({ userId: newYorkers.userId }).from(newYorkers); Expect< Equal< { userId: number; }[], typeof result > >; } { const result = await db.select().from(newYorkers2); Expect< Equal< { userId: number; cityId: number | null; }[], typeof result > >; } { const result = await db.select({ userId: newYorkers.userId }).from(newYorkers2); Expect< Equal< { userId: number; }[], typeof result > >; } { db .select() .from(users) .where(eq(users.id, 1)); db .select() .from(users) .where(eq(users.id, 1)) // @ts-expect-error - can't use where twice .where(eq(users.id, 1)); db .select() .from(users) .where(eq(users.id, 1)) .limit(10) // @ts-expect-error - can't use where twice .where(eq(users.id, 1)); } { function withFriends(qb: T) { const friends = alias(users, 'friends'); const friends2 = alias(users, 'friends2'); const friends3 = alias(users, 'friends3'); const friends4 = alias(users, 'friends4'); const friends5 = alias(users, 'friends5'); return qb .leftJoin(friends, sql`true`) .leftJoin(friends2, sql`true`) .leftJoin(friends3, sql`true`) .leftJoin(friends4, sql`true`) .leftJoin(friends5, sql`true`); } const qb = db.select().from(users).$dynamic(); const result = await withFriends(qb); Expect< Equal >; } { function withFriends(qb: T) { const friends = alias(users, 'friends'); const friends2 = alias(users, 'friends2'); const friends3 = alias(users, 'friends3'); const friends4 = alias(users, 'friends4'); const friends5 = alias(users, 'friends5'); return qb .leftJoin(friends, sql`true`) .leftJoin(friends2, sql`true`) .leftJoin(friends3, sql`true`) .leftJoin(friends4, sql`true`) .leftJoin(friends5, sql`true`); } const qb = db.select().from(users).$dynamic(); const result = await withFriends(qb); Expect< Equal >; } { function dynamic(qb: T) { return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); } const qb = db.select().from(users).$dynamic(); const result = await dynamic(qb); Expect>; } { // TODO: add to docs function dynamic(qb: T) { return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); } const query = new QueryBuilder().select().from(users).$dynamic(); dynamic(query); } { // TODO: add to docs function paginated(qb: T, page: number) { return qb.limit(10).offset((page - 1) * 10); } const qb = db.select().from(users).$dynamic(); const result = await paginated(qb, 1); Expect>; } { db .select() .from(users) .where(sql``) .limit(10) // @ts-expect-error method was already called .where(sql``); db .select() .from(users) .having(sql``) .limit(10) // @ts-expect-error method was already called .having(sql``); db .select() .from(users) .groupBy(sql``) .limit(10) // @ts-expect-error method was already called .groupBy(sql``); db .select() .from(users) .orderBy(sql``) .limit(10) // @ts-expect-error method was already called .orderBy(sql``); db .select() .from(users) .limit(10) .where(sql``) // @ts-expect-error method was already called .limit(10); db .select() .from(users) .offset(10) .limit(10) // @ts-expect-error method was already called .offset(10); db .select() .from(users) .for('update') .limit(10) // @ts-expect-error method was already called .for('update'); } { const users = pgTable('users', { developer: boolean('developer'), application: text('application', { enum: ['pending', 'approved'] }), }); const startIt = (whereCallback: (condition: SQL) => SQL | undefined = (c) => c) => { return db.select().from(users).where(whereCallback(eq(users.developer, true))); }; startIt((c) => and(c, eq(users.application, 'approved'))); } { const school = pgTable('school', { faculty: integer('faculty'), studentid: integer('studentid'), }); const student = pgTable('student', { id: integer('id'), email: text('email'), }); await db .select() .from(school) .where( and( eq(school.faculty, 2), eq( school.studentid, db.select({ id: student.id }).from(student).where(eq(student.email, 'foo@demo.com')), ), ), ); } { const table1 = pgTable('table1', { id: integer().primaryKey(), name: text().notNull(), }); const table2 = pgTable('table2', { id: integer().primaryKey(), age: integer().notNull(), }); const table3 = pgTable('table3', { id: integer().primaryKey(), phone: text().notNull(), }); const view = pgView('view').as((qb) => qb.select({ table: table1, column: table2.age, nested: { column: table3.phone, }, }).from(table1).innerJoin(table2, sql``).leftJoin(table3, sql``) ); const result = await db.select().from(view); Expect< Equal >; Expect>; Expect[]>>; } { const table1 = pgTable('table1', { id: integer().primaryKey(), name: text().notNull(), }); const table2 = pgTable('table2', { id: integer().primaryKey(), age: integer().notNull(), }); const table3 = pgTable('table3', { id: integer().primaryKey(), phone: text().notNull(), }); const view = pgMaterializedView('view').as((qb) => qb.select({ table: table1, column: table2.age, nested: { column: table3.phone, }, }).from(table1).innerJoin(table2, sql``).leftJoin(table3, sql``) ); const result = await db.select().from(view); Expect< Equal >; Expect>; Expect[]>>; } { const table1 = pgTable('table1', { id: integer().primaryKey(), name: text().notNull(), }); const table2 = pgTable('table2', { id: integer().primaryKey(), age: integer().notNull(), table1Id: integer().references(() => table1.id).notNull(), }); const view = pgView('view').as((qb) => qb.select().from(table2)); const leftLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).leftJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`, sql`true`); Expect< Equal >; const leftLateralSubRes = await db.select().from(table1).leftJoinLateral( db.select().from(table2).as('sub'), sql`true`, ); Expect< Equal >; const sqLeftLateral = db.select().from(table2).as('sub'); const leftLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqLeftLateral.id, }, ).from(table1).leftJoinLateral( sqLeftLateral, sql`true`, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .leftJoinLateral(table2, sql`true`); await db.select().from(table1) // @ts-expect-error .leftJoinLateral(view, sql`true`); const innerLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).innerJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`, sql`true`); Expect< Equal >; const innerLateralSubRes = await db.select().from(table1).innerJoinLateral( db.select().from(table2).as('sub'), sql`true`, ); Expect< Equal >; const sqInnerLateral = db.select().from(table2).as('sub'); const innerLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqLeftLateral.id, }, ).from(table1).innerJoinLateral( sqInnerLateral, sql`true`, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .innerJoinLateral(table2, sql`true`); await db.select().from(table1) // @ts-expect-error .innerJoinLateral(view, sql`true`); const crossLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).crossJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`); Expect< Equal >; const crossLateralSubRes = await db.select().from(table1).crossJoinLateral( db.select().from(table2).as('sub'), ); Expect< Equal >; const sqCrossLateral = db.select().from(table2).as('sub'); const crossLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqCrossLateral.id, }, ).from(table1).crossJoinLateral( sqInnerLateral, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .crossJoinLateral(table2); await db.select().from(table1) // @ts-expect-error .crossJoinLateral(view); } ================================================ FILE: drizzle-orm/type-tests/pg/set-operators.ts ================================================ import { type Equal, Expect } from 'type-tests/utils.ts'; import { except, exceptAll, intersect, intersectAll, type PgSetOperator, union, unionAll } from '~/pg-core/index.ts'; import { eq } from '~/sql/expressions/index.ts'; import { desc, sql } from '~/sql/index.ts'; import { db } from './db.ts'; import { cities, classes, newYorkers, users } from './tables.ts'; const unionTest = await db .select({ id: users.id }) .from(users) .union( db .select({ id: users.id }) .from(users), ); Expect>; const unionAllTest = await db .select({ id: users.id, age: users.age1 }) .from(users) .unionAll( db.select({ id: users.id, age: users.age1 }) .from(users) .leftJoin(cities, eq(users.id, cities.id)), ); Expect>; const intersectTest = await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) .intersect(({ intersect }) => intersect( db .select({ id: users.id, homeCity: users.homeCity }) .from(users), db .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) .from(users), ) ); Expect>; const intersectAllTest = await db .select({ id: users.id, homeCity: users.class }) .from(users) .intersect( db .select({ id: users.id, homeCity: users.class }) .from(users) .leftJoin(cities, eq(users.id, cities.id)), ); Expect>; const exceptTest = await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) .except( db .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) .from(users), ); Expect>; const exceptAllTest = await db .select({ id: users.id, homeCity: users.class }) .from(users) .except( db .select({ id: users.id, homeCity: sql<'A' | 'C'>`${users.class}` }) .from(users), ); Expect>; const union2Test = await union(db.select().from(cities), db.select().from(cities), db.select().from(cities)); Expect>; const unionAll2Test = await unionAll( db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select().from(cities), ); Expect>; const intersect2Test = await intersect( db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), ); Expect>; const intersectAll2Test = await intersectAll( union( db.select({ id: cities.id, }).from(cities), db.select({ id: cities.id, }) .from(cities).where(sql``), ), db.select({ id: cities.id, }) .from(cities), ).orderBy(desc(cities.id)).limit(23); Expect>; const except2Test = await except( db.select({ userId: newYorkers.userId, }) .from(newYorkers), db.select({ userId: newYorkers.userId, }).from(newYorkers), ); Expect>; const exceptAll2Test = await exceptAll( db.select({ userId: newYorkers.userId, cityId: newYorkers.cityId, }) .from(newYorkers).where(sql``), db.select({ userId: newYorkers.userId, cityId: newYorkers.cityId, }).from(newYorkers).leftJoin(users, sql``), ); Expect>; const unionfull = await union(db.select().from(users), db.select().from(users)).orderBy(sql``).limit(1).offset(2); Expect< Equal<{ id: number; uuid: string; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; arrayCol: string[]; }[], typeof unionfull> >; union(db.select().from(users), db.select().from(users)) .orderBy(sql``) // @ts-expect-error - method was already called .orderBy(sql``); union(db.select().from(users), db.select().from(users)) .offset(1) // @ts-expect-error - method was already called .offset(2); union(db.select().from(users), db.select().from(users)) .orderBy(sql``) // @ts-expect-error - method was already called .orderBy(sql``); { function dynamic(qb: T) { return qb.orderBy(sql``).limit(1).offset(2); } const qb = union(db.select().from(users), db.select().from(users)).$dynamic(); const result = await dynamic(qb); Expect>; } await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error .intersect(({ intersect }) => intersect(db.select().from(users), db.select().from(users))); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select().from(classes).union(db.select({ id: classes.id }).from(classes)); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: classes.id }).from(classes).union(db.select().from(classes).where(sql``)); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: classes.id }).from(classes).union(db.select().from(classes)); union( db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), db.select({ id: cities.id, name: cities.name }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select().from(cities), ); union( db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities), db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``).limit(3).$dynamic(), db.select({ id: cities.id, name: cities.name }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: newYorkers.userId }).from(newYorkers), db.select({ id: cities.id }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities).where(sql``), db.select({ id: sql`${cities.id}` }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities).where(sql``), ); ================================================ FILE: drizzle-orm/type-tests/pg/subquery.ts ================================================ import { Expect } from 'type-tests/utils.ts'; import { alias, integer, pgTable, serial, text } from '~/pg-core/index.ts'; import { and, eq } from '~/sql/expressions/index.ts'; import { count } from '~/sql/functions/aggregate.ts'; import { sql } from '~/sql/sql.ts'; import type { DrizzleTypeError, Equal } from '~/utils.ts'; import { db } from './db.ts'; const users = pgTable('names', { id: serial('id').primaryKey(), name: text('name'), managerId: integer('author_id'), }); const posts = pgTable('posts', { id: serial('id').primaryKey(), authorId: integer('author_id'), title: text('title'), }); const n1 = db .select({ id: users.id, name: users.name, authorId: users.managerId, count1: sql`count(1)::int`.as('count1'), }) .from(users) .groupBy(users.id, users.name, users.managerId) .as('n1'); const n2 = db .select({ id: users.id, authorId: users.managerId, totalCount: sql`count(1)::int`.as('totalCount'), }) .from(users) .groupBy(users.id, users.managerId) .as('n2'); const result = await db .select({ name: n1.name, authorId: n1.authorId, count1: n1.count1, totalCount: n2.totalCount, }) .from(n1) .innerJoin(n2, and(eq(n2.id, n1.id), eq(n2.authorId, n1.authorId))); Expect< Equal< { name: string | null; authorId: number | null; count1: number; totalCount: number; }[], typeof result > >; const names2 = alias(users, 'names2'); const sq1 = db .select({ id: users.id, name: users.name, id2: names2.id, }) .from(users) .leftJoin(names2, eq(users.name, names2.name)) .as('sq1'); const res = await db.select().from(sq1); Expect< Equal< { id: number; name: string | null; id2: number | null; }[], typeof res > >; { const sq = db.select({ count: sql`count(1)::int` }).from(users).as('sq'); Expect ? true : false>; } const sqUnion = db.select().from(users).union(db.select().from(names2)).as('sqUnion'); const resUnion = await db.select().from(sqUnion); Expect< Equal<{ id: number; name: string | null; managerId: number | null; }[], typeof resUnion> >; const fromSubquery = await db.select({ count: db.select({ count: count().as('c') }).from(posts).where(eq(posts.authorId, users.id)).as('count'), }).from(users); Expect>; const fromSubquery2 = await db.select({ name: db.select({ name: users.name }).from(users).where(eq(users.id, posts.authorId)).as('name'), }).from(posts); Expect>; const errorSubquery = await db.select({ name: db.select({ name: users.name, managerId: users.managerId }).from(users).where(eq(users.id, posts.authorId)).as( 'name', ), }).from(posts); Expect }[]>>; ================================================ FILE: drizzle-orm/type-tests/pg/tables-rel.ts ================================================ import { foreignKey, integer, pgTable, serial, text, timestamp } from '~/pg-core/index.ts'; import { relations } from '~/relations.ts'; export const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').references(() => cities.id).notNull(), homeCityId: integer('home_city_id').references(() => cities.id), createdAt: timestamp('created_at', { withTimezone: true }).notNull(), }); export const usersConfig = relations(users, ({ one, many }) => ({ city: one(cities, { relationName: 'UsersInCity', fields: [users.cityId], references: [cities.id] }), homeCity: one(cities, { fields: [users.homeCityId], references: [cities.id] }), posts: many(posts), comments: many(comments), })); export const cities = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), }); export const citiesConfig = relations(cities, ({ many }) => ({ users: many(users, { relationName: 'UsersInCity' }), })); export const posts = pgTable('posts', { id: serial('id').primaryKey(), title: text('title').notNull(), authorId: integer('author_id').references(() => users.id), }); export const postsConfig = relations(posts, ({ one, many }) => ({ author: one(users, { fields: [posts.authorId], references: [users.id] }), comments: many(comments), })); export const comments = pgTable('comments', { id: serial('id').primaryKey(), postId: integer('post_id').references(() => posts.id).notNull(), authorId: integer('author_id').references(() => users.id), text: text('text').notNull(), }); export const commentsConfig = relations(comments, ({ one }) => ({ post: one(posts, { fields: [comments.postId], references: [posts.id] }), author: one(users, { fields: [comments.authorId], references: [users.id] }), })); export const books = pgTable('books', { id: serial('id').primaryKey(), name: text('name').notNull(), }); export const booksConfig = relations(books, ({ many }) => ({ authors: many(bookAuthors), })); export const bookAuthors = pgTable('book_authors', { bookId: integer('book_id').references(() => books.id).notNull(), authorId: integer('author_id').references(() => users.id).notNull(), role: text('role').notNull(), }); export const bookAuthorsConfig = relations(bookAuthors, ({ one }) => ({ book: one(books, { fields: [bookAuthors.bookId], references: [books.id] }), author: one(users, { fields: [bookAuthors.authorId], references: [users.id] }), })); export const node = pgTable('node', { id: serial('id').primaryKey(), parentId: integer('parent_id'), leftId: integer('left_id'), rightId: integer('right_id'), }, (node) => ({ fk1: foreignKey({ columns: [node.parentId], foreignColumns: [node.id] }), fk2: foreignKey({ columns: [node.leftId], foreignColumns: [node.id] }), fk3: foreignKey({ columns: [node.rightId], foreignColumns: [node.id] }), })); export const nodeRelations = relations(node, ({ one }) => ({ parent: one(node, { fields: [node.parentId], references: [node.id] }), left: one(node, { fields: [node.leftId], references: [node.id] }), right: one(node, { fields: [node.rightId], references: [node.id] }), })); ================================================ FILE: drizzle-orm/type-tests/pg/tables.ts ================================================ import crypto from 'node:crypto'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { z } from 'zod'; import { bigint, bigserial, bit, boolean, char, check, cidr, customType, date, decimal, doublePrecision, foreignKey, geometry, halfvec, index, inet, integer, json, jsonb, line, macaddr, macaddr8, numeric, type PgColumn, pgEnum, pgTable, type PgTableWithColumns, point, primaryKey, real, serial, smallint, smallserial, sparsevec, text, time, timestamp, uniqueIndex, uuid, varchar, vector, } from '~/pg-core/index.ts'; import { pgSchema } from '~/pg-core/schema.ts'; import { pgMaterializedView, type PgMaterializedViewWithSelection, pgView, type PgViewWithSelection, } from '~/pg-core/view.ts'; import { eq, gt } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import type { InferInsertModel, InferSelectModel } from '~/table.ts'; import type { Simplify } from '~/utils.ts'; import { db } from './db.ts'; export const myEnum = pgEnum('my_enum', ['a', 'b', 'c']); export const identityColumnsTable = pgTable('identity_columns_table', { generatedCol: integer('generated_col').generatedAlwaysAs(1), alwaysAsIdentity: integer('always_as_identity').generatedAlwaysAsIdentity(), byDefaultAsIdentity: integer('by_default_as_identity').generatedByDefaultAsIdentity(), name: text('name'), }); Expect, typeof identityColumnsTable['$inferSelect']>>; Expect, typeof identityColumnsTable['_']['inferSelect']>>; Expect, typeof identityColumnsTable['$inferInsert']>>; Expect, typeof identityColumnsTable['_']['inferInsert']>>; Expect< Equal< InferInsertModel, Simplify > >; Expect< Equal< InferInsertModel, Simplify > >; export const users = pgTable( 'users_table', { id: serial('id').primaryKey(), uuid: uuid('uuid').defaultRandom().notNull(), homeCity: integer('home_city') .notNull() .references(() => cities.id), currentCity: integer('current_city').references(() => cities.id), serialNullable: serial('serial1'), serialNotNull: serial('serial2').notNull(), class: text('class', { enum: ['A', 'C'] }).notNull(), subClass: text('sub_class', { enum: ['B', 'D'] }), text: text('text'), age1: integer('age1').notNull(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), enumCol: myEnum('enum_col').notNull(), arrayCol: text('array_col').array().notNull(), }, (users) => [ uniqueIndex('usersAge1Idx').on(users.class.asc().nullsFirst(), sql``), index('usersAge2Idx').on(sql``), uniqueIndex('uniqueClass') .using('btree', users.class.desc().op('text_ops'), users.subClass.nullsLast()) .where(sql`${users.class} is not null`) .concurrently(), check('legalAge', sql`${users.age1} > 18`), foreignKey({ columns: [users.subClass], foreignColumns: [classes.subClass] }) .onUpdate('cascade') .onDelete('cascade'), foreignKey({ columns: [users.class, users.subClass], foreignColumns: [classes.class, classes.subClass], }), primaryKey(users.age1, users.class), ], ); Expect, typeof users['$inferSelect']>>; Expect, typeof users['_']['inferSelect']>>; Expect, typeof users['$inferInsert']>>; Expect, typeof users['_']['inferInsert']>>; export const cities = pgTable('cities_table', { id: serial('id').primaryKey(), name: text('name').notNull(), population: integer('population').default(0), }, (cities) => ({ citiesNameIdx: index().on(cities.id), })); export const smallSerialTest = pgTable('cities_table', { id: smallserial('id').primaryKey(), name: text('name').notNull(), population: integer('population').default(0), }); Expect< Equal<{ id?: number; name: string; population?: number | null; }, typeof smallSerialTest.$inferInsert> >; export const classes = pgTable('classes_table', { id: serial('id').primaryKey(), class: text('class', { enum: ['A', 'C'] }), subClass: text('sub_class', { enum: ['B', 'D'] }).notNull(), }); Expect< Equal<{ id?: number; class?: 'A' | 'C' | null; subClass: 'B' | 'D'; }, typeof classes.$inferInsert> >; export const network = pgTable('network_table', { inet: inet('inet').notNull(), cidr: cidr('cidr').notNull(), macaddr: macaddr('macaddr').notNull(), macaddr8: macaddr8('macaddr8').notNull(), }); Expect< Equal<{ inet: string; cidr: string; macaddr: string; macaddr8: string; }, typeof network.$inferSelect> >; export const salEmp = pgTable('sal_emp', { name: text('name').notNull(), payByQuarter: integer('pay_by_quarter').array().notNull(), schedule: text('schedule').array().array().notNull(), }); export const tictactoe = pgTable('tictactoe', { squares: integer('squares').array(3).array(3).notNull(), }); export const customSchema = pgSchema('custom'); export const citiesCustom = customSchema.table('cities_table', { id: serial('id').primaryKey(), name: text('name').notNull(), population: integer('population').default(0), }, (cities) => [index().on(cities.id)]); export const newYorkers = pgView('new_yorkers') .with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true, }) .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); Expect< Equal< PgViewWithSelection<'new_yorkers', false, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'id'; dataType: 'number'; columnType: 'PgSerial'; data: number; driverParam: number; notNull: true; hasDefault: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'id'; dataType: 'number'; columnType: 'PgSerial'; data: number; driverParam: number; notNull: false; hasDefault: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; { const newYorkers = customSchema.view('new_yorkers') .with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true, }) .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); Expect< Equal< PgViewWithSelection<'new_yorkers', false, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'id'; dataType: 'number'; columnType: 'PgSerial'; data: number; driverParam: number; notNull: true; hasDefault: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'id'; dataType: 'number'; columnType: 'PgSerial'; data: number; driverParam: number; notNull: false; hasDefault: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = pgView('new_yorkers', { userId: integer('user_id').notNull(), cityId: integer('city_id'), }) .with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true, }) .as( sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ eq(cities.id, users.homeCity) } where ${gt(users.age1, 18)}`, ); Expect< Equal< PgViewWithSelection<'new_yorkers', false, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'user_id'; dataType: 'number'; columnType: 'PgInteger'; data: number; driverParam: string | number; hasDefault: false; notNull: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'city_id'; dataType: 'number'; columnType: 'PgInteger'; notNull: false; hasDefault: false; data: number; driverParam: string | number; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = customSchema.view('new_yorkers', { userId: integer('user_id').notNull(), cityId: integer('city_id'), }) .with({ checkOption: 'cascaded', securityBarrier: true, securityInvoker: true, }) .as( sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ eq(cities.id, users.homeCity) } where ${gt(users.age1, 18)}`, ); Expect< Equal< PgViewWithSelection<'new_yorkers', false, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'user_id'; dataType: 'number'; columnType: 'PgInteger'; data: number; driverParam: string | number; hasDefault: false; notNull: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'city_id'; dataType: 'number'; columnType: 'PgInteger'; notNull: false; hasDefault: false; data: number; driverParam: string | number; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = pgView('new_yorkers', { userId: integer('user_id').notNull(), cityId: integer('city_id'), }).existing(); Expect< Equal< PgViewWithSelection<'new_yorkers', true, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'user_id'; dataType: 'number'; columnType: 'PgInteger'; data: number; driverParam: string | number; hasDefault: false; notNull: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'city_id'; dataType: 'number'; columnType: 'PgInteger'; notNull: false; hasDefault: false; data: number; driverParam: string | number; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = customSchema.view('new_yorkers', { userId: integer('user_id').notNull(), cityId: integer('city_id'), }).existing(); Expect< Equal< PgViewWithSelection<'new_yorkers', true, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'user_id'; dataType: 'number'; columnType: 'PgInteger'; data: number; driverParam: string | number; hasDefault: false; notNull: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'city_id'; dataType: 'number'; columnType: 'PgInteger'; notNull: false; hasDefault: false; data: number; driverParam: string | number; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } export const newYorkers2 = pgMaterializedView('new_yorkers') .using('btree') .with({ fillfactor: 90, toastTupleTarget: 0.5, autovacuumEnabled: true, }) .tablespace('custom_tablespace') .withNoData() .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); Expect< Equal< PgMaterializedViewWithSelection<'new_yorkers', false, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'id'; dataType: 'number'; columnType: 'PgSerial'; data: number; driverParam: number; notNull: true; hasDefault: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'id'; dataType: 'number'; columnType: 'PgSerial'; data: number; driverParam: number; notNull: false; hasDefault: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers2 > >; { const newYorkers2 = customSchema.materializedView('new_yorkers') .using('btree') .with({ fillfactor: 90, toastTupleTarget: 0.5, autovacuumEnabled: true, }) .tablespace('custom_tablespace') .withNoData() .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); Expect< Equal< PgMaterializedViewWithSelection<'new_yorkers', false, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'id'; dataType: 'number'; columnType: 'PgSerial'; data: number; driverParam: number; notNull: true; hasDefault: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'id'; dataType: 'number'; columnType: 'PgSerial'; data: number; driverParam: number; notNull: false; hasDefault: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers2 > >; } { const newYorkers2 = pgMaterializedView('new_yorkers', { userId: integer('user_id').notNull(), cityId: integer('city_id'), }) .using('btree') .with({ fillfactor: 90, toastTupleTarget: 0.5, autovacuumEnabled: true, }) .tablespace('custom_tablespace') .withNoData() .as( sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ eq(cities.id, users.homeCity) } where ${gt(users.age1, 18)}`, ); Expect< Equal< PgMaterializedViewWithSelection<'new_yorkers', false, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'user_id'; dataType: 'number'; columnType: 'PgInteger'; data: number; driverParam: string | number; hasDefault: false; notNull: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'city_id'; dataType: 'number'; columnType: 'PgInteger'; notNull: false; hasDefault: false; data: number; driverParam: string | number; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers2 > >; } { const newYorkers2 = customSchema.materializedView('new_yorkers', { userId: integer('user_id').notNull(), cityId: integer('city_id'), }) .using('btree') .with({ fillfactor: 90, toastTupleTarget: 0.5, autovacuumEnabled: true, }) .tablespace('custom_tablespace') .withNoData() .as( sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ eq(cities.id, users.homeCity) } where ${gt(users.age1, 18)}`, ); Expect< Equal< PgMaterializedViewWithSelection<'new_yorkers', false, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'user_id'; dataType: 'number'; columnType: 'PgInteger'; data: number; driverParam: string | number; hasDefault: false; notNull: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'city_id'; dataType: 'number'; columnType: 'PgInteger'; notNull: false; hasDefault: false; data: number; driverParam: string | number; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers2 > >; } { const newYorkers2 = pgMaterializedView('new_yorkers', { userId: integer('user_id').notNull(), cityId: integer('city_id'), }).existing(); Expect< Equal< PgMaterializedViewWithSelection<'new_yorkers', true, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'user_id'; dataType: 'number'; columnType: 'PgInteger'; data: number; driverParam: string | number; hasDefault: false; notNull: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'city_id'; dataType: 'number'; columnType: 'PgInteger'; notNull: false; hasDefault: false; data: number; driverParam: string | number; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers2 > >; } { const newYorkers2 = customSchema.materializedView('new_yorkers', { userId: integer('user_id').notNull(), cityId: integer('city_id'), }).existing(); Expect< Equal< PgMaterializedViewWithSelection<'new_yorkers', true, { userId: PgColumn<{ tableName: 'new_yorkers'; name: 'user_id'; dataType: 'number'; columnType: 'PgInteger'; data: number; driverParam: string | number; hasDefault: false; notNull: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: PgColumn<{ tableName: 'new_yorkers'; name: 'city_id'; dataType: 'number'; columnType: 'PgInteger'; notNull: false; hasDefault: false; data: number; driverParam: string | number; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers2 > >; } await db.refreshMaterializedView(newYorkers2).concurrently(); await db.refreshMaterializedView(newYorkers2).withNoData(); await db.refreshMaterializedView(newYorkers2).concurrently().withNoData(); await db.refreshMaterializedView(newYorkers2).withNoData().concurrently(); // await migrate(db, { // migrationsFolder: './drizzle/pg', // onMigrationError(error) { // if (['0001_drizli_klaud', '0002_beep_boop'].includes(error.migration.name)) { // return; // } // throw error; // }, // }); { const customTextRequired = customType<{ data: string; driverData: string; config: { length: number }; configRequired: true; }>({ dataType(config) { Expect>; return `varchar(${config.length})`; }, toDriver(value) { Expect>(); return value; }, fromDriver(value) { Expect>(); return value; }, }); customTextRequired('t', { length: 10 }); customTextRequired({ length: 10 }); // @ts-expect-error - config is required customTextRequired('t'); // @ts-expect-error - config is required customTextRequired(); } { const customTextOptional = customType<{ data: string; driverData: string; config: { length: number }; }>({ dataType(config) { Expect>; return config ? `varchar(${config.length})` : `text`; }, toDriver(value) { Expect>(); return value; }, fromDriver(value) { Expect>(); return value; }, }); customTextOptional('t', { length: 10 }); customTextOptional('t'); customTextOptional({ length: 10 }); customTextOptional(); } { const cities1 = pgTable('cities_table', { id: serial('id').primaryKey(), name: text('name').notNull().primaryKey(), role: text('role', { enum: ['admin', 'user'] }).default('user').notNull(), population: integer('population').default(0), }); const cities2 = pgTable('cities_table', ({ serial, text, integer }) => ({ id: serial('id').primaryKey(), name: text('name').notNull().primaryKey(), role: text('role', { enum: ['admin', 'user'] }).default('user').notNull(), population: integer('population').default(0), })); type Expected = PgTableWithColumns<{ name: 'cities_table'; schema: undefined; dialect: 'pg'; columns: { id: PgColumn<{ tableName: 'cities_table'; name: 'id'; dataType: 'number'; columnType: 'PgSerial'; data: number; driverParam: number; hasDefault: true; notNull: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; name: PgColumn<{ tableName: 'cities_table'; name: 'name'; dataType: 'string'; columnType: 'PgText'; data: string; driverParam: string; hasDefault: false; enumValues: [string, ...string[]]; notNull: true; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: true; isAutoincrement: false; hasRuntimeDefault: false; }>; role: PgColumn<{ tableName: 'cities_table'; name: 'role'; dataType: 'string'; columnType: 'PgText'; data: 'admin' | 'user'; driverParam: string; hasDefault: true; enumValues: ['admin', 'user']; notNull: true; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; population: PgColumn<{ tableName: 'cities_table'; name: 'population'; dataType: 'number'; columnType: 'PgInteger'; data: number; driverParam: string | number; notNull: false; hasDefault: true; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }; }>; Expect>; Expect>; } { pgTable('test', { bigint: bigint('bigint', { mode: 'bigint' }).default(BigInt(10)), bigintNumber: bigint('bigintNumber', { mode: 'number' }), bigserial: bigserial('bigserial', { mode: 'bigint' }).default(BigInt(10)), bigserialNumber: bigserial('bigserialNumber', { mode: 'number' }), timestamp: timestamp('timestamp').default(new Date()), timestamp2: timestamp('timestamp2', { mode: 'date' }).default(new Date()), timestamp3: timestamp('timestamp3', { mode: undefined }).default(new Date()), timestamp4: timestamp('timestamp4', { mode: 'string' }).default('2020-01-01'), }); } { const test = pgTable('test', { col1: decimal('col1', { precision: 10, scale: 2 }).notNull().default('10.2'), }); Expect>; } { const a = ['a', 'b', 'c'] as const; const b = pgEnum('test', a); z.enum(b.enumValues); } { const b = pgEnum('test', ['a', 'b', 'c']); z.enum(b.enumValues); } { const getUsersTable = (schemaName: TSchema) => { return pgSchema(schemaName).table('users', { id: integer('id').primaryKey(), name: text('name').notNull(), }); }; const users1 = getUsersTable('id1'); Expect>; const users2 = getUsersTable('id2'); Expect>; } { const internalStaff = pgTable('internal_staff', { userId: integer('user_id').notNull(), }); const customUser = pgTable('custom_user', { id: integer('id').notNull(), }); const ticket = pgTable('ticket', { staffId: integer('staff_id').notNull(), }); const subq = db .select() .from(internalStaff) .leftJoin( customUser, eq(internalStaff.userId, customUser.id), ).as('internal_staff'); const mainQuery = await db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); Expect< Equal<{ internal_staff: { internal_staff: { userId: number; }; custom_user: { id: number | null; }; } | null; ticket: { staffId: number; }; }[], typeof mainQuery> >; } { const newYorkers = pgView('new_yorkers') .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); await db.select().from(newYorkers).leftJoin(newYorkers, eq(newYorkers.userId, newYorkers.userId)); } { const testSchema = pgSchema('test'); const e1 = pgEnum('test', ['a', 'b', 'c']); const e2 = pgEnum('test', ['a', 'b', 'c'] as const); const e3 = testSchema.enum('test', ['a', 'b', 'c']); const e4 = testSchema.enum('test', ['a', 'b', 'c'] as const); const test = pgTable('test', { col1: char('col1', { enum: ['a', 'b', 'c'] as const }), col2: char('col2', { enum: ['a', 'b', 'c'] }), col3: char('col3'), col4: e1('col4'), col5: e2('col5'), col6: text('col6', { enum: ['a', 'b', 'c'] as const }), col7: text('col7', { enum: ['a', 'b', 'c'] }), col8: text('col8'), col9: varchar('col9', { enum: ['a', 'b', 'c'] as const }), col10: varchar('col10', { enum: ['a', 'b', 'c'] }), col11: varchar('col11'), col12: e3('col4'), col13: e4('col5'), }); Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; } { const testSchema = pgSchema('test'); const e1 = pgEnum('test', ['a', 'b', 'c']); const e2 = pgEnum('test', ['a', 'b', 'c'] as const); const e3 = testSchema.enum('test', ['a', 'b', 'c']); const e4 = testSchema.enum('test', ['a', 'b', 'c'] as const); const test = pgTable('test', { col1: char('col1', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), col2: char('col2', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), col3: char('col3').generatedAlwaysAs(sql``), col4: e1('col4').generatedAlwaysAs(sql``), col5: e2('col5').generatedAlwaysAs(sql``), col6: text('col6', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), col7: text('col7', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), col8: text('col8').generatedAlwaysAs(sql``), col9: varchar('col9', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), col10: varchar('col10', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), col11: varchar('col11').generatedAlwaysAs(sql``), col12: e3('col4').generatedAlwaysAs(sql``), col13: e4('col5').generatedAlwaysAs(sql``), }); Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; } { const test = pgTable('test', { id: text('id').$defaultFn(() => crypto.randomUUID()).primaryKey(), }); Expect< Equal<{ id?: string; }, typeof test.$inferInsert> >; } { pgTable('test', { id: integer('id').$default(() => 1), id2: integer('id').$defaultFn(() => 1), // @ts-expect-error - should be number id3: integer('id').$default(() => '1'), // @ts-expect-error - should be number id4: integer('id').$defaultFn(() => '1'), }); } { const enum_ = pgEnum('enum', ['a', 'b', 'c']); pgTable('all_columns', { enum: enum_('enum'), enumdef: enum_('enumdef').default('a'), sm: smallint('smallint'), smdef: smallint('smallint_def').default(10), int: integer('integer'), intdef: integer('integer_def').default(10), numeric: numeric('numeric'), numeric2: numeric('numeric2', { precision: 5 }), numeric3: numeric('numeric3', { scale: 2 }), numeric4: numeric('numeric4', { precision: 5, scale: 2 }), numericdef: numeric('numeridef').default('100'), bigint: bigint('bigint', { mode: 'number' }), bigintdef: bigint('bigintdef', { mode: 'number' }).default(100), bool: boolean('boolean'), booldef: boolean('boolean_def').default(true), text: text('text'), textdef: text('textdef').default('text'), varchar: varchar('varchar'), varchardef: varchar('varchardef').default('text'), serial: serial('serial'), bigserial: bigserial('bigserial', { mode: 'number' }), decimal: decimal('decimal', { precision: 100, scale: 2 }), decimaldef: decimal('decimaldef', { precision: 100, scale: 2 }).default('100.0'), doublePrecision: doublePrecision('doublePrecision'), doublePrecisiondef: doublePrecision('doublePrecisiondef').default(100), real: real('real'), realdef: real('realdef').default(100), json: json('json').$type<{ attr: string }>(), jsondef: json('jsondef').$type<{ attr: string }>().default({ attr: 'value' }), jsonb: jsonb('jsonb').$type<{ attr: string }>(), jsonbdef: jsonb('jsonbdef').$type<{ attr: string }>().default({ attr: 'value' }), time: time('time'), time2: time('time2', { precision: 6, withTimezone: true }), timedef: time('timedef').default('00:00:00'), timedefnow: time('timedefnow').defaultNow(), timestamp: timestamp('timestamp'), timestamp2: timestamp('timestamp2', { precision: 6, withTimezone: true }), timestamp3: timestamp('timestamp3', { withTimezone: true }), timestamp4: timestamp('timestamp4', { precision: 4 }), timestampdef: timestamp('timestampdef').default(new Date()), date: date('date', { mode: 'date' }), datedef: date('datedef').default('2024-01-01'), datedefnow: date('datedefnow').defaultNow(), }); pgTable('all_postgis_columns', { geometry: geometry('geometry'), geometry2: geometry('geometry2', { srid: 2, mode: 'xy' }), geometry3: geometry('geometry3', { srid: 3, mode: 'tuple' }), geometry4: geometry('geometry4', { mode: 'tuple' }), geometrydef: geometry('geometrydef').default([1, 2]), point: point('point'), point2: point('point2', { mode: 'xy' }), pointdef: point('pointdef').default([1, 2]), line: line('line'), line2: line('line2', { mode: 'abc' }), linedef: line('linedef').default([1, 2, 3]), }); pgTable('all_vector_columns', { bit: bit('bit', { dimensions: 1 }), bitdef: bit('bitdef', { dimensions: 1 }).default('1'), halfvec: halfvec('halfvec', { dimensions: 1 }), halfvecdef: halfvec('halfvecdef', { dimensions: 1 }).default([1]), sparsevec: sparsevec('sparsevec', { dimensions: 1 }), sparsevecdef: sparsevec('sparsevecdef', { dimensions: 1 }).default('{1:1}/1'), vector: vector('vector', { dimensions: 1 }), vectordef: vector('vectordef', { dimensions: 1 }).default([1]), }); } { const keysAsColumnNames = pgTable('test', { id: serial(), name: text(), }); Expect>; Expect>; } { const enum_ = pgEnum('enum', ['a', 'b', 'c']); pgTable('all_columns_without_name', { enum: enum_(), enumdef: enum_().default('a'), sm: smallint(), smdef: smallint().default(10), int: integer(), intdef: integer().default(10), numeric: numeric(), numeric2: numeric({ precision: 5 }), numeric3: numeric({ scale: 2 }), numeric4: numeric({ precision: 5, scale: 2 }), numericdef: numeric().default('100'), bigint: bigint({ mode: 'number' }), bigintdef: bigint({ mode: 'number' }).default(100), bool: boolean(), booldef: boolean().default(true), text: text(), textdef: text().default('text'), varchar: varchar(), varchardef: varchar().default('text'), serial: serial(), bigserial: bigserial({ mode: 'number' }), decimal: decimal({ precision: 100, scale: 2 }), decimaldef: decimal({ precision: 100, scale: 2 }).default('100.0'), doublePrecision: doublePrecision(), doublePrecisiondef: doublePrecision().default(100), real: real(), realdef: real().default(100), json: json().$type<{ attr: string }>(), jsondef: json().$type<{ attr: string }>().default({ attr: 'value' }), jsonb: jsonb().$type<{ attr: string }>(), jsonbdef: jsonb().$type<{ attr: string }>().default({ attr: 'value' }), time: time(), time2: time({ precision: 6, withTimezone: true }), timedef: time().default('00:00:00'), timedefnow: time().defaultNow(), timestamp: timestamp(), timestamp2: timestamp({ precision: 6, withTimezone: true }), timestamp3: timestamp({ withTimezone: true }), timestamp4: timestamp({ precision: 4 }), timestampdef: timestamp().default(new Date()), date: date({ mode: 'date' }), datedef: date().default('2024-01-01'), datedefnow: date().defaultNow(), }); pgTable('all_postgis_columns', { geometry: geometry(), geometry2: geometry({ srid: 2, mode: 'xy' }), geometry3: geometry({ srid: 3, mode: 'tuple' }), geometry4: geometry({ mode: 'tuple' }), geometrydef: geometry().default([1, 2]), point: point(), point2: point({ mode: 'xy' }), pointdef: point().default([1, 2]), line: line(), line2: line({ mode: 'abc' }), linedef: line().default([1, 2, 3]), }); pgTable('all_vector_columns', { bit: bit({ dimensions: 1 }), bitdef: bit({ dimensions: 1 }).default('1'), halfvec: halfvec({ dimensions: 1 }), halfvecdef: halfvec({ dimensions: 1 }).default([1]), sparsevec: sparsevec({ dimensions: 1 }), sparsevecdef: sparsevec({ dimensions: 1 }).default('{1:1}/1'), vector: vector({ dimensions: 1 }), vectordef: vector({ dimensions: 1 }).default([1]), }); } // ts enums test { enum Role { admin = 'admin', user = 'user', guest = 'guest', } const role = pgEnum('role', Role); enum RoleNonString { admin, user, guest, } // @ts-expect-error pgEnum('role', RoleNonString); enum RolePartiallyString { admin, user = 'user', guest = 'guest', } // @ts-expect-error pgEnum('role', RolePartiallyString); const table = pgTable('table', { enum: role('enum'), }); const res = await db.select().from(table); Expect>; const mySchema = pgSchema('my_schema'); const schemaRole = mySchema.enum('role', Role); // @ts-expect-error mySchema.enum('role', RoleNonString); // @ts-expect-error mySchema.enum('role', RolePartiallyString); const schemaTable = mySchema.table('table', { enum: schemaRole('enum'), }); const schemaRes = await db.select().from(schemaTable); Expect>; } ================================================ FILE: drizzle-orm/type-tests/pg/update.ts ================================================ import type { QueryResult } from 'pg'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import type { PgUpdate } from '~/pg-core/index.ts'; import { eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import type { Simplify } from '~/utils.ts'; import { db } from './db.ts'; import { cities, salEmp, users } from './tables.ts'; const update = await db.update(users) .set({ text: 'John', age1: 30, }) .where(eq(users.id, 1)); Expect, typeof update>>; const updateStmt = db.update(users) .set({ text: 'John', age1: 30, }) .where(eq(users.id, 1)) .prepare('updateStmt'); const updatePrepared = await updateStmt.execute(); Expect, typeof updatePrepared>>; const updateReturning = await db.update(users) .set({ text: 'John', age1: 30, }) .where(eq(users.id, 1)) .returning({ text: users.text, }); Expect>; const updateReturningStmt = db.update(users) .set({ text: 'John', age1: 30, }) .where(eq(users.id, 1)) .returning({ text: users.text, }) .prepare('updateReturningStmt'); const updateReturningPrepared = await updateReturningStmt.execute(); Expect>; { function dynamic(qb: T) { return qb.where(sql``).returning(); } const qbBase = db.update(users).set({}).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { function withReturning(qb: T) { return qb.returning(); } const qbBase = db.update(users).set({}).$dynamic(); const qb = withReturning(qbBase); const result = await qb; Expect>; } { db .update(users) .set({}) .returning() // @ts-expect-error method was already called .returning(); db .update(users) .set({}) .where(sql``) // @ts-expect-error method was already called .where(sql``); } { db .update(users) .set({}) .from(sql``) .leftJoin(sql``, (table, from) => { Expect>; Expect>; return sql``; }); db .update(users) .set({}) .from(cities) .leftJoin(sql``, (table, from) => { Expect>; Expect>; return sql``; }); const citiesSq = db.$with('cities_sq').as(db.select({ id: cities.id }).from(cities)); db .with(citiesSq) .update(users) .set({}) .from(citiesSq) .leftJoin(sql``, (table, from) => { Expect>; Expect>; return sql``; }); db .with(citiesSq) .update(users) .set({ homeCity: citiesSq.id, }) .from(citiesSq); } { const result = await db.update(users).set({}).from(cities).returning(); Expect< Equal[], typeof result> >; } { const result1 = await db.update(users).set({}).from(cities).leftJoin(salEmp, sql``).returning(); Expect< Equal[], typeof result1> >; const result2 = await db.update(users).set({}).from(cities).rightJoin(salEmp, sql``).returning(); Expect< Equal[], typeof result2> >; const result3 = await db.update(users).set({}).from(cities).innerJoin(salEmp, sql``).returning(); Expect< Equal[], typeof result3> >; const result4 = await db.update(users).set({}).from(cities).fullJoin(salEmp, sql``).returning(); Expect< Equal[], typeof result4> >; } { const result = await db.update(users).set({}).from(cities).returning({ id: users.id, cities: cities, cityName: cities.name, }); Expect< Equal[], typeof result> >; } { const result1 = await db.update(users).set({}).from(cities).leftJoin(salEmp, sql``).returning({ id: users.id, cities: cities, cityName: cities.name, salEmp: salEmp, salEmpName: salEmp.name, }); Expect< Equal[], typeof result1> >; const result2 = await db.update(users).set({}).from(cities).rightJoin(salEmp, sql``).returning({ id: users.id, cities: cities, cityName: cities.name, salEmp: salEmp, salEmpName: salEmp.name, }); Expect< Equal[], typeof result2> >; const result3 = await db.update(users).set({}).from(cities).innerJoin(salEmp, sql``).returning({ id: users.id, cities: cities, cityName: cities.name, salEmp: salEmp, salEmpName: salEmp.name, }); Expect< Equal[], typeof result3> >; const result4 = await db.update(users).set({}).from(cities).fullJoin(salEmp, sql``).returning({ id: users.id, cities: cities, cityName: cities.name, salEmp: salEmp, salEmpName: salEmp.name, }); Expect< Equal[], typeof result4> >; } { await db .update(users) .set({}) // @ts-expect-error can't use joins before from .fullJoin(salEmp, sql``); } ================================================ FILE: drizzle-orm/type-tests/pg/with.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { integer, pgTable, serial, text } from '~/pg-core/index.ts'; import { gt, inArray, like } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { DrizzleTypeError } from '~/utils.ts'; import { db } from './db.ts'; { const orders = pgTable('orders', { id: serial('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull(), amount: integer('amount').notNull(), quantity: integer('quantity').notNull(), generated: text('generatedText').generatedAlwaysAs(sql``), }); const regionalSales = db .$with('regional_sales') .as((qb) => qb .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region) ); const topRegions = db .$with('top_regions') .as((qb) => qb .select({ region: orders.region, totalSales: orders.amount, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ) ); const result = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`sum(${orders.quantity})`, productSales: sql`sum(${orders.amount})`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))); Expect< Equal<{ region: string; product: string; productUnits: number; productSales: number; }[], typeof result> >; const allOrdersWith = db.$with('all_orders_with').as(db.select().from(orders)); const allFromWith = await db.with(allOrdersWith).select().from(allOrdersWith); Expect< Equal<{ id: number; region: string; product: string; amount: number; quantity: number; generated: string | null; }[], typeof allFromWith> >; const regionalSalesWith = db.$with('regional_sales_with').as(db.select().from(regionalSales)); db.with(regionalSalesWith).select().from(regionalSalesWith).where(like(regionalSalesWith.totalSales, 'abc')); } { const providers = pgTable('providers', { id: serial().primaryKey(), providerName: text().notNull(), }); const products = pgTable('products', { id: serial().primaryKey(), productName: text().notNull(), }); const sq1 = db.$with('inserted_products').as( db.insert(products).values({ productName: sql`` }), ); const sq2 = db.$with('inserted_products').as( db.insert(products).values({ productName: sql`` }).returning(), ); const sq3 = db.$with('inserted_products').as( db.insert(products).values({ productName: sql`` }).returning({ productName: products.productName }), ); // @ts-expect-error db.with(sq1).select().from(sq1); // @ts-expect-error db.with(sq1).select().from(providers).leftJoin(sq1, sql``); const q3 = await db.with(sq2).select().from(sq2); Expect< Equal >; const q4 = await db.with(sq3).select().from(providers).leftJoin(sq2, sql``); Expect< Equal >; const q5 = await db.with(sq3).select().from(sq3); Expect>; const q6 = await db.with(sq3).select().from(providers).leftJoin(sq3, sql``); Expect< Equal< typeof q6, { providers: { id: number; providerName: string }; inserted_products: { productName: string } | null }[] > >; } { const providers = pgTable('providers', { id: serial().primaryKey(), providerName: text().notNull(), }); const products = pgTable('products', { id: serial().primaryKey(), productName: text().notNull(), }); const otherProducts = pgTable('other_products', { id: serial().primaryKey(), productName: text().notNull(), }); const sq1 = db.$with('updated_products').as( db.update(products).set({ productName: sql`` }), ); const sq2 = db.$with('updated_products').as( db.update(products).set({ productName: sql`` }).returning(), ); const sq3 = db.$with('updated_products').as( db.update(products).set({ productName: sql`` }).returning({ productName: products.productName }), ); const sq4 = db.$with('updated_products').as( db.update(products).set({ productName: sql`` }).from(otherProducts).returning(), ); // @ts-expect-error db.with(sq1).select().from(sq1); // @ts-expect-error db.with(sq1).select().from(providers).leftJoin(sq1, sql``); const q3 = await db.with(sq2).select().from(sq2); Expect< Equal >; const q4 = await db.with(sq3).select().from(providers).leftJoin(sq2, sql``); Expect< Equal >; const q5 = await db.with(sq3).select().from(sq3); Expect< Equal >; const q6 = await db.with(sq3).select().from(providers).leftJoin(sq3, sql``); Expect< Equal >; const q7 = await db.with(sq4).select().from(sq4); Expect< Equal >; const q8 = await db.with(sq4).select().from(providers).leftJoin(sq4, sql``); Expect< Equal >; } { const providers = pgTable('providers', { id: serial().primaryKey(), providerName: text().notNull(), }); const products = pgTable('products', { id: serial().primaryKey(), productName: text().notNull(), }); const sq1 = db.$with('inserted_products').as( db.delete(products), ); const sq2 = db.$with('inserted_products').as( db.delete(products).returning(), ); const sq3 = db.$with('inserted_products').as( db.delete(products).returning({ productName: products.productName }), ); // @ts-expect-error db.with(sq1).select().from(sq1); // @ts-expect-error db.with(sq1).select().from(providers).leftJoin(sq1, sql``); const q3 = await db.with(sq2).select().from(sq2); Expect< Equal >; const q4 = await db.with(sq3).select().from(providers).leftJoin(sq2, sql``); Expect< Equal >; const q5 = await db.with(sq3).select().from(sq3); Expect>; const q6 = await db.with(sq3).select().from(providers).leftJoin(sq3, sql``); Expect< Equal< typeof q6, { providers: { id: number; providerName: string }; inserted_products: { productName: string } | null }[] > >; } { const providers = pgTable('providers', { id: serial().primaryKey(), providerName: text().notNull(), }); const sq1 = db.$with('providers_sq', { name: providers.providerName, }).as(sql`select provider_name as name from providers`); const q1 = await db.with(sq1).select().from(sq1); Expect>; const sq2 = db.$with('providers_sq', { nested: { id: providers.id, }, }).as(() => sql`select id from providers`); const q2 = await db.with(sq2).select().from(sq2); Expect>; // @ts-expect-error db.$with('providers_sq', { name: providers.providerName }).as(db.select().from(providers)); // @ts-expect-error db.$with('providers_sq', { name: providers.providerName }).as((qb) => qb.select().from(providers)); } ================================================ FILE: drizzle-orm/type-tests/singlestore/1000columns.ts ================================================ import { bigint, double, singlestoreTable, varchar } from '~/singlestore-core/index.ts'; singlestoreTable('test', { col0: double('col1').primaryKey().autoincrement().default(0), col1: double('col1').primaryKey().autoincrement().default(0), col2: double('col1').primaryKey().autoincrement().default(0), col3: double('col1').primaryKey().autoincrement().default(0), col4: double('col1').primaryKey().autoincrement().default(0), col5: double('col1').primaryKey().autoincrement().default(0), col6: double('col1').primaryKey().autoincrement().default(0), col8: double('col1').primaryKey().autoincrement().default(0), col9: double('col1').primaryKey().autoincrement().default(0), col10: double('col1').primaryKey().autoincrement().default(0), col11: double('col1').primaryKey().autoincrement().default(0), col12: double('col1').primaryKey().autoincrement().default(0), col13: double('col1').primaryKey().autoincrement().default(0), col14: double('col1').primaryKey().autoincrement().default(0), col15: double('col1').primaryKey().autoincrement().default(0), col16: double('col1').primaryKey().autoincrement().default(0), col18: double('col1').primaryKey().autoincrement().default(0), col19: double('col1').primaryKey().autoincrement().default(0), col20: double('col1').primaryKey().autoincrement().default(0), col21: double('col1').primaryKey().autoincrement().default(0), col22: double('col1').primaryKey().autoincrement().default(0), col23: double('col1').primaryKey().autoincrement().default(0), col24: double('col1').primaryKey().autoincrement().default(0), col25: double('col1').primaryKey().autoincrement().default(0), col26: double('col1').primaryKey().autoincrement().default(0), col28: double('col1').primaryKey().autoincrement().default(0), col29: double('col1').primaryKey().autoincrement().default(0), col30: double('col1').primaryKey().autoincrement().default(0), col31: double('col1').primaryKey().autoincrement().default(0), col32: double('col1').primaryKey().autoincrement().default(0), col33: double('col1').primaryKey().autoincrement().default(0), col34: double('col1').primaryKey().autoincrement().default(0), col35: double('col1').primaryKey().autoincrement().default(0), col36: double('col1').primaryKey().autoincrement().default(0), col38: double('col1').primaryKey().autoincrement().default(0), col39: double('col1').primaryKey().autoincrement().default(0), col40: double('col1').primaryKey().autoincrement().default(0), col41: double('col1').primaryKey().autoincrement().default(0), col42: double('col1').primaryKey().autoincrement().default(0), col43: double('col1').primaryKey().autoincrement().default(0), col44: double('col1').primaryKey().autoincrement().default(0), col45: double('col1').primaryKey().autoincrement().default(0), col46: double('col1').primaryKey().autoincrement().default(0), col48: double('col1').primaryKey().autoincrement().default(0), col49: double('col1').primaryKey().autoincrement().default(0), col50: double('col1').primaryKey().autoincrement().default(0), col51: double('col1').primaryKey().autoincrement().default(0), col52: double('col1').primaryKey().autoincrement().default(0), col53: double('col1').primaryKey().autoincrement().default(0), col54: double('col1').primaryKey().autoincrement().default(0), col55: double('col1').primaryKey().autoincrement().default(0), col56: double('col1').primaryKey().autoincrement().default(0), col58: double('col1').primaryKey().autoincrement().default(0), col59: double('col1').primaryKey().autoincrement().default(0), col60: double('col1').primaryKey().autoincrement().default(0), col61: double('col1').primaryKey().autoincrement().default(0), col62: double('col1').primaryKey().autoincrement().default(0), col63: double('col1').primaryKey().autoincrement().default(0), col64: double('col1').primaryKey().autoincrement().default(0), col65: double('col1').primaryKey().autoincrement().default(0), col66: double('col1').primaryKey().autoincrement().default(0), col68: double('col1').primaryKey().autoincrement().default(0), col69: double('col1').primaryKey().autoincrement().default(0), col70: double('col1').primaryKey().autoincrement().default(0), col71: double('col1').primaryKey().autoincrement().default(0), col72: double('col1').primaryKey().autoincrement().default(0), col73: double('col1').primaryKey().autoincrement().default(0), col74: double('col1').primaryKey().autoincrement().default(0), col75: double('col1').primaryKey().autoincrement().default(0), col76: double('col1').primaryKey().autoincrement().default(0), col78: double('col1').primaryKey().autoincrement().default(0), col79: double('col1').primaryKey().autoincrement().default(0), col80: double('col1').primaryKey().autoincrement().default(0), col81: double('col1').primaryKey().autoincrement().default(0), col82: double('col1').primaryKey().autoincrement().default(0), col83: double('col1').primaryKey().autoincrement().default(0), col84: double('col1').primaryKey().autoincrement().default(0), col85: double('col1').primaryKey().autoincrement().default(0), col86: double('col1').primaryKey().autoincrement().default(0), col88: double('col1').primaryKey().autoincrement().default(0), col89: double('col1').primaryKey().autoincrement().default(0), col90: double('col1').primaryKey().autoincrement().default(0), col91: double('col1').primaryKey().autoincrement().default(0), col92: double('col1').primaryKey().autoincrement().default(0), col93: double('col1').primaryKey().autoincrement().default(0), col94: double('col1').primaryKey().autoincrement().default(0), col95: double('col1').primaryKey().autoincrement().default(0), col96: double('col1').primaryKey().autoincrement().default(0), col98: double('col1').primaryKey().autoincrement().default(0), col99: double('col1').primaryKey().autoincrement().default(0), col100: double('col1').primaryKey().autoincrement().default(0), col101: double('col1').primaryKey().autoincrement().default(0), col102: double('col1').primaryKey().autoincrement().default(0), col103: double('col1').primaryKey().autoincrement().default(0), col104: double('col1').primaryKey().autoincrement().default(0), col105: double('col1').primaryKey().autoincrement().default(0), col106: double('col1').primaryKey().autoincrement().default(0), col108: double('col1').primaryKey().autoincrement().default(0), col109: double('col1').primaryKey().autoincrement().default(0), col110: double('col11').primaryKey().autoincrement().default(0), col111: double('col11').primaryKey().autoincrement().default(0), col112: double('col11').primaryKey().autoincrement().default(0), col113: double('col11').primaryKey().autoincrement().default(0), col114: double('col11').primaryKey().autoincrement().default(0), col115: double('col11').primaryKey().autoincrement().default(0), col116: double('col11').primaryKey().autoincrement().default(0), col118: double('col11').primaryKey().autoincrement().default(0), col119: double('col11').primaryKey().autoincrement().default(0), col120: double('col11').primaryKey().autoincrement().default(0), col121: double('col11').primaryKey().autoincrement().default(0), col122: double('col11').primaryKey().autoincrement().default(0), col123: double('col11').primaryKey().autoincrement().default(0), col124: double('col11').primaryKey().autoincrement().default(0), col125: double('col11').primaryKey().autoincrement().default(0), col126: double('col11').primaryKey().autoincrement().default(0), col128: double('col11').primaryKey().autoincrement().default(0), col129: double('col11').primaryKey().autoincrement().default(0), col130: double('col11').primaryKey().autoincrement().default(0), col131: double('col11').primaryKey().autoincrement().default(0), col132: double('col11').primaryKey().autoincrement().default(0), col133: double('col11').primaryKey().autoincrement().default(0), col134: double('col11').primaryKey().autoincrement().default(0), col135: double('col11').primaryKey().autoincrement().default(0), col136: double('col11').primaryKey().autoincrement().default(0), col138: double('col11').primaryKey().autoincrement().default(0), col139: double('col11').primaryKey().autoincrement().default(0), col140: double('col11').primaryKey().autoincrement().default(0), col141: double('col11').primaryKey().autoincrement().default(0), col142: double('col11').primaryKey().autoincrement().default(0), col143: double('col11').primaryKey().autoincrement().default(0), col144: double('col11').primaryKey().autoincrement().default(0), col145: double('col11').primaryKey().autoincrement().default(0), col146: double('col11').primaryKey().autoincrement().default(0), col148: double('col11').primaryKey().autoincrement().default(0), col149: double('col11').primaryKey().autoincrement().default(0), col150: double('col11').primaryKey().autoincrement().default(0), col151: double('col11').primaryKey().autoincrement().default(0), col152: double('col11').primaryKey().autoincrement().default(0), col153: double('col11').primaryKey().autoincrement().default(0), col154: double('col11').primaryKey().autoincrement().default(0), col155: double('col11').primaryKey().autoincrement().default(0), col156: double('col11').primaryKey().autoincrement().default(0), col158: double('col11').primaryKey().autoincrement().default(0), col159: double('col11').primaryKey().autoincrement().default(0), col160: double('col11').primaryKey().autoincrement().default(0), col161: double('col11').primaryKey().autoincrement().default(0), col162: double('col11').primaryKey().autoincrement().default(0), col163: double('col11').primaryKey().autoincrement().default(0), col164: double('col11').primaryKey().autoincrement().default(0), col165: double('col11').primaryKey().autoincrement().default(0), col166: double('col11').primaryKey().autoincrement().default(0), col168: double('col11').primaryKey().autoincrement().default(0), col169: double('col11').primaryKey().autoincrement().default(0), col170: double('col11').primaryKey().autoincrement().default(0), col171: double('col11').primaryKey().autoincrement().default(0), col172: double('col11').primaryKey().autoincrement().default(0), col173: double('col11').primaryKey().autoincrement().default(0), col174: double('col11').primaryKey().autoincrement().default(0), col175: double('col11').primaryKey().autoincrement().default(0), col176: double('col11').primaryKey().autoincrement().default(0), col178: double('col11').primaryKey().autoincrement().default(0), col179: double('col11').primaryKey().autoincrement().default(0), col180: double('col11').primaryKey().autoincrement().default(0), col181: double('col11').primaryKey().autoincrement().default(0), col182: double('col11').primaryKey().autoincrement().default(0), col183: double('col11').primaryKey().autoincrement().default(0), col184: double('col11').primaryKey().autoincrement().default(0), col185: double('col11').primaryKey().autoincrement().default(0), col186: double('col11').primaryKey().autoincrement().default(0), col188: double('col11').primaryKey().autoincrement().default(0), col189: double('col11').primaryKey().autoincrement().default(0), col190: double('col11').primaryKey().autoincrement().default(0), col191: double('col11').primaryKey().autoincrement().default(0), col192: double('col11').primaryKey().autoincrement().default(0), col193: double('col11').primaryKey().autoincrement().default(0), col194: double('col11').primaryKey().autoincrement().default(0), col195: double('col11').primaryKey().autoincrement().default(0), col196: double('col11').primaryKey().autoincrement().default(0), col198: double('col11').primaryKey().autoincrement().default(0), col199: double('col11').primaryKey().autoincrement().default(0), col200: double('col2').primaryKey().autoincrement().default(0), col201: double('col2').primaryKey().autoincrement().default(0), col202: double('col2').primaryKey().autoincrement().default(0), col203: double('col2').primaryKey().autoincrement().default(0), col204: double('col2').primaryKey().autoincrement().default(0), col205: double('col2').primaryKey().autoincrement().default(0), col206: double('col2').primaryKey().autoincrement().default(0), col208: double('col2').primaryKey().autoincrement().default(0), col209: double('col2').primaryKey().autoincrement().default(0), col210: double('col21').primaryKey().autoincrement().default(0), col211: double('col21').primaryKey().autoincrement().default(0), col212: double('col21').primaryKey().autoincrement().default(0), col213: double('col21').primaryKey().autoincrement().default(0), col214: double('col21').primaryKey().autoincrement().default(0), col215: double('col21').primaryKey().autoincrement().default(0), col216: double('col21').primaryKey().autoincrement().default(0), col218: double('col21').primaryKey().autoincrement().default(0), col219: double('col21').primaryKey().autoincrement().default(0), col220: double('col21').primaryKey().autoincrement().default(0), col221: double('col21').primaryKey().autoincrement().default(0), col222: double('col21').primaryKey().autoincrement().default(0), col223: double('col21').primaryKey().autoincrement().default(0), col224: double('col21').primaryKey().autoincrement().default(0), col225: double('col21').primaryKey().autoincrement().default(0), col226: double('col21').primaryKey().autoincrement().default(0), col228: double('col21').primaryKey().autoincrement().default(0), col229: double('col21').primaryKey().autoincrement().default(0), col230: double('col21').primaryKey().autoincrement().default(0), col231: double('col21').primaryKey().autoincrement().default(0), col232: double('col21').primaryKey().autoincrement().default(0), col233: double('col21').primaryKey().autoincrement().default(0), col234: double('col21').primaryKey().autoincrement().default(0), col235: double('col21').primaryKey().autoincrement().default(0), col236: double('col21').primaryKey().autoincrement().default(0), col238: double('col21').primaryKey().autoincrement().default(0), col239: double('col21').primaryKey().autoincrement().default(0), col240: double('col21').primaryKey().autoincrement().default(0), col241: double('col21').primaryKey().autoincrement().default(0), col242: double('col21').primaryKey().autoincrement().default(0), col243: double('col21').primaryKey().autoincrement().default(0), col244: double('col21').primaryKey().autoincrement().default(0), col245: double('col21').primaryKey().autoincrement().default(0), col246: double('col21').primaryKey().autoincrement().default(0), col248: double('col21').primaryKey().autoincrement().default(0), col249: double('col21').primaryKey().autoincrement().default(0), col250: double('col21').primaryKey().autoincrement().default(0), col251: double('col21').primaryKey().autoincrement().default(0), col252: double('col21').primaryKey().autoincrement().default(0), col253: double('col21').primaryKey().autoincrement().default(0), col254: double('col21').primaryKey().autoincrement().default(0), col255: double('col21').primaryKey().autoincrement().default(0), col256: double('col21').primaryKey().autoincrement().default(0), col258: double('col21').primaryKey().autoincrement().default(0), col259: double('col21').primaryKey().autoincrement().default(0), col260: double('col21').primaryKey().autoincrement().default(0), col261: double('col21').primaryKey().autoincrement().default(0), col262: double('col21').primaryKey().autoincrement().default(0), col263: double('col21').primaryKey().autoincrement().default(0), col264: double('col21').primaryKey().autoincrement().default(0), col265: double('col21').primaryKey().autoincrement().default(0), col266: double('col21').primaryKey().autoincrement().default(0), col268: double('col21').primaryKey().autoincrement().default(0), col269: double('col21').primaryKey().autoincrement().default(0), col270: double('col21').primaryKey().autoincrement().default(0), col271: double('col21').primaryKey().autoincrement().default(0), col272: double('col21').primaryKey().autoincrement().default(0), col273: double('col21').primaryKey().autoincrement().default(0), col274: double('col21').primaryKey().autoincrement().default(0), col275: double('col21').primaryKey().autoincrement().default(0), col276: double('col21').primaryKey().autoincrement().default(0), col278: double('col21').primaryKey().autoincrement().default(0), col279: double('col21').primaryKey().autoincrement().default(0), col280: double('col21').primaryKey().autoincrement().default(0), col281: double('col21').primaryKey().autoincrement().default(0), col282: double('col21').primaryKey().autoincrement().default(0), col283: double('col21').primaryKey().autoincrement().default(0), col284: double('col21').primaryKey().autoincrement().default(0), col285: double('col21').primaryKey().autoincrement().default(0), col286: double('col21').primaryKey().autoincrement().default(0), col288: double('col21').primaryKey().autoincrement().default(0), col289: double('col21').primaryKey().autoincrement().default(0), col290: double('col21').primaryKey().autoincrement().default(0), col291: double('col21').primaryKey().autoincrement().default(0), col292: double('col21').primaryKey().autoincrement().default(0), col293: double('col21').primaryKey().autoincrement().default(0), col294: double('col21').primaryKey().autoincrement().default(0), col295: double('col21').primaryKey().autoincrement().default(0), col296: double('col21').primaryKey().autoincrement().default(0), col298: double('col21').primaryKey().autoincrement().default(0), col299: double('col21').primaryKey().autoincrement().default(0), col300: double('col3').primaryKey().autoincrement().default(0), col301: double('col3').primaryKey().autoincrement().default(0), col302: double('col3').primaryKey().autoincrement().default(0), col303: double('col3').primaryKey().autoincrement().default(0), col304: double('col3').primaryKey().autoincrement().default(0), col305: double('col3').primaryKey().autoincrement().default(0), col306: double('col3').primaryKey().autoincrement().default(0), col308: double('col3').primaryKey().autoincrement().default(0), col309: double('col3').primaryKey().autoincrement().default(0), col310: double('col31').primaryKey().autoincrement().default(0), col311: double('col31').primaryKey().autoincrement().default(0), col312: double('col31').primaryKey().autoincrement().default(0), col313: double('col31').primaryKey().autoincrement().default(0), col314: double('col31').primaryKey().autoincrement().default(0), col315: double('col31').primaryKey().autoincrement().default(0), col316: double('col31').primaryKey().autoincrement().default(0), col318: double('col31').primaryKey().autoincrement().default(0), col319: double('col31').primaryKey().autoincrement().default(0), col320: double('col31').primaryKey().autoincrement().default(0), col321: double('col31').primaryKey().autoincrement().default(0), col322: double('col31').primaryKey().autoincrement().default(0), col323: double('col31').primaryKey().autoincrement().default(0), col324: double('col31').primaryKey().autoincrement().default(0), col325: double('col31').primaryKey().autoincrement().default(0), col326: double('col31').primaryKey().autoincrement().default(0), col328: double('col31').primaryKey().autoincrement().default(0), col329: double('col31').primaryKey().autoincrement().default(0), col330: double('col31').primaryKey().autoincrement().default(0), col331: double('col31').primaryKey().autoincrement().default(0), col332: double('col31').primaryKey().autoincrement().default(0), col333: double('col31').primaryKey().autoincrement().default(0), col334: double('col31').primaryKey().autoincrement().default(0), col335: double('col31').primaryKey().autoincrement().default(0), col336: double('col31').primaryKey().autoincrement().default(0), col338: double('col31').primaryKey().autoincrement().default(0), col339: double('col31').primaryKey().autoincrement().default(0), col340: double('col31').primaryKey().autoincrement().default(0), col341: double('col31').primaryKey().autoincrement().default(0), col342: double('col31').primaryKey().autoincrement().default(0), col343: double('col31').primaryKey().autoincrement().default(0), col344: double('col31').primaryKey().autoincrement().default(0), col345: double('col31').primaryKey().autoincrement().default(0), col346: double('col31').primaryKey().autoincrement().default(0), col348: double('col31').primaryKey().autoincrement().default(0), col349: double('col31').primaryKey().autoincrement().default(0), col350: double('col31').primaryKey().autoincrement().default(0), col351: double('col31').primaryKey().autoincrement().default(0), col352: double('col31').primaryKey().autoincrement().default(0), col353: double('col31').primaryKey().autoincrement().default(0), col354: double('col31').primaryKey().autoincrement().default(0), col355: double('col31').primaryKey().autoincrement().default(0), col356: double('col31').primaryKey().autoincrement().default(0), col358: double('col31').primaryKey().autoincrement().default(0), col359: double('col31').primaryKey().autoincrement().default(0), col360: double('col31').primaryKey().autoincrement().default(0), col361: double('col31').primaryKey().autoincrement().default(0), col362: double('col31').primaryKey().autoincrement().default(0), col363: double('col31').primaryKey().autoincrement().default(0), col364: double('col31').primaryKey().autoincrement().default(0), col365: double('col31').primaryKey().autoincrement().default(0), col366: double('col31').primaryKey().autoincrement().default(0), col368: double('col31').primaryKey().autoincrement().default(0), col369: double('col31').primaryKey().autoincrement().default(0), col370: double('col31').primaryKey().autoincrement().default(0), col371: double('col31').primaryKey().autoincrement().default(0), col372: double('col31').primaryKey().autoincrement().default(0), col373: double('col31').primaryKey().autoincrement().default(0), col374: double('col31').primaryKey().autoincrement().default(0), col375: double('col31').primaryKey().autoincrement().default(0), col376: double('col31').primaryKey().autoincrement().default(0), col378: double('col31').primaryKey().autoincrement().default(0), col379: double('col31').primaryKey().autoincrement().default(0), col380: double('col31').primaryKey().autoincrement().default(0), col381: double('col31').primaryKey().autoincrement().default(0), col382: double('col31').primaryKey().autoincrement().default(0), col383: double('col31').primaryKey().autoincrement().default(0), col384: double('col31').primaryKey().autoincrement().default(0), col385: double('col31').primaryKey().autoincrement().default(0), col386: double('col31').primaryKey().autoincrement().default(0), col388: double('col31').primaryKey().autoincrement().default(0), col389: double('col31').primaryKey().autoincrement().default(0), col390: double('col31').primaryKey().autoincrement().default(0), col391: double('col31').primaryKey().autoincrement().default(0), col392: double('col31').primaryKey().autoincrement().default(0), col393: double('col31').primaryKey().autoincrement().default(0), col394: double('col31').primaryKey().autoincrement().default(0), col395: double('col31').primaryKey().autoincrement().default(0), col396: double('col31').primaryKey().autoincrement().default(0), col398: double('col31').primaryKey().autoincrement().default(0), col399: double('col31').primaryKey().autoincrement().default(0), col400: double('col4').primaryKey().autoincrement().default(0), col401: double('col4').primaryKey().autoincrement().default(0), col402: double('col4').primaryKey().autoincrement().default(0), col403: double('col4').primaryKey().autoincrement().default(0), col404: double('col4').primaryKey().autoincrement().default(0), col405: double('col4').primaryKey().autoincrement().default(0), col406: double('col4').primaryKey().autoincrement().default(0), col408: double('col4').primaryKey().autoincrement().default(0), col409: double('col4').primaryKey().autoincrement().default(0), col410: double('col41').primaryKey().autoincrement().default(0), col411: double('col41').primaryKey().autoincrement().default(0), col412: double('col41').primaryKey().autoincrement().default(0), col413: double('col41').primaryKey().autoincrement().default(0), col414: double('col41').primaryKey().autoincrement().default(0), col415: double('col41').primaryKey().autoincrement().default(0), col416: double('col41').primaryKey().autoincrement().default(0), col418: double('col41').primaryKey().autoincrement().default(0), col419: double('col41').primaryKey().autoincrement().default(0), col420: double('col41').primaryKey().autoincrement().default(0), col421: double('col41').primaryKey().autoincrement().default(0), col422: double('col41').primaryKey().autoincrement().default(0), col423: double('col41').primaryKey().autoincrement().default(0), col424: double('col41').primaryKey().autoincrement().default(0), col425: double('col41').primaryKey().autoincrement().default(0), col426: double('col41').primaryKey().autoincrement().default(0), col428: double('col41').primaryKey().autoincrement().default(0), col429: double('col41').primaryKey().autoincrement().default(0), col430: double('col41').primaryKey().autoincrement().default(0), col431: double('col41').primaryKey().autoincrement().default(0), col432: double('col41').primaryKey().autoincrement().default(0), col433: double('col41').primaryKey().autoincrement().default(0), col434: double('col41').primaryKey().autoincrement().default(0), col435: double('col41').primaryKey().autoincrement().default(0), col436: double('col41').primaryKey().autoincrement().default(0), col438: double('col41').primaryKey().autoincrement().default(0), col439: double('col41').primaryKey().autoincrement().default(0), col440: double('col41').primaryKey().autoincrement().default(0), col441: double('col41').primaryKey().autoincrement().default(0), col442: double('col41').primaryKey().autoincrement().default(0), col443: double('col41').primaryKey().autoincrement().default(0), col444: double('col41').primaryKey().autoincrement().default(0), col445: double('col41').primaryKey().autoincrement().default(0), col446: double('col41').primaryKey().autoincrement().default(0), col448: double('col41').primaryKey().autoincrement().default(0), col449: double('col41').primaryKey().autoincrement().default(0), col450: double('col41').primaryKey().autoincrement().default(0), col451: double('col41').primaryKey().autoincrement().default(0), col452: double('col41').primaryKey().autoincrement().default(0), col453: double('col41').primaryKey().autoincrement().default(0), col454: double('col41').primaryKey().autoincrement().default(0), col455: double('col41').primaryKey().autoincrement().default(0), col456: double('col41').primaryKey().autoincrement().default(0), col458: double('col41').primaryKey().autoincrement().default(0), col459: double('col41').primaryKey().autoincrement().default(0), col460: double('col41').primaryKey().autoincrement().default(0), col461: double('col41').primaryKey().autoincrement().default(0), col462: double('col41').primaryKey().autoincrement().default(0), col463: double('col41').primaryKey().autoincrement().default(0), col464: double('col41').primaryKey().autoincrement().default(0), col465: double('col41').primaryKey().autoincrement().default(0), col466: double('col41').primaryKey().autoincrement().default(0), col468: double('col41').primaryKey().autoincrement().default(0), col469: double('col41').primaryKey().autoincrement().default(0), col470: double('col41').primaryKey().autoincrement().default(0), col471: double('col41').primaryKey().autoincrement().default(0), col472: double('col41').primaryKey().autoincrement().default(0), col473: double('col41').primaryKey().autoincrement().default(0), col474: double('col41').primaryKey().autoincrement().default(0), col475: double('col41').primaryKey().autoincrement().default(0), col476: double('col41').primaryKey().autoincrement().default(0), col478: double('col41').primaryKey().autoincrement().default(0), col479: double('col41').primaryKey().autoincrement().default(0), col480: double('col41').primaryKey().autoincrement().default(0), col481: double('col41').primaryKey().autoincrement().default(0), col482: double('col41').primaryKey().autoincrement().default(0), col483: double('col41').primaryKey().autoincrement().default(0), col484: double('col41').primaryKey().autoincrement().default(0), col485: double('col41').primaryKey().autoincrement().default(0), col486: double('col41').primaryKey().autoincrement().default(0), col488: double('col41').primaryKey().autoincrement().default(0), col489: double('col41').primaryKey().autoincrement().default(0), col490: double('col41').primaryKey().autoincrement().default(0), col491: double('col41').primaryKey().autoincrement().default(0), col492: double('col41').primaryKey().autoincrement().default(0), col493: double('col41').primaryKey().autoincrement().default(0), col494: double('col41').primaryKey().autoincrement().default(0), col495: double('col41').primaryKey().autoincrement().default(0), col496: double('col41').primaryKey().autoincrement().default(0), col498: double('col41').primaryKey().autoincrement().default(0), col499: double('col41').primaryKey().autoincrement().default(0), col500: double('col5').primaryKey().autoincrement().default(0), col501: double('col5').primaryKey().autoincrement().default(0), col502: double('col5').primaryKey().autoincrement().default(0), col503: double('col5').primaryKey().autoincrement().default(0), col504: double('col5').primaryKey().autoincrement().default(0), col505: double('col5').primaryKey().autoincrement().default(0), col506: double('col5').primaryKey().autoincrement().default(0), col508: double('col5').primaryKey().autoincrement().default(0), col509: double('col5').primaryKey().autoincrement().default(0), col510: double('col51').primaryKey().autoincrement().default(0), col511: double('col51').primaryKey().autoincrement().default(0), col512: double('col51').primaryKey().autoincrement().default(0), col513: double('col51').primaryKey().autoincrement().default(0), col514: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col515: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col516: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col518: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col519: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col520: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col521: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col522: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col523: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col524: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col525: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col526: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col528: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col529: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col530: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col531: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col532: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col533: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col534: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col535: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col536: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col538: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col539: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col540: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col541: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col542: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col543: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col544: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col545: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col546: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col548: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col549: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col550: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col551: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col552: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col553: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col554: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col555: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col556: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col558: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col559: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col560: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col561: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col562: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col563: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col564: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col565: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col566: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col568: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col569: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col570: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col571: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col572: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col573: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col574: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col575: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col576: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col578: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col579: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col580: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col581: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col582: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col583: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col584: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col585: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col586: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col588: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col589: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col590: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col591: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col592: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col593: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col594: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col595: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col596: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col598: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col599: bigint('col51', { mode: 'number' }).primaryKey().autoincrement().default(0), col600: bigint('col6', { mode: 'number' }).primaryKey().autoincrement().default(0), col601: double('col6').primaryKey().autoincrement().default(0), col602: double('col6').primaryKey().autoincrement().default(0), col603: double('col6').primaryKey().autoincrement().default(0), col604: double('col6').primaryKey().autoincrement().default(0), col605: double('col6').primaryKey().autoincrement().default(0), col606: double('col6').primaryKey().autoincrement().default(0), col608: double('col6').primaryKey().autoincrement().default(0), col609: double('col6').primaryKey().autoincrement().default(0), col610: double('col61').primaryKey().autoincrement().default(0), col611: double('col61').primaryKey().autoincrement().default(0), col612: double('col61').primaryKey().autoincrement().default(0), col613: double('col61').primaryKey().autoincrement().default(0), col614: double('col61').primaryKey().autoincrement().default(0), col615: double('col61').primaryKey().autoincrement().default(0), col616: double('col61').primaryKey().autoincrement().default(0), col618: double('col61').primaryKey().autoincrement().default(0), col619: double('col61').primaryKey().autoincrement().default(0), col620: double('col61').primaryKey().autoincrement().default(0), col621: double('col61').primaryKey().autoincrement().default(0), col622: double('col61').primaryKey().autoincrement().default(0), col623: double('col61').primaryKey().autoincrement().default(0), col624: double('col61').primaryKey().autoincrement().default(0), col625: double('col61').primaryKey().autoincrement().default(0), col626: double('col61').primaryKey().autoincrement().default(0), col628: double('col61').primaryKey().autoincrement().default(0), col629: double('col61').primaryKey().autoincrement().default(0), col630: double('col61').primaryKey().autoincrement().default(0), col631: double('col61').primaryKey().autoincrement().default(0), col632: double('col61').primaryKey().autoincrement().default(0), col633: double('col61').primaryKey().autoincrement().default(0), col634: double('col61').primaryKey().autoincrement().default(0), col635: double('col61').primaryKey().autoincrement().default(0), col636: double('col61').primaryKey().autoincrement().default(0), col638: double('col61').primaryKey().autoincrement().default(0), col639: double('col61').primaryKey().autoincrement().default(0), col640: double('col61').primaryKey().autoincrement().default(0), col641: double('col61').primaryKey().autoincrement().default(0), col642: double('col61').primaryKey().autoincrement().default(0), col643: double('col61').primaryKey().autoincrement().default(0), col644: double('col61').primaryKey().autoincrement().default(0), col645: double('col61').primaryKey().autoincrement().default(0), col646: double('col61').primaryKey().autoincrement().default(0), col648: double('col61').primaryKey().autoincrement().default(0), col649: double('col61').primaryKey().autoincrement().default(0), col650: double('col61').primaryKey().autoincrement().default(0), col651: double('col61').primaryKey().autoincrement().default(0), col652: double('col61').primaryKey().autoincrement().default(0), col653: double('col61').primaryKey().autoincrement().default(0), col654: double('col61').primaryKey().autoincrement().default(0), col655: double('col61').primaryKey().autoincrement().default(0), col656: double('col61').primaryKey().autoincrement().default(0), col658: double('col61').primaryKey().autoincrement().default(0), col659: double('col61').primaryKey().autoincrement().default(0), col660: double('col61').primaryKey().autoincrement().default(0), col661: double('col61').primaryKey().autoincrement().default(0), col662: double('col61').primaryKey().autoincrement().default(0), col663: double('col61').primaryKey().autoincrement().default(0), col664: double('col61').primaryKey().autoincrement().default(0), col665: double('col61').primaryKey().autoincrement().default(0), col666: double('col61').primaryKey().autoincrement().default(0), col668: double('col61').primaryKey().autoincrement().default(0), col669: double('col61').primaryKey().autoincrement().default(0), col670: double('col61').primaryKey().autoincrement().default(0), col671: double('col61').primaryKey().autoincrement().default(0), col672: double('col61').primaryKey().autoincrement().default(0), col673: double('col61').primaryKey().autoincrement().default(0), col674: double('col61').primaryKey().autoincrement().default(0), col675: double('col61').primaryKey().autoincrement().default(0), col676: double('col61').primaryKey().autoincrement().default(0), col678: double('col61').primaryKey().autoincrement().default(0), col679: double('col61').primaryKey().autoincrement().default(0), col680: double('col61').primaryKey().autoincrement().default(0), col681: double('col61').primaryKey().autoincrement().default(0), col682: double('col61').primaryKey().autoincrement().default(0), col683: double('col61').primaryKey().autoincrement().default(0), col684: double('col61').primaryKey().autoincrement().default(0), col685: double('col61').primaryKey().autoincrement().default(0), col686: double('col61').primaryKey().autoincrement().default(0), col688: double('col61').primaryKey().autoincrement().default(0), col689: double('col61').primaryKey().autoincrement().default(0), col690: double('col61').primaryKey().autoincrement().default(0), col691: double('col61').primaryKey().autoincrement().default(0), col692: double('col61').primaryKey().autoincrement().default(0), col693: double('col61').primaryKey().autoincrement().default(0), col694: double('col61').primaryKey().autoincrement().default(0), col695: double('col61').primaryKey().autoincrement().default(0), col696: double('col61').primaryKey().autoincrement().default(0), col698: double('col61').primaryKey().autoincrement().default(0), col699: double('col61').primaryKey().autoincrement().default(0), col700: double('col7').primaryKey().autoincrement().default(0), col701: double('col7').primaryKey().autoincrement().default(0), col702: double('col7').primaryKey().autoincrement().default(0), col703: double('col7').primaryKey().autoincrement().default(0), col704: double('col7').primaryKey().autoincrement().default(0), col705: double('col7').primaryKey().autoincrement().default(0), col706: double('col7').primaryKey().autoincrement().default(0), col708: double('col7').primaryKey().autoincrement().default(0), col709: double('col7').primaryKey().autoincrement().default(0), col710: double('col71').primaryKey().autoincrement().default(0), col711: double('col71').primaryKey().autoincrement().default(0), col712: double('col71').primaryKey().autoincrement().default(0), col713: double('col71').primaryKey().autoincrement().default(0), col714: double('col71').primaryKey().autoincrement().default(0), col715: double('col71').primaryKey().autoincrement().default(0), col716: double('col71').primaryKey().autoincrement().default(0), col718: double('col71').primaryKey().autoincrement().default(0), col719: double('col71').primaryKey().autoincrement().default(0), col720: double('col71').primaryKey().autoincrement().default(0), col721: double('col71').primaryKey().autoincrement().default(0), col722: double('col71').primaryKey().autoincrement().default(0), col723: double('col71').primaryKey().autoincrement().default(0), col724: double('col71').primaryKey().autoincrement().default(0), col725: double('col71').primaryKey().autoincrement().default(0), col726: double('col71').primaryKey().autoincrement().default(0), col728: double('col71').primaryKey().autoincrement().default(0), col729: double('col71').primaryKey().autoincrement().default(0), col730: double('col71').primaryKey().autoincrement().default(0), col731: double('col71').primaryKey().autoincrement().default(0), col732: double('col71').primaryKey().autoincrement().default(0), col733: double('col71').primaryKey().autoincrement().default(0), col734: double('col71').primaryKey().autoincrement().default(0), col735: double('col71').primaryKey().autoincrement().default(0), col736: double('col71').primaryKey().autoincrement().default(0), col738: double('col71').primaryKey().autoincrement().default(0), col739: double('col71').primaryKey().autoincrement().default(0), col740: double('col71').primaryKey().autoincrement().default(0), col741: double('col71').primaryKey().autoincrement().default(0), col742: double('col71').primaryKey().autoincrement().default(0), col743: double('col71').primaryKey().autoincrement().default(0), col744: double('col71').primaryKey().autoincrement().default(0), col745: double('col71').primaryKey().autoincrement().default(0), col746: double('col71').primaryKey().autoincrement().default(0), col748: double('col71').primaryKey().autoincrement().default(0), col749: double('col71').primaryKey().autoincrement().default(0), col750: double('col71').primaryKey().autoincrement().default(0), col751: double('col71').primaryKey().autoincrement().default(0), col752: double('col71').primaryKey().autoincrement().default(0), col753: double('col71').primaryKey().autoincrement().default(0), col754: double('col71').primaryKey().autoincrement().default(0), col755: double('col71').primaryKey().autoincrement().default(0), col756: double('col71').primaryKey().autoincrement().default(0), col758: double('col71').primaryKey().autoincrement().default(0), col759: double('col71').primaryKey().autoincrement().default(0), col760: double('col71').primaryKey().autoincrement().default(0), col761: double('col71').primaryKey().autoincrement().default(0), col762: double('col71').primaryKey().autoincrement().default(0), col763: double('col71').primaryKey().autoincrement().default(0), col764: double('col71').primaryKey().autoincrement().default(0), col765: double('col71').primaryKey().autoincrement().default(0), col766: double('col71').primaryKey().autoincrement().default(0), col768: double('col71').primaryKey().autoincrement().default(0), col769: double('col71').primaryKey().autoincrement().default(0), col770: double('col71').primaryKey().autoincrement().default(0), col771: double('col71').primaryKey().autoincrement().default(0), col772: double('col71').primaryKey().autoincrement().default(0), col773: double('col71').primaryKey().autoincrement().default(0), col774: double('col71').primaryKey().autoincrement().default(0), col775: double('col71').primaryKey().autoincrement().default(0), col776: double('col71').primaryKey().autoincrement().default(0), col778: double('col71').primaryKey().autoincrement().default(0), col779: double('col71').primaryKey().autoincrement().default(0), col780: double('col71').primaryKey().autoincrement().default(0), col781: double('col71').primaryKey().autoincrement().default(0), col782: double('col71').primaryKey().autoincrement().default(0), col783: double('col71').primaryKey().autoincrement().default(0), col784: double('col71').primaryKey().autoincrement().default(0), col785: double('col71').primaryKey().autoincrement().default(0), col786: double('col71').primaryKey().autoincrement().default(0), col788: double('col71').primaryKey().autoincrement().default(0), col789: double('col71').primaryKey().autoincrement().default(0), col790: double('col71').primaryKey().autoincrement().default(0), col791: double('col71').primaryKey().autoincrement().default(0), col792: double('col71').primaryKey().autoincrement().default(0), col793: double('col71').primaryKey().autoincrement().default(0), col794: double('col71').primaryKey().autoincrement().default(0), col795: double('col71').primaryKey().autoincrement().default(0), col796: double('col71').primaryKey().autoincrement().default(0), col798: double('col71').primaryKey().autoincrement().default(0), col799: double('col71').primaryKey().autoincrement().default(0), col800: double('col8').primaryKey().autoincrement().default(0), col801: double('col8').primaryKey().autoincrement().default(0), col802: double('col8').primaryKey().autoincrement().default(0), col803: double('col8').primaryKey().autoincrement().default(0), col804: double('col8').primaryKey().autoincrement().default(0), col805: double('col8').primaryKey().autoincrement().default(0), col806: double('col8').primaryKey().autoincrement().default(0), col808: double('col8').primaryKey().autoincrement().default(0), col809: double('col8').primaryKey().autoincrement().default(0), col810: double('col81').primaryKey().autoincrement().default(0), col811: double('col81').primaryKey().autoincrement().default(0), col812: double('col81').primaryKey().autoincrement().default(0), col813: double('col81').primaryKey().autoincrement().default(0), col814: double('col81').primaryKey().autoincrement().default(0), col815: double('col81').primaryKey().autoincrement().default(0), col816: double('col81').primaryKey().autoincrement().default(0), col818: double('col81').primaryKey().autoincrement().default(0), col819: double('col81').primaryKey().autoincrement().default(0), col820: double('col81').primaryKey().autoincrement().default(0), col821: double('col81').primaryKey().autoincrement().default(0), col822: double('col81').primaryKey().autoincrement().default(0), col823: double('col81').primaryKey().autoincrement().default(0), col824: double('col81').primaryKey().autoincrement().default(0), col825: double('col81').primaryKey().autoincrement().default(0), col826: double('col81').primaryKey().autoincrement().default(0), col828: double('col81').primaryKey().autoincrement().default(0), col829: double('col81').primaryKey().autoincrement().default(0), col830: double('col81').primaryKey().autoincrement().default(0), col831: double('col81').primaryKey().autoincrement().default(0), col832: double('col81').primaryKey().autoincrement().default(0), col833: double('col81').primaryKey().autoincrement().default(0), col834: double('col81').primaryKey().autoincrement().default(0), col835: double('col81').primaryKey().autoincrement().default(0), col836: double('col81').primaryKey().autoincrement().default(0), col838: double('col81').primaryKey().autoincrement().default(0), col839: double('col81').primaryKey().autoincrement().default(0), col840: double('col81').primaryKey().autoincrement().default(0), col841: double('col81').primaryKey().autoincrement().default(0), col842: double('col81').primaryKey().autoincrement().default(0), col843: double('col81').primaryKey().autoincrement().default(0), col844: double('col81').primaryKey().autoincrement().default(0), col845: double('col81').primaryKey().autoincrement().default(0), col846: double('col81').primaryKey().autoincrement().default(0), col848: double('col81').primaryKey().autoincrement().default(0), col849: double('col81').primaryKey().autoincrement().default(0), col850: double('col81').primaryKey().autoincrement().default(0), col851: double('col81').primaryKey().autoincrement().default(0), col852: double('col81').primaryKey().autoincrement().default(0), col853: double('col81').primaryKey().autoincrement().default(0), col854: double('col81').primaryKey().autoincrement().default(0), col855: double('col81').primaryKey().autoincrement().default(0), col856: double('col81').primaryKey().autoincrement().default(0), col858: double('col81').primaryKey().autoincrement().default(0), col859: double('col81').primaryKey().autoincrement().default(0), col860: double('col81').primaryKey().autoincrement().default(0), col861: double('col81').primaryKey().autoincrement().default(0), col862: double('col81').primaryKey().autoincrement().default(0), col863: double('col81').primaryKey().autoincrement().default(0), col864: double('col81').primaryKey().autoincrement().default(0), col865: double('col81').primaryKey().autoincrement().default(0), col866: double('col81').primaryKey().autoincrement().default(0), col868: double('col81').primaryKey().autoincrement().default(0), col869: double('col81').primaryKey().autoincrement().default(0), col870: double('col81').primaryKey().autoincrement().default(0), col871: double('col81').primaryKey().autoincrement().default(0), col872: double('col81').primaryKey().autoincrement().default(0), col873: double('col81').primaryKey().autoincrement().default(0), col874: double('col81').primaryKey().autoincrement().default(0), col875: double('col81').primaryKey().autoincrement().default(0), col876: double('col81').primaryKey().autoincrement().default(0), col878: double('col81').primaryKey().autoincrement().default(0), col879: double('col81').primaryKey().autoincrement().default(0), col880: double('col81').primaryKey().autoincrement().default(0), col881: double('col81').primaryKey().autoincrement().default(0), col882: double('col81').primaryKey().autoincrement().default(0), col883: double('col81').primaryKey().autoincrement().default(0), col884: double('col81').primaryKey().autoincrement().default(0), col885: double('col81').primaryKey().autoincrement().default(0), col886: double('col81').primaryKey().autoincrement().default(0), col888: double('col81').primaryKey().autoincrement().default(0), col889: double('col81').primaryKey().autoincrement().default(0), col890: double('col81').primaryKey().autoincrement().default(0), col891: double('col81').primaryKey().autoincrement().default(0), col892: double('col81').primaryKey().autoincrement().default(0), col893: double('col81').primaryKey().autoincrement().default(0), col894: double('col81').primaryKey().autoincrement().default(0), col895: double('col81').primaryKey().autoincrement().default(0), col896: double('col81').primaryKey().autoincrement().default(0), col898: double('col81').primaryKey().autoincrement().default(0), col899: double('col81').primaryKey().autoincrement().default(0), col900: double('col9').primaryKey().autoincrement().default(0), col901: double('col9').primaryKey().autoincrement().default(0), col902: double('col9').primaryKey().autoincrement().default(0), col903: double('col9').primaryKey().autoincrement().default(0), col904: double('col9').primaryKey().autoincrement().default(0), col905: double('col9').primaryKey().autoincrement().default(0), col906: double('col9').primaryKey().autoincrement().default(0), col908: double('col9').primaryKey().autoincrement().default(0), col909: double('col9').primaryKey().autoincrement().default(0), col910: double('col91').primaryKey().autoincrement().default(0), col911: double('col91').primaryKey().autoincrement().default(0), col912: double('col91').primaryKey().autoincrement().default(0), col913: double('col91').primaryKey().autoincrement().default(0), col914: double('col91').primaryKey().autoincrement().default(0), col915: double('col91').primaryKey().autoincrement().default(0), col916: double('col91').primaryKey().autoincrement().default(0), col918: double('col91').primaryKey().autoincrement().default(0), col919: double('col91').primaryKey().autoincrement().default(0), col920: double('col91').primaryKey().autoincrement().default(0), col921: double('col91').primaryKey().autoincrement().default(0), col922: double('col91').primaryKey().autoincrement().default(0), col923: double('col91').primaryKey().autoincrement().default(0), col924: double('col91').primaryKey().autoincrement().default(0), col925: double('col91').primaryKey().autoincrement().default(0), col926: double('col91').primaryKey().autoincrement().default(0), col928: double('col91').primaryKey().autoincrement().default(0), col929: double('col91').primaryKey().autoincrement().default(0), col930: double('col91').primaryKey().autoincrement().default(0), col931: double('col91').primaryKey().autoincrement().default(0), col932: double('col91').primaryKey().autoincrement().default(0), col933: double('col91').primaryKey().autoincrement().default(0), col934: double('col91').primaryKey().autoincrement().default(0), col935: double('col91').primaryKey().autoincrement().default(0), col936: double('col91').primaryKey().autoincrement().default(0), col938: double('col91').primaryKey().autoincrement().default(0), col939: double('col91').primaryKey().autoincrement().default(0), col940: double('col91').primaryKey().autoincrement().default(0), col941: double('col91').primaryKey().autoincrement().default(0), col942: double('col91').primaryKey().autoincrement().default(0), col943: double('col91').primaryKey().autoincrement().default(0), col944: varchar('col91', { length: 200 }).primaryKey().default('0'), col945: varchar('col91', { length: 200 }).primaryKey().default('0'), col946: varchar('col91', { length: 200 }).primaryKey().default('0'), col948: varchar('col91', { length: 200 }).primaryKey().default('0'), col949: varchar('col91', { length: 200 }).primaryKey().default('0'), col950: varchar('col91', { length: 200 }).primaryKey().default('0'), col951: varchar('col91', { length: 200 }).primaryKey().default('0'), col952: varchar('col91', { length: 200 }).primaryKey().default('0'), col953: varchar('col91', { length: 200 }).primaryKey().default('0'), col954: varchar('col91', { length: 200 }).primaryKey().default('0'), col955: varchar('col91', { length: 200 }).primaryKey().default('0'), col956: varchar('col91', { length: 200 }).primaryKey().default('0'), col958: varchar('col91', { length: 200 }).primaryKey().default('0'), col959: varchar('col91', { length: 200 }).primaryKey().default('0'), col960: varchar('col91', { length: 200 }).primaryKey().default('0'), col961: varchar('col91', { length: 200 }).primaryKey().default('0'), col962: varchar('col91', { length: 200 }).primaryKey().default('0'), col963: varchar('col91', { length: 200 }).primaryKey().default('0'), col964: varchar('col91', { length: 200 }).primaryKey().default('0'), col965: varchar('col91', { length: 200 }).primaryKey().default('0'), col966: varchar('col91', { length: 200 }).primaryKey().default('0'), col968: varchar('col91', { length: 200 }).primaryKey().default('0'), col969: varchar('col91', { length: 200 }).primaryKey().default('0'), col970: varchar('col91', { length: 200 }).primaryKey().default('0'), col971: varchar('col91', { length: 200 }).primaryKey().default('0'), col972: varchar('col91', { length: 200 }).primaryKey().default('0'), col973: varchar('col91', { length: 200 }).primaryKey().default('0'), col974: varchar('col91', { length: 200 }).primaryKey().default('0'), col975: varchar('col91', { length: 200 }).primaryKey().default('0'), col976: varchar('col91', { length: 200 }).primaryKey().default('0'), col978: varchar('col91', { length: 200 }).primaryKey().default('0'), col979: varchar('col91', { length: 200 }).primaryKey().default('0'), col980: varchar('col91', { length: 200 }).primaryKey().default('0'), col981: varchar('col91', { length: 200 }).primaryKey().default('0'), col982: varchar('col91', { length: 200 }).primaryKey().default('0'), col983: varchar('col91', { length: 200 }).primaryKey().default('0'), col984: varchar('col91', { length: 200 }).primaryKey().default('0'), col985: varchar('col91', { length: 200 }).primaryKey().default('0'), col986: varchar('col91', { length: 200 }).primaryKey().default('0'), col988: varchar('col91', { length: 200 }).primaryKey().default('0'), col989: varchar('col91', { length: 200 }).primaryKey().default('0'), col990: varchar('col91', { length: 200 }).primaryKey().default('0'), col991: varchar('col91', { length: 200 }).primaryKey().default('0'), col992: varchar('col91', { length: 200 }).primaryKey().default('0'), col993: varchar('col91', { length: 200 }).primaryKey().default('0'), col994: varchar('col91', { length: 200 }).primaryKey().default('0'), col995: varchar('col91', { length: 200 }).primaryKey().default('0'), col996: varchar('col91', { length: 200 }).primaryKey().default('0'), col998: varchar('col91', { length: 200 }).primaryKey().default('0'), col999: varchar('col91', { length: 200 }).primaryKey().default('0'), }); ================================================ FILE: drizzle-orm/type-tests/singlestore/count.ts ================================================ import { Expect } from 'type-tests/utils.ts'; import { int, serial, singlestoreTable, text } from '~/singlestore-core/index.ts'; import { and, gt, ne } from '~/sql/expressions/index.ts'; import type { Equal } from '~/utils.ts'; import { db } from './db.ts'; const names = singlestoreTable('names', { id: serial('id').primaryKey(), name: text('name'), authorId: int('author_id'), }); const separate = await db.$count(names); const separateFilters = await db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))); const embedded = await db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: db.$count(names).as('count1'), }) .from(names); const embeddedFilters = await db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))).as('count1'), }) .from(names); Expect>; Expect>; Expect< Equal< { id: number; name: string | null; authorId: number | null; count1: number; }[], typeof embedded > >; Expect< Equal< { id: number; name: string | null; authorId: number | null; count1: number; }[], typeof embeddedFilters > >; ================================================ FILE: drizzle-orm/type-tests/singlestore/db.ts ================================================ import { createPool } from 'mysql2/promise'; import { drizzle } from '~/singlestore/index.ts'; const pool = createPool({}); export const db = drizzle(pool); { drizzle(pool); drizzle(pool, { schema: {} }); drizzle(pool, { schema: {} }); drizzle(pool, {}); } ================================================ FILE: drizzle-orm/type-tests/singlestore/delete.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import type { SingleStoreDelete } from '~/singlestore-core/index.ts'; import type { SingleStoreRawQueryResult } from '~/singlestore/index.ts'; import { eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { users } from './tables.ts'; const deleteAll = await db.delete(users); Expect>; const deleteAllStmt = db.delete(users).prepare(); const deleteAllPrepared = await deleteAllStmt.execute(); Expect>; const deleteWhere = await db.delete(users).where(eq(users.id, 1)); Expect>; const deleteWhereStmt = db.delete(users).where(eq(users.id, 1)).prepare(); const deleteWherePrepared = await deleteWhereStmt.execute(); Expect>; const deleteReturningAll = await db.delete(users); Expect>; const deleteReturningAllStmt = db.delete(users).prepare(); const deleteReturningAllPrepared = await deleteReturningAllStmt.execute(); Expect>; const deleteReturningPartial = await db.delete(users); Expect>; const deleteReturningPartialStmt = db.delete(users).prepare(); const deleteReturningPartialPrepared = await deleteReturningPartialStmt.execute(); Expect>; { function dynamic(qb: T) { return qb.where(sql``); } const qbBase = db.delete(users).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { db .delete(users) .where(sql``) // @ts-expect-error method was already called .where(sql``); db .delete(users) .$dynamic() .where(sql``) .where(sql``); } { db.delete(users).where(sql``).limit(1).orderBy(sql``); } ================================================ FILE: drizzle-orm/type-tests/singlestore/insert.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { int, singlestoreTable, text } from '~/singlestore-core/index.ts'; import type { SingleStoreInsert } from '~/singlestore-core/index.ts'; import type { SingleStoreRawQueryResult } from '~/singlestore/index.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { users } from './tables.ts'; const singlestoreInsertReturning = await db.insert(users).values({ // ^? homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }).$returningId(); Expect>; const insert = await db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }); Expect>; const insertStmt = db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }).prepare(); const insertPrepared = await insertStmt.execute(); Expect>; const insertSql = await db.insert(users).values({ homeCity: sql`123`, class: 'A', age1: 1, enumCol: sql`foobar`, }); Expect>; const insertSqlStmt = db.insert(users).values({ homeCity: sql`123`, class: 'A', age1: 1, enumCol: sql`foobar`, }).prepare(); const insertSqlPrepared = await insertSqlStmt.execute(); Expect>; const insertReturning = await db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }); Expect>; const insertReturningStmt = db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }).prepare(); const insertReturningPrepared = await insertReturningStmt.execute(); Expect>; const insertReturningPartial = await db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }); Expect>; const insertReturningPartialStmt = db.insert(users).values({ homeCity: 1, class: 'A', age1: 1, enumCol: 'a', }).prepare(); const insertReturningPartialPrepared = await insertReturningPartialStmt.execute(); Expect>; const insertReturningSql = await db.insert(users).values({ homeCity: 1, class: 'A', age1: sql`2 + 2`, enumCol: 'a', }); Expect>; const insertReturningSqlStmt = db.insert(users).values({ homeCity: 1, class: 'A', age1: sql`2 + 2`, enumCol: 'a', }).prepare(); const insertReturningSqlPrepared = await insertReturningSqlStmt.execute(); Expect>; { const users = singlestoreTable('users', { id: int('id').autoincrement().primaryKey(), name: text('name').notNull(), age: int('age'), occupation: text('occupation'), }); await db.insert(users).values({ name: 'John Wick', age: 58, occupation: 'housekeeper' }); } { function dynamic(qb: T) { return qb.onDuplicateKeyUpdate({ set: {} }); } const qbBase = db.insert(users).values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0 }).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { db .insert(users) .values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0 }) .onDuplicateKeyUpdate({ set: {} }) // @ts-expect-error method was already called .onDuplicateKeyUpdate({ set: {} }); } ================================================ FILE: drizzle-orm/type-tests/singlestore/no-strict-null-checks/test.ts ================================================ import { drizzle } from '~/singlestore'; import { singlestoreTable, text } from '~/singlestore-core'; export const test = singlestoreTable( 'test', { id: text('id') .primaryKey() .generatedAlwaysAs('genstr'), name: text('name').$defaultFn(() => '' as string), title: text('title').notNull(), description: text('description'), dbdef: text('dbdef').default('dbdefval'), }, ); const db = drizzle.mock(); // Enable after `.generatedAlwaysAs()` is implemented // db.update(test) // .set({ // // @ts-expect-error // id: '1', // name: 'name', // title: 'title', // description: 'desc', // dbdef: 'upddef', // }); db.update(test) .set({ name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); // Enable after `.generatedAlwaysAs()` is implemented // db.insert(test).values({ // // @ts-expect-error // id: '1', // name: 'name', // title: 'title', // description: 'desc', // dbdef: 'upddef', // }); db.insert(test).values({ name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ title: 'title', description: 'desc', }); db.insert(test).values({ title: 'title', }); ================================================ FILE: drizzle-orm/type-tests/singlestore/no-strict-null-checks/tsconfig.json ================================================ { "extends": "../../tsconfig.json", "compilerOptions": { "noEmit": true, "strictNullChecks": false, "strictPropertyInitialization": false, "exactOptionalPropertyTypes": false }, "include": ["./test.ts"] } ================================================ FILE: drizzle-orm/type-tests/singlestore/select.ts ================================================ import { alias } from '~/singlestore-core/alias.ts'; import { and, between, eq, exists, gt, gte, ilike, inArray, isNotNull, isNull, like, lt, lte, ne, not, notBetween, notExists, notIlike, notInArray, notLike, or, } from '~/sql/expressions/index.ts'; import { param, sql } from '~/sql/sql.ts'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { int, QueryBuilder, type SingleStoreSelect, type SingleStoreSelectQueryBuilder, singlestoreTable, text, } from '~/singlestore-core/index.ts'; import { db } from './db.ts'; import { cities, classes, users } from './tables.ts'; const city = alias(cities, 'city'); const city1 = alias(cities, 'city1'); const leftJoinFull = await db.select().from(users).leftJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof cities.$inferSelect | null; }[], typeof leftJoinFull > >; const rightJoinFull = await db.select().from(users).rightJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect | null; city: typeof city.$inferSelect; }[], typeof rightJoinFull > >; const innerJoinFull = await db.select().from(users).innerJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof city.$inferSelect; }[], typeof innerJoinFull > >; const fullJoinFull = await db.select().from(users).fullJoin(city, eq(users.id, city.id)); Expect< Equal< { users_table: typeof users.$inferSelect | null; city: typeof city.$inferSelect | null; }[], typeof fullJoinFull > >; const crossJoinFull = await db.select().from(users).crossJoin(city); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof city.$inferSelect; }[], typeof crossJoinFull > >; const leftJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .leftJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number; userText: string | null; cityId: number | null; cityName: string | null; }[], typeof leftJoinFlat> >; const rightJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .rightJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number | null; userText: string | null; cityId: number; cityName: string; }[], typeof rightJoinFlat> >; const innerJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .innerJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number; userText: string | null; cityId: number; cityName: string; }[], typeof innerJoinFlat> >; const fullJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .fullJoin(city, eq(users.id, city.id)); Expect< Equal<{ userId: number | null; userText: string | null; cityId: number | null; cityName: string | null; }[], typeof fullJoinFlat> >; const crossJoinFlat = await db .select({ userId: users.id, userText: users.text, cityId: city.id, cityName: city.name, }) .from(users) .crossJoin(city); Expect< Equal<{ userId: number; userText: string | null; cityId: number; cityName: string; }[], typeof crossJoinFlat> >; const leftJoinMixed = await db .select({ id: users.id, text: users.text, textUpper: sql`upper(${users.text})`, idComplex: sql`${users.id}::text || ${city.id}::text`, city: { id: city.id, name: city.name, }, }) .from(users) .leftJoin(city, eq(users.id, city.id)); Expect< Equal< { id: number; text: string | null; textUpper: string | null; idComplex: string | null; city: { id: number; name: string; } | null; }[], typeof leftJoinMixed > >; const leftJoinMixed2 = await db .select({ id: users.id, text: users.text, foo: { bar: users.id, baz: cities.id, }, }) .from(users) .leftJoin(cities, eq(users.id, cities.id)); Expect< Equal< { id: number; text: string | null; foo: { bar: number; baz: number | null; }; }[], typeof leftJoinMixed2 > >; const join = await db .select({ users, cities, city, city1: { id: city1.id, }, }) .from(users) .leftJoin(cities, eq(users.id, cities.id)) .rightJoin(city, eq(city.id, users.id)) .rightJoin(city1, eq(city1.id, users.id)); Expect< Equal< { users: { id: number; text: string | null; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; } | null; cities: { id: number; name: string; population: number | null; } | null; city: { id: number; name: string; population: number | null; } | null; city1: { id: number; }; }[], typeof join > >; const join2 = await db .select({ userId: users.id, cityId: cities.id, }) .from(users) .fullJoin(cities, eq(users.id, cities.id)); Expect< Equal< { userId: number | null; cityId: number | null; }[], typeof join2 > >; const join3 = await db .select({ userId: users.id, cityId: cities.id, classId: classes.id, }) .from(users) .fullJoin(cities, eq(users.id, cities.id)) .rightJoin(classes, eq(users.id, classes.id)); Expect< Equal< { userId: number | null; cityId: number | null; classId: number; }[], typeof join3 > >; db .select() .from(users) .where(exists(db.select().from(cities).where(eq(users.homeCity, cities.id)))); function mapFunkyFuncResult(valueFromDriver: unknown) { return { foo: (valueFromDriver as Record)['foo'], }; } const age = 1; const allOperators = await db .select({ col2: sql`5 - ${users.id} + 1`, // unknown col3: sql`${users.id} + 1`, // number col33: sql`${users.id} + 1`.mapWith(users.id), // number col34: sql`${users.id} + 1`.mapWith(mapFunkyFuncResult), // number col4: sql`one_or_another(${users.id}, ${users.class})`, // string | number col5: sql`true`, // unknown col6: sql`true`, // boolean col7: sql`random()`, // number col8: sql`some_funky_func(${users.id})`.mapWith(mapFunkyFuncResult), // { foo: string } col9: sql`greatest(${users.createdAt}, ${param(new Date(), users.createdAt)})`, // unknown col10: sql`date_or_false(${users.createdAt}, ${param(new Date(), users.createdAt)})`, // Date | boolean col11: sql`${users.age1} + ${age}`, // unknown col12: sql`${users.age1} + ${param(age, users.age1)}`, // unknown col13: sql`lower(${users.class})`, // unknown col14: sql`length(${users.class})`, // number count: sql`count(*)::int`, // number }) .from(users) .where(and( eq(users.id, 1), ne(users.id, 1), or(eq(users.id, 1), ne(users.id, 1)), not(eq(users.id, 1)), gt(users.id, 1), gte(users.id, 1), lt(users.id, 1), lte(users.id, 1), inArray(users.id, [1, 2, 3]), inArray(users.id, db.select({ id: users.id }).from(users)), inArray(users.id, sql`select id from ${users}`), notInArray(users.id, [1, 2, 3]), notInArray(users.id, db.select({ id: users.id }).from(users)), notInArray(users.id, sql`select id from ${users}`), isNull(users.subClass), isNotNull(users.id), exists(db.select({ id: users.id }).from(users)), exists(sql`select id from ${users}`), notExists(db.select({ id: users.id }).from(users)), notExists(sql`select id from ${users}`), between(users.id, 1, 2), notBetween(users.id, 1, 2), like(users.id, '%1%'), notLike(users.id, '%1%'), ilike(users.id, '%1%'), notIlike(users.id, '%1%'), )); Expect< Equal<{ col2: unknown; col3: number; col33: number; col34: { foo: any }; col4: string | number; col5: unknown; col6: boolean; col7: number; col8: { foo: any; }; col9: unknown; col10: boolean | Date; col11: unknown; col12: unknown; col13: unknown; col14: number; count: number; }[], typeof allOperators> >; const textSelect = await db .select({ t: users.text, }) .from(users); Expect>; const homeCity = alias(cities, 'homeCity'); const c = alias(classes, 'c'); const otherClass = alias(classes, 'otherClass'); const anotherClass = alias(classes, 'anotherClass'); const friend = alias(users, 'friend'); const currentCity = alias(cities, 'currentCity'); const subscriber = alias(users, 'subscriber'); const closestCity = alias(cities, 'closestCity'); const megaJoin = await db .select({ user: { id: users.id, maxAge: sql`max(${users.age1})`, }, city: { id: cities.id, }, homeCity, c, otherClass, anotherClass, friend, currentCity, subscriber, closestCity, }) .from(users) .innerJoin(cities, sql`${users.id} = ${cities.id}`) .innerJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) .innerJoin(c, eq(c.id, users.class)) .innerJoin(otherClass, sql`${c.id} = ${otherClass.id}`) .innerJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) .innerJoin(friend, sql`${users.id} = ${friend.id}`) .innerJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) .innerJoin(subscriber, sql`${users.class} = ${subscriber.id}`) .innerJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) .limit(1) .offset(1); Expect< Equal< { user: { id: number; maxAge: unknown; }; city: { id: number; }; homeCity: { id: number; name: string; population: number | null; }; c: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; otherClass: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; anotherClass: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; friend: { id: number; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }; currentCity: { id: number; name: string; population: number | null; }; subscriber: { id: number; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }; closestCity: { id: number; name: string; population: number | null; }; }[], typeof megaJoin > >; const friends = alias(users, 'friends'); const join4 = await db .select({ user: { id: users.id, }, city: { id: cities.id, }, class: classes, friend: friends, }) .from(users) .innerJoin(cities, sql`${users.id} = ${cities.id}`) .innerJoin(classes, sql`${cities.id} = ${classes.id}`) .innerJoin(friends, sql`${friends.id} = ${users.id}`) .where(sql`${users.age1} > 0`); Expect< Equal<{ user: { id: number; }; city: { id: number; }; class: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; friend: { id: number; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; text: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }; }[], typeof join4> >; { const authenticated = false as boolean; const result = await db .select({ id: users.id, ...(authenticated ? { city: users.homeCity } : {}), }) .from(users); Expect< Equal< { id: number; city?: number; }[], typeof result > >; } await db.select().from(users).for('update'); await db.select().from(users).for('share', { skipLocked: true }); await db.select().from(users).for('update', { noWait: true }); // TODO Implement views for SingleStore (https://docs.singlestore.com/cloud/reference/sql-reference/data-definition-language-ddl/create-view/) /* await db .select() .from(users) // @ts-expect-error - can't use both skipLocked and noWait .for('share', { noWait: true, skipLocked: true }); { const result = await db.select().from(newYorkers); Expect< Equal< { userId: number; cityId: number | null; }[], typeof result > >; } { const result = await db.select({ userId: newYorkers.userId }).from(newYorkers); Expect< Equal< { userId: number; }[], typeof result > >; } */ { const query = db.select().from(users).prepare().iterator(); for await (const row of query) { Expect>(); } } { db .select() .from(users) .where(eq(users.id, 1)); db .select() .from(users) .where(eq(users.id, 1)) // @ts-expect-error - can't use where twice .where(eq(users.id, 1)); db .select() .from(users) .where(eq(users.id, 1)) .limit(10) // @ts-expect-error - can't use where twice .where(eq(users.id, 1)); } { function withFriends(qb: T) { const friends = alias(users, 'friends'); const friends2 = alias(users, 'friends2'); const friends3 = alias(users, 'friends3'); const friends4 = alias(users, 'friends4'); const friends5 = alias(users, 'friends5'); return qb .leftJoin(friends, sql`true`) .leftJoin(friends2, sql`true`) .leftJoin(friends3, sql`true`) .leftJoin(friends4, sql`true`) .leftJoin(friends5, sql`true`); } const qb = db.select().from(users).$dynamic(); const result = await withFriends(qb); Expect< Equal >; } { function withFriends(qb: T) { const friends = alias(users, 'friends'); const friends2 = alias(users, 'friends2'); const friends3 = alias(users, 'friends3'); const friends4 = alias(users, 'friends4'); const friends5 = alias(users, 'friends5'); return qb .leftJoin(friends, sql`true`) .leftJoin(friends2, sql`true`) .leftJoin(friends3, sql`true`) .leftJoin(friends4, sql`true`) .leftJoin(friends5, sql`true`); } const qb = db.select().from(users).$dynamic(); const result = await withFriends(qb); Expect< Equal >; } { function dynamic(qb: T) { return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); } const qb = db.select().from(users).$dynamic(); const result = await dynamic(qb); Expect>; } { // TODO: add to docs function dynamic(qb: T) { return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1).for('update'); } const query = new QueryBuilder().select().from(users).$dynamic(); dynamic(query); } { // TODO: add to docs function paginated(qb: T, page: number) { return qb.limit(10).offset((page - 1) * 10); } const qb = db.select().from(users).$dynamic(); const result = await paginated(qb, 1); Expect>; } { db .select() .from(users) .where(sql``) .limit(10) // @ts-expect-error method was already called .where(sql``); db .select() .from(users) .having(sql``) .limit(10) // @ts-expect-error method was already called .having(sql``); db .select() .from(users) .groupBy(sql``) .limit(10) // @ts-expect-error method was already called .groupBy(sql``); db .select() .from(users) .orderBy(sql``) .limit(10) // @ts-expect-error method was already called .orderBy(sql``); db .select() .from(users) .limit(10) .where(sql``) // @ts-expect-error method was already called .limit(10); db .select() .from(users) .offset(10) .limit(10) // @ts-expect-error method was already called .offset(10); db .select() .from(users) .for('update') .limit(10) // @ts-expect-error method was already called .for('update'); } { const table1 = singlestoreTable('table1', { id: int().primaryKey(), name: text().notNull(), }); const table2 = singlestoreTable('table2', { id: int().primaryKey(), age: int().notNull(), table1Id: int().notNull(), }); const leftLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).leftJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`, sql`true`); Expect< Equal >; const leftLateralSubRes = await db.select().from(table1).leftJoinLateral( db.select().from(table2).as('sub'), sql`true`, ); Expect< Equal >; const sqLeftLateral = db.select().from(table2).as('sub'); const leftLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqLeftLateral.id, }, ).from(table1).leftJoinLateral( sqLeftLateral, sql`true`, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .leftJoinLateral(table2, sql`true`); const innerLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).innerJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`, sql`true`); Expect< Equal >; const innerLateralSubRes = await db.select().from(table1).innerJoinLateral( db.select().from(table2).as('sub'), sql`true`, ); Expect< Equal >; const sqInnerLateral = db.select().from(table2).as('sub'); const innerLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqLeftLateral.id, }, ).from(table1).innerJoinLateral( sqInnerLateral, sql`true`, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .innerJoinLateral(table2, sql`true`); const crossLateralRawRes = await db.select({ table1, sqId: sql`${sql.identifier('t2')}.${sql.identifier('id')}`.as('sqId'), }).from(table1).crossJoinLateral(sql`(SELECT * FROM ${table2}) as ${sql.identifier('t2')}`); Expect< Equal >; const crossLateralSubRes = await db.select().from(table1).crossJoinLateral( db.select().from(table2).as('sub'), ); Expect< Equal >; const sqCrossLateral = db.select().from(table2).as('sub'); const crossLateralSubSelectionRes = await db.select( { id: table1.id, sId: sqCrossLateral.id, }, ).from(table1).crossJoinLateral( sqInnerLateral, ); Expect< Equal >; await db.select().from(table1) // @ts-expect-error .crossJoinLateral(table2); } ================================================ FILE: drizzle-orm/type-tests/singlestore/set-operators.ts ================================================ import { type Equal, Expect } from 'type-tests/utils.ts'; import { intersect, type SingleStoreSetOperator, union, unionAll } from '~/singlestore-core/index.ts'; import { eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/index.ts'; import { db } from './db.ts'; import { cities, classes, users } from './tables.ts'; const unionTest = await db .select({ id: users.id }) .from(users) .union( db .select({ id: users.id }) .from(users), ); Expect>; const unionAllTest = await db .select({ id: users.id, age: users.age1 }) .from(users) .unionAll( db.select({ id: users.id, age: users.age1 }) .from(users) .leftJoin(cities, eq(users.id, cities.id)), ); Expect>; const intersectTest = await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) .intersect(({ intersect }) => intersect( db .select({ id: users.id, homeCity: users.homeCity }) .from(users), db .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) .from(users), ) ); Expect>; const exceptTest = await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) .except( db .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) .from(users), ); Expect>; const minusTest = await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) .minus( db .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) .from(users), ); Expect>; const union2Test = await union(db.select().from(cities), db.select().from(cities), db.select().from(cities)); Expect>; const unionAll2Test = await unionAll( db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select().from(cities), ); Expect>; const intersect2Test = await intersect( db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), ); Expect>; // TODO Implement views for SingleStore (https://docs.singlestore.com/cloud/reference/sql-reference/data-definition-language-ddl/create-view/) /* const except2Test = await except( db.select({ userId: newYorkers.userId, }) .from(newYorkers), db.select({ userId: newYorkers.userId, }).from(newYorkers), ); Expect>; */ const unionfull = await union(db.select().from(users), db.select().from(users)).orderBy(sql``).limit(1).offset(2); Expect< Equal<{ id: number; text: string | null; homeCity: number; currentCity: number | null; serialNullable: number; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }[], typeof unionfull> >; union(db.select().from(users), db.select().from(users)) .orderBy(sql``) // @ts-expect-error - method was already called .orderBy(sql``); union(db.select().from(users), db.select().from(users)) .offset(1) // @ts-expect-error - method was already called .offset(2); union(db.select().from(users), db.select().from(users)) .orderBy(sql``) // @ts-expect-error - method was already called .orderBy(sql``); { function dynamic(qb: T) { return qb.orderBy(sql``).limit(1).offset(2); } const qb = union(db.select().from(users), db.select().from(users)).$dynamic(); const result = await dynamic(qb); Expect>; } await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error .intersect(({ intersect }) => intersect(db.select().from(users), db.select().from(users))); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select().from(classes).union(db.select({ id: classes.id }).from(classes)); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: classes.id }).from(classes).union(db.select().from(classes).where(sql``)); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: classes.id }).from(classes).union(db.select().from(classes)); union( db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), db.select({ id: cities.id, name: cities.name }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select().from(cities), ); union( db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities), db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``).limit(3).$dynamic(), db.select({ id: cities.id, name: cities.name }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name }).from(cities), db.select({ id: cities.id }).from(cities), /* db.select({ id: newYorkers.userId }).from(newYorkers), */ db.select({ id: cities.id }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities).where(sql``), db.select({ id: sql`${cities.id}` }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities).where(sql``), ); ================================================ FILE: drizzle-orm/type-tests/singlestore/subquery.ts ================================================ import { Expect } from 'type-tests/utils.ts'; import { alias, int, serial, singlestoreTable, text } from '~/singlestore-core/index.ts'; import { and, eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import type { DrizzleTypeError, Equal } from '~/utils.ts'; import { db } from './db.ts'; const names = singlestoreTable('names', { id: serial('id').primaryKey(), name: text('name'), authorId: int('author_id'), }); const n1 = db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: sql`count(1)::int`.as('count1'), }) .from(names) .groupBy(names.id, names.name, names.authorId) .as('n1'); const n2 = db .select({ id: names.id, authorId: names.authorId, totalCount: sql`count(1)::int`.as('totalCount'), }) .from(names) .groupBy(names.id, names.authorId) .as('n2'); const result = await db .select({ name: n1.name, authorId: n1.authorId, count1: n1.count1, totalCount: n2.totalCount, }) .from(n1) .innerJoin(n2, and(eq(n2.id, n1.id), eq(n2.authorId, n1.authorId))); Expect< Equal< { name: string | null; authorId: number | null; count1: number; totalCount: number; }[], typeof result > >; const names2 = alias(names, 'names2'); const sq1 = db .select({ id: names.id, name: names.name, id2: names2.id, }) .from(names) .leftJoin(names2, eq(names.name, names2.name)) .as('sq1'); const res = await db.select().from(sq1); Expect< Equal< { id: number; name: string | null; id2: number | null; }[], typeof res > >; { const sq = db.select({ count: sql`count(1)::int` }).from(names).as('sq'); Expect ? true : false>; } const sqUnion = db.select().from(names).union(db.select().from(names2)).as('sqUnion'); const resUnion = await db.select().from(sqUnion); Expect< Equal<{ id: number; name: string | null; authorId: number | null; }[], typeof resUnion> >; ================================================ FILE: drizzle-orm/type-tests/singlestore/tables.ts ================================================ import * as crypto from 'node:crypto'; import { type Equal, Expect } from 'type-tests/utils.ts'; import type { BuildColumn } from '~/column-builder.ts'; import { bigint, binary, boolean, char, customType, date, datetime, decimal, double, float, index, int, json, longtext, mediumint, mediumtext, primaryKey, real, serial, type SingleStoreColumn, singlestoreEnum, singlestoreTable, smallint, text, time, timestamp, tinyint, tinytext, unique, uniqueIndex, varbinary, varchar, vector, year, } from '~/singlestore-core/index.ts'; import { singlestoreSchema } from '~/singlestore-core/schema.ts'; import { eq } from '~/sql/expressions/index.ts'; /* import { singlestoreView, type SingleStoreViewWithSelection } from '~/singlestore-core/view.ts'; */ import type { InferSelectModel } from '~/table.ts'; import type { Simplify } from '~/utils.ts'; import { db } from './db.ts'; export const users = singlestoreTable( 'users_table', { id: serial('id').primaryKey(), homeCity: int('home_city') .notNull(), currentCity: int('current_city'), serialNullable: serial('serial1'), serialNotNull: serial('serial2').notNull(), class: text('class', { enum: ['A', 'C'] }).notNull(), subClass: text('sub_class', { enum: ['B', 'D'] }), text: text('text'), age1: int('age1').notNull(), createdAt: timestamp('created_at', { mode: 'date' }).notNull().defaultNow(), enumCol: singlestoreEnum('enum_col', ['a', 'b', 'c']).notNull(), }, (users) => ({ usersAge1Idx: uniqueIndex('usersAge1Idx').on(users.class), usersAge2Idx: index('usersAge2Idx').on(users.class), uniqueClass: uniqueIndex('uniqueClass') .on(users.class, users.subClass) .lock('default') .algorithm('copy') .using(`btree`), pk: primaryKey(users.age1, users.class), }), ); export const cities = singlestoreTable('cities_table', { id: serial('id').primaryKey(), name: text('name_db').notNull(), population: int('population').default(0), }, (cities) => ({ citiesNameIdx: index('citiesNameIdx').on(cities.id), })); Expect< Equal< { id: SingleStoreColumn< { name: 'id'; tableName: 'cities_table'; dataType: 'number'; columnType: 'SingleStoreSerial'; data: number; driverParam: number; notNull: true; hasDefault: true; isPrimaryKey: true; isAutoincrement: true; hasRuntimeDefault: false; enumValues: undefined; baseColumn: never; identity: undefined; generated: undefined; }, {}, {} >; name: SingleStoreColumn< { name: 'name_db'; tableName: 'cities_table'; dataType: 'string'; columnType: 'SingleStoreText'; data: string; driverParam: string; notNull: true; hasDefault: false; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; enumValues: [string, ...string[]]; baseColumn: never; identity: undefined; generated: undefined; }, {}, {} >; population: SingleStoreColumn< { name: 'population'; tableName: 'cities_table'; dataType: 'number'; columnType: 'SingleStoreInt'; data: number; driverParam: string | number; notNull: false; hasDefault: true; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; enumValues: undefined; baseColumn: never; identity: undefined; generated: undefined; }, {}, {} >; }, typeof cities._.columns > >; Expect< Equal<{ id: number; name_db: string; population: number | null; }, InferSelectModel> >; Expect< Equal<{ id?: number; name: string; population?: number | null; }, typeof cities.$inferInsert> >; export const customSchema = singlestoreSchema('custom_schema'); export const citiesCustom = customSchema.table('cities_table', { id: serial('id').primaryKey(), name: text('name_db').notNull(), population: int('population').default(0), }, (cities) => ({ citiesNameIdx: index('citiesNameIdx').on(cities.id), })); Expect>; export const classes = singlestoreTable('classes_table', ({ serial, text }) => ({ id: serial('id').primaryKey(), class: text('class', { enum: ['A', 'C'] }), subClass: text('sub_class', { enum: ['B', 'D'] }).notNull(), })); // TODO Implement views for SingleStore (https://docs.singlestore.com/cloud/reference/sql-reference/data-definition-language-ddl/create-view/) /* export const classes2 = singlestoreTable('classes_table', { id: serial().primaryKey(), class: text({ enum: ['A', 'C'] }).$dbName('class_db'), subClass: text({ enum: ['B', 'D'] }).notNull(), }); */ /* export const newYorkers = singlestoreView('new_yorkers') .algorithm('merge') .sqlSecurity('definer') .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); Expect< Equal< SingleStoreViewWithSelection<'new_yorkers', false, { userId: SingleStoreColumn<{ name: 'id'; dataType: 'number'; columnType: 'SingleStoreSerial'; data: number; driverParam: number; notNull: true; hasDefault: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: true; isAutoincrement: true; hasRuntimeDefault: false; }>; cityId: SingleStoreColumn<{ name: 'id'; dataType: 'number'; columnType: 'SingleStoreSerial'; data: number; driverParam: number; notNull: false; hasDefault: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: true; isAutoincrement: true; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; { const newYorkers = customSchema.view('new_yorkers') .algorithm('merge') .sqlSecurity('definer') .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); Expect< Equal< SingleStoreViewWithSelection<'new_yorkers', false, { userId: SingleStoreColumn<{ name: 'id'; dataType: 'number'; columnType: 'SingleStoreSerial'; data: number; driverParam: number; notNull: true; hasDefault: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: true; isAutoincrement: true; hasRuntimeDefault: false; }>; cityId: SingleStoreColumn<{ name: 'id'; dataType: 'number'; columnType: 'SingleStoreSerial'; data: number; driverParam: number; notNull: false; hasDefault: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: true; isAutoincrement: true; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = singlestoreView('new_yorkers', { userId: int('user_id').notNull(), cityId: int('city_id'), }) .algorithm('merge') .sqlSecurity('definer') .as( sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ eq(cities.id, users.homeCity) } where ${gt(users.age1, 18)}`, ); Expect< Equal< SingleStoreViewWithSelection<'new_yorkers', false, { userId: SingleStoreColumn<{ name: 'user_id'; dataType: 'number'; columnType: 'SingleStoreInt'; data: number; driverParam: string | number; hasDefault: false; notNull: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: SingleStoreColumn<{ name: 'city_id'; notNull: false; hasDefault: false; dataType: 'number'; columnType: 'SingleStoreInt'; data: number; driverParam: string | number; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = customSchema.view('new_yorkers', { userId: int('user_id').notNull(), cityId: int('city_id'), }) .algorithm('merge') .sqlSecurity('definer') .as( sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ eq(cities.id, users.homeCity) } where ${gt(users.age1, 18)}`, ); Expect< Equal< SingleStoreViewWithSelection<'new_yorkers', false, { userId: SingleStoreColumn<{ name: 'user_id'; dataType: 'number'; columnType: 'SingleStoreInt'; data: number; driverParam: string | number; hasDefault: false; notNull: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: SingleStoreColumn<{ name: 'city_id'; notNull: false; hasDefault: false; dataType: 'number'; columnType: 'SingleStoreInt'; data: number; driverParam: string | number; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = singlestoreView('new_yorkers', { userId: int('user_id').notNull(), cityId: int('city_id'), }).existing(); Expect< Equal< SingleStoreViewWithSelection<'new_yorkers', true, { userId: SingleStoreColumn<{ name: 'user_id'; dataType: 'number'; columnType: 'SingleStoreInt'; data: number; driverParam: string | number; hasDefault: false; notNull: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: SingleStoreColumn<{ name: 'city_id'; notNull: false; hasDefault: false; dataType: 'number'; columnType: 'SingleStoreInt'; data: number; driverParam: string | number; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } { const newYorkers = customSchema.view('new_yorkers', { userId: int('user_id').notNull(), cityId: int('city_id'), }).existing(); Expect< Equal< SingleStoreViewWithSelection<'new_yorkers', true, { userId: SingleStoreColumn<{ name: 'user_id'; dataType: 'number'; columnType: 'SingleStoreInt'; data: number; driverParam: string | number; hasDefault: false; notNull: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; cityId: SingleStoreColumn<{ name: 'city_id'; notNull: false; hasDefault: false; dataType: 'number'; columnType: 'SingleStoreInt'; data: number; driverParam: string | number; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; }>; }>, typeof newYorkers > >; } */ { const customText = customType<{ data: string }>({ dataType() { return 'text'; }, }); const t = customText('name').notNull(); Expect< Equal< { name: 'name'; tableName: 'table'; dataType: 'custom'; columnType: 'SingleStoreCustomColumn'; data: string; driverParam: unknown; notNull: true; hasDefault: false; isPrimaryKey: false; isAutoincrement: false; hasRuntimeDefault: false; enumValues: undefined; baseColumn: never; identity: undefined; generated: undefined; brand: 'Column'; dialect: 'singlestore'; }, Simplify['_']> > >; } { singlestoreTable('test', { bigint: bigint('bigint', { mode: 'bigint' }), number: bigint('number', { mode: 'number' }), date: date('date').default(new Date()), date2: date('date2', { mode: 'date' }).default(new Date()), date3: date('date3', { mode: 'string' }).default('2020-01-01'), date4: date('date4', { mode: undefined }).default(new Date()), datetime: datetime('datetime').default(new Date()), datetime2: datetime('datetime2', { mode: 'date' }).default(new Date()), datetime3: datetime('datetime3', { mode: 'string' }).default('2020-01-01'), datetime4: datetime('datetime4', { mode: undefined }).default(new Date()), timestamp: timestamp('timestamp').default(new Date()), timestamp2: timestamp('timestamp2', { mode: 'date' }).default(new Date()), timestamp3: timestamp('timestamp3', { mode: 'string' }).default('2020-01-01'), timestamp4: timestamp('timestamp4', { mode: undefined }).default(new Date()), }); } { singlestoreTable('test', { col1: decimal('col1').default('1'), }); } { const test = singlestoreTable('test', { test1: singlestoreEnum('test', ['a', 'b', 'c'] as const).notNull(), test2: singlestoreEnum('test', ['a', 'b', 'c']).notNull(), test3: varchar('test', { length: 255, enum: ['a', 'b', 'c'] as const }).notNull(), test4: varchar('test', { length: 255, enum: ['a', 'b', 'c'] }).notNull(), test5: text('test', { enum: ['a', 'b', 'c'] as const }).notNull(), test6: text('test', { enum: ['a', 'b', 'c'] }).notNull(), test7: tinytext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), test8: tinytext('test', { enum: ['a', 'b', 'c'] }).notNull(), test9: mediumtext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), test10: mediumtext('test', { enum: ['a', 'b', 'c'] }).notNull(), test11: longtext('test', { enum: ['a', 'b', 'c'] as const }).notNull(), test12: longtext('test', { enum: ['a', 'b', 'c'] }).notNull(), test13: char('test', { enum: ['a', 'b', 'c'] as const }).notNull(), test14: char('test', { enum: ['a', 'b', 'c'] }).notNull(), test15: text('test').notNull(), }); Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; } /* { // All types with generated columns const test = singlestoreTable('test', { test1: singlestoreEnum('test', ['a', 'b', 'c'] as const).generatedAlwaysAs(sql``), test2: singlestoreEnum('test', ['a', 'b', 'c']).generatedAlwaysAs(sql``), test3: varchar('test', { length: 255, enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test4: varchar('test', { length: 255, enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test5: text('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test6: text('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test7: tinytext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test8: tinytext('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test9: mediumtext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test10: mediumtext('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test11: longtext('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test12: longtext('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test13: char('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test14: char('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test15: text('test').generatedAlwaysAs(sql``), }); Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; Expect>; } */ { const getUsersTable = (schemaName: TSchema) => { return singlestoreSchema(schemaName).table('users', { id: int('id').primaryKey(), name: text('name').notNull(), }); }; const users1 = getUsersTable('id1'); Expect>; const users2 = getUsersTable('id2'); Expect>; } { const internalStaff = singlestoreTable('internal_staff', { userId: int('user_id').notNull(), }); const customUser = singlestoreTable('custom_user', { id: int('id').notNull(), }); const ticket = singlestoreTable('ticket', { staffId: int('staff_id').notNull(), }); const subq = db .select() .from(internalStaff) .leftJoin( customUser, eq(internalStaff.userId, customUser.id), ).as('internal_staff'); const mainQuery = await db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); Expect< Equal<{ internal_staff: { internal_staff: { userId: number; }; custom_user: { id: number | null; }; } | null; ticket: { staffId: number; }; }[], typeof mainQuery> >; } // TODO Implement views for SingleStore (https://docs.singlestore.com/cloud/reference/sql-reference/data-definition-language-ddl/create-view/) /* { const newYorkers = singlestoreView('new_yorkers') .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); await db.select().from(newYorkers).leftJoin(newYorkers, eq(newYorkers.userId, newYorkers.userId)); } */ { const test = singlestoreTable('test', { id: text('id').$defaultFn(() => crypto.randomUUID()).primaryKey(), }); Expect< Equal<{ id?: string; }, typeof test.$inferInsert> >; } { singlestoreTable('test', { id: int('id').$default(() => 1), id2: int('id').$defaultFn(() => 1), // @ts-expect-error - should be number id3: int('id').$default(() => '1'), // @ts-expect-error - should be number id4: int('id').$defaultFn(() => '1'), }); } { const emailLog = singlestoreTable( 'email_log', { id: int('id', { unsigned: true }).autoincrement().notNull(), clientId: int('id_client', { unsigned: true }), receiverEmail: varchar('receiver_email', { length: 255 }).notNull(), messageId: varchar('message_id', { length: 255 }), contextId: int('context_id', { unsigned: true }), contextType: singlestoreEnum('context_type', ['test']).$type<['test']>(), action: varchar('action', { length: 80 }).$type<['test']>(), events: json('events').$type<{ t: 'test' }[]>(), createdAt: timestamp('created_at', { mode: 'string' }).defaultNow().notNull(), updatedAt: timestamp('updated_at', { mode: 'string' }).defaultNow().onUpdateNow(), }, (table) => { return { emailLogId: primaryKey({ columns: [table.id], name: 'email_log_id' }), emailLogMessageIdUnique: unique('email_log_message_id_unique').on(table.messageId), }; }, ); Expect< Equal<{ receiverEmail: string; id?: number | undefined; createdAt?: string | undefined; clientId?: number | null | undefined; messageId?: string | null | undefined; contextId?: number | null | undefined; contextType?: ['test'] | null | undefined; action?: ['test'] | null | undefined; events?: | { t: 'test'; }[] | null | undefined; updatedAt?: string | null | undefined; }, typeof emailLog.$inferInsert> >; } { const customRequiredConfig = customType<{ data: string; driverData: string; config: { length: number }; configRequired: true; }>({ dataType(config) { Expect>; return `varchar(${config.length})`; }, toDriver(value) { Expect>(); return value; }, fromDriver(value) { Expect>(); return value; }, }); customRequiredConfig('t', { length: 10 }); customRequiredConfig({ length: 10 }); // @ts-expect-error - config is required customRequiredConfig('t'); // @ts-expect-error - config is required customRequiredConfig(); } { const customOptionalConfig = customType<{ data: string; driverData: string; config: { length: number }; }>({ dataType(config) { Expect>; return config ? `varchar(${config.length})` : `text`; }, toDriver(value) { Expect>(); return value; }, fromDriver(value) { Expect>(); return value; }, }); customOptionalConfig('t', { length: 10 }); customOptionalConfig('t'); customOptionalConfig({ length: 10 }); customOptionalConfig(); } { singlestoreTable('all_columns', { bigint: bigint('bigint', { mode: 'number' }), bigint2: bigint('bigint', { mode: 'number', unsigned: true }), bigintdef: bigint('bigintdef', { mode: 'number' }).default(0), binary: binary('binary'), binary1: binary('binary1', { length: 1 }), binarydef: binary('binarydef').default(''), boolean: boolean('boolean'), booleandef: boolean('booleandef').default(false), char: char('char'), char2: char('char2', { length: 1 }), char3: char('char3', { enum: ['a', 'b', 'c'] }), char4: char('char4', { length: 1, enum: ['a', 'b', 'c'] }), chardef: char('chardef').default(''), date: date('date'), date2: date('date2', { mode: 'string' }), datedef: date('datedef').default(new Date()), datetime: datetime('datetime'), datetime2: datetime('datetime2', { mode: 'string' }), datetimedef: datetime('datetimedef').default(new Date()), decimal: decimal('decimal'), decimal2: decimal('decimal2', { precision: 10 }), decimal3: decimal('decimal3', { scale: 2 }), decimal4: decimal('decimal4', { precision: 10, scale: 2 }), decimaldef: decimal('decimaldef').default('0'), double: double('double'), double2: double('double2', { precision: 10 }), double3: double('double3', { scale: 2 }), double4: double('double4', { precision: 10, scale: 2 }), doubledef: double('doubledef').default(0), enum: singlestoreEnum('enum', ['a', 'b', 'c']), enumdef: singlestoreEnum('enumdef', ['a', 'b', 'c']).default('a'), float: float('float'), float2: float('float2', { precision: 10 }), float3: float('float3', { scale: 2 }), float4: float('float4', { precision: 10, scale: 2 }), floatdef: float('floatdef').default(0), int: int('int'), int2: int('int2', { unsigned: true }), intdef: int('intdef').default(0), json: json('json'), jsondef: json('jsondef').default({}), mediumint: mediumint('mediumint'), mediumint2: mediumint('mediumint2', { unsigned: true }), mediumintdef: mediumint('mediumintdef').default(0), real: real('real'), real2: real('real2', { precision: 10 }), real3: real('real3', { scale: 2 }), real4: real('real4', { precision: 10, scale: 2 }), realdef: real('realdef').default(0), serial: serial('serial'), serialdef: serial('serialdef').default(0), smallint: smallint('smallint'), smallint2: smallint('smallint2', { unsigned: true }), smallintdef: smallint('smallintdef').default(0), text: text('text'), text2: text('text2', { enum: ['a', 'b', 'c'] }), textdef: text('textdef').default(''), tinytext: tinytext('tinytext'), tinytext2: tinytext('tinytext2', { enum: ['a', 'b', 'c'] }), tinytextdef: tinytext('tinytextdef').default(''), mediumtext: mediumtext('mediumtext'), mediumtext2: mediumtext('mediumtext2', { enum: ['a', 'b', 'c'] }), mediumtextdef: mediumtext('mediumtextdef').default(''), longtext: longtext('longtext'), longtext2: longtext('longtext2', { enum: ['a', 'b', 'c'] }), longtextdef: longtext('longtextdef').default(''), time: time('time'), timedef: time('timedef').default('00:00:00'), timestamp: timestamp('timestamp'), timestamp2: timestamp('timestamp2', { mode: 'string' }), timestamp3: timestamp('timestamp3', { mode: 'string' }), timestamp4: timestamp('timestamp4'), timestampdef: timestamp('timestampdef').default(new Date()), tinyint: tinyint('tinyint'), tinyint2: tinyint('tinyint2', { unsigned: true }), tinyintdef: tinyint('tinyintdef').default(0), varbinary: varbinary('varbinary', { length: 1 }), varbinarydef: varbinary('varbinarydef', { length: 1 }).default(''), varchar: varchar('varchar', { length: 1 }), varchar2: varchar('varchar2', { length: 1, enum: ['a', 'b', 'c'] }), varchardef: varchar('varchardef', { length: 1 }).default(''), vector: vector('vector', { dimensions: 1 }), vector2: vector('vector2', { dimensions: 1, elementType: 'I8' }), year: year('year'), yeardef: year('yeardef').default(0), }); } { const keysAsColumnNames = singlestoreTable('test', { id: int(), name: text(), }); Expect>; Expect>; } { singlestoreTable('all_columns_without_name', { bigint: bigint({ mode: 'number' }), bigint2: bigint({ mode: 'number', unsigned: true }), bigintdef: bigint({ mode: 'number' }).default(0), binary: binary(), binrary1: binary({ length: 1 }), binarydef: binary().default(''), boolean: boolean(), booleandef: boolean().default(false), char: char(), char2: char({ length: 1 }), char3: char({ enum: ['a', 'b', 'c'] }), char4: char({ length: 1, enum: ['a', 'b', 'c'] }), chardef: char().default(''), date: date(), date2: date({ mode: 'string' }), datedef: date('datedef').default(new Date()), datetime: datetime(), datetime2: datetime({ mode: 'string' }), datetimedef: datetime('datetimedef').default(new Date()), decimal: decimal(), decimal2: decimal({ precision: 10 }), decimal3: decimal({ scale: 2 }), decimal4: decimal({ precision: 10, scale: 2 }), decimaldef: decimal('decimaldef').default('0'), double: double(), double2: double({ precision: 10 }), double3: double({ scale: 2 }), double4: double({ precision: 10, scale: 2 }), doubledef: double().default(0), enum: singlestoreEnum(['a', 'b', 'c']), enumdef: singlestoreEnum(['a', 'b', 'c']).default('a'), float: float(), float2: float({ precision: 10 }), float3: float({ scale: 2 }), float4: float({ precision: 10, scale: 2 }), floatdef: float().default(0), int: int(), int2: int({ unsigned: true }), intdef: int().default(0), json: json(), jsondef: json().default({}), mediumint: mediumint(), mediumint2: mediumint({ unsigned: true }), mediumintdef: mediumint().default(0), real: real(), real2: real({ precision: 10 }), real3: real({ scale: 2 }), real4: real({ precision: 10, scale: 2 }), realdef: real().default(0), serial: serial(), serialdef: serial().default(0), smallint: smallint(), smallint2: smallint({ unsigned: true }), smallintdef: smallint().default(0), text: text(), text2: text({ enum: ['a', 'b', 'c'] }), textdef: text().default(''), tinytext: tinytext(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }), tinytextdef: tinytext().default(''), mediumtext: mediumtext(), mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }), mediumtextdef: mediumtext().default(''), longtext: longtext(), longtext2: longtext({ enum: ['a', 'b', 'c'] }), longtextdef: longtext().default(''), time: time(), timedef: time().default('00:00:00'), timestamp: timestamp(), timestamp2: timestamp({ mode: 'string' }), timestamp3: timestamp({ mode: 'string' }), timestamp4: timestamp(), timestampdef: timestamp().default(new Date()), tinyint: tinyint(), tinyint2: tinyint({ unsigned: true }), tinyintdef: tinyint().default(0), varbinary: varbinary({ length: 1 }), varbinarydef: varbinary({ length: 1 }).default(''), varchar: varchar({ length: 1 }), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }), varchardef: varchar({ length: 1 }).default(''), vector: vector({ dimensions: 1 }), vector2: vector({ dimensions: 1, elementType: 'I8' }), year: year(), yeardef: year().default(0), }); } ================================================ FILE: drizzle-orm/type-tests/singlestore/update.ts ================================================ import { type Equal, Expect } from 'type-tests/utils.ts'; import type { SingleStoreUpdate } from '~/singlestore-core/index.ts'; import type { SingleStoreRawQueryResult } from '~/singlestore/session.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; import { users } from './tables.ts'; { function dynamic(qb: T) { return qb.where(sql``); } const qbBase = db.update(users).set({}).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { db .update(users) .set({}) .where(sql``) // @ts-expect-error method was already called .where(sql``); } { db.update(users).set({}).where(sql``).limit(1).orderBy(sql``); } ================================================ FILE: drizzle-orm/type-tests/singlestore/with.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { int, serial, singlestoreTable, text } from '~/singlestore-core/index.ts'; import { gt, inArray } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { db } from './db.ts'; const orders = singlestoreTable('orders', { id: serial('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull(), amount: int('amount').notNull(), quantity: int('quantity').notNull(), /* generated: text('generatedText').generatedAlwaysAs(sql``), */ }); { const regionalSales = db .$with('regional_sales') .as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = db .$with('top_regions') .as( db .select({ region: orders.region, totalSales: orders.amount, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`sum(${orders.quantity})`, productSales: sql`sum(${orders.amount})`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))); Expect< Equal<{ region: string; product: string; productUnits: number; productSales: number; }[], typeof result> >; const allOrdersWith = db.$with('all_orders_with').as(db.select().from(orders)); const allFromWith = await db.with(allOrdersWith).select().from(allOrdersWith); Expect< Equal<{ id: number; region: string; product: string; amount: number; quantity: number; /* generated: string | null; */ }[], typeof allFromWith> >; } { const providers = singlestoreTable('providers', { id: serial().primaryKey(), providerName: text().notNull(), }); const sq1 = db.$with('providers_sq', { name: providers.providerName, }).as(sql`select provider_name as name from providers`); const q1 = await db.with(sq1).select().from(sq1); Expect>; const sq2 = db.$with('providers_sq', { nested: { id: providers.id, }, }).as(() => sql`select id from providers`); const q2 = await db.with(sq2).select().from(sq2); Expect>; // @ts-expect-error db.$with('providers_sq', { name: providers.providerName }).as(db.select().from(providers)); // @ts-expect-error db.$with('providers_sq', { name: providers.providerName }).as((qb) => qb.select().from(providers)); } ================================================ FILE: drizzle-orm/type-tests/sqlite/.gitignore ================================================ ./database.db ================================================ FILE: drizzle-orm/type-tests/sqlite/count.ts ================================================ import { Expect } from 'type-tests/utils.ts'; import { and, gt, ne } from '~/sql/expressions/index.ts'; import { integer, sqliteTable, text } from '~/sqlite-core/index.ts'; import type { Equal } from '~/utils.ts'; import { db } from './db.ts'; const names = sqliteTable('names', { id: integer('id').primaryKey(), name: text('name'), authorId: integer('author_id'), }); const separate = await db.$count(names); const separateFilters = await db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))); const embedded = await db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: db.$count(names).as('count1'), }) .from(names); const embeddedFilters = await db .select({ id: names.id, name: names.name, authorId: names.authorId, count1: db.$count(names, and(gt(names.id, 1), ne(names.name, 'forbidden'))).as('count1'), }) .from(names); Expect>; Expect>; Expect< Equal< { id: number; name: string | null; authorId: number | null; count1: number; }[], typeof embedded > >; Expect< Equal< { id: number; name: string | null; authorId: number | null; count1: number; }[], typeof embeddedFilters > >; ================================================ FILE: drizzle-orm/type-tests/sqlite/db.ts ================================================ import Database from 'better-sqlite3'; import { Database as BunDatabase } from 'bun:sqlite'; import { drizzle as drizzleBetterSqlite3 } from '~/better-sqlite3/index.ts'; import { drizzle as drizzleBun } from '~/bun-sqlite/index.ts'; import { drizzle as drizzleD1 } from '~/d1/index.ts'; import { drizzle as durableSqlite } from '~/durable-sqlite/index.ts'; const client = new Database(':memory:'); const bunClient = new BunDatabase(':memory:'); declare const d1: D1Database; declare const durableSql: DurableObjectStorage; export const db = drizzleBetterSqlite3(client); export const bunDb = drizzleBun(bunClient); export const d1Db = drizzleD1(d1); export const durableSqliteDb = durableSqlite(durableSql); ================================================ FILE: drizzle-orm/type-tests/sqlite/delete.ts ================================================ import type { RunResult } from 'better-sqlite3'; import { eq } from '~/sql/expressions/index.ts'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { sql } from '~/sql/sql.ts'; import type { SQLiteDelete } from '~/sqlite-core/index.ts'; import type { DrizzleTypeError } from '~/utils.ts'; import { bunDb, db } from './db.ts'; import { users } from './tables.ts'; const deleteRun = db.delete(users).run(); Expect>; const deleteAll = db.delete(users).all(); Expect, typeof deleteAll>>; const deleteGet = db.delete(users).get(); Expect, typeof deleteGet>>; const deleteValues = db.delete(users).values(); Expect, typeof deleteValues>>; const deleteRunBun = bunDb.delete(users).run(); Expect>; const deleteAllBun = bunDb.delete(users).all(); Expect, typeof deleteAllBun>>; const deleteGetBun = bunDb.delete(users).get(); Expect, typeof deleteGetBun>>; const deleteValuesBun = bunDb.delete(users).values(); Expect, typeof deleteValuesBun>>; const deleteRunWhere = db.delete(users).where(eq(users.id, 1)).run(); Expect>; const deleteAllWhere = db.delete(users).where(eq(users.id, 1)).all(); Expect, typeof deleteAllWhere>>; const deleteGetWhere = db.delete(users).where(eq(users.id, 1)).get(); Expect, typeof deleteGetWhere>>; const deleteValuesWhere = db.delete(users).where(eq(users.id, 1)).values(); Expect, typeof deleteValuesWhere>>; const deleteRunBunWhere = bunDb.delete(users).where(eq(users.id, 1)).run(); Expect>; const deleteAllBunWhere = bunDb.delete(users).where(eq(users.id, 1)).all(); Expect, typeof deleteAllBunWhere>>; const deleteGetBunWhere = bunDb.delete(users).where(eq(users.id, 1)).get(); Expect, typeof deleteGetBunWhere>>; const deleteValuesBunWhere = bunDb.delete(users).where(eq(users.id, 1)).values(); Expect, typeof deleteValuesBunWhere>>; const deleteRunReturning = db.delete(users).returning().run(); Expect>; const deleteAllReturning = db.delete(users).returning().all(); Expect>; const deleteGetReturning = db.delete(users).returning().get(); Expect>; const deleteValuesReturning = db.delete(users).returning().values(); Expect>; const deleteRunBunReturning = bunDb.delete(users).returning().run(); Expect>; const deleteAllBunReturning = bunDb.delete(users).returning().all(); Expect>; const deleteGetBunReturning = bunDb.delete(users).returning().get(); Expect>; const deleteValuesBunReturning = bunDb.delete(users).returning().values(); Expect>; const deleteAllReturningPartial = db.delete(users).returning({ myId: users.id, myHomeCity: users.homeCity, }).all(); Expect>; const deleteGetReturningPartial = db.delete(users).returning({ myId: users.id, myHomeCity: users.homeCity, }).get(); Expect>; const deleteValuesReturningPartial = db.delete(users).returning({ myId: users.id, myHomeCity: users.homeCity, }).values(); Expect>; const deleteAllBunReturningPartial = bunDb.delete(users).returning({ myId: users.id, myHomeCity: users.homeCity, }).all(); Expect>; const deleteGetBunReturningPartial = bunDb.delete(users).returning({ myId: users.id, myHomeCity: users.homeCity, }).get(); Expect>; const deleteValuesBunReturningPartial = bunDb.delete(users).returning({ myId: users.id, myHomeCity: users.homeCity, }).values(); Expect>; { function dynamic(qb: T) { return qb.where(sql``).returning(); } const qbBase = db.delete(users).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { function withReturning(qb: T) { return qb.returning(); } const qbBase = db.delete(users).$dynamic(); const qb = withReturning(qbBase); const result = await qb; Expect>; } { db .delete(users) .where(sql``) // @ts-expect-error method was already called .where(sql``); db .delete(users) .returning() // @ts-expect-error method was already called .returning(); } { db.delete(users).where(sql``).limit(1).orderBy(sql``); } ================================================ FILE: drizzle-orm/type-tests/sqlite/generated-columns.ts ================================================ import { type Equal, Expect } from 'type-tests/utils'; import { type InferInsertModel, type InferSelectModel, sql } from '~/index'; import { drizzle } from '~/libsql'; import { int, sqliteTable, text } from '~/sqlite-core'; import { db } from './db'; const users = sqliteTable( 'users', { id: int('id').primaryKey(), firstName: text('first_name', { length: 255 }), lastName: text('last_name', { length: 255 }), email: text('email').notNull(), fullName: text('full_name') .generatedAlwaysAs(sql`concat_ws(first_name, ' ', last_name)`), upperName: text('upper_name').generatedAlwaysAs( sql` case when first_name is null then null else upper(first_name) end `, ).$type(), // There is no way for drizzle to detect nullability in these cases. This is how the user can work around it }, ); { type User = typeof users.$inferSelect; type NewUser = typeof users.$inferInsert; Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string | null; upperName: string | null; }, User > >(); Expect< Equal< { email: string; id?: number | undefined; firstName?: string | null | undefined; lastName?: string | null | undefined; }, NewUser > >(); } { type User = InferSelectModel; type NewUser = InferInsertModel; Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string | null; upperName: string | null; }, User > >(); Expect< Equal< { email: string; id?: number | undefined; firstName?: string | null | undefined; lastName?: string | null | undefined; }, NewUser > >(); } { const dbUsers = await db.select().from(users); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string | null; upperName: string | null; }[], typeof dbUsers > >(); } { const db = drizzle({} as any, { schema: { users } }); const dbUser = await db.query.users.findFirst(); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string | null; upperName: string | null; } | undefined, typeof dbUser > >(); } { const db = drizzle({} as any, { schema: { users } }); const dbUser = await db.query.users.findMany(); Expect< Equal< { id: number; firstName: string | null; lastName: string | null; email: string; fullName: string | null; upperName: string | null; }[], typeof dbUser > >(); } { // @ts-expect-error - Can't use the fullName because it's a generated column await db.insert(users).values({ firstName: 'test', lastName: 'test', email: 'test', fullName: 'test', }); } { await db.update(users).set({ firstName: 'test', lastName: 'test', email: 'test', // @ts-expect-error - Can't use the fullName because it's a generated column fullName: 'test', }); } ================================================ FILE: drizzle-orm/type-tests/sqlite/insert.ts ================================================ import type { RunResult } from 'better-sqlite3'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { and, eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { integer, QueryBuilder, sqliteTable, text } from '~/sqlite-core/index.ts'; import type { SQLiteInsert } from '~/sqlite-core/query-builders/insert.ts'; import type { DrizzleTypeError } from '~/utils.ts'; import { bunDb, db } from './db.ts'; import type { NewUser } from './tables.ts'; import { users } from './tables.ts'; const newUser: NewUser = { homeCity: 1, class: 'A', age1: 1, enumCol: 'a', serialNotNull: 1, }; const insertRun = db.insert(users).values(newUser).run(); Expect>; const insertRunBun = bunDb.insert(users).values(newUser).run(); Expect>; const insertAll = db.insert(users).values(newUser).all(); Expect, typeof insertAll>>; const insertAllBun = bunDb.insert(users).values(newUser).all(); Expect, typeof insertAllBun>>; const insertGet = db.insert(users).values(newUser).get(); Expect, typeof insertGet>>; const insertGetBun = bunDb.insert(users).values(newUser).get(); Expect, typeof insertGetBun>>; const insertValues = db.insert(users).values(newUser).values(); Expect, typeof insertValues>>; const insertValuesBun = bunDb.insert(users).values(newUser).values(); Expect, typeof insertValuesBun>>; const insertRunReturningAll = db.insert(users).values(newUser).returning().run(); Expect>; const insertRunReturningAllBun = bunDb.insert(users).values(newUser).returning().run(); Expect>; const insertAllReturningAll = db.insert(users).values(newUser).returning().all(); Expect>; const insertAllReturningAllBun = bunDb.insert(users).values(newUser).returning().all(); Expect>; const insertGetReturningAll = db.insert(users).values(newUser).returning().get(); Expect>; const insertGetReturningAllBun = bunDb.insert(users).values(newUser).returning().get(); Expect>; const insertValuesReturningAll = db.insert(users).values(newUser).returning().values(); Expect>; const insertValuesReturningAllBun = bunDb.insert(users).values(newUser).returning().values(); Expect>; const insertRunReturningPartial = db.insert(users).values(newUser).returning({ id: users.id, homeCity: users.homeCity, mySubclass: users.subClass, }).run(); Expect>; const insertRunReturningPartialBun = bunDb.insert(users).values(newUser).returning({ id: users.id, homeCity: users.homeCity, mySubclass: users.subClass, }).run(); Expect>; const insertAllReturningPartial = db.insert(users).values(newUser).returning({ id: users.id, homeCity: users.homeCity, mySubclass: users.subClass, }).all(); Expect< Equal< { id: number; homeCity: number; mySubclass: 'B' | 'D' | null; }[], typeof insertAllReturningPartial > >; const insertAllReturningPartialBun = bunDb.insert(users).values(newUser).returning({ id: users.id, homeCity: users.homeCity, mySubclass: users.subClass, }).all(); Expect< Equal< { id: number; homeCity: number; mySubclass: 'B' | 'D' | null; }[], typeof insertAllReturningPartialBun > >; const insertReturningSql = db.insert(users).values(newUser).returning({ id: users.id, homeCity: users.homeCity, subclassLower: sql`lower(${users.subClass})`, classLower: sql`lower(${users.class})`, }).all(); Expect< Equal<{ id: number; homeCity: number; subclassLower: unknown; classLower: string; }[], typeof insertReturningSql> >; const insertReturningSqlBun = bunDb.insert(users).values(newUser).returning({ id: users.id, homeCity: users.homeCity, subclassLower: sql`lower(${users.subClass})`, classLower: sql`lower(${users.class})`, }).all(); Expect< Equal<{ id: number; homeCity: number; subclassLower: unknown; classLower: string; }[], typeof insertReturningSqlBun> >; db.insert(users).values(newUser).onConflictDoNothing().run(); db.insert(users).values(newUser).onConflictDoNothing({ target: users.class }).run(); db.insert(users).values(newUser).onConflictDoNothing({ target: [ sql`${users.class} collate nocase asc`, sql`${users.age1} desc`, users.subClass, ], }).run(); db.insert(users).values(newUser).onConflictDoUpdate({ target: users.age1, set: { age1: sql`${users.age1} + 1` }, }) .run(); db.insert(users).values(newUser) .onConflictDoUpdate({ target: users.age1, set: { age1: sql`${users.age1} + 1` }, where: sql`${users.age1} > 10`, }) .onConflictDoNothing() .run(); const stmt = db.select().from(users) .where(and(eq(users.id, sql.placeholder('id')))) .offset(sql.placeholder('offset')) .limit(sql.placeholder('limit')) .prepare(); stmt.run({ id: 1, limit: 10, offset: 20 }); { function dynamic(qb: T) { return qb.returning().onConflictDoNothing().onConflictDoUpdate({ set: {}, target: users.id, where: sql`` }); } const qbBase = db.insert(users).values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0, serialNotNull: 0 }) .$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { function withReturning(qb: T) { return qb.returning(); } const qbBase = db.insert(users).values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0, serialNotNull: 0 }) .$dynamic(); const qb = withReturning(qbBase); const result = await qb; Expect>; } { db .insert(users) .values({ age1: 0, class: 'A', enumCol: 'a', homeCity: 0, serialNotNull: 0 }) .returning() // @ts-expect-error method was already called .returning(); } { const users1 = sqliteTable('users1', { id: integer('id').primaryKey(), name: text('name').notNull(), admin: integer('admin', { mode: 'boolean' }).notNull().default(false), }); const users2 = sqliteTable('users2', { id: integer('id').primaryKey(), firstName: text('first_name').notNull(), lastName: text('last_name').notNull(), admin: integer('admin', { mode: 'boolean' }).notNull().default(false), phoneNumber: text('phone_number'), }); const qb = new QueryBuilder(); db.insert(users1).select(sql`select * from users1`); db.insert(users1).select(() => sql`select * from users1`); db .insert(users1) .select( qb.select({ name: users2.firstName, admin: users2.admin, }).from(users2), ); db .insert(users1) .select( qb.select({ name: users2.firstName, admin: users2.admin, }).from(users2).where(sql``), ); db .insert(users2) .select( qb.select({ firstName: users2.firstName, lastName: users2.lastName, admin: users2.admin, }).from(users2), ); db .insert(users1) .select( qb.select({ name: sql`${users2.firstName} || ' ' || ${users2.lastName}`.as('name'), admin: users2.admin, }).from(users2), ); db .insert(users1) .select( // @ts-expect-error name is undefined qb.select({ admin: users1.admin }).from(users1), ); db.insert(users1).select(db.select().from(users1)); db.insert(users1).select(() => db.select().from(users1)); db.insert(users1).select((qb) => qb.select().from(users1)); // @ts-expect-error tables have different keys db.insert(users1).select(db.select().from(users2)); // @ts-expect-error tables have different keys db.insert(users1).select(() => db.select().from(users2)); } ================================================ FILE: drizzle-orm/type-tests/sqlite/no-strict-null-checks/test.ts ================================================ import { drizzle } from '~/better-sqlite3'; import { sqliteTable, text } from '~/sqlite-core'; export const test = sqliteTable( 'test', { id: text('id') .primaryKey() .generatedAlwaysAs('genstr'), name: text('name').$defaultFn(() => '' as string), title: text('title').notNull(), description: text('description'), dbdef: text('dbdef').default('dbdefval'), }, ); const db = drizzle.mock(); db.update(test) .set({ // @ts-expect-error id: '1', name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.update(test) .set({ name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ // @ts-expect-error id: '1', name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ name: 'name', title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ title: 'title', description: 'desc', dbdef: 'upddef', }); db.insert(test).values({ title: 'title', description: 'desc', }); db.insert(test).values({ title: 'title', }); ================================================ FILE: drizzle-orm/type-tests/sqlite/no-strict-null-checks/tsconfig.json ================================================ { "extends": "../../tsconfig.json", "compilerOptions": { "noEmit": true, "strictNullChecks": false, "strictPropertyInitialization": false, "exactOptionalPropertyTypes": false }, "include": ["./test.ts"] } ================================================ FILE: drizzle-orm/type-tests/sqlite/other.ts ================================================ import type { RunResult } from 'better-sqlite3'; import { eq, inArray } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { db } from './db.ts'; import { users } from './tables.ts'; const query = sql`select ${users.id}, ${users.class} from ${users} where ${inArray(users.id, [1, 2, 3])} and ${ eq(users.class, 'A') }`; const all = await db.all(query); Expect>; const allValuesTyped = await db.values<[number, 'A' | 'B' | 'C']>(query); Expect>; const allObjects = await db.all(query); Expect>; const allObjectsTyped = await db.all<{ id: number; class: 'A' | 'B' | 'C' }>(query); Expect>; const run = await db.run(query); Expect>; ================================================ FILE: drizzle-orm/type-tests/sqlite/select.ts ================================================ import { and, between, eq, exists, gt, gte, ilike, inArray, isNotNull, isNull, like, lt, lte, ne, not, notBetween, notExists, notIlike, notInArray, notLike, or, } from '~/sql/expressions/index.ts'; import { type InferSelectViewModel, param, sql } from '~/sql/sql.ts'; import { alias } from '~/sqlite-core/alias.ts'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { integer, text } from '~/sqlite-core/index.ts'; import type { SQLiteSelect, SQLiteSelectQueryBuilder } from '~/sqlite-core/query-builders/select.types.ts'; import { sqliteTable } from '~/sqlite-core/table.ts'; import { sqliteView } from '~/sqlite-core/view.ts'; import { db } from './db.ts'; import { cities, classes, newYorkers, users } from './tables.ts'; const city = alias(cities, 'city'); const city1 = alias(cities, 'city1'); const leftJoinFull = db.select().from(users).leftJoin(city, eq(users.id, city.id)).all(); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof cities.$inferSelect | null; }[], typeof leftJoinFull > >; const rightJoinFull = db.select().from(users).rightJoin(city, eq(users.id, city.id)).all(); Expect< Equal< { users_table: typeof users.$inferSelect | null; city: typeof city.$inferSelect; }[], typeof rightJoinFull > >; const innerJoinFull = db.select().from(users).innerJoin(city, eq(users.id, city.id)).all(); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof city.$inferSelect; }[], typeof innerJoinFull > >; const fullJoinFull = db.select().from(users).fullJoin(city, eq(users.id, city.id)).all(); Expect< Equal< { users_table: typeof users.$inferSelect | null; city: typeof city.$inferSelect | null; }[], typeof fullJoinFull > >; const crossJoinFull = db.select().from(users).crossJoin(city).all(); Expect< Equal< { users_table: typeof users.$inferSelect; city: typeof city.$inferSelect; }[], typeof crossJoinFull > >; const leftJoinFlat = db .select({ userId: users.id, userName: users.name, cityId: city.id, cityName: city.name, }) .from(users) .leftJoin(city, eq(users.id, city.id)) .all(); Expect< Equal<{ userId: number; userName: string | null; cityId: number | null; cityName: string | null; }[], typeof leftJoinFlat> >; const rightJoinFlat = db .select({ userId: users.id, userName: users.name, cityId: city.id, cityName: city.name, }) .from(users) .rightJoin(city, eq(users.id, city.id)) .all(); Expect< Equal<{ userId: number | null; userName: string | null; cityId: number; cityName: string; }[], typeof rightJoinFlat> >; const innerJoinFlat = db .select({ userId: users.id, userName: users.name, cityId: city.id, cityName: city.name, }) .from(users) .innerJoin(city, eq(users.id, city.id)) .all(); Expect< Equal<{ userId: number; userName: string | null; cityId: number; cityName: string; }[], typeof innerJoinFlat> >; const fullJoinFlat = db .select({ userId: users.id, userName: users.name, cityId: city.id, cityName: city.name, }) .from(users) .fullJoin(city, eq(users.id, city.id)) .all(); Expect< Equal<{ userId: number | null; userName: string | null; cityId: number | null; cityName: string | null; }[], typeof fullJoinFlat> >; const crossJoinFlat = db .select({ userId: users.id, userName: users.name, cityId: city.id, cityName: city.name, }) .from(users) .crossJoin(city) .all(); Expect< Equal<{ userId: number; userName: string | null; cityId: number; cityName: string; }[], typeof crossJoinFlat> >; const leftJoinMixed = db .select({ id: users.id, name: users.name, nameUpper: sql`upper(${users.name})`, idComplex: sql`${users.id}::text || ${city.id}::text`, city: { id: city.id, name: city.name, }, }) .from(users) .leftJoin(city, eq(users.id, city.id)) .all(); Expect< Equal< { id: number; name: string | null; nameUpper: string | null; idComplex: string | null; city: { id: number; name: string; } | null; }[], typeof leftJoinMixed > >; const leftJoinMixed2 = db .select({ id: users.id, name: users.name, foo: { bar: users.id, baz: cities.id, }, }) .from(users) .leftJoin(cities, eq(users.id, cities.id)) .all(); Expect< Equal< { id: number; name: string | null; foo: { bar: number; baz: number | null; }; }[], typeof leftJoinMixed2 > >; const joinAll = db .select() .from(users) .leftJoin(cities, eq(users.id, cities.id)) .rightJoin(city, eq(city.id, users.id)) .rightJoin(city1, eq(city1.id, users.id)) .all(); Expect< Equal<{ users_table: typeof users.$inferSelect | null; cities_table: typeof cities.$inferSelect | null; city: typeof city.$inferSelect | null; city1: typeof city1.$inferSelect; }[], typeof joinAll> >; const joinGet = db .select() .from(users) .leftJoin(cities, eq(users.id, cities.id)) .rightJoin(city, eq(city.id, users.id)) .rightJoin(city1, eq(city1.id, users.id)) .get(); Expect< Equal< { users_table: typeof users.$inferSelect | null; cities_table: typeof cities.$inferSelect | null; city: typeof city.$inferSelect | null; city1: typeof city1.$inferSelect; } | undefined, typeof joinGet > >; const joinValues = db .select() .from(users) .leftJoin(cities, eq(users.id, cities.id)) .rightJoin(city, eq(city.id, users.id)) .rightJoin(city1, eq(city1.id, users.id)) .values(); Expect>; const joinPartial = db .select({ user: { id: users.id, age: users.age1, name: users.name, }, city: { id: cities.id, name: cities.name, }, }) .from(users) .fullJoin(cities, eq(users.id, cities.id)) .all(); Expect< Equal< { user: { id: number; name: string | null; age: number; } | null; city: { id: number; name: string; } | null; }[], typeof joinPartial > >; const join3 = db .select({ userId: users.id, cityId: cities.id, classId: classes.id, }) .from(users) .fullJoin(cities, eq(users.id, cities.id)) .rightJoin(classes, eq(users.id, classes.id)).all(); Expect< Equal<{ userId: number | null; cityId: number | null; classId: number; }[], typeof join3> >; db .select() .from(users) .where(exists(db.select().from(cities).where(eq(users.homeCity, cities.id)))); function mapFunkyFuncResult(valueFromDriver: unknown) { return { foo: (valueFromDriver as Record)['foo'], }; } const age = 1; const allOperators = db .select({ col2: sql`5 - ${users.id} + 1`, // unknown col3: sql`${users.id} + 1`, // number col33: sql`${users.id} + 1`.mapWith(users.id), // number col34: sql`${users.id} + 1`.mapWith(mapFunkyFuncResult), // number col4: sql`one_or_another(${users.id}, ${users.class})`, // string | number col5: sql`true`, // unknown col6: sql`true`, // boolean col7: sql`random()`, // number col8: sql`some_funky_func(${users.id})`.mapWith(mapFunkyFuncResult), // { foo: string } col9: sql`greatest(${users.createdAt}, ${param(new Date(), users.createdAt)})`, // unknown col10: sql`date_or_false(${users.createdAt}, ${param(new Date(), users.createdAt)})`, // Date | boolean col11: sql`${users.age1} + ${age}`, // unknown col12: sql`${users.age1} + ${param(age, users.age1)}`, // unknown col13: sql`lower(${users.class})`, // unknown col14: sql`length(${users.class})`, // number count: sql`count(*)`, // number }) .from(users) .where(and( eq(users.id, 1), ne(users.id, 1), or(eq(users.id, 1), ne(users.id, 1)), not(eq(users.id, 1)), gt(users.id, 1), gte(users.id, 1), lt(users.id, 1), lte(users.id, 1), inArray(users.id, [1, 2, 3]), inArray(users.id, db.select({ id: users.id }).from(users)), inArray(users.id, sql`select id from ${users}`), notInArray(users.id, [1, 2, 3]), notInArray(users.id, db.select({ id: users.id }).from(users)), notInArray(users.id, sql`select id from ${users}`), isNull(users.subClass), isNotNull(users.id), exists(db.select({ id: users.id }).from(users)), exists(sql`select id from ${users}`), notExists(db.select({ id: users.id }).from(users)), notExists(sql`select id from ${users}`), between(users.id, 1, 2), notBetween(users.id, 1, 2), like(users.id, '%1%'), notLike(users.id, '%1%'), ilike(users.id, '%1%'), notIlike(users.id, '%1%'), )).all(); Expect< Equal<{ col2: unknown; col3: number; col33: number; col34: { foo: any }; col4: string | number; col5: unknown; col6: boolean; col7: number; col8: { foo: any; }; col9: unknown; col10: boolean | Date; col11: unknown; col12: unknown; col13: unknown; col14: number; count: number; }[], typeof allOperators> >; const textSelect = db .select({ t: users.name, }) .from(users) .all(); Expect>; const homeCity = alias(cities, 'homeCity'); const c = alias(classes, 'c'); const otherClass = alias(classes, 'otherClass'); const anotherClass = alias(classes, 'anotherClass'); const friend = alias(users, 'friend'); const currentCity = alias(cities, 'currentCity'); const subscriber = alias(users, 'subscriber'); const closestCity = alias(cities, 'closestCity'); const megaJoin = db .select({ user: { id: users.id, maxAge: sql`max(${users.age1})`, }, city: { id: cities.id, }, homeCity, c, otherClass, anotherClass, friend, currentCity, subscriber, closestCity, }) .from(users) .innerJoin(cities, sql`${users.id} = ${cities.id}`) .innerJoin(homeCity, sql`${users.homeCity} = ${homeCity.id}`) .innerJoin(c, eq(c.id, users.class)) .innerJoin(otherClass, sql`${c.id} = ${otherClass.id}`) .innerJoin(anotherClass, sql`${users.class} = ${anotherClass.id}`) .innerJoin(friend, sql`${users.id} = ${friend.id}`) .innerJoin(currentCity, sql`${homeCity.id} = ${currentCity.id}`) .innerJoin(subscriber, sql`${users.class} = ${subscriber.id}`) .innerJoin(closestCity, sql`${users.currentCity} = ${closestCity.id}`) .where(and(sql`${users.age1} > 0`, eq(cities.id, 1))) .limit(1) .offset(1).all(); Expect< Equal< { user: { id: number; maxAge: unknown; }; city: { id: number; }; homeCity: { id: number; name: string; population: number | null; }; currentCity: { id: number; name: string; population: number | null; }; c: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; otherClass: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; anotherClass: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; friend: { id: number; homeCity: number; currentCity: number | null; serialNullable: number | null; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; name: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }; subscriber: { id: number; homeCity: number; currentCity: number | null; serialNullable: number | null; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; name: string | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }; closestCity: { id: number; name: string; population: number | null; }; }[], typeof megaJoin > >; const friends = alias(users, 'friends'); const join4 = db .select({ user: { id: users.id, }, users123: { id: users.id, }, city: { name: cities.name, population: cities.population, }, class: classes, friend: friends, }) .from(users) .innerJoin(cities, sql`${users.id} = ${cities.id}`) .innerJoin(classes, sql`${cities.id} = ${classes.id}`) .innerJoin(friends, sql`${friends.id} = ${users.id}`) .where(sql`${users.age1} > 0`).all(); Expect< Equal< { user: { id: number; }; users123: { id: number; }; city: { name: string; population: number | null; }; class: { id: number; class: 'A' | 'C' | null; subClass: 'B' | 'D'; }; friend: { id: number; name: string | null; homeCity: number; currentCity: number | null; serialNullable: number | null; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }; }[], typeof join4 > >; { const authenticated = false as boolean; const result = db .select({ id: users.id, ...(authenticated ? { city: users.homeCity } : {}), }) .from(users) .all(); Expect< Equal< { id: number; city?: number; }[], typeof result > >; } { const result = db.select().from(newYorkers).all(); Expect< Equal< { userId: number; cityId: number | null; }[], typeof result > >; } { const result = db.select({ userId: newYorkers.userId }).from(newYorkers).all(); Expect< Equal< { userId: number; }[], typeof result > >; } { db .select() .from(users) .where(eq(users.id, 1)); db .select() .from(users) .where(eq(users.id, 1)) // @ts-expect-error - can't use where twice .where(eq(users.id, 1)); db .select() .from(users) .where(eq(users.id, 1)) .limit(10) // @ts-expect-error - can't use where twice .where(eq(users.id, 1)); } { function withFriends(qb: T) { const friends = alias(users, 'friends'); const friends2 = alias(users, 'friends2'); const friends3 = alias(users, 'friends3'); const friends4 = alias(users, 'friends4'); const friends5 = alias(users, 'friends5'); return qb .leftJoin(friends, sql`true`) .leftJoin(friends2, sql`true`) .leftJoin(friends3, sql`true`) .leftJoin(friends4, sql`true`) .leftJoin(friends5, sql`true`); } const qb = db.select().from(users).$dynamic(); const result = await withFriends(qb); Expect< Equal >; } { function withFriends(qb: T) { const friends = alias(users, 'friends'); const friends2 = alias(users, 'friends2'); const friends3 = alias(users, 'friends3'); const friends4 = alias(users, 'friends4'); const friends5 = alias(users, 'friends5'); return qb .leftJoin(friends, sql`true`) .leftJoin(friends2, sql`true`) .leftJoin(friends3, sql`true`) .leftJoin(friends4, sql`true`) .leftJoin(friends5, sql`true`); } const qb = db.select().from(users).$dynamic(); const result = await withFriends(qb); Expect< Equal >; } { function dynamic(qb: T) { return qb.where(sql``).having(sql``).groupBy(sql``).orderBy(sql``).limit(1).offset(1); } const qb = db.select().from(users).$dynamic(); const result = await dynamic(qb); Expect>; } { db .select() .from(users) .where(sql``) .limit(10) // @ts-expect-error method was already called .where(sql``); db .select() .from(users) .having(sql``) .limit(10) // @ts-expect-error method was already called .having(sql``); db .select() .from(users) .groupBy(sql``) .limit(10) // @ts-expect-error method was already called .groupBy(sql``); db .select() .from(users) .orderBy(sql``) .limit(10) // @ts-expect-error method was already called .orderBy(sql``); db .select() .from(users) .limit(10) .where(sql``) // @ts-expect-error method was already called .limit(10); db .select() .from(users) .offset(10) .limit(10) // @ts-expect-error method was already called .offset(10); } { const table1 = sqliteTable('table1', { id: integer().primaryKey(), name: text().notNull(), }); const table2 = sqliteTable('table2', { id: integer().primaryKey(), age: integer().notNull(), }); const table3 = sqliteTable('table3', { id: integer().primaryKey(), phone: text().notNull(), }); const view = sqliteView('view').as((qb) => qb.select({ table: table1, column: table2.age, nested: { column: table3.phone, }, }).from(table1).innerJoin(table2, sql``).leftJoin(table3, sql``) ); const result = await db.select().from(view); Expect< Equal >; Expect>; Expect[]>>; } ================================================ FILE: drizzle-orm/type-tests/sqlite/set-operators.ts ================================================ import { type Equal, Expect } from 'type-tests/utils.ts'; import { eq } from '~/sql/expressions/index.ts'; import { desc, sql } from '~/sql/index.ts'; import { except, intersect, type SQLiteSetOperator, union, unionAll } from '~/sqlite-core/index.ts'; import { db } from './db.ts'; import { cities, classes, newYorkers, users } from './tables.ts'; const unionTest = await db .select({ id: users.id }) .from(users) .union( db .select({ id: users.id }) .from(users), ); Expect>; const unionAllTest = await db .select({ id: users.id, text: users.name }) .from(users) .unionAll( db.select({ id: users.id, text: users.name }) .from(users) .leftJoin(cities, eq(users.id, cities.id)), ); Expect>; const intersectTest = await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) .intersect(({ intersect }) => intersect( db .select({ id: users.id, homeCity: users.homeCity }) .from(users), db .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) .from(users), ) ); Expect>; const intersectAllTest = await db .select({ id: users.id, homeCity: users.class }) .from(users) .intersect( db .select({ id: users.id, homeCity: users.class }) .from(users) .leftJoin(cities, eq(users.id, cities.id)), ); Expect>; const exceptTest = await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) .except( db .select({ id: users.id, homeCity: sql`${users.homeCity}`.mapWith(Number) }) .from(users), ); Expect>; const exceptAllTest = await db .select({ id: users.id, homeCity: users.class }) .from(users) .except( db .select({ id: users.id, homeCity: sql<'A' | 'C'>`${users.class}` }) .from(users), ); Expect>; const union2Test = await union(db.select().from(cities), db.select().from(cities), db.select().from(cities)); Expect>; const unionAll2Test = await unionAll( db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select().from(cities), ); Expect>; const intersect2Test = await intersect( db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), db.select({ id: cities.id, name: cities.name, population: cities.population, }).from(cities), ); Expect>; const intersectAll2Test = await intersect( union( db.select({ id: cities.id, }).from(cities), db.select({ id: cities.id, }) .from(cities).where(sql``), ), db.select({ id: cities.id, }) .from(cities), ).orderBy(desc(cities.id)).limit(23); Expect>; const except2Test = await except( db.select({ userId: newYorkers.userId, }) .from(newYorkers), db.select({ userId: newYorkers.userId, }).from(newYorkers), ); Expect>; const exceptAll2Test = await except( db.select({ userId: newYorkers.userId, cityId: newYorkers.cityId, }) .from(newYorkers).where(eq(newYorkers.cityId, 2)), db.select({ userId: newYorkers.userId, cityId: newYorkers.cityId, }).from(newYorkers).leftJoin(users, sql``), ); Expect>; const unionfull = await union(db.select().from(users), db.select().from(users)).orderBy(sql``).limit(1).offset(2); Expect< Equal<{ id: number; name: string | null; homeCity: number; currentCity: number | null; serialNullable: number | null; serialNotNull: number; class: 'A' | 'C'; subClass: 'B' | 'D' | null; age1: number; createdAt: Date; enumCol: 'a' | 'b' | 'c'; }[], typeof unionfull> >; union(db.select().from(users), db.select().from(users)) .orderBy(sql``) // @ts-expect-error - method was already called .orderBy(sql``); union(db.select().from(users), db.select().from(users)) .offset(1) // @ts-expect-error - method was already called .offset(2); union(db.select().from(users), db.select().from(users)) .orderBy(sql``) // @ts-expect-error - method was already called .orderBy(sql``); { function dynamic(qb: T) { return qb.orderBy(sql``).limit(1).offset(2); } const qb = union(db.select().from(users), db.select().from(users)).$dynamic(); const result = await dynamic(qb); Expect>; } await db .select({ id: users.id, homeCity: users.homeCity }) .from(users) // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error .intersect(({ intersect }) => intersect(db.select().from(users), db.select().from(users))); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select().from(classes).union(db.select({ id: classes.id }).from(classes)); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: classes.id }).from(classes).union(db.select().from(classes).where(sql``)); // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: classes.id }).from(classes).union(db.select().from(classes)); union( db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), db.select({ id: cities.id, name: cities.name }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select().from(cities), ); union( db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities), db.select({ id: cities.id, name: cities.name }).from(cities).where(sql``).limit(3).$dynamic(), db.select({ id: cities.id, name: cities.name }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: newYorkers.userId }).from(newYorkers), db.select({ id: cities.id }).from(cities), ); union( db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities), db.select({ id: cities.id }).from(cities).where(sql``), db.select({ id: sql`${cities.id}` }).from(cities), db.select({ id: cities.id }).from(cities), // All queries in combining statements should return the same number of columns // and the corresponding columns should have compatible data type // @ts-expect-error db.select({ id: cities.id, name: cities.name, population: cities.population }).from(cities).where(sql``), ); ================================================ FILE: drizzle-orm/type-tests/sqlite/subquery.ts ================================================ import { Expect } from 'type-tests/utils.ts'; import { and, eq } from '~/sql/expressions/index.ts'; import { count } from '~/sql/functions/aggregate.ts'; import { sql } from '~/sql/sql.ts'; import { alias, integer, sqliteTable, text } from '~/sqlite-core/index.ts'; import type { DrizzleTypeError, Equal } from '~/utils.ts'; import { db } from './db.ts'; const users = sqliteTable('names', { id: integer('id').primaryKey(), name: text('name'), managerId: integer('author_id'), }); const posts = sqliteTable('posts', { id: integer('id').primaryKey(), authorId: integer('author_id'), title: text('title'), }); const n1 = db .select({ id: users.id, name: users.name, authorId: users.managerId, count1: sql`count(1)::int`.as('count1'), }) .from(users) .groupBy(users.id, users.name, users.managerId) .as('n1'); const n2 = db .select({ id: users.id, authorId: users.managerId, totalCount: sql`count(1)::int`.as('totalCount'), }) .from(users) .groupBy(users.id, users.managerId) .as('n2'); const result = db .select({ name: n1.name, authorId: n1.authorId, count1: n1.count1, totalCount: n2.totalCount, }) .from(n1) .innerJoin(n2, and(eq(n2.id, n1.id), eq(n2.authorId, n1.authorId))) .all(); Expect< Equal< { name: string | null; authorId: number | null; count1: number; totalCount: number; }[], typeof result > >; const names2 = alias(users, 'names2'); const sq1 = db .select({ id: users.id, name: users.name, id2: names2.id, }) .from(users) .leftJoin(names2, eq(users.name, names2.name)) .as('sq1'); const res = db.select().from(sq1).all(); Expect< Equal< { id: number; name: string | null; id2: number | null; }[], typeof res > >; { const sq = db.select({ count: sql`count(1)::int` }).from(users).as('sq'); Expect ? true : false>; } const sqUnion = db.select().from(users).union(db.select().from(names2)).as('sqUnion'); const resUnion = await db.select().from(sqUnion); Expect< Equal<{ id: number; name: string | null; managerId: number | null; }[], typeof resUnion> >; const fromSubquery = await db.select({ count: db.select({ count: count().as('c') }).from(posts).where(eq(posts.authorId, users.id)).as('count'), }).from(users); Expect>; const fromSubquery2 = await db.select({ name: db.select({ name: users.name }).from(users).where(eq(users.id, posts.authorId)).as('name'), }).from(posts); Expect>; const errorSubquery = await db.select({ name: db.select({ name: users.name, managerId: users.managerId }).from(users).where(eq(users.id, posts.authorId)).as( 'name', ), }).from(posts); Expect }[]>>; ================================================ FILE: drizzle-orm/type-tests/sqlite/tables.ts ================================================ import * as crypto from 'node:crypto'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { eq, gt } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { alias, blob, check, customType, foreignKey, index, integer, numeric, primaryKey, real, type SQLiteColumn, sqliteTable, text, uniqueIndex, } from '~/sqlite-core/index.ts'; import { sqliteView, type SQLiteViewWithSelection } from '~/sqlite-core/view.ts'; import { db } from './db.ts'; export const users = sqliteTable( 'users_table', { id: integer('id').primaryKey({ autoIncrement: true }), homeCity: integer('home_city') .notNull() .references(() => cities.id), currentCity: integer('current_city').references(() => cities.id), serialNullable: integer('serial1'), serialNotNull: integer('serial2').notNull(), class: text('class', { enum: ['A', 'C'] }).notNull(), subClass: text('sub_class', { enum: ['B', 'D'] }), name: text('name'), age1: integer('age1').notNull(), createdAt: integer('created_at', { mode: 'timestamp' }).notNull().defaultNow(), enumCol: text('enum_col', { enum: ['a', 'b', 'c'] }).notNull(), }, (users) => ({ usersAge1Idx: uniqueIndex('usersAge1Idx').on(users.class), usersAge2Idx: index('usersAge2Idx').on(users.class), uniqueClass: uniqueIndex('uniqueClass') .on(users.class, users.subClass) .where( sql`${users.class} is not null`, ), uniqueClassEvenBetterThanPrisma: uniqueIndex('uniqueClass') .on(users.class, users.subClass) .where( sql`${users.class} is not null`, ), legalAge: check('legalAge', sql`${users.age1} > 18`), usersClassFK: foreignKey(() => ({ columns: [users.subClass], foreignColumns: [classes.subClass] })), usersClassComplexFK: foreignKey(() => ({ columns: [users.class, users.subClass], foreignColumns: [classes.class, classes.subClass], })), pk: primaryKey(users.age1, users.class), }), ); export type User = typeof users.$inferSelect; Expect< Equal >; export type NewUser = typeof users.$inferInsert; Expect< Equal >; export const cities = sqliteTable('cities_table', ({ integer, text }) => ({ id: integer('id').primaryKey(), name: text('name').notNull(), population: integer('population').default(0), })); export type City = typeof cities.$inferSelect; Expect< Equal >; export type NewCity = typeof cities.$inferInsert; Expect< Equal >; export const classes = sqliteTable('classes_table', { id: integer('id').primaryKey(), class: text('class', { enum: ['A', 'C'] }), subClass: text('sub_class', { enum: ['B', 'D'] }).notNull(), }); export type Class = typeof classes.$inferSelect; Expect< Equal >; export type NewClass = typeof classes.$inferInsert; Expect< Equal >; export const newYorkers = sqliteView('new_yorkers') .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); Expect< Equal< SQLiteViewWithSelection<'new_yorkers', false, { userId: SQLiteColumn<{ name: 'id'; dataType: 'number'; columnType: 'SQLiteInteger'; data: number; driverParam: number; notNull: true; hasDefault: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isAutoincrement: false; hasRuntimeDefault: false; isPrimaryKey: true; }>; cityId: SQLiteColumn<{ name: 'id'; dataType: 'number'; columnType: 'SQLiteInteger'; data: number; driverParam: number; notNull: false; hasDefault: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isAutoincrement: false; hasRuntimeDefault: false; isPrimaryKey: true; }>; }>, typeof newYorkers > >; { const newYorkers = sqliteView('new_yorkers', { userId: integer('user_id').notNull(), cityId: integer('city_id'), }) .as( sql`select ${users.id} as user_id, ${cities.id} as city_id from ${users} left join ${cities} on ${ eq(cities.id, users.homeCity) } where ${gt(users.age1, 18)}`, ); Expect< Equal< SQLiteViewWithSelection<'new_yorkers', false, { userId: SQLiteColumn<{ name: 'user_id'; dataType: 'number'; columnType: 'SQLiteInteger'; data: number; driverParam: number; hasDefault: false; notNull: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isAutoincrement: false; hasRuntimeDefault: false; isPrimaryKey: false; }>; cityId: SQLiteColumn<{ name: 'city_id'; notNull: false; hasDefault: false; dataType: 'number'; columnType: 'SQLiteInteger'; data: number; driverParam: number; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isAutoincrement: false; hasRuntimeDefault: false; isPrimaryKey: false; }>; }>, typeof newYorkers > >; } { const newYorkers = sqliteView('new_yorkers', { userId: integer('user_id').notNull(), cityId: integer('city_id'), }).existing(); Expect< Equal< SQLiteViewWithSelection<'new_yorkers', true, { userId: SQLiteColumn<{ name: 'user_id'; dataType: 'number'; columnType: 'SQLiteInteger'; data: number; driverParam: number; hasDefault: false; notNull: true; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isAutoincrement: false; hasRuntimeDefault: false; isPrimaryKey: false; }>; cityId: SQLiteColumn<{ name: 'city_id'; notNull: false; hasDefault: false; dataType: 'number'; columnType: 'SQLiteInteger'; data: number; driverParam: number; tableName: 'new_yorkers'; enumValues: undefined; baseColumn: never; generated: undefined; identity: undefined; isAutoincrement: false; hasRuntimeDefault: false; isPrimaryKey: false; }>; }>, typeof newYorkers > >; } { sqliteTable('test', { col1: integer('col1').default(1), col2: integer('col2', { mode: 'number' }).default(1), col3: integer('col3', { mode: 'timestamp' }).default(new Date()), col4: integer('col4', { mode: 'timestamp_ms' }).default(new Date()), // @ts-expect-error - invalid mode col5: integer('col4', { mode: undefined }).default(new Date()), }); } { const internalStaff = sqliteTable('internal_staff', { userId: integer('user_id').notNull(), }); const customUser = sqliteTable('custom_user', { id: integer('id').notNull(), }); const ticket = sqliteTable('ticket', { staffId: integer('staff_id').notNull(), }); const subq = db .select() .from(internalStaff) .leftJoin( customUser, eq(internalStaff.userId, customUser.id), ).as('internal_staff'); const mainQuery = db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)) .all(); Expect< Equal<{ internal_staff: { internal_staff: { userId: number; }; custom_user: { id: number | null; }; } | null; ticket: { staffId: number; }; }[], typeof mainQuery> >; } { const newYorkers = sqliteView('new_yorkers') .as((qb) => { const sq = qb .$with('sq') .as( qb.select({ userId: users.id, cityId: cities.id }) .from(users) .leftJoin(cities, eq(cities.id, users.homeCity)) .where(sql`${users.age1} > 18`), ); return qb.with(sq).select().from(sq).where(sql`${users.homeCity} = 1`); }); const ny1 = alias(newYorkers, 'ny1'); const result = db.select().from(newYorkers).leftJoin(ny1, eq(newYorkers.userId, ny1.userId)).all(); Expect< Equal<{ new_yorkers: { userId: number; cityId: number | null; }; ny1: { userId: number; cityId: number | null; } | null; }[], typeof result> >; } { type Id = number & { __id: true }; const _table = sqliteTable('test', { // @ts-expect-error - type should be Id, not number col1: integer('col1').$type().default(1), col2: integer('col2').$type().default(1 as Id), }); const table = sqliteTable('test', { col1: integer('col1').$type().notNull(), }); Expect< Equal<{ col1: Id; }, typeof table.$inferSelect> >; } { const test = sqliteTable('test', { id: text('id').$defaultFn(() => crypto.randomUUID()).primaryKey(), }); Expect< Equal<{ id?: string; }, typeof test.$inferInsert> >; } { sqliteTable('test', { id: integer('id').$default(() => 1), id2: integer('id').$defaultFn(() => 1), // @ts-expect-error - should be number id3: integer('id').$default(() => '1'), // @ts-expect-error - should be number id4: integer('id').$defaultFn(() => '1'), }); } { const table = sqliteTable('test', { data: text('data', { mode: 'json' }).notNull(), dataTyped: text('dataTyped', { mode: 'json' }).$type<{ a: number }>().notNull(), }); Expect< Equal<{ data: unknown; dataTyped: { a: number }; }, typeof table.$inferSelect> >; } { const test = sqliteTable('test', { test1: text('test', { length: 255, enum: ['a', 'b', 'c'] as const }).notNull(), test2: text('test', { length: 255, enum: ['a', 'b', 'c'] }).notNull(), test3: text('test', { enum: ['a', 'b', 'c'] as const }).notNull(), test4: text('test', { enum: ['a', 'b', 'c'] }).notNull(), }); Expect>; Expect>; Expect>; Expect>; } { // All types with generated columns const test = sqliteTable('test', { test1: text('test', { length: 255, enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test2: text('test', { length: 255, enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), test3: text('test', { enum: ['a', 'b', 'c'] as const }).generatedAlwaysAs(sql``), test4: text('test', { enum: ['a', 'b', 'c'] }).generatedAlwaysAs(sql``), }); Expect>; Expect>; Expect>; Expect>; } { const customRequiredConfig = customType<{ data: string; driverData: string; config: { length: number }; configRequired: true; }>({ dataType(config) { Expect>; return `varchar(${config.length})`; }, toDriver(value) { Expect>(); return value; }, fromDriver(value) { Expect>(); return value; }, }); customRequiredConfig('t', { length: 10 }); customRequiredConfig({ length: 10 }); // @ts-expect-error - config is required customRequiredConfig('t'); // @ts-expect-error - config is required customRequiredConfig(); } { const customOptionalConfig = customType<{ data: string; driverData: string; config: { length: number }; }>({ dataType(config) { Expect>; return config ? `varchar(${config.length})` : `text`; }, toDriver(value) { Expect>(); return value; }, fromDriver(value) { Expect>(); return value; }, }); customOptionalConfig('t', { length: 10 }); customOptionalConfig('t'); customOptionalConfig({ length: 10 }); customOptionalConfig(); } { sqliteTable('all_columns', { blob: blob('blob'), blob2: blob('blob2', { mode: 'bigint' }), blobdef: blob('blobdef').default(0), integer: integer('integer'), integer2: integer('integer2', { mode: 'boolean' }), integerdef: integer('integerdef').default(0), numeric: numeric('numeric'), numericdef: numeric('numericdef').default(''), real: real('real'), realdef: real('realdef').default(0), text: text('text'), text2: text('text2', { enum: ['a', 'b', 'c'] }), text3: text('text3', { length: 1 }), text4: text('text4', { length: 1, enum: ['a', 'b', 'c'] }), text5: text('text5', { mode: 'json' }), textdef: text('textdef').default(''), }); } { const keysAsColumnNames = sqliteTable('test', { id: integer(), name: text(), }); Expect>; Expect>; } { sqliteTable('all_columns_without_name', { blob: blob(), blob2: blob({ mode: 'bigint' }), blobdef: blob().default(0), integer: integer(), integer2: integer({ mode: 'boolean' }), integerdef: integer().default(0), numeric: numeric(), numericdef: numeric().default(''), real: real(), realdef: real().default(0), text: text(), text2: text({ enum: ['a', 'b', 'c'] }), text3: text({ length: 1 }), text4: text({ length: 1, enum: ['a', 'b', 'c'] }), text5: text({ mode: 'json' }), textdef: text().default(''), }); } ================================================ FILE: drizzle-orm/type-tests/sqlite/update.ts ================================================ import type { RunResult } from 'better-sqlite3'; import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { eq } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import type { SQLiteUpdate } from '~/sqlite-core/query-builders/update.ts'; import type { DrizzleTypeError } from '~/utils.ts'; import { bunDb, db } from './db.ts'; import { cities, users } from './tables.ts'; const updateRun = db.update(users) .set({ name: 'John', age1: 30, }) .where(eq(users.id, 1)) .run(); Expect>; const updateRunBun = bunDb.update(users) .set({ name: 'John', age1: 30, }) .where(eq(users.id, 1)) .run(); Expect>; const updateAll = db.update(users) .set({ name: 'John', age1: 30, }) .all(); Expect, typeof updateAll>>; const updateAllBun = bunDb.update(users) .set({ name: 'John', age1: 30, }) .all(); Expect, typeof updateAllBun>>; const updateGet = db.update(users) .set({ name: 'John', age1: 30, }).get(); Expect, typeof updateGet>>; const updateGetBun = bunDb.update(users) .set({ name: 'John', age1: 30, }).get(); Expect, typeof updateGetBun>>; const updateAllReturningAll = db.update(users) .set({ name: 'John', age1: 30, }) .where(eq(users.id, 1)) .returning() .all(); Expect>; const updateAllReturningAllBun = bunDb.update(users) .set({ name: 'John', age1: 30, }) .where(eq(users.id, 1)) .returning() .all(); Expect>; const updateGetReturningAll = db.update(users) .set({ name: 'John', age1: 30, }) .where(eq(users.id, 1)) .returning() .get(); Expect>; const updateGetReturningAllBun = bunDb.update(users) .set({ name: 'John', age1: 30, }) .where(eq(users.id, 1)) .returning() .get(); Expect>; { function dynamic(qb: T) { return qb.where(sql``).returning(); } const qbBase = db.update(users).set({}).$dynamic(); const qb = dynamic(qbBase); const result = await qb; Expect>; } { function withReturning(qb: T) { return qb.returning(); } const qbBase = db.update(users).set({}).$dynamic(); const qb = withReturning(qbBase); const result = await qb; Expect>; } { db .update(users) .set({}) .returning() // @ts-expect-error method was already called .returning(); db .update(users) .set({}) .where(sql``) // @ts-expect-error method was already called .where(sql``); } { db .update(users) .set({}) .from(sql``) .leftJoin(sql``, (table, from) => { Expect>; Expect>; return sql``; }); db .update(users) .set({}) .from(cities) .leftJoin(sql``, (table, from) => { Expect>; Expect>; return sql``; }); const citiesSq = db.$with('cities_sq').as(db.select({ id: cities.id }).from(cities)); db .with(citiesSq) .update(users) .set({}) .from(citiesSq) .leftJoin(sql``, (table, from) => { Expect>; Expect>; return sql``; }); db .with(citiesSq) .update(users) .set({ homeCity: citiesSq.id, }) .from(citiesSq); } { const result = await db.update(users).set({}).from(cities).returning(); Expect< Equal >; } { const result = await db.update(users).set({}).from(cities).returning({ id: users.id, }); Expect< Equal<{ id: number; }[], typeof result> >; db.update(users).set({}).where(sql``).limit(1).orderBy(sql``); } ================================================ FILE: drizzle-orm/type-tests/sqlite/with.ts ================================================ import type { Equal } from 'type-tests/utils.ts'; import { Expect } from 'type-tests/utils.ts'; import { gt, inArray, like } from '~/sql/expressions/index.ts'; import { sql } from '~/sql/sql.ts'; import { integer, sqliteTable, text } from '~/sqlite-core/index.ts'; import { db } from './db.ts'; const orders = sqliteTable('orders', { id: integer('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull(), amount: integer('amount').notNull(), quantity: integer('quantity').notNull(), generated: text('generatedText').generatedAlwaysAs(sql``), }); { const regionalSales = db .$with('regional_sales') .as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = db .$with('top_regions') .as( db .select({ region: orders.region, totalSales: orders.amount, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result = db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`sum(${orders.quantity})`, productSales: sql`sum(${orders.amount})`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .all(); Expect< Equal<{ region: string; product: string; productUnits: number; productSales: number; }[], typeof result> >; const allOrdersWith = db.$with('all_orders_with').as(db.select().from(orders)); const allFromWith = await db.with(allOrdersWith).select().from(allOrdersWith); Expect< Equal<{ id: number; region: string; product: string; amount: number; quantity: number; generated: string | null; }[], typeof allFromWith> >; const regionalSalesWith = db.$with('regional_sales_with').as(db.select().from(regionalSales)); db.with(regionalSalesWith).select().from(regionalSalesWith).where(like(regionalSalesWith.totalSales, 'abc')); } { const providers = sqliteTable('providers', { id: integer().primaryKey(), providerName: text().notNull(), }); const sq1 = db.$with('providers_sq', { name: providers.providerName, }).as(sql`select provider_name as name from providers`); const q1 = await db.with(sq1).select().from(sq1); Expect>; const sq2 = db.$with('providers_sq', { nested: { id: providers.id, }, }).as(() => sql`select id from providers`); const q2 = await db.with(sq2).select().from(sq2); Expect>; // @ts-expect-error db.$with('providers_sq', { name: providers.providerName }).as(db.select().from(providers)); // @ts-expect-error db.$with('providers_sq', { name: providers.providerName }).as((qb) => qb.select().from(providers)); } ================================================ FILE: drizzle-orm/type-tests/tsconfig.json ================================================ { "extends": "../tsconfig.build.json", "compilerOptions": { "composite": false, "noEmit": true, "rootDir": "..", "outDir": "./.cache" }, "include": [".", "../src"], "exclude": ["**/playground"] } ================================================ FILE: drizzle-orm/type-tests/utils/neon-auth-token.ts ================================================ import type { HTTPQueryOptions } from '@neondatabase/serverless'; import { type Equal, Expect } from 'type-tests/utils.ts'; import type { NeonAuthToken } from '~/utils'; Expect['authToken'], undefined>, NeonAuthToken>>; ================================================ FILE: drizzle-orm/type-tests/utils.ts ================================================ // eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/no-empty-function export function Expect() {} export type Equal = (() => T extends X ? 1 : 2) extends (() => T extends Y ? 1 : 2) ? true : false; ================================================ FILE: drizzle-orm/vitest.config.ts ================================================ import { viteCommonjs } from '@originjs/vite-plugin-commonjs'; import tsconfigPaths from 'vite-tsconfig-paths'; import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { typecheck: { tsconfig: 'tests/tsconfig.json', }, }, plugins: [viteCommonjs(), tsconfigPaths()], }); ================================================ FILE: drizzle-seed/README.md ================================================ # Drizzle Seed > [!NOTE] > `drizzle-seed` can only be used with `drizzle-orm@0.36.4` or higher. Versions lower than this may work at runtime but could have type issues and identity column issues, as this patch was introduced in `drizzle-orm@0.36.4` ## Documentation The full API reference and package overview can be found in our [official documentation](https://orm.drizzle.team/docs/seed-overview) ## Overview `drizzle-seed` is a TypeScript library that helps you generate deterministic, yet realistic, fake data to populate your database. By leveraging a seedable pseudorandom number generator (pRNG), it ensures that the data you generate is consistent and reproducible across different runs. This is especially useful for testing, development, and debugging purposes. #### What is Deterministic Data Generation? Deterministic data generation means that the same input will always produce the same output. In the context of `drizzle-seed`, when you initialize the library with the same seed number, it will generate the same sequence of fake data every time. This allows for predictable and repeatable data sets. #### Pseudorandom Number Generator (pRNG) A pseudorandom number generator is an algorithm that produces a sequence of numbers that approximates the properties of random numbers. However, because it's based on an initial value called a seed, you can control its randomness. By using the same seed, the pRNG will produce the same sequence of numbers, making your data generation process reproducible. #### Benefits of Using a pRNG: - Consistency: Ensures that your tests run on the same data every time. - Debugging: Makes it easier to reproduce and fix bugs by providing a consistent data set. - Collaboration: Team members can share seed numbers to work with the same data sets. With drizzle-seed, you get the best of both worlds: the ability to generate realistic fake data and the control to reproduce it whenever needed. ## Getting started `npm install drizzle-seed` You have to install `drizzle-orm` in order to use `drizzle-seed`. `npm install drizzle-orm` ## Basic Usage In this example we will create 10 users with random names and ids ```ts {12} import { pgTable, integer, text } from "drizzle-orm/pg-core"; import { drizzle } from "drizzle-orm/node-postgres"; import { seed } from "drizzle-seed"; const users = pgTable("users", { id: integer().primaryKey(), name: text().notNull(), }); async function main() { const db = drizzle(process.env.DATABASE_URL!); await seed(db, { users }); } main(); ``` ## Options **`count`** By default, the `seed` function will create 10 entities. However, if you need more for your tests, you can specify this in the seed options object ```ts await seed(db, schema, { count: 1000 }); ``` **`seed`** If you need a seed to generate a different set of values for all subsequent runs, you can define a different number in the `seed` option. Any new number will generate a unique set of values ```ts await seed(db, schema, { seed: 12345 }); ``` ## Reset databases With `drizzle-seed`, you can easily reset your database and seed it with new values, for example, in your test suites ```ts // path to a file with schema you want to reset import * as schema from "./schema.ts"; import { reset } from "drizzle-seed"; async function main() { const db = drizzle(process.env.DATABASE_URL!); await reset(db, schema); } main(); ``` More examples are available in our [official documentation](https://orm.drizzle.team/docs/seed-overview) ================================================ FILE: drizzle-seed/package.json ================================================ { "name": "drizzle-seed", "version": "0.3.1", "main": "index.js", "type": "module", "scripts": { "build": "tsx scripts/build.ts", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", "test": "vitest --config ./vitest.config.ts", "test:types": "cd type-tests && tsc", "generate-for-tests:pg": "drizzle-kit generate --config=./src/tests/pg/drizzle.config.ts", "generate-for-tests:mysql": "drizzle-kit generate --config=./src/tests/mysql/drizzle.config.ts", "generate-for-tests:sqlite": "drizzle-kit generate --config=./src/tests/sqlite/drizzle.config.ts", "generate": "drizzle-kit generate", "start": "npx tsx ./src/dev/test.ts", "start:pg": "npx tsx ./src/tests/northwind/pgTest.ts", "start:mysql": "npx tsx ./src/tests/northwind/mysqlTest.ts", "start:sqlite": "npx tsx ./src/tests/northwind/sqliteTest.ts", "benchmark": "npx tsx ./src/tests/benchmarks/generatorsBenchmark.ts", "publish": "npm publish package.tgz" }, "author": "Drizzle Team", "license": "Apache-2.0", "bugs": { "url": "https://github.com/drizzle-team/drizzle-orm/issues" }, "keywords": [ "drizzle", "orm", "pg", "mysql", "postgresql", "postgres", "sqlite", "database", "sql", "typescript", "ts", "drizzle-orm", "drizzle-seed", "seeding", "seed" ], "repository": { "type": "git", "url": "git+https://github.com/drizzle-team/drizzle-orm.git" }, "publishConfig": { "provenance": true }, "sideEffects": false, "description": "A package to seed your database using Drizzle ORM", "exports": { ".": { "import": { "types": "./index.d.mts", "default": "./index.mjs" }, "require": { "types": "./index.d.cjs", "default": "./index.cjs" }, "types": "./index.d.ts", "default": "./index.mjs" } }, "peerDependencies": { "drizzle-orm": ">=0.36.4" }, "peerDependenciesMeta": { "drizzle-orm": { "optional": true } }, "devDependencies": { "@arethetypeswrong/cli": "^0.16.1", "@electric-sql/pglite": "^0.2.12", "@rollup/plugin-terser": "^0.4.4", "@rollup/plugin-typescript": "^11.1.6", "@types/better-sqlite3": "^7.6.11", "@types/dockerode": "^3.3.31", "@types/node": "^22.5.4", "@types/pg": "^8.11.6", "@types/uuid": "^10.0.0", "better-sqlite3": "^11.1.2", "cpy": "^11.1.0", "dockerode": "^4.0.6", "dotenv": "^16.4.5", "drizzle-kit": "workspace:./drizzle-kit/dist", "drizzle-orm": "workspace:./drizzle-orm/dist", "get-port": "^7.1.0", "mysql2": "^3.14.1", "pg": "^8.12.0", "resolve-tspaths": "^0.8.19", "rollup": "^3.29.5", "tslib": "^2.7.0", "tsx": "^4.19.0", "uuid": "^10.0.0", "vitest": "^3.1.3", "zx": "^8.1.5" }, "dependencies": { "pure-rand": "^6.1.0" } } ================================================ FILE: drizzle-seed/rollup.config.ts ================================================ import typescript from '@rollup/plugin-typescript'; import { defineConfig } from 'rollup'; export default defineConfig([ { input: 'src/index.ts', output: [ { format: 'esm', dir: 'dist', entryFileNames: '[name].mjs', chunkFileNames: '[name]-[hash].mjs', sourcemap: true, }, { format: 'cjs', dir: 'dist', entryFileNames: '[name].cjs', chunkFileNames: '[name]-[hash].cjs', sourcemap: true, }, ], external: [/^drizzle-orm\/?/, 'pure-rand'], plugins: [ typescript({ tsconfig: 'tsconfig.build.json', }), ], }, ]); ================================================ FILE: drizzle-seed/scripts/build.ts ================================================ #!/usr/bin/env -S pnpm tsx import 'zx/globals'; import cpy from 'cpy'; await fs.remove('dist'); await $`rollup --config rollup.config.ts --configPlugin typescript`; await $`resolve-tspaths`; await fs.copy('README.md', 'dist/README.md'); await cpy('dist/**/*.d.ts', 'dist', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.mts'), }); await cpy('dist/**/*.d.ts', 'dist', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.cts'), }); await fs.copy('package.json', 'dist/package.json'); ================================================ FILE: drizzle-seed/src/datasets/adjectives.ts ================================================ /** * The original source for the Adjectives data was taken from https://www.kaggle.com/datasets/jordansiem/adjectives-list */ export default [ 'abandoned', 'abdominal', 'abhorrent', 'abiding', 'abject', 'able', 'able-bodied', 'abnormal', 'abounding', 'abrasive', 'abrupt', 'absent', 'absentminded', 'absolute', 'absorbed', 'absorbing', 'abstracted', 'absurd', 'abundant', 'abusive', 'abysmal', 'academic', 'acceptable', 'accepting', 'accessible', 'accidental', 'acclaimed', 'accommodating', 'accompanying', 'accountable', 'accurate', 'accusative', 'accused', 'accusing', 'acerbic', 'achievable', 'aching', 'acid', 'acidic', 'acknowledged', 'acoustic', 'acrid', 'acrimonious', 'acrobatic', 'actionable', 'active', 'actual', 'adhoc', 'adamant', 'adaptable', 'adaptive', 'addicted', 'addictive', 'additional', 'adept', 'adequate', 'adhesive', 'adjacent', 'adjoining', 'adjustable', 'administrative', 'admirable', 'admired', 'admiring', 'adopted', 'adoptive', 'adorable', 'adored', 'adoring', 'adrenalized', 'adroit', 'adult', 'advanced', 'advantageous', 'adventurous', 'adversarial', 'advisable', 'aerial', 'affable', 'affected', 'affectionate', 'affirmative', 'affordable', 'afraid', 'afternoon', 'ageless', 'aggravated', 'aggravating', 'aggressive', 'agitated', 'agonizing', 'agrarian', 'agreeable', 'aimless', 'airline', 'airsick', 'ajar', 'alarmed', 'alarming', 'alert', 'algebraic', 'alien', 'alienated', 'alike', 'alive', 'all-around', 'alleged', 'allowable', 'all-purpose', 'all-too-common', 'alluring', 'allusive', 'alone', 'aloof', 'alterable', 'alternating', 'alternative', 'amazed', 'amazing', 'ambiguous', 'ambitious', 'ambulant', 'ambulatory', 'amiable', 'amicable', 'amphibian', 'amused', 'amusing', 'ancient', 'anecdotal', 'anemic', 'angelic', 'angered', 'angry', 'angular', 'animal', 'animated', 'annoyed', 'annoying', 'annual', 'anonymous', 'another', 'antagonistic', 'anticipated', 'anticlimactic', 'anticorrosive', 'antiquated', 'antiseptic', 'antisocial', 'antsy', 'anxious', 'any', 'apathetic', 'apologetic', 'apologizing', 'appalling', 'appealing', 'appetizing', 'applauding', 'applicable', 'applicative', 'appreciative', 'apprehensive', 'approachable', 'approaching', 'appropriate', 'approving', 'approximate', 'aquatic', 'architectural', 'ardent', 'arduous', 'arguable', 'argumentative', 'arid', 'aristocratic', 'aromatic', 'arresting', 'arrogant', 'artful', 'artificial', 'artistic', 'artless', 'ashamed', 'aspiring', 'assertive', 'assignable', 'assorted', 'assumable', 'assured', 'assuring', 'astonished', 'astonishing', 'astounded', 'astounding', 'astringent', 'astronomical', 'astute', 'asymmetrical', 'athletic', 'atomic', 'atrocious', 'attachable', 'attainable', 'attentive', 'attractive', 'attributable', 'atypical', 'audacious', 'auspicious', 'authentic', 'authoritarian', 'authoritative', 'autobiographic', 'autographed', 'automatic', 'autonomous', 'available', 'avant-garde', 'avenging', 'average', 'avian', 'avid', 'avoidable', 'awake', 'awakening', 'aware', 'away', 'awesome', 'awful', 'awkward', 'axiomatic', 'babbling', 'baby', 'background', 'backhanded', 'bacterial', 'bad', 'bad-tempered', 'baffled', 'baffling', 'bald', 'balding', 'balmy', 'bandaged', 'banging', 'bankable', 'banned', 'bantering', 'barbaric', 'barbarous', 'barbequed', 'barefooted', 'barking', 'barren', 'bashful', 'basic', 'battered', 'batty', 'bawling', 'beady', 'beaming', 'bearable', 'beautiful', 'beckoning', 'bedazzled', 'bedazzling', 'beefy', 'beeping', 'befitting', 'befuddled', 'beginning', 'belching', 'believable', 'bellicose', 'belligerent', 'bellowing', 'bendable', 'beneficial', 'benevolent', 'benign', 'bent', 'berserk', 'best', 'betrayed', 'better', 'betteroff', 'better-late-than-never', 'bewildered', 'bewildering', 'bewitched', 'bewitching', 'biased', 'biblical', 'big', 'big-city', 'bigger', 'biggest', 'big-headed', 'bighearted', 'bigoted', 'bilingual', 'billable', 'billowy', 'binary', 'binding', 'bioactive', 'biodegradable', 'biographical', 'bite-sized', 'biting', 'bitter', 'bizarre', 'black', 'black-and-blue', 'blamable', 'blameless', 'bland', 'blank', 'blaring', 'blasphemous', 'blatant', 'blazing', 'bleached', 'bleak', 'bleary', 'bleary-eyed', 'blessed', 'blind', 'blindfolded', 'blinding', 'blissful', 'blistering', 'bloated', 'blonde', 'bloodied', 'blood-red', 'bloodthirsty', 'bloody', 'blooming', 'blossoming', 'blue', 'blundering', 'blunt', 'blurred', 'blurry', 'blushing', 'boastful', 'bodacious', 'bohemian', 'boiling', 'boisterous', 'bold', 'bookish', 'booming', 'boorish', 'bordering', 'bored', 'boring', 'born', 'bossy', 'both', 'bothered', 'bouncing', 'bouncy', 'boundless', 'bountiful', 'boyish', 'braided', 'brainless', 'brainy', 'brash', 'brassy', 'brave', 'brawny', 'brazen', 'breakable', 'breathable', 'breathless', 'breathtaking', 'breezy', 'bribable', 'brick', 'brief', 'bright', 'bright-eyed', 'bright-red', 'brilliant', 'briny', 'brisk', 'bristly', 'broad', 'broken', 'broken-hearted', 'bronchial', 'bronze', 'bronzed', 'brooding', 'brown', 'bruised', 'brunette', 'brutal', 'brutish', 'bubbly', 'budget', 'built-in', 'bulky', 'bumpy', 'bungling', 'buoyant', 'bureaucratic', 'burly', 'burnable', 'burning', 'bushy', 'busiest', 'business', 'bustling', 'busy', 'buzzing', 'cackling', 'caged', 'cagey', 'calculable', 'calculated', 'calculating', 'callous', 'calm', 'calming', 'camouflaged', 'cancelled', 'cancerous', 'candid', 'cantankerous', 'capable', 'capricious', 'captivated', 'captivating', 'captive', 'carefree', 'careful', 'careless', 'caring', 'carnivorous', 'carpeted', 'carsick', 'casual', 'catastrophic', 'catatonic', 'catchable', 'caustic', 'cautious', 'cavalier', 'cavernous', 'ceaseless', 'celebrated', 'celestial', 'centered', 'central', 'cerebral', 'ceremonial', 'certain', 'certifiable', 'certified', 'challenged', 'challenging', 'chance', 'changeable', 'changing', 'chanting', 'charging', 'charismatic', 'charitable', 'charmed', 'charming', 'chattering', 'chatting', 'chatty', 'chauvinistic', 'cheap', 'cheapest', 'cheeky', 'cheerful', 'cheering', 'cheerless', 'cheery', 'chemical', 'chewable', 'chewy', 'chic', 'chicken', 'chief', 'childish', 'childlike', 'chilling', 'chilly', 'chivalrous', 'choice', 'choking', 'choppy', 'chronological', 'chubby', 'chuckling', 'chunky', 'cinematic', 'circling', 'circular', 'circumstantial', 'civil', 'civilian', 'civilized', 'clammy', 'clamoring', 'clandestine', 'clanging', 'clapping', 'clashing', 'classic', 'classical', 'classifiable', 'classified', 'classy', 'clean', 'cleanable', 'clear', 'cleared', 'clearheaded', 'clever', 'climatic', 'climbable', 'clinging', 'clingy', 'clinical', 'cliquish', 'clogged', 'cloistered', 'close', 'closeable', 'closed', 'close-minded', 'cloudless', 'cloudy', 'clownish', 'clueless', 'clumsy', 'cluttered', 'coachable', 'coarse', 'cockamamie', 'cocky', 'codified', 'coercive', 'cognitive', 'coherent', 'cohesive', 'coincidental', 'cold', 'coldhearted', 'collaborative', 'collapsed', 'collapsing', 'collectable', 'collegial', 'colloquial', 'colonial', 'colorful', 'colorless', 'colossal', 'combative', 'combined', 'comfortable', 'comforted', 'comforting', 'comical', 'commanding', 'commemorative', 'commendable', 'commercial', 'committed', 'common', 'communal', 'communicable', 'communicative', 'communist', 'compact', 'comparable', 'comparative', 'compassionate', 'compelling', 'competent', 'competitive', 'complacent', 'complaining', 'complete', 'completed', 'complex', 'compliant', 'complicated', 'complimentary', 'compound', 'comprehensive', 'compulsive', 'compulsory', 'computer', 'computerized', 'concealable', 'concealed', 'conceited', 'conceivable', 'concerned', 'concerning', 'concerted', 'concise', 'concurrent', 'condemned', 'condensed', 'condescending', 'conditional', 'confident', 'confidential', 'confirmable', 'confirmed', 'conflicted', 'conflicting', 'conformable', 'confounded', 'confused', 'confusing', 'congenial', 'congested', 'congressional', 'congruent', 'congruous', 'connectable', 'connected', 'connecting', 'connective', 'conscientious', 'conscious', 'consecutive', 'consensual', 'consenting', 'conservative', 'considerable', 'considerate', 'consistent', 'consoling', 'conspicuous', 'conspiratorial', 'constant', 'constitutional', 'constrictive', 'constructive', 'consumable', 'consummate', 'contagious', 'containable', 'contemplative', 'contemporary', 'contemptible', 'contemptuous', 'content', 'contented', 'contentious', 'contextual', 'continual', 'continuing', 'continuous', 'contoured', 'contractual', 'contradicting', 'contradictory', 'contrarian', 'contrary', 'contributive', 'contrite', 'controllable', 'controlling', 'controversial', 'convenient', 'conventional', 'conversational', 'convinced', 'convincing', 'convoluted', 'convulsive', 'cooing', 'cooked', 'cool', 'coolest', 'cooperative', 'coordinated', 'copious', 'coquettish', 'cordial', 'corner', 'cornered', 'corny', 'corporate', 'corpulent', 'correct', 'correctable', 'corrective', 'corresponding', 'corrosive', 'corrupt', 'corrupting', 'corruptive', 'cosmetic', 'cosmic', 'costly', 'cottony', 'coughing', 'courageous', 'courteous', 'covert', 'coveted', 'cowardly', 'cowering', 'coy', 'cozy', 'crabby', 'cracked', 'crackling', 'crafty', 'craggy', 'crammed', 'cramped', 'cranky', 'crashing', 'crass', 'craven', 'crawling', 'crazy', 'creaking', 'creaky', 'creamy', 'creative', 'credible', 'creeping', 'creepy', 'crestfallen', 'criminal', 'crippled', 'crippling', 'crisp', 'crispy', 'critical', 'crooked', 'cropped', 'cross', 'crossed', 'crotchety', 'crowded', 'crucial', 'crude', 'cruel', 'crumbling', 'crumbly', 'crumply', 'crunchable', 'crunching', 'crunchy', 'crushable', 'crushed', 'crusty', 'crying', 'cryptic', 'crystalline', 'crystallized', 'cuddly', 'culpable', 'cultural', 'cultured', 'cumbersome', 'cumulative', 'cunning', 'curable', 'curative', 'curious', 'curly', 'current', 'cursed', 'curt', 'curved', 'curvy', 'customary', 'cut', 'cute', 'cutting', 'cylindrical', 'cynical', 'daffy', 'daft', 'daily', 'dainty', 'damaged', 'damaging', 'damp', 'danceable', 'dandy', 'dangerous', 'dapper', 'daring', 'dark', 'darkened', 'dashing', 'daughterly', 'daunting', 'dawdling', 'day', 'dazed', 'dazzling', 'dead', 'deadly', 'deadpan', 'deaf', 'deafening', 'dear', 'debatable', 'debonair', 'decadent', 'decayed', 'decaying', 'deceitful', 'deceivable', 'deceiving', 'decent', 'decentralized', 'deceptive', 'decimated', 'decipherable', 'decisive', 'declining', 'decorative', 'decorous', 'decreasing', 'decrepit', 'dedicated', 'deep', 'deepening', 'deeply', 'defeated', 'defective', 'defendable', 'defenseless', 'defensible', 'defensive', 'defiant', 'deficient', 'definable', 'definitive', 'deformed', 'degenerative', 'degraded', 'dehydrated', 'dejected', 'delectable', 'deliberate', 'deliberative', 'delicate', 'delicious', 'delighted', 'delightful', 'delinquent', 'delirious', 'deliverable', 'deluded', 'demanding', 'demented', 'democratic', 'demonic', 'demonstrative', 'demure', 'deniable', 'dense', 'dependable', 'dependent', 'deplorable', 'deploring', 'depraved', 'depressed', 'depressing', 'depressive', 'deprived', 'deranged', 'derivative', 'derogative', 'derogatory', 'descriptive', 'deserted', 'designer', 'desirable', 'desirous', 'desolate', 'despairing', 'desperate', 'despicable', 'despised', 'despondent', 'destroyed', 'destructive', 'detachable', 'detached', 'detailed', 'detectable', 'determined', 'detestable', 'detrimental', 'devastated', 'devastating', 'devious', 'devoted', 'devout', 'dexterous', 'diabolical', 'diagonal', 'didactic', 'different', 'difficult', 'diffuse', 'digestive', 'digital', 'dignified', 'digressive', 'dilapidated', 'diligent', 'dim', 'diminishing', 'diminutive', 'dingy', 'diplomatic', 'dire', 'direct', 'direful', 'dirty', 'disabled', 'disadvantaged', 'disadvantageous', 'disaffected', 'disagreeable', 'disappearing', 'disappointed', 'disappointing', 'disapproving', 'disarming', 'disastrous', 'discarded', 'discernable', 'disciplined', 'disconnected', 'discontented', 'discordant', 'discouraged', 'discouraging', 'discourteous', 'discredited', 'discreet', 'discriminating', 'discriminatory', 'discussable', 'disdainful', 'diseased', 'disenchanted', 'disgraceful', 'disgruntled', 'disgusted', 'disgusting', 'disheartened', 'disheartening', 'dishonest', 'dishonorable', 'disillusioned', 'disinclined', 'disingenuous', 'disinterested', 'disjointed', 'dislikeable', 'disliked', 'disloyal', 'dismal', 'dismissive', 'disobedient', 'disorderly', 'disorganized', 'disparaging', 'disparate', 'dispassionate', 'dispensable', 'displaced', 'displeased', 'displeasing', 'disposable', 'disproportionate', 'disproved', 'disputable', 'disputatious', 'disputed', 'disreputable', 'disrespectful', 'disruptive', 'dissatisfied', 'dissimilar', 'dissolvable', 'dissolving', 'dissonant', 'dissuasive', 'distant', 'distasteful', 'distinct', 'distinctive', 'distinguished', 'distracted', 'distracting', 'distraught', 'distressed', 'distressing', 'distrustful', 'disturbed', 'disturbing', 'divergent', 'diverging', 'diverse', 'diversified', 'divided', 'divine', 'divisive', 'dizzy', 'dizzying', 'doable', 'documentary', 'dogged', 'doggish', 'dogmatic', 'doleful', 'dollish', 'domed', 'domestic', 'dominant', 'domineering', 'dorsal', 'doting', 'double', 'doubtful', 'doubting', 'dovish', 'dowdy', 'down', 'down-and-out', 'downhearted', 'downloadable', 'downtown', 'downward', 'dozing', 'drab', 'drained', 'dramatic', 'drastic', 'dreaded', 'dreadful', 'dreaming', 'dreamy', 'dreary', 'drenched', 'dress', 'dressy', 'dried', 'dripping', 'drivable', 'driven', 'droll', 'drooping', 'droopy', 'drowsy', 'drunk', 'dry', 'dual', 'dubious', 'due', 'dulcet', 'dull', 'duplicitous', 'durable', 'dusty', 'dutiful', 'dwarfish', 'dwindling', 'dynamic', 'dysfunctional', 'each', 'eager', 'early', 'earnest', 'ear-piercing', 'ear-splitting', 'earthshaking', 'earthy', 'east', 'eastern', 'easy', 'eatable', 'eccentric', 'echoing', 'ecological', 'economic', 'economical', 'economy', 'ecstatic', 'edgy', 'editable', 'educated', 'educational', 'eerie', 'effective', 'effervescent', 'efficacious', 'efficient', 'effortless', 'effusive', 'egalitarian', 'egocentric', 'egomaniacal', 'egotistical', 'eight', 'eighth', 'either', 'elaborate', 'elastic', 'elated', 'elderly', 'electric', 'electrical', 'electrifying', 'electronic', 'elegant', 'elementary', 'elevated', 'elfish', 'eligible', 'elite', 'eloquent', 'elusive', 'emaciated', 'embarrassed', 'embarrassing', 'embattled', 'embittered', 'emblematic', 'emboldened', 'embroiled', 'emergency', 'eminent', 'emotional', 'emotionless', 'empirical', 'empty', 'enamored', 'enchanted', 'enchanting', 'encouraged', 'encouraging', 'encrusted', 'endangered', 'endearing', 'endemic', 'endless', 'endurable', 'enduring', 'energetic', 'energizing', 'enforceable', 'engaging', 'engrossing', 'enhanced', 'enigmatic', 'enjoyable', 'enlarged', 'enlightened', 'enormous', 'enough', 'enraged', 'ensuing', 'enterprising', 'entertained', 'entertaining', 'enthralled', 'enthused', 'enthusiastic', 'enticing', 'entire', 'entranced', 'entrepreneurial', 'enumerable', 'enviable', 'envious', 'environmental', 'episodic', 'equable', 'equal', 'equidistant', 'equitable', 'equivalent', 'erasable', 'erect', 'eroding', 'errant', 'erratic', 'erroneous', 'eruptive', 'escalating', 'esoteric', 'essential', 'established', 'estimated', 'estranged', 'eternal', 'ethereal', 'ethical', 'ethnic', 'euphemistic', 'euphoric', 'evasive', 'even', 'evenhanded', 'evening', 'eventful', 'eventual', 'everlasting', 'every', 'evil', 'evocative', 'exacerbating', 'exact', 'exacting', 'exaggerated', 'exalted', 'exasperated', 'exasperating', 'excellent', 'exceptional', 'excessive', 'exchangeable', 'excitable', 'excited', 'exciting', 'exclusive', 'excruciating', 'excusable', 'executable', 'exemplary', 'exhausted', 'exhausting', 'exhaustive', 'exhilarated', 'exhilarating', 'existing', 'exotic', 'expandable', 'expanded', 'expanding', 'expansive', 'expectant', 'expected', 'expedient', 'expeditious', 'expendable', 'expensive', 'experimental', 'expert', 'expired', 'expiring', 'explainable', 'explicit', 'exploding', 'exploitative', 'exploited', 'explosive', 'exponential', 'exposed', 'express', 'expressionistic', 'expressionless', 'expressive', 'exquisite', 'extemporaneous', 'extendable', 'extended', 'extension', 'extensive', 'exterior', 'external', 'extra', 'extra-large', 'extraneous', 'extraordinary', 'extra-small', 'extravagant', 'extreme', 'exuberant', 'eye-popping', 'fabled', 'fabulous', 'facetious', 'facial', 'factitious', 'factual', 'faded', 'fading', 'failed', 'faint', 'fainthearted', 'fair', 'faithful', 'faithless', 'fallacious', 'false', 'falsified', 'faltering', 'familiar', 'famished', 'famous', 'fanatical', 'fanciful', 'fancy', 'fantastic', 'far', 'faraway', 'farcical', 'far-flung', 'farsighted', 'fascinated', 'fascinating', 'fascistic', 'fashionable', 'fast', 'fastest', 'fastidious', 'fast-moving', 'fat', 'fatal', 'fateful', 'fatherly', 'fathomable', 'fathomless', 'fatigued', 'faulty', 'favorable', 'favorite', 'fawning', 'feared', 'fearful', 'fearless', 'fearsome', 'feathered', 'feathery', 'feckless', 'federal', 'feeble', 'feebleminded', 'feeling', 'feigned', 'felonious', 'female', 'feminine', 'fermented', 'ferocious', 'fertile', 'fervent', 'fervid', 'festive', 'fetching', 'fetid', 'feudal', 'feverish', 'few,', 'fewer', 'fictional', 'fictitious', 'fidgeting', 'fidgety', 'fiendish', 'fierce', 'fiery', 'fifth', 'filmy', 'filtered', 'filthy', 'final', 'financial', 'fine', 'finicky', 'finite', 'fireproof', 'firm', 'first', 'fiscal', 'fishy', 'fit', 'fitted', 'fitting', 'five', 'fixable', 'fixed', 'flabby', 'flagrant', 'flaky', 'flamboyant', 'flaming', 'flammable', 'flashy', 'flat', 'flattened', 'flattered', 'flattering', 'flavored', 'flavorful', 'flavorless', 'flawed', 'flawless', 'fleeting', 'flexible', 'flickering', 'flimsy', 'flippant', 'flirtatious', 'floating', 'flooded', 'floppy', 'floral', 'flowering', 'flowery', 'fluent', 'fluffy', 'flushed', 'fluttering', 'flying', 'foamy', 'focused', 'foggy', 'folded', 'following', 'fond', 'foolhardy', 'foolish', 'forbidding', 'forceful', 'foreboding', 'foregoing', 'foreign', 'forensic', 'foreseeable', 'forged', 'forgetful', 'forgettable', 'forgivable', 'forgiving', 'forgotten', 'forked', 'formal', 'formative', 'former', 'formidable', 'formless', 'formulaic', 'forthright', 'fortuitous', 'fortunate', 'forward', 'foul', 'foul-smelling', 'four', 'fourth', 'foxy', 'fractional', 'fractious', 'fragile', 'fragmented', 'fragrant', 'frail', 'frank', 'frantic', 'fraternal', 'fraudulent', 'frayed', 'freakish', 'freaky', 'freckled', 'free', 'freezing', 'frequent', 'fresh', 'fretful', 'fried', 'friendly', 'frightened', 'frightening', 'frightful', 'frigid', 'frilly', 'frisky', 'frivolous', 'front', 'frosty', 'frothy', 'frowning', 'frozen', 'frugal', 'fruitful', 'fruitless', 'fruity', 'frumpy', 'frustrated', 'frustrating', 'fulfilled', 'fulfilling', 'full', 'fully-grown', 'fumbling', 'fuming', 'fun', 'functional', 'fundamental', 'fun-loving', 'funniest', 'funny', 'furious', 'furry', 'furthest', 'furtive', 'fussy', 'futile', 'future', 'futuristic', 'fuzzy', 'gabby', 'gainful', 'gallant', 'galling', 'game', 'gangly', 'gaping', 'garbled', 'gargantuan', 'garish', 'garrulous', 'gaseous', 'gasping', 'gaudy', 'gaunt', 'gauzy', 'gawky', 'general', 'generative', 'generic', 'generous', 'genial', 'gentle', 'genuine', 'geographic', 'geologic', 'geometric', 'geriatric', 'ghastly', 'ghostly', 'ghoulish', 'giant', 'giddy', 'gifted', 'gigantic', 'giggling', 'gilded', 'giving', 'glad', 'glamorous', 'glaring', 'glass', 'glassy', 'gleaming', 'glib', 'glistening', 'glittering', 'global', 'globular', 'gloomy', 'glorious', 'glossy', 'glowing', 'gluey', 'glum', 'gluttonous', 'gnarly', 'gold', 'golden', 'good', 'good-looking', 'good-natured', 'gooey', 'goofy', 'gorgeous', 'graceful', 'gracious', 'gradual', 'grainy', 'grand', 'grandiose', 'graphic', 'grateful', 'gratified', 'gratifying', 'grating', 'gratis', 'gratuitous', 'grave', 'gray', 'greasy', 'great', 'greatest', 'greedy', 'green', 'gregarious', 'grey', 'grieving', 'grim', 'grimacing', 'grimy', 'grinding', 'grinning', 'gripping', 'gritty', 'grizzled', 'groaning', 'groggy', 'groomed', 'groovy', 'gross', 'grotesque', 'grouchy', 'growling', 'grown-up', 'grubby', 'grueling', 'gruesome', 'gruff', 'grumbling', 'grumpy', 'guaranteed', 'guarded', 'guiltless', 'guilt-ridden', 'guilty', 'gullible', 'gurgling', 'gushing', 'gushy', 'gusty', 'gutsy', 'habitable', 'habitual', 'haggard', 'hairless', 'hairy', 'half', 'halfhearted', 'hallowed', 'halting', 'handsome', 'handy', 'hanging', 'haphazard', 'hapless', 'happy', 'hard', 'hard-to-find', 'hardworking', 'hardy', 'harebrained', 'harmful', 'harmless', 'harmonic', 'harmonious', 'harried', 'harsh', 'hasty', 'hated', 'hateful', 'haughty', 'haunting', 'hawkish', 'hazardous', 'hazy', 'head', 'heady', 'healthy', 'heartbreaking', 'heartbroken', 'heartless', 'heartrending', 'hearty', 'heated', 'heavenly', 'heavy', 'hectic', 'hefty', 'heinous', 'helpful', 'helpless', 'her', 'heroic', 'hesitant', 'hideous', 'high', 'highest', 'highfalutin', 'high-functioning', 'high-maintenance', 'high-pitched', 'high-risk', 'hilarious', 'his', 'hissing', 'historical', 'hoarse', 'hoggish', 'holiday', 'holistic', 'hollow', 'home', 'homeless', 'homely', 'homeopathic', 'homey', 'homogeneous', 'honest', 'honking', 'honorable', 'hopeful', 'hopeless', 'horizontal', 'hormonal', 'horned', 'horrendous', 'horrible', 'horrid', 'horrific', 'horrified', 'horrifying', 'hospitable', 'hostile', 'hot', 'hotpink', 'hot-blooded', 'hotheaded', 'hot-shot', 'hot-tempered', 'hour-long', 'house', 'howling', 'huffy', 'huge', 'huggable', 'hulking', 'human', 'humanitarian', 'humanlike', 'humble', 'humdrum', 'humid', 'humiliated', 'humiliating', 'humming', 'humongous', 'humorless', 'humorous', 'hungry', 'hurried', 'hurt', 'hurtful', 'hushed', 'husky', 'hydraulic', 'hydrothermal', 'hygienic', 'hyper-active', 'hyperbolic', 'hypercritical', 'hyperirritable', 'hypersensitive', 'hypertensive', 'hypnotic', 'hypnotizable', 'hypothetical', 'hysterical', 'icky', 'iconoclastic', 'icy', 'icy-cold', 'ideal', 'idealistic', 'identical', 'identifiable', 'idiosyncratic', 'idiotic', 'idyllic', 'ignorable', 'ignorant', 'ill', 'illegal', 'illegible', 'illegitimate', 'ill-equipped', 'ill-fated', 'ill-humored', 'illicit', 'ill-informed', 'illiterate', 'illogical', 'illuminating', 'illusive', 'illustrious', 'imaginable', 'imaginary', 'imaginative', 'imitative', 'immaculate', 'immanent', 'immature', 'immeasurable', 'immediate', 'immense', 'immensurable', 'imminent', 'immobile', 'immodest', 'immoral', 'immortal', 'immovable', 'impartial', 'impassable', 'impassioned', 'impatient', 'impeccable', 'impenetrable', 'imperative', 'imperceptible', 'imperceptive', 'imperfect', 'imperial', 'imperialistic', 'impermeable', 'impersonal', 'impertinent', 'impervious', 'impetuous', 'impish', 'implausible', 'implicit', 'implosive', 'impolite', 'imponderable', 'important', 'imported', 'imposing', 'impossible', 'impoverished', 'impractical', 'imprecise', 'impressionable', 'impressive', 'improbable', 'improper', 'improvable', 'improved', 'improving', 'imprudent', 'impulsive', 'impure', 'inaccessible', 'inaccurate', 'inactive', 'inadequate', 'inadmissible', 'inadvertent', 'inadvisable', 'inalienable', 'inalterable', 'inane', 'inanimate', 'inapplicable', 'inappropriate', 'inapt', 'inarguable', 'inarticulate', 'inartistic', 'inattentive', 'inaudible', 'inauspicious', 'incalculable', 'incandescent', 'incapable', 'incessant', 'incidental', 'inclusive', 'incoherent', 'incomparable', 'incompatible', 'incompetent', 'incomplete', 'incomprehensible', 'inconceivable', 'inconclusive', 'incongruent', 'incongruous', 'inconsequential', 'inconsiderable', 'inconsiderate', 'inconsistent', 'inconsolable', 'inconspicuous', 'incontrovertible', 'inconvenient', 'incorrect', 'incorrigible', 'incorruptible', 'increasing', 'incredible', 'incredulous', 'incremental', 'incurable', 'indecent', 'indecipherable', 'indecisive', 'indefensible', 'indefinable', 'indefinite', 'indelible', 'independent', 'indescribable', 'indestructible', 'indeterminable', 'indeterminate', 'indicative', 'indifferent', 'indigenous', 'indignant', 'indirect', 'indiscreet', 'indiscriminate', 'indispensable', 'indisputable', 'indistinct', 'individual', 'individualistic', 'indivisible', 'indomitable', 'inductive', 'indulgent', 'industrial', 'industrious', 'ineffective', 'ineffectual', 'inefficient', 'inelegant', 'ineloquent', 'inequitable', 'inert', 'inescapable', 'inevitable', 'inexact', 'inexcusable', 'inexhaustible', 'inexpedient', 'inexpensive', 'inexplicable', 'inexpressible', 'inexpressive', 'inextricable', 'infallible', 'infamous', 'infantile', 'infatuated', 'infected', 'infectious', 'inferable', 'inferior', 'infernal', 'infinite', 'infinitesimal', 'inflamed', 'inflammable', 'inflammatory', 'inflatable', 'inflated', 'inflexible', 'influential', 'informal', 'informative', 'informed', 'infrequent', 'infuriated', 'infuriating', 'ingenious', 'ingenuous', 'inglorious', 'ingratiating', 'inhabitable', 'inharmonious', 'inherent', 'inhibited', 'inhospitable', 'inhuman', 'inhumane', 'initial', 'injudicious', 'injured', 'injurious', 'innate', 'inner', 'innocent', 'innocuous', 'innovative', 'innumerable', 'inoffensive', 'inoperable', 'inoperative', 'inopportune', 'inordinate', 'inorganic', 'inquiring', 'inquisitive', 'insane', 'insatiable', 'inscrutable', 'insecure', 'insensible', 'insensitive', 'inseparable', 'inside', 'insidious', 'insightful', 'insignificant', 'insincere', 'insipid', 'insistent', 'insolent', 'inspirational', 'inspired', 'inspiring', 'instant', 'instantaneous', 'instinctive', 'instinctual', 'institutional', 'instructive', 'instrumental', 'insubordinate', 'insufferable', 'insufficient', 'insulted', 'insulting', 'insurable', 'insurmountable', 'intangible', 'integral', 'intellectual', 'intelligent', 'intelligible', 'intended', 'intense', 'intensive', 'intentional', 'interactive', 'interchangeable', 'interdepartmental', 'interdependent', 'interested', 'interesting', 'interior', 'intermediate', 'intermittent', 'internal', 'international', 'interpersonal', 'interracial', 'intestinal', 'intimate', 'intimidating', 'intolerable', 'intolerant', 'intravenous', 'intrepid', 'intricate', 'intrigued', 'intriguing', 'intrinsic', 'introductory', 'introspective', 'introverted', 'intrusive', 'intuitive', 'invalid', 'invaluable', 'invasive', 'inventive', 'invigorating', 'invincible', 'invisible', 'invited', 'inviting', 'involuntary', 'involved', 'inward', 'irascible', 'irate', 'iridescent', 'irksome', 'iron', 'iron-fisted', 'ironic', 'irrational', 'irreconcilable', 'irrefutable', 'irregular', 'irrelative', 'irrelevant', 'irremovable', 'irreparable', 'irreplaceable', 'irrepressible', 'irresistible', 'irresponsible', 'irretrievably', 'irreverent', 'irreversible', 'irrevocable', 'irritable', 'irritated', 'irritating', 'isolated', 'itchy', 'its', 'itty-bitty', 'jabbering', 'jaded', 'jagged', 'jarring', 'jaundiced', 'jazzy', 'jealous', 'jeering', 'jerky', 'jiggling', 'jittery', 'jobless', 'jocular', 'joint', 'jolly', 'jovial', 'joyful', 'joyless', 'joyous', 'jubilant', 'judgmental', 'judicious', 'juicy', 'jumbled', 'jumpy', 'junior', 'just', 'justifiable', 'juvenile', 'kaput', 'keen', 'key', 'kind', 'kindhearted', 'kindly', 'kinesthetic', 'kingly', 'kitchen', 'knavish', 'knightly', 'knobbed', 'knobby', 'knotty', 'knowable', 'knowing', 'knowledgeable', 'known', 'labored', 'laborious', 'lackadaisical', 'lacking', 'lacy', 'lame', 'lamentable', 'languid', 'languishing', 'lanky', 'larcenous', 'large', 'larger', 'largest', 'lascivious', 'last', 'lasting', 'late', 'latent', 'later', 'lateral', 'latest', 'latter', 'laudable', 'laughable', 'laughing', 'lavish', 'lawful', 'lawless', 'lax', 'lazy', 'lead', 'leading', 'lean', 'learnable', 'learned', 'leased', 'least', 'leather', 'leathery', 'lecherous', 'leering', 'left', 'left-handed', 'legal', 'legendary', 'legible', 'legislative', 'legitimate', 'lengthy', 'lenient', 'less', 'lesser', 'lesser-known', 'less-qualified', 'lethal', 'lethargic', 'level', 'liable', 'libelous', 'liberal', 'licensed', 'life', 'lifeless', 'lifelike', 'lifelong', 'light', 'light-blue', 'lighthearted', 'likable', 'likeable', 'likely', 'like-minded', 'lily-livered', 'limber', 'limited', 'limitless', 'limp', 'limping', 'linear', 'lined', 'lingering', 'linguistic', 'liquid', 'listless', 'literal', 'literary', 'literate', 'lithe', 'lithographic', 'litigious', 'little', 'livable', 'live', 'lively', 'livid', 'living', 'loathsome', 'local', 'locatable', 'locked', 'lofty', 'logarithmic', 'logical', 'logistic', 'lonely', 'long', 'longer', 'longest', 'longing', 'long-term', 'long-winded', 'loose', 'lopsided', 'loquacious', 'lordly', 'lost', 'loud', 'lousy', 'loutish', 'lovable', 'loveable', 'lovely', 'loving', 'low', 'low-calorie', 'low-carb', 'lower', 'low-fat', 'lowly', 'low-maintenance', 'low-ranking', 'low-risk', 'loyal', 'lucent', 'lucid', 'lucky', 'lucrative', 'ludicrous', 'lukewarm', 'lulling', 'luminescent', 'luminous', 'lumpy', 'lurid', 'luscious', 'lush', 'lustrous', 'luxurious', 'lying', 'lyrical', 'macabre', 'Machiavellian', 'macho', 'mad', 'maddening', 'magenta', 'magic', 'magical', 'magnanimous', 'magnetic', 'magnificent', 'maiden', 'main', 'maintainable', 'majestic', 'major', 'makeable', 'makeshift', 'maladjusted', 'male', 'malevolent', 'malicious', 'malignant', 'malleable', 'mammoth', 'manageable', 'managerial', 'mandatory', 'maneuverable', 'mangy', 'maniacal', 'manic', 'manicured', 'manipulative', 'man-made', 'manual', 'many,', 'marbled', 'marginal', 'marked', 'marketable', 'married', 'marvelous', 'masked', 'massive', 'master', 'masterful', 'matchless', 'material', 'materialistic', 'maternal', 'mathematical', 'matronly', 'matted', 'mature', 'maximum', 'meager', 'mean', 'meandering', 'meaningful', 'meaningless', 'mean-spirited', 'measly', 'measurable', 'meat-eating', 'meaty', 'mechanical', 'medical', 'medicinal', 'meditative', 'medium', 'medium-rare', 'meek', 'melancholy', 'mellow', 'melodic', 'melodious', 'melodramatic', 'melted', 'memorable', 'menacing', 'menial', 'mental', 'merciful', 'merciless', 'mercurial', 'mere', 'merry', 'messy', 'metabolic', 'metallic', 'metaphoric', 'meteoric', 'meticulous', 'microscopic', 'microwaveable', 'middle', 'middle-class', 'midweek', 'mighty', 'mild', 'militant', 'militaristic', 'military', 'milky', 'mincing', 'mind-bending', 'mindful', 'mindless', 'mini', 'miniature', 'minimal', 'minimum', 'minor', 'minute', 'miraculous', 'mirthful', 'miscellaneous', 'mischievous', 'miscreant', 'miserable', 'miserly', 'misguided', 'misleading', 'mission', 'mistaken', 'mistrustful', 'mistrusting', 'misty', 'mixed', 'mnemonic', 'moaning', 'mobile', 'mocking', 'moderate', 'modern', 'modest', 'modified', 'modular', 'moist', 'moldy', 'momentary', 'momentous', 'monetary', 'money-grubbing', 'monopolistic', 'monosyllabic', 'monotone', 'monotonous', 'monstrous', 'monumental', 'moody', 'moral', 'moralistic', 'morbid', 'mordant', 'more', 'moronic', 'morose', 'mortal', 'mortified', 'most', 'mother', 'motherly', 'motionless', 'motivated', 'motivating', 'motivational', 'motor', 'mountain', 'mountainous', 'mournful', 'mouthwatering', 'movable', 'moved', 'moving', 'much', 'muddled', 'muddy', 'muffled', 'muggy', 'multicultural', 'multifaceted', 'multipurpose', 'multitalented', 'mumbled', 'mundane', 'municipal', 'murky', 'muscular', 'mushy', 'musical', 'musky', 'musty', 'mutative', 'mute', 'muted', 'mutinous', 'muttering', 'mutual', 'my', 'myopic', 'mysterious', 'mystic', 'mystical', 'mystified', 'mystifying', 'mythical', 'naive', 'nameless', 'narcissistic', 'narrow', 'narrow-minded', 'nasal', 'nasty', 'national', 'native', 'natural', 'naughty', 'nauseating', 'nauseous', 'nautical', 'navigable', 'navy-blue', 'near', 'nearby', 'nearest', 'nearsighted', 'neat', 'nebulous', 'necessary', 'needless', 'needy', 'nefarious', 'negative', 'neglected', 'neglectful', 'negligent', 'negligible', 'negotiable', 'neighborly', 'neither', 'nerve-racking', 'nervous', 'neurological', 'neurotic', 'neutral', 'new', 'newest', 'next', 'next-door', 'nice', 'nifty', 'nightmarish', 'nimble', 'nine', 'ninth', 'nippy', 'no', 'noble', 'nocturnal', 'noiseless', 'noisy', 'nominal', 'nonabrasive', 'nonaggressive', 'nonchalant', 'noncommittal', 'noncompetitive', 'nonconsecutive', 'nondescript', 'nondestructive', 'nonexclusive', 'nonnegotiable', 'nonproductive', 'nonrefundable', 'nonrenewable', 'nonresponsive', 'nonrestrictive', 'nonreturnable', 'nonsensical', 'nonspecific', 'nonstop', 'nontransferable', 'nonverbal', 'nonviolent', 'normal', 'north', 'northeast', 'northerly', 'northwest', 'nostalgic', 'nosy', 'notable', 'noticeable', 'notorious', 'novel', 'noxious', 'null', 'numb', 'numberless', 'numbing', 'numerable', 'numeric', 'numerous', 'nutritional', 'nutritious', 'nutty', 'oafish', 'obedient', 'obeisant', 'obese', 'objectionable', 'objective', 'obligatory', 'obliging', 'oblique', 'oblivious', 'oblong', 'obnoxious', 'obscene', 'obscure', 'observable', 'observant', 'obsessive', 'obsolete', 'obstinate', 'obstructive', 'obtainable', 'obtrusive', 'obtuse', 'obvious', 'occasional', 'occupational', 'occupied', 'oceanic', 'odd', 'odd-looking', 'odiferous', 'odious', 'odorless', 'odorous', 'offbeat', 'offensive', 'offhanded', 'official', 'officious', 'oily', 'OK', 'okay', 'old', 'older', 'oldest', 'old-fashioned', 'ominous', 'omniscient', 'omnivorous', 'one', 'one-hour', 'onerous', 'one-sided', 'only', 'opaque', 'open', 'opened', 'openhanded', 'openhearted', 'opening', 'open-minded', 'operable', 'operatic', 'operational', 'operative', 'opinionated', 'opportune', 'opportunistic', 'opposable', 'opposed', 'opposing', 'opposite', 'oppressive', 'optimal', 'optimistic', 'optional', 'opulent', 'oral', 'orange', 'ordinary', 'organic', 'organizational', 'original', 'ornamental', 'ornate', 'ornery', 'orphaned', 'orthopedic', 'ossified', 'ostentatious', 'other', 'otherwise', 'our', 'outer', 'outermost', 'outgoing', 'outlandish', 'outraged', 'outrageous', 'outside', 'outspoken', 'outstanding', 'outward', 'oval', 'overactive', 'overaggressive', 'overall', 'overambitious', 'overassertive', 'overbearing', 'overcast', 'overcautious', 'overconfident', 'overcritical', 'overcrowded', 'overemotional', 'overenthusiastic', 'overjoyed', 'overoptimistic', 'overpowering', 'overpriced', 'overprotective', 'overqualified', 'overrated', 'oversensitive', 'oversized', 'overt', 'overwhelmed', 'overwhelming', 'overworked', 'overwrought', 'overzealous', 'own', 'oxymoronic', 'padded', 'painful', 'painless', 'painstaking', 'palatable', 'palatial', 'pale', 'pallid', 'palpable', 'paltry', 'pampered', 'panicky', 'panoramic', 'paradoxical', 'parallel', 'paranormal', 'parasitic', 'parched', 'pardonable', 'parental', 'parenthetic', 'parking', 'parsimonious', 'partial', 'particular', 'partisan', 'part-time', 'party', 'passing', 'passionate', 'passive', 'past', 'pastoral', 'patched', 'patchy', 'patented', 'paternal', 'paternalistic', 'pathetic', 'pathological', 'patient', 'patriotic', 'patronizing', 'patterned', 'payable', 'peaceable', 'peaceful', 'peculiar', 'pedantic', 'pedestrian', 'peerless', 'peeved', 'peevish', 'penetrable', 'penetrating', 'pensive', 'peppery', 'perceivable', 'perceptible', 'perceptive', 'perceptual', 'peremptory', 'perennial', 'perfect', 'perfumed', 'perilous', 'period', 'periodic', 'peripheral', 'perishable', 'perky', 'permanent', 'permeable', 'permissible', 'permissive', 'pernicious', 'perpendicular', 'perpetual', 'perplexed', 'perplexing', 'persevering', 'persistent', 'personable', 'personal', 'persuasive', 'pert', 'pertinent', 'perturbed', 'perturbing', 'pervasive', 'perverse', 'pessimistic', 'petite', 'pettish', 'petty', 'petulant', 'pharmaceutical', 'phenomenal', 'philanthropic', 'philosophical', 'phobic', 'phonemic', 'phonetic', 'phosphorescent', 'photographic', 'physical', 'physiological', 'picturesque', 'piercing', 'pigheaded', 'pink', 'pious', 'piquant', 'pitch-dark', 'pitch-perfect', 'piteous', 'pithy', 'pitiful', 'pitiless', 'pivotal', 'placid', 'plaid', 'plain', 'plane', 'planned', 'plastic', 'platonic', 'plausible', 'playful', 'pleading', 'pleasant', 'pleased', 'pleasing', 'pleasurable', 'plentiful', 'pliable', 'plodding', 'plopping', 'plucky', 'plump', 'pluralistic', 'plus', 'plush', 'pneumatic', 'poetic', 'poignant', 'pointless', 'poised', 'poisonous', 'polished', 'polite', 'political', 'polka-dotted', 'polluted', 'polyunsaturated', 'pompous', 'ponderous', 'poor', 'poorer', 'poorest', 'popping', 'popular', 'populous', 'porous', 'portable', 'portly', 'positive', 'possessive', 'possible', 'posthoc', 'posthumous', 'postoperative', 'potable', 'potent', 'potential', 'powdery', 'powerful', 'powerless', 'practical', 'pragmatic', 'praiseworthy', 'precarious', 'precious', 'precipitous', 'precise', 'precocious', 'preconceived', 'predicative', 'predictable', 'predisposed', 'predominant', 'preeminent', 'preemptive', 'prefabricated', 'preferable', 'preferential', 'pregnant', 'prehistoric', 'prejudiced', 'prejudicial', 'preliminary', 'premature', 'premeditated', 'premium', 'prenatal', 'preoccupied', 'preoperative', 'preparative', 'prepared', 'preposterous', 'prescriptive', 'present', 'presentable', 'presidential', 'pressing', 'pressurized', 'prestigious', 'presumable', 'presumptive', 'presumptuous', 'pretend', 'pretentious', 'pretty', 'prevalent', 'preventable', 'preventative', 'preventive', 'previous', 'priceless', 'pricey', 'prickly', 'prim', 'primary', 'primitive', 'primordial', 'princely', 'principal', 'principled', 'prior', 'prissy', 'pristine', 'private', 'prize', 'prized', 'proactive', 'probabilistic', 'probable', 'problematic', 'procedural', 'prodigious', 'productive', 'profane', 'professed', 'professional', 'professorial', 'proficient', 'profitable', 'profound', 'profuse', 'programmable', 'progressive', 'prohibitive', 'prolific', 'prominent', 'promised', 'promising', 'prompt', 'pronounceable', 'pronounced', 'proof', 'proper', 'prophetic', 'proportional', 'proportionate', 'proportioned', 'prospective', 'prosperous', 'protective', 'prototypical', 'proud', 'proverbial', 'provisional', 'provocative', 'provoking', 'proximal', 'proximate', 'prudent', 'prudential', 'prying', 'psychedelic', 'psychiatric', 'psychological', 'psychosomatic', 'psychotic', 'public', 'puckish', 'puffy', 'pugnacious', 'pumped', 'punctual', 'pungent', 'punishable', 'punitive', 'puny', 'pure', 'purified', 'puritanical', 'purple', 'purported', 'purposeful', 'purposeless', 'purring', 'pushy', 'pusillanimous', 'putrid', 'puzzled', 'puzzling', 'pyrotechnic', 'quackish', 'quacky', 'quaint', 'qualified', 'qualitative', 'quality', 'quantifiable', 'quantitative', 'quarrelsome', 'queasy', 'queenly', 'querulous', 'questionable', 'quick', 'quick-acting', 'quick-drying', 'quickest', 'quick-minded', 'quick-paced', 'quick-tempered', 'quick-thinking', 'quick-witted', 'quiet', 'quintessential', 'quirky', 'quivering', 'quizzical', 'quotable', 'rabid', 'racial', 'racist', 'radiant', 'radical', 'radioactive', 'ragged', 'raging', 'rainbowcolored', 'rainy', 'rakish', 'rambling', 'rambunctious', 'rampageous', 'rampant', 'rancid', 'rancorous', 'random', 'rank', 'rapid', 'rapid-fire', 'rapturous', 'rare', 'rascally', 'rash', 'rasping', 'raspy', 'rational', 'ratty', 'ravenous', 'raving', 'ravishing', 'raw', 'razor-edged', 'reactive', 'ready', 'real', 'realistic', 'reasonable', 'reassured', 'reassuring', 'rebel', 'rebellious', 'receding', 'recent', 'receptive', 'recessive', 'rechargeable', 'reciprocal', 'reckless', 'reclusive', 'recognizable', 'recognized', 'rectangular', 'rectifiable', 'recurrent', 'recyclable', 'red', 'red-blooded', 'reddish', 'redeemable', 'redolent', 'redundant', 'referential', 'refillable', 'reflective', 'refractive', 'refreshing', 'refundable', 'refurbished', 'refutable', 'regal', 'regional', 'regretful', 'regrettable', 'regular', 'reigning', 'relatable', 'relative', 'relaxed', 'relaxing', 'relentless', 'relevant', 'reliable', 'relieved', 'religious', 'reluctant', 'remaining', 'remarkable', 'remedial', 'reminiscent', 'remorseful', 'remorseless', 'remote', 'removable', 'renegotiable', 'renewable', 'rented', 'repairable', 'repaired', 'repeatable', 'repeated', 'repentant', 'repetitious', 'repetitive', 'replaceable', 'replicable', 'reported', 'reprehensible', 'representative', 'repressive', 'reproachful', 'reproductive', 'republican', 'repugnant', 'repulsive', 'reputable', 'reputed', 'rescued', 'resealable', 'resentful', 'reserved', 'resident', 'residential', 'residual', 'resilient', 'resolute', 'resolvable', 'resonant', 'resounding', 'resourceful', 'respectable', 'respectful', 'respective', 'responsible', 'responsive', 'rested', 'restful', 'restless', 'restored', 'restrained', 'restrictive', 'retired', 'retroactive', 'retrogressive', 'retrospective', 'reusable', 'revamped', 'revealing', 'revengeful', 'reverent', 'reverential', 'reverse', 'reversible', 'reviewable', 'reviled', 'revisable', 'revised', 'revocable', 'revolting', 'revolutionary', 'rewarding', 'rhetorical', 'rhythmic', 'rich', 'richer', 'richest', 'ridiculing', 'ridiculous', 'right', 'righteous', 'rightful', 'right-handed', 'rigid', 'rigorous', 'ringing', 'riotous', 'ripe', 'rippling', 'risky', 'ritualistic', 'ritzy', 'riveting', 'roaring', 'roasted', 'robotic', 'robust', 'rocketing', 'roguish', 'romantic', 'roomy', 'rosy', 'rotating', 'rotten', 'rotting', 'rotund', 'rough', 'round', 'roundtable', 'rousing', 'routine', 'rowdy', 'royal', 'ruddy', 'rude', 'rudimentary', 'rueful', 'rugged', 'ruined', 'ruinous', 'rumbling', 'rumpled', 'ruptured', 'rural', 'rusted', 'rustic', 'rustling', 'rusty', 'ruthless', 'rutted', 'saccharin', 'sacred', 'sacrificial', 'sacrilegious', 'sad', 'saddened', 'safe', 'saintly', 'salacious', 'salient', 'salt', 'salted', 'salty', 'salvageable', 'salvaged', 'same', 'sanctimonious', 'sandy', 'sane', 'sanguine', 'sanitary', 'sappy', 'sarcastic', 'sardonic', 'sassy', 'satin', 'satiny', 'satiric', 'satirical', 'satisfactory', 'satisfied', 'satisfying', 'saucy', 'savage', 'savory', 'savvy', 'scalding', 'scaly', 'scandalous', 'scant', 'scanty', 'scarce', 'scared', 'scarred', 'scary', 'scathing', 'scattered', 'scenic', 'scented', 'scheduled', 'schematic', 'scholarly', 'scholastic', 'scientific', 'scintillating', 'scorching', 'scornful', 'scrabbled', 'scraggly', 'scrappy', 'scratched', 'scratchy', 'scrawny', 'screaming', 'screeching', 'scribbled', 'scriptural', 'scruffy', 'scrumptious', 'scrupulous', 'sculpted', 'sculptural', 'scummy', 'sea', 'sealed', 'seamless', 'searching', 'searing', 'seasick', 'seasonable', 'seasonal', 'secluded', 'second', 'secondary', 'second-hand', 'secret', 'secretive', 'secular', 'secure', 'secured', 'sedate', 'seditious', 'seductive', 'seedy', 'seeming', 'seemly', 'seething', 'seismic', 'select', 'selected', 'selective', 'self-absorbed', 'self-aggrandizing', 'self-assured', 'self-centered', 'self-confident', 'self-directed', 'self-disciplined', 'self-effacing', 'self-indulgent', 'self-interested', 'selfish', 'selfless', 'self-reliant', 'self-respect', 'self-satisfied', 'sellable', 'semiconscious', 'semiofficial', 'semiprecious', 'semiprofessional', 'senior', 'sensational', 'senseless', 'sensible', 'sensitive', 'sensual', 'sensuous', 'sentimental', 'separate', 'sequential', 'serendipitous', 'serene', 'serial', 'serious', 'serrated', 'serviceable', 'seven', 'seventh', 'several', 'severe', 'shabbiest', 'shabby', 'shaded', 'shadowed', 'shadowy', 'shady', 'shaggy', 'shaky', 'shallow', 'shamefaced', 'shameful', 'shameless', 'shapeless', 'shapely', 'sharp', 'sharpened', 'shattered', 'shattering', 'sheepish', 'sheer', 'sheltered', 'shifty', 'shimmering', 'shining', 'shiny', 'shivering', 'shivery', 'shocked', 'shocking', 'shoddy', 'short', 'short-lived', 'shortsighted', 'short-tempered', 'short-term', 'showy', 'shrewd', 'shrieking', 'shrill', 'shut', 'shy', 'sick', 'sickened', 'sickening', 'sickly', 'side-splitting', 'signed', 'significant', 'silent', 'silky', 'silly', 'silver', 'silver-tongued', 'simian', 'similar', 'simple', 'simpleminded', 'simplified', 'simplistic', 'simultaneous', 'sincere', 'sinful', 'single', 'single-minded', 'singular', 'sinister', 'sinuous', 'sisterly', 'six', 'sixth', 'sizable', 'sizzling', 'skeptical', 'sketchy', 'skilled', 'skillful', 'skimpy', 'skin-deep', 'skinny', 'skittish', 'sky-blue', 'slanderous', 'slanted', 'slanting', 'sleek', 'sleeping', 'sleepless', 'sleepy', 'slender', 'slick', 'slight', 'slim', 'slimy', 'slippery', 'sloped', 'sloping', 'sloppy', 'slothful', 'slow', 'slow-moving', 'sluggish', 'slushy', 'sly', 'small', 'smaller', 'smallest', 'small-minded', 'small-scale', 'small-time', 'small-town', 'smarmy', 'smart', 'smarter', 'smartest', 'smashing', 'smeared', 'smelly', 'smiling', 'smoggy', 'smoked', 'smoky', 'smooth', 'smothering', 'smudged', 'smug', 'snapping', 'snappish', 'snappy', 'snarling', 'sneaky', 'snide', 'snippy', 'snobbish', 'snoopy', 'snooty', 'snoring', 'snotty', 'snow-white', 'snug', 'snuggly', 'soaked', 'soaking', 'soakingwet', 'soaring', 'sober', 'sociable', 'social', 'socialist', 'sociological', 'soft', 'softhearted', 'soggy', 'solar', 'soldierly', 'sole', 'solemn', 'solicitous', 'solid', 'solitary', 'somatic', 'somber', 'some,', 'sonic', 'sonly', 'soothed', 'soothing', 'sophisticated', 'sordid', 'sore', 'sorrowful', 'sorry', 'soulful', 'soulless', 'soundless', 'sour', 'south', 'southeasterly', 'southern', 'southwestern', 'spacious', 'spare', 'sparing', 'sparkling', 'sparkly', 'sparse', 'spasmodic', 'spastic', 'spatial', 'spattered', 'special', 'specialist', 'specialized', 'specific', 'speckled', 'spectacular', 'spectral', 'speculative', 'speechless', 'speedy', 'spellbinding', 'spendthrift', 'spherical', 'spicy', 'spiffy', 'spiky', 'spinal', 'spineless', 'spiral', 'spiraled', 'spirited', 'spiritless', 'spiritual', 'spiteful', 'splashing', 'splashy', 'splattered', 'splendid', 'splintered', 'spoiled', 'spoken', 'spongy', 'spontaneous', 'spooky', 'sporadic', 'sporting', 'sportsmanly', 'spotless', 'spotted', 'spotty', 'springy', 'sprite', 'spry', 'spurious', 'squalid', 'squandered', 'square', 'squashed', 'squashy', 'squatting', 'squawking', 'squealing', 'squeamish', 'squeezable', 'squiggly', 'squirming', 'squirrelly', 'stable', 'stackable', 'stacked', 'staggering', 'stagnant', 'stained', 'stale', 'stanch', 'standard', 'standing', 'standoffish', 'starched', 'star-crossed', 'stark', 'startled', 'startling', 'starving', 'stately', 'static', 'statistical', 'statuesque', 'status', 'statutory', 'staunch', 'steadfast', 'steady', 'stealth', 'steaming', 'steamy', 'steel', 'steely', 'steep', 'stereophonic', 'stereotyped', 'stereotypical', 'sterile', 'stern', 'sticky', 'stiff', 'stifled', 'stifling', 'stigmatic', 'still', 'stilled', 'stilted', 'stimulating', 'stinging', 'stingy', 'stinking', 'stinky', 'stirring', 'stock', 'stodgy', 'stoic', 'stony', 'stormy', 'stout', 'straggly', 'straight', 'straightforward', 'stranded', 'strange', 'strategic', 'streaked', 'street', 'strenuous', 'stressful', 'stretchy', 'strict', 'strident', 'striking', 'stringent', 'striped', 'strong', 'stronger', 'strongest', 'structural', 'stubborn', 'stubby', 'stuck-up', 'studied', 'studious', 'stuffed', 'stuffy', 'stumbling', 'stunned', 'stunning', 'stupendous', 'sturdy', 'stuttering', 'stylish', 'stylistic', 'suave', 'subconscious', 'subdued', 'subject', 'subjective', 'sublime', 'subliminal', 'submissive', 'subordinate', 'subsequent', 'subservient', 'substantial', 'substantiated', 'substitute', 'subterranean', 'subtitled', 'subtle', 'subversive', 'successful', 'successive', 'succinct', 'succulent', 'such', 'sudden', 'suffering', 'sufficient', 'sugary', 'suggestive', 'suitable', 'sulky', 'sullen', 'sumptuous', 'sunny', 'super', 'superabundant', 'superb', 'supercilious', 'superficial', 'superhuman', 'superior', 'superlative', 'supernatural', 'supersensitive', 'supersonic', 'superstitious', 'supple', 'supportive', 'supposed', 'suppressive', 'supreme', 'sure', 'sure-footed', 'surgical', 'surly', 'surmountable', 'surprised', 'surprising', 'surrealistic', 'survivable', 'susceptible', 'suspected', 'suspicious', 'sustainable', 'swaggering', 'swanky', 'swaying', 'sweaty', 'sweeping', 'sweet', 'sweltering', 'swift', 'swimming', 'swinish', 'swishing', 'swollen', 'swooping', 'syllabic', 'syllogistic', 'symbiotic', 'symbolic', 'symmetrical', 'sympathetic', 'symptomatic', 'synergistic', 'synonymous', 'syntactic', 'synthetic', 'systematic', 'taboo', 'tacit', 'tacky', 'tactful', 'tactical', 'tactless', 'tactual', 'tainted', 'take-charge', 'talented', 'talkative', 'tall', 'taller', 'tallest', 'tame', 'tamed', 'tan', 'tangential', 'tangible', 'tangled', 'tangy', 'tanned', 'tantalizing', 'tapered', 'tardy', 'targeted', 'tarnished', 'tart', 'tasteful', 'tasteless', 'tasty', 'tattered', 'taunting', 'taut', 'taxing', 'teachable', 'tearful', 'tearing', 'teasing', 'technical', 'technological', 'tectonic', 'tedious', 'teenage', 'teensy', 'teeny', 'teeny-tiny', 'telegraphic', 'telekinetic', 'telepathic', 'telephonic', 'telescopic', 'telling', 'temperamental', 'temperate', 'tempestuous', 'temporary', 'tempted', 'tempting', 'ten', 'tenable', 'tenacious', 'tender', 'tenderhearted', 'ten-minute', 'tense', 'tentative', 'tenth', 'tenuous', 'tepid', 'terminal', 'terrestrial', 'terrible', 'terrific', 'terrified', 'terrifying', 'territorial', 'terse', 'tested', 'testy', 'tetchy', 'textual', 'textural', 'thankful', 'thankless', 'that', 'the', 'theatrical', 'their', 'thematic', 'theological', 'theoretical', 'therapeutic', 'thermal', 'these', 'thick', 'thievish', 'thin', 'thinkable', 'third', 'thirsty', 'this', 'thorny', 'thorough', 'those', 'thoughtful', 'thoughtless', 'thrashed', 'threatened', 'threatening', 'three', 'thriftless', 'thrifty', 'thrilled', 'thrilling', 'throbbing', 'thumping', 'thundering', 'thunderous', 'ticking', 'tickling', 'ticklish', 'tidal', 'tidy', 'tight', 'tightfisted', 'time', 'timeless', 'timely', 'timid', 'timorous', 'tiny', 'tipsy', 'tired', 'tireless', 'tiresome', 'tiring', 'tolerable', 'tolerant', 'tonal', 'tone-deaf', 'toneless', 'toothsome', 'toothy', 'top', 'topical', 'topographical', 'tormented', 'torpid', 'torrential', 'torrid', 'torturous', 'total', 'touched', 'touching', 'touchy', 'tough', 'towering', 'toxic', 'traditional', 'tragic', 'trainable', 'trained', 'training', 'traitorous', 'tranquil', 'transcendent', 'transcendental', 'transformational', 'transformative', 'transformed', 'transient', 'transitional', 'transitory', 'translucent', 'transparent', 'transplanted', 'trapped', 'trashed', 'trashy', 'traumatic', 'treacherous', 'treasonable', 'treasonous', 'treasured', 'treatable', 'tremendous', 'tremulous', 'trenchant', 'trendy', 'triangular', 'tribal', 'trick', 'tricky', 'trim', 'tripping', 'trite', 'triumphant', 'trivial', 'tropical', 'troubled', 'troublesome', 'troubling', 'truculent', 'true', 'trusted', 'trustful', 'trusting', 'trustworthy', 'trusty', 'truthful', 'trying', 'tumultuous', 'tuneful', 'tuneless', 'turbulent', 'twinkling', 'twinkly', 'twisted', 'twitchy', 'two', 'typical', 'tyrannical', 'tyrannous', 'ubiquitous', 'ugly', 'ultimate', 'ultraconservative', 'ultrasensitive', 'ultrasonic', 'ultraviolet', 'unabashed', 'unabated', 'unable', 'unacceptable', 'unaccompanied', 'unaccountable', 'unaccustomed', 'unacknowledged', 'unadorned', 'unadulterated', 'unadventurous', 'unadvised', 'unaffected', 'unaffordable', 'unafraid', 'unaggressive', 'unaided', 'unalienable', 'unalterable', 'unaltered', 'unambiguous', 'unanimous', 'unannounced', 'unanswerable', 'unanticipated', 'unapologetic', 'unappealing', 'unappetizing', 'unappreciative', 'unapproachable', 'unashamed', 'unassailable', 'unassertive', 'unassisted', 'unattached', 'unattainable', 'unattractive', 'unauthorized', 'unavailable', 'unavailing', 'unavoidable', 'unbalanced', 'unbearable', 'unbeatable', 'unbeaten', 'unbecoming', 'unbelievable', 'unbelieving', 'unbendable', 'unbending', 'unbiased', 'unblemished', 'unblinking', 'unblushing', 'unbounded', 'unbreakable', 'unbridled', 'uncanny', 'uncaring', 'unceasing', 'unceremonious', 'uncertain', 'unchangeable', 'unchanging', 'uncharacteristic', 'uncharitable', 'uncharted', 'uncivil', 'uncivilized', 'unclassified', 'unclean', 'uncluttered', 'uncomely', 'uncomfortable', 'uncommitted', 'uncommon', 'uncommunicative', 'uncomplaining', 'uncomprehending', 'uncompromising', 'unconcerned', 'unconditional', 'unconfirmed', 'unconquerable', 'unconscionable', 'unconscious', 'unconstitutional', 'unconstrained', 'unconstructive', 'uncontainable', 'uncontrollable', 'unconventional', 'unconvinced', 'unconvincing', 'uncooked', 'uncooperative', 'uncoordinated', 'uncouth', 'uncovered', 'uncreative', 'uncritical', 'undamaged', 'undated', 'undaunted', 'undeclared', 'undefeated', 'undefined', 'undemocratic', 'undeniable', 'undependable', 'underdeveloped', 'underfunded', 'underhanded', 'underprivileged', 'understandable', 'understanding', 'understated', 'understood', 'undeserved', 'undesirable', 'undetected', 'undeterred', 'undeveloped', 'undeviating', 'undifferentiated', 'undignified', 'undiminished', 'undiplomatic', 'undisciplined', 'undiscovered', 'undisguised', 'undisputed', 'undistinguished', 'undivided', 'undoubted', 'unearthly', 'uneasy', 'uneducated', 'unemotional', 'unemployed', 'unencumbered', 'unending', 'unendurable', 'unenforceable', 'unenthusiastic', 'unenviable', 'unequal', 'unequaled', 'unequivocal', 'unerring', 'uneven', 'uneventful', 'unexceptional', 'unexcited', 'unexpected', 'unexplainable', 'unexplored', 'unexpressive', 'unfailing', 'unfair', 'unfaithful', 'unfaltering', 'unfamiliar', 'unfashionable', 'unfathomable', 'unfavorable', 'unfeeling', 'unfettered', 'unfilled', 'unflagging', 'unflappable', 'unflattering', 'unflinching', 'unfocused', 'unforeseeable', 'unforgettable', 'unforgivable', 'unforgiving', 'unfortunate', 'unfriendly', 'unfulfilled', 'ungallant', 'ungenerous', 'ungentlemanly', 'unglamorous', 'ungraceful', 'ungracious', 'ungrateful', 'unguarded', 'unhandsome', 'unhappy', 'unharmed', 'unhealthy', 'unheated', 'unheeded', 'unhelpful', 'unhesitating', 'unhurried', 'uniform', 'unilateral', 'unimaginable', 'unimaginative', 'unimpeachable', 'unimpeded', 'unimpressive', 'unincorporated', 'uninformed', 'uninhabitable', 'uninhibited', 'uninitiated', 'uninjured', 'uninspired', 'uninsurable', 'unintelligent', 'unintelligible', 'unintended', 'unintentional', 'uninterested', 'uninterrupted', 'uninvited', 'unique', 'united', 'universal', 'unjust', 'unjustifiable', 'unkempt', 'unkind', 'unknowing', 'unknown', 'unlawful', 'unlicensed', 'unlikable', 'unlikely', 'unlivable', 'unloved', 'unlucky', 'unmanageable', 'unmanly', 'unmanned', 'unmarketable', 'unmasked', 'unmatched', 'unmemorable', 'unmentionable', 'unmerciful', 'unmistakable', 'unmitigated', 'unmodified', 'unmotivated', 'unnatural', 'unnecessary', 'unnerved', 'unnerving', 'unnoticeable', 'unobserved', 'unobtainable', 'unobtrusive', 'unofficial', 'unopened', 'unopposed', 'unorthodox', 'unostentatious', 'unpalatable', 'unpardonable', 'unpersuasive', 'unperturbed', 'unplanned', 'unpleasant', 'unprecedented', 'unpredictable', 'unpretentious', 'unprincipled', 'unproductive', 'unprofessional', 'unprofitable', 'unpromising', 'unpronounceable', 'unprovoked', 'unqualified', 'unquantifiable', 'unquenchable', 'unquestionable', 'unquestioned', 'unquestioning', 'unraveled', 'unreachable', 'unreadable', 'unrealistic', 'unrealized', 'unreasonable', 'unreceptive', 'unrecognizable', 'unrecognized', 'unredeemable', 'unregulated', 'unrelenting', 'unreliable', 'unremarkable', 'unremitting', 'unrepentant', 'unrepresentative', 'unrepresented', 'unreserved', 'unrespectable', 'unresponsive', 'unrestrained', 'unripe', 'unrivaled', 'unromantic', 'unruffled', 'unruly', 'unsafe', 'unsalvageable', 'unsatisfactory', 'unsatisfied', 'unscheduled', 'unscholarly', 'unscientific', 'unscrupulous', 'unseasonable', 'unseemly', 'unselfish', 'unsettled', 'unsettling', 'unshakable', 'unshapely', 'unsightly', 'unsigned', 'unsinkable', 'unskilled', 'unsociable', 'unsolicited', 'unsolvable', 'unsolved', 'unsophisticated', 'unsound', 'unsparing', 'unspeakable', 'unspoiled', 'unstable', 'unstated', 'unsteady', 'unstoppable', 'unstressed', 'unstructured', 'unsubstantial', 'unsubstantiated', 'unsuccessful', 'unsuitable', 'unsuited', 'unsupervised', 'unsupported', 'unsure', 'unsurpassable', 'unsurpassed', 'unsurprising', 'unsuspected', 'unsuspecting', 'unsustainable', 'unsympathetic', 'unsystematic', 'untainted', 'untamable', 'untamed', 'untapped', 'untenable', 'untested', 'unthinkable', 'unthinking', 'untidy', 'untimely', 'untitled', 'untouchable', 'untraditional', 'untrained', 'untried', 'untroubled', 'untrustworthy', 'untruthful', 'unused', 'unusual', 'unverified', 'unwary', 'unwashed', 'unwatchable', 'unwavering', 'unwholesome', 'unwieldy', 'unwilling', 'unwise', 'unwitting', 'unworkable', 'unworldly', 'unworthy', 'unwritten', 'unyielding', 'upbeat', 'upmost', 'upper', 'uppity', 'upright', 'uproarious', 'upset', 'upsetting', 'upstairs', 'uptight', 'up-to-date', 'up-to-the-minute', 'upward', 'urbane', 'urgent', 'usable', 'used', 'useful', 'useless', 'usual', 'utilitarian', 'utopian', 'utter', 'uttermost', 'vacant', 'vacillating', 'vacuous', 'vagabond', 'vagrant', 'vague', 'vain', 'valiant', 'valid', 'valorous', 'valuable', 'vanishing', 'vapid', 'vaporous', 'variable', 'varied', 'various', 'varying', 'vast', 'vegetable', 'vegetarian', 'vegetative', 'vehement', 'velvety', 'venal', 'venerable', 'vengeful', 'venomous', 'venturesome', 'venturous', 'veracious', 'verbal', 'verbose', 'verdant', 'verifiable', 'verified', 'veritable', 'vernacular', 'versatile', 'versed', 'vertical', 'very', 'vexed', 'vexing', 'viable', 'vibrant', 'vibrating', 'vicarious', 'vicious', 'victorious', 'vigilant', 'vigorous', 'vile', 'villainous', 'vindictive', 'vinegary', 'violent', 'violet', 'viperous', 'viral', 'virtual', 'virtuous', 'virulent', 'visceral', 'viscous', 'visible', 'visionary', 'visual', 'vital', 'vitriolic', 'vivacious', 'vivid', 'vocal', 'vocational', 'voiceless', 'volatile', 'volcanic', 'voluminous', 'voluntary', 'voluptuous', 'voracious', 'vulgar', 'vulnerable', 'wacky', 'wailing', 'waiting', 'wakeful', 'wandering', 'wanting', 'wanton', 'warlike', 'warm', 'warmest', 'warning', 'warring', 'wary', 'waspish', 'waste', 'wasted', 'wasteful', 'watchful', 'waterlogged', 'waterproof', 'watertight', 'watery', 'wavering', 'wax', 'waxen', 'weak', 'weakened', 'weak-willed', 'wealthy', 'wearisome', 'weary', 'wee', 'weedy', 'week-long', 'weekly', 'weightless', 'weighty', 'weird', 'welcoming', 'well', 'well-adjusted', 'well-argued', 'well-aware', 'well-balanced', 'well-behaved', 'well-built', 'well-conceived', 'well-considered', 'well-crafted', 'well-deserved', 'well-developed', 'well-done', 'well-dressed', 'well-educated', 'well-equipped', 'well-established', 'well-founded', 'well-groomed', 'well-heeled', 'well-honed', 'well-informed', 'well-intentioned', 'well-kempt', 'well-known', 'well-liked', 'well-lit', 'well-made', 'well-maintained', 'well-mannered', 'well-meaning', 'well-off', 'well-placed', 'well-planned', 'well-prepared', 'well-qualified', 'well-read', 'well-received', 'well-rounded', 'well-spoken', 'well-suited', 'well-thought-of', 'well-thought-out', 'well-to-do', 'well-traveled', 'well-used', 'well-versed', 'well-worn', 'well-written', 'west', 'western', 'wet', 'what', 'wheezing', 'which', 'whimpering', 'whimsical', 'whining', 'whispering', 'whistling', 'white', 'whole', 'wholehearted', 'wholesale', 'wholesome', 'whooping', 'whopping', 'whose', 'wicked', 'wide', 'wide-eyed', 'wide-ranging', 'widespread', 'wiggly', 'wild', 'willful', 'willing', 'wily', 'windy', 'winning', 'winsome', 'winter', 'wintery', 'wiry', 'wise', 'wishful', 'wispy', 'wistful', 'withering', 'witless', 'witty', 'wizardly', 'wobbly', 'woeful', 'wolfish', 'wonderful', 'wondrous', 'wonted', 'wood', 'wooden', 'wooing', 'wool', 'woolen', 'woozy', 'wordless', 'wordy', 'work', 'workable', 'working', 'work-oriented', 'worldly', 'worn', 'worndown', 'wornout', 'worried', 'worrisome', 'worrying', 'worse', 'worshipful', 'worst', 'worth', 'worthless', 'worthwhile', 'worthy', 'wounding', 'wrathful', 'wrenching', 'wretched', 'wriggling', 'wriggly', 'wrinkled', 'wrinkly', 'written', 'wrong', 'wrongful', 'wry', 'yawning', 'yearly', 'yearning', 'yellow', 'yelping', 'yielding', 'young', 'younger', 'youngest', 'youthful', 'yummy', 'zany', 'zealous', 'zestful', 'zesty', 'zippy', 'zonked', 'zoological', ]; export const maxStringLength = 22; ================================================ FILE: drizzle-seed/src/datasets/cityNames.ts ================================================ /** * The original source for cities names data was taken from https://www.kaggle.com/datasets/juanmah/world-cities * We've excluded a few countries and their cities from this list because we don't think they should ever appear in any list */ export default [ 'Humpata', 'Qunghirot', 'Erdek', 'Asenovgrad', 'Payyoli', 'Pidhorodne', 'Clawson', 'Kala Diara', 'Kadan', 'Tumut', 'Bayat', 'Gangoli', 'Nuqui', 'Lochau', 'Albignasego', 'Narayanganj', 'Novo Lino', 'Regeneracao', 'Belvedere Marittimo', 'Santana de Parnaiba', 'Miyako', 'Aniche', 'Andrijevica', 'Buggenhout', 'Kurikka', 'Milanowek', 'Liaquatpur', 'Khamaria', 'Jora Khurd', 'Petal', 'Pak Tin Pa', 'Xiantangcun', 'Bovingdon', 'Lianmuqin Kancun', 'Sainte-Maxime', 'Campinorte', 'Vammanal', 'Yabelo', 'Wani', 'Kaita', 'Monkey Bay', 'Ianapera', 'Mariestad', 'Bukomansimbi', 'Kavaje', 'Amahai', 'Luis Alves', 'Altmunster', 'Spring Lake', 'Niemasson', 'Douala', 'Naranjos', 'Madhubani', 'Atotonilco el Alto', 'Palmar de Varela', 'Hanerik', 'Wadhraf', 'Terre Neuve', 'Libungan', 'Mattigiri', 'Prieto Diaz', 'Barjora', 'Kagamino', 'Pakokku', 'Bol', 'Prienai', 'Tan Phong', 'Kali', 'Rauenberg', 'Chios', 'Jerico', "Qal'acha", 'Biwong', 'Albi', 'Plantation', 'Nargund', 'Yamakita', 'Ghota Fatehgarh', 'Vredenburg', 'Sherman', 'Penal', 'Varkaus', 'Zhongwei', 'Miri', 'Guarapuava', 'Bolszewo', 'Park Forest Village', 'Cianjur', 'Kempsey', 'Guaraci', 'Odorheiu Secuiesc', 'Alytus', 'Mulanje', 'Evinayong', 'Bukkapatnam', 'Berd', 'Kisaran', 'Bitung', 'Selcuk', 'Xikeng', 'Cantanhede', 'Pasuquin', 'Steinheim am der Murr', 'Mahem', 'Gloria', 'Benbrook', 'Beforona', 'Borken', 'Ouled Rahmoun', 'Pedras de Maria da Cruz', 'Al Fuhays', 'Titiribi', 'Kien Luong', 'Shediac', 'Villasanta', 'Guelph', 'Duzkoy', 'Narkher', 'Tateyama', 'Penzance', 'Pasay City', 'San Lucas Sacatepequez', 'Skardu', 'Maharajapuram', 'Al `Asharah', 'Tanant', 'Kawaguchi', 'Madhuban Bediban', 'Bridge City', 'Greenbelt', 'Iara', 'Baton Rouge', 'Yopal', 'Moribila', 'Ciudad-Rodrigo', 'Madira', 'Bobonong', 'Fort Hood', 'Amboasary', 'Sostanj', 'Fnidq', 'Vikasnagar', 'Yangping', 'Arraias', 'Goslar', 'Shuixi', 'Lagoa da Canoa', 'Baiersbronn', 'Forest', 'Sedavi', 'Kosonsoy', 'Trelleborg', 'Bucheya', 'Kidricevo', 'Gokulgarh', 'Coswig', 'Marechal Taumaturgo', 'Sidi Akkacha', 'Kouarfa', 'Finspang', 'Timberlane', 'Colotenango', 'Puerto Ayora', 'Goytapa', 'Tarlapalli', 'Acandi', 'General Luna', 'Calamba', 'Miqiao', 'Linxia Chengguanzhen', 'Nizamabad', 'Doria Sonapur', 'Chongoroi', 'Oskaloosa', 'Prevalje', 'Lubang', 'Ladispoli', 'Coite do Noia', 'Sarjapur', 'Panuco', 'Jendouba', 'Nayakanhatti', "'Ain Mouilah", 'Chapeco', 'Stanton', 'Balve', 'Libante', 'Etah', 'Yarim', 'Litchfield Beach', 'Tsingoni', 'Kendall Park', 'Karkala', 'Vlaardingen', 'Torrente', 'Periyanegamam', 'Senhor do Bonfim', 'Fleet', 'Geita', 'Chettipalaiyam', 'New Washington', 'Vlore', 'San Juan Ermita', 'Lagonglong', 'Shwebo', 'Hattiesburg', 'Mering', 'Basibasy', 'Sarahs', 'Nadendla', 'Palaiyam', 'Porto de Pedras', 'Porto Novo', 'Westampton', 'Oriximina', 'Al Jaghbub', 'Hajjah', 'Cutler Bay', 'Toyokawa', 'Tehuipango', 'Catabola', 'Ivanic-Grad', 'Caudete', 'Del Rio', 'Clarence-Rockland', 'Morrisville', 'Divisopolis', 'Chigasaki', 'West Auckland', 'Suzu', 'Blythe', 'Khunti', 'Putussibau', 'Ourikela', 'Ambano', 'Calpe', 'Ambohitralanana', 'Longjin', 'Sao Filipe', 'San Jose de Gracia', 'Bura', 'Ziarat', 'Termoli', 'Vohindava', 'Xinbu', 'Biliaivka', 'Sahneh', 'Rutherglen', 'Naolinco de Victoria', 'Windlesham', 'Manchester', 'Orinda', 'South Yarmouth', 'Tabaco', 'Kayseri', 'Ibiapina', 'Murcia', 'Swieqi', 'Valencia', 'Chirala', 'Eloxochitlan', 'Dembeni', 'Fardis', 'Misaki', 'Nagar', 'Miyajima', 'Brikama', 'Umurlu', 'Brierley Hill', 'Khodabandeh', 'Ozgon', 'Reze', 'Tinajeros', 'Novomyrhorod', 'Fully', 'Biak', 'Colider', 'Kashiwara', 'Sjenica', 'Hednesford', 'Madukkarai', 'Villaflores', 'Chiautla de Tapia', 'Taormina', 'Huwei', 'Seiyo', 'Washim', 'Benton', 'Venturina', 'Kakalur', 'Andamooka', 'Anghad', 'West Hempfield', 'Turnhout', 'Markkleeberg', 'Poko', 'Wealdstone', 'Ban Na Pa', 'Cerklje na Gorenjskem', 'Bombon', 'Arimalam', 'Jangy-Kyshtak', 'Isny im Allgau', 'Braga', 'Kawadgaon', 'Bonate di Sopra', 'Tonneins', 'Munagala', 'Chinna Mushidivada', 'Talagutong', 'Neihuzhai', 'Agua de Dios', 'Herceg Novi', "Nerk'in Getashen", 'Botucatu', 'Arroio do Meio', 'Villafranca del Panades', 'Madikeri', 'Serik', 'San Sebastian Huehuetenango', 'Kyzyl-Adyr', 'Somarasampettai', 'Surmene', 'Turbana', 'Chilas', 'Marinette', 'The Hammocks', 'McPherson', 'General MacArthur', 'Strasbourg', 'Tibana', 'Polatli', 'Sur', 'Lye', 'West Carson', 'Uludere', 'Moroni', 'Mata de Sao Joao', 'Pachmir', 'Saskatoon', 'Nina Rodrigues', 'Chinnakkavundanur', 'Norton Shores', 'Abu', 'Santo Angelo', 'Pianezza', 'Wythenshawe', 'Mabeskraal', 'Fort Drum', 'Gobindpur', 'Kostopil', 'Calca', 'Nonkon', 'Kolno', 'Tati', 'Glastonbury', 'Itakura', 'Payyanadam', 'Puerto Caimito', 'Thionville', 'Wezep', 'Llandudno', 'Antequera', 'Camp Perrin', 'Sao Luis do Paraitinga', 'Kabatas', 'Volta Redonda', 'Beni Mered', 'Kaffrine', 'Reutlingen', 'Fort Morgan', 'Naucalpan de Juarez', 'Ebo', 'Dargaz', 'Vina del Mar', 'Ilam', 'Kyzyl-Suu', 'Perinton', 'Nenmini', 'La Mirada', 'Sumbe', 'Jicin', 'Indian Trail', 'Nova Bassano', 'Batam Centre', 'Tocache Nuevo', 'Tambaram', 'Kikugawa', 'Sari', 'Borca', 'Beja', 'Feltre', 'Godo', 'Oshawa', 'German Flatts', 'Schaffhausen', 'Albino', 'Wejherowo', 'Plav', 'Beccar', 'Pitsea', 'Lauro de Freitas', 'Gogrial', 'Ferros', 'Tipp City', 'Waynesboro', 'Khost', 'Golungo Alto', 'Tamesis', 'Almondbury', 'Bernardo de Irigoyen', 'Vairampatti', 'Neiafu', 'Everswinkel', 'Garden Acres', 'Yufu', 'Coxim', 'Dilra', 'Guwahati', 'Tan Phu', "Khairpur Mir's", 'Poza Rica de Hidalgo', 'Trancoso', 'Koytendag', 'At Tafilah', 'Ranohira', 'Legionowo', 'Romitan Shahri', 'The Villages', 'Carroll', 'Mogi Mirim', 'Mo i Rana', 'San Miguel Duenas', 'Dardoq', 'Abaetetuba', 'New Milford', 'Palda', 'Tsinjomitondraka', 'Pisz', 'Otaru', 'Caldas Novas', 'Pampanito', 'Panuco de Coronado', 'Lanyi', 'Mayoyao', 'Tlagasana', 'Chetouane', 'Grand-Popo', 'Nagaoki', 'Chong Nonsi', 'Emmiganur', 'Looc', 'Vila Muriqui', 'Frattamaggiore', 'Ormoz', 'Goynucek', 'Arles', 'Laungowal', 'Jalhay', 'Wan Tau Tong', 'Villa Angela', 'Mariyammanahalli', 'Pivka', 'Firmat', 'Patsanda', 'Colle Salvetti', 'Gyula', 'Valparai', 'Issaquah', 'Jeronimo Monteiro', 'Bugasong', 'Broxbourne', 'Tsundupalle', 'Jacupiranga', 'San Juan Ixcoy', 'Ambatomirahavavy', 'Lanco', 'Monsenhor Gil', 'Sabaa Aiyoun', 'Narnaul', 'Carutapera', 'Altay', 'Ambodiampana', 'Kouvola', 'Harahan', 'Paoskoto', 'Vargem Grande Paulista', 'Schwarzenbruck', 'Bellheim', 'Oak Ridge', 'Chimaltenango', 'Issaba', 'Tiri', 'Taroudannt', 'Nonahi', 'Ranquitte', 'Tala Yfassene', 'Meybod', 'Muchamiel', 'Antohobe', 'Axochiapan', 'Malakanagiri', 'Whitchurch', 'Lake in the Hills', 'San Mateo del Mar', 'Curuzu Cuatia', 'Schweizer-Reineke', 'Babusnica', 'Montgomeryville', 'Gundugolanu', 'Abi al Khasib', 'Pillanallur', 'Soulei', 'Ayas', 'Azuqueca de Henares', 'Chodov', 'Mingjiujie', 'Patar', 'Porto Murtinho', 'Jafra', 'El Progreso', 'Rulin', 'Penetanguishene', 'Ban Khek Noi', 'Tubao', 'Calw', 'Gaundra', 'Nasik', 'Gaolingcun', 'Kalol', 'Meadowbrook', 'Kidapawan', 'Rudsar', 'Maddaloni', 'Kabul', 'Karlskoga', 'Yazd', 'Podalakur', 'Asakura', 'Teranikallu', 'Keokuk', 'Uyuni', 'Yorosso', 'Pfaffikon', 'Arniquet', 'Kyzyl-Kyshtak', 'Kambur', 'Mburucuya', 'Mingacevir', 'Zvolen', 'Cabaceiras do Paraguacu', 'Tamworth', 'Guantanamo', 'Darregueira', 'Ezequiel Montes', 'Fomboni', 'Quintanar de la Orden', 'Siachoque', 'Kericho', 'Mineral de Angangueo', 'Peka', 'Fenglin', 'Quetzaltenango', 'Sevanagala', 'Port Pirie', 'Tengyue', 'Novoyavorovskoye', 'Forli', 'Suchitoto', 'Hosdurga', 'Arcoverde', 'Vale de Cambra', 'San Pablo Atlazalpan', 'Hockenheim', 'Dandu Mailaram', 'Tadjmout', 'Port of Spain', 'Nanaimo', 'Colonia del Sol', 'Primeiro de Maio', 'Valletta', 'Horishni Plavni', 'Kingswood', 'Furstenwalde', 'Grosse Pointe Farms', 'Bang Kruai', 'Haiger', 'Chansolme', 'Xiaoganzhan', 'Mulaikkaraippatti', 'Nandazhang', 'Hambuhren', 'Medellin', 'Canapolis', 'Corozal', 'Bignona', 'Nova Vodolaha', 'Shedbal', 'Maojiatang', 'Greentree', 'Mahayag', 'Rajendranagar', 'Guararema', 'Pirane', 'Tall Dhahab', 'Sipoo', 'Aiquile', 'Kish', 'Taltal', 'Kushmanchi', 'Santa Genoveva de Docordo', 'Tapiales', 'Yunak', 'Floirac', 'Parbata', 'Zhangzhengqiao', 'Zossen', 'Caselle Torinese', 'Zaliohouan', 'Waldkirchen', 'Sao Jose da Coroa Grande', 'Pasinler', 'Gaspe', 'Soubakaniedougou', 'Nabire', 'Sieyik', 'Sao Jose de Piranhas', 'Gioia del Colle', 'Villanueva de Arosa', 'Marihatag', 'Daugavpils', 'Saint Helens', 'Jiannan', 'Gross Kreutz', 'Ganapathivattam', 'Santiago de Cuba', 'Katanning', 'Marotolana', 'Dingzhou', 'Spoleto', 'San Agustin Chahal', 'Calheta de Sao Miguel', 'Aragua de Barcelona', 'Vammala', 'Sao Goncalo do Amarante', 'Shahba', 'Cornedo Vicentino', 'Kew Green', 'Kuzuculu', 'Tobati', 'Currais Novos', 'Mounds View', 'Wetzlar', 'Nesoddtangen', 'Bosilovo', 'Marolambo', 'Horodyshche', 'Aleksandrow Kujawski', 'Sao Jose do Cedro', 'Dhabauli', 'Macka', 'Oelsnitz', 'Kiambu', 'Mugutkhan Hubli', 'Jalkaura', "Bo'ao", 'Emmiganuru', 'Manglur', 'Shenmu', 'Kerch', 'Paluan', 'Jaguaribe', 'Palmeira dos Indios', 'Zorgo', 'Chagne', 'Ugrinovci', 'Feuchtwangen', 'Mitake', 'Pacora', "Rui'an", 'West Hanover', 'Buca', 'Sam Son', 'Matsuyama', 'Gouka', 'Tuchin', 'Tonantins', 'Narangba', 'Inami', 'Wolf Trap', 'Kumharsan', 'Gundi', 'Rubiera', 'Balia', 'Estarreja', 'Golkoy', 'Nordestina', 'Dattapulia', 'Wapienica', 'Unao', 'Vasterhaninge', 'Wenxian Chengguanzhen', 'Hanover Park', 'Urrugne', 'San Antonio Ilotenango', 'San Vicente dels Horts', "Sao Miguel d'Oeste", 'Caerfyrddin', 'Sidi Rahhal', 'Olongapo', 'Jasol', 'Castelnuovo Rangone', 'Don Benito', 'Thung Song', 'Zhanibek', 'Male', 'Anpachi', 'Buford', 'Kwekwe', 'Urmston', 'Fernan-Nunez', 'Rio das Ostras', 'Zhmerynka', 'Shahrud', 'Las Lomitas', 'Curuca', 'Ar Ramadi', 'Jamundi', 'Skalica', 'Taixing', 'Huron', 'Basse-Goulaine', 'Arteche', 'Roxbury', 'Nawnghkio', 'Terryville', 'Eastlake', 'Guarabira', 'Zionsville', 'Yala', 'Os', 'Tuquerres', 'Zlatograd', 'Harnai', 'Cherukara', 'Numancia', 'Negotino', 'Sparta', 'Buzen', 'Karuvelampatti', 'Monte San Pietro', 'Rosario do Catete', 'Thamaga', 'Johnstown', 'Manaira', 'Zabrat', 'Tebourba', 'Cornelius', 'Al Mu`addamiyah', 'Epinal', 'Haria', 'Surrey', 'Tarapoto', 'Morecambe', 'Maribojoc', 'Savda', 'Calbiga', 'Ayodhyapattanam', 'Dulmen', 'Porteirinha', 'Armstrong', 'Darton', 'Poco Redondo', 'Yoboki', 'Maevka', 'Pihra', 'Nong Khae', 'Mascoutah', 'Ukrainka', 'Ivrea', 'Vadavalam', 'Huaraz', 'Gjakove', 'Penablanca', 'Lishuping', 'Kiso', 'Guaranda', 'Palliman', 'Rawtenstall', 'Atchampeta', 'Tarazona de Aragon', 'Vincent', 'Longtang', 'Muragacha', 'Gwadar', 'Aguilas', 'Karttigappalli', 'Rahway', 'Churi', 'Leini', 'Ragan Sur', 'Hongliuwan', 'Bayang', 'Plattsburgh', 'Anagni', 'Gidan Ider', 'Ait Bouchta', 'Sidi Abdelkader', 'Salaverry', "Ma'erkang", 'Niena', 'Santamaguluru', 'Buxtehude', 'Ugep', 'Zelezniki', 'Emmendingen', 'Monforte de Lemos', 'Tourza', 'Rivalta di Torino', 'Orange Cove', 'Bijbiara', 'Jardim do Serido', 'Kajaani', 'Barros Cassal', 'Shengang', 'Candido de Abreu', 'Ashdod', 'Bhit Bhagwanpur', 'Sajoszentpeter', 'Ambolotarakely', 'Viota', 'Muskegon', 'Hanau', 'Madhupur', 'Popesti-Leordeni', 'Coyhaique', 'Hojai', 'Nehe', 'Maddagiri', 'Bodrum', 'Mountain View', 'Tash-Komur', 'Cili', 'Kristianstad', 'Pehuajo', 'Jam', 'Frederickson', 'Aywaille', 'Heule', 'Kasungu', 'Koge', 'Mukdahan', 'Dialakoroba', 'Les Sorinieres', 'Al Kiswah', 'Dongxiaozhai', 'Mirandela', 'Mochizuki', 'Hanumantanpatti', 'Bregenz', 'Kongjiazhuangcun', 'Xianxi', 'Kovin', 'Konan', 'La Serena', 'Menomonie', 'Zemmouri', 'Nakagusuku', 'Gharbara', 'Oro-Medonte', 'Qiloane', 'Larantuka', 'Nonoai', 'Silvarpatti', 'Kharagbani', 'Nghia Lo', 'Kissimmee', 'Higuey', 'Susques', 'Pyalakurti', 'Sleaford', 'Barranco de Loba', 'Rattihalli', 'Standish', 'Holyoke', 'Nettadahalli', 'Torrejon de la Calzada', 'Nidzica', 'Sovata', 'Ilha Solteira', 'Rampur Khajuriya', 'Pappinissheri', 'Sint Willebrord', 'Benito Soliven', "Sek'ot'a", 'Jhang City', 'Vadakkanandal', 'Kolo', 'Montecorvino Rovella', 'Mancio Lima', 'Kashkar-Kyshtak', 'Mohania', 'Kochgawan', 'Honiara', 'Brejoes', 'Krasnyy Yar', 'Otrokovice', 'Saidpur', 'Nainijor', 'Jhanjharpur', 'Yarm', 'Caransebes', 'Santa Teresa del Tuy', 'Banate', 'Altadena', 'Sihu', 'Velten', 'San Pablo', 'Chbar Mon', 'Baraderes', 'Baraon', 'Telsang', 'Saint Budeaux', 'Candelaria', 'Silva Jardim', 'Udburu', 'Mastic', 'Itapora', 'Iklin', 'Vlasotince', 'Svay Pak', 'Havelock North', 'Kalipatnam', 'Mozarlandia', 'Kamarhati', 'Strehaia', 'Joshimath', 'Bridgnorth', 'Batticaloa', 'Pallisa', 'Coronel Dorrego', 'Chelmek', 'Hinthada', 'Nola', 'Little Rock', 'New Braunfels', 'Havre', 'Lavezares', 'Lamphun', 'Karajgi', 'Maheshram', 'Rangasamudram', 'Cuango', 'Tonse East', 'Morlanwelz-Mariemont', 'Otavalo', 'Nerkunram', 'Jaque', 'Okha', 'Bhamo', 'Gadabay', 'Timbauba', 'Andapafito', 'Punta Umbria', 'Vanj', 'Kun Puhal', 'Timmendorfer Strand', 'Pau dos Ferros', 'Matsuzaka', 'Columbus', 'Putina', 'Kotdwara', 'Tagana-an', 'Ajdovscina', 'Siegen', 'Sangin', 'Bihpuriagaon', 'Biswanath Chariali', 'Chiva', 'Paszto', 'Ban Wang Daeng', 'Karlsfeld', 'El Alamo', 'Lelystad', 'Chand Chaur', 'Nioaque', 'Yargatti', 'Hilzingen', 'Ayvacik', 'Suceava', 'Padova', 'Carneys Point', 'Winkfield', 'Absecon', 'Guntakal Junction', 'Bhadwar', 'Iwaka', 'San Pedro de Lloc', 'Key Largo', 'Monterotondo', 'Al Hillah', 'Zaouia Ait Ishak', 'Floriana', 'Ubajara', 'Ouled Fayet', 'Novo Cruzeiro', 'Selwyn', 'Phitsanulok', 'Amarzgane', 'Punakha', 'Liperi', 'Biblis', 'Pavlodar', 'Salalah', 'Tysvaer', 'Yapqan', 'Vetapalem', 'Baisari', 'Boryeong', 'Dhanauli', 'Atenco', 'Little Chute', 'Mimasaka', 'Champapur', 'Itamonte', 'Marple', 'El Callao', 'Whanganui', 'Korschenbroich', 'Zhuqi', 'Asfarvarin', 'Kodayattur', 'Yozyovon', 'Thompson', 'Wingles', 'Karliova', 'Radyvyliv', 'Mollendo', 'Jagdalpur', 'Nandgaon', 'Toyono', 'Gryfino', 'Ventspils', 'Sibiti', 'Shishi', 'Lulhaul', 'Bhalpatti', 'Norman', 'Halesowen', 'Al Midhnab', 'Viroflay', 'Nether Providence', 'Anglet', 'Seven Corners', 'Kurort Steinbach-Hallenberg', 'Eden Prairie', 'Firestone', 'Shanywathit', 'North Wantagh', 'Grenzach-Wyhlen', 'Chiari', 'Keningau', 'Miranda do Corvo', 'Tenambakkam', 'Tilburg', 'Budipuram', 'Nallamada', 'San Giovanni la Punta', 'Middle', 'Nanzhai', 'Castelfiorentino', 'Mont-Royal', 'Khasbalanda', 'Zirara', 'Carluke', 'Kawatana', 'Idangansalai', 'Mandi', 'Sabanitas', 'Balangir', 'Phillipsburg', 'Poniatowa', 'Jizzax', 'Corumbaiba', 'Sampues', 'Muynoq', 'Valga', 'Daharia', 'Mundakkal', 'Qarqin', 'Gandu', 'Los Vilos', 'Arkonam', 'Kananya', 'South Middleton', 'Devsar', 'Kamin-Kashyrskyi', 'Hauterive', 'Raitar', 'Swords', 'Elche', 'Shakhtinsk', 'Meadow Woods', 'Kadiapattanam', 'Sherrelwood', 'Altamura', 'Bloomingdale', 'Hesperia', 'Marathon', 'Sogndal', 'Timberlake', 'Upper Gwynedd', 'Saint-Andre-de-Cubzac', 'Chelmsford', 'Udaipur Bithwar', 'Provins', 'Prievidza', 'Loiyo', 'Dabou', 'Kastav', 'Vellarivalli', 'Orchards', 'Summerfield', 'Ilidza', 'Badiadka', 'Ypsilanti', 'Warsaw', 'Randers', 'Aqtobe', 'Rosendael', 'New Paltz', 'Port Neches', 'Cranford', 'Hadamar', 'Bergenfield', 'Orland Park', 'Ba Chuc', 'Fomento', 'Tegueste', 'Omagh', 'Maria Pinto', 'Ban Suan', 'Cuernavaca', 'Leominster', 'Eidsvold', 'Sohtha', 'Todi', 'Choszczno', 'Yamato', 'Lengquancun', 'Sultanpur Mazra', 'Boumalne', 'Manises', 'Xiaoli', 'Zolochiv', 'Pyapon', 'Lake Ronkonkoma', 'Hasselt', 'El Minie', 'Ambinanindovoka', 'Trittau', 'Mulavur', 'Loyish Shaharchasi', 'Bembereke', 'Antratsyt', 'Sayalkudi', 'Chorrocho', 'Ipiau', 'Pembroke Pines', 'Palghar', 'Frodsham', 'Sullana', 'Sunamganj', 'Finike', 'Qiryat Mozqin', 'Correia Pinto', 'Macheng', 'Ban Du', 'Agde', 'Santa Quiteria', 'Voyenno-Antonovka', 'Kapellen', 'An Chau', 'Valera', 'Glen Parva', 'Mahikeng', 'Gland', 'Northallerton', 'Su-ngai Kolok', 'Ash Shaykhan', 'Roztoky', 'Kokomo', 'Duren', 'Iver', 'Campanha', 'Befody', 'Vaisampakkal', 'Ucar', 'Janesville', 'Aksehir', 'Buur Gaabo', 'Ouled Fares', 'Eseka', 'Cossimbazar', 'Abingdon', 'Andrychow', 'Calimesa', 'Manati', 'Hannoversch Munden', 'Nalakadoddi', 'Xiaoba', 'Suo-Oshima', 'Beaver Dam', 'Oulad Ouchchih', 'Analavory', 'Lopez Jaena', 'Midland', 'Zhongbu', 'Tarrytown', 'Brighton', 'Vannes', 'Rajgir', 'Kuppam', 'Kottakota', 'Onesti', 'Massa', 'Sebastian', 'Benahavis', 'Gigmoto', 'Probolinggo', 'Valkenswaard', 'Momostenango', 'Det Udom', 'Myrza-Ake', 'Bou Saada', 'Maur Kalan', 'Cholargos', 'Senglea', 'Baranzate', 'Point Fortin', 'Wutiancun', 'South Whittier', 'Tiran', 'Fitchburg', 'Eyvan', 'Burbaliq', 'Heanor', 'Rancharia', 'Karumattampatti', 'Piagapo', 'Rishivandiyam', 'Amposta', 'Tultepec', 'Cabaret', 'Cloquet', 'Plouzane', 'Wilnecote', 'Presidente Epitacio', 'Mossaka', 'Piano di Sorrento', 'Colomiers', 'Kaithal', 'Cotoca', 'Kofele', 'Nangong', 'Yhu', 'Chepo', 'Kyoto', 'Sorocaba', 'Phagwara', 'Ludenscheid', 'Guotang', 'Vayakkalattu', 'Mianeh', 'Closepet', 'Kronshagen', 'Unye', 'Vale de Cavalos', 'Zitorada', 'Ramnicu Valcea', 'Samastipur', 'West Hempstead', 'Saunshi', 'Atamyrat', 'Morfelden-Walldorf', 'Okahandja', 'Shaoyu', 'Mirpur Mathelo', 'Odendaalsrus', 'Jisrayn', 'Elankur', 'Facatativa', 'Leshan', 'Halle', 'Paharpur', 'Dialafara', 'Villeneuve-Loubet', 'Regidor', 'Pukkulam', 'Torre Boldone', 'Tirukkalikkunram', 'Xihuangcun', 'Artemida', 'Lorrach', 'Governador Valadares', 'Virton', 'Xochiatipan de Castillo', 'Ehingen an der Donau', 'Carlosama', 'Zarzal', 'Terni', 'Tomaszow Mazowiecki', 'Villa Union', 'North Lakes', 'Lospalos', 'Wuhan', 'La Puebla del Rio', 'Caraga', 'Drolshagen', 'Mahad', 'Charleville', 'Dhorimanna', 'Hakodate', 'Soumagne', 'Magra', 'Chulumani', 'Alloa', 'Montegranaro', 'Singhara Buzurg', 'Errahalli', 'Sakleshpur', 'Serris', 'Zell am See', 'Danbury', 'Yoshioka', 'Eruh', 'Kinshasa', 'Mellieha', 'Ibadan', 'Filottrano', 'Dongjiangshui', 'Mariehamn', 'Mbalmayo', 'Cobija', 'Cairo', 'Jatara', 'Famalicao', 'Bayonet Point', 'Trang Bang', 'Fandriana', 'Copceac', 'Nhamunda', 'Sriperumbudur', 'Kitamoto', 'Le Grand-Quevilly', 'Higuera de Zaragoza', 'Sa Dec', 'Plougastel-Daoulas', 'IJsselstein', 'Rivoli', 'Neu Bleckede', 'Am-Timan', 'Jobabo', 'Lerik', 'Chaiyo', 'Bergen op Zoom', 'Niederzier', 'Sesimbra', 'Jagta', 'Levis', "Stara L'ubovna", 'Horbury', 'Bialy Kamien', 'Alamo', 'Palaio Faliro', 'Roselle Park', 'Rudewa', 'Kotma', 'Cojedes', 'Guangming', 'Shanwei', 'Vanimo', 'Alesd', 'Sidmouth', 'Hamah', 'Alejandro Korn', 'Emsworth', 'Shenjiabang', 'Calatayud', 'Furano', 'Moca', 'Viraghattam', 'Farshut', 'Nachod', 'Patzun', 'Southborough', 'Hopetown', 'Regina', 'Estavayer-le-Lac', 'Castellammare di Stabia', 'Marabut', 'Wegrow', 'Moncton', 'Pochuta', 'Yumbe', 'Raynes Park', 'Kaiserslautern', 'Ixtahuacan', 'Montemor-o-Novo', 'Palanisettipatti', 'Baaqline', 'Pen-y-Bont ar Ogwr', 'Altinekin', 'Villaba', 'Cudahy', 'Pazhanji', 'Icononzo', 'Yaozhuangcun', 'Sanson', 'Mol', 'Lejiangxiang', 'Prien am Chiemsee', 'Heerde', 'Nabatiye', 'Dobre Miasto', 'Franklin', 'Ban Khi Lek', 'Calumpang', 'Levelland', 'San Jose de Bocay', 'Xichang', 'Uravakonda', 'Madinat Zayid', 'Udayagiri', "Jeppe's Reef", "Nek'emte", 'Fajardo', 'Ratodero', 'South Elgin', 'Gogogogo', 'Awjilah', 'Kantharalak', 'La Trinidad', 'Nakanoto', 'Paamiut', 'Augustdorf', 'Ferizli', 'Belauncha', 'Elurupadu', 'Abuna', 'Kuantan', 'Casimiro de Abreu', 'Battle Creek', 'Ohara', 'Knottingley', 'Varzedo', 'Ouled Slama Tahta', 'Sorrento', 'Lalmatie', 'Hemavati', 'Lewisboro', 'Gary', 'Hartbeespoort', 'Piriyapatna', 'Fulacunda', 'Coatzacoalcos', 'Puliyara', 'Busca', 'Jamkhed', 'Targu Lapus', "Ra's al Ma`arrah", 'Nakhtarana', 'Monte Plata', 'Gravenhurst', 'Jingcheng', 'Starnberg', 'Sa al Hajar', 'Whitehouse', 'Batatais', 'Ciempozuelos', 'Bingmei', 'Divinopolis', 'Como', 'Sirgora', 'Ban Bang Yai', 'Tejucuoca', 'Soavina Antanety', 'Grapevine', 'Waseca', 'Sindelfingen', 'Ulhasnagar', 'Tulcan', 'Ararica', 'Camoapa', 'Zoubiria', 'Dour', 'Terku Narippaiyur', 'Llagostera', 'Qo`rg`ontepa', 'San Rafael La Independencia', 'Sandino', 'Dasoguz', 'Bankra', 'Mauguio', 'Viagrande', 'Bay Village', 'Galivedu', 'Oulad Fares', 'Patapatnam', 'Swinoujscie', 'Dharmkot', 'Recklinghausen', 'General Viamonte', 'Cyangugu', 'Monteux', 'Ozalp', 'Quintero', 'Baocheng', 'Marhaura', 'La Solana', 'Simpang Renggam', 'Bad Durkheim', 'Sao Bento do Sapucai', 'Mitzic', 'Etajima', 'Volos', 'Hailey', 'Weinsberg', 'Dahi', 'Braselton', 'Miro Khan', 'Qujingpu', 'Ince-in-Makerfield', 'Spanish Springs', 'Montanhas', 'Presevo', 'San Carlos Centro', 'Nandnawan', 'Nowy Sacz', 'View Royal', 'West Rembo', 'Paragould', 'Tortoli', 'Guapiacu', 'Gijon', 'Ilesa', 'Tongjiang', 'Belakvadi', 'Santa Sylvina', 'Adelfia', 'Mandya', 'Bay City', 'Rosny-sous-Bois', 'Angono', 'Uzhhorod', 'Ap Phu Hai', 'Bembe', 'Palma di Montechiaro', 'Shanhe', 'Oued el Djemaa', 'Atturkuppam', 'Adwa', 'Zabbar', 'Bhagalpur', 'West Valley City', 'Ruislip', 'Talatamaty', 'Ban Wang Krachae', 'Iskapalem', 'Kashmar', 'Bouchagroun', 'Timissa', 'Xizi', 'Elmwood Park', 'Santa Ana Huista', 'Hickory Hills', 'Rayong', 'Zambrow', 'Diamantina', 'Carmen de Areco', 'Kodigenahalli', 'Jamikunta', 'Soyaux', 'Juraqan', 'Burgess Hill', 'Piskent', 'Wloclawek', 'Meerssen', 'Cruz Alta', 'Huitzilan', 'Melo', 'Takelsa', 'Xiaoyi', 'Jhabua', 'Jagna', 'Sondiha', 'Sopiste', 'Koungou', 'Yeldurti', 'Dois Irmaos', 'Jaru', 'Kulasekharapuram', 'Shinshiro', 'Qaanaaq', 'Cangucu', 'Rosas', 'Westtown', 'Kaka', 'Gobardanga', 'Fangyuan', 'Kallupatti', 'Raiganj', 'Chitapur', 'Benevides', 'Miercurea-Ciuc', 'Massapequa', 'Kallakkudi', 'Dinuba', 'Ouagadougou', 'Hanford', 'Mfou', 'Mandalapalle', 'Alvand', 'Fort Campbell North', 'Kueneng', 'Bunnik', 'Radnor', 'Ikoto', 'Blankenburg', 'Santa Marinella', 'Snizhne', 'Zerakpur', 'Caetite', 'Boechout', 'Gulbarga', 'Southern Pines', 'Nanakuli', 'Miarinarivo', 'Lakshmeshwar', 'Gaocheng', 'Laheji', 'Bagumbayan', 'Goshen', 'Neopolis', 'Baohe', 'Diss', 'Hangu', 'Muyinga', 'Apahida', 'Miagao', 'Videm pri Ptuju', 'Kuysinjaq', 'Kalfou', 'Danilovgrad', 'Ambalatany', 'Katkol', 'Forio', 'Chaohucun', 'Manki', 'Capulhuac', 'White Rock', 'Ban Pa Tueng', 'Marahom', 'Constanta', 'Ntoum', 'Los Minas', 'Nabaruh', 'Mahuver', 'Crosia', 'Bilenke', 'Miranda de Ebro', 'Oshakati', 'Kochugaon', 'Paralimni', 'Ramon Santana', 'Villa Bisono', 'Ichikai', 'Nefasit', 'Sangmelima', 'Jasmine Estates', 'Marion Oaks', 'Ahmedabad', 'Bello', 'Koporo-Kenie-na', 'Attard', 'Kamargani', 'Greenwood Village', 'Dubacherla', 'Ad Dabyah', 'Katosi', 'Nhandeara', 'Thap Than', 'Andranambolava', 'Totutla', 'Gambettola', 'Hatoyama', 'Shanyincun', 'Pinjranwan', 'Yadiki', 'Ceelbuur', 'Petrovec', 'Tighenif', 'Berwick-Upon-Tweed', 'Pakhtaobod', 'Sentilj', 'Barahra', 'Pingdeng', 'Zunil', 'Darwen', 'Trang', 'Napier', 'Oroshaza', 'Sao Tiago', 'Sipe Sipe', 'Gera', 'Sadhoa', 'Sungo', 'Papagaios', 'Yahaba', 'Tassera', 'Cuyotenango', 'Doumanaba', 'Tortuguitas', 'La Prairie', 'Enfida', 'Nafpaktos', 'Chhapra', 'Hoenheim', 'Bondoufle', 'La Virginia', 'Guneysu', 'Champaign', 'Kampala', 'Sundararaopeta', 'Uppidamangalam', 'Ikkadu', 'Valjevo', 'Xidiancun', 'Bangkinang', 'Ermenek', 'Bayaguana', 'Bitburg', 'North Smithfield', 'Lagunas', 'Chuquicamata', 'Kannamangalam', 'Taunggyi', 'Bibipet', 'Fangcun', 'Baikatpur', 'Sultanpur', 'Rio de Mouro', 'Dongcun', 'Yali', 'Gora Kalwaria', 'Ottawa', 'Achankovil', 'Przemysl', 'Ambohinamboarina', 'Sangalbahita', 'Ban Pet', 'Segaon', 'Watertown', 'Baghmaria', 'Meissen', 'Chebli', 'Santo Antonio do Descoberto', 'Santa Rosa de Rio Primero', 'Gold Canyon', 'Marburg', 'Bhadas', 'Arcos', 'Kediri', 'Kasaoka', 'Teisendorf', 'Tadikombu', 'Enger', 'Hajan', 'Fenggeling', 'Guroymak', 'Kolanpak', 'Magam', 'Marsberg', 'Panagar', 'Nove Mesto nad Vahom', 'Koluszki', 'Tan-Tan', 'Dehdasht', 'Karimganj', 'Tekman', 'Dudelange', 'Remscheid', 'Xiaolongtan', 'Srisailain', 'Pike Road', 'Villaquilambre', 'Neustadt', 'Anserma', 'Prerov', 'Mandleshwar', 'Entrerrios', 'Andranopasy', 'Ambohibe', 'Eagle Mountain', 'Kadimetla', 'Obala', 'Nichinan', 'Kouloum', 'Karumba', 'Sagamihara', 'Multi', 'Imabari', 'Vizela', 'Chiquimula', 'Okkerneutboom', 'Faribault', 'Seyhan', 'Vinces', 'Vijayapuri North', 'Santa Clara del Cobre', 'Yorkville', 'Purisima de Bustos', 'Belhi', 'Balarampur', 'Buluan', 'Jefferson Valley-Yorktown', 'Buchs', 'Agua Boa', 'Karayazi', 'Raipur Buzurg', 'Sisa', 'Adigala', 'Demba', 'Tangerhutte', 'Sidi Moussa Ben Ali', 'Alpharetta', 'Shangping', 'Sabha', 'Iguatemi', 'Ragampet', 'Andranovao', 'Chitaga', 'Olalapadi', 'Chenalho', 'Kent', 'Gorele', 'Fiumicino', 'Carrboro', 'Sihanoukville', 'Cepin', 'Mawlaik', 'Melmuri', 'Mapai', 'Ciudad Lopez Mateos', 'Santaluz', 'Leninskoe', 'Esbiaat', 'Shterpce', 'Sidi Abdelkarim', 'Meoqui', 'Wajimazakimachi', 'Sulya', 'Kangersuatsiaq', 'Telpur', 'Osmanabad', 'Diourbel', 'Waycross', 'Hohenmolsen', 'Mukhtarpur Salkani', 'Campo Grande', 'Willebroek', 'Bamaur', 'Vico Equense', 'Andipalaiyam', 'Peraia', 'Ashtown', 'Biro', 'Weddington', 'Arboletes', 'Tauramena', 'Bayserke', 'Toretsk', 'Ha Tinh', 'Redondela', 'Patcham', 'Hilden', 'New Corella', 'San Francisco del Rincon', 'Tlacolulan', 'Letlhakane', 'Rubanda', 'Bohechio', 'Boujediane', 'Hacienda Heights', 'Alderwood Manor', 'Hendrik-Ido-Ambacht', 'Dhangadhi', 'Chanute', 'Paraippatti', 'Senftenberg', 'Remich', 'Nimbahera', 'Kadrabad', 'Taperoa', 'Dharan', 'Bidar', 'Jiaozuo', 'Waterford', 'Nanao', 'Verona', 'Bawali', 'Vellakkovil', 'Lakkavaram', 'Lyss', 'Punnavalli', 'Martil', 'Melut', 'Newburgh', 'Qaskeleng', 'Crevillente', 'Assemini', 'Kilankundal', 'Rades', 'Beidao', 'Leiden', 'Phu Ly', 'Antehiroka', 'Ouolodo', 'Vaddepalli', 'Andalusia', 'Marechal Candido Rondon', 'Malaut', 'Port Shepstone', 'Julich', 'Spitak', 'Si Wilai', 'Oblesevo', 'Nantes', 'Totana', 'Godhavn', 'Augusto Correa', 'Mirchpur', 'Yzeure', 'Pelabuhanratu', 'Werneuchen', 'Dospat', 'Ikkarai Boluvampatti', 'Sareh Mowndeh', 'Ramiriqui', 'Sorriso', 'Wasi', 'Indi', 'Talavera de la Reina', 'Lichuan', 'Cachoeiro de Itapemirim', 'Kariba', 'Narita', 'Dazhou', 'Marmeleiro', 'Wollongong', 'Alwaye', 'Fazilka', 'Tynemouth', 'Aramari', 'Bantay', 'Helmond', 'Talegaon Dabhade', 'Dowbaran', 'Babak', 'Country Walk', 'Irara', 'Simoes', 'Surakarta', 'Carapegua', 'Lal Khatanga', 'Nowshera', 'Sao Leopoldo', 'Pinrang', 'Bouchegouf', 'Kipili', 'Yuzhne', 'Narasimharajapura', 'Halls', 'Paco', 'Tarija', 'Vergina', 'Sheridan', 'Bastrop', 'Cota', 'Jinghong', 'Aylestone', 'Tlalixcoyan', 'Tiszakecske', 'Blackwells Mills', 'Serarou', 'Kherameh', 'Kampung Baharu Nilai', 'Gomboussougou', 'Mulheim-Karlich', 'Oshnaviyeh', 'Ocnita', 'Thornton Heath', 'Enfield', 'Castricum', 'Rio dos Cedros', 'West Chester', 'Beanana', 'Wiener Neudorf', 'Takkolam', 'Santo Antonio de Posse', 'Les Abricots', 'Higashi-Matsuyama', 'Burutu', 'Yingtan', 'Valkeala', 'Council Bluffs', 'North Decatur', 'Gaozhou', 'Ghabrah', 'Barun', 'Mugla', 'Olgiy', 'Pramatam', 'Ituacu', 'Hihya', 'Faridkot', 'Zarand', 'Escada', 'Ambaguio', 'Sao Joao Nepomuceno', 'Bardoli', 'Murakami', 'Leinefelde', 'Zawyat an Nwacer', 'Banjul', 'Genthin', 'Susono', 'Buin', 'Vryheid', 'El Tocuyo', 'Pompton Lakes', 'Ibipitanga', 'Magwe', 'Ocuilan de Arteaga', 'Solleftea', 'Areal', 'Casal di Principe', 'Hyvinkaa', 'Kukdeshwar', 'Ludlow', 'Klamath Falls', 'Husnabad', 'Belwara', 'Tonisvorst', 'Shekhupur', 'Yokaichiba', 'Amlash', 'Usumatlan', 'Mascota', 'Hemau', 'Bhawana', 'Dongshi', 'Siriari', 'Gulf Hills', 'Khromtau', 'Jalal-Abad', 'Ayvalik', 'Baza', 'Knowsley', 'Gibsonville', 'Tondangi', 'Uzunkopru', 'Pedda Penki', 'New Rochelle', 'Limache', 'Nasiyanur', 'Pansemal', 'Ottendorf-Okrilla', 'Karukurti', 'Xiangjiaba', 'Varennes', 'Del Gallego', 'Riviera Beach', 'Chicacole', 'Marignane', 'Nova Vas', 'Dammaj', 'Kayunga', 'Wallaceburg', 'Bangkalan', 'Kandi', 'Corman Park No. 344', 'Lavinia', 'Estelle', 'Parthenay', 'Datiana', 'Tamzoura', 'Banapur', 'Campia Turzii', 'Pylaia', 'Las Heras', 'Diamantino', 'Dachepalle', 'Vegarai', 'Campbell', 'Sao Cristovao', 'Nossa Senhora do Socorro', 'Ocotal', 'Del Carmen', 'Mangalvedha', 'Bamba', 'Yaojia', 'Ecully', 'Tadjourah', 'Flanders', 'Nordhragota', 'Stutterheim', 'Sher', 'Seferhisar', 'Lieshout', 'Brignoles', 'Alcoa', 'Senta', 'Isola della Scala', 'Ekchari', 'Formello', 'Rio Bananal', 'Uppalapadu', 'Apatzingan de la Constitucion', 'Yuancun', 'Mandi Burewala', 'Baraka', 'Ban Bang Khu Lat', 'Tradate', 'Fangliao', 'Santa Ana de Yacuma', 'Castel San Giovanni', 'Aachen', 'Ginan', 'Riviere-du-Loup', 'Blain', 'Alawandi', 'Xo`jayli Shahri', 'Samut Prakan', 'Ban Wang Nok Aen', 'Triprangottur', 'Tumbes', 'Vandam', 'Bistan', 'Punnaikkayal', 'St. Francis', 'Garforth', 'Sultanbeyli', 'Jalpaiguri', 'Nelas', 'Embrun', 'Karia Ba Mohamed', 'Guasavito', 'Chiroqchi', 'Leander', 'Adrano', 'Boudjima', 'Ostroh', 'Kodriva', 'Meriden', 'Nova Laranjeiras', 'Barnoldswick', 'Urbiztondo', 'Marapanim', 'Jiangshan', 'Jamshoro', 'Aki', 'Lackawanna', 'Kafr Nubl', 'Siroda', 'Whittier', 'Junqueiro', 'Karavaram', 'Jaguariaiva', 'Fort Stewart', 'Alvarado', 'Vicopisano', 'Raseiniai', 'Zolotonosha', 'Arbutus', 'Parsahi Sirsia', 'Alcora', 'Canutama', 'Edmonton', 'Kalaa Srira', 'Shirayamamachi', 'Orito', 'Mahadebnagar', 'Wallaroo', 'Ciampino', 'Sirumugai', 'Zhangshicun', 'Fanambana', 'Los Guayos', 'Santa Gertrudes', 'Godome', 'Formby', 'Paramankurichi', 'Caratinga', 'Dayalpur Sapha', 'Mahuakheraganj', 'Pine Hill', 'Zhosaly', 'Kousseri', 'Jabalpur', 'Bad Lauchstadt', 'Julu', 'Cortazar', 'Drexel Heights', 'Bolzano', 'Plano', 'Grossenhain', 'Ventaquemada', 'Bechem', 'Masaki', 'Denpasar', 'Gaggiano', 'Roxas', 'Wickede', 'Gomoh', 'Arklow', 'Fossano', 'Stralsund', 'Kresek', 'Pohrebyshche', 'Siruvalur', 'Karpi', 'Adra', 'Rafiganj', 'Oftringen', 'Ilirska Bistrica', 'Anazzou', 'Jonnagiri', 'Norcross', 'Sidi Bennour', 'Berkine', 'Ueckermunde', 'Walsall', 'Dingle', 'Raxaul', 'Chard', 'Cataingan', 'Ervadi', 'Santiago', 'Dallgow-Doberitz', 'Poperinge', 'Shankarpur', 'Felton', 'Nesebar', 'Junction City', 'Socorro', 'Horb am Neckar', 'Slavonski Brod', 'Samaniego', 'Bolingbrook', 'Sombrerete', 'Pastrana', 'Bertoua', 'Zungeru', 'Ataleia', 'Ath Thawrah', 'Palisades Park', 'Biatorbagy', 'Perches', 'Ashqelon', 'Janjanbureh', 'Stendal', 'Quixabeira', 'San Giovanni Valdarno', 'Madepalli', 'Bubanza', 'Fatehpur Shahbaz', 'Vigonza', 'Pallikapuzha', 'Stepney', 'Sibiu', 'Bodagudipadu', 'Isidro Casanova', 'Chatia', 'Angor', 'Ouro Verde', 'Markala', 'Sapkyo', 'Amarapuuram', 'Kannadiparamba', 'Koula', 'Laksar', 'Uonuma', 'Mainpuri', 'Liberia', 'Zarah Sharan', 'Alcaniz', 'Lakhaura', 'Bansbari', 'Kurikuppi', 'Muan', 'Sandrandahy', 'Corail', 'Sardrud', 'Ambositra', 'Zhengtun', 'Ijebu-Ode', 'Athol', 'Menderes', 'Shirbadgi', 'Oaxaca', 'Nogales', 'Tekkattur', 'Mobile', 'Pazaryeri', 'Hirakud', 'Oyama', 'Anrochte', 'Mashiko', 'Vobkent Shahri', 'Neumunster', 'Lingbao Chengguanzhen', 'Elbistan', 'Hakka', 'Safranbolu', 'Goulds', 'Kadriye', 'Komae', 'Mahdia', 'Peer', 'Mbulungu', 'Prince George', 'Bir Ben Laabed', 'Manukau City', 'Isfana', 'Buxton', 'Hassfurt', 'Paro', 'Andimeshk', 'Mockmuhl', 'Leonora', 'Healdsburg', 'Pyrgos', 'Dardilly', 'Richardson', 'Espiritu', 'Dipolog', 'Oyodo', 'Concord', 'Rye Brook', 'Komoro', 'Liedekerke', 'Nishon Tumani', 'Wyke', 'Padangsidempuan', 'Milton', 'Princes Town', 'Bandar-e Deylam', 'Perry Hall', "Ahmer el 'Ain", 'Kampong Cham', 'Vijayawada', 'Kendal', 'Sabinas', 'Sihanamaro', 'Kanye', 'George Town', 'Piquete', 'Mullach Ide', 'Fukushima', 'Hohen Neuendorf', 'Antsahabe', 'Navoiy', 'Riehen', 'Castel Bolognese', 'Virovitica', 'Solihull', 'Galaat el Andeless', 'Endwell', 'Langeloop', 'Tuban', 'Quaregnon', 'Mastchoh', 'La Chapelle-sur-Erdre', 'Tapes', 'Bac Ninh', 'Tustin', 'Leava', 'Nagai', 'Huatusco', 'Kyjov', 'Duque Bacelar', 'Tanbei', 'Copiague', 'Langenzenn', 'Iaciara', 'Bimbo', 'Guri', 'Lake Ridge', 'Star Dojran', 'Nivala', 'Damme', 'Ambohitompoina', 'Slovenj Gradec', 'Najibabad', 'Ziguinchor', 'Chatenay-Malabry', 'Mulanay', 'Nakasi', 'Guadalajara', 'Bad Hersfeld', 'Raciborz', 'Sesto San Giovanni', 'Walker', 'Nonnweiler', 'Dujiashigou', 'Akune', 'Malabang', 'Konarka', 'Chaves', 'Yueqing', 'Goias', 'Isbergues', 'Esenler', 'Jovellanos', 'Hajin', 'Forest Acres', 'Duchcov', 'Keza', 'Summit View', 'Sokotindji', 'Chitvel', 'Tunis', 'Sarkoy', 'Nerang', 'Berlaar', 'Lukovica', 'Bilthoven', 'Muddebihal', 'Thi Tran Mau A', 'Fano', 'Samtse', 'Litian Gezhuang', 'Dundalk', 'Silistra', 'Jangamguda', 'Borna', 'Bassar', 'Gerstungen', 'Talevad', 'Worksop', 'Sidlaghatta', 'Chapulhuacan', 'Guasdualito', 'Garhi', 'Cugir', 'Epitacio Huerta', 'Sankhavaram', 'Sabotsy', 'Nausori', 'Purnea', 'Ponnamaravati', 'Sarkikaraagac', 'Bourem', 'Quepos', 'South San Jose Hills', 'Changchunpu', 'Wissen', 'Nova Canaa', 'Whitburn', 'Wysokie Mazowieckie', 'Soacha', 'Nagathan', 'Daly City', 'Antanimasaka', 'Pura', 'Petersfield', 'Overlea', 'Patjirwa', 'Weldiya', 'Iringa', 'Guaruja', 'Sitges', 'Paola', 'West End', 'Nasice', 'Mullanwala', 'Nanjangud', 'Jozefoslaw', 'Zalau', 'Petorca', 'Zandhoven', 'Xiashi', 'Jaspur', 'Pasuruan', 'Bergen', 'Gangneung', 'Stargard Szczecinski', 'President Roxas', 'El Achir', 'Beltangadi', 'Iida', 'Ardooie', 'Swallownest', 'Bairi Chak', 'Zhijiang', 'Shiyan', 'Huzurabad', 'Kampene', 'Norden', 'Chumpak', 'Kokrajhar', 'Arbelaez', 'Mendig', 'Independent Hill', 'Etimesgut', 'White Settlement', 'Pottanur', 'Shisui', 'Mukkudal', 'Mellila', 'Pinagkaisahan', 'Alcorta', 'Dhantola', 'San Carlos de Bariloche', 'New Kingman-Butler', 'Lawang', 'Taki', 'Khanpur', 'Pirallahi', 'Amatlan de los Reyes', 'Kottapuram', 'Mount Magnet', 'Hunedoara', 'Urayasu', 'Provadia', 'Siddarampuram', 'Warfield', 'Lemery', 'Lidkoping', 'Kalikiri', 'Cravinhos', 'Mukkanur', 'Lakeland Highlands', 'Pilani', 'Al Mayadin', 'Port Charlotte', 'Jinshan', 'Zeydabad', 'Cantonment', 'Nakhon Thai', 'Dingras', 'Kirippatti', 'Haverford', 'Rufisque', 'Alagarai', 'Bang Ban', 'Querencia do Norte', 'Puebloviejo', 'Helena Valley Southeast', 'Mezitli', 'Pryor Creek', 'Hem', 'Antadinga', 'Costa de Caparica', 'Ghardimaou', 'Shuinancun', 'Shelek', 'Saldanha', 'Maues', 'Apaseo el Grande', 'Gendou', 'Conceicao da Aparecida', 'Burtonsville', 'Eagle Pass', 'Babhangawan', 'Halle-Neustadt', "Bahla'", 'Matelandia', 'Muddanuru', 'Coroaci', 'Morro Agudo', 'Lipjan', 'Mizusawa', 'Cangola', 'Montanha', 'Rethymno', 'Westwood Lakes', 'Valpovo', 'Cayambe', 'Itapicuru', 'Bourdoud', 'Polasara', 'Beltinci', 'Armentieres', 'Nuriston', 'Hattian Bala', 'Torre del Campo', 'Hormigueros', 'Tan An', 'Sao Jose dos Campos', 'Belem de Maria', 'Ksar Lmajaz', 'Ossining', 'Tikota', 'Matsumoto', 'Reina Mercedes Viejo', 'New Plymouth', 'Ocean Pointe', 'Zogbodome', 'Ahlaf', 'Southfield', 'Nijverdal', 'Mumias', 'Vac', 'Hevie', 'Malumfashi', 'Merignac', 'Agua Prieta', 'Parabcan', 'Kako', 'Chestermere', 'Nijar', 'Esanboy', 'Loango', 'Boulder City', 'Pedda Mupparam', 'Bender', 'Snellville', 'Aibongo', 'Geraardsbergen', 'Skillounta', 'Zalingei', 'Argyroupoli', 'Steiner Ranch', 'Meghraj', 'Midrand', 'Akola', 'Sacacoyo', 'Osogbo', 'Kaukhali', 'Cotui', 'Nzalat Laadam', 'Bunji', 'East Meadow', 'Yangasso', 'Saint-Cyprien', 'Banda Aceh', 'Shipley', 'Nelamangala', 'Sidi Redouane', 'Mahomet', 'El Abadia', 'Kanagicho', 'Port Elgin', 'Parkway', 'Kashiwazaki', 'Belabo', 'Rajauli', 'Vlist', 'Lucheng', 'Chania', 'Garching bei Munchen', 'Taurianova', 'Kahhale', 'Novellara', 'Cerejeiras', 'Maintal', 'Old Orchard Beach', 'Lecco', 'Oirase', 'Kailahun', 'Tres Valles', 'Sao Jose de Ribamar', 'Pala', 'Galikesh', 'Phularwan', 'Santo Tomas de los Platanos', 'Sher Chakla', 'Rumia', 'Almasi', 'Hasanpur Juned', 'Acambaro', 'Market Warsop', 'Diankabou', 'Yelahanka', 'Florissant', 'Daegu', 'Neckarsulm', 'Suan', 'Abadan', 'Piritu', 'Tulancingo', 'Al Malikiyah', 'Hazel Grove', 'Zhanlicun', 'Shoreline', 'Sutihar', 'Kopa', 'Conshohocken', 'Cuilco', 'Budaun', 'Makhar', 'Kondrukota', 'South Huron', 'Jambukuttaippatti', 'Plabennec', 'Benoni', 'Maddikera', 'Vredendal', 'Aci Castello', 'Solotvyno', 'Tarhzout', 'Webb City', 'Shagamu', 'Svendborg', 'Manokwari', 'Lalian', 'Kings Mountain', 'Sambhar', 'Kovacica', 'Sumperk', 'Barra Bonita', 'Cedeno', 'El Paraiso', 'Zeghanghane', 'Parihara', 'Chosica', 'Allur', 'Al Qatif', 'Wadlakonda', 'Itoshima', 'Huckelhoven', 'Lempaala', 'Fleming Island', 'Sewai', 'Mahesh Khunt', 'Cranbourne', 'Bahia Honda', 'Sanderstead', 'Munagapaka', 'Kamata', 'Arakere', 'Nasirabad', 'Tennala', 'Sumbal', 'Mandaluyong City', 'Sunne', 'Skierniewice', 'Dilarpur', 'Qujing', 'Minneapolis', 'Abalessa', 'Roosevelt', 'Balatonfured', 'Mel Palaiyam', 'Maitum', 'Umm el Fahm', 'Nanpala', 'Isahara', 'Qantir', 'Boucherville', 'Port Townsend', 'Sassuolo', 'Lipno', 'Vahdat', 'Orastie', 'Oakland', 'Reus', 'Arpacay', 'Andenne', 'Kafr Takharim', 'Wakasa', 'Rutigliano', 'Marianao', 'Norderstedt', 'Orobo', 'Helsinki', 'Bria', 'Wehr', 'Melsungen', 'Saint-Basile-le-Grand', 'Keisen', 'Khorramshahr', 'Brighouse', 'Mount Dora', 'Bogor', 'Ayni', 'Karoi', 'Kujri', 'Codo', 'Police', 'Ascension', 'Chaguaramas', 'Montauban', 'Kukichuo', 'Ruti', 'Abbottabad', 'Zgornje Jezersko', 'Bushkill', 'Ilobu', 'Allison Park', 'Abreu e Lima', 'Magdalena', 'Natividade do Carangola', 'Chiclayo', 'Hard', 'Singrauliya', 'Thanh Phu', 'Kraljevo', 'Grossos', 'Algonquin', 'Nowgong', 'San Pietro in Casale', 'Si Racha', 'Anjoma-Ramartina', 'Somain', 'Ronkonkoma', 'Draper', 'Gudikallu', 'Anandnagar', 'Bouknadel', 'Torri di Quartesolo', 'Kaluvaya', 'Benato-Toby', 'Bormujos', 'Xinzhou', 'Qingyuan', 'Tiruppattur', 'Rotherham', 'Alawalpur', 'West Perrine', 'Sahab', 'Brejo do Cruz', 'Uchiko', 'Lichfield', 'Schwaz', 'Prevost', 'Srimushnam', 'Isalo', 'Malinagar', 'Tucson Estates', 'Santiago do Cacem', 'Neuenhaus', 'Sa`dah', 'Diebougou', 'Karmah an Nuzul', 'Muradpur', 'Hov', 'Duffel', 'Ankeny', 'Selim', 'San Juan Bautista', 'La Lisa', 'Upper Arlington', 'Chagallu', 'West Wickham', 'Godhra', 'Bilaspur', 'Bijeraghogarh', 'Misterbianco', 'Immingham', 'Lawndale', 'Ham Lake', 'Bytow', 'Lahnstein', 'Illertissen', 'Carpi', 'Uch Sharif', 'Pariconia', 'Teixeira de Freitas', 'Newstead', 'Chegga', 'Potsdam', 'Korinthos', 'Palmital', 'Undavalli', 'Onteniente', 'Ayutuxtepeque', 'Parbhani', 'Ban Noen Kum Nueng', 'Sucha Beskidzka', 'Pieve di Soligo', 'Sioux City', 'Bni Darkoul', 'Mae O', 'Nova Sintra', 'Piratininga', 'Massantola', 'Alice Springs', 'Tidili Masfiywat', 'Mahaly', 'Baglar', 'Heartland', 'Kuyucak', 'Pili', 'Katri', 'Saladas', 'Samarinda', 'Okhtyrka', 'Sulagiri', 'Pluzine', 'Di Linh', 'Kabacan', 'Koprubasi', 'Bayanauyl', 'Murray Bridge', 'Sunchales', 'Holzminden', 'Solon', 'Orosi', 'Ofaqim', 'Vadakkangara', 'Tooele', '`Adra', 'Nagercoil', 'Berriche', 'Hyuga', 'Santos Reyes Nopala', 'Sezana', 'Halifax', 'Ambodisakoana', 'Booneville', 'Kirkenes', 'Mahmutlar', 'Bad Mergentheim', 'Tnine Lgharbia', 'Tanamarina-Sakay', 'Escaudain', 'Paterno', 'Lepe', 'Marui', 'Lalitpur', 'Las Veredas', 'Pocos de Caldas', 'Guasave', 'Canterbury', 'Rakhwari', 'Benbutucun', 'Harrisburg', 'Casalgrande', 'Mercaderes', 'Celorico de Basto', 'Goiania', 'Thiruvananthapuram', 'Padrauna', 'Navarro', 'Minturno', 'Sarari', 'Carugate', 'Math Lohiyar', 'Vairichettipalaiyam', 'Leo', 'Saposoa', 'Vasa', 'Heggadadevankote', 'Po', 'Rock Springs', 'Kantilo', 'Jedeida', 'Pignon', 'Izunokuni', 'Shiyeli', 'Mandiavato', 'San Jose del Monte', 'Ouenou', 'Bouabout', 'Kangaba', 'Ulm', 'Gwalior', 'Recke', 'Puruk Cahu', 'Leonia', 'Zapala', 'Ban Na San', 'Morant Bay', 'Teonthar', 'Douar Bou Tlelis', 'Gondia', 'Pulur', 'Andonabe', 'Santo Antonio do Jacinto', 'Poldokhtar', 'Setubal', 'Mogalturru', 'Tlahuelilpan', 'Strood', 'Pirapetinga', 'Hamina', 'Pompeia', 'Pistoia', 'Varre-Sai', 'Tonosho', 'Cisneros', 'Bad Sackingen', 'Yinggen', 'Barwa Sagar', 'Ayorou', 'Chandhaus', 'Mehsari', 'Bobrovytsia', 'McKinney', 'Gernsbach', 'Huejutla de Reyes', 'Cintalapa de Figueroa', 'Terra Alta', 'Isemi-Ile', 'Nishiwaki', 'Siruma', 'Leinfelden-Echterdingen', 'Bernissart', 'Dharampur', 'Golden', 'Beek', 'Noordwijk', 'Elixku', "Piest'any", 'Kilvisharam', 'Andria', 'Nanjakkad', 'Sankaridrug', 'Ondorhaan', 'Campo Novo do Parecis', 'Castellon de la Plana', 'Houthulst', 'Choi Hung', 'Zabrze', 'Madaya', 'Alfafar', 'Nili', 'Robertsonpet', 'Yedapalli', 'Cidade de Nacala', 'Mathibestad', 'Oued Tlelat', 'Cuetzalan', 'Juli', 'Xixinzhuangzhen', 'Hanwell', 'Pomezia', 'Lucani', 'Fastiv', 'Bela Palanka', 'Kumarkhali', 'Golubac', 'Svencionys', 'Sesena', 'Savigny-sur-Orge', 'Cal', 'Brown Deer', 'Thessaloniki', 'Jiroft', 'Taoyang', 'Beziers', 'Pathraha', 'Minalabac', 'Genova', 'Sarea Khas', 'Mahabe', 'Bela Vista', 'Zawyat Ahancal', 'Speyer', 'Cicekdagi', 'Ouled Brahim', 'Yinying', 'Aguas Vermelhas', 'Lille', 'Dillingen', 'Silang', 'Monsey', 'Kalakada', 'Comarapa', 'Tit Mellil', 'Sergio Osmena Sr', 'Tabapua', 'Nata', 'Redlands', 'Jaggisettigudem', 'Loma de Cabrera', 'Kasongo', 'Kincardine', 'Aukstieji Paneriai', 'Sabana Grande', 'Barwadih', 'Eragny', 'Mochudi', 'New Ulm', 'La Chorrera', 'Teresa', 'Pingtan', 'Caete', 'Wieruszow', 'Torrelavega', 'Waltham Cross', 'Yaupi', 'Becancour', 'Sao Fidelis', 'Koch', 'San Martin de Valdeiglesias', 'Akron', 'Cruzeiro do Sul', 'Halabjah', 'Channapatna', 'Aliquippa', 'Tilehurst', 'Sabaoani', 'Berck-sur-Mer', 'Marquetalia', 'Shepparton', 'Stalybridge', 'Banos', 'Rengam', 'Andradina', 'Andicun', 'Changde', 'Vacaville', 'Sattegalam', 'Dadukou', 'Machida', 'Quisqueya', 'Solcava', 'Annecy', 'Golakpur', 'Altenburg', 'Vahdattiyeh', 'Damoh', 'Mojo', 'Vercelli', 'Simijaca', 'Chainpur', 'Esopus', 'Dimasalang', 'Grand Rapids', 'Bowdon', 'Socastee', 'Sibila', 'Miches', 'Kalkuni', 'Kananga', 'Ain Legdah', 'Songo', 'Huandacareo', 'Panazol', 'Iacu', 'Kelandis', 'Miragoane', 'Pote', 'Shimabara', 'Mariluz', 'Anyuan', 'Guisborough', 'Hashtgerd', 'La Esperanza', 'Gitarama', 'San Miguel de Tucuman', 'Hialeah', 'Lake Wales', 'Orihuela', 'Lakhzazra', 'Marivan', 'Whitehorse', 'Arawa', 'Ono', 'Sidi Bou Ali', 'Papa', 'Guankou', 'Marghita', 'Takarazuka', 'Sao Joao do Manhuacu', "G'ijduvon Shahri", 'Puerto Heath', 'San Juanito de Escobedo', 'Mattenhof', 'Amriswil', 'Semara', 'Lokeren', 'Nevers', 'Ken Caryl', 'Hungen', 'Kehl', 'Tysmenytsia', 'Khaur', 'Hawsh al Bahdaliyah', 'Palasa', 'Vaduz', 'Munnuru', 'Oranjestad', 'Tallkalakh', 'Elwood', 'Tocaima', 'Ostringen', 'Fontoura Xavier', 'Palau', 'Eastleigh', 'Ferry Pass', 'Renigunta', 'Gig Harbor', 'Guma', 'Angadikkal Tekkekara', 'Pathri', 'Neya', 'Santa Ana Maya', 'Campobello di Mazara', 'Zhongshu', 'Lorton', 'Wankaner', 'Claremont', 'Casca', 'Izumi', 'Lodhwe', 'Sidi Boushab', 'Karcag', 'Siyabuswa', 'Simeria', 'Nemby', 'Tema', 'Komatsushimacho', 'Canhotinho', 'Ubstadt-Weiher', 'Mill Hill', 'Stange', 'Rincon', 'Leteri', 'Cavite City', 'Msaken', 'Maputsoe', 'Martorell', 'Villiers-le-Bel', 'San Esteban', 'Fruita', 'Sagunto', 'Monkayo', 'Maurepas', 'Jianguang', 'Zetel', 'Dordrecht', 'Kangbao', 'Linkou', 'Virginia', 'Freital', 'Tut', 'Tashi', 'Lang Son', 'Falun', 'Tentena', 'Cristino Castro', 'Langwedel', 'Joaima', 'Maraiyur', 'Mata Grande', 'Estanzuela', 'Rosemere', 'Kloof', 'Or `Aqiva', 'Cerro Grande', 'Villa Berthet', 'Buena Park', 'Balkanabat', 'Yaguate', 'Katagon', 'Cantaura', 'Stung Treng', 'Shambu', 'Froha', 'Anzio', 'Trichur', 'Bhatranha', 'Sideropolis', 'Herbrechtingen', 'Gent', 'Burhaniye', 'Odienne', 'Hendon', 'Rustenburg', 'Ninohe', 'Godollo', 'Muelle de los Bueyes', 'Astorga', 'Nowy Dwor Mazowiecki', 'Pilibangan', 'Rapu-Rapu', 'Ingham', 'Sable-sur-Sarthe', 'Andre Fernandes', 'Khomeyni Shahr', 'Languyan', 'Schwyz', 'Terranuova Bracciolini', 'Sangan', 'Nirpur', 'Palmeira', 'Aziylal', "Vel'ky Krtis", 'Babhnoul', 'Ait Yaazem', 'Usti nad Orlici', 'Bonham', 'Achampet', 'San Baudilio de Llobregat', 'Tres Arroyos', 'Suva', 'Hameln', 'Oued Taria', 'Tamalameque', 'Aguinaldo', 'Kitajima', 'Bilhorod-Dnistrovskyi', 'Glocester', 'Nuevo Ideal', 'Rialma', 'Tukums', 'Calhoun', 'Ambalavero', 'Idappadi', 'Benevento', 'Lormont', 'Anndevarapeta', 'Upton', 'Zamalka', 'Sasaram', 'Bucksburn', 'Segamat', 'Zywiec', 'Ribnitz-Damgarten', 'Marly', 'Mogwase', 'Senaki', 'Arroio Grande', 'Grobbendonk', 'Clarendon Hills', 'Vyshneve', 'Lomas del Mirador', 'Royal Palm Beach', 'Koloriang', 'Niort', 'Tazhakara', 'Papanasam', 'Arroio do Tigre', 'Los Bajos', 'Ostuncalco', 'Jastrzebie-Zdroj', 'Bejucal', 'St. Clements', 'Itanhem', 'Bayamo', 'Vejle', 'Kishanpur Ratwara', 'Chok Chai', 'Oberhausen-Rheinhausen', 'Karawang', 'Lengir', 'Katori', 'Karapinar', 'Talcahuano', 'Ciudad de Huajuapam de Leon', 'Hindupur', 'Keller', 'Fagaras', 'Wodonga', 'Tanakkangulam', 'Pereira', 'Clonmel', 'Eunice', 'Eberswalde', 'Tottiyam', 'Iquira', 'Ergani', 'Santa Clara', 'Jaitpur', 'Anzin', 'Toumodi', 'Sitio do Mato', 'Drabar', 'Srikhanda', 'Ban Mae Hia Nai', 'Shimubi', 'Ozd', 'Mucur', 'Burdeos', 'Hawthorne', 'El Aouana', 'Kirkuk', 'Mekele', 'Barguna', 'Brzeg Dolny', 'Santa Maria Tonameca', 'San Luis', 'Les Ponts-de-Ce', 'Helong', 'Marneuli', 'Anjiamangirana I', 'Punjai Kalamangalam', 'Phan Thiet', 'Beni Mellal', 'Jose Panganiban', 'Boxley', 'Tamalpais-Homestead Valley', 'Raisari', 'Calanasan', 'Kazanlak', 'Buved', 'Amarpatan', 'Quang Tri', 'Premia de Mar', 'Nandimandalam', 'Leeuwarden', 'Difficult Run', 'Bergamo', 'Kuwana', 'Paignton', 'Puduparambubhagam', 'Tranqueras', 'Haren', 'Wertingen', 'Castelar', 'Glenrothes', 'Bilimora', 'Funes', 'Longview', 'Afdem', 'Soyalo', 'Nasatta', 'Takkali', 'Downpatrick', 'Khotyn', 'Sidi Brahim', 'Halewood', 'Lawton', 'Surampatti', 'Arroio dos Ratos', 'Ain Zohra', 'Barking', 'Sucre', 'Amboanjo', 'Lubalo', 'Chachapoyas', 'Villa Yapacani', 'Vettakkaranpudur', 'Montemarciano', 'Aracatu', 'Avigliano', 'Roanne', 'Nadur', 'Weinan', 'Heyuan', 'Tabontabon', 'Peterborough', 'Dieli', 'El Ach', 'Ezanville', 'Gore', 'Bougoula', 'Porlamar', 'Sedona', 'Saint-Louis', 'Cebazat', 'Nueva Guinea', 'Sao Francisco de Assis', 'Achi', 'Mummidivaram', 'Antsirabe', 'Samalapuram', 'Chandler', 'Shuanghe', 'Santana do Paraiso', 'Ampondra', 'Delavan', 'Cacoal', 'Abangaritos', 'South Londonderry', 'Triesenberg', 'Amantea', 'Bou Arkoub', 'Tamponala', 'Sogamoso', 'Verdun', 'Mali Zvornik', 'La Democracia', 'El Prat de Llobregat', 'Borgloon', 'Morwa', 'Jarville-la-Malgrange', 'Evans', 'Peto', 'Forde', 'Grants', 'Maglie', 'Altagracia de Orituco', 'Budaors', 'El Affroun', 'Azhikkod', 'Hiramandalam', 'Botad', 'Rye', 'Broadstairs', 'Tilmi', 'Harleysville', 'Moncao', 'Usti nad Labem', 'Maracha', 'Gaojiayingcun', 'Castelnuovo di Verona', 'Versmold', 'Luzilandia', 'Vernier', 'Southold', 'Nkhata Bay', 'Chavkandak', 'Paniqui', 'South Milwaukee', 'Mauranwan', 'Sidi Ladjel', 'Mozirje', 'Mthatha', 'Novotroitske', 'Aracinovo', 'Fulin', 'Tatalon', 'Carpentersville', 'Vaijapur', 'Zhongcheng', 'Glendale', 'Saugeen Shores', 'Saint-Paul-les-Dax', 'Kobeliaky', 'Pai Bigha', 'Bay St. Louis', 'Bluffdale', 'Zagora', 'Careiro', 'Karahrud', 'Mudakkal', 'Amuru', 'Ismailia', 'Chodavaram', 'Kiyose', 'Cerritos', 'Ambondrona', 'Penn Hills', 'Le Taillan-Medoc', 'Girua', 'Rio Linda', 'Poas', 'Lucas do Rio Verde', 'Pratapgarh', 'Daddi', 'Lifford', 'Vandikarai', 'Westhoughton', 'Rajepur', 'Monreal', 'Loviisa', 'Juanacatlan', 'Hankey', 'Amboise', 'Bozkir', 'Ha Tien', 'Kucukcekmece', 'Posse', 'Buerarema', 'Leixlip', 'Huanren', 'Bawgalegyi', 'Manage', 'Dumanjog', 'Mulifanua', 'Traun', 'Pantelho', 'Gundur', 'Mirialguda', 'Russelsheim', 'Quixelo', 'Chipata', 'Macaubas', 'Fujino', 'Craig', 'Iskapalli', 'Solhan', 'Costessey', 'La Vega', 'Espejo', 'Perevalsk', 'Middle Island', 'Zavora', 'Lorain', 'Leninskiy', 'Rossdorf', 'Pembroke', 'Weirton', 'Villa Gonzalez', 'Multan', 'Patamundai', 'Wicklow', 'Florence', 'Tibati', 'Steti', 'Ugento', 'Messstetten', 'Pestel', 'Sujangarh', 'Camanducaia', 'Acarau', 'Balagam', 'Sungal', 'Linyi', 'Tukuyu', 'Faetano', 'Batgram', 'Saundatti', 'Roux', 'Hermosa Beach', 'Mavelikara', 'Canapi', 'Lovendegem', 'Busogo', 'Villeneuve-les-Avignon', 'Phokeng', "Sant'Arpino", 'Ambasamudram', 'Gobernador Virasora', 'Langley Park', 'Mungo', 'Carrick on Shannon', 'Warangal', 'Haan', 'Markt Indersdorf', 'Ngororero', 'Pavannur', 'Boekel', 'Sotteville-les-Rouen', 'Sarai Alamgir', 'Kutiyana', 'Great Cornard', 'Oiapoque', 'Diondiori', 'Xiangshui', 'Benesov', 'Borja', 'Traunstein', 'Cardito', 'Evreux', 'Camponogara', 'Kladovo', 'Oncativo', 'Nazret', 'Cachoeiras de Macacu', 'Bayport', 'Mandaguari', 'Katha', 'Scarborough', 'Hotan', 'Dinangorou', 'Kilimli', 'Killamarsh', 'Bifoun', 'Gorom-Gorom', 'Reddish', 'Wildomar', 'Tlalmanalco', 'Grafenhainichen', "Sama'il", 'Touwu', 'Castillos', 'Sipacapa', 'Dionisio Cerqueira', 'Maqsuda', 'Iranduba', 'Bharweli', 'Irakleio', 'Shima', 'Zandvoort', 'Edremit', 'Vitomirice', 'Laoaoba', 'Kocakoy', 'Barnet', 'Majitha', 'Minnehaha', 'Holbaek', 'Panchgram', 'San Michele al Tagliamento', 'San Agustin de Guadalix', 'Fanjakana', 'Piat', 'Marjampad', 'River Falls', 'Gualan', 'Capannori', 'Takashima', 'Siripur', 'Antakotako', "Qa'en", 'Agadir Melloul', 'West Jordan', 'Dipalpur', 'Ar Rudayyif', 'New Hyde Park', 'Rasdhoo', 'Amakusa', 'Jember', 'North Richland Hills', 'Andrainjato', "Bo'ness", 'Curridabat', 'Wangaratta', 'Mirante', 'Pulppatta', 'Prestwick', 'El Tarra', 'Ambodimanga II', 'Beausoleil', 'Raisio', 'Puttur', 'Diriamba', 'Davyhulme', 'Murukondapadu', 'North Union', 'Guayaramerin', 'Mingguang', 'Milaor', 'Dongluocun', 'Ghuenke', 'Market Harborough', 'Sainte-Savine', 'Makaha', 'Security-Widefield', 'Donabate', 'Wigan', 'Tasquillo', 'Villasagar', 'Buikwe', "Ouro Preto d'Oeste", 'Roldanillo', "Ait I'yach", 'Camliyayla', 'Cinderford', 'Maraial', 'Santo Stefano di Magra', 'Anaiyur', 'Kushk', 'Amontada', 'Marituba', 'Cambe', 'Horjul', 'Seguela', 'Wagin', 'Schwandorf', 'Lawaan', 'Kadirli', 'Vaughan', 'East Retford', 'Hinatuan', 'Shoranur', 'Rayen', 'Bistrica ob Sotli', 'Acharnes', 'Rajsamand', 'Aipe', 'Faxinal', 'Ollioules', 'Ismaning', 'Smartno', 'Lamhadi', 'Lecce', 'Bredbury', 'Adjarra', 'Lichana', 'La Troncal', 'Sao Caetano do Sul', 'Koppal', 'Hansa', 'Sirinhaem', 'Weissenfels', 'Aloha', 'Tailai', 'Jucuaran', 'Summerstrand', 'Amnat Charoen', 'Kornwestheim', 'Sorum', 'Iarpur', 'Minquan', 'Srivaikuntam', 'Almunecar', 'Ban Don Thong', 'Targu Jiu', 'Tamparan', 'Oyabe', 'Stadtlohn', 'Hazorasp', 'Torredembarra', 'Kalavad', 'Ochakiv', 'Williamsburg', 'North Greenbush', 'Victor Larco Herrera', 'Karasu', 'Sabnima', 'Iramala', 'Hoa Binh', 'Naregal', 'Concordia Sagittaria', 'Portishead', 'Zhaltyr', 'Liberty Triangle', 'Chegurumomadi', 'Vila Junqueiro', 'Bitlis', "Tajura'", 'Ambodivoara', 'Highbury', 'Nizza Monferrato', 'Kesariya', 'Walur', 'Turbat', 'Kars', 'Colorado Springs', 'Lashkar Gah', 'Lavaur', 'Nova Petropolis', 'Goalpara', 'Padra', 'Fallbrook', 'Imotski', 'Domnesti', 'Ilami', 'Edewecht', 'Playas de Rosarito', 'Palo Negro', 'Ban Song', 'Batna', 'Galt', 'Ambatolava', 'Le Cannet', 'Urumita', 'Ankily', 'Missouri City', 'Partap Tanr', 'Margarita', 'Vandiperiyar', 'Kaminokawa', 'Goldach', 'Viamao', 'Nainpur', 'Ya`bad', 'Palhano', 'Chhatapur', 'Commune Sidi Youssef Ben Ahmed', 'Jucati', 'Con Dao', "Al Bayda'", 'Badhoevedorp', 'San Rafael Oriente', 'Ndulo', 'Az Zulfi', 'Ban San Pong', 'Antioch', 'Stoke-on-Trent', 'Sahuli', 'Asagi Ayibli', 'Palukudoddi', 'Chorfa', 'Campos Novos', 'Borim', 'Hueytown', 'Anuppur', 'Port Lincoln', 'Sertanopolis', 'Cradock', 'Winsford', 'Bakhchysarai', 'Spreitenbach', 'Arese', 'Buug', 'Paruchuru', 'Lower Swatara', 'Vinica', 'Pottsville', 'Moncalieri', 'Lamzoudia', 'Crestwood', 'Damongo', 'Greece', 'Utinga', 'Niamtougou', 'Kallayi', 'Trogir', 'Osmaneli', 'Friendly', 'Kanchanpur', 'Hawera', 'Carupano', 'Nador', 'Grand-Couronne', 'Ogbomoso', 'Sahagun', 'Bridgwater', 'Xicotepec de Juarez', 'Durham', 'Jerez de los Caballeros', 'North Hykeham', 'Stannington', 'Ibanda', 'Batac', 'Logten', 'Hoorn', 'Roussillon', 'Minzhu', 'Blindio', 'Mainz', 'Catarman', 'Lumberton', 'Payabon', 'Sangeorz-Bai', "Pau d'Alho", 'Honggu', 'Sauk Rapids', 'Ortaklar', 'Thousand Oaks', 'Kocani', 'Kudangulam', 'Chicoloapan', 'De Meern', 'Qiblai', 'Montevista', 'San Felipe Orizatlan', 'Lakatoro', 'Badia Polesine', 'Bad Kissingen', 'Bom Jesus do Itabapoana', 'Erenler', 'Katwijk', 'Sao Francisco do Sul', 'Lila', 'Na Sceiri', 'Perumbakkam', 'Leling', 'Gladstone', 'Al `Aqabah', 'Waregem', 'Mulgund', 'Ambolomadinika', 'Osorno', 'Gorgab', 'Udaipur', 'Khomam', 'Sindalakkundu', 'Toukountouna', 'Chebba', 'Pettampalaiyam', 'Baile Atha Luain', 'Nurobod Shahri', 'Kropyvnytskyi', 'Guarapari', 'Caidat Sidi Boubker El Haj', 'Ala-Buka', 'Turin', 'Bucharest', 'Gingoog', 'Youngsville', 'Angwali', 'Orikhiv', 'Hofn', 'Varzea da Palma', 'Lower Makefield', 'Chillicothe', 'Seabrook', 'Phetchaburi', 'Fujisawa', 'Matara', 'Kepno', 'Alden', 'Omigawa', 'Bayombong', 'Toride', 'Mirabela', 'Tullukuttinayakkanur', 'Ugong', 'Zhangye', 'Mangha', 'Krsko', 'Comrat', 'Sudbury', 'Almaty', 'Sonamukhi', 'Terrabona', 'Windsor', 'Coralville', 'Capela', 'Qufu', 'Stallings', 'Kingaroy', 'Downham Market', 'Calapan', 'Aizuwakamatsu', 'Kaintragarh', 'Inhuma', 'Pangururan', 'Scotchtown', 'La Marque', 'Iwata', 'Zrece', 'Waldheim', 'Ajmer', 'Pamekasan', 'Mafune', 'Weiyuan', 'Gunzburg', 'Drohobych', 'Damal', 'Sigtuna', 'Campulung', 'Patancheruvu', 'Gavardo', 'Acushnet', 'Poiana Mare', 'Tarsus', 'Betanzos', 'Chau Doc', 'Cacequi', 'Lalsaraia', 'Aweitancun', 'Gia Rai', 'Corat', 'Arcos de la Frontera', 'Akassato', "Samch'ok", 'Lake Tapps', 'Aine Draham', 'Vertou', 'Neenah', 'Tanhacu', 'Shimla', 'Keal', 'Rani', 'Kadaiyanallur', 'Sefwi Wiawso', 'Shankarpur Khawas', 'Veliyangod', 'Senador Guiomard', 'Danzao', 'Ndele', 'Pornic', 'Nowy Tomysl', 'Scenic Oaks', 'Ursulo Galvan', 'Freudenberg', 'Araruna', 'Popayan', 'Centereach', 'Kamaishi', 'Sainte-Marthe-sur-le-Lac', 'Imilchil', 'Zimapan', 'Meschede', 'Puthuppariyaram', 'Paso de los Libres', 'Longbenton', 'Kampong Speu', 'Novy Jicin', 'Jinggang', 'Asahi', 'Khokri Kalan', 'Kaveripatnam', 'Lviv', 'Bowling Green', 'Braganca', 'Panjgirain Kalan', 'Emeryville', 'Ruhango', 'Tarim', 'Castiglione del Lago', 'Kyiv', 'Mineral Wells', 'Burnsville', 'Franeker', 'Teignmouth', 'Canas', 'Ajjampur', 'Barhauna', 'Zacharo', 'Fujimi', 'Billerica', 'Falconara Marittima', 'Alto Araguaia', 'Tak Bai', 'Bishops Cleeve', 'Orhangazi', 'Spokane Valley', 'Kovel', 'Walworth', 'Zushi', 'Aglasun', 'Romainville', 'Sorsogon', 'Castellamonte', 'Des Plaines', 'Xai', "'Ain Abid", 'Plattekill', 'Sinzheim', 'Cipanas', 'Paliaturutu', 'Katwe', 'Lorenskog', 'San Tung Chung Hang', 'Abrego', 'Viana', 'Ishtixon Shahri', 'Kharika', 'Sautron', 'San Narciso', 'Hilton Head Island', 'Lingolsheim', 'Bornheim', 'Koregaon', 'Matao', 'Sumida', 'Ketsch', 'Gopalnagar', 'Rodez', 'Canelones', 'Tolten', 'Crawley', 'Tillsonburg', 'Wislane', 'Mortsel', 'Akitakata', 'Halasztelek', 'Herselt', 'Kartal', 'Balabac', 'Ungaran', 'Da', 'Sao Joao del Rei', 'Ghanipur Bejha', 'Al Quway`iyah', 'Zalaegerszeg', 'Carquefou', 'Huanimaro', 'Ponte Nova', 'Monte Azul', 'Melito di Porto Salvo', 'Shimenzhai', 'Pinehurst', "K'ebri Beyah", 'Molsheim', 'Konigstein im Taunus', 'Taranagar', 'Farciennes', 'Bhilai', 'Mathurapur', 'Port-a-Piment', 'Kyegegwa', 'Ortakoy', 'Antonibe', 'Mercedes Umana', 'Coronel', 'Pedrinhas', 'Suhagi', 'Jaboatao dos Guararapes', 'Oxon Hill', 'Jalolquduq', 'Morros', 'Sumbas', 'Chingola', 'Rada Tilly', 'Kolachel', 'Bandarawela', 'Pallattur', 'Carauari', 'Colares', 'Santa Fe do Sul', 'Musapur', 'Arni', 'Negage', 'Graham', 'Lake Grove', 'Tolosa', 'Marlboro', 'Kandrawan', 'Ponte Serrada', 'Shanghai', 'Ouedeme', 'Saki', 'Chembra', 'Syston', 'Palmeirais', 'Borlange', 'Bordj Bounaama', 'Montceau-les-Mines', 'Zhufeng', 'Frei Paulo', 'Western Springs', 'Hemmingen', 'Santa Cruz del Sur', 'Pleasantville', 'Chandla', 'Maihma Sarja', 'Sacaba', 'Reynoldsburg', 'Highland', 'Sunyani', 'Ajnala', 'Ourem', 'Zaltan', 'Vannikkonendal', 'Suchindram', 'Metouia', 'Bandar-e Lengeh', 'Sestu', 'Troisdorf', 'Tibasosa', "Villenave-d'Ornon", 'Huseni', 'Komorniki', 'Ankerana', 'Planura', 'Saguenay', 'Xiangping', 'Almendralejo', 'Slawno', 'Bir Ghbalou', 'Luna', 'Tiaret', 'Pulakurti', 'General Santos', 'Shibancun', 'Lansing', 'Lake Los Angeles', 'Hala', 'Port Washington', 'Cortalim', 'Apostoles', 'Radnevo', 'Reina Mercedes', 'Tivoli', 'Gomishan', 'Maigh Nuad', 'Gujiao', 'Sorvagur', 'Milpitas', 'Ramnagar Bankat', 'Longyearbyen', 'Sarauni', 'Angel R. Cabada', 'Brooksville', 'Bochaha', 'Alguazas', 'Tigaraksa', 'Guiguinto', 'Granbury', 'Aiyetoro Gbede', 'Zenica', 'Itsoseng', "Al Madrah Sama'il", 'Murtajapur', 'Gyongyos', 'Kabale', 'Armiansk', 'Bandar-e Kong', 'Makhambet', 'Naval', 'Issoudun', 'Valmiera', 'Virapandiyanpattanam', 'Beirut', 'Tagbilaran City', 'El Factor', 'Banikane', 'Khartoum', 'Jiangna', 'Bhawanipatna', 'Aungban', 'Leonding', 'Limeil-Brevannes', 'Obernkirchen', 'Nova Trento', 'Zambezi', 'Leith', 'Gessate', 'Huguan Nongchang', 'Tirupati', 'Cilimli', 'Brie-Comte-Robert', 'Cebu City', 'Patti', 'Germi', 'Jaleshwar', 'Piliscsaba', 'Devipattinam', 'Chato', 'Khartoum North', 'Brushy Creek', 'Vlagtwedde', 'Amacuzac', 'Kosigi', 'Aranjuez', 'Leama', 'Roanoke', 'Limpio', 'Kemalpasa', 'Haisyn', 'Peoria', 'Stade', 'In Guezzam', 'Gashua', 'Nerubaiske', 'Ratlam', 'Mahmutlu', 'Srirampuram', 'Veldurti', 'Wemmel', 'Phichit', 'Dibaya-Lubwe', 'Madurai', 'Sao Goncalo do Para', 'Vitry-le-Francois', 'Anantarazupeta', 'Boudinar', 'Zschopau', 'Quellon', 'Longonjo', 'Hexham', 'Mangqu', 'Hoxut', 'Twickenham', 'Gueckedou', 'Labbaikkudikkadu', 'Mittenwalde', 'Hatton', 'Feldbach', 'Lahij', 'Sasan', 'Miastko', 'Waitakere', 'Shizuoka', 'West Warwick', 'Galati', 'Biu', 'Kudatini', 'Viradouro', 'Saffle', 'Gyumai', 'Zhongshan', 'Picayune', 'Antaly', 'Anse-a-Veau', 'Saint Peter Port', 'Ambohitrolomahitsy', 'Avadattur', 'Calasiao', 'Bambang', 'Warder', 'Koforidua', 'Tangier', 'Duggirala', 'Glenville', 'Bagrinagar', 'Ingelmunster', 'Carrigaline', 'Ashtabula', 'Galeras', 'Ulchin', 'Paarl', 'Bandung', 'Roxana', 'Gulu', 'Kesavapuram', 'Beantake', 'Yokoshiba', 'Kaohsiung', 'Piranguinho', 'Port-de-Bouc', 'Lakri', 'Cadereyta Jimenez', 'Merad', 'Centralia', 'Putte', 'Baozhong', 'Tougan', 'Knutsford', 'Estancia Velha', 'Nawada', 'Gangelt', 'Frontignan', 'Chiconcuac', 'Gunnedah', 'Lingen', 'Bozmargi', 'Porac', 'St. Peter', 'East Hempfield', 'Tapalpa', 'Jolfa', 'Photharam', '`Ajman', 'Altunhisar', 'Staufenberg', 'Fgura', 'Heist-op-den-Berg', 'Colima', 'Hoogstraten', 'Oi', 'Rustington', 'Vandamettu', 'Manzanares el Real', 'Laohekou', 'Szigethalom', 'Colindres', 'Harrow', 'Kishanganj', 'Brejo Santo', 'Dalfsen', 'Werdau', 'Rijkevorsel', 'Coondapoor', 'Kagizman', 'Pardi', 'Paombong', 'Pitseng', 'Chikwawa', 'Togou', 'Akanavaritota', 'Ojus', 'Buhl', 'Ganjing', 'Sona', 'Vayalpad', 'Taishacho-kizukikita', 'Ocoee', 'Noda', 'Nurdagi', 'Maidan Shahr', 'Culleredo', 'Paravurkambolam', 'Olintepeque', 'Kien Giang', 'Kunoy', 'Zawyat Sidi al Mekki', 'Aleksinac', 'Mandalay', 'La Laguna', 'Kanabur', 'San Miguel Panan', 'Batumi', 'Gambolo', 'Cibitoke', 'Swatara', 'Ganga', 'Cotorro', 'Skofljica', 'Piranhas', 'Aricak', 'Darb-e Behesht', 'Ampana', 'North Bellmore', 'La Roche-sur-Yon', 'Challapata', 'Binghamton', 'Hatod', 'Gouveia', 'Gardendale', 'Newnan', 'Le Haillan', 'Lencois Paulista', 'Dangriga', 'Mallan', 'Forest Park', 'Chiyoda', 'Pirenopolis', 'Sao Joao do Piaui', 'Mahavanona', 'Hanzhong', 'Madalag', 'Los Osos', 'Guatajiagua', 'Berri', 'Lardero', 'El Jicaral', 'Ter Apel', 'Kola', 'Vasteras', "Al 'Attawia", 'Pamplona', 'Tomobe', 'Bejaia', 'Kullorsuaq', 'Arganil', 'Comapa', 'Dehqonobod', 'Gaoping', 'Buntok', 'Somerton', 'Herstal', 'Igualada', 'Tumaco', 'Saint Joseph', 'Mandeville', 'Monterrey', 'Jamindan', 'Nguti', 'Sidi Lahssen', 'Redruth', 'Akcakoca', 'Jankampet', 'Tataouine', 'Manizales', 'Narasannapeta', 'Karempudi', 'St. Augustine', 'Cumanayagua', 'Montesilvano', 'Gracias', 'Congaz', 'Poso', 'Humenne', 'Shalqar', 'Lago Ranco', 'Morogoro', 'Qingan', 'Anini-y', 'Marche-en-Famenne', 'Erseke', 'Presidente Venceslau', 'Abomey', 'Dorohoi', 'Talipparamba', 'Bonito', 'Algarrobo', 'Yamoussoukro', 'Lafey', 'Kadi', 'Beberibe', 'Yurihama', 'Viana do Castelo', 'Assen', 'Sniatyn', 'Spanish Fort', 'Xintianfeng', 'Bukungu', 'Xiaqiaotou', 'Peshtera', 'Hueyapan de Ocampo', 'Marreddipalli', 'Kaimati', 'Isabela', 'Selfoss', 'Delhi', 'Fujikawaguchiko', 'Marutharod', 'Bom Repouso', 'Jagadhri', 'El Valle', 'Zlotoryja', 'Grover Beach', 'Hashtpar', 'San Alejo', 'Nazarje', 'Hillcrest', 'Sananduva', 'Peissenberg', 'Bada Barabil', 'Watampone', 'Nove Mesto nad Metuji', 'Karankot', 'Pucon', 'Guangshui', 'Falls Church', 'Danihe', 'Mudakkiraye', 'Ceuti', 'Fussen', 'Zaghouan', 'Yongbei', 'Hong Ngu', 'Conchas', 'Manivilundan', 'Tadhwa Nandpur', 'Monistrol-sur-Loire', 'Chateaubriant', 'Sangar', 'Fafe', 'Shoshong', 'Madang', 'Marcy', 'Puttlingen', 'Groves', 'Al Bahah', 'Gorham', 'Afogados da Ingazeira', 'Knokke-Heist', 'Pariyaram', 'Zagreb', 'Winton', 'Les Iles-de-la-Madeleine', 'Kusterdingen', 'Paraguari', 'Kuknur', 'Black River', 'Baishaling', 'Sigulda', 'Uppsala', 'Selm', 'Muy Muy', 'Paracale', 'Luleburgaz', 'La Llagosta', 'Demirci', 'Kalasin', 'Ibajay', 'Ceska Lipa', 'Camocim de Sao Felix', 'Ogose', 'Menlo Park', 'Ouedo-Agueko', 'Karakurisshi', 'Franklin Lakes', 'Ismayilli', 'Gottingen', 'Melmadai', 'Meulaboh', 'Ogre', 'Bad Abbach', 'Longmen', 'Awka', 'Braco do Norte', 'Bigadic', 'General Pacheco', 'Sint-Oedenrode', 'Annamalainagar', 'Angamali', 'Julio de Castilhos', 'Capao Bonito', 'Poynton', 'Santa Cruz Atizapan', 'Ekeren', 'Gandara West', 'Metairie', 'Katsuren-haebaru', 'Dupnitsa', 'Tricase', 'Mokpo', 'Dassari', 'Givatayim', 'Jambai', 'Hamsavaram', 'Foggia', 'Scott', 'Inkhil', 'Catamayo', 'Nana', 'Ntchisi', 'Olomouc', 'Madison Heights', 'Genzano di Roma', 'Vieux Fort', 'Kagvad', 'Motegi', 'Breves', 'Myitkyina', 'Aqkol', 'Al `Ashir min Ramadan', 'Ariano Irpino', 'Restrepo', 'Jeumont', 'Drazinda', 'San Pedro de Ribas', 'Chintakommadinne', 'Tank', 'Preili', 'Konakli', 'Julita', 'Meda', 'Ciudad Obregon', 'Barabai', 'Gizycko', 'Boureit', 'Arambagh', 'Bonito Oriental', 'Mbanga', 'Chemmanam', 'Kakira', 'Mundka', 'Noniya', 'Marianske Lazne', 'Khanaqin', 'Pajacuaran', 'Ain el Bya', 'Bacliff', 'Sewari', 'Altusried', 'Zinapecuaro', 'Matsuo', 'Vasylivka', 'Peda-Houeyogbe', 'Nossa Senhora dos Remedios', 'Chaltabaria', 'Hailakandi', 'Poznan', 'Taoudenni', 'Sidhwan', 'Guadalajara de Buga', 'Rozhyshche', 'Caracarai', 'Suwannaphum', "Bur Fu'ad", 'Fderik', 'Mezotur', 'Las Vegas', 'Limassol', 'Jursinci', 'Taggia', 'Poco das Trincheiras', 'Douar Oulad Naoual', 'Calarca', 'Mont-Tremblant', 'Belterra', 'Sheikhpura', 'North Olmsted', 'Ramdurg', 'Orumanayur', 'Villafranca de los Barros', 'Esztergom', 'Payakaraopeta', 'Ramchandrapur', 'Khajamahalpur', 'Najafgarh', 'Athni', 'Mikkelin Maalaiskunta', 'Bijnor', 'Ighram', 'Dragash', 'Thargomindah', 'Castel Maggiore', 'Belambo', 'Nueva Palmira', 'Kurabur', 'Yahsihan', 'Chesterton', 'Konduru', 'Tordesillas', 'Biala Podlaska', 'Katsuragi', 'South Bradenton', 'Naranja', 'Okhmalynka', 'Newtownards', 'Pimenta Bueno', 'Naraura', 'Craponne', 'Auta', 'Kumaravadi', 'Anajas', 'Mouiat Ouennsa', 'Norvenich', 'Jaruco', 'Stockelsdorf', 'Moron de la Frontera', 'Dalmine', 'Le Pont-de-Claix', 'Ambahy', 'Fianga', 'Pinecrest', 'San Bonifacio', 'Carpinteria', 'Stip', 'Ergolding', 'Lokossa', 'Siniloan', 'Seeheim-Jugenheim', 'Nottingham', 'Abrantes', 'San Felipe Jalapa de Diaz', 'Boninal', 'Digor', 'Hosur', 'Trepuzzi', 'Koidu', 'Kumta', 'Paracatu', 'Shinkai', 'Thuan An', 'Pokaran', 'Betzdorf', 'Binde', 'Ghazni', 'Vembur', 'Pontivy', 'Juterbog', 'Sheerness', 'Chhindwara', 'Alagoinhas', 'Itabira', 'San Jose Ojetenam', 'Mazatenango', 'Nabinagar', 'Manavgat', 'Ovenden', 'Lagkadas', 'Gaildorf', 'Chirpan', 'Vidalia', 'Maracaju', 'Pindwara', 'Nova Dubnica', 'Mokarrampur', 'Bocaue', 'Warsop', 'Florida', 'Gangajalghati', 'Monte Alegre do Sul', 'Kibi', 'Pirakkad', 'Bani `Ubayd', 'Tiberias', 'Palombara Sabina', 'Rongat', 'Wabagai', 'Maceio', 'Agliana', 'Bell', 'Yosano', 'Potiragua', 'Mannamangalam', 'Yerevan', 'Valledupar', 'Faversham', 'Xavantes', 'Embrach', 'Desanagi', 'Ciudad de Ceuta', 'Contagem', 'As Sib', 'Kulgam', 'Kanigiri', 'Kuljibrin', 'Palm Beach Gardens', 'Mugnano di Napoli', 'Chunian', 'Merzig', 'Dhilwan Kalan', 'Stainz', 'Betera', 'Vallur', 'Seva', 'Puerto Lopez', 'Inekar', 'Suong', 'Tiruvottiyur', 'Baghlan', 'Ilo', 'Bautzen', 'Koutiala', 'Allahabad', 'Lhokseumawe', 'San Juan del Sur', 'Taua', 'Sapa Sapa', 'Otuke', 'Sabra', 'Kutahya', 'Coccaglio', 'Mamun', 'Billerbeck', 'Weil am Rhein', 'Lajia', 'Argentan', 'Ninheira', 'Aranya Kalan', 'Lumding', 'Comendador Levy Gasparian', 'Lilongwe', 'Cerro Largo', 'Oosterhout', 'Oswestry', 'Gaspar', 'Videle', 'Maumelle', 'Azezo', 'Osuna', 'Pulla', 'Yangshuling', 'Khirkiyan', 'Hamilton', 'Kalgoorlie', 'Rejiche', 'Aylesford', 'Nagaoka', 'Monterey Park', 'Potavaram', 'Jequeri', 'Boisbriand', 'Meliana', 'Quilenda', 'Santa Leopoldina', 'Lakeland', 'Eatontown', 'Basilisa', 'Wernau', 'Sassenheim', 'Twistringen', 'Talitay', 'East Wenatchee', 'Jinbi', '`Anbarabad', 'Shimanto', 'Caputira', 'Goppingen', 'Turuttikkara', 'Zhongtai', 'San Francisco Zapotitlan', 'Hazebrouck', 'Clemencia', 'Surandai', 'Khijri', 'Jacarezinho', 'Lunsar', 'Ouled Sidi Brahim', 'Chaqra', 'Distraccion', 'Eppelborn', 'Vakhrusheve', 'San Carlos del Zulia', 'Khairpur Tamewah', 'Shongzhy', 'Ban Bang Kaeo', 'Murillo', 'Ban Mai', 'Lakoucun', 'Kottaikuppam', 'Sini', 'Karditsa', 'Penumur', 'Ramos', 'Arauco', 'Chatayamangalam', 'Nieuw-Lekkerland', 'Ixhuatlan del Sureste', 'Bog Walk', 'Daltonganj', 'Guamo', 'Kiklah', 'Bongouanou', 'Las Pinas City', 'Narni', 'Olesa de Montserrat', 'Barrancabermeja', 'Souama', 'Sokolow Podlaski', 'Katakwi', 'Sunset', 'Dilawarpur', 'Telagh', 'Caapiranga', 'Pavittiram', 'Yunzhong', 'Paicandu', 'Farafangana', 'Zawyat Sidi Ben Hamdoun', 'Bosilegrad', 'Cabanaquinta', 'Eaubonne', 'El Paujil', 'Matca', 'Rawdon', 'Tongoma', 'Monsefu', 'Itaguara', 'Mihama', 'Alengad', 'Zhumadian', 'Schlieren', 'Murree', 'Seto', 'Yenipazar', 'Kharak Kalan', 'Ambares-et-Lagrave', 'Kawlin', 'Sainte-Julienne', 'Khagaul', 'Dyersburg', 'Ekuvukeni', 'Virginopolis', 'Zhenzhou', 'Jiashizhuangcun', 'Changchong', 'Notodden', 'Stephenville', 'Rellingen', 'Keolari', 'Los Alcarrizos', 'Brake', 'Yuzawa', 'Cabatuan', 'Kanegasaki', 'Vohiposa', 'Arequipa', 'Antseza', 'Puspokladany', 'Natick', 'Astrea', 'Auchel', 'Aquidauana', 'Mohlin', 'Redcar', 'Brookline', 'Jekabpils', 'Melville', 'Pintadas', 'Manfalut', 'Aasiaat', 'Sao Borja', 'Ransiki', 'Ibipetuba', 'Kuala Lumpur', "Tong'anyi", 'Eklahra', 'Satara', 'Pupri', 'Gualcince', 'Friedberg', 'Sinnai', 'Saynshand', 'Bupoto', 'Kalanjur', 'Hilversum', 'Shobara', 'Mahaplag', 'Puturge', 'Oued Jdida', 'Melnik', 'Pointe-Noire', 'Tipton', 'Tekari', 'Ounagha', 'Cavriago', 'Lupon', 'Paoay', 'Follonica', 'Bhachhi Asli', 'Hofheim', 'Kasukabe', 'Viligili', 'Tinton Falls', 'Limay', 'Pauktaw', 'Sedziszow Malopolski', 'Citrus', 'Chirongui', 'Mitane', 'Payimattam', 'Pajaros', 'Gelves', 'Tupi Paulista', 'Pokhuria', 'Thetford Mines', 'Iselin', 'Bandar Murcaayo', 'Taungoo', 'Hikawadai', 'Los Santos', 'Marbella', 'Vranje', 'Squamish', 'Mahthi', 'Toura', 'Cicciano', 'Wernigerode', 'Xiaobazi', 'Reforma', 'Ispir', 'Ntcheu', 'Samboan', 'Kelamangalam', 'Diglipur', 'Malkhaid', 'Tomakomai', 'Vidhareidhi', 'Rio Grande City', 'Puerto Libertador', 'Ariccia', 'Mashhad', 'Renkum', 'Sakabansi', 'Dandeli', 'Gahanna', 'Waupun', 'Sonoma', 'Kutchan', 'Phetchabun', 'Bischwiller', 'Barotac Viejo', 'Giresun', 'Caieiras', 'Rodeiro', 'Guabo', 'Kusti', 'Ketou', 'Oestrich-Winkel', 'Cayce', 'Sume', 'Yaese', 'Zemamra', 'Spring Garden', 'Sheron', 'Karnawad', 'Neu-Ulm', 'Grafelfing', 'Costa Mesa', 'Wangjia', 'Joaquim Gomes', 'Al Qurnah', 'Naivasha', 'Borgerhout', 'Santa Rosalia', 'Corocoro', 'Liskeard', 'Kruibeke', 'Mingaora', 'Latauna', 'Sepolno Krajenskie', 'Madison', 'Berkane', 'San Giovanni Rotondo', 'Siwah', 'Aldeia de Paio Pires', 'Krosno Odrzanskie', 'Zriba-Village', 'Tampico', 'Gastre', 'Katrineholm', 'Joao Pinheiro', 'Martensville', 'Karakocan', 'San Antero', 'Pocinhos', 'Erravaram', 'Ogaki', 'Ezzhiliga', 'Dimmbal', 'Torul', 'Darlaston', 'Itapissuma', 'Mragowo', 'Buton', 'Villach', 'Xinzhancun', 'Rusape', 'Ialoveni', 'Biltine', 'Alukkuli', 'Panpuli', 'Barili', 'Kempen', "`Alem T'ena", 'Wustermark', 'Murnau am Staffelsee', 'El Ghomri', 'Kambhaladinne', 'Ciego de Avila', 'Khoragachhi', 'Tiburon', 'Ohata', 'San Martin Zapotitlan', 'Priego de Cordoba', "Mun'gyong", 'Placetas', 'Comodoro Rivadavia', 'Goluwali', 'Janglot', 'Ammanford', 'Juquia', 'Parapua', 'Mitchell', 'Lincoln', 'Kafr Sa`d', 'Dhanwada', 'Vergiate', 'Son Servera', 'Uto', 'Gesuba', 'Jamaat Shaim', 'Basirhat', 'Dalain Hob', 'Bishenpur', 'Regensburg', 'Altensteig', 'Kekem', 'Sanwal', 'Palankottai', 'Raquira', 'Antsahanoro', 'Cerignola', 'Beesel', 'Burj al `Arab', 'Rantepao', 'Ankadindambo', 'Jinotega', 'Hechingen', 'Ingersoll', 'Vitoria da Conquista', 'Parma', 'Vaciamadrid', 'Daita', 'Santa Catarina Juquila', 'Kozje', 'Hagen', 'Pargi', 'Porto Nacional', 'Krishnarajpur', 'Huazangsi', 'Blida', 'Majra', 'Sahasoa', 'Inzai', 'Murrysville', 'Jalalpur', 'Belakoba', 'Ichalkaranji', 'Almagro', 'Vassouras', 'Taubate', 'Sarrat', 'Jupiter', 'Icara', 'Avrankou', 'Brick', "Giv`at Shemu'el", 'Bargarh', 'Wang Tau Hom', 'Teixeira', 'Darmstadt', 'Kasuya', 'Periya Pattanam', 'Dangbo', 'El Viejo', 'Xapuri', 'Woodbury', 'Pampatar', 'Waspan', 'Samabouro', 'Al Ahad al Masarihah', 'Koath', 'Gwanda', 'Vinhais', 'Islahiye', 'Mogpog', 'Bayt Jala', 'Nagireddipalli', 'Miami Beach', 'Conversano', 'Quartier Militaire', 'Cauto Cristo', 'Arhribs', "Villa O'Higgins", 'Orange Walk', 'Dharmavaram', 'Paraiso do Tocantins', 'Calolziocorte', 'Capitan Sarmiento', 'Tyagadurgam', 'Vendram', 'Harstad', 'Caldogno', 'Tlaxcoapan', 'Jiangjiafan', 'Arta', 'Jacona de Plancarte', 'Sarpol-e Zahab', 'Nahuala', 'Boa Esperanca', 'Mealhada', 'Troutdale', 'Wharton', 'Tournon-sur-Rhone', 'Casino', 'Saint-Lin--Laurentides', 'Jaco', 'Evander', 'Figeac', 'Elukone', 'Camocim', 'Sahpur', 'Lower Tungawan', 'Vengur', 'Miraima', 'Ximeicun', 'Hoogland', 'Weimar', 'Kokkola', 'Atiquizaya', 'Oldenburg in Holstein', 'Kulattuppalaiyam', 'Ramada', 'Itampolo', 'Bommarbettu', 'Piove di Sacco', 'Kanavaypatti', 'Ise', "Oktyabr'sk", 'Tigzirt', 'Oulad Amrane', 'Kundal', 'Cheria', 'Kalchini', 'Dschang', 'Bentiu', 'Emboscada', 'Sangram', 'Plerin', 'Kezi', 'Grobenzell', 'Tomino', 'Santa Cruz do Rio Pardo', 'Isorana', 'Al Qbab', 'Sleepy Hollow', 'Dehloran', 'Taixi', 'Noamundi', 'Ankilimalinika', 'Hwasu-dong', 'San Bruno', 'Dijon', 'Yiyang', 'Guntakal', 'Suluova', 'Kottacheruvu', 'Ludwigshafen', 'Ghargaon', 'Rosa', 'Sual', 'Miller Place', 'Dronten', 'Dobbs Ferry', 'Santa Marta', 'Khopoli', 'Kelilalina', 'Bela Cruz', 'Klela', 'Santomera', 'Chortoq', 'Kemp Mill', 'Mae Rim', 'Yalvac', 'Meridiala', 'Itaporanga', 'Macerata Campania', 'Maxeville', 'Sierra Colorada', 'Ginatilan', 'Fortaleza', 'Tenmalai', 'Oyonnax', 'Taishi', 'Krapkowice', 'Sojitra', "Alamat'a", 'Flemalle-Haute', 'Jeannette', 'Edessa', 'Fairview Park', 'Pabianice', 'An Nayrab', 'Gussago', 'Aridagawa', 'Inebolu', 'Ponte Buggianese', 'Comallo', 'Handeni', 'Kadogawa', 'Vesala', 'Santana do Manhuacu', 'Vicente Guerrero', 'Trnovska Vas', 'Franklin Square', 'Hessle', 'Tuvur', 'Les Mureaux', 'Rikuzen-Takata', 'Shahmirzad', 'Irshava', 'Almirante Tamandare', 'Bhairapura', 'Grand Forks', 'Adolfo Gonzales Chaves', 'Donmatias', 'Jobat', 'Jami', 'Norfolk', 'Kasagi', 'Senador Canedo', 'Bhabanipur', 'Danville', 'Guaraciaba do Norte', 'Perez', 'Akaiwa', 'Santa Cruz do Monte Castelo', 'Deerlijk', 'Key West', 'Somandepalle', 'Bartoszyce', 'Tsuiki', 'Hillegom', "Qa'emiyeh", 'Sahidganj', 'Terrier Rouge', 'Marikina City', 'Vinzons', 'Makouda', 'Conceicao da Barra', 'Tebesbest', 'Hachinohe', 'Carora', 'Luckeesarai', 'Castelo', 'Bommagondanahalli', 'Kutavur', 'Bedworth', 'Laarne', 'East Grand Rapids', 'Itatiba', 'Pineville', 'Banhatti', 'Thiotte', 'San Martin de las Piramides', 'Puconci', 'Saidpur Dabra', 'Xaghra', 'Zhuhai', 'Altamonte Springs', 'Warminster', 'Zollikon', 'Patiala', 'Oxted', 'Gornja Radgona', 'Bade', 'Dahe', 'Risca', 'Puxinana', 'Mixtla de Altamirano', 'Haderslev', 'Sun Village', 'Mahazoarivo', 'Asara', 'Tortosa', 'Bellmead', 'San Marino', 'Khrestivka', 'Usuki', 'Yasynuvata', 'North Valley', 'Chennur', 'Bishnupur', 'Fanzhao', 'Oltinko`l', 'El Carmen', 'Lagawe', 'Argentona', 'Torgau', 'Tiflet', 'Xincun', 'Thap Khlo', 'Havant', 'Sulingen', 'Apt', 'Sabana de Torres', 'Qyzylorda', 'Hebburn', 'Katerini', 'Werota', 'Taishan Houcun', 'Zefyri', 'Akseki', 'Nkawkaw', 'Sabangan', 'West Sacramento', 'Calandala', 'Charcas', 'Ash Shaykh `Uthman', 'Mahajeran-e Kamar', 'Toundout', 'Mainvilliers', 'Kronach', 'Jayal', 'Ruthen', 'Kamenz', 'Cayirova', 'Temple', 'Sammamish', 'Agstafa', 'Gleno', 'Sattahip', 'Kichha', 'Tartagal', 'Pingliang', 'Ganvie', 'Zapotiltic', 'Temsamane', 'Gyegu', 'Shangqiu', 'Niquen', 'Baotou', 'Orzesze', 'Ghanzi', 'Manikkal', 'Zaraza', 'Sarbisheh', 'Mabehiri', 'Clitheroe', 'Jastrebarsko', 'Frydek-Mistek', 'Taxila', 'Valencia West', 'Ambalaroka', 'Fouriesburg', 'Ojuelos de Jalisco', 'Saurimo', 'Bereket', 'Ledbury', 'Lenexa', 'Bakhor', 'Come', 'Tonk Khurd', 'Chak Jhumra', 'Qarah', 'Macaparana', 'Dormagen', 'Kamirenjaku', 'Nhlangano', 'Loughton', 'Carcarana', 'Bahawalpur', 'Hengshui', 'Pointe-a-Pitre', 'Louvain-la-Neuve', 'Lindong', 'Elsfleth', 'Gangavalli', 'Aguelmous', 'Termas de Rio Hondo', 'Nueva Paz', 'Karuvellur', 'Dholka', 'Dishashah', 'Neropolis', 'Ourilandia do Norte', 'Gorna Oryahovitsa', 'Dangyang', 'Raiganj Bazar', 'Piliv', 'Chosei', 'Chambas', 'Ringwood', 'Bongaigaon', 'Polignano a Mare', 'Naranda', 'Mondai', 'Babadag', 'Nattam', 'Crosne', 'Ananipalle', 'Sorata', 'Collipulli', 'Famenin', 'Waddinxveen', 'Koratgi', 'Mojokerto', 'Moreno Valley', 'Baltit', 'Wokha', 'Salzwedel', 'Shangtangcun', 'Stahnsdorf', 'Jetpur', 'Vellallur', 'Drouin', 'Tochigi', 'Kaniyambadi', 'Aran Bidgol', 'Park Ridge', 'Ammon', 'Guipavas', 'Miaoli', 'Monchengladbach', 'Sulahpet', 'McMinns Lagoon', 'Palangotu Adwar', 'Delray Beach', 'Singura', 'Oakland Park', 'Goonellabah', 'Drezdenko', 'Athurugiriya', 'Aizumi', 'Maria la Baja', 'Toshloq', 'Pale', 'Burhar', 'Murrieta', 'Loufan', 'Kibungo', 'Doi Lo', 'New Bedford', 'Srinivaspur', 'Atru', 'Mataili Khemchand', 'Ashibetsu', 'Hansi', 'Bacong', 'Rukhae', 'Kosching', 'Addison', 'Rangpur', 'Toppenish', 'Eyvanekey', 'Girardot', 'Piravanthur', 'Korem', 'Porto Torres', 'Lagoa Grande', 'Aratuba', 'Hadithah', 'Bel Air South', 'Buluko', 'Ambala', 'Baie du Tombeau', 'El Cairo', 'Caluire-et-Cuire', 'Caluya', 'Ponedera', 'Ban Bang Mae Nang', 'Leer', 'Kissing', 'Shamalgan', "'s-Heerenberg", 'Miguel Calmon', 'Bar', 'Birak', 'Igana', 'Onnaing', 'Sveta Trojica v Slovenskih Goricah', 'Boca do Acre', 'Amaravati', 'Dhulkot', 'Joubb Jannine', 'Edeia', 'Toda', 'Okinoshima', 'Murra', 'East Renton Highlands', 'Badshahpur', 'Lalgudi', 'Bomareddipalli', 'Curaca', 'Zetale', 'Yuci', 'Levittown', 'Linbian', 'Toyooka', 'Krugersdorp', 'Northfleet', 'McKee City', 'Atescatempa', 'Dundo', 'Sarableh', 'Santo Domingo', 'Lucca', 'Le Chambon-Feugerolles', 'Kanyato', 'Sokoura', 'Ponte de Lima', 'Kafr Laha', 'Covenas', 'Shchastia', 'Bamenda', 'Tolmin', 'Ottakkadai', 'Palmetto Bay', 'Castrop-Rauxel', 'Barra dos Coqueiros', 'Mahdishahr', 'Angra do Heroismo', 'Chulucanas', 'Haslev', 'Cahul', 'Luocheng', 'Asyut', 'Muping', 'Country Club Hills', 'Ambondro', 'Nakayama', 'Boljevac', 'Chaman', 'Birqash', 'Ghat', 'Cungus', 'Cuyapo', 'Kissane Ltouqi', 'Mexborough', 'Mansinghpur Bijrauli', 'Komlo', 'Boissy-Saint-Leger', 'Sabinopolis', 'Talkhvoncheh', "'s-Gravendeel", 'Malkara', 'Glace Bay', 'San Andres Sajcabaja', 'Pforzheim', 'Wombwell', 'New Hanover', 'Schneverdingen', 'Vidisha', 'Neubiberg', 'Banu Chhapra', 'Pamidi', 'Benkovac', 'Cocoa', 'Stawell', 'Saint-Quentin', 'Northdale', 'Clermont', 'Don Torcuato', 'Hanchuan', 'Hessisch Lichtenau', 'Jena', 'Tursunzoda', 'Avintes', 'Burela de Cabo', 'Gladenbach', 'Myszkow', 'Quiroga', 'Azove', 'Sao Geraldo', 'Penrith', 'Arenapolis', 'Miyada', 'Jurbarkas', 'Matias Olimpio', 'Bindki', 'Ditzingen', 'Babaeski', 'Lyngdal', 'Baarn', 'Xinmin', 'Sondho Dullah', 'San Onofre', 'Halawah', 'Soran', 'Suihua', 'Perere', 'Ban Thum', 'Massy', 'Delta', 'Qom', 'Eldoret', 'Tournavista', 'Cangas', 'Juchen', 'Pattikonda', 'Cacuaco', 'Kamisato', 'Ilarionove', 'Spijkenisse', 'Anavatti', 'Seringueiras', 'Beipiao', 'Kouribga', 'Kirangur', 'Bismarck', 'Ahaus', 'Barajor', 'Canta', 'Sierre', 'Gopalpur', 'Fremont', 'Baixa Grande', 'Apolo', 'Xigujing', 'Vevcani', 'Madhura', 'Feucht', 'Fehmarnsund', 'Cesme', 'Nerviano', 'Brownsweg', 'Baynala', 'Ky Anh', 'Khunti Dhanaili', 'Arjona', 'Ciudad Sandino', 'Lyantonde', 'Pililla', "Santa Croce sull' Arno", 'Liangshi', 'Pirkkala', 'Saligrama', 'Odugattur', 'Pullanvidudi', 'Fort Irwin', 'Veliko Gradiste', 'Culiacan', 'Singa', "Conde-sur-l'Escaut", 'North Fort Myers', 'Martinez', 'Raisinghnagar', 'Heubach', 'Accokeek', 'Citta di Castello', 'Sint-Genesius-Rode', 'Iruma', 'Curico', 'Pitlam', 'Niamey', 'Araci', 'Zaozhuang', 'Az Zahiriyah', 'Takouta', 'Douar Bouchfaa', 'Mirna', 'Aron', 'Firminopolis', 'Niederwerrn', 'Chandi', 'Parana', 'Bom Jesus do Galho', 'Boussu', 'General Nakar', 'Elancourt', 'Tirodi', 'Cosenza', 'Sooke', 'Alga', 'Duruma', '`Amuda', 'Aburi', 'Short Hills', 'Haripura', 'Granada', 'The Mumbles', 'Yadavolu', 'East Honolulu', 'Vellalapuram', 'Boufarik', 'Oer-Erkenschwick', 'Terkuvenganallur', 'Orodara', 'Didouche Mourad', 'Stapleford', 'Iles', 'Cantilan', 'Kennebunk', 'Urasoe', 'Singosari', 'Onda', 'Rushall', 'Bukhara', 'Ibbenburen', 'Epsom', 'Glendale Heights', 'Ubai', 'Chatra', 'Ranavav', 'Chevilly-Larue', 'Kemijarvi', 'Ungoofaaru', 'Zaouiat Moulay Bouchta El Khammar', 'Vinaninkarena', 'Zaleze', 'Zelenikovo', 'Newfane', 'Langedijk', 'Orly', 'Berchha', 'Guimba', 'Gamboula', 'Alakamisy Itenina', 'Alotau', 'Sothgaon', 'Bockum', 'Sawai Madhopur', 'Lahan', 'Santa Ignacia', 'Modesto', 'Jales', 'Baragaon', 'Huarmey', 'North Ridgeville', 'Kolomyia', 'San Martin de los Andes', 'Saboeiro', 'Bucay', 'Xexeu', 'Santo Stino di Livenza', 'Boca Chica', 'Keskin', 'Namtu', 'Nyenga', 'Carlow', 'Medemblik', 'Cadoneghe', 'Hagondange', 'Zozocolco de Hidalgo', 'Desamparados', 'Bengonbeyene', 'Bou Zemou', 'Bellary', 'Bremen', 'Fitzgerald', 'Finale Emilia', 'Bilgi', 'Starobilsk', 'Korangal', 'Ingeniero Maschwitz', 'Akat Amnuai', 'Afua', 'Dondon', 'Dhangaraha', 'Avellaneda', 'Castelfranco di Sopra', 'Bang Racham', 'Bisaul', 'Persan', 'Palaw', 'Manja', 'Lindesberg', 'Halton Hills', 'Mandaon', 'Ratingen', 'Staszow', 'Sitakili', 'Niebull', 'Malabo', 'Berceni', 'Floha', 'Wiltz', 'Anamoros', 'Banqiao', 'Afgooye', 'Druzhba', 'Montevrain', 'Socopo', 'Logatec', 'Prestatyn', 'Lodhikheda', 'Phuket', 'Berriozabal', 'Puan', 'Brooklyn', 'Casma', 'New Square', 'Lake Station', 'San Juan de Rio Coco', 'Korydallos', 'Cubatao', 'Stidia', 'Kalamansig', 'Amstetten', 'Ventersburg', 'Khovaling', 'Millburn', 'Halba', 'Tenggarong', 'Stelle', 'Bacup', 'Cacak', 'Khamanon Kalan', 'Fanyang', 'Patikul', 'Igarassu', 'Lakeland Village', 'Sanxing', 'Kongsberg', 'Harelbeke', 'Chintalapudi', 'Chinna Ganjam', 'Utena', 'Carai', 'Calcoene', 'Salgar', 'Maardu', 'Tarhunah', 'Breaza', 'Villamontes', 'Vadakakarai', 'Jaidte Lbatma', 'Chautapal', 'Dois Corregos', 'Mahmudabad Nemuneh', 'Raul Soares', 'Walker Mill', 'Qiaoyang', 'Alachua', 'Nailsea', 'Ipaumirim', 'Wakabadai', 'Sebt Labrikiyne', 'Anupshahr', 'Calinog', 'Hammamet', 'Famagusta', 'Norwich', 'Little Falls', 'Arealva', 'Bad Schwalbach', 'Dovzhansk', 'Amiens', 'Floridia', 'Sukma', 'Zoeterwoude', 'Narahia', 'Santo Nino', 'Acate', 'Zuromin', 'Kagithane', 'Quarrata', 'Ivankiv', 'Sougueur', 'Ain Harrouda', 'Temsia', 'Yangjiang', 'Baja', 'Ampasina-Maningory', 'Porto', 'Kolavallur', 'Dianbu', 'Sakuragawa', "L'Union", 'Prattipadu', 'Mogosoaia', 'Lija', 'Dornbirn', 'Madera Acres', 'Munro Turuttu', 'Ban Ao Nang', 'Catubig', 'Kibungan', 'Cienfuegos', 'Taza', 'Zoma-Bealoka', 'Zabid', 'Sandare', 'Labuleng', 'Dresden', 'The Valley', 'Le Kram', 'Shihe', 'Zira', 'Kurume', 'Wengtiancun', 'Mucuri', 'Burnley', 'Aborlan', 'Allen', 'Diriomo', 'Metro', 'Mandalgovi', 'Etten-Leur', 'Almeria', 'Wootton', 'Doukouya', 'Ban Tha Khlong', 'Trikala', 'Hasanpura', 'Gisborne', 'Cooper City', 'Mukasi Pidariyur', 'Oulad Yaich', 'Ibitita', 'Majarhat', 'Naas', 'Sete Barras', 'Ongata Rongai', 'El Parral', 'Hayrabolu', 'Kasipalaiyam', 'Jamaame', 'Tripoli', 'Ettapur', 'Gurnee', 'Ofunato', 'Statesville', 'Taiwa', 'Boekenhouthoek', 'Bandar-e Khamir', 'Meningie', 'Valle de La Pascua', 'Vinhedo', 'Ksar', 'Al Mindak', 'Blackrock', 'Kaua Kol', 'Ban Yaeng', 'Kodaimangalam', 'Brownsville', 'Zhushan', 'Vrede', 'Pombas', 'Albatera', 'Puerto Narino', 'Pakpattan', 'Awfouss', 'Santa Cruz de Los Taques', 'Thi Tran Ngai Giao', 'Pakkam', 'Chibok', 'Wollert', 'Berisso', 'Pocone', 'Eindhoven', 'Eramala', 'Alphen aan den Rijn', 'Kamuli', 'Puerto Quijarro', 'Manaus', 'Burleson', 'Topsham', 'Maribondo', 'Buta', 'Tampamolon Corona', 'Alagoinha', 'Valljkkod', 'Nilo Pecanha', 'Chachahuantla', 'Dax', 'Texcatepec', 'Horizontina', 'Serra Negra', 'Kolda', 'Bee Ridge', 'Nekarikallu', 'Wasilla', 'Vatana', 'Bestobe', 'Fatsa', "Sao Joao d'Alianca", 'Sondrio', 'Gorgonzola', 'Nongstoin', 'Yaguajay', 'Solapur', 'Abington', 'Star', 'Asuncion', 'Valdosta', 'Zhongtanying', 'Rewahi', 'Ankarana-Miraihina', 'Debar', 'Playa Grande', 'Guabiruba', 'Del Aire', 'Tumen', 'Perket', 'Sinor', 'Canford Cliffs', 'Khorramdarreh', 'Latehar', 'Guerouma', 'Pine Ridge', 'Alba Iulia', 'Qalansuwa', 'Orocue', 'Taraz', 'Jesus Maria', 'Langsa', 'Trikkunnapuzha', 'Nosy Varika', 'Nishi', 'Anjangaon', 'Trebisacce', 'Taucha', 'Grosshansdorf', 'Shirone', 'Swakopmund', "Bog'ot", 'Antenor Navarro', 'Parsippany-Troy Hills', 'East Northport', 'Sur Singh', 'Huy', 'Gandikunta', 'Tamarakulam', 'Cerknica', 'Hindaun', 'Vichy', 'Hilo', 'Branson', 'Kalimala', 'Othello', 'Dol', 'Nanga Eboko', 'Angical', 'Plains', 'Culpeper', 'Famjin', 'Watrap', 'Basrur', 'Aracoiaba da Serra', 'Bretten', 'Gavere', 'Lingxi', 'Doctor Juan Eulogio Estigarribia', 'Aracatuba', 'Fredonia', 'Wittmund', 'Hirao', 'Elmshorn', 'Pfullingen', 'Temse', 'Betim', 'Bhiraha', 'Taquara', 'Orivesi', 'Thanh Pho Uong Bi', 'Kamalganj', 'Manamadurai', 'Sint-Kruis', 'Sankaramangalam', 'Evergreen Park', 'Catigbian', 'Auhar Sheikh', 'Gornji Vakuf', 'Obanazawa', 'Casandrino', 'Saquisili', 'Narsinghgarh', 'Mitcham', 'Carmelo', 'Green', 'Ringsted', 'Timmins', 'Wobulenzi', 'Haddington', 'Lohna', 'West Mifflin', 'Al Musayyib', 'Brugg', 'Jyvaskyla', 'Al Muharraq', 'Heroica Caborca', 'Mapandan', 'Osimo', 'Srem', 'Caxito', 'Tantega', 'Mahajamba', 'Stara Tura', 'Palo Alto', 'Miskolc', 'Dharmapuri', 'Krefeld', 'Sibutao', 'Itinga', 'Kuacjok', 'Zhaodong', 'Ambikapur', 'Cologno al Serio', 'Agua Branca', 'Barskoon', 'Jora', 'Ciudadela', 'Yenice', 'Anderlecht', 'Sunny Isles Beach', 'Pilikodu', 'San Marzano di San Giuseppe', 'Hunenberg', 'Khandpara', 'Bieber', 'Timezgana', 'Sam Phran', 'Yuquan', '`Akko', 'Ittikara', 'Louveira', 'Kalingiyam', 'La Grande-Motte', 'Maebara', 'Billinghurst', 'Bou Adel', 'Leszno', 'Inhumas', 'Atoka', 'Kerou', 'Saint-Benoit', 'Zhangzhou', 'Belazao', 'Senapparetti', 'Cua', 'Tangerang', 'Zebbug', 'Levanger', 'Liminka', 'Punceres', 'Mavinpalli', 'Jharsugra', 'Hassan Abdal', 'Karaburun', 'Zazafotsy', 'Simri Bakhriarpur', 'Caprino Veronese', 'Yorito', 'Makaya', 'Tenango de Doria', 'Minamata', 'Fujimino', 'Vecchiano', 'Tavas', 'Punta Indio', 'San Gregorio Atzompa', 'Ambohimiarivo', 'Hessisch Oldendorf', 'Geretsried', 'Calabozo', 'Kara-Balta', 'Pieta', 'Grande Prairie', 'Los Angeles', 'Dingalan', 'Potengi', 'San Jose de Ocoa', 'Suncheon', 'Fornaka', 'Rehti', 'Bandixon', 'Virapperumanallur', 'North Lindenhurst', 'Sainte-Julie', 'Los Reyes de Salgado', 'Pinamar', 'Mayfield Heights', 'Santa Apolonia', 'Harpur Bhindi', 'Campodarsego', 'Urbana', "Braine-l'Alleud", 'Wukari', 'Martahalli', 'Leppavirta', 'Kastel Stari', 'Sansare', 'Braganca Paulista', 'Bridgeport', 'Cecina', 'Ossett', 'Chepica', 'Koratagere', 'Muthuswamipuram', 'Kamigori', 'Tuchola', 'Fukang', 'Balanga', 'Isola Capo Rizzuto', 'Galmaarden', 'Heinola', 'Gioia Tauro', 'Zelfana', 'Quarai', 'Frauenfeld', 'Makole', 'Puerto Lempira', 'Brandenburg', 'Kot Radha Kishan', 'Armazem', 'Mahaboboka', 'Jingjiazhuang', 'Ongole', 'Magalhaes de Almeida', 'Darat `Izzah', 'Longtian', 'Ahualulco de Mercado', 'Alexander City', 'Iwai', 'Coolbaugh', 'Khambhaliya', 'Narayanavanam', 'Littleton', 'Ciudad Lerdo', 'Martin Coronado', 'Vialonga', 'Hamminkeln', 'Arrah', 'Pianco', 'Villa Elisa', 'Tawaramoto', 'Skien', 'Santa Clara La Laguna', 'Takad Sahel', 'Giardini', 'Barbosa Ferraz', 'Brevnov', 'Fuenlabrada', 'Birhana', 'Kaberamaido', 'Siocon', 'Loja', 'Midori', 'Sambhu Chak', 'Federacion', 'Masan', 'Lousada', 'Laguna Salada', 'Pooler', 'Geisenfeld', 'Orlandia', 'El Castillo de La Concepcion', 'Koriyama', 'Altagracia', 'Minalin', 'San Feliu de Llobregat', 'Uithoorn', 'Rivadavia', 'Castello de Ampurias', 'Zhur', 'Greenock', 'Criuleni', 'Nangola', 'Claymont', 'Sihecun', 'Palmers Green', 'Guatica', 'Mandaue City', 'Pharkia', 'Cambui', 'Placerville', 'Junin de los Andes', 'Omachi', 'Gaspar Hernandez', 'Valinda', 'Fort Walton Beach', 'Arttuvattala', 'Mahela', 'Dowa', 'Kelme', 'Chippenham', 'Metahara', 'Marovantaza', "Saint David's", 'West Windsor', 'Cleburne', 'Itupiranga', 'Trikarpur North', 'Miyanaga', 'Gandhari', 'Tatarbunary', 'Zholymbet', 'Springwater', 'Seekonk', 'Groningen', 'Chocaman', 'Lianzhou', 'Siggiewi', 'Ig', 'Sarh', 'Al Hayy', 'Chum Phae', 'Meridjonou', 'El Copey', 'Salerno', 'Antsampandrano', 'Zahana', 'Stuttgart', 'Razan', 'Hexiang', 'North Merrick', 'Hicksville', 'Palmares Paulista', 'Kalpi', 'Sariyer', 'Nandamuru', 'Guidel', 'Morinda', 'Jinhe', 'Yasinia', 'Nova Friburgo', 'Buriti Alegre', 'Shahin Shahr', 'Chivolo', 'Gosforth', 'Hunchun', 'Gexianzhuang', 'Mareno di Piave', 'Montville', 'Kikuyu', 'Wauwatosa', 'Dhalaa', 'Villa del Carbon', 'Tigbao', 'Aidlingen', 'Mekla', 'Nieuwegein', 'Geldern', 'Pepillo Salcedo', 'Belampalli', 'Karakol', 'Saint-Louis du Nord', 'Bartabwa', 'Zardab', 'Kanakpur', 'Dock Sur', 'Todos Santos Cuchumatan', 'Sunbat', 'Matmata', 'Le Passage', 'Panr', 'Galdacano', 'Wa', 'West Freehold', 'Merville', 'Sonderborg', 'Corovode', 'Tizgane', 'Kadinhani', 'Penalva', 'Piqua', 'Wenwu', 'Castanuelas', 'Jazan', 'Nowe Miasto Lubawskie', 'Velletri', 'Cativa', 'Niihama', 'Carapebus', 'Jai', 'Sao Jose da Laje', 'Southgate', 'Ottaikkalmantapam', 'Ladhuka', 'Kulkent', 'Mobetsu', 'Pincourt', 'Tagoloan', 'Kiseljak', 'Maryport', 'Flores da Cunha', 'Sao Jeronimo', 'Ban Nong Han', 'Kirk of Shotts', 'Diyadin', 'Uchti', 'Marlboro Village', 'El Consejo', 'Dasraha Bhograjpur', 'Hagenow', 'Beniajan', 'Halfway', 'Jiming', 'Antibes', 'Manushmuria', 'Nantucket', 'La Mision', 'Limbang', 'Rubano', 'Tchaourou', 'Repatriacion', 'Danyang', 'Ad Duraykish', 'Osterode', 'Karvetnagar', 'Secunda', 'Kuhbanan', 'Poissy', 'Itupeva', 'Liuhe', 'New Smyrna Beach', 'Okuta', 'Shuangqiao', 'Irati', 'Lampa', 'Gautampura', 'Cacahoatan', 'Mooirivier', 'Spanaway', 'Abu Hardub', 'Bor', 'Dingjiagouxiang', 'Rehburg-Loccum', 'Ambajogai', 'Detmold', 'Yuncheng', 'Boyarka', 'Eden', 'Penne', 'Barghat', 'Ampahimanga', 'Turkmenabat', 'Zarqan', 'Dover', 'Sindirgi', 'Panabo', 'Holliston', 'Tambolaka', 'Ciudad Arce', 'Nadbai', 'Nova Era', 'Bekodoka', 'South Fayette', 'Ampanavoana', 'Mahbubabad', 'Palkonda', 'Alasehir', 'Dongshicuo', 'Uniondale', 'Bhakua', 'Damavand', 'Kami-kawabe', 'Deutschlandsberg', 'Domahani', 'Pallippatti', 'Kaolack', 'Bolhrad', 'Murfreesboro', 'Sankt Georgen im Schwarzwald', 'Antanambe', 'Macajuba', 'Thies', 'Tarlac City', 'Taquaritinga', 'City of Orange', 'Glendora', 'Tufanganj', 'Claremore', 'Zejtun', 'Figueira da Foz', 'Holmen', 'Aiea', 'Runkel', 'Forbe Oroya', 'Ondjiva', 'Tosu', 'Surinam', 'Rahatgarh', 'Winterswijk', 'Sakiet ed Daier', 'Kolonnawa', 'Wawizaght', 'Imisli', 'Horokhiv', 'Shenjiatun', 'Lenguazaque', 'Newington', 'Lecheng', 'Gamboma', 'Manubolu', 'Perumbalam', 'Bougaribaya', 'Dryden', 'Kielce', 'Edeleny', 'Ouidah', 'Comalapa', 'Harrogate', 'Qianwu', 'Nguru', 'South Daytona', 'Corbin', 'Abnub', 'Saren', 'Beydag', 'Monthey', 'Bonheiden', 'August', 'North Miami Beach', 'Aulnay-sous-Bois', 'Aaley', 'Talne', 'Kodungallur', 'San Buenaventura', 'Gerlingen', 'West Earl', 'Pacatu', 'Hallein', 'Nueva Ocotepeque', 'Biassono', 'Garfield Heights', 'Motozintla', 'Gerpinnes', 'Mnasra', 'Riohacha', 'Etacheri', 'Pudur', "Sant'Antonio Abate", 'Steinhagen', 'Sfax', 'San Miguel Chicaj', 'El Congo', 'Falimari', 'Pinhal Novo', 'Ystrad Mynach', 'Wangen im Allgau', 'Qazmalar', 'Severinia', 'Nonea', 'Dargecit', 'Sukumo', 'Cittadella', 'Yamba', 'Sao Luis de Montes Belos', 'Suleswaranpatti', 'Abu Tisht', 'Ploermel', 'Arvika', 'Warud', 'Mazatlan Villa de Flores', 'Inga', 'Jaworzno', 'Canalete', 'Balayan', 'Ankazomborona', 'Nadimpalem', 'Illintsi', 'Laives', 'Juchitan de Zaragoza', 'Ponot', 'Annan', 'Fiorentino', 'Lambarkiyine', 'Linluo', 'Neu-Anspach', 'Rio Maria', 'South Venice', 'Moranbah', 'North Haledon', 'Orchard Park', 'Pico Truncado', 'Gomez Palacio', 'I-n-Amenas', 'Barreiros', 'San Mateo', 'Harran', 'Toccoa', 'Ghoradal', 'Medgidia', 'Roermond', 'Madinat al Habbaniyah', 'San Agustin Tlaxiaca', 'Nakodar', 'La Baneza', 'Citrus Park', 'Guttal', 'Lovington', 'Carahue', 'Vicosa do Ceara', 'Saitama', 'Whistler', 'Amjhera', 'Rio Brilhante', 'Mora', 'I-n-Salah', 'Sao Joaquim do Monte', 'Huanian', 'Alcalde Diaz', 'Bijaynagar', 'Lowton', 'Anjahamarina', 'Tohoue', 'Birkenhead', 'Ghogaon', 'Kadachchinallur', 'Lal-lo', 'Colorado do Oeste', 'Boukoumbe', 'La Source', 'Knittelfeld', 'Guangping', 'Torrington', 'Yako', 'Batesville', 'Dhobipet', 'Ain Kechera', 'Prey Veng', 'Webster Groves', 'Huai Yot', 'Thandewala', 'Koksijde', 'Vosselaar', 'Ashtarak', 'Timaru', 'Myrtle Beach', 'Ban Na Sai', 'Masbate', 'Buloqboshi', 'Trzebnica', 'Biskra', 'Sidi Bousber', 'Jiangjiehe', 'Qinhe', 'Kunnur', 'Palmerston North', 'Kodanad', 'Gulyam', 'Gulou', 'Fort Dodge', "Mek'i", 'Mount Olive', 'Marco de Canavezes', 'Ucuma', 'Kucukkuyu', 'Sendamaram', 'Minacu', 'Quezon City', 'Caxias do Sul', 'Banjarbaru', 'Analamisampy', 'Zhongwangzhuang', 'Morroa', 'Laghzawna', 'Talakulam', 'Pailin', 'Clearfield', 'Soumpi', 'Drochtersen', 'Lourinha', 'Zerbst', 'Beohari', 'Marina di Carrara', 'Sherpur Khurd', 'Ploufragan', 'Sucat', 'Nadikude', 'Live Oak', 'Puerto Casado', 'Mortara', 'Beichengqu', 'Iwanuma', 'Melle', 'Guney', 'Geneva', 'Whitley Bay', 'Chintalavadi', 'Buwama', 'Midlothian', 'Guatire', 'Baghauni', 'Chiang Mai', 'East Ham', 'Lenyenye', 'Shirakawa-tsuda', 'Dibulla', 'Nocera Superiore', 'Paispamba', 'Puerto Princesa', 'Ban Ton Thong Chai', 'Markham', 'Bileh Savar', 'Timmapuram', 'Kone', 'Tchindjendje', 'Bir el Djir', 'Saldus', 'Coalville', 'Waterville', 'North Canton', 'Dalandzadgad', 'New Franklin', 'Presidencia de la Plaza', "M'Chedallah", 'Heves', 'Brodnica', 'Kicevo', 'Niaogho', 'Sao Pedro', 'Connahs Quay', 'Renningen', 'Kaiwen', 'Frejus', 'Nobeji', 'Le Bardo', 'North Fair Oaks', 'Metzingen', 'Thonex', 'Argos Orestiko', 'Erdington', 'Takasaki', 'Storm Lake', 'Ronnenberg', 'Freetown', 'Neuenburg', 'Pinotepa', 'Wood Buffalo', 'Usme', 'Kitaakita', 'Nagykoros', 'Midleton', 'South Ockendon', 'Brahmana Periya Agraharam', 'Mantua', 'Hayange', 'Qarshi', 'Komono', 'Yasuj', 'Cuartero', 'Fuding', 'Oberderdingen', 'Consaca', 'Calatrava', 'San Vicente de Baracaldo', 'Yakakent', 'Sowan', 'Major Isidoro', 'Dongou', 'Monza', 'Millbrae', 'Panshi', 'Roquebrune-sur-Argens', 'Satravada', 'Visby', 'Iseyin', 'Kesarimangalam', 'Greensburg', 'Nova Paka', 'Butterworth', 'Fort Liberte', 'Hongseong', 'Anand', 'Kalamasseri', 'Kazhukambalam', 'Beian', 'Derik', 'Malalag', 'Prakhon Chai', 'Laje', 'Upper Uwchlan', 'Sun Prairie', 'Maghaghah', 'Al Qaryatayn', 'Ghonchi', 'Masabdisa', 'Luan Chau', 'Czersk Pomorski', 'Poggibonsi', 'Privas', 'Sa`adat Shahr', 'Divinolandia', 'Frontino', 'Nurota', 'Kpandae', 'Kamitonda', 'Molln', 'Rogozno', 'Kavital', 'Sarlat-la-Caneda', 'Reserva', 'Ankazotsifantatra', 'Shiraoka', 'Baisha', 'Sodertalje', 'Sao Mateus do Sul', 'Cimahi', 'Nirakpurpali', 'Baluntaicun', 'Aloran', 'Usmat Shaharchasi', 'Jalpura', 'Prairieville', 'Neuenburg am Rhein', 'Pangzawl', 'Oshkosh', 'Kalaidasht', 'El Milia', 'Williams Lake', 'Lillebonne', 'Kalyves Polygyrou', 'Nova Europa', 'Kawit', 'Grande Saline', 'Shuili', 'Khanda', 'Sa`idi', 'Mount Eliza', 'Queensferry', 'Gibara', 'Bathurst', 'Itri', 'Cosquin', 'Dakovo', 'Jimbolia', 'Parsad', 'Tartar', 'Santarem', 'Ingenio', 'Az Za`ayin', 'Avola', 'Campiernagar', 'Hioki', 'Gaizhou', 'Amancio', 'Fort Salonga', 'Ramhormoz', 'Es Senia', 'Konosu', 'Tallaght', 'San Giovanni in Marignano', 'Al Mahallah al Kubra', 'Nashville', 'Nkongsamba', 'Cazenga', 'Teutonia', 'Asthanwan', 'Genappe', 'Neuenstadt am Kocher', 'Grammichele', 'Latteri', 'Aragarcas', 'Thevur', 'Yaglidere', 'Radomir', 'Bearsden', 'Novo Airao', 'Bierun Stary', 'Lagunia Surajkanth', 'Dengtangcun', 'Schwechat', 'Kalkar', 'Cukurca', 'Xiedian', 'Teramo', 'Yonago', 'Samakhiali', 'Pemba', 'Rancho Palos Verdes', 'Barclayville', 'Bridgeton', 'Eslov', 'Shahpur', 'Mandalgarh', 'Nordstemmen', 'Portlethen', 'Opmeer', 'Gemona del Friuli', 'Askim', 'Sidi Moussa', 'Josefina', 'Oskarshamn', 'Kaujalgi', 'Vicente Lopez', 'Mandasa', 'Bordj Menaiel', 'Melgaco', 'Beaufort', 'Catskill', 'Madaba', 'Savoy', 'Rahta', 'Chiran', 'London Grove', 'Malov', 'Serra Caiada', 'Arico el Nuevo', 'Dongping', 'Elumalai', 'Montelimar', 'Weingarten', 'Talbahat', 'Bima', 'Frouzins', 'Kanur', 'Beavercreek', 'Vange', 'Tianwei', 'Westbury', 'Donduseni', 'Teotepeque', 'Felidhoo', 'McCordsville', 'Catamarca', 'Gurpinar', 'Matola', 'Lezajsk', 'Chinna Gollapalem', 'Fervedouro', 'Ambodihara', 'Bagalkot', 'Quillabamba', 'Boundiali', 'Ankara', 'Montfoort', 'Union', 'Pulheim', 'Itako', 'Kampong Tunah Jambu', 'Bandar `Abbas', 'Ghotki', 'Phek', 'Calabar', 'Taft', 'Nong Bua Lamphu', 'Yenkuvarigudem', 'Sebeta', 'Freeport', 'Gehrden', 'Doctor Phillips', 'Leers', 'Mannedorf', 'Linz', 'Carvin', 'Zhengdong', 'Itabera', 'Fujita', 'Fazakerley', 'Rillieux-la-Pape', 'Xikou', 'Grain Valley', 'Chia', 'Taiobeiras', 'Kamianske', 'Novyi Svit', 'Pasni', 'Nawanagar', 'Zwickau', 'Albstadt', 'Tawau', 'Sandnes', 'Hilsa', 'Trollhattan', 'Le Mee-sur-Seine', 'Villa del Rosario', 'Barlinek', 'Dacheng', 'Ardestan', 'Morbegno', 'Bujaru', 'Oued el Abtal', 'Catio', 'Mabalacat', 'Venkidanga', 'Leshou', 'Makedonska Kamenica', 'Pipra Latif', 'Lae', 'Myeik', 'Lisburn', 'Curralinho', 'Cunen', 'Wanparti', 'Marica', 'Muscle Shoals', 'Nykobing Mors', 'Rwamagana', "Jin'e", 'Quitandinha', 'Nilufer', 'Camisano Vicentino', 'Masera di Padova', 'Brockville', 'Ashiya', 'Velenje', 'Chencha', 'Pongotan', 'Rogaska Slatina', 'Barasat', 'Lognes', "N'Goussa", 'Bordeaux', 'Huaquillas', 'Puvalur', 'Pasraha', 'Belladere', 'Tonghae', 'Montebelluna', 'Nihonmatsu', 'Mariano Roque Alonso', 'Doura', 'Xiangxiang', 'Mang La', 'Dumjor', 'Jedrzejow', 'Butler', 'Rebola', 'Douar Hammadi', 'Sala Consilina', 'Alassio', 'Fenoarivobe', 'Pueblo Rico', 'Bramhapuri', 'Mejillones', 'Arcore', 'Wiefelstede', 'Wick', 'Porsa', 'Urdinarrain', 'Seabra', 'Pompton Plains', 'Wells', 'Paete', 'Thorne', 'Moss Vale', 'Kortrijk', 'San Giovanni in Fiore', 'Dhar', 'Bonneuil-sur-Marne', 'Beregadougou', 'Khandaich', 'Arrigorriaga', 'Glossop', 'Neusass', 'Olivenza', 'Chaodongcun', 'Lichtenburg', 'Perupalem', 'Pulimathu', 'Buharkent', 'Lancing', 'Arendal', 'Mpessoba', 'Massillon', 'Lordelo do Ouro', 'Tlaltenango de Sanchez Roman', 'Berkley', 'Tracunhaem', 'Avitanallur', 'Livron-sur-Drome', 'Badoc', 'Salman Bak', 'Vaihingen an der Enz', 'Las Torres de Cotillas', 'Claxton Bay', 'Popasna', 'Astana', 'Fairhope', 'Barras', 'Acala', 'Balti', 'San Bartolome Jocotenango', 'Capalonga', 'Probistip', 'Candoi', 'Dapa', 'Yei', 'Roznov pod Radhostem', "Oulad 'Azzouz", 'Sherobod', 'Andondabe', 'Gedera', 'Mandi Dabwali', 'Le Blanc-Mesnil', 'Elambalur', 'Altlandsberg', 'Chouafaa', 'Sao Jose da Tapera', 'Loimaa', 'Concepcion', 'Dhrol', 'Kulpahar', 'Mililani Town', 'Goondiwindi', 'Varzea Nova', 'Sampgaon', 'Dibraghani', 'Wiwili', 'Elk Grove', 'Iskenderun', 'Tandarampattu', 'Bani Mazar', 'Parramos', 'Northwood', 'Charabidya', 'Villa Sandino', 'Karsiyang', 'Loughborough', 'Tetari', 'Nigde', 'Yarmouth', 'Bonen', 'Monserrato', 'Echirolles', 'Iglesias', 'Senges', 'Tacambaro de Codallos', 'Zwolle', 'Baradero', 'Belize City', 'Ponmundam', 'Vaterstetten', 'Tiddas', 'Uracoa', 'Nandigaon', 'Greven', 'Vila Franca do Campo', 'Dhumnagar', 'Yiwu', 'Casoria', 'Tovuz', 'Yakossi', 'Domont', 'Bai Chay', 'Grimsby', 'Manzini', 'Tiszaujvaros', 'Sapele', 'Lihuzhuang', 'Michalovce', 'Tanamarina', 'Apartado', 'Albu Kamal', 'Griffith', 'Zhashkiv', 'Oroco', 'Falkirk', 'Derassi', 'Vipava', 'Berriane', 'Kiskunhalas', 'Gradiska', 'Doesburg', 'Jamestown', 'Kingston upon Hull', 'Djibloho', 'Sachin', '`Aliabad-e Katul', 'Bouanri', 'Chandpur', 'Godalming', 'Toamasina', 'Basarh', 'Taviano', 'Apricena', 'Takayama', 'Nossen', 'Hashima', 'Staines-upon-Thames', 'Lachen', 'Ecclesfield', 'Kiraz', 'Cercado Abajo', 'Mangalpur Gudaria', 'Anolaima', 'Ban Mae Chedi', 'Namur', 'Bragadiru', 'Cusano Milanino', 'Shirosato', 'Khilchipur', 'Lymington', 'Kuala Kurun', 'Santa Cruz Balanya', 'North Babylon', 'Thogaduru', 'Shirin', 'El Ghourdane', 'Qanliko`l', 'Jangalapalli', 'Mariakerke', 'Kukkundur', 'Coyula', 'Meghauna', 'Ain el Hadjar', 'Sen Monorom', 'Madakasira', 'Shepton Mallet', 'Biggleswade', 'Llallagua', 'Birzai', 'Beneditinos', "Kattaqo'rg'on Shahri", 'La Crucecita', 'Tall Abyad', 'Ar Rahmaniyah', 'Milnrow', 'Shasta Lake', 'San Giovanni Lupatoto', 'Lovejoy', 'Fo-Boure', 'Konigswinter', 'Ironton', 'Sabirabad', 'Sai Wan Ho', 'Ueda', 'Yinchengpu', 'Miandrarivo', 'Horley', 'Djidian', 'Koundara', 'Dereli', 'Sidi Merouane', 'Alwal', 'Debre Tabor', 'Beltsville', 'San Francisco de los Romo', 'Singkawang', 'Kryvyi Rih', 'Cisnadie', 'Altotting', 'Cape Canaveral', 'Mitsuke', 'Baroueli', 'Abrandabad-e Shahediyeh', 'Merkanam', 'Huazalingo', "L'Hay-les-Roses", 'Riorges', 'Serra Dourada', 'Woensdrecht', 'Castelli Calepio', 'Kiruna', 'Fatima', 'Bensonville', 'Weissenhorn', 'San Jose El Idolo', 'Racconigi', 'Lluchmayor', 'Inabe', 'Amgachia', 'Aswapuram', 'Wilmette', 'Zwonitz', 'Haji Shah', 'Nava', 'El Cerrito', 'Tabira', 'Treuchtlingen', 'Ponte da Barca', 'Poitiers', 'Watari', 'As Safirah', 'Handsworth', 'Muniz Freire', 'God', 'Nova Crixas', 'Tujg', 'Jakobstad', 'San Bartolo Tutotepec', 'Kennewick', 'Baianopolis', 'Las Navas', 'Paracelis', 'Moniquira', 'Porumamilla', 'Arroyo Seco', 'Bom Sucesso', 'West Haven', 'Kinik', 'Nangavalli', 'Draksharama', 'Wells Branch', 'Shumanay', 'Zlocieniec', 'Monte di Procida', 'Novodnistrovsk', 'Chitungwiza', 'San Pedro de Macoris', 'Mombin Crochu', 'Dumbarton', 'Ostersund', 'Le Grand-Saconnex', 'Uijeongbu', 'Lomianki', 'Piazza Armerina', 'Butebo', 'Kirsehir', 'Huamachuco', 'Ismailpur', 'North Amityville', 'Lanquin', 'Rio Negrinho', 'Ukrainsk', 'Rockingham', 'Zarasai', 'Terneuzen', 'Musiri', 'Badanahatti', 'Edayikunnam', 'Caridade', 'Jensen Beach', 'Kuriyama', 'Antarvedi', 'Amos', 'Byatarayanpur', 'Sao Domingos do Maranhao', 'Tarutung', 'Worthington', 'Fujiidera', 'Gerd Faramarz Shahediyeh', 'Buchach', 'Igarape', 'Lincolnton', 'Ameskroud', 'Bad Oldesloe', 'Yorii', 'Juticalpa', 'Lakshmaneswaram', 'Catanduvas', 'Ryhope', 'Wesselsbron', 'As Suwayhirah as Sahil', 'Selvazzano Dentro', 'Tanxia', 'East Hampton', 'Agua Fria', 'Garanhuns', 'Saint-Lazare', 'Char Fasson', 'Malanguan', 'Waco', 'Kanaya', 'Totolapan', 'Bloemhof', 'Tokkavadi', 'Nogent-sur-Marne', 'Zhuolan', 'Nandyalampeta', 'Invercargill', 'Trabia', 'Boromo', 'Blackfalds', 'General Trias', 'Kalakkadu', 'Ellesmere Port', 'Gampola', 'Macclesfield', 'Bachhauta', 'Unzen', 'Balbalan', 'Maimon', 'Minanba', 'Andalucia', 'Baclaran', 'Sumqayit', 'Amargosa', 'Ghatawan', 'Alajuela', 'Maroaloka', 'Bongandanga', 'Papagos', 'Bairia', 'Ghagga', 'Lillehammer', 'Iizuna', 'Kifosso', 'Crailsheim', 'Zvornik', 'Devgadh Bariya', 'Imola', 'Estreito', 'Haga', 'Toyoyama', 'Abeokuta', 'Laoang', 'West Seneca', 'Kharhial', 'Menzel Temime', 'Jankinagar', 'El Carmen de Chucuri', 'Huari', 'Maua', 'Pekalongan', 'Coltauco', 'Carmen de Apicala', 'Cacador', 'Andohajango', 'Yangiqo`rg`on', 'Yesagyo', 'Sahuayo de Morelos', 'Nijmegen', 'West Whittier-Los Nietos', 'Santa Rosa del Sur', 'Tharial', 'Boves', 'Bodhan', 'Bagula', 'Gobabis', 'Ouahigouya', 'Maubeuge', 'Eldorado', 'Bakhtawarpur', 'Ternivka', 'Kasserine', 'Sayula de Aleman', 'Fallanden', 'Fonseca', 'Lipari', 'Bou Djeniba', 'Jagraon', 'Parrita', 'Erongaricuaro', 'Mizumaki', 'Dieramana', 'Moyamba', 'La Linea de la Concepcion', 'Kiffa', 'Silleda', 'Port Said', 'Gross-Gerau', 'Ilakatra', 'Wooster', 'Sicasica', 'Eureka', 'Dodworth', 'Hereford', '`Amran', "La Chapelle d'Armentieres", 'Pelsall', 'Borshchiv', 'Kaiyuan', 'Pandag', 'Mortagua', 'Errachidia', 'Gaya', 'Dietzenbach', 'An Najaf', 'Dungarpur', 'Oued Seguin', 'Sampaloc', 'Traralgon', 'Andalatanosy', 'Gillingham', 'Mudon', 'Deville-les-Rouen', 'Xicheng', 'Afonso Claudio', 'Daanbantayan', 'Horqueta', 'Bahraigh', 'North Brunswick', 'Marotsiraka', 'Ribnita', 'North Royalton', 'Kalemie', 'El Oro de Hidalgo', 'Woods Cross', 'Tsiamalao', 'Shankarampet', 'Tecpan de Galeana', 'Humlebaek', 'Phatthalung', 'Meddappakkam', 'Raha', 'Sensuntepeque', 'Paidiipalli', 'Itatira', 'Siddapur', 'Catemaco', 'Joue-les-Tours', 'Amal', 'Bara Belun', 'Hidalgotitlan', 'Algeciras', 'Carbonita', 'Kulrian', 'Kasaishi', 'Pasir Gudang', 'Yingchuan', 'Owo', 'Saanich', 'Altos', 'Dana Point', 'Morro da Fumaca', 'Copparo', 'Gerze', 'Werneck', 'Athis-Mons', 'Yanam', 'Palmilla', 'Port St. John', 'Kapay', 'Pottipuram', 'Vaniyambadi', 'Glenmont', 'Ocara', 'Huasuo', 'Raver', 'Manouba', 'Volklingen', 'Calcado', 'Kattiyeri', 'West Boldon', 'Asaita', 'Maradah', 'Nagasu', 'Mundelein', 'Boerne', 'Kuji', 'Voi', 'Hafnarfjordhur', 'Huatabampo', 'Juliaca', 'Parkland', 'Kedougou', 'Suqian', 'Caicedonia', 'Dhing', 'Azambuja', 'Sekimachi', 'Jasauli', 'Benxi', 'Ifanhim', 'Sparti', 'Reichelsheim', 'Beni Khalled', 'Salkhua', 'Rotenburg an der Fulda', 'Cheticheri', 'Dorking', 'Al Majma`ah', 'Ghajnsielem', 'Yomou', 'Monsummano', 'Richton Park', 'Capbreton', 'Curitibanos', 'East Brandywine', 'Karmana', 'As Salamiyah', 'Pallippurattuseri', 'Mareeba', 'Port Sudan', 'Jumilla', 'Schleswig', 'Cedar Lake', 'Richmond West', 'Puqi', 'Sbeitla', 'Vaddapalli', 'River Grove', 'Northbridge', 'Erkoklu', 'Miliana', 'Autun', 'Lohja', 'Ixtaczoquitlan', 'Tczew', "Shaqra'", 'Djugu', "Ben N'Choud", 'East Kilbride', 'Ciudad Melchor Muzquiz', 'Tezpur', 'Aldershot', 'Tankara', 'Honganur', 'Nikshahr', 'Jeffersonville', 'Portage', 'Waremme', 'Bacuag', 'Bhajani', 'Kutno', 'Kinston', 'Macabebe', 'Greater Napanee', 'Isla Vista', 'Santona', 'Rimavska Sobota', 'Sultanganj', 'Mittweida', 'Humayingcun', 'Buon Ma Thuot', 'Meaux', 'Beibu', 'La Vergne', 'Warri', 'Huaibaijie', 'Yasica Arriba', 'Zayda', 'East Whittier', 'Wasquehal', 'Elkton', 'Pasupatikovil', 'Leuna', 'Jamtara', 'Baildon', 'Lobo', 'Karor', 'Havsa', 'Puente de Piedra', 'Santana do Ipanema', 'Strombeek-Bever', 'Egg', 'Charter Oak', 'Romit', 'Sevenoaks', 'Toshima', 'Lashio', 'Tocina', 'Phu My', 'El Fula', 'Dimona', 'Leopoldshohe', 'Marungur', 'Goleniow', 'Ouangani', 'Dumalinao', 'Sulz am Neckar', 'Colchester', 'Tarin Kot', 'Bhairahawa', 'Jaguaretama', 'Shin', "Pallazzolo sull'Oglio", 'Rajnagar', 'Massi', 'Redlynch', 'Badin', 'Paredes de Coura', 'Malambo', 'Abjij', 'Bekasi', 'Shaomi', 'Salzano', 'Harlow', 'Flower Hill', 'Soasio', 'Oakton', 'Lukula', 'Garhakota', 'Monteforte Irpino', 'Shamsabad', 'Marsala', 'Lauri', 'Lala', 'Siniscola', 'Huixtla', 'Taxisco', 'Pesaro', 'Khapalu', 'Oyster Bay', 'Ivanhoe', 'Xai-Xai', 'Medulla', 'Naini', 'Santa Cruz del Quiche', 'Saint-Gratien', 'Sante Bennur', 'Al Brouj', 'Raje', 'Perladka', 'Ban Laem Chabang', 'Gora', 'Monitos', 'Panchanandapur', 'Pandacan', 'Surbiton', 'Sandy', 'Wittenheim', 'Gujranwala', 'Zaria', 'Hirayama', 'Neyveli', 'Guane', 'Oak Forest', 'Suzhou', 'Tulle', 'Prantij', 'Rocca di Papa', 'Tielt', 'Bleicherode', 'Ponte San Pietro', "'Ain el Turk", 'Bagou', 'Taragi', 'Ukal', 'Ruy Barbosa', 'Siroki Brijeg', 'Pabellon de Arteaga', 'Granite City', 'Ait Majdane', 'Hirehaluhosahalli', 'Pinabacdao', 'Kawa', 'Phaltan', 'Piombino Dese', 'Belmopan', 'Kodad', 'Bozova', 'Quibala', 'Nakur', 'Berkovitsa', 'Chiredzi', 'Sanmenxia', 'Bugdayli', 'Nersingen', 'Palma Campania', 'Paceco', 'Isesaki', 'Lopatcong', 'Pecan Grove', 'Thomaston', 'Sri Jayewardenepura Kotte', 'Quang Yen', 'Greeley', 'Omidiyeh', 'Namakkal', 'Macuro', 'Neuenkirchen', 'Goure', 'Kehen', 'Dongyangshi', 'Fiumefreddo di Sicilia', 'Alta Floresta', 'Ban Na Chom Thian', 'Viet Tri', 'Kashgar', 'Shawinigan', 'Danghara', 'Belgrade', 'Puerto Tejada', 'Tulin', 'Agblangandan', 'Poranga', 'Lagos', 'Mallig', 'Ciftlik', 'Sao Francisco de Paula', 'Kiyama', 'Crespo', 'Bad Salzungen', 'Doganhisar', 'Indargarh', 'Miandrivazo', 'Mondovi', 'Quthing', 'Guipos', 'Dikhil', 'Polakala', 'Ambaliha', 'Anuppampattu', 'Aquitania', 'La Rinconada', 'Paramaribo', "Ma'anshan", 'Uniao da Vitoria', 'Cuihua', 'Areka', 'Huldenberg', 'Gudlavalleru', 'Alasandigutta', 'Changtoushang', 'Segni', 'Durlesti', 'Chivhu', 'Carmen de Carupa', 'Prachin Buri', 'Buzhum', 'Phalodi', 'Munak', 'Rewari', 'La Gomera', 'Yuhuan', 'Woburn', 'Aghbal', 'Santa Maria a Vico', 'Bogo', 'Mendon', 'Kong', 'Benapol', 'Sakoueba', 'Uruana', 'Chengam', 'Freising', 'Nobsa', 'Ciechocinek', 'Chengjiao', 'Cerca Carvajal', 'Matawan', 'Al Madamud', 'Mettmann', 'Yakumo', 'Bir Mogrein', 'Puerto Acosta', 'Nunna', 'Gunjapalle', 'Collier', 'Mateszalka', 'Bento Goncalves', 'Umbrete', 'Kapakli', 'Padada', 'Oud-Turnhout', 'Karukh', 'Fort Meade', 'Domanic', 'Emsburen', 'Braunsbedra', 'Sacramento', 'Vidauban', 'Ranomafana', 'Chone', 'Malapannanagudi', 'Siralkoppa', 'Mequon', 'Rottingdean', 'Lindenwold', 'Chanal', 'Wakuya', 'Malaba', 'Chaabat el Leham', 'Huinan', 'Port-Vila', 'Silopi', 'Tanabi', 'Yelmalla', 'Siteki', 'Casiguran', 'Kennedale', 'Kenduadih', 'Intibuca', 'Fos-sur-Mer', 'Nueva Era', 'Jicheon', 'Rovigo', 'Acipayam', 'Bajala', 'Drama', 'Shiv', 'San Giorgio a Cremano', 'Antonio Dias', 'Jalajala', 'Ocsa', 'Oviedo', 'University City', 'Boquira', 'Gaura', 'Naaldwijk', 'Manteswar', 'Farmingville', 'Bokonbaev', 'Qingxicun', 'Scharbeutz', 'Hamrun', 'Lake Barcroft', 'Adeje', 'Saint Austell', 'Jarrow', 'Kandira', 'Lincang', 'Orsay', 'Affoltern am Albis', 'Bilozerka', 'Lishaocun', 'Ochanthururtha', 'Koekelare', 'Vengikkal', 'Dysselsdorp', 'New Scotland', 'Isfara', 'Cuers', 'Derry', 'Tampere', 'Debiapur', 'Yandian', 'Awan Patti', 'Kara-Kyshtak', 'Oguz', 'Qualiano', 'Jose Maria Ezeiza', 'Pfaffenhofen', 'Sidi Ali', 'Arayat', 'Carvajal', 'Palakollu', 'Jingzhou', 'Owerri', 'Bato', 'Yanyan', 'Dasmarinas', 'Srifa', 'Duzhuang', 'Qazi Ahmad', 'Brook Park', 'Market Drayton', 'Kitahiroshima', 'Swindon', 'Pilibhit', 'Hoyland', 'Georgsmarienhutte', 'Chigorodo', 'Abu Qir', 'Marysville', 'Singerei', "Town 'n' Country", 'Jangany', 'Atwater', 'Fuchucho', 'Rutana', 'Shamgarh', 'Cotegipe', 'Atascadero', 'Maisons-Alfort', 'Bela Vista de Minas', 'Kopervik', 'Madugula', 'Semra', 'Binh Hoa', 'Krnov', 'Itapaci', 'Sao Raimundo Nonato', 'Sint-Joost-ten-Node', 'Maski', 'Regen', 'Folkestone', 'Rardhu', 'Toyohashi', 'Duragan', 'Guelendeng', 'Bugongi', 'Montecchio Emilia', 'Mahwah', 'Dayrut', 'Papeete', 'Worsley', 'Mponela', 'Barakpur', 'Broken Hill', 'Sakiet ez Zit', 'Glen Burnie', 'Negapatam', 'Itahari', 'Kambainellur', 'Cienega de Flores', 'Jau', 'Xalatlaco', 'Dalaman', 'Thi Tran Dong Dang', 'Zacatlan', 'Changbang', 'Chikugo', 'Uden', 'Kansas City', 'Sremska Mitrovica', 'Pizhou', 'Fannuj', 'Slany', 'Soye', 'Painal', 'Station des Essais M.V.A.', 'Egg Harbor', 'Morsi', 'Araioses', 'Escondido', 'Castelnuovo Berardenga', 'Ijra', 'Ndora', 'Ain Taya', 'Essaouira', 'Kimba', 'Taher', 'Bouar', 'Kyrenia', 'Chilca', 'Quatis', 'Badulla', 'Kalpatta', 'Turffontein', 'Potunuru', 'Karuhatan', 'Amalfi', 'Wuppertal', 'Cayo Mambi', 'Riva del Garda', 'Boucau', 'Sejong', 'Elesbao Veloso', 'Jiaojiangcun', 'Pullanpallikonam', 'Borborema', 'Corella', 'Schkeuditz', 'Gurgenpinari', 'Buddayyakota', "Zd'ar nad Sazavou", 'Jiangmen', 'Tallahassee', 'Ciudad Lazaro Cardenas', 'Peabiru', 'Ghomrassen', 'Sultan Kudarat', 'Brunswick', 'Brookes Point', 'Jihlava', 'Kotharpettai', 'Koumia', 'Hasanpur', 'Riachao do Dantas', 'Tinqueux', 'Stillwater', 'Tempoal de Sanchez', 'Weiterstadt', 'Ichikikushikino', 'Atlapadu', 'Kiashahr', 'Bertioga', 'Centro Habana', 'Makassar', 'Rajakheri', 'Kuhbil', 'Baldwin Park', 'Chunchura', 'Shepshed', 'Yakima', 'Rayagada', 'Saltpond', 'Karamadai', 'Sainte-Suzanne', "St. John's", 'Peddannavaripalle', 'Ban Houayxay', 'Pir Maker', 'Bagewadi', 'Chemancheri', 'San Luis Rio Colorado', 'Fullerton', 'Khargram', 'Yacuiba', 'Rancho Grande', 'Giurgiu', 'San Blas Atempa', 'Koulamoutou', 'Jwaneng', 'Kamalapuram', 'Crna Trava', 'Brooklyn Park', 'Singapperumalkovil', 'Kiban', 'Budalur', 'Yeonil', 'Nsele', 'Cumra', 'Baie-Comeau', 'Avila', 'Chantilly', 'Tongyangdao', 'Izamal', 'Yalaguina', 'Obersiggenthal', 'Sadalgi', 'Cuprija', 'Laufenburg (Baden)', 'Bukan', 'Siyana', 'Kafue', 'Portmore', 'Perchtoldsdorf', 'Catemu', 'Kaimana', 'Demir Hisar', 'Tabor', 'Alakamisy', 'Bayt Lahya', 'Rosiori de Vede', 'Sarnia', 'Venustiano Carranza', 'Nunchia', 'Kankakee', 'Chittur', 'Duma', 'Bou Arada', 'Burton', 'Itatiaiucu', 'Mason', 'Araputanga', 'San Juan Tecuaco', 'Meghaul', 'Abhwar', 'Zhujiacun', 'Saline', 'Fethiye', 'Leeds', 'Ogori', 'Keffi', 'Karanja', 'Heiligenhaus', 'Haci Zeynalabdin', 'Gubin', 'Chenab Nagar', 'Gormi', 'Lydenburg', 'Neuhaus am Rennweg', 'Kings Grant', 'Umi', 'Dengka', 'Gangaur', 'Atsugicho', 'Ibshaway', 'Mananasy-Tsitakondaza', 'Itacarambi', 'Minowa', 'Corzuela', 'Vanduvancheri', 'Bohl-Iggelheim', 'Shinile', 'Wilde', 'Montlucon', 'Latham', 'Picana', 'Tuao', 'Drapetsona', 'Mountain Brook', 'Montechiarugolo', 'Kasba Tanora', 'Rajghat Garail', 'Kasumpti', 'Devirammanahalli', 'Singalandapuram', 'Chevy Chase', 'Aspropyrgos', "'Ain Taghrout", 'Delft', 'Beining', 'Blainville', 'Krasnohrad', 'Siem Reap', 'Betamcherla', 'Labin', 'Pompei', 'Gospic', 'Hakui', 'Gomaringen', 'Saint-Brevin-les-Pins', 'Langtang', 'Mangalapadi', 'Santa Lucia del Camino', 'East Whiteland', 'Morioka', 'Bouira', 'Coria del Rio', 'Varzea Paulista', 'Araira', 'Thana Bhawan', 'Ain Taoujdat', 'Mumbwa', 'Comandante Luis Piedra Buena', 'Powell River', 'Nampa', 'Nisang', 'Piran', 'Bude', 'Merosina', 'Kamez', 'Fairborn', 'Abhar', 'Giffoni Valle Piana', 'Jose Bonifacio', 'Andhana', 'Kisanuki', 'Landover', 'Dekanme', 'Raposos', 'Columbio', 'Flieden', 'Antalaha', 'Impruneta', 'Pyapali', 'Butuan', 'Hlevakha', 'Braslovce', 'Kalajoki', 'Minami-Boso', 'Tocumen', 'Matan', 'Zapopan', 'Calbayog City', 'Liloy', 'Durgi', 'Byadgi', 'Vero Beach South', 'Hongtuliang', 'Baharu', 'Hounslow', 'Joquicingo', 'Cranleigh', 'Cassilandia', 'Burton Latimer', 'Poco Fundo', 'Starogard Gdanski', 'Paikpara', 'Calera', 'Gyumri', 'Higashine', 'Hialeah Gardens', 'Moravce', 'Lac', 'Poruvakara', 'Cadaval', 'Pasig City', 'Montpellier', 'Nantang', 'Baruni', 'Imani', 'Rommerskirchen', 'Orizaba', "Saint Ann's Bay", "Olho d'Agua das Flores", 'Amvrosiivka', "M'dhilla", 'Abulug', 'Helmstedt', 'La Chapelle-Saint-Luc', 'Delitzsch', 'Gurmaila', 'Sainte-Anne-des-Plaines', 'Gardhabaer', 'Sodankyla', 'Chandrakona', 'Leczyca', 'Madhipura', 'Khargapur', 'Cruz do Espirito Santo', 'Gloucester City', 'Latisana', 'Doranda', 'Holly Springs', 'Mechtras', 'Grenoble', 'Red Bluff', 'Titu', 'Karavalur', 'Chiquimulilla', 'Kirkcaldy', 'Soledad de Doblado', 'Ben Nasseur', 'Miyato', 'Temecula', 'Araripe', 'Ambatoharanana', 'Union Park', 'Cuemba', 'Birkirkara', 'Ambohitsilaozana', 'La Louviere', 'Enniscorthy', 'Ishkashim', 'Touna', 'Presidente Olegario', 'Maranguape', 'Mati', 'Sainte-Catherine', 'Nerja', 'Freiburg im Breisgau', 'Cairo Montenotte', 'Uchchangidurgam', 'Jhansi', 'Elmali', 'Al Jabin', 'Orono', 'Wickliffe', 'Lontras', 'Mound', 'Cosio', 'Drummondville', 'Royal Tunbridge Wells', 'Dolneni', 'Tabogon', 'Sosnowiec', 'Badrashni', 'Papanduva', 'Kalayaan', 'Zottegem', 'General Salgado', 'Ozatlan', 'Southchase', 'Santa Adelia', 'Aribinda', 'Nakanojomachi', 'Coroata', 'Marovatolena', 'Saubara', 'Pingtiancun', 'Syosset', 'Dinslaken', 'Carpentras', 'San Martino di Lupari', 'Tabarka', 'Yulara', 'Ratzeburg', 'Punta Gorda', 'San Rafael del Yuma', 'Arima', 'Jannat Shahr', 'El Escorial', 'Honwada', 'Bhagwanpur Khurd', 'Ayutla de los Libres', 'Guaiba', 'Garzon', 'Tarangnan', 'Marktheidenfeld', 'Ban Chang Phuak', 'Aurad', 'Epazoyucan', 'Tone', 'Seforong', 'Lichtervelde', 'Seriate', 'Suileng', 'Viraganur', 'Novi Banovci', 'Knaresborough', 'Bulisa', 'Shaqlawah', 'Kingstowne', 'Zalec', 'Siano', 'Flexeiras', 'Ar Raqqah', 'Kercem', 'El Kef', 'Cicero Dantas', 'Schonefeld', 'Neves Paulista', 'Fish Hawk', 'Akjoujt', 'La Courneuve', 'Biliran', 'Telkap', 'Sher Muhammadpuram', 'Arapoti', 'Laupheim', 'Boden', 'Ratau', 'Rudesheim am Rhein', 'Xintai', 'Temerin', 'Rebordosa', 'Bernolakovo', 'Sulat', 'Ellwangen', 'Gates', 'Merano', 'Nacozari de Garcia', 'Camp Springs', 'Taraka', 'Yutiancun', 'Soanpeta', 'Navgilem', 'Nagakute', 'Ipetumodu', 'Minnetonka', 'The Acreage', 'American Canyon', 'Tripurantakam', 'Acara', 'Greer', 'Conception Bay South', 'Oiwa', 'Ukhai Purbari Patti', 'My Hoa', 'Miklavz na Dravskem Polju', 'Porbandar', 'As Samawah', 'Tamba', 'Namangan', 'Alluru', 'Zinzana', 'Karasburg', "Ich'on", 'Vojnik', 'San Miguel Siguila', 'Lome', 'Vieiro', 'Hardas Bigha', "Jalawla'", 'Belfast', 'Sotik Post', 'Hejiaji', 'Lovech', 'Rokupr', 'Yalda', 'Mangaldai', 'Goodyear', 'Kolakaluru', 'Bonney Lake', 'Alcobaca', 'Bideford', 'Godawari', 'Parnaiba', 'Eskilstuna', 'Conceicao do Almeida', 'San Carlos Park', 'Dolyna', 'East Liverpool', 'Ceylanpinar', 'Adachi', 'Pottassheri', 'Kauhajoki', 'Rio Paranaiba', 'Patos de Minas', 'Saint-Jean-de-Monts', 'Irapuato', 'Zhuolu', 'Miadanandriana', 'Inashiki', 'Hildesheim', 'Cold Springs', 'Merksplas', 'Engen', 'LaSalle', 'Uyo', 'Vinh Chau', 'Parsuram', 'Yoshimi', 'Jaypul', 'Minooka', 'Mehtar Lam', 'Corsico', 'Wanding', 'Corbas', 'Shaogang', 'Lusanga', 'Xining', 'Navalgund', 'Binh Minh', 'Dindigul', 'Doddanahalli', 'Tefenni', 'Samdrup Jongkhar', 'Taylorsville', 'Bulicun', 'Lisieux', 'Valley Falls', 'Salpazari', 'Pianoro', 'Kedia', 'Green Valley', 'Cape Breton', 'Rencun', 'Bandar-e Genaveh', 'Beaverton', 'Purranque', 'My Tho', 'Wendlingen am Neckar', 'Mohon', 'Ngaoundere', 'Boryslav', 'Antonio Prado', 'Ramgarh', 'Gdynia', 'Hoyland Nether', 'Cori', 'Adakli', 'Na Wa', 'Ramla', 'Ellicott', 'Nowa Sol', 'Laakirchen', 'Jalalpur Bhattian', 'Jaragua do Sul', 'Ilic', 'Guimar', 'Bullas', 'Dalawarpur', 'Anuradhapura', 'Carnaubal', 'Yokosuka', 'Lake Mohawk', 'Phulwar', 'Bohemia', 'Imzouren', 'Tibigan', 'Apostolove', 'Cantley', 'Mbuji-Mayi', 'Uarini', "Qarah Zia' od Din", 'Cortes', 'Cullercoats', 'Reggio di Calabria', 'Figline Valdarno', 'Mutsu', 'Maiquinique', 'New Panamao', 'Odder', 'Mar de Espanha', 'Bukedea', 'Rath To', 'Choppington', 'Irugur', 'Padang Besar', 'Cine', 'Beckett Ridge', 'Baghuz Fawqani', 'Campos del Puerto', 'Bandar-e Torkaman', 'Sakon Nakhon', 'San Juan de Alicante', 'Benicarlo', 'Beuningen', 'Vrable', 'Hennebont', 'Mamobihat', 'Three Springs', "Saint-Cyr-l'Ecole", 'Brod', 'Taourirt', 'Sliven', 'Stanytsia Luhanska', 'Ruston', 'Plauen', 'Sarasota Springs', 'Sao Gotardo', 'Beloeil', 'Liuguang', 'Bani Suhayla', 'Pilar', 'Sao Joao do Araguaia', 'Teseney', 'Domzale', 'Feira', 'Ambatoria', "Guang'an", 'Carice', 'Loha', 'Carmo do Rio Claro', 'Calle Larga', 'Ban Yang Hom', 'Ilha de Mocambique', 'Barwani', 'Lowes Island', 'Liaocheng', 'Shergarh', 'La Loggia', 'Halden', 'Khowrzuq', 'Sahavalanina-Antenina', 'Alum Rock', 'Kilakkarai', 'Finsterwalde', 'Ouedo', 'Tlalnepantla', 'Tafi Viejo', 'Minnampalli', 'Santa Comba', 'Sant Joan de Vilatorrada', 'Marl', 'Hulkoti', 'Libourne', 'Orasje', 'Svolvaer', 'Villa Curuguaty', 'Velden am Worthersee', 'Riverton', 'Selmane', 'Jamshedpur', 'Sinsina', 'Amity', 'Kani-Bonzon', 'Ilopango', 'Kiliia', 'Trelew', 'Kowdalli', 'Sao Martinho do Bispo', 'Bayabas', 'Marcolandia', 'Hemel Hempstead', 'Titlagarh', 'Thathupur', 'Kavieng', 'Pepa', 'Mangarwara', 'Wanderlandia', 'Sokolka', 'Nara', 'Koror', 'Mbamba Bay', 'Wezembeek-Oppem', 'Tanjombato', 'Eichstatt', 'Independence', 'Kagoshima', 'Velingara', 'Ouistreham', 'St. Stephens', 'Altinova', 'Juchipila', 'Jiaxing', 'Millcreek', 'Ameca', 'Palmacia', 'Fiano Romano', 'Costa Volpino', 'Rioja', 'Kalkara', 'Zanica', 'Husainabad', 'Naters', 'Mistrato', 'Ambahive', 'Savanna-la-Mar', 'Irinjalakuda', 'Padappakara', 'Chiantla', 'Cidade Gaucha', 'Walton-on-the-Naze', 'Habiganj', 'Irukanni', 'Tekpanja', 'Ellisville', 'Luancheng', 'Rudauli', 'Abu al Matamir', 'Nabas', 'Kagadi', 'Los Llanos', 'Ilovaisk', 'Ar Rumaythah', 'Melun', 'Catral', 'Canico', 'Pandharpur', 'Hiroshima', 'Don Bosco', 'Billericay', 'Mentana', 'Sandravinany', 'Pacos de Ferreira', 'Darsi', 'Tuineje', 'Medinipur', 'Mensora', 'Nikaia', 'Tan Chau', 'Ma`alot Tarshiha', "Huai'an", 'Spinea', 'Silvia', 'Alcarraz', 'Dongxishan', 'Putrajaya', 'Two Rivers', 'Inacio Martins', 'Kharahara', 'Jakarta', 'Hurth', 'Giengen an der Brenz', 'New Delhi', 'Morlenbach', 'Krus na Ligas', 'Tornquist', 'Hacine', 'Yanji', 'Fraiburgo', 'Gursu', 'San Giljan', 'Verin', 'Moissac', 'Baghambarpur', 'Bhatpara', 'Pontinha', 'Cunha Alta', 'Riacho das Almas', 'Yadwad', 'St. Albert', 'Agno', 'Sitarganj', 'Budhni', 'Comayagua', 'Matagua', 'Remagen', 'Aigle', 'Forest Hills', 'Wangtan', 'Avanhandava', 'Karnobat', 'Chanp', 'Amherst', 'Duiven', 'Bilohirsk', 'Pleternica', 'Marogong', 'Kefar Sava', 'Quilmes', 'Conchagua', 'Al Khankah', 'Dulken', 'Lobatse', 'Utehia', 'Sotomayor', 'Traunreut', 'Aleksandrovka', 'Hetane', 'Bad Oeynhausen', 'Pescara', 'Ashkezar', 'Seiro', 'Torrejon de Ardoz', 'Antanimora Atsinanana', 'Yiewsley', 'Fojnica', 'Iharana', 'Dutse', 'Ban Lao Yao', 'Dirusumarru', 'Tame', 'Beledweyne', 'San Andres Xecul', 'Formia', 'Mengla', 'Croix', 'Kraslava', 'Pedagadi', 'Denbigh', 'Barharwa Kalan', 'Alexania', 'Sakhnin', 'Yenagoa', 'Kaya', 'Karlstad', 'Bundibugyo', 'Harrai', 'Dragor', 'San Jose de Las Matas', 'Gabela', 'Alpu', 'Karema', 'Yevlax', 'Allanmyo', 'Ghafurov', 'Brandis', 'Pulupandan', 'Eidsvoll', 'Hinunangan', 'Manmad', 'Guledagudda', 'Bangshang', 'Kujwa', 'Fairview', 'Arani', 'Spoltore', 'Peiting', 'Panj', 'Sahavato', 'Cramlington', 'Poggio a Caiano', 'Sisauna', 'Mamushe', 'Tsevie', 'Barikot', 'Tecax', 'Sivapuram', 'Kalbacar', 'Mitry-Mory', 'Biguacu', 'Trowbridge', 'Hasanganj', 'Chamblee', 'Sid', 'Alboraya', 'Pinar del Rio', 'Calvillo', 'Mercedes', 'Chapel en le Frith', 'Kalat', "Ra's al Khaymah", 'Mapoteng', 'Ponta de Pedras', 'Glenfield', 'Temple Terrace', 'Tlokweng', 'Raun', 'Aquin', 'Boiling Springs', 'Slobozhanske', 'Truckee', 'Nuremberg', 'Yanghe', 'Charqueadas', 'Clarkston', 'Wallingford Center', 'Colleferro', 'Luxor', 'Gadsden', 'Dautphe', 'Macedo de Cavaleiros', 'Venissieux', 'Shamva', 'Chesapeake', 'Felsberg', 'East Los Angeles', 'Monte Caseros', 'Misawa', 'Souaflia', 'Bouguirat', 'Talara', 'Iarinarivo', 'Atalaia do Norte', 'Pangunattam', 'Little Ferry', 'Oulad Tayeb', 'Masar', 'Melena del Sur', 'Miura', 'Panihati', 'Andresy', 'Ramayampet', 'Basoko', 'Thala', 'Chifeng', 'Majhaul', 'Nediyanad', 'Chaumont', 'Belen de Escobar', 'Karak', 'Araxa', 'Canteleu', 'Vanrhynsdorp', 'Achaguas', 'Norwood', 'Kandahar', 'Santa Teresa', 'Salmon Arm', 'Kalutara', 'Greene', 'Harrisonburg', 'Lohur', 'Dingman', 'Phon Charoen', 'Farakka', 'College Place', 'Wodzislaw Slaski', 'Alimodian', 'Kemi', 'Khapdeh', 'Xaafuun', 'Pongalur', 'Chandwara', 'Tecali', 'Villacanas', 'Kalluru', 'Bouarouss', 'Misantla', 'Mazinde', 'Rixensart', 'Satellite Beach', 'Jinhua', 'Reedsburg', "Togoch'ale", 'Marchena', 'Wan Long', "Lyuboml'", 'Ivancice', 'Wendelstein', 'Helston', 'Manduria', 'Shama', 'Sedriano', 'Cheruvaranam', 'Nechi', 'Mandu', 'Binefar', 'Sha Kok Mei', 'Muta', 'Caripito', 'Petrolina de Goias', 'Lianhe', 'Brock', 'Schonaich', 'Aloguinsan', 'Acapulco de Juarez', 'Ayt Mohamed', 'Poplar Bluff', 'Limburgerhof', 'Jaggampeta', 'Ostroleka', 'Kalamboli', 'Ash Shihr', 'Sasaima', 'Vesele', 'Lac-Brome', 'Matnog', 'Skokie', "Ha'il", 'Tzaneen', 'Greenford', 'Savanur', 'Permet', 'Mallapur', 'Ladyzhyn', 'Dama', 'Angers', 'Montigny-le-Bretonneux', 'Xiaodian', 'Narok', 'Eching', 'Castel San Pietro Terme', 'Itondy', 'Alcudia', 'Periyanayakkanpalaiyam', 'Artigues-pres-Bordeaux', 'Marikina Heights', 'Ben Zakkay', 'Cachipay', 'Hertzogville', 'Daman', 'Wilsdruff', 'Ben Guerir', 'Sciacca', 'Signa', 'Tuy', 'Kireka', 'Reston', 'Sarab-e Taveh-ye `Olya', 'Singida', 'Waldorf', 'Ain Jemaa', 'Al Ghat', 'Deodrug', 'Kotmale', 'Guemar', 'Skoura', 'Tuaran', 'Outa Bouabane', 'Talaivasal', 'Jerusalem', 'Hegang', 'Daskasan', 'Hannan', 'Murowana Goslina', 'Bomlo', 'Nova Veneza', 'Coppell', 'Miguelopolis', 'Stadtallendorf', 'Kishundaspur', 'Yuxiaguan', 'Aleksandrovac', 'Mianwali', 'Mujikharf', 'Develi', 'Khawr Fakkan', 'Billapadu', 'Cacu', 'Oldenburg', 'Lens', 'Veternik', 'Canet de Mar', 'Vellaturu', 'Colmar', 'Guadalupe Nuevo', 'Hulin', 'Talwandi Bhai', 'Sighisoara', 'Santa Monica', 'Alessandria', 'Idku', 'Barsaun', 'Podebrady', 'Dona Ines', 'Eppelheim', 'Lonavale', 'Comitan', 'Darby', 'Billingham', 'Saint-Maximin-la-Sainte-Baume', 'Volterra', 'Kombissiri', 'San Lorenzo de Guayubin', 'Mashyal', 'Cogua', 'Parral', 'Coaticook', 'Bekobod', 'Ribadeo', 'Bhanukumari', 'Kampong Chhnang', 'Vrbovec', 'South Sioux City', 'Kalaki', 'Panama City', 'Mustafabad', 'Morur', 'Zurbatiyah', 'Gamba', 'Toukoroba', 'Bully-les-Mines', 'Nattakkadaiyur', 'Sand Springs', 'Turek', 'Nejapa', 'Woodridge', 'Laranjeiras', 'Xiaozui', 'Kottaiyur', 'Sebaste', 'Penn Forest', 'La Esmeralda', 'Nohsa', 'Eskisehir', 'Chillan Viejo', 'Kani', 'Terdal', 'Sarta', 'Jinan', 'Oborniki', 'Taketoyo', 'Badnor', 'Panchkula', 'Tres Passos', 'Cartaya', 'Barra de Santa Rosa', 'Ganapavaram', 'Mulampilli', 'Damaishan', 'Meihuacun', 'Pahsara', 'Atessa', 'Toucheng', 'Chamba', 'Kranuan', 'Pasaul', 'Westview', 'Grimari', 'San Jacinto de Buena Fe', 'Porecatu', 'Ihtiman', 'Misilmeri', 'Friesenheim', 'Wentang', 'Bageshwar', 'Mahtha', 'Tenes', 'Eraclea', 'Evesham', 'Elmhurst', 'Nahorkatiya', 'Oil City', 'Sidi Jaber', 'Laxou', 'Royal Kunia', 'Dong Xoai', 'Hathauri', 'Ambinanintromby', 'Jurovski Dol', 'Erwitte', 'Jerada', 'Costas de Cao', 'Douetire', 'Terku Valliyur', 'Samesi', 'Halgeri', 'Huichapan', 'Bolpur', 'Ban Bu Sung', 'Dandong', 'Bucaramanga', 'Bhavnagar', 'Walia', 'Puerto Vallarta', 'Rishikesh', 'Solebury', 'Maratturai', 'Lindlar', 'Bariarpur Kandh', 'Yerbas Buenas', 'Ottur', 'Torpa', 'Debre Zeyit', 'Lemgo', 'Piritiba', 'San Lorenzo de El Escorial', 'Paterson', 'Gonzaga', 'Calabasas', 'Miahuatlan', 'Castelo do Piaui', 'Sprockhovel', 'Adelaide', 'Clay Cross', "Do'stlik Shahri", 'Carrieres-sous-Poissy', 'Hizan', 'Mios', 'Edenburg', 'Worplesdon', 'Le Kremlin-Bicetre', 'Nanjundapuram', 'Muratli', 'Massena', 'Girona', 'Arifwala', 'Chillum', 'Salcedo', 'Caoayan', 'Kolonodale', 'Curacautin', 'Ixchiguan', 'Dubnica nad Vahom', 'Careiro da Varzea', 'Meckenbeuren', 'Lacombe', 'Chifubu', 'Kostinbrod', 'Lipova', 'Sao Joao Evangelista', 'Messini', 'Santangpai', 'Viransehir', 'Ait Ali', 'Harsova', 'Mamou', 'Denkendorf', 'Kalardasht', 'Santo Antonio do Leverger', 'Qusar', 'San Tan Valley', 'Kushima', 'West Norriton', 'Brasilia de Minas', 'Hassleholm', 'Shahdol', 'Faqus', 'Abbots Langley', 'Zanandore', 'Canon City', 'Manjeri', 'New Milton', 'Regensdorf', 'Flower Mound', 'Dharphari', 'Chapra', 'Tazert', 'Hirehadagalli', 'Walkden', 'Hatti', 'Ohangaron', 'Bujanovac', 'Dharmapuram', 'Munchenstein', 'Ishigaki', 'Cocody', 'Karimpur', 'Appukkudal', 'New Kensington', 'Palmas de Monte Alto', 'Baghduma', 'Sorisole', 'Qarazhal', 'Shemonaikha', 'Korfez', 'Mbale', 'Shahar Telpa', 'Gangwuzhen', 'Qianzhou', 'Potosi', 'Monte Chingolo', 'Baba Bakala', 'Nottuln', 'Talakad', 'Dearborn Heights', 'Mulakumud', '`Ali Shahr', 'Nashtifan', 'Buriti', 'Veliko Tarnovo', 'Forest Lake', 'Mizuho', 'Birni Lafia', 'Honda', 'Fortin de las Flores', 'Corral de Bustos', 'Dracut', 'Dighwa', 'La Riche', 'Karsaut', 'Mito', 'Xunjiansi', 'Gandia', 'Sidi Makhlouf', 'Xibeijie', 'Peritoro', 'Sanjiangkou', 'Bounaamane', 'Barja', 'San Felipe del Progreso', 'Daisen', 'Avadi', 'North Bay Shore', 'Dagohoy', 'Fuying', 'Dabas', 'Koipadi', 'Bloomington', 'Mecheria', 'Sundargarh', 'Rajod', 'Serramazzoni', 'Kokofata', 'Urlati', 'Temixco', 'Barbadanes', 'Peddapalli', 'Le Mont-sur-Lausanne', 'Berthoud', 'Navrongo', 'Brockworth', 'West Goshen', 'Arcos de Valdevez', 'Lalgola', 'Bakhmut', 'Majibacoa', 'Yangfang', 'Malisheve', 'Cloverdale', 'Petah Tiqwa', 'Yamanashi', 'Riofrio', 'Merta', 'Yoro', 'Trinidad', 'Sobreda', 'Sidi Allal el Bahraoui', 'Naranammalpuram', 'Baicoi', 'Rolling Meadows', 'Linjiang', 'Aralam', "Citta Sant'Angelo", 'Kaous', 'Zacualpa', 'Rivas', 'Vellipalaiyam', 'Ciudad Santa Catarina', 'Chaungtha', 'Los Lunas', 'Crna na Koroskem', 'San Miguel Ixtahuacan', 'Pantukan', 'Tsushima', 'Donggangli', 'Nalbach', 'Begles', 'Addlestone', 'Maluso', 'Atchison', 'Vertentes', 'Perth', 'Deori Khas', 'Cho Phuoc Hai', 'Pacifica', 'Belhatti', 'Audincourt', 'Baraidih', 'Tsiningia', 'Ampary', 'Drobeta-Turnu Severin', 'Tsumeb', 'Atlantic City', 'Villaguay', 'Gyangze', 'Wokingham', 'Cowdenbeath', 'Tehuacan', 'Babanusah', 'Kolobrzeg', 'Iguala de la Independencia', 'Shanklin', 'Nasriganj', 'Haql', 'Bonab', 'Ratnanagar', 'Ermoupoli', 'Besalampy', 'Mannar', 'Vigo', 'Wolverhampton', 'Idfu', 'Zacapa', 'Malacacheta', 'Jersey City', 'Tanmpegre', 'Gemlik', 'Bilovodsk', 'Putyvl', 'Memmelsdorf', 'Ar Rustaq', 'Munster', 'Dano', 'Barhan', 'San Giorgio Ionico', 'Shevington', 'Larnaca', 'Mawu', 'Rio Verde de Mato Grosso', 'Arroyos y Esteros', 'Thoubal', 'Breisach am Rhein', 'Aizawl', 'Bamessing', 'Nurmo', 'Lewistown', 'Marseille', 'Hyde Park', 'Antsahalava', 'Gardelegen', 'Ramena', 'Centerville', 'Berdiansk', 'Contla', 'Lunavada', 'Schwetzingen', 'Vilangurichchi', 'Lijiaxiang', 'North Tustin', 'Sycow', 'San Severo', 'Tugaya', 'Luziania', 'Bay', 'Sete', 'Torbat-e Heydariyeh', 'Lerum', 'Las Tunas', 'Chalala', 'Surat', 'Tobias Barreto', 'Svilengrad', 'Viswanathaperi', 'Duartina', 'Basantpur', 'Chikodi', 'Joplin', 'Hilongos', 'Whitefish Bay', 'Sisian', 'Grimbergen', 'Soham', 'Bage', 'Senlis', 'Matungao', 'Roche-a-Bateau', 'Belfort', 'Asarcik', 'Alotenango', 'Santa Catarina Ayotzingo', 'Villa Tunari', 'Dangcheng', 'Sievierodonetsk', 'Vigan', 'Anahuac', 'Iesolo', 'Chandili', 'Aleksandrow Lodzki', 'Palkur', 'Parwaha', 'Machali', 'Corleone', 'Heemstede', 'Vallehermoso', 'Ermezinde', 'Sedalia', 'Guanxi', 'Valea lui Mihai', 'Montescaglioso', 'Heerlerbaan', 'Seika', 'Tongxiao', 'Ronneby', 'Badou', 'Andilana Avaratra', 'Hope Mills', 'Taiynsha', 'Borgampad', 'Gonder', 'Weigelstown', 'Boukhralfa', 'Kurshab', 'Bad Liebenwerda', 'Hodal', 'Franco da Rocha', 'Gangtok', 'Kamianka-Dniprovska', 'Acilia', 'Paks', 'Dastgerd', 'Montbeliard', 'Molteno', 'Lakshmipuram', 'El Tabo', 'North Massapequa', 'San Juan Cotzal', 'Buftea', 'Freystadt', 'Chiltiupan', 'Jabera', 'Ikizce', 'Khurmi', '`Ajlun', 'Kalpitiya', 'Idlib', 'Yuza', 'Gaohucun', 'Lavis', 'Olesnica', 'Parnera', 'Windorah', 'Munnarkod', 'Tuckahoe', 'Schotten', 'Blansko', 'Carregal do Sal', 'Okemos', 'Central Coast', 'Anjehalli', 'West Hollywood', 'Chanthaburi', 'Barauna', 'Chettinayakkanpatti', 'Aomori', 'Sukurhutu', 'Tiruvalam', 'Lisala', 'Al Bajur', 'Santa Filomena', 'Padre Paraiso', 'Boura', 'Teocuitatlan de Corona', 'Inowroclaw', 'Nova Milanese', 'Yalta', 'Lukulu', 'Ambila', 'Oulad Hassoune', 'Folignano', 'Salaman', 'Belem de Sao Francisco', 'Khvaf', 'Rio Bravo', 'Kotri', 'Bom Jesus', 'Ambodibonara', 'Balvi', 'Jiaozishan', 'Cubuk', 'Cagnes-sur-Mer', 'Harrow Weald', 'Nantingcun', 'Choctaw', 'Adalaj', 'Hoi An', 'Chintamani', 'Ajka', 'Havelock', 'Votuporanga', 'Uta', 'Incirliova', 'Ankazondandy', 'Ash Shunah ash Shamaliyah', 'Skegness', 'Kedu', 'Petite-Synthe', 'Mungeli', 'Anjahabe', 'Maceira', 'Sakurai', 'Talya', 'Ochtrup', 'Polur', 'Wake Forest', 'Bushenyi', 'Yihezhuang', 'Vega Baja', 'East Goshen', 'Serra Talhada', 'Ribeira Grande', 'Branquinha', 'Chimakurti', 'Smyrna', 'Lamesa', 'Tabuelan', 'Tuxpam de Rodriguez Cano', 'Satyamangalam', 'El Sauce', 'Nasrullahganj', 'Acerra', 'Horlivka', 'Beloit', 'Adigaratti', 'Puraini', 'Gueltat Sidi Saad', 'Egra', 'Comendador', 'Verrieres-le-Buisson', 'Novoberde', 'Onchan', 'Puri', 'Oudtshoorn', 'Ab Pakhsh', 'Melouza', 'Schoonhoven', 'Quilicura', 'Arlon', 'Steinkjer', 'Port Macquarie', 'Inca', 'Erkelenz', 'Lebrija', 'Mandla', 'Brus Laguna', 'Deniliquin', 'Schramberg', 'Mokhotlong', 'Walbrzych', 'Azarshahr', 'Highland Springs', 'Ambodilazana', 'Timurni', 'Corbera de Llobregat', 'Sanand', 'Lugu', 'Plovdiv', 'Nokia', 'Emden', 'Biganos', 'Sao Joao do Triunfo', 'Carrollton', 'Qadian', 'Lowenberg', 'Hongshandian', 'Sultanpur Mor', 'Uusikaupunki', 'Manosque', 'Itapevi', 'Saudade', 'Russi', 'Brecksville', 'Abaran', 'Abu Suwayr', 'Laranjeiras do Sul', 'Krasnyi Luch', 'Chekkal', 'Grimma', 'Aksum', 'Arari', 'Bromborough', 'Yufle', 'Indio', 'Xonobod', 'Trairi', 'The Pinery', 'Wehrheim', 'Martuni', 'Burbank', 'Muritiba', 'Nieuw-Vennep', 'Cannock', 'Neuhausen am Rheinfall', 'Rostamabad', 'Broome', 'Voru', 'Asilah', 'Fazendinha', 'West Orange', 'Sirigeri', 'Gurmatkal', 'Simplicio Mendes', 'Ede', 'Rolesville', 'Ashtian', 'Saint-Genis-Laval', 'Morada Nova', 'Rohri', 'Easton', 'Tonacatepeque', 'Marmande', 'Neratovice', 'Mildenhall', 'Telaprolu', 'North Perth', 'Vengat', 'Shirako', 'Khanewal', 'Malitbog', 'Uherske Hradiste', 'Sinsheim', 'Baguanos', 'Guigang', 'Kumarankari', 'Minudasht', 'Bilauri', 'Sihushan', 'El Dovio', "Sant'Anastasia", 'Puertollano', 'Rotorua', 'Douar Azla', 'Tostedt', 'Silkeborg', 'Velakalnattam', 'Malvik', 'Redentora', 'Kimhae', 'Cankuzo', 'Turuvanur', 'Siliancun', 'Davie', 'Ouled Rahou', 'Pederneiras', 'Aviano', 'Kwale', 'Knik-Fairview', 'Baiyashi', 'Pendekallu', 'Sarai Ranjan', 'Appleton', 'San Pedro', 'Bautista', 'Maurilandia', 'San Giovanni in Persiceto', 'East Bethel', 'Slovenska Bistrica', 'Caerdydd', 'Satana', 'Fakola', 'Waukee', 'Lindsay', 'Bailleul', 'Ventimiglia', 'Zaltbommel', 'Arua', 'Mansingha', 'Amauna', 'Muridke', 'Burgthann', 'Kayapinar', 'Kawachicho', 'Jiuru', 'Hartland', 'Oberwingert', 'Shiddapur', 'Sherbrooke', 'Divaca', 'Tayum', 'Trecate', 'El Menzel', 'Rawson', 'Imaculada', 'Mindat', "L'Epiphanie", 'Ambarimaninga', 'Oststeinbek', 'Kuala Terengganu', 'Sankt Polten', 'Pageralam', 'Carmen de Viboral', 'Kety', 'Inhambupe', 'Nonantola', 'South Lake Tahoe', 'Hiji', 'Erbaocun', 'Millis', 'Chervonohrad', 'Williamsport', 'Son La', 'Leon de los Aldama', 'Kraulshavn', 'Figuil', 'Ritterhude', 'Sahel', 'Karaga', 'Rosarno', 'Chudamani', 'Stonehaven', 'Jataizinho', 'Surdulica', 'Veghel', 'Oki', 'Hoxter', 'Memari', 'Holubivske', 'Texcoco', 'Aja', 'Bayyavaram', 'Pueblo', 'Fengcheng', 'Mossingen', 'Arinos', 'Bedesa', 'Dolenjske Toplice', 'Shadegan', 'Pontecorvo', 'Camano', 'Fairburn', 'Villefranche-sur-Saone', 'Maiduguri', 'Roxas City', 'Biddeford', 'Sokone', 'Aravelli', 'Palikir', 'Herford', 'Otsego', 'Kamiita', 'Brambleton', 'Hongsi', 'Bamafele', 'Al Qurayn', 'Naousa', 'Urdorf', 'Esira', 'Cachoeira do Sul', 'Botevgrad', 'Jamjamal', 'Sowerby Bridge', 'Remada', 'Vistahermosa', 'Los Lagos', 'Abengourou', 'Jean-Mermoz', 'Parker', 'Hisor', 'Oulunsalo', 'Aguilar', 'Yuscaran', 'Sedro-Woolley', 'Lankaran', 'Zaniena', 'Lucerne', 'Tortum', 'Lynbrook', 'Rasiari', 'Trappes', 'Kirkel', 'Balakrishnanpatti', 'Getafe', 'Bad Vilbel', 'Mankayan', 'Trindade', 'La Ceja', 'Tacuba', 'Tagbina', 'Severna Park', 'Canosa di Puglia', 'Gasan', 'Daiwanishi', 'Polohy', 'Qaraghandy', 'Ziri', 'San Antonio de Padua', 'Rarott', 'Itami', 'Carapo', 'La Cruz de Rio Grande', 'Ruskin', 'Camooweal', 'Barika', 'Tohoku', 'Pante Macassar', 'Vocklabruck', 'Erutukada', 'Koili Simra', 'Edea', 'Targoviste', 'Antanankambano', 'Bangzha', 'Bankass', 'Podstrana', 'Kakuma', 'Quezaltepeque', 'Schaan', 'Daxincun', 'Trikodi', 'Yawatahama-shi', 'Pefki', 'The Hague', 'Tamu', 'Guli', 'Shing', 'Bakersfield', 'Lanxi', 'Singaparna', 'Aljustrel', 'Chirchiq', 'Rio Acima', 'Halstenbek', 'Pandhurna', 'Boa Nova', 'Cerkvenjak', 'Saint-Jean-le-Blanc', 'Largo', 'Basse-Terre', 'Jimalalud', 'Bhanas Hivre', "Sant'Ilario d'Enza", 'Santa Lucia Utatlan', 'Parasurampur', 'New Providence', 'Sofiivka', 'Ban Muang Kham', 'Pingdu', 'Kyle', 'Nellutla', 'Hualqui', 'New City', 'Tangancicuaro de Arista', 'Brawley', 'Rucheng', 'Bendorf', 'Steinen', 'Welby', 'Valentigney', 'Jacinto City', 'Mweka', 'Baler', 'Sasso Marconi', 'Fanzeres', 'Sunrise', 'Ovidiopol', 'Canarana I', 'Jaitwar', 'Settat', 'Villa Paranacito', 'Lerdo de Tejada', 'Westerstede', 'Rangapukur', 'Vinukonda', 'Mastaga', 'Terrasini Favarotta', 'El Fuerte', 'Heydarabad', 'Rapperswil-Jona', 'Preston', 'Mahasamund', 'Bossier City', 'Kotamobagu', 'Sowme`eh Sara', 'Charala', 'Gayeri', 'Calape', 'Villa Rica', 'Panying', 'Khandauli', 'Oristano', 'Ar Ramtha', 'Ben Gardane', 'Donnacona', 'Bagamoyo', 'Caras', 'Chikha', 'Kalikavu', 'Lewiston', 'Maseru', 'Huanchaco', 'Higuerote', 'Yangshe', 'Salinas Victoria', 'Marofoty', 'Yamen', 'Hyderabad City', 'Gusang', 'Mexicali', 'Yedtare', 'Salgado de Sao Felix', 'Gaffney', 'Torcy', 'Hodos', 'Sangerhausen', 'Tsuruta', 'Bladensburg', "Sach'on", 'Sabanozu', 'Agustin Codazzi', 'Kings Park West', 'Periyamuttur', 'Varnsdorf', 'Akbez', 'The Woodlands', 'Puente Nacional', 'Golcuk', 'Sandbach', 'Tullamore', 'Chimalhuacan', 'Kula', 'Oliveira de Frades', 'Yuasa', 'Jieyang', 'Conceicao de Jacuipe', 'Pezinok', 'Malgrat de Mar', 'Ceske Budejovice', 'Qapshaghay', 'Membakut', 'General Acha', 'Studenka', 'Cleveland', 'Mehnajpur', 'Darasuram', 'Rubengera', 'Kusadasi', 'Buddh Gaya', "Clermont-l'Herault", 'Bifeng', 'Nakama', 'Kuiju', 'Scheessel', 'Kajo Kaji', 'Medina Estates', 'Pointe-Claire', 'Savannakhet', 'Yarumal', 'Saint-Egreve', 'Sokoto', 'Timonium', 'Tire', 'Ota', 'Otwock', 'Pretoria-Noord', 'Iziaslav', 'Bell Gardens', 'Ecija', 'Khunays', 'Kottaipatti', 'Garin', 'Tuktukan', 'Ibara', 'Tyre', 'Casteldaccia', 'Gulgam', 'Jaffna', 'Gariadhar', 'Zoetermeer', 'Kasulu', 'Santo Antonio dos Lopes', 'Pannawonica', 'Cajuru', 'Belchertown', 'Sonseca', 'Non Sung', 'Kardla', 'Furstenfeldbruck', 'Ban Tat', 'Bydgoszcz', 'Semere', 'Fortuna', 'Corralillo', 'Coin', 'Siegburg', 'Comacchio', 'Bellavista', 'Mission Bend', 'Avezzano', 'Rancho Mission Viejo', 'Mechraa Bel Ksiri', 'Shirebrook', 'Kizhariyur', 'Bueng Kan', 'Matigou', 'Misratah', 'Deventer', 'Luau', 'Kiratpur Rajaram', 'Huanuni', 'Kannamanayakkanur', 'Kluczbork', 'North Battleford', 'Lukaya', 'Bo`ka', 'Baicheng', 'Channahon', 'Terrace Heights', 'Uniao', 'Kunigal', "Porto Sant'Elpidio", 'Marin', 'Bimun', 'Dilijan', 'Satkhira', 'Erie', 'Hemsworth', 'Gueppi', 'Urubici', 'Daraw', 'Kastoria', 'Yutz', 'Lichtenstein', 'Unquillo', 'Homewood', 'Accrington', 'Kalyanpur', 'Diego de Almagro', 'Beeville', 'Koila', 'Hathiakan', 'Coka', 'Bouake', 'Veyrier', 'Akhaltsikhe', 'Tinaquillo', 'Moncada', 'Gladbeck', 'Telpaneca', 'Central Elgin', 'Vari', 'Plonsk', 'Chateau-Thierry', 'Schkopau', 'Nawalpur', 'Jagatpur', 'La Ceiba', 'Omuthiya', 'Soalandy', 'Kimberley', 'Kappiyara', 'Hyattsville', 'Shichigahama', 'Koronowo', 'Modra', 'Manolo Fortich', 'Neuilly-sur-Marne', 'San Juanito', 'Sedhiou', 'Pailon', 'Vechta', 'Schortens', "Qia'erbagecun", 'Coalinga', 'Plankstadt', 'Cachoeira Alta', 'Grand Prairie', 'Ulsan', 'Bouguenais', 'Campina da Lagoa', 'Sorong', 'Churumuco de Morelos', 'Byron Bay', 'Sarny', 'Bilbays', 'Patiali', 'Milagro', 'Kothri Kalan', 'Idaho Falls', 'Bombo', 'Muttunayakkanpatti', 'Nidamanuru', 'Countryside', 'Buu Long', 'Capitan Bado', 'Surpur', 'Reggane', 'Campi Bisenzio', 'Anantasagaram', 'Salto Grande', 'Baduria', 'Echague', 'Hollywood', 'Rahat', 'Lower Paxton', 'Trebbin', 'Wardha', 'Beek en Donk', 'Vilhena', 'Bergeijk', 'Chhapia', 'Ganaram', 'Chhanera', 'Kapangan', 'Roncade', 'Walcourt', 'Waltenhofen', 'Bahar', 'Oiba', 'Willoughby', 'Babol', 'Alashankou', 'Groton', 'Ban Talat Rangsit', 'Neijiang', 'Johvi', 'Ulao', 'Nagold', 'Lacey', 'Marigot', 'Zabljak', 'Atushi', 'Pileru', 'Borodyanka', 'Guaymate', 'Alakamisy Anativato', 'Montecchio Maggiore', 'Umea', 'Mekambo', 'Jericho', 'Urrao', 'Conceicao do Mato Dentro', 'Cutro', 'Narayankher', 'Ocoyoacac', 'Las Margaritas', 'North Cornwall', 'Katiena', 'Liangwancun', 'Tarawan', 'Tianjin', 'Eltham', 'Ampitasimo', 'Patzcuaro', 'Boortmeerbeek', 'Zary', 'New Tecumseth', 'Izki', 'Faradonbeh', 'Ezine', 'Mushie', 'Kuvango', 'La Quiaca', 'Tamgrout', 'Buenos Aires', 'Khowrhesht', 'Muskegon Heights', 'Wangsicun', 'Kimwanyi', 'Narra', 'Belton', 'Khlong Luang', 'Barhadashi', 'Ixtlahuacan de los Membrillos', 'Lauterbach', 'Mohda', 'Chepstow', 'Blegny', 'Reykjavik', 'Jangalapalle', 'Cumayeri', 'Tarashcha', 'Cervera', 'Pouso Redondo', 'Bunde', 'Bariyarpur', 'Zweibrucken', 'Alzenau in Unterfranken', 'Santo Tomas', 'Taxtako`pir', 'Colinas do Tocantins', 'Vryburg', 'Hooksett', 'Rahui', 'Daejeon', 'Chinaval', 'Pornichet', 'Hazelwood', 'Sherkot', 'Shuishang', 'Palmitos', 'Erin', 'Lagoa do Itaenga', 'Uthal', 'Whippany', 'Zamboanguita', 'Kasugai', 'Kuurne', 'Embarcacion', 'Alsfeld', 'Saint-Avertin', 'Lokwabe', 'Bronte', 'Gemerek', 'Diamond Bar', 'Germantown', 'Tengampudur', 'Myanaung', 'Amritpur', 'Manhattan Beach', 'Spring Valley Lake', 'Niangoloko', 'Crevalcore', 'Bamble', 'Hollabrunn', 'Furtwangen im Schwarzwald', 'Ariquemes', 'Baruari', 'Paura Madan Singh', 'Bracknell', 'Le Bouscat', 'Pirapozinho', 'Puyappalli', 'Zhengjiatun', 'Garibaldi', 'Aydin', 'Sestri Levante', 'Panjampatti', 'Chengbin', 'Ambohitrambo', 'Olympia Heights', 'Fuxin', 'Jupiter Farms', 'Birjand', 'Sugauna', 'Villa La Angostura', 'Abu Hummus', 'Igreja Nova', 'Sanghera', 'Tremembe', 'Bhopalia', 'Vila Velha', 'Papampeta', 'Dhanga', 'Sheyban', 'Jaltenango', 'Tagapul-an', 'Kulusuk', 'Montesson', 'Chascomus', 'Guernica y Luno', 'Itamarati', 'Metepec', 'Wadersloh', 'Costa Marques', 'Barod', 'Bhaisalotan', 'Velez-Malaga', 'Raunds', 'Hushihacun', 'Sindgi', 'Quivican', 'Ivoti', 'Maroteza', 'Bazar-Korgon', 'Baduriatola', 'Tysons', 'Saidapet', 'Busto Arsizio', 'Cabanatuan City', 'Sami', 'Logansport', 'Besozzo', 'Sonaguera', 'Acevedo', 'Harbiye', 'Prairie Ridge', 'Kasaragod', "Hosa'ina", 'Borgo San Dalmazzo', 'Cockeysville', 'Kabirpur', 'Mikuszowice', 'Kamiichi', 'Arsali', 'Enugu', 'Sahanivotry-Manandona', 'Savar', 'Al Khubar', 'Haddon', 'Allahdurg', 'Rodynske', 'Puduru', 'Jiquirica', 'Kirishima', 'Skive', 'Aj Jourf', 'Uedem', 'Cananea', 'Stryzhavka', 'Bankapur', '`Afrin', 'Saint-Colomban', 'Imperia', 'Valsequillo de Gran Canaria', 'Jette', 'Hamm', 'Piranga', 'Rixheim', 'Currimao', 'Opuwo', 'Herohalli', 'Larreynaga', 'Gravina in Puglia', 'Benton Harbor', 'Afragola', 'Castelldefels', 'Lubao', 'Brahmanbaria', 'Barkly West', 'San Rafael del Norte', 'Abu Dhabi', 'Mamburao', 'Chartres', 'Hawick', 'Shiruru', 'Butajira', 'Newcastle', 'Igarape-Miri', 'San Mariano', 'Statesboro', 'Cachoeira do Arari', 'Goumori', 'Kassaro', 'Karaisali', 'Yoshida', 'Stekene', 'Walajabad', 'Alcorcon', 'Seneca', 'Cabagan', 'Mansfield', 'Valsad', 'Monaragala', 'Barro', 'Assebroek', 'Al Mubarraz', "L'Isle-d'Abeau", 'Cajueiro', 'Wanstead', 'Iten', 'Salamina', 'Jacinto Machado', 'Kuyganyor', 'Ar Rusayfah', 'Morgantown', 'Balta', 'Anse-a-Foleur', 'Crnomelj', 'Dagbe', 'Nubl', 'Isumi', 'Arumbavur', 'Lohuti', 'Backa Palanka', 'Marktoberdorf', 'Massango', 'Gamay', 'Asbury Park', 'Piracicaba', 'Tadas', 'Malanvadi', 'Ranko', 'Falam', 'Kerava', 'Brevik', 'Fort Madison', 'San Pablo Villa de Mitla', 'La Resolana', 'Irun', 'Santa Juana', 'Senirkent', 'Colbun', 'Chasiv Yar', 'Bailleston', 'Mairwa', 'Ba Don', 'Caetanopolis', 'Popovo', 'Bandipura', 'Brewer', 'Raesfeld', 'Chityal', 'Bois-Guillaume', 'Miahuatlan de Porfirio Diaz', 'Eupen', 'Riemst', 'Reims', 'Menen', 'Wundanyi', 'Guarenas', 'Biankouma', 'Vila Bela da Santissima Trindade', 'Pallappalaiyam', 'Farmington', 'Bradenton', 'Singoli', 'Cainta', 'Farahalana', 'Simoes Filho', 'San Zenon', 'Planadas', 'Catole do Rocha', 'Solrod Strand', 'Rovinj', 'Dracena', 'Jablonec nad Nisou', 'Chhagalnaiya', 'Ampataka', 'Lohagara', 'Zarautz', 'Tiquisio', 'Epinay-sous-Senart', 'Soka', 'Gandarbal', 'Los Muermos', 'Akyaka', 'Az Zaqaziq', 'Ibaretama', 'San Jose Guayabal', 'Phulparas', 'Unity', 'Anderson', 'Post Falls', 'Bandio', 'Quiindy', 'Mazoe', 'Gokarna', 'Weston-super-Mare', 'Donihue', 'Maliano', 'Sahibganj', 'Stropkov', 'Williston', 'Khutauna', 'Mishrikot', 'Rice Lake', 'Boa Viagem', 'Ashkhaneh', 'Wabag', 'Ban Mangkon', 'Bognor Regis', 'Alabel', 'Hoor', 'Perote', 'Sotkamo', 'Mukondapalli', 'Kenzingen', 'El Alia', 'Barssel', 'Libon', 'Pljevlja', 'Karumulaikkal', 'Atlatlahucan', 'Puteaux', 'Ramnicu Sarat', 'Karariya', 'Floro', 'Richard-Toll', 'Semuto', 'Jilotlan de los Dolores', 'Nellikkuppam', 'Malsch', 'Khowrasgan', 'Ambodimahabibo', 'Safidon', 'Canoinhas', 'Ban Na Yang', 'Kuttur', 'Makubetsu', 'Tsavo', 'Guape', 'Mapleton', 'Sao Sebastiao da Grama', 'Cannanore', 'Sama', 'Vail', 'Alwar', 'Cherlagandlapalem', 'Zyryanovsk', 'Kopong', 'Wabrzezno', 'Antsampanimahazo', 'Forney', 'Obock', 'Banstead', 'Kovurupalli', 'Bientina', 'Le Creusot', 'Bontang', 'Pierre', 'Sidi Aissa', 'San Ignacio Cerro Gordo', 'Lancon-Provence', 'Sao Bento do Sul', 'Arkadelphia', 'Lalru', 'Neriyamangalam', 'Kwai Chung', 'Matsubara', 'Metapan', 'Hopatcong', 'Baniyas', 'Song Phi Nong', 'Labason', 'Kashima', 'Bafanji', 'Isola del Liri', 'Las Rozas de Madrid', 'Herdecke', 'Ait Bousarane', 'Itanhandu', 'Odemis', 'Mohgaon', 'Bilgoraj', 'Poquoson', 'Dilolo', 'Soliman', 'Videira', 'Sankt Augustin', 'Hachimantai', 'Oneida', 'Medeiros Neto', 'Sakri', 'Chestnut Ridge', 'Burubaytal', 'Oruro', 'Beauraing', 'Mauli', 'Curanilahue', 'Great Neck', 'Skofja Loka', 'Aiken', 'Roncador', 'Temuco', 'Oros', 'Timmapur', 'Kununurra', 'Naqadeh', 'Clarin', 'Podgorica', 'Beldanga', 'Mutluru', 'South Whitehall', 'Prince Rupert', 'Nirasaki', 'Hodatsushimizu', 'Trani', 'Voghera', 'Purkersdorf', 'Medapadu', 'Puerto Francisco de Orellana', 'Fatao', 'Sidi Yahia El Gharb', 'Spiez', 'Pujili', 'Renk', 'Quemado de Guines', 'Antenetibe', 'Rueil-Malmaison', 'Saimbeyli', 'Asagi Quscu', 'Tsarahasina', 'Hunasagi', 'Chilpancingo', 'Goycay', 'Republic', 'Bocholt', 'Montgeron', 'Dodji-Bata', 'Panzgam', 'Joinville-le-Pont', 'Fergus', 'Tenkasi', 'Groveland', 'El Carmen de Atrato', 'Garou', 'Sangao', 'Buffalo', 'Oulad Amrane el Mekki', 'Bethune', 'Sidfa', 'Cuichapa', 'Maria Aurora', 'Sanzana', 'Vaxjo', 'Tsrar Sharif', 'Samut Sakhon', 'Novate Milanese', 'Mirdaul', 'Broadview Heights', 'Domaa-Ahenkro', 'Herseh Chhina', 'Tupa', 'Porec', 'Mount Evelyn', 'Balboa Heights', 'Xuqiaocun', 'Sumbawa Besar', 'Nahiyat Khan Bani Sa`d', 'Danli', 'Bartin', 'Feyzin', 'Peruvanthanam', 'Chene-Bougeries', 'Malakal', 'North Middleton', 'Hauzenberg', 'Datteln', 'Goiatuba', 'Solan', 'Ol Kalou', 'Kaedi', 'Sakib', 'Reyes', 'Freudenstadt', 'Anamalais', 'Chengxiang', 'Ely', 'Monmouth', 'Fukude', 'Nepanagar', 'Bousse', 'Emba', 'Ghusiya', 'Noisiel', 'Barharia', 'Techiman', 'Castilho', 'Higashi-osaka', 'Kerugoya', 'Bouaiche', 'Laurel', 'Suluktu', 'Hanzviur', 'Rosrath', 'Ciro Marina', 'Cabanillas del Campo', 'Qahjavarestan', 'Zuidhorn', 'Beilen', 'Minami-Soma', 'Douentza', 'Florianopolis', 'Darende', 'Catanduva', 'Wao', 'Rasingapuram', 'Huodoushancun', 'Fontaine-les-Dijon', 'Shinyanga', 'Sargodha', 'Penfield', 'Dunaujvaros', 'Ube', 'Ain Dfali', 'Andrembesoa', 'Kochas', 'Kondalampatti', 'Khejroli', 'Manassas', 'Macaiba', 'Uson', 'Langelsheim', 'Kottukal', 'Sharan', 'Longueuil', 'Parappur', 'Kurivikod', 'Pindi Bhattian', 'Buuhoodle', 'Karjat', 'Befandriana', 'Alto Santo', 'Pinneli', 'Mlawa', 'Cortez', 'Katteragandla', 'Badalona', 'Zibo', 'Keizer', 'Marijampole', 'Loncoche', 'Mpika', 'Hobro', 'Mancora', 'Bosconia', 'Frohburg', 'Weilerswist', 'Las Charcas', 'Carney', 'Bhansia', 'Simrahi', 'Guildford', 'Fuquay-Varina', 'Braunau am Inn', 'Pine Hills', 'Franconville', 'Noordwijkerhout', 'Bezons', 'Sliema', 'Pinellas Park', 'Kafr Qasim', 'Dundee', 'Valley', 'Bromsgrove', 'Coacalco', 'Kralupy nad Vltavou', 'Lafrayta', 'Tunapuna', 'Barmstedt', 'Upper Saucon', 'Palaiya Ayakkudi', 'Mladenovac', 'Qaratau', 'Hurtgenwald', 'Mailapur', 'Tucurui', 'Australia', 'Cidelandia', 'Holly Hill', 'Kannandahalli', 'Gilgit', 'Roetgen', 'Jirkov', 'Madavur', 'Eastham', 'Kachavaram', 'Qal`at an Nakhl', 'Pearl River', 'Oberstdorf', 'Andilamena', 'Tittagudi', 'Ceel Baraf', 'Meerbusch', 'Jaunpur', 'Terrell', 'Ban Nong Kula', 'Sarmiento', 'Coaldale', 'Baneh', 'Leh', 'Mirai', 'Ambodisikidy', 'Amtar', 'Boise', 'Solsona', 'Maizuru', 'Villa Luvianos', 'Twentynine Palms', 'Murapaka', 'Monclova', 'Miharu', 'Krumbach', 'Haldensleben', 'Inaja', 'Iguatu', 'Ponto Novo', 'Batabano', 'Itape', 'Poggiomarino', 'Gayaspur', 'Monte San Juan', 'Kostiantynivka', 'Bagalvad', 'Pozoblanco', 'Gaunivaripalle', 'Gandhidham', 'Pingxiangcheng', 'Flers-lez-Lille', 'Owasso', 'Summerville', 'Ban Tap Tao', 'Mahisanrh', 'Podaturpeta', 'Kalanchak', 'Lira', 'Yaguaron', 'Saharanpur', 'Guthrie', 'Tirat Karmel', 'Sagala', 'Griesheim', 'Riesa', 'Anaikal', 'Kontiolahti', 'Betania', 'Varjota', 'Pisaflores', 'Encarnacion de Diaz', 'Tafeng', 'Ioannina', 'Ha Giang', 'Ash Shinan', 'Maromiandra', 'Kampel', 'Seyyedan', 'Elk', 'Marcos Juarez', 'Timri', 'Ewarton', 'Betul Bazar', 'Sao Jose do Belmonte', 'Amboahangibe', 'Isser', 'Yellayapalem', 'Bhiwani', 'Ad Dakhla', 'Edmond', 'Lourosa', 'Aslanapa', 'Ilsede', 'Phoenix', 'Negrine', 'Prabhat Pattan', 'Tapaktuan', 'Camalig', 'Upper Montclair', 'Binh Long', 'Trincomalee', 'Yellowknife', 'Turbo', 'Kashaf', 'Rapur', 'Jamiltepec', 'Zaggota', 'Serui', 'Ban Nam Dip Luang', 'Sursee', 'Miyoshidai', 'Chichiriviche', 'Kariat Ben Aouda', 'Tranomaro', 'Torokbalint', 'Eiheiji', 'Fartura', 'Lonate Pozzolo', 'Lixingcun', 'Purwakarta', "Fu'an", 'Havi Bhauar', 'Livinjipuram', 'Sherwood', 'Cayenne', 'Hamme', 'Bao Loc', 'Cameta', 'San Fructuoso de Bages', 'Mawlamyine', 'Yonkers', 'Doudian', 'Orebro', 'Longvic', 'Penaballi', 'Sinzig', 'Jambi', 'Bandundu', 'Poshkent', 'Dhamdaha', 'Springettsbury', 'Savage', 'Iga', 'Narsimlapet', 'Hatwans', 'Marco Island', 'Mallikkundam', 'Loharda', 'Aguada de Pasajeros', 'Jining', 'Dhana', 'Raceland', 'Wroclaw', 'Sao Lourenco da Mata', 'Kiblawan', 'Alagoa Nova', 'Trentham', 'Alofi', 'Missoula', 'Pecanha', 'Atok', 'Paipa', 'Kouroussa', 'Arnavutkoy', 'Ankaran', 'Jorhat', 'Susari', 'Higashi-Hiroshima', 'Incline Village', 'Rafael Calzada', 'Thetford', 'Jelgava', 'Manono', 'Tuncheng', 'Potrerillos', 'Lopon', 'Moline', 'Cornelio Procopio', 'Dali', 'Agropoli', 'Inazawa', 'Korosavada', 'Elne', 'Karur', 'Vasudevanallur', 'Viti', 'Rezina', 'Paju', 'Thohoyandou', 'Enniskillen', 'Bad Fallingbostel', 'Sokobanja', 'Pleven', 'Bang Phongphang', 'Danga', 'Tahuna', 'Belsara', 'Battalgazi', 'Paravada', 'General Belgrano', 'Qingping', 'Jinsha', 'Malmedy', 'Santa Cruz Mulua', 'Lindenberg im Allgau', 'Karpenisi', 'Dondo', 'Viralippatti', 'Tado', 'Cimanggis', 'Barra do Bugres', 'Kudahuvadhoo', 'Badru Khan', 'Samorin', 'Tiruverumbur', 'Taloda', 'Roding', 'Bante', 'La Independencia', 'Basseterre', 'Ati', 'Cavinti', 'Chitembo', 'Ryugasaki', 'Eschborn', 'Vertientes', 'Bobingen', 'Haliyal', 'Sao Gabriel da Cachoeira', 'Dielheim', 'Siirt', 'Oosterend', 'Aich', 'Ilkhchi', 'Labrador City', 'Chioggia', 'Neuquen', 'Upper Allen', 'Itajobi', 'Baltimore', 'Nagra', 'Wegorzewo', 'Sansa', 'Suaita', 'Purral', 'Narwar', 'Denan', 'Kandla Port', 'Koubel-Koundia', 'Airdrie', 'Karumandi Chellipalaiyam', 'Parol', 'San Francisco la Union', 'Kocasinan', 'Sidi Abdallah', 'Merzenich', 'Correntina', 'Saunda', 'Paducah', 'Keve', 'Nandiala', 'Indramayu', 'Qusmuryn', 'Aramangalam', 'Mavorano', 'Koppaka', 'Corroios', 'Thornbury', 'Palm City', 'Sao Felix da Marinha', 'Giffnock', 'Cardenas', 'Kyaunggon', 'Zhoukou', 'Red Deer', 'Constanza', 'Libertador General San Martin', 'Ducheng', 'Orsova', 'Bures-sur-Yvette', 'Tixtla de Guerrero', 'Tlachichilco', 'Kangan', 'Lyndhurst', 'Wexford', 'Chitipa', 'Palagonia', 'Olsberg', 'Antongomena-Bevary', 'Wayaobu', 'Irosin', "Sa'ada", 'Lamwo', 'Bibemi', 'Bartow', 'Floresti', 'Kemin', 'Baramandougou', 'Baybay', 'Nansang', 'Frenstat pod Radhostem', 'Mizhhiria', 'Zelino', 'Beni Abbes', 'Tarkwa', 'Ezhipram', 'Vicente Noble', 'Hitachi-ota', 'Muhembo', 'Ksar el Hirane', 'Lauchhammer', 'Agboville', 'Big Bear City', 'Oradea', 'Mabitac', 'Trashigang', 'An Nasiriyah', 'Belo Oriente', 'Pampas', 'Schifferstadt', 'Guatemala City', 'Pithampur', 'I`zaz', 'Poranki', 'Arcola', 'Perugia', 'Betmangala', 'Haqqulobod', 'Wujiaying', 'Ankazoabokely', 'Agrestina', 'Rodenbach', 'Nowshahr', 'Sierning', 'Santa Maria de Jesus', 'Madalena', 'Tarrafal', 'Mitoma', 'Roseaux', 'Wichita', 'Grudziadz', 'Zarnesti', 'Findikli', 'Sarnen', 'Irpin', 'Rouen', 'Qishe', 'Dammartin-en-Goele', 'Hazel Dell', 'Charikar', 'Ostrzeszow', 'Tunari', 'Kingri', 'La Reina', 'Boriziny', 'Nangavaram', 'Jahangirpur Salkhani', 'Mangobandar', 'Gustavia', 'Alangudi', 'Naumburg', 'Sarikishty', 'Dania Beach', 'Budaka', 'Tillmans Corner', 'Ramotswa', 'Menges', 'Dasuya', 'Darreh Shahr', 'Perumkulam', 'Moorreesburg', 'El Ejido', 'Lingayen', 'Fontana', 'Ubaporanga', 'Shoo', 'Heroica Matamoros', 'Bayog', 'Marikal', 'Da Nang', 'Lampertheim', 'Basbiti', 'Ramonville-Saint-Agne', 'Kuwait City', 'Manakana', 'Mandra', 'Tehata', 'Stein bei Nurnberg', 'Crigglestone', 'Brofodoume', 'Trnava', 'Thimphu', 'Gurmia', 'Cacem', 'Alahina', 'Mongo', 'Khayelitsha', 'Sadri', 'Fukui', 'Dharmaj', 'Kaatsheuvel', 'Dammarie-le-Lys', 'Matsakabanja', 'Ait Ouaoumana', 'Kirovsk', 'Savur', 'Baranivka', 'Kodaikanal', 'Lantana', 'Shache', 'Jacobabad', 'President Quirino', 'Kozienice', 'Coatetelco', 'Tha Muang', 'Fanlu', 'Ejutla de Crespo', 'Senhora da Hora', 'Bhadarwah', 'Almazora', 'Americus', 'Schaesberg', 'Liulin', 'Shahrak-e Ja`fariyeh', 'Pathankot', 'Kirkkonummi', 'Koyulhisar', 'Adamankottai', 'Ketama', 'Irineopolis', 'Kavali', 'Sambalhera', 'Nanjian', 'San Juan de Uraba', 'Tola Khadda', 'Mulheim', 'Berhoum', 'Hawaiian Paradise Park', 'Marg`ilon', 'Eilendorf', 'Oostzaan', 'Evanston', 'Mantasoa', 'Ixtlahuaca', 'Capao da Canoa', 'Baker', 'Altintas', 'Taipei', 'Siktiahi', 'Ramabitsa', 'Tarmount', 'Haciqabul', 'Ellore', 'Palangkaraya', 'Mugumu', 'Ed Damer', 'Steinfurt', 'Malthone', 'Villagarzon', 'Elizabeth City', 'La Farlede', 'Binaqadi', 'Barquisimeto', 'Simaria', 'Castel Volturno', 'Frickenhausen', 'Ait Ikkou', 'Gauravaram', 'Ubeda', 'Simcoe', 'Oakley', 'Capas', 'Jixian', 'Muthabana', 'Masalli', 'Leganes', 'Beidaying', 'Hyeres', 'Vinnamala', 'Chatteris', 'Parit Buntar', 'Khelil', 'Igaci', 'Raytown', 'Janakpur', 'Ilford', 'Bojnurd', 'Toyota', 'Orte', 'Saint-Brieuc', 'The Nation / La Nation', 'Ghulja', 'Lagoa do Mato', 'Mohelnice', 'Lavumisa', 'Harda Khas', 'Pendlebury', 'Sonakhal', 'Unagatla', 'Sandhurst', 'Villa Krause', 'Sherghati', 'Supaul', 'Busovaca', 'Luathaha', 'Liversedge', 'Liberty', 'Yian', 'Sion', 'Itamogi', 'Berrouaghia', 'Bharuch', 'Waunakee', 'Adel', 'Crestline', 'Savissivik', 'Meiganga', 'Sannai', 'Cuizhuangzi', 'Cold Lake', 'Agrigento', 'Rio Branco do Sul', 'Carneirinho', 'Viborg', 'Munsan', 'Big Lake', 'Rute', 'Mela Gudalur', 'Mbala', 'Mateus Leme', 'Kakamega', 'Al Atarib', 'Bad Harzburg', 'Tigbauan', 'Phak Hai', 'San Juan de la Costa', "Cornate d'Adda", 'Bewdley', 'Nyimba', 'Vedelago', 'Asaba', 'Bozyazi', 'Tuxtla', 'Alvin', 'Castiglion Fiorentino', 'Umag', 'Taquaritinga do Norte', 'Ghoti Budrukh', 'Relangi', 'Woodward', 'Santiago de Tolu', 'Marcali', 'Stanley', 'Dulay` Rashid', 'Five Forks', 'Jandaia do Sul', 'Culfa', 'Suzano', 'Motomachi', 'Ahermoumou', 'Jose Cardel', 'Marsella', 'Tyngsborough', 'Eijsden', 'Checy', 'Hamidiye', 'Mocimboa da Praia', 'Cossato', 'Adjohon', 'Jamapa', 'Dahana', 'Banfora', 'Rabo de Peixe', 'Caloto', 'West Lampeter', 'Princeton Meadows', 'Whitwick', 'Siversk', 'Mpraeso', 'Borovskoy', 'Cockermouth', 'Mogadishu', 'Traipu', 'Frechen', 'Cabras', 'Enkakad', 'Bocas de Satinga', 'Tomares', 'Umbauba', 'Itiki', 'Nordhorn', 'Macetown', 'Quilengues', 'Chiclana de la Frontera', 'Lagoa Seca', 'Zielonka', 'Xinqing', 'Pedro Carbo', 'Vespasiano', 'Masis', 'Prichard', 'Sandomierz', 'Fulwood', 'Bad Liebenzell', 'Zvenyhorodka', 'Bellevue', 'Gudgeri', 'Srikrishnapur', 'Madgaon', 'Piedecuesta', 'Ibrahimpatnam', 'Mandrosonoro', 'Tlumach', 'Citrus Springs', 'Cabestany', 'Metuchen', 'Baxiangshan', 'Bithauli', 'Araguaina', 'Ban Piang Luang', 'Kamrej', 'Chicaman', 'Kishiwada', 'Tabua', 'San Rafael del Sur', 'Cibolo', 'Le Relecq-Kerhuon', 'Boca Raton', 'Amecameca de Juarez', 'Radstock', 'Launceston', 'Gorantla', 'Babar', 'Jammalamadugu', 'Gordes', 'Vich', 'Arroyo Grande', 'Dvur Kralove nad Labem', 'Novoukrainka', 'Gurgunta', 'Oued Cheham', 'Salinopolis', 'Oak Lawn', 'Fatehgarh Churian', 'San Alberto', 'Wetaskiwin', 'San Jose Pinula', 'Montataire', 'Aduku', 'Planken', 'Trussville', 'Sidi Lmokhtar', 'Machchand', 'Moletai', 'Grand-Lahou', 'Ergoldsbach', 'Sao Joao dos Patos', 'Cajapio', 'Sestao', 'Bayshore Gardens', 'Jaragua', 'Canuelas', 'Campo do Meio', 'Chada', 'Bibala', 'Suining', 'Granby', 'Ostend', 'Split', 'Clark', 'Baikunthapur', 'Eppingen', 'Bara Khurd', 'Las Vigas de Ramirez', 'Sharm ash Shaykh', 'Sainthia', 'Lowestoft', 'Kaldenkirchen', 'Acailandia', 'Rosita', 'Antardipa', 'Ihaddadene', 'Kennett', 'Houlong', 'Sriramapuram', 'Nava Raipur', 'Zielona Gora', 'Biella', 'Niceville', 'Koscierzyna', 'Lonkly', 'Miramas', 'Zubin Potok', 'Sao Vicente Ferrer', 'Fouka', 'Rasak', 'Nyzhnia Krynka', 'Bijni', 'West Bend', 'Kurumul', 'Golbaf', 'Carencro', 'Mercer Island', 'Lai', 'Rimini', 'Labutta', 'Goubellat', 'Yakymivka', 'Fianarantsoa', 'Zhengding', 'Vittoria', 'Tactic', 'Arrecife', 'Parkes', 'Ibicoara', 'Gidi', 'Afumati', 'Dongning', 'Tameslouht', 'Ashwaubenon', 'Alto Parana', 'Mattanur', 'Chinchali', 'Monte Siao', 'Kallanai', 'Florania', 'Ghorahi', 'Uchqurghon Shahri', 'Tres Isletas', 'Bambara-Maounde', 'Bela Vista de Goias', 'Nova Granada', 'Kesap', 'Beldibi', 'Hyderabad', 'Otaki', 'Bhuban', 'Stansbury Park', 'Halgur', 'Arda', 'Dalupura', 'Maraveh Tappeh', 'Kankandighi', 'Chornomorske', 'Nikopol', 'Shinagawa-ku', 'Zeralda', 'St. Michael', 'Fortaleza dos Nogueiras', 'Nansan', 'Edattala', 'Phu Tu Son', 'Cipo', 'Bintulu', 'Amayan', 'Rainbow City', 'Dougabougou', 'Levakant', 'Allada', 'Tabursuq', 'Hoshangabad', 'Gerli', 'Bardsir', 'Sarsawa', 'Manicaragua', 'Sint-Pieters-Leeuw', 'Pescantina', 'Nimule', 'Ambovombe', 'Navsari', 'Wilbraham', 'Keene', 'Novska', 'Vallabh Vidyanagar', 'Kifri', 'Gornji Petrovci', 'Bareggio', 'Tosa', 'Kafr Shukr', 'Konotop', 'Frei Miguelinho', 'Pirapora do Bom Jesus', 'Nemocon', 'Ribeirao Bonito', 'Kundli', 'Zola Predosa', 'Bileca', 'Vargem Grande', 'Martinopolis', 'Madna', 'Colonial Park', 'Fecamp', 'Sesto Fiorentino', 'Bouzina', 'Bischofswerda', 'Saulkrasti', 'Margram', 'Zapresic', 'Muriyad', 'Ayun', 'Harlingen', 'San Juan de los Morros', 'Kozy', 'Kandern', 'Taibao', 'Savonlinna', 'Fountainebleau', "L'Ile-Perrot", 'Zomin Shaharchasi', 'PortoAlexandre', 'Nes Ziyyona', 'Watervliet', 'Bonito de Santa Fe', 'Murgod', 'Aurisina', 'Kochlowice', 'Lierre', 'Wellesley', 'East Greenwich', 'Jacarau', 'Nawa', 'Upper St. Clair', 'Qalyub', 'Darbhanga', 'Balakliia', 'Makulubita', 'Giesen', 'Sarkeghat', 'Paiania', 'Thandla', 'Gravata', 'Ad Dis', 'Nawada Gobindganj', 'Mosrah', 'Ballwin', 'Rehovot', "Chateau-d'Olonne", 'Barendrecht', 'Kihei', 'Forbes', 'Van Wert', 'Angola', 'Sokuluk', 'Gadarpur', 'Beji', 'Palmela', 'Bad Lippspringe', 'Al Badrashayn', 'Angelim', 'Higashimurayama', 'Tangshan', 'Fritzlar', 'Hobyo', 'Erumapalaiyam', 'Kahului', 'Varkkallai', 'Sayula', 'Waverly', 'Koranampatti', 'Peristeri', 'Weilheim an der Teck', 'Mustafakemalpasa', 'Babolsar', 'Concepcion Chiquirichapa', 'Yueshanwan', 'Sangeorgiu de Mures', 'Edamon', 'Moston', 'Gabes', 'Heesch', 'Sahjanwa', 'Auburn', 'Sorel-Tracy', 'Sagua de Tanamo', 'Wassenaar', 'Sarospatak', 'Sapahi', 'Kangayam', 'Oodweyne', 'Bac Kan', 'Hole Narsipur', 'Canmore', 'Mulungu do Morro', 'Fagnano Olona', 'Sawankhalok', 'Odanavattam', 'Tiruvambalapuram', 'Torotoro', 'Kil Valur', 'Koshizuka', 'Kyonpyaw', 'Annaberg-Buchholz', 'Lissone', 'Maracana', 'Paysandu', 'Batu', 'Ciudad Acuna', 'Kanavaikuli', 'Lahaina', 'Ma`an', 'Comala', 'Xiongzhou', 'Coalcoman de Vazquez Pallares', 'Benemerito', 'Kuchinarai', 'Lugo', 'Kibuku', 'Pirajui', 'Fountain Valley', 'Zentsujicho', 'Shahreza', 'Dragomer', 'Puyehue', 'Floridablanca', 'Bitterfeld', 'Bozkurt', 'Sewa', 'Soreang', 'Varedo', 'Narot Mehra', 'Rogasovci', 'Nukan', 'Skipton', 'Bidur', 'Walldurn', 'Ipero', 'Aritao', 'Breckerfeld', 'Manakayi', 'Narapalli', 'Waraseoni', 'Nogent-sur-Oise', 'Itumbiara', 'Ban Mae Sam Laep', 'El Retorno', 'Dunfermline', 'Tamaraikkulam', 'Dingli', 'Ouadda', 'Hillview', 'Eceabat', 'Yangmei', 'Mamoudzou', 'Dietikon', 'Siauliai', 'Pierrelatte', 'Hamada', 'Hooper', 'Sakuho', 'Beijing', 'Limoeiro do Norte', 'Ladario', 'Karuveppampatti', 'Sheffield', 'Aguas de Lindoia', 'Boac', 'Mende', 'Gohuma Bairia', 'Riscani', 'Koroth', 'Sibenik', 'Avalurpet', 'Roberval', 'Madinat as Sadis min Uktubar', 'Rosoman', 'Porto Alegre', 'Brugherio', 'Brody', 'Keuruu', 'Lehre', 'Zawiercie', 'Dalyoni Bolo', 'Loay', 'Kolonia', 'Bussy-Saint-Georges', 'Oulad Imloul', 'Lagoa Formosa', 'Wurselen', 'Kestel', 'Orhei', 'Lubuagan', 'Quixere', 'Scottdale', 'Caparrapi', "Bruay-sur-l'Escaut", 'Nova Ponte', 'Rostam Kola', 'Asuncion Nochixtlan', 'Colton', 'Little River', "Jian'ou", 'Diekirch', 'Fateha', 'Cimitarra', 'Hombal', 'Lindenhurst', 'Ouled Djellal', 'Comanesti', 'Bousso', 'Streamwood', 'Roulia', 'Duzce', 'Asuke', 'Libona', 'Rellivalasa', 'Ukwa', 'Yavatmal', 'Greasley', 'Sivas', 'Hukumati Gizab', 'Balikpapan', 'Qincheng', 'Hongfengcun', 'Nakai', 'Oued Zem', 'Pumalakkundu', 'Umaria', 'Sao Lourenco da Serra', 'Amawom', 'Barra do Mendes', 'Aklim', 'Sambrial', 'Bunkeflostrand', 'Taskent', 'Rakitovo', 'Nerima', 'Waxhaw', 'Hoogezand', 'San Vicente de Tagua Tagua', 'Recreio', 'Lope de Vega', 'Hejin', 'Margherita', 'Bend', 'Strijen', 'Manin', 'Nogi', 'Tilothu', 'Manching', 'Masindi Port', 'Sumba', 'Amizmiz', 'Canada de Gomez', 'Siddhapur', 'Ankaramena', 'Aleppo', 'Sansanding', 'Tirumakudal Narsipur', 'Gumla', 'Usa', 'At Tawahi', 'Koumantou', 'De Panne', 'Kukes', 'Lenoir City', 'Tiruppalaikudi', 'Canyon Lake', 'Nanpingcun', 'Gubbio', 'Jasaan', 'Galaosiyo Shahri', 'Mount Sinai', 'Tulshia', 'Sankt Johann im Pongau', 'Sandi', 'Banda', 'Kilkunda', 'Solymar', 'Shenzhou', 'As Sallum', 'Baohezhuangcun', 'Gogui', 'Aarschot', 'Kadappuram', 'Sorkheh', 'Gawler', 'La Falda', 'Masqat', 'Bazeh Kalagh', 'Forfar', 'Kelafo', 'Conde', 'Porciuncula', 'Palanas', 'Cuyo', 'Las Cabezas de San Juan', 'Kranidi', 'New Orleans', 'Mettupalaiyam', 'Kati', 'Carregado', 'Hornsby Bend', 'Salvatierra', 'Begamganj', 'Cishan', 'Draa Ben Khedda', 'Eshtehard', 'Sidi Daoud', 'Las Nieves', 'Nea Alikarnassos', 'San Francisco Ixhuatan', 'Luodong', 'Waihee-Waiehu', 'Rosolini', 'Tundla', 'Ash Shihaniyah', 'SeaTac', 'Daigo', 'Solana Beach', 'Kittanapalli', 'Guspini', 'Rahimyar Khan', 'Antanambao', 'New Silksworth', 'Bixby', 'Sahnaya', 'Kudal', 'Guaira', 'Ibaiti', 'Iksan', 'Muttamtura', 'Darnetal', 'Karma', 'Leyland', 'Strzelin', 'Tam Diep', 'Tagaytay', 'Finale Ligure', 'Cottonwood', 'Cutlerville', 'Santa Cruz de Bezana', 'Frydlant nad Ostravici', 'Aracuai', 'Huangyadong', 'Herzele', 'Sakarya', 'Extremoz', 'Bauta', 'Kela Khera', 'Castelvetrano', 'Kalisz', 'Qorovul', 'Sanchez', 'Tamezmout', 'Boundji', 'Bellinzago Novarese', 'Tungi', 'Ichenhausen', 'Sao Sebastiao do Uatuma', 'Inver Grove Heights', 'Villefranche-de-Rouergue', 'Uad Damran', 'Rishon LeZiyyon', 'Nellimarla', 'Bountiful', 'Sahaswan', 'Kamonkoli', 'Santo Augusto', 'Hammersmith', 'Breyten', 'Northwich', 'Nagambhotlapalem', 'Montabaur', 'Ammavarikuppam', 'Amasya', 'Bhankarpur', 'Santa Rosa del Penon', 'Anshun', 'Feidh el Botma', 'Abdurahmoni Jomi', 'Rionero in Vulture', 'Bundi', 'Almargem', 'McNair', 'East Palo Alto', 'Las Flores', 'Carmo da Mata', 'Wilkau-Hasslau', 'Barentin', 'Outat Oulad Al Haj', 'Chemax', 'Ayapango', 'Ramacca', 'Oton', 'Ghoswari', 'Al Fallujah', 'Cheviot', 'Alnavar', 'Al Qasr', 'Santo Domingo Suchitepequez', 'Sulleru', 'Barahpur', 'Chapalamadugu', 'Ar Rutbah', 'Areia Branca', 'Avenel', 'Hoover', 'Velykodolynske', 'Beladi', 'Changchun', 'Linden', 'Hillside', 'Taxco de Alarcon', 'Kathri', 'Belemoka', 'San-Pedro', 'Ban Huai Hin', 'Olovo', 'Sundekuppam', 'El Alto', 'Totonicapan', 'Mujiayu', 'Makhtal', 'Ikoma', 'Sylhet', 'Passo de Camarajibe', 'Bad Driburg', 'Rangapara', 'Komarom', 'Bakhmach', 'Qionghu', 'Ecatzingo', 'Pallipram', 'Taicheng', 'Datu Piang', 'Tlaltetela', 'Preveza', 'Cambuci', 'Werkendam', 'Ipira', 'Gosport', 'Grytviken', 'Rondonopolis', 'Phra Phutthabat', 'La Puebla de Cazalla', 'Leposaviq', 'San Juan Cotzocon', 'Sederot', 'Nidiyanga', 'Makamba', 'Resistencia', 'Rio Blanco', 'Monte Escobedo', 'Vasylkiv', 'Eyl', 'Fenoarivo', 'Pokrov', 'Bang Sao Thong', 'Granite Bay', 'Fortul', 'Lenoir', 'Trent Hills', 'Le Petit-Quevilly', 'Tifra', 'Jaroslaw', 'Perrysburg', 'Ericeira', 'Maniago', 'Al Qusiyah', 'Norridge', 'Palamos', 'Pochampalli', 'Buni', 'Preetz', 'Anaconda', 'Chinde', 'Bakouma', 'Jakar', 'Tavros', 'Doranala', 'Mayiladi', 'Villecresnes', 'Phulgachhi', 'Pen', 'Somersworth', 'Igrejinha', 'Umburetama', 'Kakumiro', 'Arauquita', 'Schwieberdingen', 'Berkeley', 'Alquizar', 'Adigappadi', 'Hardinxveld-Giessendam', 'Baindur', 'Beko', 'Belley', 'Mousoulou', 'Ome', 'Finnsnes', 'High Point', 'Budenheim', 'Temara', 'Sulejowek', 'Grecia', 'Deoghar', 'Macerata', 'Monghyr', 'Hullhorst', 'Tervuren', 'On Top of the World Designated Place', 'Isernhagen-Sud', 'Parner', 'Llanera', 'Tsurugashima', 'Dhamaun', 'Porto Valter', 'Loyalsock', 'Guntersville', 'Gandorhun', 'Zella-Mehlis', 'Pazin', 'Bunyan', 'Toyama', 'Lamrasla', 'Huliyar', 'Springville', 'Orkelljunga', 'Pillutla', 'Paruthur', 'Zahedan', 'Baskil', 'Morteza Gerd', 'Remchi', 'Chiaravalle', 'Grunstadt', 'Sofifi', 'Londrina', 'Mazatlan', 'Khathjari', 'Chiang Klang', 'Wulfrath', "Qal'at Mgouna", 'Irigny', 'Senahu', 'Perenchies', 'Vero Beach', 'Ocho Rios', 'Castanhal', 'Soe', 'My Drarga', 'Boxtel', 'Hoyo de Manzanares', 'Cruz Machado', 'Morges', 'Guano', 'Anantapalle', 'Tvrdosin', 'Caojia', 'Plaza Huincul', 'Ranillug', 'Yonezawa', 'Parnu', 'Zerizer', 'Rabta', 'Suginami-ku', 'San Jose del Guaviare', 'Khaira Tola', 'Ain Oussera', 'Badvel', 'Wijnegem', 'Frome', 'Theux', 'Caimbambo', 'Wang Saphung', 'Dabhoi', 'Gebre Guracha', 'Basatpur', 'Zambrano', 'Puttai', 'Brainerd', 'San Salvador El Seco', 'Shostka', 'Tabocas do Brejo Velho', 'Tabriz', 'Bhatpuri', 'Arpajon', 'Curti', 'Hyde', 'Cayetano Germosen', 'Cafelandia', 'Mnagueur', 'Rygge', 'Landskrona', 'Kokubunji', 'Resende', 'Aguasay', 'Painan', 'Komagane', 'Malhada de Pedras', 'Zhutian', 'Baabda', 'Jackson', 'Qoorlugud', 'Fraserpet', 'La Crau', 'Pioltello', 'Araure', 'Ilsenburg', 'Yanhewan', 'Narayanpur', 'Holtsville', 'Vimmerby', 'Binzhou', 'Ligang', 'Quillota', 'Montmagny', 'Malaga', 'Juatuba', 'Ascope', 'Effia-Kuma', 'Bellwood', 'Encarnacion', 'Mannur', 'Puerto Santander', 'Kendraparha', 'Wewak', 'Sendjas', 'San Pedro Pinula', 'Kampong Thom', 'Mpanda', 'Trebaseleghe', 'Kerkyra', 'Rogatec', 'Katigang', 'Yushu', 'Saint-Hyacinthe', 'Capelle aan den IJssel', 'Eisenach', 'Reinfeld', 'Idri', 'Kunnattur', 'Cakung', 'Rayadrug', 'Baoshan', 'Laramie', 'El Meghaier', 'Naperville', 'Macatuba', 'Katalpur', 'Carpina', 'Blyth', 'Veseli nad Moravou', 'Burhia Dhanghatta', 'Svitlodarsk', 'Lugait', 'Rancho Cucamonga', 'Tabara Arriba', 'Spondon', 'Marienberg', 'Middelburg', 'Huyton', 'Sroda Slaska', 'Woomera', 'Bethalto', 'Concepcion de Ataco', 'Fagersta', 'Halfmoon Bay', 'Estancia', 'Makow Mazowiecki', 'Malangas', 'Pittsburg', 'Adukam', 'West Melbourne', 'Haverstraw', 'Lousa', 'Kulpsville', 'Plasnica', 'Charuanwan', 'Foca', 'Gombe', 'Cordeiropolis', 'Chakicherla', 'Licey al Medio', 'Conceicao das Alagoas', 'Colonia', 'Yisuhe', 'Vignola', 'San Martin Sacatepequez', 'Mbanza-Ngungu', 'Oppatavadi', 'Eitorf', 'Ubach-Palenberg', 'Ranibennur', 'Aklvidu', 'Capoeiras', 'Mucambo', 'Palmares do Sul', 'Kondakomarla', 'Niteroi', 'Sai Ngam', 'City Bell', 'Biougra', 'White', 'Kumru', 'Alcala de Henares', 'Kalawit', 'Porto Xavier', 'Balancan', 'Vodice', 'Zouping', 'Nagawaram', 'Mareth', 'Kakraul', 'Horten', 'Padinjarebagam', 'Karattuppalaiyam', 'Redding', 'Wolgast', 'Ambrolauri', 'Dhanot', 'Nilakkottai', 'Abaete', 'Veendam', 'Oslo', 'Paiganapalli', 'White Center', 'Taminango', 'Constitucion', 'Begijnendijk', 'Baragua', 'Inverigo', 'Belsandi Tara', 'Rumst', 'Pinner', 'Reinheim', 'Liancourt', 'Villa Canas', 'Metlika', 'Balderton', 'Gauripur', 'Yamada', 'Cardona', 'Sartalillo', 'Odaiyakulam', 'Laguna Paiva', 'Carolina', 'Bhilavadi', 'Al Qardahah', 'Campo Bom', 'Laichingen', 'Montero', 'Nagoya', 'Portici', 'Sherpur', 'Baiheqiao', 'Saint-Chamas', 'Bloemfontein', 'Songyang', 'Mamfe', 'Bijiao', 'Tongobory', 'Barra do Dande', 'Revere', 'Celtik', 'Carigara', 'Laindon', 'Chapa de Mota', 'Djanet', 'Vinings', 'Saltillo', 'Gueoul', 'Parnagua', 'Burton upon Trent', 'Bocaiuva do Sul', 'Nova Xavantina', 'Esteli', 'Sremska Kamenica', 'Pennsauken', 'Bad Wurzach', 'Banbury', 'Nkowakowa', 'Koloti', 'Chaita', 'Las Parejas', 'Guerande', 'Ski', 'Suphan Buri', 'Cheyenne', 'San Isidro de Lules', 'Cisternino', 'Rajbalhai', 'San Ferdinando di Puglia', 'Nosivka', 'Singhwara', 'Ankarabato', 'Dod Ballapur', 'Wattignies', 'Lebu', 'Elangunnapuzha', 'Korneuburg', 'Saint Andrews', 'Exmouth', 'Menaa', 'Bungotakada', 'Isagarh', 'Bilecik', 'Oulad Bou Rahmoun', 'Manambondro', 'Port Morant', 'Townsend', 'Ca Mau', 'Kostolac', 'Presov', 'Valle', 'Sahambala', 'Chapala', 'Tepechitlan', 'Uruacu', 'Bangued', 'Renton', 'Ummannur', 'Lalmanirhat', 'Nurkot', 'Brownfield', 'San Miguel del Padron', 'Toualet', 'San Antonio Aguas Calientes', 'Los Alamitos', 'Jilib', 'San Mateo Ixtatan', 'Anapolis', 'Burhanpur', 'Yokotemachi', 'Torrinha', 'Mouzaia', 'Harshin', 'Cirencester', 'Bentley', 'Man', 'Yezhi', 'Kota', 'Sovicille', 'Otawara', 'Zihuatanejo', 'Charata', 'Paduma', 'Otake', 'Guanta', 'Aduru', 'Mannarai', 'Youghal', 'Haacht', 'Stowmarket', 'Fourou', 'Barari', 'Libanggaon', 'Amari', 'Lucknow', 'Babra', 'Idanre', 'Bedigoazon', 'Acari', 'Burslem', 'Lohara', 'Beersel', 'Polillo', 'Librazhd-Qender', 'Kumalarang', 'Tzitzio', 'Bisceglie', 'Karayilan', "Samarra'", 'Bafwasende', 'North Arlington', 'Souq Sebt Oulad Nemma', 'Bhalswa Jahangirpur', 'Wolfsburg', 'Eningen unter Achalm', 'Szubin', 'Varadarajampettai', 'Zelenodolsk', 'Karagwe', 'Vissannapeta', 'Cheramkod', 'Sanandaj', 'Karben', 'Bueng Khong Long', 'Jaboatao', 'Vrilissia', 'Cumaral', 'Akil', 'Ap Da Loi', 'Normanton', 'Trois-Rivieres', 'Almirante', 'Esperalvillo', 'Kongupatti', 'Quirinopolis', 'Lask', 'Sainte-Luce-sur-Loire', 'Castel San Giorgio', 'Bang Phae', 'Chinnayagudem', 'Arsin', 'Numata', 'Sab` al Bur', 'Cumaru', 'Tecamac', 'Rodelas', 'Seyah Cheshmeh', 'Kashan', 'Nanzhangcheng', 'Kendari', 'Jurado', 'Monrovia', 'Peshawar', 'Guacharachi', 'Kloten', 'Bandirma', 'Quevedo', 'Adekar Kebouche', 'Huinca Renanco', 'Dakpatthar', 'Chicoutimi', 'Baiyan', 'Pimenteiras', 'Las Brenas', 'Kusnacht', 'Burnie', 'Ercolano', 'Kroonstad', 'Modena', 'Kalna', 'Bonneville', 'Bellair-Meadowbrook Terrace', 'Jiyyammavalasa', 'Kisai', 'Vadodara', 'Nagtala', 'Kandiyankovil', 'Lorena', 'Taphan Hin', 'Juneau', 'Iati', 'Niksar', 'Roy', 'Kapoeta', 'Samdhin', 'Arafat', 'Kornepadu', 'Ait Faska', 'Olevsk', 'Rantoul', 'Glencoe', 'Stocksbridge', 'Tela', 'Chethakal', 'Canatlan', 'Guia de Isora', 'Veglie', 'Taguig City', 'Qillin', 'Harsinghpur', 'Sitangkai', 'Michigan City', 'Nagaizumi', 'Guariba', 'El Seibo', 'Woolton', 'Tainai', 'Mukher', 'Cartama', 'Suknadanga', 'Philippsburg', 'Ladysmith', 'Atherton', 'Baeza', 'Yazikonak', 'Santa Maria Texmelucan', 'Ban Mae Kaluang', 'Khiriawan', 'Eschen', 'Tobetsu', 'Nova Vicosa', 'Ankarongana', 'Shahedshahr', 'Sonoita', 'Kudligi', 'Saint-Felicien', 'Bukama', 'Nilgiri', 'Zwevegem', 'Muang Sing', 'Pinhal', 'Saranga', 'Ebelo', 'Toli', 'Cataguases', 'Miyaki', 'Caracol', 'Amol', 'Basdeopur', 'Cevicos', 'Charlotte', 'Canavieiras', 'Deva', 'North Shields', 'Icaraima', 'Hoskins', 'Kamalshahr', 'Guapiles', 'Araguari', 'Bouhlou', 'Castilla La Nueva', 'Azemmour', 'Juvignac', 'Medjedel', 'Goksun', 'San Justo', 'Iyo', 'Teploklyuchenka', 'Yaounde', 'Ain Kercha', 'Treviglio', 'Novi Pazar', 'San Lazzaro di Savena', 'Nakhon Ratchasima', 'Alcantara', 'Bull Mountain', 'Oldeani', 'Chardonnieres', 'Chini', 'Piketberg', 'Baranoa', 'Buldon', 'Akora', 'Santa', 'Olivet', 'Kouinine', 'Vanipenta', 'Shahr-e Babak', 'Porto de Moz', 'The Hills', 'Kaharlyk', 'Manpaur', 'G`azalkent', 'Hodmezovasarhely', 'Oak Creek', 'Kladanj', 'Merouana', 'Hiratsuka', 'Alvares Machado', 'Lagoa Dourada', 'Mishan', 'Puerto Deseado', 'Befandriana Atsimo', 'Supia', 'Avon Lake', 'Rialto', 'Renca', 'Mesra', 'Rognac', 'Pongode', 'Birkhadem', 'Talacogon', 'Turhapatti', 'Szigetszentmiklos', 'Onoto', 'Norala', 'Sorala', 'Diallassagou', 'Handa', 'Wieliczka', 'Tilougguit', 'Jaimalpura', 'Allentown', 'Crissiumal', 'Varberg', 'Abidjan', 'Cesky Tesin', 'Burgkirchen an der Alz', 'Nagina', 'Shahpur Baghauni', 'Itarhi', 'Bad Pyrmont', 'Raksaha', 'Pandireddigudem', 'Sinfra', 'Bayonne', 'Bahir Dar', 'Bobo-Dioulasso', 'Tubaran', 'Chihuahua', 'Hun', 'Rexburg', 'Kherson', 'Harad', 'Saint-Fons', 'Iazizatene', 'Sao Pedro do Sul', 'Tirmaigiri', 'Otumba', 'East Cleveland', 'Bankoumana', 'Goiatins', 'Kushtagi', 'Patori', 'Bertrix', 'Palanan', 'Achaljamu', 'Elanad', 'Mahmud-e Raqi', 'Bougou', 'American Fork', 'East Barnet', 'Mangrauni', 'Raghunathpur', 'Ierapetra', 'Crvenka', 'Mentor', 'Fidirana', 'Sangareddi', 'Pflugerville', 'Kupiskis', "Vil'nyans'k", 'Sonipat', 'Zestaponi', 'Goffstown', 'Lengshuijiang', 'Scottsdale', 'Rosemont', 'Garsekurti', 'Collierville', 'Bulaevo', 'Tosagua', 'Burzaco', 'Silea', 'Roosendaal', 'Tuusula', 'Turnu Magurele', 'An Nabk', 'Sasebo', 'San Vicente de Castellet', 'Andranomenatsa', 'Tbilisi', 'Postojna', 'Kessel-Lo', 'Arnsberg', 'Itarare', 'Mazeikiai', 'Pujehun', 'Vicovu de Sus', 'Okayama', 'Sarauli', 'Lakeville', 'Dashtigulho', 'Itapolis', 'Al Hazm', 'Hammelburg', 'Berastagi', 'Orlu', 'Castanet-Tolosan', 'Lumbayanague', 'Oeiras do Para', 'Whitney', 'Tubara', 'Santa Maria Madalena', 'Salinas de Hidalgo', 'Sangonera la Verde', 'Noisy-le-Sec', 'Glenview', 'Bern', 'Song Cau', 'Dyer', 'Chabal Kalan', 'Segala Mba', 'Upper Hutt', 'Massigui', 'Nalgonda', 'Terra Boa', 'Svrljig', "'Ain Tellout", 'Bellefontaine Neighbors', 'Sahibpur Kamal', 'Nieuwleusen', 'Acarlar', 'Chuo', 'Hellesdon', 'Kotor', 'Montepulciano', 'Emstek', 'Daye', 'Kona', 'Saint-Genis-Pouilly', 'Puerto Rico', 'Sitamau', 'Anguera', 'North Lebanon', 'Pesqueira', 'Guarei', 'Pendurti', 'Falls', 'Pinillos', 'Sighetu Marmatiei', 'Bangui', 'Tchitado', 'Tanga', 'Kesamudram', 'Narayangarh', 'Dapitan', 'Kaithahi', 'Saint-Cyr-sur-Mer', 'Jandaira', 'Bedum', 'Tomatlan', 'Pirapemas', 'Bemahatazana-Belobaka', 'Poblacion', 'El Cua', 'Maldonado', 'Pando', 'Madappalli', 'Cumru', 'Al Mahwit', 'Chake Chake', 'Oria', 'Zimnicea', 'Kivisto', 'Bumahen', 'Wichian Buri', 'Carrascal', 'Pedro Betancourt', 'Hire Megalageri', 'Ternate', 'Saoner', 'Itaituba', 'Vohitrafeno', 'Caluula', 'Muggia', 'Aizenay', 'Khokha', 'Ipokia', 'Venmani Padinjara', 'Siklos', 'Yangshuwa', 'Kanekallu', 'Golbey', 'Gengzhuangqiaocun', 'Gustavsberg', 'Parabiago', 'Ambohibary', 'Valladolid', 'Matamey', 'Bisee', 'Tete', 'Vreden', 'Toribio', 'Fucecchio', 'Zangareddigudem', 'Ceglie Messapico', 'Licinio de Almeida', 'Ad Dabbah', 'Sapian', 'Naranjo', 'Wollaston', 'Celaya', 'Dire', 'Gajiginhalu', 'Mulug', 'Upper Makefield', 'Rodriguez', 'Bocoio', 'Thalwil', 'Carate Brianza', 'Bondada', 'Ayagoz', 'Xiada', 'Vohburg an der Donau', 'Mansala', 'Rafha', 'Anastacio', 'Nove Mesto na Morave', 'Ramechhap', 'Ambinanindrano', 'Leibnitz', 'Alvaraes', 'Baure', 'Sohano', 'Tunja', 'Goriar', 'Msambweni', 'Aibak', 'Penugolanu', 'Castagneto Carducci', 'Ecclesall', 'Dunmore', 'North Londonderry', 'Carnot-Moon', 'Pau Brasil', 'Taka', 'Sihor', 'Terra Nova', 'Choachi', 'Tenri', 'Port Louis', 'Laoag', 'San Martin de la Vega', 'Nguigmi', 'Bendapudi', 'Eminabad', 'Aldenham', 'Vianen', 'Nagasamudram', 'Zemst', 'Khalkhal', 'Mount Clemens', 'Teotlaltzingo', 'Siribala', 'Ligao', 'Dom Pedrito', 'Bushey', 'Namakgale', 'Waiuku', 'Juruaia', 'Sikasso', 'Laplace', 'Gao', 'Casarano', 'Ratangarh', 'Brunssum', 'Castrovillari', 'Tarkeshwar', 'Ruisui', 'Monschau', 'Alat', 'Hetauda', 'Tunglangan', 'Panipat', 'El Chal', 'Bhanumukkala', 'Don Galo', 'Saddiqabad', 'Santo Domingo Petapa', 'Bingol', 'Piraju', 'Camp Verde', 'Chembagaramanpudur', 'Behara', 'Harrison', 'Nagulapadu', 'Knemis Dades', 'Youwangjie', 'Sarab', 'Rustavi', 'Obama', 'Dymka', 'Bijie', 'Tairan Camp', 'Sahoria Subhai', 'Southampton', 'Port Moody', 'Guaratinga', 'Xa Muteba', 'Ahmadnagar', 'Limanowa', 'Dembi Dolo', 'Samrala', 'Aichach', 'Codroipo', 'Nattarampalli', 'Induno Olona', 'Gosen', 'Bevato', 'Elkridge', 'Trissino', 'Siping', 'Bou Hanifia el Hamamat', 'Juayua', 'Tordera', 'Osnabruck', 'Zegoua', 'Ramain', 'Varanasi', 'Miedzyrzecz', 'Bradfordville', 'Arkansas City', 'Sidi Mohamed Lahmar', 'Paravai', 'Baltara', 'Paide', 'Manakara', 'Edassheri', 'Usiacuri', 'Yildirim', 'Keflavik', 'Tsetserleg', 'Paris', 'Gagnoa', 'Xintang', 'Sarni', 'Xico', 'Capitan Bermudez', 'Contenda', 'Garhwa', 'Huayin', 'Basuo', 'Tenjo', 'Cachan', 'Quimperle', 'Misato', 'Pittsgrove', 'Dumont', 'Salford', 'Bontoc', 'Lexington Park', 'Yame', 'Narasaraopet', 'Kesla', 'Ystad', 'Laqraqra', 'Milot', 'Benoy', 'Jalandhar', 'Qahderijan', 'Wake', 'Flowing Wells', 'Santana do Acarau', 'Kurdi', 'Sison', 'Mazamet', 'Montrouis', 'Kalaruch', 'Sarotar', 'Dabuleni', 'Thepaha Raja Ram', 'Glenn Heights', 'Tuxtepec', 'Whickham', 'Targu Frumos', 'Lauriya Nandangarh', 'Taineste', 'Medjez el Bab', 'Sagon', 'Cinisello Balsamo', 'Bartalah', 'Valabhipur', 'Tangdukou', 'Barreirinha', 'Calintaan', 'Binan', 'Rampur', 'Malatya', 'Clemson', 'Wyszkow', 'Carmiano', 'Yeovil', 'Cajobi', 'Yabuki', 'Dorud', 'Marrupa', 'Maitland', 'Jahrom', 'Kurgunta', 'Tonyrefail', 'Virreyes', 'Kalu Khan', 'Al Qurayya', 'Libertyville', 'Kharagpur', 'Woodley', 'Mineola', 'Chalon-sur-Saone', 'Bouhmama', 'Mena', 'San Cataldo', 'Quipungo', 'Alakamisy-Ambohimaha', 'Gloversville', 'Chuimatan', 'Nadugadda', 'Borjomi', 'Sibonga', 'Palanpur', 'Kodinar', 'Asingan', 'Chahchaheh', 'Dobrova', 'Ruzomberok', 'Chhoti Sadri', 'Langzhong', 'Anekal', 'Pontal do Parana', 'Muhammadabad', 'Moreira', 'Civita Castellana', 'Kpalime', 'Mahespur', 'Louny', 'Venkatapuram', 'Khaw Zar Chaung Wa', 'Dahutang', 'Data', 'Cheddra', 'Madridejos', 'DeLand', 'Koewarasan', 'Palaiseau', 'Ropczyce', 'Urakawa', 'Tanque Novo', 'Guider', 'Kiruhura', 'Kasese', 'Murrhardt', 'Andingcun', 'Kuhdasht', 'Freire', 'Singera', 'Panglong', 'Tampa', 'Mirik', 'Hatti Mattur', 'Nakatsugawa', 'Jitwarpur Nizamat', 'Carmo', 'Jocotan', 'Forquilha', 'Vargem Grande do Sul', 'Peniche', 'Xinying', 'Nahulingo', 'Goranboy', 'San Pedro Sula', 'Burr Ridge', 'Leatherhead', 'Belisce', 'Villalonga', 'Santa Anita', 'Mayluu-Suu', 'Skovde', 'Buston', 'Domodossola', 'Avelgem', 'El Malah', 'Azusa', 'Neyriz', 'Coronel Suarez', 'Saquarema', 'Meizhou', 'Apia', 'Jawor', 'Saint-Pol-sur-Mer', 'Heguri', 'Bobangui', 'Montalegre', 'Ben Taieb', 'Pande', 'Horsell', 'Sanjo', '`Aynkawah', 'Sfantu-Gheorghe', 'Escuinapa', 'Talikota', 'Guapiara', 'Point Pleasant', 'Ambohipandrano', 'Khangaon', 'Namlea', 'Kottapalem', 'Sebastiao Laranjeiras', 'Phra Samut Chedi', 'Madanancheri', 'Farnworth', 'Coshocton', 'Garrison', 'Peso da Regua', 'Jaguaruna', 'Katihar', 'West Fargo', 'Melaka', 'Ambohimanambola', 'Pabegou', 'Nossa Senhora das Dores', 'Burladingen', 'Byumba', 'Vallikunnam', 'Mohan Eghu', 'San Julian', 'Miracema do Tocantins', 'Palma', 'Goh', 'DeKalb', 'Azle', 'Mont-de-Marsan', 'Slovenske Konjice', 'Manazary', 'Shende', 'Portland', 'Lamarao', 'Reinach', 'Chrzanow', 'Hendek', 'Iwakura', 'Agua Clara', 'Capim', 'Brasilandia', 'Port Colborne', 'Ilsfeld', 'Cienaga de Oro', 'Faxinal dos Guedes', 'Earlestown', 'Andranomeva', 'Visalia', 'Tazhava', 'Linquan', 'Rampur Jalalpur', 'Cricova', 'Cholula de Rivadabia', 'Apiai', 'Vilandai', 'Castellana Grotte', 'Baturbari', 'Jandiala', 'Rive-de-Gier', 'Feltham', 'Laguna Niguel', 'Kannal', 'Purmerend', 'Succasunna', 'Ejea de los Caballeros', 'Dolisie', 'Vernag', 'Tabount', 'Kirksville', 'Rahuri', 'Nanjai Kilabadi', 'Parigi', 'Dhamsain', 'Bainbridge', 'Zumpango', 'Le Beausset', 'Westphalia', 'Cabinda', 'Bagahi', 'Valmontone', 'Al `Ajami', 'Maromby', 'Maaseik', 'Wuyishan', 'Ulbroka', 'Santiago Juxtlahuaca', 'Ranomena', 'Buadiposo-Buntong', 'Khurai', '`Aqrah', 'Priverno', 'Geldrop', 'Balandougou', 'Legnago', 'Don Carlos', 'Keitumkawn', 'Levice', 'Gulam', 'Ratne', 'Jiayuguan', 'Gidha', 'Juba', 'Ksar Belezma', 'Puerto Baquerizo Moreno', 'Ponta Pora', 'Sidrolandia', 'Achim', 'North Lynnwood', 'Haslett', 'Porto Ferreira', 'Anqing', 'Takahagi', 'Fray Luis A. Beltran', 'Basudebpur', 'Concon', 'Mangochi', 'McKeesport', 'Matheu', 'Sumbha', 'Murajpur', 'Champerico', 'Iki', 'Port-Bouet', 'Santa Luzia', 'Chitaldrug', 'Baliqchi', 'Uthai Thani', 'Az Zubaydiyah', 'Cicero', 'Rampurwa', 'El Salto', 'Duero', 'Littleover', 'Ropazi', 'Andradas', 'Tirur', 'Caramoan', 'San Carlos Alzatate', 'Zuojiawu', 'Karacasu', 'Namioka', 'Brumunddal', 'West Milford', 'Alandatte', 'Dumingag', 'Petrolandia', 'Arachchalur', 'Urussanga', 'Cerrillos', 'Ortega', 'Kanyobagonga', 'Atibaia', 'Eustis', 'Sefaatli', 'Lasam', 'Montalto di Castro', 'Cihuatlan', 'Cirebon', 'Rangewala', 'Tekkalakote', 'Cameron Park', 'Bardaskan', 'Manambidala', 'Bukittinggi', 'Sora', 'Dashouping', 'Escarcega', 'Tezoyuca', 'Karadichittur', 'Roskilde', 'West Caln', "Pereyaslav-Khmel'nyts'kyy", 'Senmanat', 'Formosa', 'Schaumburg', 'Tarnobrzeg', 'Kadaladi', 'Ouled Haddaj', 'Vuyyuru', 'Baruipur', 'Teniente Primero Manuel Irala Fernandez', 'Menaceur', 'Sedeh Lanjan', 'Ciney', 'Amboavory', 'Jale', 'Kalmar', 'Koori', 'Alsager', 'Gulagac', 'Zwedru', 'Kadiyampatti', 'Wilkes-Barre', 'Bremerhaven', 'Motru', 'Bad Tolz', 'Nha Trang', 'Andranomavo', 'Punta Prieta', 'Masmouda', 'Vavatenina', 'Divandarreh', 'Atambua', 'Krosuru', 'Wrecsam', 'Apache Junction', 'Barri', 'Shingucho-shingu', 'Kusaha', 'Sidi Ahmed El Khadir', 'West Springfield', 'Al Wajh', 'Massangena', 'Doda', 'Shutayil', 'Messamena', 'Navalcarnero', 'Bellaire', 'Nichelino', 'Serra Azul', 'Rantabe', 'Padaivedu', 'Monteroni di Lecce', 'Drodro', 'Rees', 'Bulgan', 'Fraijanes', 'Montbrison', 'Kangaroo Flat', 'Urbach', 'Lindon', 'Olen', 'Langgons', 'Velim', 'Marhamat', 'Anamur', 'Milattur', 'Forecariah', 'Quartier Morin', 'Worb', 'Playas', 'Kingsville', 'North Strabane', 'Kolumalapalle', 'Narippatta', 'Yotsukaido', 'Takahashi', 'Dulce Nombre de Maria', 'Reguiba', 'Dapi', 'Elk River', 'Karaagac', 'Duba', 'Rasnov', 'Cabrobo', 'Easthampton', 'Parvatipuram', 'Husainpur', 'Nipomo', 'Chettimangurichchi', 'Bekitro', 'Itapagipe', 'Palestina', "Erval d'Oeste", 'Vasilika', 'Kolappalur', 'Batken', 'Bank', 'Qorako`l Shahri', 'Manor', 'Majanji', 'Al Balyana', 'Niederkassel', 'Dulce Nombre de Jesus', 'Mason City', 'Lavello', 'Zaruma', 'Mignoure', 'Namyslow', 'Hagaranahalli', 'Filippoi', 'Ita', 'Wuzhong', 'Bedfordview', 'Mollet', 'Malipakar', 'Innisfil', 'Cehegin', 'Stone', 'Lake St. Louis', 'Panskura', 'Injibara', 'Festus', 'Cicevac', 'Busra ash Sham', 'Krompachy', 'Mumford', 'Phulmalik', 'Jundiai', 'Annaka', 'Senhora dos Remedios', 'Anthem', 'Sadarak', 'Naga City', 'Xiping', 'Planeta Rica', 'Kilgore', 'Ciudad Sabinas Hidalgo', 'Tsaratanana', 'Puduppalli Kunnam', 'Manilva', 'Colorno', 'Majsperk', 'Pibrac', 'Nova Gorica', 'Sorbolo', 'Halberstadt', 'Magalang', 'Cozumel', 'Chanco', 'Gympie', 'Colwood', 'Vire', 'Troina', 'Oudenbosch', 'Bertinoro', 'El Bazouriye', 'Cabra', 'Alakamisy-Ambohimahazo', 'Milan', 'Sanare', 'Trencin', 'Minas', 'Santa Vitoria do Palmar', 'Saintes', 'San Jose de las Lajas', 'Governador Dix-Sept Rosado', 'Ville-la-Grand', 'Stjordal', 'Kirkwall', 'Aigaleo', 'Uppukkottai', 'Lakeway', 'Cuchi', 'Kamina', 'Rawa Mazowiecka', 'Xinyuan', 'Scarsdale', 'Tosashimizu', 'Akot', 'Yaritagua', 'East Rancho Dominguez', 'Pongoz', 'Ambazoa', 'Liuchuan', 'Higashiura', 'Blackfoot', 'Gniezno', 'Blanchard', 'Zadar', 'Nakaechi', 'Mairena del Aljarafe', 'Massama', 'Mobo', 'Kezmarok', 'La Porte', 'Berching', 'Siegsdorf', 'New Albany', 'Half Way Tree', 'Jharka', 'Zinacantepec', 'Draveil', 'Bek-Abad', 'Fayetteville', 'Oyam', 'Pasil', 'Clamart', 'Mbuzini', 'Ouenza', 'Perdur', 'Soamanandrariny', 'Hanawa', 'Yekambarakuppam', 'Mashiki', 'Thibodaux', 'Nijkerk', 'Hayang', 'Malabon', 'Aparri', 'Bwana Mkubwa', 'Charqueada', 'Kuju', 'Kwamhlanga', 'Alimos', 'Ypane', 'Sahave', 'Marib', 'Shirley', 'Bunbury', 'Talayan', 'Ljungby', 'San Juan de Betulia', 'Laixi', 'La Ferte-Bernard', 'Lagos de Moreno', 'Tangbian', 'Adalar', 'Nyamira', 'Desert Hot Springs', 'Datia', 'Cognac', 'Jaltipan de Morelos', 'Ponsacco', 'Dindori', 'Hlaingbwe', 'Delijan', 'Cirie', 'Une', 'Boldesti-Scaeni', 'Sapeacu', 'Nagdah', 'Douar Ezzerarda', 'Bali', 'Stevenage', 'Saryshaghan', 'Melchor Romero', 'Barbana', 'Zanjan', 'Sennan', 'Ponta Delgada', 'Royse City', 'Wonthaggi', 'Putaendo', 'Kokawa', 'Wloszczowa', 'Warner Robins', 'Kranenburg', 'Tome', 'Usilampatti', 'Puerto Pinasco', 'El Maiten', 'Gurgaon', 'Immenstadt im Allgau', 'Wisla', 'Gannavaram', 'Yingzhong', 'Masho Khel', 'Reitz', 'Belen de Umbria', 'Ripollet', 'Lauaan', 'Beira', 'Ratu', 'Mure', 'Medicine Hat', 'Ixcatepec', 'Bois-des-Filion', 'Nogent-le-Rotrou', 'Djibouti', 'University of California-Santa Barbara', 'Byahatti', 'Bentonville', 'Danau Kandimarg', 'Tevragh Zeina', 'Sattar', 'Tranoroa', 'Mengdong', 'Luz', 'Wuyang', 'Batemans Bay', 'Dinas', 'Itauna', 'Artena', 'Carouge', 'Xylokastro', 'Bahia de Caraquez', 'Marabella', 'Bremerton', 'Ixtlahuacan del Rio', 'Garchitorena', 'Huittinen', 'Fonsorbes', 'Bafia', 'Da Lat', 'Pasian di Prato', 'Sarayonu', 'Chiman', 'Colombo', 'Kirikera', 'Pemmperena', 'Grunwald', 'Savalgi', 'Chambly', 'Puduppatti', 'Himamaylan', 'Lansdowne', 'Douar Mzoura', 'Powell', 'Erfurt', 'Kizhake Chalakudi', "L'Arbaa Nait Irathen", 'Galmi', 'Junagarh', 'Murgap', 'Sallanches', 'Loutete', 'Rosa Zarate', 'Caninde', 'Santa Tecla', 'Skoghall', 'Banswada', 'Cresskill', 'Dabaozi', 'Cortland', 'Ennery', 'Ifanirea', 'Antsaidoha-Bebao', 'Tepic', 'Qiryat Gat', 'Cabeceiras de Basto', 'Pirot', 'Waxahachie', 'De Pere', 'Salsomaggiore Terme', 'Potenza Picena', 'Wright', 'Resadiye', 'Melito di Napoli', 'Toui', 'Guercif', 'Daga', 'Porto Feliz', 'Sinende', 'Goasi', 'Kikube', 'North Tidworth', 'Narona', 'Capitao Eneas', 'Makakilo', 'Billere', 'Tabas', 'Klang', 'Agogo', 'Turnov', 'Fairhaven', 'Muriae', 'Chakai', 'Maniamkulam', 'Seberang Jaya', 'Jaboticatubas', 'Burley', 'Four Square Mile', 'Kirchlinteln', 'El Quetzal', 'Mar del Plata', 'Elattur', 'Mezica', 'Brunn am Gebirge', 'Quetta', 'Morokweng', 'Kitanagoya', 'Manacapuru', 'Towamencin', 'Aragona', 'Regla', 'Caldeirao Grande', 'Musina', 'Jacksonville', 'Azul', 'Winter Park', 'Englefield Green', 'Ambon', 'Lucena', 'Huntington Station', 'Itarema', 'San Francisco Chimalpa', 'Ondokuzmayis', 'Basford', 'Antsaravibe', 'Meilen', 'Tangermunde', 'Wijchen', 'Castilleja de la Cuesta', 'Lanark', 'Usak', 'Wall', 'Mijas', 'Sitionuevo', 'East Lake', 'Mahabo-Mananivo', 'Dhoraji', 'Jalacingo', 'Willstatt', 'Nakhl-e Taqi', 'Opelousas', 'Golo-Djigbe', 'Santiago Nonualco', 'Pulsano', 'Simrol', 'Ihnasya al Madinah', 'Pozzallo', 'Ramayipatti', 'Sihaul', 'Wanze', 'Senduria', 'Ergue-Gaberic', 'Mauldin', 'Sempeter pri Gorici', 'Ammanabrolu', 'Mitrapur', 'Massarosa', 'Binnish', 'La Tour-de-Peilz', 'Lavaltrie', 'Barsbuttel', 'Seberi', "Murang'a", 'Macherla', 'Acharipallam', 'Setana', 'Yichun', 'Lugus', 'Parnamirim', 'Kayes', 'Chinu', 'Tubmanburg', 'Cekme', 'Mehdauli', 'Vaulx-en-Velin', 'Waikabubak', 'Fontibon', 'Omaha', 'Pambujan', 'Kassorola', 'Khonj', 'La Maddalena', 'Fandrandava', 'Chinna Mupparam', 'Nea Smyrni', 'Burgdorf', 'Ituporanga', 'Rayamangalam', 'Albury', 'Paranapanema', 'Smiltene', 'Lohmar', 'Puliyankunnu', 'Tiruvalla', 'Hochberg', 'Minto', 'Antelope', 'Mohanur', 'Wiang Sa', 'Dobrich', 'Ingelheim', 'Shetou', 'Gavle', 'Svedala', 'Borogani', 'Kisoro', 'Ortenberg', 'Uttarpara', 'Rajaldesar', 'Al Badari', 'Masinigudi', 'Gold Coast', 'Indiana', 'Swissvale', "Al Jazirah al Hamra'", 'Santa Maria Capua Vetere', 'Mangala', 'Upper Bicutan', 'Hacilar', 'Woltersdorf', 'Khwazakhela', 'Santa Fe de Antioquia', 'Gammasa', 'Iwashita', 'Bohmte', 'Tondabayashicho', 'Salobrena', 'Unebicho', 'Isaszeg', 'Bloomsburg', 'Vidor', 'Niquero', 'Niepolomice', 'Halacho', 'Khanpur Khairanti', 'Sao Roque', 'Shively', 'Fox Crossing', 'Postmasburg', 'Vicentinopolis', 'Rafael Delgado', 'Fuglafjordhur', 'Bekapaika', 'Rastede', 'Devnya', 'Derecik', 'Nida', 'Odayarpatti', 'Bala', 'Harra', 'Ain Youcef', 'Kenge', 'Sapone', 'Dongtai', 'Ratnapur', 'Giron', 'Kartuzy', 'Lake Villa', 'Kolaccheri', 'Kiridh', 'Hattersheim', 'Daheba', 'Et Taiyiba', 'Ciudad del Plata', 'Ohringen', 'Tegina', 'Ishii', 'Gahmar', 'Lancaster', 'Carmel', 'Firminy', 'Akishima', "Milla'ab", 'Beparasy', 'Esperanza', 'Bignay', 'Spruce Grove', 'Krommenie', 'Hollviken', 'Westminster', 'Shymkent', 'Majhaulia', 'Biscarrosse', 'Kadur Sahib', 'Luce', "Al Ha'ir", 'Teluk Intan', 'Dubove', 'Qiaotouba', 'Qasbat Tadla', 'North Walsham', 'Aspe', 'Dokkum', 'Plaridel', 'Sonsbeck', "Cava de' Tirreni", 'Santa Elena', 'Barka Gaon', 'Asagicinik', 'Dagua', 'Campina Verde', 'Kaitaia', 'Leverkusen', 'Merchtem', 'Kiwoko', 'Samborondon', 'Palm Valley', 'Fengjia', 'Muhos', "'Ain Kerma", 'Sihma', 'Tuminkatti', 'Letychiv', 'Notse', 'Pearsall', 'Jeypore', 'San Antonio Palopo', 'Kingstown', 'Lislique', 'Sannat', 'Monnickendam', 'Kurabalakota', 'Miranorte', 'Rudersberg', 'Kasama', 'Granja', 'Amarwa Khurd', 'Ad Diwaniyah', 'Evington', 'Warman', 'Kuttampuzha', 'Mons', 'Korsholm', 'Gourma Rharous', 'General Alvear', 'Matelica', 'Bongabon', 'Irituia', 'Son en Breugel', 'Chauny', 'Bayeux', 'Unicov', 'Shaler', 'Aesch', 'Rahata', 'Porangaba', 'Hattingen', 'Ocampo', 'Pirna', 'Perunad', 'Umm Badr', 'Ramganj Mandi', 'Saidia', 'Mutata', 'Manatuto', 'Lambayeque', 'Grasse', 'Centreville', 'Bandrele', 'Glogow', 'Canagatan', 'Trisshileri', 'Pavona', 'Diamniadio', 'Harsola', 'Gulariya', 'Dubendorf', 'San Prisco', 'Maissade', 'Agios Nikolaos', 'Dubak', 'Nalut', 'Bhadrachalam', 'Upper Moreland', 'Geyve', 'El Tejar', 'Castilla', 'Amsin', 'Meulebeke', 'San Francisco El Alto', 'Presidente Getulio', 'Pakwach', 'Mayfield', 'Kalawana', 'Evosmos', 'Killarney', 'Zhubei', 'El Ksiba', 'Sierpc', 'Lambari', 'Fakfak', 'Dawlish', 'Kambaliyampatti', 'Dianke', 'Gaoya', 'Almenara', 'Molina de Segura', 'Zighout Youcef', 'San Jose Poaquil', 'Louvres', 'Los Altos', 'Newton Aycliffe', 'Chesterfield', 'Korhogo', 'Srbac', 'Warstein', 'Talladega', 'Tigaon', 'Pozos', 'Raja Pakar', 'Kuttiyeri', 'Tres Cachoeiras', 'Luena', 'Betatao', 'Sredisce ob Dravi', 'West Monroe', 'Sonabedha', 'Rhenen', 'Talainayar Agraharam', 'Serido', 'Campobasso', 'Zabkowice Slaskie', 'Harji', 'Ubata', '`Ajab Shir', 'Tatoufet', 'Hailun', 'Kiratot', 'El Idrissia', 'Liannong', 'Nedelisce', 'Pirangi', 'Beringen', 'Leopoldo de Bulhoes', 'Northolt', 'Quesada', 'Peumo', 'Zitiste', 'Kerman', 'Cigli', 'Gaibandha', 'Tostado', 'Ilog', 'Mohana', 'Erbach', 'Mahasoabe', 'Latifpur', 'Rajim', 'Tirhassaline', 'Rathenow', 'Strendur', 'Grigny', 'Skoczow', 'Belwa', 'Kasavanampatti', 'Kalinagar', 'Navappatti', 'Megara', 'Lolotique', 'Saint-Junien', 'Pijino del Carmen', 'Guelph/Eramosa', 'Solok', 'Ciudad del Este', 'Khagaria', 'Kinogitan', 'La Garenne-Colombes', 'Manaquiri', 'Telgte', 'Igaracu do Tiete', 'Mantsala', 'Bogatic', 'St. Cloud', 'Huntertown', 'Smithville', 'Nueva Valencia', 'Piru', 'Yukarikaraman', 'Merzifon', 'Cosham', 'Kharian', 'Mantes-la-Ville', 'Duanzhuang', 'Kashipur', 'Seaside', 'Orchha', 'Djemmorah', 'Kocarli', 'Mojkovac', 'Nigel', 'Kursunlu', 'Soyagaon', 'Pamuru', 'Bassila', 'Di An', 'Vaureal', 'El Dificil', 'Troyan', 'Toca', 'Bet Shemesh', 'Kayalpattanam', 'Bernards', 'Zomba', 'Okabecho-okabe', 'Nedroma', 'Kostel', 'Mosbach', 'Miyauchi', 'Jhitkahiya', 'Rhymney', 'Banisilan', 'Medina', 'Sayarpuram', 'Landsberg', 'Sevastopol', 'Ceara-Mirim', 'Assisi', 'Goudomp', "Debark'", 'Melrose Park', 'Le Rheu', 'Port Douglas', 'San Andres Tuxtla', 'Princetown', 'Seybaplaya', 'Riegelsberg', 'Cagayan de Oro', 'Lommel', 'Wapi', 'Feira de Santana', 'Obertshausen', 'Imarui', 'Soavinandriana', 'Strathmore', 'Kanke', 'Arraial do Cabo', 'Matsudo', 'Pandhana', 'Sao Miguel do Guapore', 'Medea', 'Rheinfelden (Baden)', 'Taglio', 'Korgan', 'Pantabangan', 'Bhattiprolu', 'Lingampet', 'Puerto Pimentel', 'Velampatti', 'Vilsbiburg', 'Mianpur Dubauli', 'Panguipulli', 'Hafr al Batin', 'Brasnorte', 'Vignate', 'Frederikshavn', 'Alpena', 'Pagbilao', 'Villers-les-Nancy', 'Chagalamarri', 'Dispur', 'Menemen', 'Baqershahr', 'Jandola', 'Oborniki Slaskie', 'Bandlaguda', 'Goulburn', 'Safotulafai', 'Dreieich', 'Galatsi', 'Meixedo', 'Banovici', 'San Martino Buon Albergo', 'Recani', 'Prudnik', 'Sao Miguel do Araguaia', 'Sunkarevu', 'New Castle', 'Ban Thung Tam Sao', 'Nariman', 'Pata Ellamilli', 'Capitolio', 'Dobele', 'Leicester', 'Giovinazzo', 'Ribeiro do Amparo', 'Pailitas', 'San Pedro Ayampuc', 'Stalowa Wola', 'Khargone', 'Lustenau', 'Sale', 'Narhan', 'Rubiataba', 'Arceburgo', 'Hokitika', 'Vila Frescainha', 'Lomme', 'Lazarevac', 'Tikrit', 'Sudak', 'Phrae', 'Ramapo', 'Ninomiya', 'Hrazdan', 'Taguasco', 'Serravalle', 'Bushtyno', 'Gobo', 'Kuttalam', 'Awans', 'Chancay', 'Jocotenango', 'Woodhouse', 'KwaDukuza', 'Wildau', 'Batobato', 'Crest Hill', 'Balvadi', 'Karkudalpatti', 'Santa Ines', 'Guacari', 'Mantena', 'Ladera Ranch', 'Sulechow', 'Matruh', 'Isla de Maipo', 'Gothva', 'Frutillar', 'Raxruha', 'Machesney Park', 'Wingene', 'Lejanias', 'Le Chesnay', 'Bissendorf', 'Abadiania', 'Ometepec', 'Enna', 'Petarukan', 'Boone', 'Awbari', 'Annaram', 'Boki-Were', 'Al Mijlad', 'Raiparthi', 'Aougrout', 'Ranchi', 'Onojo', 'Saumalkol', 'Atri', 'Kara-Kol', 'Lauria Inferiore', 'San Felice Circeo', 'Wooburn', 'Agualva', 'Fotadrevo', 'Herenfa', 'Berlin', 'Astolfo Dutra', 'Gahini', 'Ambanja', 'Hargeysa', 'Santa Cruz do Capibaribe', 'Ertis', 'Esperantina', 'Biloxi', 'Hang Tram', 'Diinsoor', 'Asfour', 'Pula', 'Cacaopera', 'Noida', 'Pittsfield', 'Le Muy', 'Barletta', 'Buraydah', 'Ingolstadt', 'Chautham', 'Northwest Harborcreek', 'Oldsmar', 'Neder-Over-Heembeek', 'Kamisu', 'Puerto Piritu', 'Ambatolahy', 'Daru', 'New Brighton', 'Queluz', 'Epernay', 'Bacalar', 'Ban Nikhom Phatthana', 'Valbonne', 'Yondo', 'Khowrmuj', 'Jingdezhen', 'Campana', 'Kuli', 'Ogimachi', 'Biskupiec', 'Framingham', 'Lomma', 'Papraur', 'Mukumbura', 'Kawayan', 'Ncora', 'Zhaoling', 'Boscombe', 'Abbeville', 'Reisterstown', 'Tongeren', 'Joal-Fadiout', 'Rosario de Lerma', 'Velpuru', 'Douarnenez', 'Sofia', 'Itayanagi', 'Ekinozu', 'Bonanza', 'Mohlanapeng', 'Cudworth', 'Barga', 'Okinawa', 'Owase', 'Ainan', 'Nyunzu', 'Yokohama', 'Aiud', 'Tachov', 'Khagam', 'Anosiarivo', 'Khust', 'Apolda', 'Bogra', 'Djinet', 'Holiday City-Berkeley', 'Carmo do Rio Verde', 'Kirchseeon', 'Victorica', 'Lahar', 'Veranopolis', 'Bechar', 'Noceto', 'Nossa Senhora da Gloria', 'Bhaluhar', 'Hofu', 'Sozopol', 'Kyabe', 'Sosua', 'Benedikt', 'Bad Neuenahr-Ahrweiler', 'Narendrapatnam', 'Gurramkonda', 'Pinhel', 'Shiling', 'Nadol', 'Paredes', 'Adjido', 'Trebnje', 'Yelpur', 'Sirpur', 'Barvala', 'Angol', 'Birpur Barapatti Pindraun', 'Hendijan', 'Parakou', 'Mangaratiba', 'Schwabisch Hall', 'Sarqan', 'Aleshtar', 'Warin Chamrap', 'West Des Moines', 'Cutervo', 'Valiyakumaramangalam', 'Kottoppadam', 'Sukth', 'Sambhal', 'Velliyod', 'Waldshut-Tiengen', 'Lakhnaur', 'Barrocas', 'Djamaa', 'Matale', 'Ban Phai', 'Sao Paulo de Olivenca', 'Harar', 'Barbasa', 'Kukatpalli', 'Aisho', 'Raynham', 'Harborcreek', 'Bafra', 'Gibsons', 'Cluj-Napoca', 'Hoeselt', 'Acopiara', 'Filipstad', 'Camardi', 'Bunol', 'Rania', "Su'ao", 'Konen Agrahar', 'Sidi Qacem', 'Chapada dos Guimaraes', 'Bad Berleburg', 'Thabazimbi', 'Bradley', 'Masif Sarsink', 'Anse Rouge', 'Galten', 'Oshoba', "Alta Floresta D'Oeste", 'Ranaghat', 'Patchur', 'Rameswaram', 'Fonadhoo', 'Kuraymah', 'Bosanska Krupa', 'Cajica', 'Sabou', 'Fundacion', 'Koba', 'Rudra Nagar', 'Tskhinvali', 'Itarantim', 'Mirzapur', 'Mocajuba', 'Berazategui', 'Penjamo', 'Doctor Mora', 'Boleslawiec', 'Angat', 'Siraha', 'Rock Ferry', 'Salt', 'Az Zintan', 'Livno', 'Winterville', 'Daryabad', 'Newton Mearns', 'Milovice', 'Nanqiao', 'Nord', 'Doba', 'Guayacanes', 'Didcot', 'Skelmersdale', 'Ivoamba', 'Roessleville', 'Sadovoye', 'Magugpo Poblacion', 'Toritama', 'Mulakaledu', 'Baia da Traicao', 'Pissila', 'Echuca', 'Les Cayes', 'Gajwel', 'San Jose de Jachal', 'Swedru', 'Ribeirao', 'Zhetibay', 'Monatele', 'Gibraleon', 'Kufstein', 'Chak Five Hundred Seventy-five', 'Sterling', 'Khattan', 'Kuse', 'Euless', 'Baggao', 'Al Juwayyidah', 'Ilobasco', 'Assi Bou Nif', 'Ikniwn', 'Gudja', 'Monteiro', 'Bryn Mawr-Skyway', 'Corona de Tucson', 'Sonson', 'Clevedon', 'Palestrina', 'Uberherrn', 'Civitella in Val di Chiana', 'Balya', 'Vinanivao', 'Alexandria', 'Merida', 'Kursumlija', 'Delgado', 'Menton', 'Dharampur Bande', 'Goiana', 'Matican', 'Carballo', 'Itapebi', 'Minsk Mazowiecki', 'Bhadaur', 'Saidu Sharif', 'Lalla Mimouna', 'Mian Sahib', 'Borzna', 'Pakenham', 'Al Kut', 'Guemoukouraba', 'Dhandhuka', 'Crawfordsville', 'Arak', 'Acahay', '`Abasan al Kabirah', 'Farashband', 'San Tomas', 'Calasparra', 'Ambahoabe', 'Bayindir', 'Merauke', 'Ayagawa', 'Ndjamba', 'Lakewood', 'Krishnanagar', 'Kyustendil', 'Porto Franco', 'Anoka', 'Tulsa', 'De Haan', 'Caluco', 'Dumraon', 'Fauske', 'Jogiara', 'Rambha', 'Campina Grande do Sul', 'Pontevedra', 'Brejolandia', 'Sundarpur', 'Waltikon', 'Putten', 'Jardim', 'Siemianowice Slaskie', 'Buchen in Odenwald', 'Ningde', 'Tonsberg', 'Shaliuhe', 'Siracusa', 'Galleh Dar', 'Ponneri', 'Gifhorn', 'Avarua', 'Longzhou', 'Wenxicun', 'Pontardulais', 'Alipur', 'Doetinchem', 'Maravatio de Ocampo', 'Elma', 'Paravur Tekkumbhagam', 'Oak Hills Place', 'Dorou', 'Galapagar', 'Prata', 'Bytca', 'Kanmaki', 'Ban Pong Tao', 'Antsohihy', 'Ban Ang Sila', 'Gorisnica', 'Abrera', 'Visaginas', 'Suoluntun', 'Le Perreux-Sur-Marne', 'Durbat', 'Ourinhos', 'Homa Bay', 'Kolkwitz', 'Gornji Grad', 'Soroca', 'Licata', 'Zhangshanying', 'Shuangcheng', 'Shariff Aguak', 'Redcliff', 'Selargius', 'Zapotlan del Rey', 'Uetersen', 'Rochedale', 'Mongeri', 'Tonala', 'Lloydminster', 'Ribeira Brava', 'Xingcheng', 'Baku', 'Ban Dung', 'Klatovy', 'Empangeni', 'Chundale', 'Sorontona', 'Dieppe', 'Grandville', 'Udawantnagar', 'Qaqortoq', 'Bischheim', 'Urucui', 'Uychi', 'Kalispell', 'Amman', 'Esplugas de Llobregat', 'Lohfelden', 'Nacimiento', 'Phongsali', 'Ngerulmud', 'Mulsen', 'Yby Yau', 'Balassagyarmat', 'Malingaon', 'Xionglin', 'Glenvar Heights', 'Miho', 'North Liberty', 'Dongshan', 'Bauang', 'Beguedo', 'Cruz Grande', 'Douar Trougout', 'Primrose', 'Merrimack', 'Raigarh', 'Potomac', 'Voorhees', 'Dalippur', 'Castle Pines', 'La Montanita', 'Sabang', 'Vintar', 'Mikumi', 'Sydney', 'Tarhjicht', 'Nizao', 'Cahors', 'Elland', 'Diplahan', 'Kayankulam', 'Custodia', 'Felipe Carrillo Puerto', 'Paraguacu', 'Lohafary', 'Austintown', 'Huixquilucan', 'Pulpi', 'Oakleaf Plantation', 'Katsina', 'Florennes', 'Bad Honnef am Rhein', 'Brenes', 'West Linn', 'Xanthi', 'Saint-Saulve', 'Sierra Bullones', 'Dravograd', 'Bizen', 'Kurman', 'Kuzma', 'Kolagallu', 'Bordj Ghdir', 'Jogeva', "Sofiyivs'ka Borshchahivka", 'Pipra', 'Lubeck', 'Oppegard', 'Kuopio', 'Tuscumbia', 'Kasaji', 'Fort Atkinson', 'Tamar', 'Middelfart', 'Grumo Nevano', 'Downey', 'Singhanwala', 'Kawasaki', 'Opglabbeek', 'Maun', 'Bosel', 'Benaulim', 'Smethwick', 'Maromandia', 'Hawaiian Gardens', 'Jantho', 'Malangam', 'Rock Hill', 'Livingstone', 'Jaranwala', 'Krong Kep', 'West Covina', 'Belzig', 'Kitagata', 'Fountain', 'San Lorenzo de Descardazar', 'Bati', 'Quzanli', 'Hosuru', 'Poco Verde', 'Simeulu', 'Yangquan', 'Shahzadpur', 'Lopik', 'Tarogong', 'Iraiyur', 'Freiberg', 'Corinto', 'Ukiha', 'Alindao', 'Ambohimahavelona', 'Sosan', 'Bhagabanpur', 'Madamba', 'Liubymivka', 'Hassan', 'Sham Shui Po', 'Barbalha', 'Wonju', 'Taftanaz', 'Waipio', 'Guidan Roumdji', 'Vohringen', 'Crusinallo', 'Nagareyama', 'Geel', 'Takahama', 'Czarna Bialostocka', 'Matthews', 'Fleury-Merogis', 'Tall `Aran', 'Kamikawa', 'Rose Belle', 'Anorombato', 'Wolfsberg', 'Montreal', 'Macrohon', 'Miribel', 'Cobanlar', 'Gelsenkirchen', 'Noblesville', 'Sabaur', 'Aizkraukle', 'El Bayadh', 'Bir Jdid', 'Gunjur', 'Nauta', 'Browns Mills', 'Shawnee', 'Bredasdorp', 'Camas', 'Malebennur', 'Ninh Binh', 'Alto do Rodrigues', 'Wilton Manors', 'Fort Hunt', 'Usia', 'Lorgues', 'Sanrha', 'Bluffton', 'Kendallville', 'Preddvor', 'Xovos', 'Staoueli', 'James Island', 'Pampa del Infierno', 'Lucaya', 'Chino Valley', 'Fort Leonard Wood', 'Mirpur Bhtoro', 'Ixtapa Zihuatanejo', 'Albemarle', 'Bekkaria', 'Payatas', 'Vitanje', 'Koila Dewa', 'Zhaoqing', 'Sabac', 'Kakonko', 'Hojambaz', 'Voinjama', 'Xiaba', 'Ramasingavaram', 'Of', 'Champoton', 'Burntwood', 'Viernheim', 'Estoril', 'Aflou', 'Ampanefena', 'Kamiamakusa', 'Juan L. Lacaze', 'Motkur', 'My Luong', 'Vernouillet', 'Balatan', 'Samaipata', 'Alwa Tirunagari', 'Saku', 'Kuilsrivier', 'Lynn Haven', 'Maisach', 'Bingerville', 'Alicia', 'Riversdale', 'Bishunpur', 'Tepoztlan', 'Vishakhapatnam', 'Gameleira', 'Nyanza', 'Bad Frankenhausen', 'Huasca de Ocampo', 'Kasongan', 'Tortoreto', 'Moschato', 'Requena', 'Kelowna', 'Nyahanga', 'Pohadi', 'Kalibo', 'Round Rock', 'Pequannock', 'Haifa', 'Ezpeleta', 'Boosaaso', 'Oberschleissheim', 'Seesen', 'Alexandreia', 'Salihli', 'Mandali', 'Xoxocotla', 'Jakkampalaiyam', 'Bradford West Gwillimbury', 'West Perth', 'Jaszbereny', 'Vukovar', 'Wittstock', 'Dartmouth', 'Zacatecas', 'Oswego', "Debre Werk'", 'Annemasse', 'Juripiranga', 'Tultitlan de Mariano Escobedo', 'Coachella', 'Zagubica', 'Altepexi', 'Balao', 'Velappadi', 'Pamukkale', 'West Vancouver', 'Encamp', 'Gudibanda', 'Bensenville', 'Augusta', 'Natitingou', 'Ganyesa', 'Jiujiang', 'Bad Schwartau', 'Kings Norton', 'Dharampuri', 'Huanta', 'Iganga', 'Buckeburg', 'Perryville', 'Bingham', 'Severn', 'As Sulayyil', 'Les Coteaux', 'Jinmingsi', 'Mel Nariyappanur', 'Timimoun', 'Nato', 'Unisan', 'Danao', 'Mount Hagen', 'Fiesole', 'Concepcion Las Minas', 'Zagorje', 'Eilenburg', 'Vardenik', 'Ribeirao Branco', 'Bismil', 'Lichinga', 'Reinosa', 'Paray-le-Monial', 'Villamediana de Iregua', 'Madisonville', 'Cheam', 'Qabatiyah', 'Bagnacavallo', 'Shankou', 'San Giuseppe Vesuviano', 'Penaflor', 'Amatenango del Valle', 'Vanderbijlpark', 'Coelemu', 'Labiod Medjadja', 'Olkusz', 'Shefar`am', 'Ulan Hua', 'Sirka', 'Caceres', 'Gurh', 'Oildale', 'Colonial Heights', 'eMuziwezinto', 'Uster', 'Pampur', 'Martinsville', 'Suttamalli', 'Go Cong', 'San Miguel de Papasquiaro', 'Aliabad', 'Huaura', 'Denderleeuw', 'Kremiss', 'Kattukkottai', 'Sakaka', 'Khuran Milik', "Al Ma`alla'", 'Belluno', 'Liptovsky Mikulas', 'Baghra', 'Touboro', 'Science City of Munoz', 'Trzebinia', 'Filandia', 'Myjava', 'Ramachandrapuran', 'Agdangan', 'Bourzanga', 'Barstow', 'Annigeri', 'Taio', 'Deokali', 'Lake Country', 'Taizhou', 'Adwick le Street', 'Quilon', 'Partesh', 'Cwmbran', 'Vaerlose', 'San Calixto', 'Oued Sly', 'Lomazzo', 'Guzelbahce', 'Fishkill', 'Annapolis', 'Washington Terrace', 'Keansburg', 'Xintangcun', 'Eastwood', 'Sinmperekou', 'Long Hill', 'Roswell', 'Budakeszi', 'Lincolnia', 'Flagstaff', 'Eynesil', 'Ban Wiang Ka Long', 'Groveton', 'Des Peres', 'Namacunde', 'Zlotow', 'Chittoor', 'Torre del Greco', 'Morrinhos', 'Upper Chichester', 'Marousi', 'Calimete', 'Oytal', 'Sao Joao dos Angolares', 'Anenii Noi', 'Perry', 'Ait Melloul', 'Kitchener', 'Amboronabo', 'Burtonwood', 'Croydon', 'San Celoni', 'Suhaj', 'Hod HaSharon', 'Kitenkela', 'Alagappapuram', 'Ramat HaSharon', 'Tortona', 'Rota', 'Innisfail', 'Cuyahoga Falls', 'Guadix', 'Wil', 'Vemuladivi', 'Taungup', 'Itaocara', 'San Pedro Mixtepec', 'Konce', 'Puerto Guzman', 'Mahuwa Singhrai', 'North Merritt Island', 'Most', 'Lahstedt', 'Westland', 'Liuma', 'Charagua', 'Krasnogorskiy', 'Pakarya Harsidhi', 'Beni Oulid', 'Daqing', 'Ecorse', 'Pineto', 'Olbernhau', 'Dorfen', 'Madeira', 'Sassnitz', 'Susegana', 'Suamico', 'Spring', 'Milazzo', 'Erada', 'Obernburg am Main', 'Oak Harbor', 'Marokarima', 'Malangawa', 'McAllen', 'Merrill', 'Pe de Serra', 'Bilohorodka', 'Dhakaich', 'Malhador', 'Wenatchee', 'Almere', 'Benguela', 'Pisek', 'Matam', 'Sidi Bouzid', 'Chhota Udepur', 'Tehran', 'Panjakent', 'Itapage', 'Quarteira', "Mi'eso", 'Saverne', 'Panchimalco', 'Parambil', 'San Miguel de Salcedo', 'Sanwas', 'Sidhap Kalan', 'Umraniye', 'Falavarjan', 'Gaalkacyo', 'La Ferte-sous-Jouarre', 'Schwalbach', 'La Junta', 'Timbuktu', 'Jessore', 'Tomisato', 'Parachinar', 'Myslowice', 'Beelitz', 'Minehead', 'Samadiala', 'Mankera', 'San Pedro Sacatepequez', 'Gaithersburg', 'Pongnam', 'Chambery', 'Wareham', 'Catu', 'Gangania', 'Shingu', 'Sawahlunto', 'Aydincik', 'Somvarpet', 'Beshariq', 'Hongjiang', 'Nallamadu', 'Hengelo', 'Sidi Tabet', 'Pucioasa', 'Salvaterra', 'Zhongbai', 'Pital', 'Acu', 'La Ligua', 'Kalugumalai', 'Raharpur', 'La Grande', 'Sint-Lievens-Houtem', 'Ban Tha Pho', 'Shangluhu', 'Ciudad Madero', 'Matias Barbosa', 'Yara', 'Lamitan', 'Port Laoise', 'Adiyaman', 'Jaguey Grande', 'Douai', 'Tecuci', 'Tifni', 'Changsha', 'Acatzingo', 'Eilat', 'South Ogden', 'Talsi', 'Dzuunmod', 'Kafr al Kurdi', 'Cochrane', 'Xigaze', 'Terra Santa', 'Na Klang', 'Parsa', 'Side', 'Noshiromachi', 'Tepehuacan de Guerrero', 'Makri', 'Bernex', 'Magway', 'Modugno', 'East Gwillimbury', 'Dornakal', 'The Crossings', 'Gazi', 'Dengjiazhuang', 'Betroka', 'Manno', 'Hausjarvi', 'Menifee', 'Qobustan', 'Ban Bang Pu Mai', 'Macau', 'Lithgow', 'Kissa', 'Kenadsa', 'Mosul', 'Letterkenny', 'Conda', 'Poonamallee', 'Huruta', 'Chennevieres-sur-Marne', 'Camiri', 'Cazanga', 'East Bakersfield', 'Baiji', 'Garsfontein', 'Bad Iburg', 'Glen Carbon', 'Arvin', 'Sobhapur', 'Talas', 'Aalborg', 'Bagnara Calabra', 'Pokhram', 'Aragoiania', 'Aravankara', 'East Nottingham', 'Ramjibanpur', 'Kilmangalam', 'Talisay', 'Krusevac', 'Baranain', 'Gauli Palasiya', 'Sieverne', 'Chakradharpur', 'Ipil', 'Xiamen', 'Henichesk', 'Wajir', 'Polonuevo', 'Las Piedras', 'Vysokyi', 'Barra do Pirai', 'Malancha', 'Vendome', 'Kisarazu', 'Ambohidanerana', 'Villareal', 'San', 'Ipele', 'Bologna', 'Berwyn', 'Kadambanad', 'Diest', 'Tuy Hoa', 'Neuilly-sur-Seine', 'Kiryas Joel', 'Takoma Park', 'Suwa', 'Heiloo', 'Chorbogh', 'Teror', 'Passo do Sertao', 'Akim Swedru', 'Bucheon', 'Upper', 'Kobuleti', 'Adesar', 'Hugli', 'El Khroub', 'Mwaya', 'Atome-Avegame', 'Dhahran', 'Ratnapura', 'Roncq', 'Ambatomena', 'Ichihara', 'Waiblingen', 'Anapoima', 'Tizi Nisly', 'Schiffdorf', 'Merrick', 'Jesenik', 'Jnane Bouih', 'Victoria Falls', 'Genhe', 'Charne', 'Bicas', 'Yandrapalle', 'Onklou', 'Arezzo', 'Taung', 'Paphos', 'Otacilio Costa', 'Ichora', 'Urlaha', 'Espelkamp', 'Birao', 'Lora del Rio', 'Altavilla Milicia', 'Anchorage', 'Pendik', 'Bugojno', 'Kadavur', 'Zahirabad', 'Oschersleben', 'Guerrero Negro', 'Posadas', 'Tabligbo', 'Bermeo', 'Coquimatlan', 'Farg`ona', 'Resita', 'Syurte', 'Viacha', 'Tsing Yi Town', 'Pultusk', 'Patratu', 'Satbayev', 'Bonga', 'San Salvador', 'Szentendre', 'Yvetot', 'Bilpura', 'Kabugao', 'Haskovo', 'Ingleside', 'Wapakoneta', 'Alhendin', 'Mandialaza', 'Kharsawan', 'Chanda', 'Maulavi Bazar', 'Rautara', 'Ljig', 'Iona', 'Sakaiminato', 'North Myrtle Beach', 'Peralillo', 'Santa Flavia', 'Ban Thung Khao Phuang', 'Mandalavadi', 'Bulanik', 'Chong-Aryk', 'Jayamkondacholapuram', 'Havran', 'Belobaka', 'Fujisawacho-niinuma', 'Puyallup', 'Anderanboukan', 'San Lazaro', 'Metpalli', 'Cerkezkoy', 'Attleborough', 'Chunakara Vadakku', 'Eqlid', 'Rockhampton', 'Onalaska', 'Hani i Elezit', 'Ituzaingo', 'Teculutan', 'Bajwara', 'Loran', 'Guinayangan', 'Wunstorf', 'Sheopur', 'Gol Tappeh', 'Casselberry', 'Jurema', 'Chimay', 'Horasan', 'Sankt Wendel', 'Eksambe', 'Francisco Morato', 'Panfilovka', 'Ensley', 'Kudavasal', 'Palpala', 'Itapecerica', 'Drancy', 'Monaghan', 'Beamsville', 'Crato', 'Keila', 'Riedlingen', 'Bucine', 'Vadippatti', 'Segou', 'Bag`dod', 'Vellanad', 'Conceicao do Castelo', 'Alenquer', 'Kalgi', 'Somers Point', 'McMinnville', 'Erlangen', 'Dabeiba', 'Selkirk', 'Toffo', 'Aba', 'Landi Kotal', 'Duyen Hai', 'Ostermundigen', 'Doutou', 'Emmeloord', 'Ainsdale', 'Isla Mujeres', 'Bosobolo', 'Forst (Lausitz)', 'Swarzedz', 'Guttenberg', 'Gibsonton', 'Moa', 'Nattarasankottai', 'Itaberai', 'Teghra English', 'Bellingham', 'Lafayette', 'Fort Wayne', 'Odranci', 'Hannut', 'Ashburn', 'Malate', 'Jardin', 'Clarington', 'Mandiakui', 'Merrydale', 'Uirauna', 'Panvel', 'Chaguanas', 'Basaha', 'Susanville', 'Kharik', 'Grandview', 'Mallapalli', 'Ban Krang', 'Camaiore', 'Gross-Zimmern', 'Vehkalahti', 'Merchweiler', 'Mohnesee', 'Beach Park', 'Yeghvard', 'Beckingen', 'Monte Dourado', 'Gori', 'Striano', 'Barueri', 'Tinogasta', 'Santa Isabel do Rio Negro', 'Kriftel', 'Shiroishi', 'Uaua', 'Pinamalayan', 'Sitalpur', 'Las Matas de Santa Cruz', 'Longchang', 'Causeni', 'Lajinha', 'Sabbashahr', 'Itacoatiara', 'Mashan', 'Trondheim', 'Walsall Wood', 'Lompoc', 'Ibicuitinga', 'Machang', 'Babai', 'Kobe', 'Imsida', 'Florida Ridge', 'Antanambao Mahatsara', 'Gudivada', 'Apopka', 'Bissau', 'Itaete', 'Shuiding', 'Bratunac', 'Beaconsfield', 'Attnang-Puchheim', 'Mennzel Bou Zelfa', 'Dourados', 'Obukhiv', 'Lengede', 'Riberalta', 'Cafayate', 'Bourem Inali', 'Carapicuiba', 'Orangevale', 'La Homa', 'Jaladurgam', 'Gilarchat', 'Reyhanli', 'Durango', 'Carros', 'Hima', 'Panthersville', 'Arab', 'Parasia', 'Graca', 'Metekora', 'Harunabad', 'Thonon-les-Bains', 'Kodikulam', 'Sarai Jattan', 'Anderson Creek', 'Gazulapalle', 'Warrenton', 'Qiman al `Arus', 'Vaux-le-Penil', 'Wuyi', 'Calafell', 'General Carneiro', 'Sylvan Lake', 'Onslow', 'Sidi Amrane', 'Heusden', 'Yushan', 'Kodakkal', 'Dodvad', 'Chaoshan', 'San Jose de Urquico', 'Rosmalen', 'Sumilao', 'Bharno', 'Caversham', 'Union Choco', 'Annappes', 'Angostura', 'Nandasmo', 'Ielmo Marinho', 'Overland Park', 'Umrapur', 'Pirai do Sul', 'Kumlu', 'Kiyosu', 'Paragaticherla', 'Maayon', 'Bardejov', 'Masaka', 'Maoming', 'Santa Josefa', 'Itarsi', 'Prilep', 'Pangil', 'Kottapadi', 'Central Falls', 'Navarre', 'Jatai', 'Antsoantany', 'Tiszavasvari', 'Philadelphia', 'Sundsvall', 'Teapa', 'Sansanne-Mango', 'Itanhomi', 'Upanema', 'Longbridge', 'Bryan', 'Nueva Granada', 'Malial', 'Basud', 'Kurichchi', 'Pichidegua', 'Amarante do Maranhao', 'Kankol', 'Tinajdad', 'Amwa Majhar', 'Pinetown', 'Cherry Hill Mall', 'Nangloi Jat', 'Teopisca', 'Bintuni', 'Vengavasal', 'Westmount', 'Garmsar', 'Sao Lourenco do Sul', 'Duncan', 'Kongarapalli', 'Taounate', 'Itapipoca', 'Yungay', 'Matalom', 'Cartavio', 'Bind', 'Marck', 'Galvarino', 'Midvale', 'Bangkok', 'Tausa', 'Gaoyou', 'Balingen', 'Wambrechies', 'Pomerode', 'Dolbeau', 'Miyar', 'Mahalapye', 'Mandritsara', 'Petrovske', 'Mansa Konko', 'Achern', 'Malvern', 'Veurne', 'Tarauaca', 'Sahna', 'Patiram', 'Nandurbar', 'Nalwar', 'Bahawalnagar', 'Douar Ouled Ayad', 'Guaitarilla', 'Campo Mourao', 'Sartana', 'Yaojiazhuangcun', 'Ma`raba', 'Santo Tome', 'Sapulpa', 'Iasi', 'Starkville', 'Monte Cristi', 'Youfangcun', 'Vestal', 'Shashemene', 'Cambre', 'Catonsville', 'Honow', 'Efkarpia', 'West Caldwell', 'Lianga', 'Losser', 'La Peche', 'Kolea', 'Avrig', 'Mahatsinjony', 'Sukhodilsk', 'Le Bourget', 'Sao Geraldo do Araguaia', 'Palos Verdes Estates', 'Millville', 'Sao Miguel das Matas', 'Saarbrucken', 'Tres de Maio', 'Sortobe', 'Dhulia', 'Pilate', 'Itai', 'Groot-Brakrivier', 'Lemsid', 'Cimarron Hills', 'Boppard', 'Shouguang', 'Ares', 'Aioun', 'Catmon', 'Kizilpinar', "Ta' Xbiex", 'Zuzemberk', 'Vicksburg', 'Koppies', 'Karaj', 'Cherrapunji', 'Qasr-e Shirin', 'El Penol', 'Bacuri', 'Rozenburg', 'Phusro', 'Howard', 'Masquefa', 'Taisar', 'Tirebolu', 'Wallenhorst', 'Pa', 'Baghpat', 'Woodburn', 'Tonawanda', 'Mparo', 'Douar Lamrabih', 'Avondale', 'Arenys de Mar', 'Anguillara Sabazia', 'Alblasserdam', 'Otukpo', 'Chandera', 'Rubino', 'Boskoop', 'Boujad', 'Bijai', 'Teotihuacan', 'Choba', 'Swellendam', 'Wood Dale', 'Motta di Livenza', 'Modakkurichchi', 'Rajahmundry', 'Rio Formoso', 'Shantipur', 'Wolvega', 'Ennigerloh', 'Seydi', 'Melvindale', 'Shangcaiyuan', 'Gooty', 'Ituiutaba', 'Dakota Ridge', 'Langen', 'Klaukkala', 'Lessines', 'Saraykoy', 'Oshu', 'Kac', 'Nanan', 'Mandvi', 'Larbert', 'Garalo', 'Binningen', 'Templeton', 'Yejituo', 'San Estanislao', 'Lake Stevens', 'Kothanuru', 'Dhanaura', 'Goba', 'Kasba', 'Hot Springs', 'Tiruvalur', 'Birzebbuga', 'Morafeno', 'Cruzilia', 'Sao Bras de Alportel', 'Balingoan', 'Purisima de la Concepcion', 'Augustinopolis', 'Caloocan City', 'Itubera', 'Lublin', 'Temoaya', 'Sebdou', 'Beclean', 'Itamarandiba', 'Bayramic', 'Ban Lam Narai', 'Sirsa', 'Talaja', 'Amberieu-en-Bugey', 'Arilje', 'Gorbea', 'Zakopane', 'Izium', 'Masvingo', 'Shulan', 'Chester', 'Shinozaki', 'Rupahi', 'Douar Ait Taleb', "'Ain Boucif", 'San Gil', 'Charters Towers', 'Monovar', 'Tierra Amarilla', 'Toprakkale', 'Pehcevo', 'Cinfaes', 'Lawang Bato', 'Olinda', 'Jiaoxi', 'Bogatynia', 'Bhawanipur Rajdham', 'Jardin America', 'Castelginest', 'Herborn', 'Cordeiro', 'Dharmsala', 'Gitega', 'Vransko', 'Pananaw', 'Wettingen', 'Deressia', 'Secunderabad', 'Queimada Nova', 'Kollankulam', 'Dharmaragar', 'Beylaqan', 'Lutsk', 'Madinat as Sadat', 'Shengping', 'Yasu', 'Hanmayingcun', 'Ukiah', 'Svilajnac', 'Itoman', 'Kathu', 'Dugny', 'Guanajuato', 'Dreux', 'Brovary', 'Sicuani', 'Hsenwi', 'Manama', 'Schleiden', 'Kovancilar', 'Tekkali', 'Schlangen', 'Zama', 'Hostivice', 'Calpulalpan', 'Chalchihuitan', 'Fondettes', 'Messadine', 'Duas Barras', 'Alencon', 'Cankiri', 'Lake Zurich', 'Bararam', 'Nim ka Thana', 'Woodlawn', 'Contramaestre', 'Monroe', 'Kannod', 'Ciechanow', 'Kishunpur', 'Haikou', 'Fene', 'Moree', 'Rasulpur Dhuria', 'Bowie', 'Besiri', 'Texistepec', 'Langenau', 'Nowra', 'Metema', 'Mokeri', 'Kelibia', 'Contamana', 'Albergaria-a-Velha', 'Paracuaro', 'Kokshetau', 'Campo Magro', 'General Mariano Alvarez', 'Stevenston', 'Nagda', 'Sliedrecht', 'Kilmarnock', 'Guantiankan', 'Barira', 'Dunn', 'Portugalete', 'Nizampatam', 'Anse a Galets', 'Louisville', 'Policka', 'Moita', 'Sitanagaram', 'Vrchlabi', 'Greystones', 'Kriel', 'Guachochi', 'Tuzla', 'Juigalpa', 'Paramonga', 'Perali', 'Idanha-a-Nova', 'Ambilobe', 'Kaga Bandoro', 'Gilroy', 'Nilavarappatti', 'Tokat', 'Miaojiaping', 'Cascais', 'Bilzen', 'Malmo', 'Tutong', 'Tergnier', 'Kalicherla', 'Bishop Auckland', 'Kyotamba', 'Sonsonate', 'Edmundston', 'Ban Nong Tong', 'Carnarvon', 'Sloviansk', 'Molave', 'Kurobeshin', 'Le Raincy', 'Chinobod', 'Alamnagar', 'Kot Bhai', 'Altenholz', 'Esanai', 'Kita-ku', 'Lambidou', 'Corumba', 'North Providence', 'Aulnoye-Aymeries', 'Suifenhe', 'Shar', 'Vakfikebir', 'Mudgee', 'Vailoa', 'Georgina', 'Pachalum', 'Al Fqih Ben Calah', 'Lahti', 'Eura', 'Wolsztyn', 'Santa Maria da Boa Vista', 'Udipi', 'Olgiate Olona', 'Okinawa Numero Uno', 'Albany', 'Saint Sampson', 'Bakel', 'Governador Lindenberg', 'Sannieshof', 'Charlottetown', 'Bidkhun', 'Tenafly', 'Dandarah', 'Yunnanyi', 'Mary', 'Ouankoro', 'Zhaicun', 'Loboc', 'Naduvannur', 'Biedenkopf', 'Jarash', 'Yuanyangzhen', 'Ashington', 'Rufino', 'Chodziez', 'Heidenheim', 'Aanekoski', 'Puerto Tirol', 'Attock Khurd', 'Sihung', 'Pennsville', 'Vejen', 'Sanford', 'Loano', 'West Deer', 'Safsaf', 'Collingswood', 'East Setauket', 'Sever do Vouga', 'Savona', 'Mayilur', 'Bouchabel', 'Mbarara', 'Bersenbruck', 'Bevonotra', 'Linghai', 'Aksay', 'Santiago Tangamandapio', 'Changshu', 'Buckhurst Hill', 'Yangchun', 'Tumbao', 'Mazagran', 'Bussy', 'Meerzorg', 'Veys', 'Peddapuram', 'Wuwei', 'Kongnolli', 'Uar Esgudud', 'Munxar', 'Targu Secuiesc', 'Zuvvaladinne', 'Tombos', 'Sanjianxiang', 'Colonie', 'Velakkuttai', 'Haveri', 'Machilipatnam', 'Witbank', 'Tierralta', 'Blytheville', 'Gorgan', 'Pallapatti', 'Sariq', 'Fort Worth', 'Hatonuevo', 'Steinbach', 'Anloga', 'East Stroudsburg', 'Rioverde', 'Villa Castelli', 'Hopa', 'Knysna', 'Koumassi', 'Pampierstad', 'Agawam', 'Utrera', 'Oggaz', 'Wadgira', 'Pagqen', 'Mizdah', 'Bni Tajjit', 'Douar Snada', 'Sandy Springs', 'Louviers', 'Mamak', 'Rimouski', 'Giddarbaha', 'Ittiva', 'Pedro Luro', 'Kerepestarcsa', 'Shohimardon', 'Santa Ana', 'La Sierra', 'Diguapo', 'Soio', 'Saint-Jean-de-Luz', 'Illkirch-Graffenstaden', 'Saint-Pierre-des-Corps', 'Aivanallur', 'Rio Piracicaba', 'Kompalle', 'Chiayi', 'Rostraver', 'Flossmoor', 'Frunze', 'General Roca', 'Aglipay', 'Nussloch', 'Yakinca', 'Kattupputtur', 'Kolkata', 'Bo', 'San Antonio del Tachira', 'Patrocinio', 'Cuvelai', 'Duarte', 'Panetha', 'Cave', 'Lahore', 'Maijdi', 'Janhapara', 'Tondi', 'Al Hamalah', 'Ludhiana', 'El Hammadia', 'Ejura', 'Antsalova', 'Fiadanana', 'Nedumana', 'Novo Aripuana', 'Qufadah', 'Igreja', 'Punganuru', 'Ranigaon', 'Presidente Kennedy', 'Pedda Kotayalanka', 'Loudoun Valley Estates', 'Vezirkopru', 'Dronfield', 'Ugo', 'Obburdon', 'Camiling', 'Cermik', 'Ampatuan', 'Alangulam', 'Candler-McAfee', 'Behenjy', 'Talanga', 'Garissa', 'Mbandaka', 'Senboku', 'Banganapalle', 'Pedreiras', 'Loreto', 'Elias Fausto', 'Columbia', 'Sao Ludgero', 'Revelganj', 'Uhersky Brod', 'Feira Grande', 'Norten-Hardenberg', 'Omura', 'Wilcannia', 'Bishopbriggs', 'Dandoli', 'Satna', 'Palni', 'Elda', 'Bakhtiyarpur', 'Kara-Tash', 'Holden', 'Kodumba', 'Udayendram', 'Wangjiaxian', 'Ragusa', 'Pijijiapan', 'Muret', 'Feldkirchen-Westerham', 'Ineu', 'Smithtown', 'Incesu', 'Svay Rieng', 'Bhucho Mandi', 'Mukaiengaru', 'Maiquetia', 'Manglaur', 'Centennial', 'Bonfinopolis', 'Cova Figueira', 'Ianca', 'Mahesana', 'San Francisco de Mostazal', 'Langenfeld', 'Buchholz in der Nordheide', 'Coutras', 'Grigiskes', 'Biswan', 'Perumbalai', 'Shujalpur', 'Ciudad Manuel Doblado', 'Tunzi', 'Great Harwood', 'Andhra Tharhi', 'Toudja', 'Ramsar', 'Gaesti', 'Champigny-sur-Marne', 'Ash Shaddadah', 'Santaquin', 'Quilpie', 'Tubay', 'Babylon', 'Pualas', 'Johi', 'Sahaspur', 'Padiham', 'Busto Garolfo', 'Kamatero', 'Miary', 'Ialysos', 'Seydisehir', 'Morgan Hill', 'Malard', 'Kafr ad Dawwar', 'Sancoale', 'Sakata', 'Jinoba-an', 'Sorgues', 'Sumedang', 'Sanrh Majhgawan', 'Dalanzadgad', 'Chundal', 'Piombino', 'Port Harcourt', 'Goyang', 'Puntarenas', 'Gaszowice', 'Santa Rosa de Copan', 'Carmo da Cachoeira', 'Bargersville', 'Beius', 'Stony Brook University', 'Razlog', 'Spenge', 'Ban Mae Kha Tai', 'Chilpur', 'Pacific Grove', 'Kalyan', 'Balmazujvaros', 'Hortolandia', 'Pataili', 'San Agustin de las Juntas', 'Sao Felix do Xingu', 'Prior Lake', 'Puzol', 'Udumanthala', 'Novopskov', 'Auerbach', 'Zarate', 'Makuyu', 'Ertvelde', 'Kommuru', 'Manari', 'Belonia', 'Immokalee', 'Kuiyibagecun', 'Ascoli Piceno', "Cassina de' Pecchi", 'Sousa', 'Alhaurin de la Torre', 'Djidian Kenieba', 'Los Rios', 'Castel Goffredo', 'Resana', 'Rehli', 'Puerto Boyaca', 'Bandar-e Anzali', 'Martha Lake', 'Chopinzinho', 'Friedrichsdorf', 'La Granja', 'San Agustin Loxicha', 'Platteville', 'Dighaun', 'Mebane', 'Pavumba', 'Epinay-sur-Seine', 'Alfredo Wagner', 'Singen', 'El Asintal', 'Sankrail', 'Nghi Son', 'Pithoragarh', 'Pudunagaram', 'Tsuchiura', 'Jhundpura', 'Itapecuru Mirim', 'Naranjal', 'Pinneberg', 'Schnaittach', 'Penco', 'Oissel', 'Dungra Chhota', 'Amudat', "Qo'shko'pir", 'Pachahi', 'Sufian', 'Haguenau', 'Mansapur', 'Discovery Bay', "'Ain Leuh", 'Tabernes de Valldigna', 'Medvode', 'Sillamae', 'Baker City', 'Gllogovc', 'Dhrangadhra', 'Homosassa Springs', 'Outreau', 'Beni', 'Meltonakkal', 'Mango', 'San Pedro Jocopilas', 'Roodepoort', 'Unterhaching', 'Ibiruba', 'San Bernardo', 'Marhanets', 'Itapitanga', 'Sivalarkulam', 'Maguing', 'Lianran', 'Smila', 'Forrest City', 'Lumino', 'Qingzhou', 'Bajina Basta', 'Bruggen', 'Tapiramuta', 'Ganga Sagar', 'Hvardiiske', 'Capitao Poco', 'Carranglan', 'Ekpe', 'Camana', 'Choyr', 'Tullahoma', 'Bothell', 'Livadeia', 'Samarate', 'Brixham', 'Anororo', 'Ypejhu', 'Bradford-on-Avon', 'Wigston Magna', 'Acigol', 'Puerto Salgar', 'Benicia', 'Fundeni', 'Dioumanzana', 'Chellalat el Adhaouara', 'Zonhoven', 'Vellar', 'Sceaux', 'Luuka Town', 'Kristiansand', 'Ripley', 'Lutry', 'Gurabo al Medio', 'Sepidan', 'Chubbuck', 'Harding', 'Rogatica', 'Maple Valley', 'Qal`at Bishah', 'Boladi', 'Terlizzi', 'Morriston', 'Jaslo', 'Mendrisio', 'Valangiman', 'Medveda', 'Barton upon Humber', 'Sauk Village', 'Dison', 'Little Bookham', 'Marlton', 'Harhorin', 'Rampur Kalan', 'Trofa', 'Gatesville', 'Etchojoa', 'Baichigan', 'New Hartford', 'Charleville-Mezieres', 'Anna', 'Unchahra', 'Porto de Mos', 'Ambhua', 'Al Minya', 'Morro do Chapeu', 'Saintard', 'Bukwo', 'Kawartha Lakes', 'Badr Hunayn', 'Valley Cottage', 'Duxbury', 'Baipingshan', 'Fuente de Oro', 'Nayudupet', 'Zhexiang', 'Khagra', 'Sarakhs', 'Saint-Pierre-du-Mont', 'Urumqi', 'Vannivedu', 'Cesano Maderno', 'Juarez', 'Neuruppin', 'Dig', 'Momil', 'Gondal', 'Torres Novas', 'Akkattettar', 'Dourbali', 'Ondo', 'Monjas', 'Mechernich', 'Arlit', 'Mandelieu-la-Napoule', 'Murska Sobota', 'Daean', 'Kalaleh', 'Bamei', 'Myingyan', 'Lingquan', 'Pescia', 'Malloussa', 'El Attaf', 'Panmana', 'Caqueza', 'Moulay Bousselham', 'Pathein', 'Reha Mota', 'Florencio Varela', 'Al Lataminah', 'Azogues', 'Jhandapur', 'Verucchio', 'Subachoque', 'Solila', "Rava-Rus'ka", 'Witu', 'Gander', 'General Pico', 'Tongluo', 'Serinhisar', "Fiorenzuola d'Arda", 'Vadamadurai', 'Bayou Cane', 'Plunge', 'Quakertown', "Limeira d'Oeste", 'Qulan', 'Agramonte', 'Jamunamukh', 'Gondomar', 'Lajas', 'Quinapundan', 'Kurten', 'Coremas', 'Mafra', 'Pinewood', 'Linguere', 'Miradouro', 'Baganga', 'Pauini', 'Trebur', 'Bollnas', 'Romulus', 'Banire Kore', 'Legnaro', 'Manatanna', 'Deruta', 'Finestrat', 'Chico', 'Mankono', 'Pimentel', 'Umirim', 'Velanganni', 'Natchitoches', 'Kanungu', 'West Palm Beach', 'Caraballeda', 'Tha Bo', 'Nishio', 'Bragado', 'Palopo', 'Concorezzo', 'Nagla', 'Nobres', 'Sagada', 'Bayona', 'Cheat Lake', 'Zion', 'Kochi', 'Cho Lach', 'Sannicandro di Bari', 'Pelotas', 'Selb', 'Sugarmill Woods', 'Oatlands', 'Basoda', 'Lakewood Ranch', 'Tenango del Aire', 'Tulkarm', 'Bella Union', 'Heath', 'Massaranduba', 'Higashikagawa', 'Barra do Choca', 'Bolivar', 'Aguacatan', 'Yakacik', 'Alexander Bay', 'Dehri', 'Mehnatobod', 'Thol', 'Burkburnett', 'Sanarate', 'Pollokshaws', 'Sencur', 'Zhanjiang', 'San Pedro Ixcatlan', 'Le Pre-Saint-Gervais', 'Jamunia', 'Opol', 'Ambalajia', 'Penola', 'Mangindrano', 'Oued el Alleug', 'Marai Kalan', 'Zatec', 'Southern Cross', 'San Luis de la Paz', 'Omihachiman', 'Illapel', 'Tsianisiha', 'Siteia', 'McFarland', 'Nova Resende', 'Rheinfelden', 'Mubi', 'Ourika Wawrmas', 'Quinte West', 'Korce', 'Kalanaur', 'Dhulian', 'Balen', 'Auterive', 'Wronki', 'Budha Theh', 'Quy Nhon', 'Cedro', 'Garlasco', 'Biharamulo', 'Chateaurenard', 'General Cabrera', 'Gostynin', 'Ad Diraz', 'Asau', 'Oceanside', 'Longmont', 'Nanzhuang', 'Satadougou', "Saint-Barthelemy-d'Anjou", 'Owariasahi', 'Zombo', 'Dudley', 'San Agustin Acasaguastlan', 'Lochem', 'Nidderau', 'Devarkonda', 'Leven', 'Munchberg', 'Mariani', 'Cornillon', 'Svidnik', 'Heek', 'Chak Thirty-six North Branch', 'Tholey', 'Charsadda', 'Restinga Seca', 'Villeneuve-Tolosane', 'Profondeville', 'Amla', 'Ludwigslust', 'Caterham', 'Al Ghardaqah', 'Mineral de la Reforma', 'Kosice', 'Sehnde', 'Imam Qasim', 'Ain El Aouda', 'Candido Mendes', 'Hacari', 'Massafra', 'Vittal', 'Koh Kong', 'Stockach', "Saint Paul's Bay", 'Tieshansi', 'Xinyi', 'Rawanduz', 'Nanyuki', 'Sarpamari', 'Tres Coracoes', 'Narutocho-mitsuishi', 'Cukai', 'Douchy-les-Mines', 'Mandirituba', 'Imouzzer Kandar', 'Oulad Teima', 'Lere', 'Kanavaypudur', 'Naugaon Sadat', 'Bonnyrigg', 'Charthawal', 'Znojmo', 'Cankova', 'Tondo', 'Badhan', "Wu'an", 'North Bellport', 'Ambatosoratra', 'Naduvattam', 'Columbia Heights', 'Neustadt am Rubenberge', 'Maturin', 'Ehringshausen', 'Dandkhora', 'Xinzhai', 'Winter Gardens', 'Bayur', 'Dame-Marie', 'Saco', 'Metsemotlhaba', 'Apen', 'Nuevo Laredo', 'Cofimvaba', 'Miryal', 'Shuangluan', 'Thorold', 'Zagarolo', 'Wenling', 'Dengtalu', 'Oharu', 'Sumoto', 'Osmaniye', 'Aquiraz', 'Bury Saint Edmunds', 'Nacajuca', 'Baao', 'Velbert', 'Pyskowice', 'North Platte', 'Prostejov', 'Paradise', 'Newington Forest', 'Gonghaur', 'Rawicz', 'Lamsabih', 'Zaoqiao', 'Chaoke', 'Renala Khurd', 'Saint-Sulpice-la-Pointe', 'Kerkrade', 'Glen Cove', 'Cottbus', 'Laurens', 'Mont-Laurier', 'Omalur', 'Loum', 'An Nu`maniyah', 'Allende', 'Jagdispur', 'Bourgoin-Jallieu', 'Icapui', 'Niiza', 'Disa', 'Rhynern', 'Mata', 'Straubing', 'Kingsnorth', 'Esch-sur-Alzette', 'Moyobamba', 'Flores Costa Cuca', 'Vetlanda', 'Hengbei', 'Hatillo de Loba', 'Conway', 'Ashta', 'Diphu', 'Sutculer', 'Bad Worishofen', 'Mahta', 'Umm Qurun', 'Strongsville', 'Kamidani', 'Playa del Carmen', 'Moralzarzal', 'Lauffen am Neckar', 'Kelishad va Sudarjan', 'Cocentaina', 'Harrislee', 'Sojat', 'Mahallat Damanah', 'South Frontenac', 'Ban Phan Chali', 'San Sebastian Coatan', 'Diego Martin', 'Thai Nguyen', 'Pewaukee', 'Ifanadiana', 'Asjen', 'Mankachar', 'Apparaopeta', 'DeRidder', 'Orleans', 'Nagojanahalli', 'Peddavadlapudi', 'Dadrewa', 'Sagure', 'Natal', 'Shirakawa', 'Calera de Tango', 'Sahbuz', 'Rodeio', 'Shaying', 'Sakoabe', 'Ramabhadrapuram', 'Kararan', 'Zolote', 'El Espinal', 'Laverton', 'Ramgarha', 'Sant Julia de Loria', "Sant'Agnello", 'Lidzbark Warminski', 'Shu', 'Pweto', 'Burscough', "Foum Jam'a", 'Hua Hin', 'Marovato', 'Gangapatnam', 'Rijeka', 'Jihong', 'Seacombe', 'Friendswood', 'Estreito de Camara de Lobos', 'Loei', 'Dan', 'Jerez de Garcia Salinas', 'Pukhrayan', 'Choma', 'Tlapa de Comonfort', 'Catbalogan', 'Boudenib', 'Paraparaumu Beach', 'East Lansing', 'Stradella', 'Tenali', 'Irungalur', 'Hollymead', 'Farap', 'Pitesti', 'Muzambinho', 'La Mesa', 'Mulki', 'Bruck an der Mur', 'Gongguan', 'Vayalar', 'Freiberg am Neckar', 'Passa e Fica', 'Myrhorod', 'Mutoko', 'Warwick', 'Shanmukhasundarapuram', 'Koscian', 'Nanyangcun', 'Patton', 'Grimes', 'Vempatti', 'Smizany', 'Chipinge', 'Miyota', 'Villajoyosa', 'Almoloya del Rio', "'Ali Ben Sliman", 'Brodosqui', 'Vikrutamala', 'Valatt', 'Aparecida', 'Guskhara', 'Oke-Mesi', 'Roche-la-Moliere', 'Sentrupert', 'Buraan', 'Kisii', 'Lohardaga', 'Angleton', 'Quakenbruck', 'Bystrzyca Klodzka', 'Ippagudem', 'Lunner', 'Almora', 'Hasilpur', 'Agiripalle', 'Parys', 'Loningen', 'King City', 'Suitland', 'Bathnaha', 'Gachancipa', 'Bad Bentheim', 'Fuyang', 'Federal Heights', 'Vallirana', 'Manandroy', 'Varidhanam', 'Valreas', 'Konstanz', 'Hissaramuruvani', 'Uripa', 'Kimbe', 'Medina Sidonia', 'Tarragona', 'Antindra', 'Montana', 'Cervignano del Friuli', 'Hezuo', 'Puerto Escondido', 'Salto de Pirapora', 'Gaobeidian', 'Chak Husaini', 'Grajales', 'Lykovrysi', 'Ankatafa', 'Sbiba', 'Matanog', 'Sikka', 'Avsallar', 'Sarykemer', 'Lighthouse Point', 'Fatehpur Sikri', 'Lakhminia', 'Nagari', 'Ansongo', 'Bad Nauheim', 'Trostberg an der Alz', 'Mogila', 'Mahrail', 'Mouans-Sartoux', 'Hude', 'Lazi', 'Berkhampstead', 'Nehram', 'Fussa', 'Waipahu', 'Dawei', 'Haldia', 'Olten', 'Apastepeque', 'Krzeszowice', 'Koini', 'Douar Messassa', 'Lima', 'Honcho', 'Mulchen', 'Acheres', 'Harper Woods', 'Tlacoachistlahuaca', 'Baikonur', 'De Bilt', 'Vianden', 'Arnold', 'Bhabhua', 'Al Ghizlaniyah', 'Chakur', 'Jixi', 'Kibiti', 'Viechtach', 'Comalcalco', 'Westfield', 'Uacu Cungo', 'Camargo', 'San Juan del Rio del Centauro del Norte', 'Bebington', 'Largs', 'Sompeta', 'Ipsala', 'Pola de Laviana', 'Jose Maria Morelos', 'Wasserburg am Inn', 'Al Hawiyah', 'Kilrajakularaman', 'Chak Sixty-one Gugera Branch', 'Jafaro', 'Yattir', 'Piura', 'Dachau', 'Bom Jesus dos Perdoes', 'Itacare', 'Delcevo', 'Negombo', 'Tenedla', 'Amadora', 'At-Bashy', 'Fort Collins', 'Amuria', 'Vicuna', 'Dahu', 'Eloy Alfaro', 'Beaumont', 'Lloro', 'Vinh Long', 'Padiyanallur', 'Tobruk', 'Pazar', 'Bideipur', 'Magdalena de Kino', 'Parla', 'Kanuma', 'Quatro Barras', 'Chikusei', 'Panambi', 'Arecibo', 'Bekatra Maromiandra', 'Ahus', 'Tilingzhai', 'Hanahan', 'Lunbei', 'Majhua', 'Bad Segeberg', 'Holywell', 'Tivim', 'El Charco', 'Mailiao', 'Chenlu', 'Beylul', 'Camarajibe', 'Le Puy-en-Velay', 'Santander de Quilichao', 'Nakasongola', 'Shabestar', 'Lindau', 'Hathaura', "Ayt 'Attou ou L'Arbi", 'Fiuggi', 'Dhekiajuli', 'Shuyangzha', 'Tokoroa', 'Chaozhou', 'Durres', 'Old Jamestown', 'Chimbote', 'Ivanovka', 'Silver Firs', 'Santa Ana Nextlalpan', 'Ourtzagh', 'Lysa nad Labem', 'Hirakata', 'Udine', 'Andrelandia', 'Seysses', 'Atami', 'Woudrichem', 'Brzeszcze', 'Zhangping', 'Rolante', 'Mabai', 'Varatanapalli', 'Catak', 'Mahabaleshwar', 'Galgamuwa', 'Kelso', 'Esquipulas', 'Seoni Malwa', 'Laboulaye', 'Winter Haven', 'Jimaguayu', 'Rury', 'Lubbenau/Spreewald', 'Dijiasuoxiang', 'Wielsbeke', 'Pagani', 'Kizhur', 'San Carlos City', 'Chaudfontaine', 'Camacupa', 'Celina', 'Roudnice nad Labem', 'Rajni', 'Umm Ruwaba', 'Harir', 'Suwasra', 'Rio Casca', 'Bry-sur-Marne', 'Fleron', 'Minamikyushu', 'Tsallagundla', 'Bashtanka', 'Soresina', 'Sogut', 'Parkersburg', 'Artesia', 'Barka Parbatta', 'San Carlos de Guaroa', 'Abreus', 'Sarsai Nawar', 'Saint-Marc', 'George', 'Kamamaung', 'Bayburt', 'Narsampet', 'As Sidrah', 'Menasha', 'Aberaman', 'Irikkur', 'Herning', 'Apeldoorn', 'El Campo', 'Thakhek', 'Karanjia', 'Maullin', 'Druten', 'Mundra', 'Paveh', 'Carovigno', 'Jodar', 'Montevarchi', 'LaGrange', 'Masanasa', 'Maddela', 'Chicago', 'Kamayakkavundanpatti', 'Xiangyang', 'Leizhou', 'Atner', 'Siderno Marina', 'Agra', 'Shin-Kamigoto', 'Anenecuilco', 'Sejenane', 'Paletwa', 'Pantanaw', 'Senec', 'Messaad', 'Brielle', 'Cangxi', 'Dagarua', 'Kingston South East', 'Allestree', 'Oslob', 'Bolsover', 'Grand Turk', 'Bohodukhiv', 'Boulogne-sur-Mer', 'Kyotanabe', 'Coimbatore', 'Jinshi', 'Ornskoldsvik', 'Woonsocket', 'Buhusi', 'Villanueva de la Canada', 'Karugamad', 'Jumla', 'Pueblo Nuevo Vinas', 'Tres de Mayo', 'Sal Rei', 'Valkeakoski', 'Kungsangen', 'Zulte', 'Kerewan', 'Tabango', 'Naha', 'Akkus', 'Tulcea', 'Moratuwa', 'Utsunomiya', 'Noeux-les-Mines', 'Los Barrios', 'Agadi', 'Hlohovec', 'Gages Lake', 'Tiverton', 'Golmarmara', 'Puszczykowo', 'Mangabe', 'Hohr-Grenzhausen', 'Zango', 'Cua Lo', 'Mansalay', 'Leverano', 'Yanahuanca', 'Sinaia', 'Gudluru', 'Vadugapatti', 'Ban Tha Phra', 'Kikwit', 'Cantagalo', "Yan'an Beilu", 'Tampakan', 'Glanerbrug', 'Copalchi', 'Zacualtipan', 'Fidenza', 'Tsararano', 'Ferguson', 'Renukut', 'Sao Sebastiao do Cai', 'Mainburg', 'Ceres', 'Uluberiya', 'Mauji', 'Telfs', 'Cachoeira dos Indios', 'Triunfo', 'Mittegrossefehn', 'Murehwa', 'Velika Polana', 'Borgosesia', 'Shichuanxiang', 'Befandefa', 'Pilar do Sul', 'Quimper', 'Saeby', 'Netanya', 'Peri-Mirim', 'Tidjelabine', 'Tecolutla', 'Anakaputtur', 'Quipapa', 'Dikili', 'Yuma', 'Parakadavu', 'Oxapampa', 'Odense', 'Tanambao-Daoud', 'Bussum', 'Dartford', 'Bilaua', 'Stoughton', 'Alcanena', 'Cloverleaf', 'Akambadam', 'Anomabu', 'Lo Barnechea', "Wen'anyi", 'Port Orchard', 'Ahuachapan', 'Horta', 'Bourg-de-Peage', 'Neuville-les-Dieppe', 'Walton upon Thames', 'Jaguaquara', 'Koshigaya', 'Wendeburg', 'Tall Shihab', 'Tounfafi', 'Al Hamah', 'Mostar', 'Illizi', 'Khaspur', 'Sirvan', 'Lapu-Lapu City', 'Sint-Gillis-Waas', 'Cassongue', 'Beahitse', 'Virapalle', 'Geislingen an der Steige', 'Bogalusa', 'Stavanger', 'Jiaji', 'Kannapuram', 'Bamukumbit', 'Maroua', 'Correggio', 'Bikkavolu', 'Kumaranallur', 'Guinagourou', 'Salonta', 'Zhutang', 'Tamarankottai', 'Tzucacab', 'Gurais', 'Taichung', 'Oas', 'Anjukulippatti', 'Melchor Ocampo', 'Nochistlan de Mejia', 'Victor', 'Limeira', 'Chalfont Saint Peter', 'La Marsa', 'Cullinan', 'Shimeo', 'Neuhof', 'Kannulu', 'Daganbhuiya', 'Antigua Guatemala', 'Bowmanville', 'Zemoura', 'Tyele', 'Cedartown', 'Kingsland', 'Toyomamachi-teraike', 'Bni Khloug', 'Montargis', 'Sarasambo', 'Aurora', 'Nilka', 'Siena', 'Santa Vitoria', 'Goz-Beida', 'Murray', 'Hennaya', 'Santa Isabel Cholula', 'Bellamkonda', 'Andasibe', 'Quvasoy', 'Villa Park', 'Thaon-les-Vosges', 'Acri', 'Limbdi', 'Kaisariani', 'Coconut Creek', 'Kitakoriyamacho', 'Gaggenau', 'Newmarket', 'Grombalia', 'Dogansehir', 'Chinsali', 'Moisei', 'La Vallee de Jacmel', 'Kadamalaikkundu', 'Wagga Wagga', 'Sonthofen', 'Sukkur', 'Zhaozhou', 'Boshruyeh', 'Macara', 'Cheongju', 'Nisko', 'Meftah', 'Amilly', 'Ageoshimo', 'Butzbach', 'Giria', 'Sultonobod', 'Sucua', 'Keregodu', 'Dudhgaon', 'Chalma', 'Pierre-Benite', 'Oberriet', 'Tramandai', 'Shiggaon', 'Whitestown', 'Zaqatala', 'Zaboli', 'Houston', 'Sorab', 'Enkhuizen', 'Kaniwara', 'Sare-Yamou', 'Kriens', 'Petersburg', 'Rawah', 'Montenegro', 'Boorama', 'Gangawati', 'Manjuyod', 'Argostoli', 'Clarksburg', 'Fall River', 'Boffa', 'Sodag', 'Hingyon', 'Kanding', 'Gresham', 'Peruru', 'Kemigawa', 'Manbazar', 'Capitol Hill', 'Bo`z', 'Murshidabad', 'Resplendor', 'Baishi Airikecun', 'Bulancak', 'Aibonito', 'East Islip', 'Mengibar', 'Kotwapatti Rampur', 'Qaryat al Qi`an', 'Estiva Gerbi', 'Sipalay', 'Mandrosohasina', 'Hadera', 'Partick', 'Pulawy', 'Lobamba', 'Vitoria de Santo Antao', 'Fada Ngourma', 'Al Hoceima', 'Agcogon', 'Konina', 'Mahbubnagar', 'Temamatla', 'Assab', 'Gautier', 'Mangalkot', 'San Benito', 'Ben Ahmed', 'Natori-shi', 'Wymondham', 'Hassi el Ghella', 'Pesca', 'Kuttappatti', 'Campo de Criptana', 'Pelham', 'Mont-Saint-Hilaire', 'Sao Joaquim', 'Lusca', "Topol'cany", 'Itagui', 'Artur Nogueira', 'Cheadle Hulme', 'Timmiarmiut', 'Carmopolis', 'Dolhasca', 'Mangghystau', 'Much', 'Vadamugam Vellodu', 'Vamanapuram', 'Bordj Mokhtar', 'Tougouni', 'Beldaur', 'Limbuhan', 'Paraibano', 'Realeza', 'Puchheim', 'Cardedeu', 'Cacocum', 'Luacano', 'Villavicencio', 'Fray Bartolome de Las Casas', 'Burke', 'Dabiya', 'Pizarra', 'Kothapet', 'Ile-Ife', 'Chittattukara', 'Chuadanga', 'Wausau', 'Ban Tha Ton', 'Feyzabad', 'Frankenberg', 'Gostivar', 'Magelang', 'Sabaya', 'Maleme Hodar', 'Virakeralam', 'Bhisho', 'Rakhiv', 'Manjimup', 'Catalpinar', 'Liaoyuan', 'Akureyri', 'Ris-Orangis', 'Larena', 'Ampelokipoi', 'Waterloo', 'Siguiri', 'Sujina', 'Monte Alegre do Piaui', 'Maicao', 'Ambohimiadana', "Pan'an", 'Vallauris', 'Lingampalli', 'Ewell', 'Tambau', 'South Union', 'Moga', 'Talghar', 'Salinas', 'Gonghe', 'Temple City', 'Ternat', 'San Ramon de la Nueva Oran', 'Donaueschingen', 'Adilcevaz', 'Plainedge', 'Salotgi', 'Barotac Nuevo', 'Chevigny-Saint-Sauveur', 'Jaisalmer', 'Cameri', 'Lowshan', 'Pleasant View', 'Gazipura', 'Indwe', 'Yen Bai', 'Hospet', 'Jamai', 'Palmetto Estates', 'Ponnani', 'Hwlffordd', 'Tiny', 'Bhattu Kolan', 'Yangsan', 'Allanridge', 'Quyang', 'Agoo', 'Sabbavaram', 'Chamical', 'Buritama', 'Minna', 'Millbury', 'San Jose Villa de Allende', 'Ercis', 'Mogok', 'Dona Remedios Trinidad', 'Rio Novo do Sul', 'Kormend', 'Ambohimitombo', 'New Carrollton', 'Mount Airy', 'Qinhuangdao', 'Madingou', 'Robbinsdale', 'Huejucar', 'Erraballa', 'Andover', 'Kefar Yona', 'Sweet Home', 'Deming', 'Asakuchi', 'Al Mu`abbadah', 'Varna', 'Pine Bluff', 'Rovira', 'Brackley', 'Kodala', 'Patu', 'Satoraljaujhely', 'Ambohimanga', 'Cherukolattur', 'Marechal Deodoro', 'Pelaya', 'Cape Town', 'Sarakkayhalli', 'Manado Light', 'Ihsaniye', 'Yangiobod', 'Natagaima', 'Mattathur', 'Strathroy-Caradoc', 'Talagante', 'Montclair', 'Kiryandongo', 'Nzeto', 'Pativilca', 'Quiapo', 'Pulilan', 'Serra Preta', 'Alagoa Grande', 'Jerez', 'Edam', 'Merces', 'Galaz', 'Pahuatlan de Valle', 'Nathpur', 'Orem', 'Milford Mill', 'Vimercate', 'Baie de Henne', 'Fanandrana', 'Stenungsund', 'Fondi', 'Vipparla', 'Arroyomolinos', 'Kaniyur', 'Cristuru Secuiesc', 'Marawi City', 'Dong Hoa', 'Moura', 'Selsey', 'Dinalupihan', 'Busko-Zdroj', 'Itatim', 'Foumban', 'Purwa', 'Woippy', 'Muzhakkunnu', 'Backa Topola', 'Colmenarejo', 'Burg', 'Ashoknagar', 'Manerbio', 'Montmorency', 'Carteret', 'Kotekara', 'El Socorro', 'Mayahaura', 'Besigheim', 'Barsinghausen', 'Raita', 'Zwenkau', 'Fords', 'Palamedu', 'McHenry', 'Rahovec', 'Lajosmizse', 'Sakhmohan', 'Lyons', 'Ulatu', 'Tuskegee', 'Humacao', 'Launaguet', 'Paranaiba', 'Shitab Diara', 'Asansol', 'Signal Mountain', 'Hammam Bou Hadjar', 'Novi Sad', 'Karghar', 'Arzano', 'Thatri', 'Vossevangen', 'Cuya', 'Lymanka', 'Bucak', 'Cathedral City', 'Sidi Slimane', 'Tapejara', 'Marco', 'Tingloy', 'Shelby', 'Sijua', 'Moss Point', 'Satwas', 'Parkal', 'Sitapur', 'Maniyamturuttu', 'Nakhla', 'Ispica', 'Nayagaon', 'Telimele', 'Mono', 'Kolga', 'Mirjaveh', 'Warrensburg', 'Furstenfeld', 'Oswiecim', 'Pariyapuram', 'Cuito Cuanavale', 'Yoqne`am `Illit', 'Komatsu', 'Farmersville', 'Mazar-e Sharif', 'Tasil', 'Surla', '`Ain el Hadjel', 'Potukonda', 'Alcochete', 'McKinleyville', 'Lage', 'Dugda', 'Candijay', "Intich'o", 'Mercier', 'Kamnik', 'Tleta Taghramt', 'Dungannon', 'Montespertoli', 'Port Jervis', 'Matiguas', 'Tago', 'Oulad Daoud', 'Haymana', 'Tshela', 'Aso', 'South Hadley', 'Doranahalli', 'La Palma', 'Laitila', 'Stara Zagora', 'Az Zuwaydah', 'Storozhynets', 'Muppalla', 'Urganch', 'Bialystok', 'Libertad', 'Bilara', 'Zemio', 'Mbabane', 'Tsukawaki', 'Middle Valley', 'Sierra Vista Southeast', 'Balboa', 'Ostrowiec Swietokrzyski', 'Lucapa', 'Chiang Rai', 'Khujand', 'Great Baddow', 'Buthidaung', 'El Chol', 'Albuera', 'Carmignano', 'Sommacampagna', 'Kutum', 'Cambambe', 'Cunha Pora', 'Longtaixiang', 'Lawas', 'Meru', 'Ibiuna', 'Aalen', 'Kuah', 'Bertem', 'Consuegra', 'Gamarra', 'Chunakhali', 'Tavsanli', 'Kolwezi', 'Dori', 'Melsele', 'Murugampalaiyam', 'Yirga `Alem', 'Vrnjacka Banja', 'Bentota', 'Bron', 'Uncia', 'Woerden', 'San Juan Nonualco', 'National City', 'Rajshahi', 'Mount Kisco', 'Ludza', 'Pattambi', 'Korsimoro', 'Keota', 'Marion', 'Kyaukmyaung', 'Algun', 'Montrose', 'Loganville', 'Gracemere', 'Anthony', 'Lapeer', 'Blachownia', 'Dimiao', 'Dellys', 'Haramachida', 'Porto da Folha', 'Ullo', 'Iju', 'Caister-on-Sea', 'Dunaharaszti', 'Ghanpur', 'Ban Mae Ka Hua Thung', 'Ajodhya', 'La Garriga', 'Canela', 'Mallappadi', 'Rossington', 'Suriapet', 'La Caleta', 'Kumbhraj', 'Chengdu', 'Tarnowskie Gory', 'Wattwil', 'Hoogeveen', 'Jagalur', 'Veymandoo', 'Sahatona-Tamboharivo', 'Jaqueira', 'Marshfield', 'Jonkoping', 'Navabad', 'Phuc Yen', 'Pinyahan', 'Palanga', 'Fujiyoshida', 'Quela', 'Ulcinj', 'Sao Raimundo das Mangabeiras', "L'Oulja", 'Polas', 'Vattalkundu', 'Lod', 'Suwanee', 'Asse', 'Bensekrane', 'Siki', 'Neira', 'Middelharnis', 'Managua', 'Ambatomborona', 'Touama', 'Karatepe', 'Willoughby Hills', 'Boonton', 'Mwatate', 'Sanchahe', 'Bommayapalaiyam', 'Fort Bliss', 'Bronx', 'Nahuizalco', 'Ankazomiriotra', 'East Kelowna', 'Bacioi', 'Hassloch', 'Hengyang', 'Xishan', 'Tamuin', 'Vestmanna', 'Bochnia', 'Znin', 'Purattur', 'Arapongas', 'Montividiu', 'Yagoua', 'Chinna Annaluru', 'Chiatura', 'Dhalai', 'El Tablon', 'Jyllinge', 'Ives Estates', 'Arenoso', 'Eppstein', 'Stony Plain', 'Charleroi', 'Bressanone', 'Graulhet', 'Diksmuide', 'Heverlee', 'Nova Russas', 'Sholaqqorghan', 'Viskovo', 'Palenque', 'Gatada', 'Kannanendal', 'Wadenswil', 'Nepomuceno', 'Dalli Rajhara', 'Okpo', 'Canelinha', 'Saharbani', 'Somma Lombardo', 'Bojnik', 'Beylikduzu', 'Ban Pha Bong', 'Trumbull Center', 'Mandera', 'Rio do Pires', 'Bagnolo in Piano', 'Tianchang', 'Lillerod', 'Ipswich', 'Olaippatti', 'Duzici', 'Pernamitta', 'Villa de Zaachila', 'Wakefield', 'Mandello del Lario', 'Heywood', 'Adigrat', 'Juchitepec', 'Nizhyn', 'San Miguel de Allende', 'Salemata', 'Fenton', 'As Sulaymaniyah', 'Olhanpur', 'Moon', 'Churchdown', 'Berezne', 'Avenal', 'Mettuppalaiyam', 'Fort St. John', 'Ghogardiha', 'Palia Kalan', 'Dardenne Prairie', 'Bezerros', 'Iowa City', 'Mondragone', 'Baojishi', 'Cuiaba', 'Kopparam', 'Gokavaram', 'Karamursel', 'Bariariya Tola Rajpur', 'Guambog', "Mirassol d'Oeste", 'Manbengtang', 'Cali', 'Yaguachi Nuevo', 'Tiruppalai', 'Kodivalasa', 'Elakadu', 'Hadyach', 'Kumaralingam', 'La Victoria de Acentejo', 'Busembatia', 'Arbroath', 'Mahanoro', 'El Qaa', 'Ixhuatlan de Madero', 'Golub-Dobrzyn', 'Bundala', 'Lurate Caccivio', 'Palapag', 'Biba', 'Gokceada', 'Kuzhittura', 'Assomada', 'Imias', 'Obihiro', 'Makinohara', 'Chettiyapatti', 'Wuyuan', 'Tukrah', 'Gorinchem', 'Chettikulam', 'Knic', 'Piratini', 'Takamatsu', 'Ichhapur', 'Ross', 'Jamay', 'Coyotepec', 'Hani', 'Malilipot', 'Kozloduy', 'Antwerp', 'Vikramasingapuram', 'Houma', 'Osawa', 'Colonia del Sacramento', 'Daulatnagar', 'Nannamukku', 'Wanderley', 'Dunhuang', 'Campolongo Maggiore', 'Chiche', 'Merksem', 'Kaboila', 'Boscoreale', 'Lordegan', 'Douar Laouamra', 'General Juan Madariaga', 'Kottangara', 'Badurpalle', 'Rumoi', 'Blackstone', 'Marinha Grande', 'Honmachi', 'Oostkamp', 'Xingangli', 'Castelfidardo', 'Zemun', 'Krishnagiri', 'Haomen', 'Korbach', 'Governador Celso Ramos', 'East Pennsboro', 'Aurich', 'Ajjipuram', 'Bury', 'Nazare da Mata', 'Humberto de Campos', 'Pechea', 'Cinarcik', 'Jidd Hafs', 'Noyon', 'Zhaodianzi', 'Sacavem', 'Bilopillya', 'Pader', 'Miami Lakes', 'Aplao', 'Camabatela', 'Domingos Martins', 'Mahon', 'Takaba', 'Konskie', 'Basso', 'San Juan Despi', 'Picui', 'Ranipet', 'Shidongcun', 'Santa Rita do Sapucai', 'Park Hills', 'Caldera', 'Gorenja Vas', 'Holborn', 'Welland', 'Radzyn Podlaski', 'Riacho dos Machados', 'Dukli', 'Motherwell', 'Santa Rosa de Viterbo', 'El Ateuf', 'Villa Corona', 'Rajaram', 'Payipira', 'Molnlycke', 'Kautalam', 'Duisburg', 'Ghoradongri', 'Sangasso', 'Tacna', 'Santa Helena', 'Casablanca', 'Kalliyassheri', 'Xiancun', 'Dabaga', 'Chilcuautla', 'Sihora', 'New Britain', 'Kingsburg', 'Saint-Raymond', 'Garoua', 'Tulunan', 'Torello', 'Punal', 'Beutelsbach', 'Telenesti', 'Abim', 'Ed Damour', 'Tecamachalco', 'Nootdorp', 'Centar Zupa', 'Bernay', 'Jatauba', 'Azrou', 'Hashtrud', 'Chimichagua', 'Radzymin', 'Feldkirchen', 'Houzhuang', 'Brant', 'Saint-Mande', 'Marauatpur', 'Radovljica', 'La Entrada', 'Kidbrooke', 'Dnestrovsc', 'Melksham', 'Keshan', 'Stockholm', 'Fort Mitchell', 'Haigerloch', 'Altinozu', 'Odzaci', 'La Dorada', 'Al Buraymi', 'New Glasgow', 'Vermilion', 'Chongshan', 'Kavarna', 'Ban San Phak Wan Luang', 'Puliyampatti', 'Benslimane', 'Chaumont-Gistoux', 'Mae Hong Son', 'Agia Paraskevi', 'Kattipparutti', 'Afyonkarahisar', 'Berdychiv', 'Alewah', 'Pachrukhi', 'Warzat', 'Tilaran', "Gonfreville-l'Orcher", 'Muradiye', 'Qara', 'Defiance', 'Bagu Na Mohra', 'Milenaka', 'Ruda Slaska', 'Suchiapa', 'Pachuca', 'Kingston upon Thames', 'Kumiyama', 'Enriquillo', 'El Astillero', 'Bergheim', 'Pindoretama', 'Ardabil', 'Lucban', 'Cacuso', 'Merimandroso', "Sa'in Qal`eh", 'Paula Candido', 'Varapatti', 'Erikolam', 'Clarksdale', 'Carhue', 'Lisle', 'Teotitlan', 'Chebrolu', 'Gabrovo', 'Shambhunath', 'Creve Coeur', 'Silver City', 'Matehuala', 'Tefe', 'Nohfelden', 'Yangambi', 'Camotan', 'Las Mercedes', 'Chevella', 'Sokolov', 'Porto Calvo', 'Kayattar', 'Batu Gajah', 'Uppada', 'Majalgaon', 'Saint-Ghislain', 'Staveley', 'Jaffar Khanpet', 'Chaiyaphum', 'Parole', 'Arara', 'Hatfield', 'Santiago Chimaltenango', 'Cao Bang', 'Gungoren', 'Marechal Floriano', 'Rio Cuarto', 'Tazishan', 'Binjai', 'Pattensen', 'Zhangjiazhuang', 'Maasin', 'Kizhakkanela', 'Taquarituba', 'Quedlinburg', 'Schorndorf', 'Melilli', 'Griffin', 'Ambondromisotra', 'Cetinje', 'Channelview', 'Hanur', 'Kenmore', 'Jayrud', 'Dollis Hill', 'Taldyqorghan', 'Al `Uqaylah', 'Mutia', 'Dabra', 'Mae Ai', 'Bamban', 'Mayantoc', 'Mechta Ouled Oulha', 'Sacapulas', 'Jaynagar-Majilpur', 'Gladeview', 'Dayong', 'Monte Rico', 'Nova Soure', 'San Juan Zitlaltepec', 'Almel', 'Sakhua', 'Kromeriz', 'Vitorino', 'Hayward', 'Kahnuj', 'Manalurpettai', 'Altstatten', 'Sydney Mines', 'Liuhu', 'Kasaali', 'Acton', 'Jiquipilas', 'Talegaon Dhamdhere', 'San Antonio Enchisi', 'Kaduna', 'Saint-Ave', 'Renapur', 'Carlos Spegazzini', 'Riverdale', 'Sint-Gillis-bij-Dendermonde', 'El Aioun', 'Ichinoseki', 'Phulera', 'Donwari', 'Motipur', 'Capitao de Campos', 'York', 'Rancho Mirage', 'Ekma', 'Thogapalle', 'Glodeni', 'Sedan', 'Cupar', 'Charbagh', 'Sahasmal', "Bazal'tove", 'Gorkha', 'Banha', 'Oguzeli', 'Nauagarhi', 'Zaio', 'Pirojgarh', 'Aoshang', 'Faruka', 'Boaco', 'Rani Sagar', 'Cao Lanh', 'Boyabat', 'Waslala', 'Vohitsaoka', 'Dayton', 'Borujen', 'Konya', 'Naj` Hammadi', 'Yatomi', 'Laferriere', 'Deo', 'Pampan', 'Citluk', 'Naranjito', 'Haiwei', 'Bohinjska Bistrica', 'Kochkor-Ata', 'Chiromo', 'Kimitsu', 'Linkoping', 'Valinhos', 'Lozova', 'Siyazan', 'Blenheim', "Olho d'Agua das Cunhas", 'Oak Park', 'La Macarena', 'Tasso Fragoso', 'Ladkhed', 'Taber', 'Atchoupa', 'Quilpue', 'Cimislia', 'Cimerak', 'Ar Ruhaybah', 'Xo`jaobod', 'Antombana', 'Tarnos', 'Blackhawk', 'Hengchun', 'Petrzalka', 'Ambatolampy', 'Renai', 'Massalubrense', 'Marcianise', 'Mako', 'Palmito', 'Marshalltown', 'Taywarah', 'Guiyang', 'Patan', 'Montichiari', 'Vittoriosa', 'Chili', 'Manasa', 'Khushab', 'Kujukuri', 'Zhongzhai', 'Cagua', 'Kryzhopil', 'Phuntsholing', 'Chhapra Bahas', 'Banak', 'Wolfhagen', 'Biyala', "An'gang", 'Ban Phe', 'Andoain', 'Martina Franca', 'Tadikonda', 'Itapetininga', 'Wusong', 'La Carolina', 'Greeneville', 'Datori', 'Gulbahor', 'Weifen', 'Farnley', 'Piedade', 'Reiskirchen', 'Mahitsy', 'Akouda', 'Hucclecote', 'Antsenavolo', 'Jever', 'Sablayan', 'Holzkirchen', 'Ramara', 'Phulhara', 'Ridgeland', 'Youdiou', 'Talwandi Chaudhrian', 'Tadian', 'Messias', 'Suar', 'Minamisuita', 'Nipani', 'Jugial', 'Bisignano', 'Opole', 'El Jicaro', 'Joensuu', 'Todupulai', 'Kamituga', 'Guararapes', 'Pike Creek Valley', 'Taimali', 'Chiconquiaco', 'Brookfield', 'Belo Jardim', 'Uttaramerur', 'Tokorozawa', 'Mulungu', 'Spencer', 'Sopron', 'Catape', 'Mata-Utu', 'Cha Grande', 'Jandira', 'Kunnatnad', 'Kolhapur', 'Manchenahalli', "Cox's Bazar", 'Mahisi', 'Nunspeet', 'Xocali', 'Hitachiomiya', 'Beaucaire', 'Soissons', 'Hjorring', 'Antonio Cardoso', 'Blaydon', 'Jhalida', 'Minden', 'Wardenburg', 'Ain Bessem', 'Milwaukee', 'Coventry', 'Puli', 'Arqalyq', 'Januaria', 'Los Almacigos', 'Moreton', 'Jennings', 'Laguna Woods', 'Biloela', 'Chaddesden', 'Monmouth Junction', 'Sullivan', 'Wassenberg', 'Canyon', 'Mograne', 'Mardakan', 'Areia', 'El Tigre', 'Wallingford', 'Goshogawara', 'Chandannagar', 'San Marcos', 'Pedra Branca', 'Dhani Sukhan', 'Barni', 'Maumere', 'Strathroy', 'Guozhen', 'Hlobyne', 'Blace', 'Pentecoste', 'Malden', 'Bad Orb', 'Hallstahammar', 'Delano', 'Bhadsara', 'Thal', 'Dongnanyanfa', 'Rio Bueno', 'Vendas Novas', 'Chhara', 'Katarmala', 'Ngorongoro', 'Takaoka', 'Kalianpur', 'Smarje', 'Mangaldan', 'Aulla', 'Koila Belwa', 'Shenjiaba', 'Emmen', 'Ambalavayal', 'Crepy-en-Valois', 'Ladwa', 'Oxchuc', 'Khagaur', 'Esmeraldas', 'Cirkulane', 'Kaviti', 'Wadsworth', 'Lukovit', 'Venafro', 'Saint-Gely-du-Fesc', 'Paramount', 'Dounan', 'Fort Beaufort', 'Ban Mueang Na Tai', 'Hornsey', 'Collingwood', 'Lower Allen', 'Rangwasa', 'Merelbeke', 'Jamao al Norte', 'Caudebec-les-Elbeuf', 'Cakovec', 'Matatiele', 'Carmen de Patagones', 'Kivsharivka', 'Faberg', 'Doornkop', 'Istres', 'Ancud', 'Ovidiu', 'Strangnas', 'Shazhou', 'Gazantarak', 'Daule', 'El Ksar', 'Riobamba', 'Paxtakor Shahri', 'Vontimitta', 'Pyinmana', 'Sunset Hills', 'Raha Tiga', 'Ujfeherto', 'Kiskoros', 'Benin City', 'Delhi Cantonment', 'Khowy', 'Ban Rangsit', 'Cabudare', 'Valderrama', 'Turki', 'Lohne', 'Pangkalpinang', 'Bazhajiemicun', 'Ribat Al Khayr', 'Estaimpuis', 'Mineros', 'Huanghuajie', 'Espungabera', 'Bawku', 'San Bartolo', 'Raghogarh', 'Jaito', 'Kakiri', 'Radzionkow Nowy', 'Chilakalurupet', 'Maragheh', 'Alacati', 'Gata', 'Rohera', 'Lakshmicharipara', 'Myronivka', 'Matriz de Camarajibe', 'Tamarana', "Ambinanin' Andravory", 'Kirakira', 'Bramhall', 'Roccastrada', 'Hasbergen', 'Ranjal', 'Sawakin', 'Romblon', 'Jogaili', 'Kaga', 'Jinka', 'Limerick', 'Chatou', 'Narman', 'Ponce', 'Baskale', 'Wilnsdorf', "Saint-Michel de l'Atalaye", 'Pakala', 'Sao Sebastiao da Boa Vista', 'Spring Hill', 'Orkney', 'Mahasolo', 'Ranranagudipeta', 'Oeiras', 'Carlos Manuel de Cespedes', 'Stavroupoli', 'Conner', 'Koscielisko', 'San Donato Milanese', 'Udon Thani', 'Jinchang', 'Dzuunharaa', 'Castries', 'Inegol', 'Novohrodivka', 'Chyhyryn', 'Cabarete', 'Ganderkesee', 'Bielawa', 'Roura', 'Namegawa', 'Shabqadar', 'Geylegphug', 'Guapi', 'Urgnano', 'Zhitiqara', 'Wildberg', 'Begusarai', 'Heihe', 'Zegzel', 'Or Yehuda', 'Uenohara', 'Atherstone', 'Taku', 'Central Islip', 'Dessel', 'Coal', 'Gaoua', 'Narwana', 'Shuzenji', 'West Lafayette', 'Hazar', 'Kongsvinger', 'Povoa de Varzim', 'Zunheboto', 'Kalmiuske', 'Zacatepec', 'Rethen', 'Benjamin Aceval', 'Bandeirantes', 'Yakou', 'Passi', 'Malanje', 'Catacaos', 'Capelinha', 'Qingyang', 'Alcira', 'La Gloria', 'Tucson', 'Ban Chorakhe Samphan', 'Cienaga', 'Pebane', 'Lontra', 'Haikoucun', 'Dhanwar', 'Weifang', 'Thari Mir Wah', 'Nakano', 'Coron', 'Maysville', 'Jeonghae', 'Trgoviste', 'Salaga', "Nan'ao", 'Locri', 'Kenndie', 'Florsheim', 'Ouroeste', 'Pizzo', 'Woodmere', 'Muzaffarpur', 'Vicencia', 'Yuriria', 'Andoas', 'Chauhanpatti', 'Debila', 'Xiwanzi', 'Mexico City', 'Perico', 'Nova Ipixuna', 'Pullambadi', 'Osaka', 'Kazo', 'Garut', 'Tonya', 'Zawiyat Razin', 'Hajdunanas', 'Mollerusa', 'Nuevo San Carlos', 'Inverurie', 'Cuddapah', 'Leiria', 'Magdeburg', 'As Saqlawiyah', 'Sidhi', 'Jitaicun', 'Alegrete', 'Arwal', 'Pueblo Nuevo', 'Kovilur', 'Valaiyampattu', 'Kollengode', 'Southside', 'Tiruvattar', 'Ahlat', 'Chandpura', 'Singleton', 'Valu lui Traian', "K'olito", 'Norzagaray', 'Onet Village', 'Maserada sul Piave', 'Gaziantep', 'Borgentreich', 'Cheb', 'Santa Maria Atzompa', 'Caspe', 'Dalton', 'Ilebo', 'Harpenden', 'Tiznit', 'Fredrikstad', 'Itaparica', 'Zabol', 'Majali', 'Ubombo', 'Karaiyampudur', 'Port Orange', 'Atitalaquia', 'Caldas', 'Shyroke', 'Songadh', 'Hai Duong', 'Ayamonte', 'Heroica Guaymas', 'Haraiya', 'Iarintsena', 'Cardoso', 'Yoloten', 'Tandrano', 'Tingo Maria', 'Quimbaya', 'Jambalo', 'Samaxi', 'Leme', 'Bougtob', 'Sudipen', 'Artigas', 'Ambatotsipihina', 'Charlottesville', 'Creutzwald', 'Mangbwalu', 'Tempe', 'Agareb', 'Jurh', 'Tambo', 'Kungalv', 'Navipet', 'Ramapattanam', 'Baykan', 'Suhr', 'Taurisano', 'Maydolong', 'Oulad Said', 'Pianiga', 'Jefferson Hills', 'Paine', 'Pocheon', 'Vennesla', 'Porto Recanati', 'Mirpeta', 'Sendenhorst', 'Carmichael', 'Borgo San Lorenzo', 'Torton', 'Banta', 'Urbino', 'Kedzierzyn-Kozle', 'Yamunanagar', 'Ishidoriyacho-eso', 'Tatui', 'Amarapura', 'Kodoli', 'Monterey', 'Basavakalyan', 'Beroun', 'East Donegal', 'To`raqo`rg`on', 'Usharal', 'Harihar', 'Amborompotsy', 'Barkly East', 'Charaut', 'Kuttattuppatti', 'Aracagi', 'Hongzhai', 'Sonqor', 'Krishnamsettipalle', 'Balugan', 'Agua Preta', 'Laayoune', 'Huyuk', 'Muban Saeng Bua Thong', 'Saviano', 'Florina', 'Naestved', 'Itabaianinha', 'Tekkumbagam', 'Valls', 'Ozuluama de Mascarenas', 'Wanluan', 'Toledo', 'Beawar', 'Parimpudi', 'Ramachandrapuram', 'Jalpatagua', 'Chur', 'Jajireddigudem', 'Salgotarjan', 'Thames Ditton', 'Sao Joao dos Poleiros', 'Japoata', 'Backi Petrovac', 'Mugalivakkam', 'Bachra', 'Ahmadpur East', 'Ganserndorf', 'Attibele', 'Briniamaro', 'Bazidpur', 'Santos', 'Aimores', 'Ciudad de la Costa', 'Westchester', 'Brand', 'Codlea', 'Nallagunta', 'Rovinari', 'Schiedam', 'Highland Village', 'Heikendorf', 'Ifs', 'Chuzhou', '`Anadan', 'Acornhoek', 'Fonds Verrettes', 'Hikone', 'Cartagena del Chaira', 'Moyogalpa', 'Cariari', 'Moe', 'Sahatavy', 'Otley', 'Nambiyur', 'Camden', 'Pomona', 'Vizianagaram', 'Lalo', 'Kafr Zayta', 'Bruchsal', 'Gua', 'Ali Sabieh', 'Ouargaye', 'Huzurnagar', 'Pharaha', 'Yayas de Viajama', 'Novi di Modena', 'Chofugaoka', 'Besagarahalli', 'Paulinia', 'Davis', 'Landhaura', 'Apace', 'San Felice sul Panaro', 'Silvino Lobos', 'Liugoucun', 'Cislago', 'Bububu', 'Gohpur', 'Bhatkal', 'Kotal', 'Lons-le-Saunier', 'Sisai', 'Palangarai', 'Harohalli', 'Tawargeri', 'Newton', 'Muscoy', 'Kade', 'Sene', 'Morarano Chrome', 'Nibria', 'Rosbach vor der Hohe', 'Hilpoltstein', 'Seddouk Oufella', 'Arbaa Sahel', 'Olivar', 'Sevan', 'Forquilhinha', 'Lake Mary', 'Paramus', 'Naula', 'Hukeri', 'Pinheiro', 'Sivaganga', 'Baleraja', 'Koiri Bigha', 'Le Pecq', 'Koflach', 'Namerikawa', 'Yavuzeli', 'Songzi', 'Etawa', 'Shek Tong Tsui', 'Long Binh', 'Tecuala', 'Vyronas', 'Potirendaba', 'Luino', 'Hazle', 'Capitan Mauricio Jose Troche', 'Attiecoube', 'Queensburgh', 'Santiponce', 'Fleurus', 'Ammapettai', 'Montoro', 'Al Waqf', 'Bam', 'Dehaqan', 'Santa Lucia Cotzumalguapa', 'Tucuran', 'Ambesisika', 'Bholsar', 'Chino', 'Androndrona Anava', 'Country Club Estates', 'Laren', 'Asolo', 'Aarsal', 'Sumner', 'Majalengka', 'Barwaaqo', 'Bon Air', 'Ampitatafika', 'Lizzano', 'Majdel Aanjar', 'Dahmani', 'Bhauradah', 'Qaha', 'Khorramabad', 'Shyamnagar', 'Suisun City', 'Frecheirinha', 'Shaw', 'Chepen', 'Nulvi', 'Sao Miguel Arcanjo', 'Kollipara', 'Pajara', 'Kortemark', 'Cha da Alegria', 'Crawford', 'Mistelbach', 'Erkner', 'Ituni', 'Odate', 'Nanded', 'Piekary Slaskie', 'Ainapur', 'Kapaa', 'Nemuro', 'Huolu', 'Banane', 'Tredegar', 'Farkhor', 'Tlayacapan', 'Szprotawa', 'Panchari Bazar', 'Hirson', 'Kukrahill', 'Zierikzee', 'Talata-Volonondry', 'Medjana', 'Santa Maria La Pila', 'Puligunta', 'Kataha', 'Weston', 'Kaikalur', 'Glen Ellyn', 'East St. Paul', 'Crown Point', 'Lagoa', 'Aybasti', 'Awaji', 'Sulibele', 'Puerto Caicedo', 'La Jigua', 'Vera', 'Friedrichsthal', 'El Dorado', 'Bezaha', 'Pagegiai', 'Ban Sai Ma Tai', 'Trzic', 'Sao Benedito do Rio Preto', 'Arcata', 'Brunete', 'Cousse', 'Itapeva', 'Shaoshanzhan', 'Ban Samo Khae', 'Fort Smith', 'Giannitsa', 'Gerona', 'Degeh Bur', 'Awsim', 'Blitar', 'Basavilbaso', 'Palsud', 'Sibolga', "Dias d'Avila", 'Cascades', 'Tecpan Guatemala', 'Overland', 'Levin', 'Tolongoina', 'Asten', 'Tamandare', 'Kannamangalam Tekku', 'Kandakkadava', 'Kilattingal', 'Bayyanagudem', 'Bremervorde', 'Wyandanch', 'Lutz', 'Kasanda', 'Tungavi', 'Palompon', 'Vinjamur', 'Ponteland', 'Kamienna Gora', 'Springboro', 'Morsbach', 'Depalpur', 'Londuimbali', 'Garbagnate Milanese', 'Yenimahalle', 'Masagua', 'Machiques', 'Carbonia', 'Pisco', 'Lynwood', 'Choisy-le-Roi', 'Ambalaromba', 'Seremban', 'Kolar', 'Telkapalli', 'Deoria', 'Glenn Dale', 'Notre-Dame-des-Prairies', 'Majayjay', 'Luzhang', 'Steha', 'Sao Jose do Cerrito', 'Zwettl', 'Lubango', 'Moka', 'Sarpang', 'Farrukhnagar', 'Acanceh', 'Dumas', 'Minignan', 'Mazatan', 'Kadod', 'Capotille', 'Wertheim', 'Gloucester Point', 'Jibou', 'Champasak', 'Katuete', 'Atuntaqui', 'Tupi', 'Nort-sur-Erdre', 'Leland', 'Zongolica', 'North St. Paul', 'O`nhayot', 'South River', 'Euclid', 'Miedzyrzec Podlaski', 'Madinat ash Shamal', 'Sandanski', 'Parora', 'Gela', 'Beronono', 'Nederland', 'Lower Macungie', 'Acambay', 'Cape Girardeau', 'Mafamude', 'Siddhirganj', 'Gonbad-e Kavus', 'San Antonio del Sur', 'Tsuruga', 'Sarcelles', 'Pont-Saint-Esprit', 'Serekali', 'Iguape', 'Rosemount', 'Hulikal', 'Nordenham', 'Bannur', 'Westerkappeln', 'Bozdogan', 'Eslohe', 'Koth', 'Kattari', 'Caltanissetta', 'Francavilla al Mare', 'Iguai', 'Moundou', 'Kozakai-cho', 'Alfaro', 'Sonsoro', 'Ljubljana', 'Charakunda', 'Ambohidranandriana', 'Caraguatay', 'Moriyama', 'Voula', 'Penamiller', 'Ponnai', 'Juan Aldama', 'Douglasville', 'Pati', 'Pfarrkirchen', 'Roman', 'Busan', 'Kottapeta', 'Nagua', 'Salaya', 'Ciudad Dario', 'Buttelborn', 'Victorville', 'San Pelayo', 'Mauren', 'Khairtal', 'Bombarral', 'Cadillac', 'Isiro', 'Cabral', 'Pelagor', 'Guastalla', 'Cruces', 'Gungu', 'Milford', 'Tigrana', 'Mundargi', 'Daram', 'Saudharkrokur', 'Canonsburg', 'Aourir', 'Blue Springs', 'Commerce', 'Saint-Die-des-Vosges', 'Lubuklinggau', 'Lake City', 'Bougouni', 'Netrakona', 'Meral', 'Csongrad', 'Tokai', 'Linthicum', 'Taylorville', 'Sano', 'Araguatins', 'Prokuplje', 'Dhansaria', 'Turtkul', 'Gyzylgaya', 'Bollullos de la Mitacion', 'Mallagunta', 'San Luis Potosi', 'Ukmerge', 'Chenove', 'Huejuquilla el Alto', 'South Gate', 'Llantwit Major', 'Thomazeau', 'Memunda', 'Stockton-on-Tees', 'Himatnagar', 'Rapid City', 'Banes', 'Rheydt', 'Bobenheim-Roxheim', 'Lauterach', 'Kalmthout', 'Yashan', 'Baures', 'Malanday', 'Treviso', 'Ico', 'Vladimirci', 'Dora', 'Abergavenny', 'Parow', 'Tineghir', 'Iapu', 'Cremona', 'Tazmalt', 'Vernon Hills', 'Palermo', 'Ambiula', 'Alaverdi', 'Altamira', 'Sao Goncalo', 'Sakado', 'Pingree Grove', 'Newton Abbot', 'Puerto Morelos', 'Buesaco', 'Santa Maria da Vitoria', 'Zeuthen', 'Hebri', 'Petua', 'Sertania', 'Yangiyer', 'Mahmudabad', 'Dinhata', 'Bariarpur', 'Mondeville', 'Efatsy-Anandroza', 'Rasht', 'Port Royal', 'Marotta', 'Suffern', 'Pearland', 'Paduvari', 'Ain Lechiakh', 'Ruvo di Puglia', 'Wittingen', 'Rochester Hills', 'Matsavaram', 'Dunakeszi', 'Sowa Town', 'Barneveld', 'Koycegiz', 'Shanhu', 'Zaidpur', 'Valenca', 'Rockaway', 'Champua', 'Caojiachuan', 'Saraykent', 'Gheorgheni', 'Shonai', 'Itaueira', 'Itanagar', 'Bandar-e Mahshahr', 'Yelandur', 'Kempston', 'Elazig', 'San Juan Guichicovi', 'Tom Price', 'Zhegaozhen', 'Igdir', 'Chang', 'Nyakrom', 'Shamunpet', 'Sultanabad', 'Tazarka', 'Rathdrum', 'Souahlia', 'Konnur', 'Rinconada', 'Lamballe', 'Planaltina', 'Sunadkuppi', 'Mangpa', 'Marsciano', 'Eonyang', 'Bani Murr', 'Baba Hassen', 'Colatina', 'Tianningcun', 'Morazan', 'Guantingzhan', 'Misserghin', 'Carolina Forest', 'Tanakallu', 'Castellarano', 'Kumla', 'Yorktown', 'Santo Domingo Xenacoj', 'Louga', 'Fort Lee', 'Tepperumalnallur', 'Ampanety', 'Ajuy', 'Idstein', 'Pristina', 'Snina', 'Habaswein', 'Joghtay', 'Shimizucho', 'Kotancheri', 'Woodstock', 'Falagueira', 'Moorestown-Lenola', 'San Juan Sacatepequez', 'Rio do Sul', 'Mirador', 'Moba', 'Hassi Khelifa', 'Nossa Senhora Aparecida', 'Ibiza', 'Watauga', 'Zemrane', 'Santa Cruz Xoxocotlan', 'Pilis', 'Evian-les-Bains', 'Beraketa', 'Goldsboro', 'Isulan', 'Sundern', 'Aikaranad', 'Yola', 'Sunderland', 'Ouled Ben Abd el Kader', 'Pakaur', 'Antsirabe Avaratra', 'Schofield Barracks', 'Serang', 'Lede', 'Guernica', 'Mostoles', 'Villa Mercedes', 'Carcar', 'Beruniy', 'Mariano Acosta', 'Toin', 'Kashikishi', 'Dar Chabanne', 'uMhlanga Rocks', 'Mieres', 'Chipurupalle', 'Aranda', 'Nacogdoches', 'Benner', 'Ahvaz', 'Sint-Michiels', 'Yamamoto', 'Retie', 'Pitman', 'Bagabag', 'Phalaborwa', 'Couva', 'Kheri Naru', 'Bazidpur Madhaul', 'Carlton Colville', 'Al Mafraq', 'Amasra', 'Peruibe', 'Cuarte de Huerva', 'Labrador', 'Bawshar', 'Pirojpur', 'Taraza', 'Teminabuan', 'Shimotsuke', 'Garca', 'Rifadpur', 'Los Chiles', 'Bhandarso', 'Capurso', 'Caetes', 'Pajapita', 'Puerto Real', 'East York', 'Teteven', 'Coruche', 'Wurzen', 'Pescaria Brava', 'Guzhou', 'Stanwell', 'Khanabad', 'Xisa', 'Guruzala', 'Basla', 'Sabana Larga', 'Dom Feliciano', 'Kuytun', "L'Asile", 'Bokhtar', 'Nazarabad', 'Leticia', 'Malang', 'Ambohimiera', 'Amjhar', 'Veitshochheim', 'Besana in Brianza', 'Lynn', 'Barura', 'Statte', 'Balchik', 'Ottappidaram', 'Afir', 'Barleben', 'Ashland', 'Jirja', 'Woodbridge', 'Smithton', 'Neustadt bei Coburg', 'Hewitt', 'Ulstein', 'Nong Bua', 'Piracanjuba', 'New Mirpur', 'Darabani', 'Szolnok', 'Nqutu', 'Ii', 'Glassmanor', 'Eruvatti', 'Magarao', 'Sunam', 'Sonaimukh', 'Chapelle-lez-Herlaimont', "Hai'an", 'Blaricum', 'El Marsa', 'Jutiapa', 'Cihanbeyli', 'Bershad', 'La Mujer', 'Sompting', 'Lomas de Sargentillo', 'Guacui', 'Mpwapwa', 'Sanhe', 'Kampong Trach', 'Govindgarh', 'Weilmunster', 'Hopkinsville', 'Xicotencatl', 'Crikvenica', 'Faizabad', 'Dehgolan', 'Changji', 'Obita', 'Ottobrunn', 'Pak Phanang', 'Dongta', 'Guaduas', 'Clarksville', 'Luxembourg', 'Kut-e `Abdollah', 'Puerto Penasco', 'Hebli', 'Locust Grove', 'Lappeenranta', 'Kizilcasar', 'Terenos', 'Kamen', 'Barra de Santo Antonio', 'Saint-Servan-sur-Mer', 'Pinto', 'Beverly Hills', 'Rouyn-Noranda', 'Joaquim Pires', 'Wichelen', 'Saint-Medard-en-Jalles', 'Dibrugarh', 'Gouvieux', 'Schriesheim', 'Taliwang', 'Prachuap Khiri Khan', 'Salida', 'Dahivel', 'Tanjung Selor', 'Tabio', 'Ilampillai', 'Acaxochitlan', 'West Donegal', 'Marosakoa', 'Coahuitlan', 'Valasske Mezirici', 'Mengmeng', 'Umm ar Rizam', 'Altindag', 'Lewisburg', 'Kamifurano', 'Pune', 'Miami Springs', 'San Sebastian de Yali', 'Berwick', 'Quixada', 'Wujindian', 'Jiadong', 'Bagnan', 'Puke', 'Bogazliyan', 'Sertaozinho', 'Beiya', 'Pragadavaram', 'Alarobia Bemaha', 'Lara', 'Pyryatyn', 'Kantabanji', 'Kodusseri', 'Meiningen', 'Fontainebleau', 'Panapur', 'Ulaangom', 'Bihpur', 'Bodinayakkanur', 'Lins', 'Helsingor', 'Maryanaj', 'New Kru Town', 'Mococa', 'Bab Ezzouar', 'Ghazaouet', 'Pulaski', 'Grenchen', 'East Riverdale', 'Bourne', 'Maqat', 'Oliveirinha', 'Deltona', 'Vermillion', 'Chotala', "Hong'an", 'Houbu', 'Furth', 'Tchibanga', 'Ishaka', 'Namala Guimbala', 'Aire-sur-la-Lys', 'Shentang', 'Guilford', 'Oswaldtwistle', 'Binga', 'Birstall', 'Kandukur', 'Dunleary', 'Majia', 'Pamban', 'Hata', 'Mitchells Plain', 'Blankenfelde', 'Itoigawa', 'Andriba', 'Maghra', 'Bugarama', 'Urk', 'Hoima', 'Binbrook', 'Manali', 'Manevychi', 'Gotzis', 'Byala Slatina', 'Capoocan', 'Pepperell', 'Ayr', 'Saint-Jean-de-Braye', 'Chungju', 'Barobo', 'Laamarna', 'Mudichchur', 'Madhyamgram', 'Sefrou', 'Haora', 'Harlakhi', 'Bremgarten', 'Conselheiro Lafaiete', 'Nanbei', "'Ain Mabed", 'Koper', 'Knowle', 'Cerkno', 'Perai', 'Baghin', 'Colne', 'Kingsborough', 'Mahagaon', 'Chengguan', 'Santa Eugenia', 'Yuli', 'Bad Rothenfelde', 'Chongqing', 'Aviles', 'Gokhulapur', 'Lokapur', 'Menfi', 'Kisvarda', 'Sungai Guntung', 'Ambolidibe Atsinanana', 'Oberkirch', 'Poteau', 'Raleigh', 'Ulongue', 'Floresta Azul', 'Migori', 'Veracruz', 'Roubaix', 'Mariscal Jose Felix Estigarribia', 'Sint-Michielsgestel', 'Gering', 'Yanchep', 'Hof', 'Villa Regina', 'South Amboy', 'Songkhla', 'Barwah', 'Stourbridge', 'Bodegraven', 'New Windsor', 'Kubadupuram', 'Lagoa dos Gatos', "Pa'in Chaf", 'Allagadda', 'Casilda', 'Tarancon', 'Adilabad', 'Chakla Waini', 'Sabadell', 'Bekipay', 'Tajimi', 'Ciputat', 'Oulad Khallouf', 'Rajpur Kalan', 'Ghadamis', 'Nyaungdon', 'Bimo', 'Tena', 'Kendall', 'Muzaffarabad', 'Tongren', 'Vrbas', 'Resende Costa', 'Cherchell', 'Namayingo', 'Port Angeles', 'Egersund', 'Bhasaula Danapur', 'Wavre', 'Itariri', 'Timbiras', 'Mahajanga', 'Lochearn', 'Vestby', 'Kulgo', 'Mola di Bari', 'Istrana', 'Shangtianba', 'Aue', 'Kirchzarten', 'Lanling', 'Tuneri', 'Montrouge', 'Taslicay', 'Choro', 'Schio', 'Diadema', 'Rosstal', 'Taohongpozhen', 'Nueva Guadalupe', 'Tacoma', 'Mayyanad', 'Botou', 'Alfonso', 'Bexley', 'Marginea', 'Alliance', 'Davuluru', 'Candiac', "Hammam M'Bails", 'Corridonia', 'Jaguarari', 'Bouati Mahmoud', 'Topeka', 'Honiton', 'Sambalpur', 'Kalamner', 'Tulua', 'Baesweiler', 'Chatra Gobraura', 'Swanage', 'Santa Magdalena', 'Marrero', 'Mizunami', 'Bickley', 'Ocala', 'Dera Baba Nanak', 'Reda', 'Bourem Guindou', 'Tagum', 'Talata Ampano', 'Poninguinim', 'Bad Urach', 'Alterosa', 'Lake Hiawatha', 'Rhosllanerchrugog', 'Zirapur', 'Bakixanov', 'Scottburgh', 'Laubach', 'Bhakkar', 'Bath', 'Iztapa', 'Kaabong', 'Haiku-Pauwela', 'Walthamstow', 'Leiyang', 'Silverton', 'Hangzhou', 'Namminikara', 'Badr', 'Ban Bang Rin', 'Skydra', 'Changting', 'Bou Nouh', 'Villorba', 'Conceicao de Macabu', 'Haldipur', 'Chavakkad', 'Artondale', 'Isla Raton', 'Taiyur', 'Mel Bhuvanagiri', 'Faratsiho', 'Piso Firme', 'Venlo', 'Nules', 'San Gaspar Ixchil', 'London Colney', 'Hesarghatta', 'Gingee', 'Shahpura', 'Bhikhi', 'Kamenice', 'Amesbury', 'Hanoi', 'Davenport', 'Bishunpur Sundar', 'Oggiono', 'Beinan', 'Tsimasham', 'Campos', 'Chaponost', 'Amondara', 'Pariyari', 'Brusque', 'Coquimbo', 'Promissao', 'Malakoff', 'Yunxian Chengguanzhen', 'Baiao', 'Tremelo', 'Sesto Calende', 'Pozega', 'Liepaja', 'Lonigo', 'San Leonardo', 'Gonabad', 'Valdemoro', 'Sikandra', 'Vise', 'Rybnik', 'Hit', 'Rosu', 'Fengrenxu', 'Barrie', 'Adjud', 'Manta', 'Susuz', 'Junin', 'Lauda-Konigshofen', 'Rocas de Santo Domingo', 'Barjhar', 'Kurdamir', 'Prabumulih', 'Miyazaki', 'Menzel Kamel', 'Micco', 'Sankt Leon-Rot', 'De Pinte', 'Chanaral', 'Abiko', 'Carlos A. Carrillo', 'Sarande', 'Matoes', 'Cholchol', 'Gangarampur', 'Aihua', 'Yanggao', 'Portalegre', 'Falea', 'Qapqal', 'Gotenba', 'Villanueva del Pardillo', 'Boysun', 'Bundu', 'Jucas', 'Abony', 'Bahabad', 'Paradarami', 'Eerbeek', 'Nyon', 'Shannon', 'Barreirinhas', 'Manompana', 'Fron', 'Nathdwara', 'Ganassi', 'Korntal-Munchingen', 'Tessenderlo', 'Prescott', 'Chivilcoy', 'Juina', 'Primero de Enero', 'Campiglia Marittima', 'Bharra', 'Naju', 'Halls Creek', 'Sakawa', 'Ain Temouchent', 'Caimanera', 'Mahao', 'Zacatelco', 'Perumuchchi', 'Aberdeen', 'Dentsville', 'Lincoln Village', 'Bou Hadjar', 'Retiro', 'Ouaouzgane', 'Balrampur', 'Iznik', 'Coatepeque', 'Dicle', 'Phon', 'Sanpaicun', 'Zgornja Kungota', 'Coulommiers', 'Woodhaven', 'Pirai', 'Marana', 'Benjamin Constant', 'Kimpese', 'Kashiba', 'Vandithavalam', 'Ar Ruseris', 'Fusui', 'Aalten', 'Hiranai', 'Kigumba', 'Ocosingo', 'Kotabommali', 'Sugar Hill', 'Covington', 'Mielec', 'Totness', 'Ribeirao Preto', 'Caotun', 'Salaiya', 'Karapurcek', 'Hornchurch', 'Hlyboka', 'Azizpur Chande', 'Sokhodewara', 'Brus', 'Dilovasi', 'Manjathala', 'Sabbah', 'Sadiola', 'Lomita', 'Aveiro', 'Kittur', 'Rajpur', 'Betsukai', 'Shintomi', 'Tahara', 'Vuliyattara', 'El Kelaa des Srarhna', 'Gammarth', "Sal'a", 'Kolin', 'Ceel Dheere', 'Magpet', 'Tarhzirt', 'Chateauguay', 'Qovlar', 'Gryfice', 'Bokakhat', 'Terrace', 'Acajutiba', 'Tefam', 'Wolow', 'Kuaidamao', 'Punnayur', 'Maliana', 'Ouando', 'Howell', 'Kodarma', 'Gines', 'Douglas', 'Werve', 'Corfe Mullen', 'Rayon', 'Tequila', 'Kozani', 'Rafina', 'Placido de Castro', 'Mascara', 'Pszczyna', 'Hochheim am Main', 'Babahoyo', 'Tanglou', 'Villagarcia de Arosa', 'Sao Romao', 'Izmir', 'Kabwe', 'Wulan', 'Lake Charles', 'Babhantoli', 'Villaviciosa', 'Doljevac', 'Achchippatti', 'Parasbani', 'Agadez', 'Yalluru', 'Saumur', 'Ambodiriana', 'Panda', 'Jadupatti', 'Santo Domingo Este', 'Manakambahiny', 'Hajduhadhaz', 'Kitamilo', 'Shenwan', 'Madattukkulam', 'La Romana', 'Glasgow', 'Aars', 'Niquelandia', 'South Farmingdale', 'Mangalia', 'Kualaserba', 'Flandes', 'Veroli', 'Ixtapan de la Sal', 'Makarska', 'Lakho', 'Dhanur Kalyanwadi', 'Castillo', 'Jamui', 'Mutyalapalle', 'Karaikkudi', 'Gogounou', 'Kurumbapalaiyam', 'Myrne', 'Maroamalona', 'Collado-Villalba', 'Sikandarpur', 'Olonne-sur-Mer', 'Cambundi Catembo', 'Vohimasina', 'Chenango', 'Muara Teweh', 'Las Matas de Farfan', 'Livermore', 'West Columbia', 'Anqiu', 'Mumbai', 'Ogijares', 'Bethesda', 'Wyndham', 'Lake Shore', 'Yuchi', 'Jalandhar Cantonment', 'Manevy', 'Sao Jose do Rio Pardo', 'Chimoio', 'Dagami', 'Sagarejo', "Lin'an", 'Gobardhanpur Kanap', 'Apple Valley', 'Allauch', 'Villafranca di Verona', 'Yulin', 'Herbolzheim', 'Dolton', 'Terebovlya', 'Sri Madhopur', 'Benifayo', 'Valambur', 'Sendrisoa', 'Ranipur', 'Media Luna', 'Ocna Mures', 'Kulundu', 'Damdama', 'Milagros', 'Smithfield', 'Lake Butler', 'Ciudad Barrios', 'Escuintla', 'Cherry Hinton', 'Solonytsivka', 'Ribeirao Claro', 'Balussheri', 'Nanjikkottai', 'Asela', 'Martos', 'Canto do Buriti', 'Samalut', 'Dhuburi', 'Maltahohe', 'Horst', 'Bafang', 'La Blanca', 'Seraing', 'Hedensted', 'Chala', 'Bou Salem', 'Boralday', 'Marktredwitz', 'Neuenrade', 'Muddada', 'Serrinha', 'Horizon West', 'Machalpur', 'Pathra', 'Nelspruit', 'Rankhandi', 'Gaoniang', 'Ohrid', 'Conguaco', 'Taylors', 'Aqadyr', 'Kirkop', 'El Golea', 'Slupsk', 'Sawran', 'Ansan', 'Villa Dominico', 'Kambia', 'Sainte-Genevieve-des-Bois', 'An Nuhud', 'Samarkand', 'Ringsaker', 'Blangmangat', 'Nuenen', 'North Aurora', 'Hoek van Holland', 'Santa Cruz Cabralia', 'Suvalan', 'Phra Pradaeng', 'Arnedo', 'Pagsanjan', 'Pietermaritzburg', 'Weissenthurm', 'Pebberu', 'Kangazha', 'Ancona', 'Bou Noura', 'Owensboro', 'Catalina Foothills', 'Alegre', 'Douar Imoukkane', 'Tirkadavur', 'Washington Court House', 'Colmenar Viejo', 'Baikunthpur', 'Sitrah', 'Presidente Janio Quadros', 'St. Louis Park', 'Kempten', 'Dergaon', 'Pannaipuram', 'Gifu', 'Toul', 'Sirakorola', 'Vaals', 'Barranca', 'Elgoibar', 'Kasamatsucho', 'Ampitahana', 'Totowa', 'Mocoa', 'Beni Slimane', 'Bethany', 'Ramdiri', 'Bariadi', 'Cofradia', 'Tandil', 'Bolton', 'Sirat', 'Uspenka', 'Capim Branco', 'Ozumba', 'Dighawani', 'Keonjhargarh', 'Makurazaki', 'Sada', 'Sarmin', 'Yamkanmardi', 'Zhujiezhen', 'Brampton', 'Nynashamn', 'Dawson Creek', 'Analalava', 'Dobrovnik', 'Puxi', 'Beni Tamou', 'Damargidda', 'San Casciano in Val di Pesa', 'Springdale', 'Luque', 'Zara', 'Dakit', 'Urena', 'Naduhatti', 'General Jose de San Martin', 'Raia', 'Bhuj', 'Tisnov', 'Tekkekoy', 'Faridpur', 'Sao Vicente', 'Bad Salzuflen', 'Itororo', 'Petersberg', 'Castelsarrasin', 'Madiama', 'Voluntari', 'Butaleja', 'Carandai', 'Arusha', 'Strumica', 'Quijingue', 'Banovce nad Bebravou', 'Huaibei', 'Siswa', 'Ansiao', 'Baisa', 'Kozuchow', 'Mions', 'Kenema', 'Rajaudha', 'Onga', 'City of Calamba', 'Buli', 'Lokhvytsya', 'Chak Thathi', 'Qasr-e Qomsheh', 'Wilson', 'Tarawa', 'Libjo', 'Batocina', 'Tajpur', 'Kanyakulam', 'Manzhouli', 'Bougaa', 'Kitzingen', 'Fenoughil', 'Dhanera', 'Dhorgaon', 'Abohar', 'Zug', 'Ghaura', 'Aparecida do Taboado', 'Zapotlanejo', 'Natanz', 'Guarda', 'Chengara', 'Tup', 'Livani', 'Sirsia Hanumanganj', 'Qal`at al Madiq', 'Tamarac', 'South Miami', 'Thung Sai', 'Grodzisk Wielkopolski', 'Jiutepec', 'Nautanwa', 'Grenada', 'Nandod', 'Helotes', 'Nang Rong', 'Makiivka', 'Paxtaobod', 'Susaki', 'Fuso', 'Tsawwassen', 'Villeurbanne', 'Ternopil', 'Pandaul', 'Nardo', 'Puerto Aysen', 'Marvdasht', 'Funafuti', 'Siraway', 'El Milagro', 'Dinapore', 'Conisbrough', 'Ban Duea', 'Kawm Umbu', 'Derhachi', 'Setubinha', 'Assis', 'Shikarpur', 'Hukumati Baghran', 'Manuel Urbano', 'Penacova', 'Mishawaka', 'Kremenchuk', 'Salzkotten', 'Sakai', 'Ostercappeln', 'Sebikhotane', 'Mahatalaky', 'Recanati', 'Sainte-Foy-les-Lyon', 'Hengshan', 'Antonio Enes', 'Adivala', 'Odaipatti', 'Aginiparru', 'Sao Goncalo dos Campos', 'Sagaing', 'Anjad', 'Jevargi', 'Bastia', 'General Tinio', 'Kailua', 'Fukutsu', 'Nogata', 'Park Forest', 'Lolokhur', 'Moengo', 'Hermosillo', 'Greencastle', 'Pittsburgh', 'Luuq', 'Toluca', 'Satgachia', 'Nova Varos', 'Tetovo', 'Empalme', 'Cherryland', 'Ka-Bungeni', 'Fernandopolis', 'Oita', 'Walnut', 'Dalian', 'Viersen', 'Jyvaskylan Maalaiskunta', 'Warragul', 'Malaikkal', 'Loule', 'Honolulu', 'Azalea Park', 'Samba Cango', 'Boblingen', 'Istmina', 'Southend', 'Kallithea', 'Khiria', 'Huaral', 'Mendota', 'Upper Buchanan', 'Damietta', 'Porto Empedocle', 'Santo Antonio do Ica', 'Nunungan', 'Naysar', 'Pico Rivera', 'Riano', 'Snodland', 'Sangre Grande', 'Sun City', 'Naunhof', 'Fisciano', 'Vedene', 'Khawad', 'Vernon', 'Ahiro', 'Chidambaram', 'Chacabuco', 'Elsdorf', 'Chuhal', 'Djelfa', 'Qiryat Shemona', 'Altamirano', 'Cubellas', 'Allonnes', 'Aracaju', 'Novi Knezevac', 'Ikalamavony', 'Staunton', 'Mpophomeni', 'Manombo Atsimo', 'Madinat Hamad', 'Guntramsdorf', 'Elchuru', 'Dongyang', 'Gardony', 'Taro', 'Tablat', 'Garulia', 'Gopichettipalaiyam', 'Raibari Mahuawa', 'Tibba', 'Taoyuan District', 'Manavadar', 'San Pedro Garza Garcia', 'Amuntai', 'Damua', 'Desio', 'Tomboutou', 'Parlier', 'Modasa', 'Ubud', 'Makan', 'Vieux-Conde', 'Maranga', 'Acobamba', 'Rugby', 'Paramirim', 'Nirgua', 'Alto Paraiso de Goias', 'Bandar Lampung', 'Koumaira', 'Sebt Ait Ikkou', 'Vavuniya', 'Grand Terrace', 'Surajgarha', 'Pedro Velho', 'Lanzhou', 'Tieli', 'Jadia', 'Mapanas', 'Nangan', 'Glazoue', 'Villa Literno', 'Qianshanhong Nongchang', 'Paso de Carrasco', 'Calliaqua', 'Watertown Town', 'Tasgaon', 'China', 'Yoshinogari', 'Warora', 'Castlebar', 'Hezhou', 'Tijuana', 'Vilachcheri', 'Tama', 'Stony Brook', 'Puurs', 'Dam Dam', 'Muktsar', 'Wishaw', 'Clay', 'Hattula', 'Marojala', 'Aguas Belas', 'Ogano', 'Kattagaram', 'Sulakyurt', 'Mariveles', 'Kavala', 'Leavenworth', 'Ambohitsimanova', 'Lingsugur', 'Delicias', 'Xihuachi', 'Kukarmunda', 'Saruu', 'Lwengo', 'Bokod', 'Andraitx', 'Polanco', 'Willowbrook', 'Peddapalle', 'Doddipatla', 'Basco', 'Bhimavaram', 'Timoktene', 'Tizi', 'Mettet', 'Matadi', 'Xiaotangzhuang', 'Guna', 'Villaricca', 'Pfedelbach', 'Barud', 'San Antonio del Monte', 'Advi Devalpalli', 'Nowy Dwor Gdanski', 'Hsinchu', 'Bad Kreuznach', 'Pasaje', 'Miluo Chengguanzhen', 'Puyang Chengguanzhen', 'Vasto', 'Abarkuh', 'West Plains', 'Carazinho', 'Vedi', 'Port Maria', 'Karratha', 'Fes', 'Ghulakandoz', 'Porto Acre', 'Jinzhong', 'Thyolo', 'Dien Bien Phu', 'Panniyannur', 'Banbhag', 'Saint-Lambert', 'Bhoj', 'Salug', 'Sobraon', 'Reigate', 'Colts Neck', 'Salcaja', 'Liesti', 'Tianyingcun', 'Maia', 'Befeta', 'Ina', 'Sutherlin', 'Saint-Brice-sous-Foret', 'St. Louis', 'Bistaria', 'Nixa', 'Boqueirao', 'Khimlasa', 'Munai', 'Encinitas', 'Citlaltepec', 'Tokigawa', 'Kalloni', 'Sachse', 'Huckeswagen', 'Meric', 'Ilkal', 'Bawana', 'Greetland', 'Alcazar de San Juan', 'Soltau', 'Wahiawa', 'Mogogelo', 'Villerupt', 'Carqueiranne', 'Morohongo', 'Hervey Bay', 'Lakhna', 'Mardin', 'Lidingo', 'Dar Chioukh', 'Juncos', 'Alliston', 'Ilorin', 'Arita', 'Skara', 'Progress Village', 'Savelugu', 'Khirpai', 'Kamalnagar', 'Mount Holly', 'Vreed-en-Hoop', 'Akcakale', 'Vence', 'Kurawar', 'Seoul', 'Novi Grad', 'Kudamatsu', 'Colina', 'Kottagudem', 'Tankal', 'Kazarman', 'East Orange', 'Mulanur', 'Sfizef', 'Larvik', 'Pawni', 'San Juan de Vilasar', 'Venmani', 'Gentilly', 'Picnic Point', 'Beiwusidui', 'Pakdasht', 'Tatvan', 'Ardmore', 'Capela do Alto Alegre', 'Rakai', 'Yakouren', 'Puerto Asis', 'Shirali', 'Adjumani', 'Huite', 'Indaiatuba', 'Ouled Moussa', 'Tamboril', 'Kasimkota', 'Iligan', 'Stratton Saint Margaret', 'Jiancheng', 'Mungod', 'Sremcica', 'Sara', 'Garrel', 'Narela', 'Meilan', 'Martinsicuro', 'Culaba', 'Suez', 'Villamaria', 'Baytunya', 'Oleshky', 'Mogaung', 'Twinsburg', 'Casatenovo', 'Sadda', 'Shiyali', 'Independencia', 'Jinotepe', 'Guilin', 'Samal', 'Gouda', 'Tarui', 'Sao Felipe', 'Srivilliputtur', 'Malmal', 'Pembroke Dock', 'Schwaikheim', 'Beizhou', 'Horw', 'Gohi Bishunpur', 'Taby', 'Ramallo', 'Woodcrest', 'San Jacinto Amilpas', 'Sundarsi', 'Basildon', 'Harasta', 'Brislington', 'Collingdale', 'Brezice', 'Xam Nua', 'Ans', 'Hardiya', 'Linnich', 'Varamin', 'Asuncion Mita', 'Dalgan', 'Sahawar', 'Jiutai', 'Bruckmuhl', 'Bingley', 'Dundigal', 'Somers', 'Villas', 'Ouro Branco', 'Apora', 'Miduturu', 'Manambolosy', 'Wuchuan', 'Kasumbalesa', 'Brent', 'Balma', 'Beidou', 'Farim', 'Dagmara', 'Bijaipur', 'Karera', 'Amersfoort', 'Gold', 'Patilar', 'Tweed Heads', 'San Quintin', 'Saloa', 'Ruiming', 'Sarayan', 'Silver Springs Shores', 'Vakon', 'Ingenbohl', 'Chetma', 'Blacksburg', 'Pondokaren', 'Queimadas', 'Encantado', 'Tsubata', 'Mers el Hadjad', 'Northglenn', 'Genc', 'Lohiyar Ujain', 'Ruoqiang', 'Madera', 'Santana do Cariri', 'Ramamangalam', 'Bangawan', 'Emirdag', 'Santa Fe Springs', 'Cabucgayan', 'Beaver Falls', 'Cypress', 'Glanmire', 'Ipua', 'Jaca', 'Pueblo Viejo', 'Kashiwa', 'Dos Hermanas', 'Ras Tanura', 'Sidi Abdellah Ben Taazizt', 'Badger', 'Velingrad', 'Velsen-Zuid', 'Iscuande', 'Ziro', 'Charneca', 'Tourlaville', 'Loma Linda', 'Sevran', 'Eraniel', 'Trece Martires City', 'Franceville', 'Coari', 'Khairabad', 'Bunhe', 'Taguatinga', 'Gilan-e Gharb', 'Nyborg', 'Kawagoe', 'Pinheiral', 'Bavaro', 'Lisse', 'Shuilou', 'Paraty', 'San Pedro Huamelula', 'Bamber Bridge', 'Port Huron', 'Quba', 'Ajacuba', 'Lambarene', 'Sikandarabad', 'Chornobaivka', 'Dores de Campos', 'Wakoro', 'Palomares del Rio', 'Ankola', 'Chibia', 'Birmingham', 'Antrim', 'Kingsport', 'Rockledge', 'Carrillos', 'Irig', 'Denekamp', 'Oberhaching', 'Tisma', 'Uriangato', 'Tori-Cada', 'Wolomin', 'Montignies-le-Tilleul', 'Des Moines', 'Angeles City', 'Morinville', 'Zunyi', 'Port Blair', 'Daparkha', 'Petrosani', 'Lewes', 'Douar Souk L`qolla', 'Siaton', 'Beyne-Heusay', 'Padang', 'Nova Kakhovka', 'Assare', 'Garh Sisai', 'Ulster', 'Magny-les-Hameaux', 'Vallieres', 'Garagoa', 'Sungurlu', 'Gadaul', 'Taraclia', 'Aden', 'Grand Bourg', 'Sao Francisco do Guapore', 'Meruoca', 'Oud-Beijerland', 'Nitra', 'Capoterra', 'Tasikmalaya', 'Cuevo', 'Fruit Cove', 'Le Teil', 'Beur', 'Bouskene', 'Saint-Pierre', 'Palitana', 'Bourkika', 'Jiquilisco', 'Hassa', 'Ixtapaluca', 'Velyka Dymerka', 'Kampli', 'Caetano', 'Luis Antonio', 'Horgen', 'Nalatvad', 'Quime', 'Betsiboka', 'Rancheria Payau', 'Dongxing', 'Panagyurishte', 'Soure', 'Sinalunga', 'Bhopatpur', 'Tottington', 'Zogno', 'Alpinopolis', 'Ibirite', 'Talusan', 'Mae Wang', 'Prato', 'Barvynkove', 'Altavilla Vicentina', 'Mussomeli', 'Hilchenbach', 'Agueda', 'Botupora', 'San Fernando', 'Gallarate', 'Balombo', 'Philipsburg', 'Oulad Aissa', 'Brazzaville', 'Tissaf', 'Sanniquellie', 'Zeist', 'Turgutlu', 'Ashikaga', 'Seclin', 'Actopan', 'Accra', 'Destelbergen', 'Paiker', 'Iowa Colony', 'Muvattupula', 'Springe', 'Coqueiral', 'Gmunden', 'Perry Barr', 'Cuneo', 'Masur', 'Calimera', 'Diang', 'Nacozari Viejo', 'Jurmala', 'Tsarasaotra', 'Chon Thanh', 'Dandenong', 'Zutphen', 'Bunawan', 'Kottaram', 'Majhgawan', 'Macedonia', 'Totogalpa', 'Grand Junction', 'Medfield', 'Tolcayuca', 'Santiago del Estero', 'Ibague', 'Caucaia', 'Quartucciu', 'Shangshan', 'Roldan', 'Tiddim', 'El Bosque', 'Mangapet', 'Enfield Lock', 'Paulo Afonso', 'Golden Gate', 'Del City', 'Brandys nad Labem-Stara Boleslav', 'Ambatomasina', 'San Pascual', 'Gothurutha', 'Budapest', 'El Hamel', 'Vadakadu', 'Casalpusterlengo', 'Vardannapet', 'Glucholazy', 'Talkha', 'Victoriaville', 'Fengdeng', 'Bhalki', "Sao Jorge d'Oeste", 'Calaca', 'Al `Awwamiyah', 'Listowel', 'Komenda', 'Janiuay', 'Copperas Cove', 'Kovvali', 'Cucer-Sandevo', 'Birkat as Sab`', 'Silaiyampatti', 'Lianzhuangcun', 'Arataca', 'Natchez', 'Guangyuan', 'Ko Samui', 'Hajduboszormeny', 'Ambatondrazaka', 'Akonolinga', 'Barpeta', 'Torrelodones', 'Norrtalje', 'Saurh', 'Wohlen', 'Felling', 'Honjo', 'Petare', 'Proper Bansud', 'Dendermonde', 'Parauna', 'Bibhutpur', 'Sultepec', 'Chiquinquira', 'Lint', 'Zhedao', 'Santo Antonio de Padua', 'Alburquerque', 'Fuzuli', 'Ocean Pines', 'Atacames', 'Francheville', "Wadi Halfa'", 'Ninh Hoa', 'Baiquan', 'Antigua', 'Bevata', 'Xinpo', 'Warkan', 'Qibray', 'Gisors', 'Pa Mok', 'Tabuleiro do Norte', 'Melres', 'Sangam', 'Ostrhauderfehn', 'Buritirama', 'Oleksandriia', 'Dongzhang', 'Rio Preto da Eva', 'Idupugallu', 'Florida City', 'Tobias Fornier', 'Newquay', 'Ivins', 'Kotah-ye `Ashro', 'Klaksvik', 'Klaeng', 'Vatutine', 'Rignano Flaminio', 'Soamahamanina', 'Essex', 'Arenzano', 'Arbaoun', 'Ain Kansara', 'Stein', 'Gleisdorf', 'Ottaviano', 'Lloret de Mar', 'Sindos', 'Lausanne', 'Swadlincote', 'Buffalo Grove', 'Coelho Neto', 'Msata', 'Ciudad Cuauhtemoc', 'Home Gardens', 'Resen', 'Khergam', 'Neu Isenburg', 'Kiato', 'Mahina', 'Keelung', 'Mahiari', 'Itambacuri', 'Tsiatajavona-Ankaratra', 'Sake', 'Narrabri', 'Popovaca', 'Uruma', 'Doha', 'Ilaiyankudi', 'Cabo Frio', 'Mustang', 'Kolbermoor', 'Anahidrano', 'Skikda', 'Omurtag', 'Maqu', 'Mouscron', 'Anchieta', 'Pendembu', 'Mokokchung', 'Cestos City', 'Oulad Zemam', 'Beerse', 'Solin', 'Orani', 'Itapira', 'Elburg', 'Pretoria', 'Nishihara', 'Siparia', 'Fort Riley', 'Dar Chaifat', 'Khiria Jhansi', 'Rezvanshahr', 'Gacheta', 'Rio Mayo', 'Huainan', 'Sarkisla', 'Frias', 'Msila', 'Neumarkt', 'Ajjanahalli', 'Santa Maria Xadani', 'Loikaw', 'Gebze', "Shin'onsen", 'Bhind', 'Nakhon Si Thammarat', 'Wangzhuang', 'Ban Kao', 'Barki Saria', 'Wylie', 'Birdsville', 'Atoyac de Alvarez', 'Bamberg', 'Agourai', 'Saavedra', 'Gundelfingen', 'Hunucma', 'Ambohitrimanjaka', 'San Pedro Necta', 'Eeklo', 'Gutalac', 'Chankou', 'Cerro Cora', 'Gulfport', 'Southaven', 'Vemulapudi', 'Pinal de Amoles', 'Montgomery', 'Conchali', 'Cercola', 'Baependi', 'Urgup', 'Qurayyat', 'Ban Si Don Chai', 'Duyun', 'Gentio do Ouro', 'Detroit Lakes', 'Muna', 'Pine Lake Park', 'Omerli', 'Ambatondrakalavao', 'Las Cabras', 'Dueville', 'Darjeeling', 'Los Gatos', 'Poa', 'Keren', 'Jiajin', 'Vogosca', 'Radece', 'Damonojodi', 'Khurda', 'Surallah', 'Johor Bahru', 'Cuervos', 'Brixton', 'Kasap', 'San Juan La Laguna', 'Dambai', 'Marantao', 'Aguazul', 'Teoloyucan', 'Munnar', 'Madhopur Hazari', 'Yaotsu', 'Le Ray', 'Segbwema', 'La Apartada', 'Vitrolles', 'Amtali', 'Neston', 'Portage La Prairie', 'Taytay', 'Barahbatta', 'Tiorpara', 'Hajira', 'As Sabburah', 'Krynica', 'East Rutherford', 'Stovring', 'Andernach', 'Levski', 'Vihiga', 'Kadikkad', 'Husepur', 'Nokha', 'Santa Isabel do Ivai', 'Bithar', 'Gaziemir', 'Manapparai', 'Sinait', 'Brandfort', 'Ibaraki', 'General San Martin', 'Parma Heights', 'Yoshiwara', 'Mudanya', 'Bafq', 'Mikolow', "Chang'an", 'Chota', 'Kesbewa', 'Sinacaban', 'Ambarawa', 'Molodohvardiisk', 'Herat', 'Sakaidecho', 'Honefoss', 'Handan', 'Clydebank', 'Glenpool', 'Uman', 'San Biagio di Callalta', 'Ocean City', 'Qiryat Yam', 'Baldwin', 'Sweden', 'Soavina', 'Hugo', 'Tra Vinh', 'Taibet', 'Turi', 'Agua Blanca Iturbide', 'Cherasco', 'Parun', 'Cedar Hills', 'Arugollu', 'Puduppattanam', 'Monki', 'Dorog', 'Tissamaharama', 'Seffner', 'Tongchuan', 'Nethirimangalam', 'Ambalanirana', 'Torbat-e Jam', 'Balangkayan', 'Malazgirt', "Uchtepa Qishlog'i", 'Les Anglais', 'Sing Buri', 'Zhaoxiang', 'Porta Westfalica', 'Kajur', 'Casa Grande', 'Nkoteng', 'Pasca', 'Merefa', 'Perols', 'Ellensburg', 'Mannukara', 'Rum', 'Moglingen', 'Tendrara', 'Traiskirchen', 'Dialoube', 'Poona-Piagapo', 'Orthez', 'Hollinwood', 'Ferryhill', 'Heerhugowaard', 'Bad Soden-Salmunster', 'Twin Falls', 'Imamoglu', 'Gartringen', 'Vaudreuil-Dorion', 'Chonthrhu', 'Aberbargoed', 'Langar', 'Cabarroguis', 'Anakapalle', 'Geraldton', 'Seara', 'Guasca', 'Ban Mae Sun Luang', 'Sankt Johann in Tirol', 'Musselburgh', 'Nave', 'Bhola', 'Mansehra', 'Villa Sarmiento', 'Sao Bento do Una', 'Balasan', 'Bocana de Paiwas', 'Costesti', 'Podu Iloaiei', 'Jaitpura', 'Rio Colorado', 'Villa Nueva', 'Tlaquepaque', 'Sutton on Hull', 'Radomyshl', 'Muttanampalaiyam', 'Elon', 'Great Falls', 'Al `Amirat', 'Ganzhu', 'Machachi', 'Lehututu', 'Ambohimierambe-Andranofito', 'Singur', 'Lagindingan', 'Bocaina', 'Siguatepeque', 'Jalalkhera', 'Santa Lucija', 'Colmeia', 'Aneho', 'Beterou', 'Mont-Organise', 'Cabanas', 'Leduc', 'Euxton', 'Trebinje', 'Patri', 'Ostrava', 'Bay Shore', 'Ghatkesar', 'Gia Nghia', 'University Heights', 'Montigny-les-Metz', 'Pipraun', 'Villa de Cura', 'Polokwane', 'Daloa', 'Souma', 'Cerea', 'Gracanice', 'Anteza', 'Dedza', 'Hilsea', 'Koduvalli', 'Katravulapalle', 'Nembro', 'Wanlaweyn', 'Sardulgarh', 'Hatibanda', 'Khizrpur', 'Sailana', 'Holbrook', 'Horwich', 'Motibennur', 'Kenner', 'Bugallon', 'Angermunde', 'Boguszow-Gorce', 'Motomiya', 'Nanzhuangzhen', 'Tangxing', 'Tamanrasset', 'Brasileia', 'Canandaigua', 'Gardez', 'Qorasuv', 'Halemba', 'Biandanshan', 'Mauleon', 'Komarno', 'Husi', 'Ngaoundal', 'Ferreiros', 'Gatunda', 'Aioi', 'Jainagar', 'Longyan', 'Mocimboa', 'Central Point', 'Machadodorp', 'Niesky', 'Trets', 'Kajang', 'Tuxpan', 'Anosy Avaratra', 'Novhorod-Siverskyi', 'Zhenjiang', 'Srungavarapukota', 'San Sebastian de la Gomera', 'Frankston', 'Ninga', 'Morauna', 'Saginaw', 'Kussnacht', 'Lerida', 'Silchar', 'Thakurainia', 'Wote', 'Riom', 'Khachrod', 'Takizawa', 'Psychiko', 'Niasso', 'Rio das Pedras', 'La Maquina', 'Kaguchi', 'San Fabian', 'Devarapalle', 'Nahiyat Ghammas', 'Tlapacoyan', 'Debe', 'Santa Maria de Itabira', 'Ifigha', 'Neduvattur', 'Monteprandone', 'San Pedro Perulapan', "Bou'nane", 'Ponnagyun', 'Guaicara', 'Krivogastani', 'Ino', 'Stupava', 'New Mills', 'Ankadimanga', 'Liaoyang', 'Chittaurgarh', 'Bim Son', 'Cullera', 'Blackburn', 'Great Driffield', 'Mukacheve', 'Warka', 'Villers-la-Ville', 'Gavarr', 'Khrustalnyi', 'Easley', 'Caykara', 'Woudenberg', 'Kismaayo', 'Chojnice', 'Holladay', 'Leiderdorp', 'Reading', 'Evansville', 'Bamaiya Harlal', 'Johns Creek', 'Qo`qon', 'Tambaga', 'Kouande', 'Sapang Dalaga', 'Rocafuerte', 'Curepipe', 'Jabuticabal', 'Hasroun', 'Nishinoomote', 'Salemi', 'Bailongqiaocun', 'Niefern-Oschelbronn', 'Jablah', 'Susa', 'Ortuella', 'Bitonto', 'Karadge', 'Quelimane', 'Pirthipur', 'Crisopolis', 'Venosa', 'Anantapur', 'Grajau', 'Idah', 'Sidlice', 'Middle Smithfield', 'Kanie', 'Khari', 'Patnagarh', 'Tamentit', 'Ciudad Valles', 'Kampen', 'Madangir', 'Swidwin', 'Analaroa', 'Cayirli', 'Cernay', 'Houghton Regis', 'Redon', 'Belchatow', 'Maktar', 'Belagal', 'Chikkarampalaiyam', 'Novato', 'Notre-Dame-de-Gravenchon', 'Wuustwezel', 'Palaiyampatti', 'Kandanur', 'Nahavand', 'Tokushima', 'Sandakan', 'Avidha', 'Chaital', 'Shri Mahavirji', 'Tashir', 'Taghbalt', 'Cottage Grove', 'Paso de los Toros', 'Ilicinia', "Monteforte d'Alpone", 'Helleland', 'Corcuera', 'Goa', 'Oqqo`rg`on', 'Chalco', 'Chorwad', 'Jhenida', 'Triesen', 'Ban Don Kaeo', 'Nova Prata', 'Pyrzyce', 'Sogod', 'Kilindoni', 'Condega', 'Qatana', 'Nuth', 'Ban Phru', 'Kiiminki', 'Antanandehibe', 'La Concordia', 'Guilherand', 'Antsiranana', 'Mersch', 'Al Awjam', 'Selouane', 'Coronda', 'Oregon City', 'Bordj Zemoura', 'Alwarkurichchi', 'Kangasala', 'Sonada', 'Burdur', 'Lollar', 'Bethel Park', 'Higashiagatsuma', 'Gandujie', 'Talugtug', 'Cocotitlan', 'Cassano al Ionio', 'New Hamburg', 'Jacobina', 'Tourougoumbe', 'Ribeiropolis', 'Midsomer Norton', 'Gretz-Armainvilliers', 'Nagqu', 'Sunnyvale', 'Canberra', 'Rupana', 'Owosso', 'Sao Jose do Norte', 'Mantaly', 'Socota', 'Cape St. Claire', 'Lepakshi', 'Rounia', 'Tavira', 'Qazyan', 'Chettipulam', 'Grantsville', 'Nagarote', 'Baras', 'Ashaiman', 'Birendranagar', 'West Allis', 'Ouricuri', 'Sarzana', 'Vallegrande', 'El Kseur', 'Assenede', 'Chitauria', 'Steinheim', 'Amsterdam', 'Goth Tando Sumro', 'Santa Cruz Itundujia', 'Cartago', 'Nandaime', 'Andes', 'Dialakorodji', 'Schulzendorf', 'Bantacan', 'Calarasi', "Ra's Gharib", 'East Lake-Orient Park', 'Baitoa', 'Ormesson-sur-Marne', 'El`ad', 'Campechuela', 'Shiotacho-matsusaki', 'Kankaanpaa', 'Ronda', 'Urucuia', 'Morlaix', 'Satwar', 'Bethel', 'Corvallis', 'Yinchuan', 'Talavera', 'Shahriston', 'Royston', 'Bexbach', 'Vilangudi', 'Brinkmann', 'Hellendoorn', 'Imi-n-Tanout', 'South Hayling', 'Schroeder', 'Petrovka', 'Zouerate', 'Bingen am Rhein', 'Bovenden', 'Biri', 'Teaneck', 'Yvoir', 'Santa Eulalia del Rio', 'Rolleston', 'Badantola', 'Waimea', 'Muzo', 'Staro Nagoricane', 'Paranhos', 'Neustrelitz', 'Paouignan', 'Oakashicho', 'Iygli', 'Rangra', 'Sassenage', 'Lommedalen', 'Giyon', 'Conroe', 'Upperu', 'Qorao`zak', 'Lingal', 'Jasien', 'Nonoichi', 'Al Bardiyah', 'Arrentela', 'Saharsa', 'Kalwakurti', 'Lakeland North', 'Bad Ems', 'Quibaxi', 'Huskvarna', 'Bihac', "'Ain Naga", 'Euclides da Cunha', 'East Grinstead', 'Koneurgench', 'Calauag', 'Kiangan', 'Mill Valley', 'Turpan', 'Cacimbinhas', 'Urucuca', 'Anouvong', 'Kunimedu', 'Islamabad', 'Chahar Borj-e Qadim', 'Maribor', 'Ilgin', 'Mount Lavinia', 'Tsukumiura', 'Aix-en-Provence', 'Ipaporanga', 'Pokotylivka', 'Nazipur', 'Tebingtinggi', 'Wenzenbach', 'Solingen', 'Beveren', 'Koszalin', 'Ticul', 'Kuroishi', 'Marialva', 'Auria', 'Raffadali', 'Kumagaya', 'Ruppichteroth', 'Dougoufe', 'Dompu', 'Petrus Steyn', 'Lattes', 'Raalte', 'Taungdwingyi', 'Guixi', 'Ganta', 'Schonwalde-Siedlung', 'Ifatsy', 'Baroda', 'Los Andes', 'Zhaitangcun', 'Simraungadh', 'Fada', 'Moldova Noua', 'Sankt Andra', 'Ica', 'Ngora', 'Robbah', 'Rahiar Kunchi', 'Ouatagouna', 'Benavente', 'Suluru', 'Burstadt', 'Hindoli', 'Santa Maria Petapa', 'Sanana', 'Mangidy', 'Buldan', 'Ustrzyki Dolne', 'Bouznika', 'Florstadt', 'Santiago Papasquiaro', 'Sirur Tajband', 'Ottumwa', 'Terralba', 'Henrietta', 'Adrar', 'Koni', 'Cardonal', 'Haskah Menah', 'Dawmat al Jandal', 'Salzgitter', 'Shikharpur', 'So-Awa', 'Baghmari', "'Ain Arnat", 'Gojra', 'Majidpur', 'Cayeli', 'Bouansa', 'Barhi', 'New Amsterdam', 'Ibigawa', 'Schwarzenberg', 'Darling', 'Borgaon', 'Daudnagar', 'Kitsuki', 'Fairfax', 'Mannarakkat', 'Oberasbach', 'Murphy', 'Laurentides', 'Manjha', 'Kobo', 'Kameoka', 'Bafut', 'Krasyliv', 'Saiha', 'Edinburgh', 'Kishkenekol', 'Leyton', 'Isiolo', 'Maibog', 'Chiranellur', 'Athens', 'Sampit', 'Ak-Suu', 'Toulal', 'Rangia', 'Morton Grove', 'Kaithinia', 'Antipolo', 'Tabant', 'Starachowice', 'Al Yadudah', 'Zonguldak', 'Piaseczno', 'Nirna', 'Mitrovice', 'Plainfield', 'Wheat Ridge', 'Oakbrook', 'Yahualica de Gonzalez Gallo', 'Drimmelen', 'Campo de la Cruz', 'El Roble', 'Umarga', 'Mqam at Tolba', 'Consett', 'Ponders End', 'Mount Lebanon', 'Harima', 'Marquette-les-Lille', 'Spata', 'Panaon', 'Viravada', 'Juazeiro do Norte', 'Tieling', 'Montemorelos', 'Manggar', 'Scicli', 'Gorlice', 'Rong Kwang', 'Mill Creek', 'Rivne', 'Linamon', 'Creazzo', 'Moody', 'Patnanungan', 'Qasr al Qarabulli', 'Leingarten', 'Saqultah', 'Farob', 'Chinju', 'Bartolome Maso', 'Pedreguer', 'Kuldiga', 'San Bernardo del Viento', 'Kushiro', 'Maha Sarakham', 'Campbell River', 'Balaxani', 'Kilakkurichchi', 'Uitenhage', 'Tudela de Duero', 'Brianka', 'Garkha', 'Kassel', 'Avigliana', 'Herk-de-Stad', 'Kassama', 'Al Jubayl', 'Longavi', 'Ugamedi', 'Leutkirch im Allgau', 'Lambeth', 'Koduman', 'Bais', "Petite Riviere de l'Artibonite", 'Presidencia Roque Saenz Pena', 'Bad Munder am Deister', 'Kottadindulu', 'Thorigny-sur-Marne', 'Tinkoni', 'Bilacari', 'Madruga', 'Chula Vista', 'Strzelce Opolskie', 'Chalgeri', 'Teningen', 'Cumanda', 'Ankilivalo', 'Arica', 'Mabacun', 'Agios Dimitrios', 'Pihuamo', 'Bithlo', 'Lititz', 'Barth', 'Nkhotakota', 'Castellammare del Golfo', 'Tearce', 'Jinzhou', 'Mahaditra', 'Nelkattumseval', 'Marinilla', 'Taguai', 'Ambohidronono', 'Ansfelden', 'Angus', 'Chintalapalli', 'Piedra Blanca', 'Prenzlau', 'San Manuel Chaparron', 'Grantham', 'Atyra', 'Cornwall', 'Conselice', 'Brasilia', 'Igarape Grande', 'Toplita', 'Malaimbandy', 'Kasane', 'Villa Ocampo', 'Simarwara Durgapur', 'Piripa', 'Giza', 'Xianyang', 'Rodas', 'Canal Winchester', 'Vashon', 'Pully', 'Venice', 'Miantsoarivo', 'Rio Cauto', 'Shahjanpur', 'Motala', "'Ain Azel", 'Taebaek', 'Mankato', 'Sevres', 'Sarangani', 'Berndorf', 'Paratinga', 'Floral Park', 'Santa Catalina', 'Greenacres', 'Verviers', 'Deoni Buzurg', 'Lantapan', 'Brakpan', 'Santa Ana Chiautempan', 'Chamtha', 'Umarkhed', 'Chai Nat', 'Willingboro', 'Licab', 'Esquel', 'Santiago Tuxtla', 'Suran', 'Mazenod', 'Holmdel', 'Saguiaran', 'Nidadavole', 'Santa Rosa de Calamuchita', 'Malkanur', 'Avaniyapuram', 'Saktipur', 'Lynnfield', 'Orange Park', 'Lanuvio', 'Magna', 'Hanumangarh', 'Ashmun', 'Ayirapuram', 'Viale', 'Andarai', 'Falesti', 'Soverato Marina', 'Loeches', 'Ibirataia', 'Chester-le-Street', 'Gouna', 'Azeffoun', 'Burlingame', 'Crest', 'Flowood', 'Lupane', 'Iferhounene', 'Eravattur', 'Mayaguez', 'Butte', 'Azangaro', 'Viladecans', 'Argenta', 'Rauma', 'Khombole', 'Trstenik', 'Kiryu', 'Mambusao', 'Tan Uyen', 'Yazoo City', 'Turnisce', 'Navolato', 'San Giustino', 'Lice', 'Antsambahara', 'Nagahama', 'Herndon', 'Phibun Mangsahan', 'Lushnje', 'Wanaque', 'Chuncheon', 'Thomassique', 'Havza', 'Wellington', 'Nagato', 'Aucamville', 'Corupa', "Ra's al Khafji", 'Novi Ligure', 'Isnos', 'Taiyong', 'Beigangwa', 'Greymouth', 'Sevilla', 'Beech Grove', 'Viry-Chatillon', 'Bogdanci', 'Hillsdale', 'Pao de Acucar', 'Fuqing', 'Oderzo', 'Osterholz-Scharmbeck', 'Saint-Maurice', 'Puluvappatti', 'Kabala', 'Marchtrenk', 'Lorch', 'Kamtaul', 'San Rafael Pie de la Cuesta', 'La Puente', 'San Andres', 'Robore', 'Baladharmaram', 'Breza', 'Rio del Mar', 'Bocas del Toro', 'Gorleston-on-Sea', 'Fair Oaks', 'Rapallo', 'Villahermosa', 'Kalappatti', 'Wilmington Island', 'San Francisco Menendez', 'Champahati', 'Baden', 'Maxaranguape', 'Macas', 'Rio Tinto', 'Ingre', 'Teroual', 'Nikki', 'Bir Tam Tam', 'Myrtle Grove', 'Wuxi', 'Si Mustapha', 'Rundu', 'Bar Bigha', 'Iriba', 'Jebba', 'Mohanpur Gaughata', 'Fetromby', 'Halwara', 'Carignano', 'Eagan', 'Bambari', 'Lachhmipur', 'Khemis Miliana', 'Candeias', 'Nioro du Rip', 'San Andres de la Barca', 'San Juan Nepomuceno', 'Knightdale', 'Marano Vicentino', 'Cergy', "Ping'anbao", 'Zhovti Vody', 'Saalfelden am Steinernen Meer', 'Lubumbashi', 'Oxkutzkab', 'Conakry', 'Puenteareas', 'Gotsucho', 'Sohna', 'Morehead City', 'Lingshou', 'Antotohazo', 'Ceadir-Lunga', 'Apucarana', 'Ihosy', 'Oulad Barhil', 'Turkoglu', 'Progreso', 'Oued el Aneb', 'Kendall West', 'Kignan', 'Camborne', 'Bagha Purana', 'Meskiana', 'Do Gonbadan', 'Geesthacht', 'Lodja', 'Winterthur', 'Taketa', 'Maple Heights', 'Kelangah', 'Stary Sacz', 'Chaplynka', 'Carlos Chagas', 'Metlaoui', 'Ciudad de Huitzuco', 'Ranod', 'Clevelandia', 'Alabat', 'Belovodskoe', 'Arriaga', 'Sandy Hook', 'Sainte-Agathe-des-Monts', 'Guacara', "Quan'ancun", 'Arques', 'Nenmem', 'Gravatal', 'Saidabad', 'Kolasin', 'Merritt Island', 'Jieshou', 'Hvalba', 'Maglaj', 'Favara', 'Elk Grove Village', 'Oppeano', 'Budai', 'Novi', 'Tambe', 'Balungao', 'Bocaranga', 'Pickering', 'Turuttiyad', 'Fihaonana', 'Kidira', 'Hopkins', 'Vitoria-Gasteiz', 'Harpur', 'Sibundoy', 'El Kouif', 'Furth im Wald', 'Vazhani', 'Mississippi Mills', 'Sayville', 'Bhawanipur', 'Bel Air', 'Lishui', 'Katuria', 'Sepatan', 'Uwchlan', 'Vieste', 'Pfullendorf', 'Poggio Renatico', 'Balha', 'Brandywine', 'Manville', 'Sokcho', 'Gaigirgordub', 'Kadattur', 'Munnelli', 'San Pancrazio Salentino', 'Kourani', 'As Suwayrah', 'Lubliniec', 'Poytya', 'Stezzano', 'Novomoskovsk', 'Conchal', 'Sandona', 'Nishitokyo', 'Gardnerville Ranchos', 'Nyiregyhaza', 'Encheng', 'Al Maraghah', 'Willmar', 'Sharg`un', 'Kaiken', 'Peravur', 'Ikot Abasi', 'Swift Current', 'Oizumi', 'Lieksa', 'Tarare', 'Marton', 'Berilo', 'Oldham', 'Kuppadi', 'Horodnia', 'Ataco', 'Nassjo', 'Wednesfield', 'Los Bellosos', 'Mombaca', 'Recife', 'Dalupo', 'Capitan Pablo Lagerenza', 'Tsarabaria', 'La Guaira', 'Ain Fakroun', 'Tsiatosika', 'Melikgazi', 'San Gabriel', 'Shek Wai Kok', 'Pati do Alferes', 'Nueva Santa Rosa', 'Gerasdorf bei Wien', 'Sesheke', 'Nueva Rosita', 'Diamante', 'Fyzabad', 'Masinloc', 'Bejofo', 'Ayolas', 'Eski Arab', 'Kaihua', 'Narlica', 'Amaga', 'Haaltert', 'Inhapim', 'Itacurubi de la Cordillera', 'Rawatsar', 'Kakhovka', 'Virapandi', 'Dasai', 'Laukaa', 'Uummannaq', 'Florin', 'Perampuzha', 'Mercato San Severino', 'Zografos', 'Bukit Gambir', 'Mericourt', 'Tchibota', 'Mantova', 'Santa Cruz Amilpas', 'Atarra', 'Saint-Jean', 'Mansong', 'Barbacha', 'Perondi', 'Lavasan', 'Poljcane', 'Haedo', 'Sumber', 'Riedisheim', 'Manvel', 'Willenhall', 'Green Cove Springs', 'Covasna', 'Zantiebougou', 'Mityana', 'Schiffweiler', 'San Antonio Sacatepequez', 'Kalas', 'Recreo', 'Bugiri', 'Rentachintala', 'Babhani Bholwa', 'Sedgley', 'Dimapur', 'Elkhart', 'Musoma', 'Luneburg', 'New Iberia', 'Espera Feliz', 'Catumbela', 'Leyte', 'Palkot', 'San Cugat del Valles', 'Briancon', 'Ermita', 'Kunnumel', 'Rain', 'Barretos', 'Jbail', 'Hadibu', 'Nagalapuram', 'Sibagat', 'Chamalieres', 'Minamishibetsucho', 'Gedaref', 'Mrirt', 'Rasra', 'Holbeach', 'San Angelo', 'Olawa', 'Sroda Wielkopolska', 'Sabalpur', 'Ban Plai Bua Phatthana', 'Homer Glen', 'Esparza', 'Media', 'Fasintsara', 'Atarfe', 'Tyamagondal', 'Panganiban', 'Arao', 'Campogalliano', 'Port Dickson', 'Iracemapolis', 'Ahmadabad', 'Kruje', 'Gourcy', 'Penn', 'Lubartow', 'Yigilca', 'Tiwi', 'Buffelshoek', 'Camacari', 'Kunitomi', 'Druento', 'Qarabalyq', 'Reghaia', 'Edavanakad', 'Baden-Baden', 'Seevetal', 'Puerto Viejo', 'Segarai', 'Khadbari', 'Zag', 'Amta', 'Biritinga', "Hayma'", 'Nebbi', 'Malacky', 'Bacolod', 'Nkouraba', 'Cuatro Cienegas de Carranza', 'Barishal', 'Lower', 'Lam Luk Ka', 'Petershagen', 'Balham', 'Titusville', 'Vijes', 'Harpur Bochaha', 'Sido', 'Sogam', 'Victorias', 'Khaira', 'Capanema', 'Slatina', 'Hajduszoboszlo', 'Kentau', 'Ba', 'Amagasaki', 'Changzhi', 'Orange Lake', 'Montigny-en-Gohelle', 'Gangaikondan', 'Shibin al Kawm', 'Kai', 'Bayanhongor', 'Erer Sata', 'Hamamatsu', 'Cedar Rapids', 'Lubbeek', 'Pingyi', 'Reo', 'Ban Chomphu Nuea', 'Mundi', 'Kantai', 'Genoa', 'Agadir', 'Wawarsing', 'Best', 'Tanjore', 'Tiete', 'Pacajus', 'Varzea Grande', 'Inawashiro', 'Sigmaringen', 'Banjarmasin', 'Punata', 'Fairview Heights', 'Embu', 'Brea', 'Mohacs', 'Rosedale', 'Hammam al `Alil', 'Papireddippatti', 'Beaumont-sur-Oise', 'Makokou', 'Eastchester', 'Huntington', 'Zarumilla', 'San Sebastian de Buenavista', 'Ouezzane', 'Ambalarondra', 'Waukegan', 'San Pablo Huixtepec', 'Kasangati', 'Boanamary', 'Nancy', 'El Pueblito', 'South Miami Heights', 'Sayyid Sadiq', 'Xghajra', 'Bulacan', 'Tanichchiyam', 'Heilbron', 'Sao Caetano de Odivelas', 'Ayos', 'Rawasari', 'Herve', 'Fairview Shores', 'Naklo nad Notecia', 'San Carlos', 'Varazze', 'Konak', 'Kattakampala', 'New Berlin', 'Tuam', 'Talaigua Nuevo', 'Suchanino', 'Alcobendas', 'Mangalapuram', 'Lombard', 'Temacine', 'Fancheng', 'Norristown', 'Kasrawad', 'Memuro-minami', 'Newman', 'Kumamoto', 'Shimizu', 'Adrogue', 'Belo Horizonte', 'Kurtalan', 'Gulf Shores', 'Kenitra', 'Esquipulas Palo Gordo', 'Dursunbey', 'Pazhayannur', 'Little Elm', 'Bassersdorf', 'Sant Just Desvern', 'Surir', 'Goodlettsville', 'Nanjo', 'Al Jammaliyah', 'Binyin', 'Osterwieck', 'Novoishimskiy', 'Ananas', 'Lower Moreland', 'Gradsko', 'Qiaotou', 'Gole', 'Marale', 'Kantang', 'Rajapur', 'Boha', 'Orbetello', 'Suances', 'Cheshunt', 'Nagykallo', "Bu'aale", 'Chikitigarh', 'Djenne', 'Le Luc', 'Sankeshwar', 'Hrebinka', 'Ronchin', 'Pugo', 'Putnam Valley', 'Valka', 'Sig', 'Andernos-les-Bains', 'Neuchatel', 'Varginha', 'Noventa Vicentina', 'Athy', 'Aradeo', 'Cururupu', 'Funza', 'Alghero', 'Termiz', 'Rockwall', 'Joliet', 'Alatri', 'Damarcherla', 'Nako', 'Muzaffarnagar', 'Kazlu Ruda', 'Urun-Islampur', 'Thorpe Saint Andrew', 'Baubau', 'Felixstowe', 'Mimata', 'Babenhausen', 'Valley Center', 'Trabzon', 'Ban Pak Phun', 'Koudougou', 'Wisbech', 'Danvers', 'Dhamnod', 'Rudrur', 'Nueva Italia de Ruiz', 'Beacon', 'Deerfield Beach', 'Tinkhang', 'Aiyomojok', 'Gevas', 'Salou', 'Pontarlier', 'Valluvandad', 'Pingtouchuanxiang', 'Takasagocho-takasemachi', 'Marsaskala', 'Zequ', 'Sarapui', 'Bhangha', 'Saint Albans', 'Fountain Hills', 'Bultfontein', 'Plant City', 'Bidestan', 'Alvorada', 'Faenza', 'Benyahia Abderrahmane', 'Al Hindiyah', 'Vohitromby', 'Guadalupe Victoria', 'Croissy-sur-Seine', 'Agudo', 'Carire', 'Rho', 'Gassino Torinese', 'Uzice', 'Magny-le-Hongre', 'Bazimini', 'New York', 'Croata', 'Shahgarh', 'Nyzhnohirskyi', 'St. Simons', 'Fleury-les-Aubrais', 'Paulino Neves', 'Buderim', 'Kapchagay', 'Ahogbeya', 'Warabi', 'Sasagawa', 'Edappalli', 'Bobleshwar', 'Sardinata', 'Niederhasli', 'Grez-Doiceau', 'Crimmitschau', 'Bound Brook', 'Magog', 'Pottstown', 'Loveland', 'Fort Mill', 'Kpandu', 'Ngoc Son', 'San Agustin', 'Turkeli', 'Keynsham', 'Padre Las Casas', 'Huangzhai', 'Acul du Nord', 'Sha Tin', 'Reddipalle', 'Dalby', 'Chigwell', 'Chaigoubu', 'Marolinta', 'Tarma', 'Schwabisch Gmund', 'La Teste-de-Buch', 'Bouna', 'Creil', 'Balatonalmadi', 'Srivardhan', 'Ettenheim', 'Spanish Town', 'Castellabate', 'Salimpur', 'Bhusaval', 'Ayomi', 'Babhniyawan', 'Dundankop', 'Khulna', 'Incheon', 'Prizren', 'Yuvileine', 'Taastrup', 'Maibara', 'Liberec', 'Bordj Bou Arreridj', 'Bueu', 'Baba I', 'Uttarkashi', 'Karuizawa', 'Ikast', 'Tapolca', 'Matrah', 'Chakia', 'Sosa', 'Kabalo', 'High River', 'Chemnitz', 'Mirganj', "Mohale's Hoek", 'Capena', "Fanja'", 'Idumbavanam', 'Travagliato', 'Noicattaro', 'Lasht-e Nesha', 'Bel Air North', 'Missour', 'Itaipe', 'Sandpoint', 'Villepinte', 'Tendukheda', 'Ljubovija', 'Mount Pleasant', 'Yucca Valley', 'Lai Chau', 'Litija', 'Liberty Lake', 'Kidamangalam', 'Cambara', 'Funchal', 'Nansana', 'Ongwediva', 'Jamhra', 'Gulbene', 'Hot Springs Village', 'Gothini', "Douar 'Ayn Dfali", 'Belsh', 'Shimokodanaka', 'Gerash', "Welench'iti", 'Pozorrubio', 'Placer', 'Streatham', 'Muttalakanpatti', 'Bougival', 'Porteiras', 'Liuguoju', 'Sultanhani', 'Dhutauli', 'Jilin', 'Dongsu', 'Saho', 'Santa Cecilia', 'Jalor', 'Kuruvambalam', 'Maraba', 'Yairipok', 'Shafter', 'Zabari', 'Sauzal', 'Barharwa', 'Sisia', 'Fabriano', 'Buttar', 'Corlu', 'Kailashahar', 'Mosina', 'Novo Oriente', 'Ploemeur', 'Goshaingaon', 'Amba Icharua', 'Lehara', 'eXobho', 'Waldenbuch', 'Marimba', 'Kavundappadi', 'Al Hudaydah', "Castelnovo ne' Monti", 'Ipuiuna', 'Az Zawiyah', 'Barros Blancos', 'Royan', 'Zaojiao', 'Tiztoutine', 'Tanashicho', 'Ebbw Vale', 'Swidnik', 'Snohomish', 'Melendugno', 'Pavia', 'Whitman', 'Cuilo', 'Russas', 'Callao', '`Izbat al Burj', 'Lushoto', 'Dayr al Barsha', 'Pepinster', 'Bustos', 'Ureshinomachi-shimojuku', 'Sabugal', 'Forest City', 'Afonso Bezerra', 'Kaliganj', 'Hato Mayor', 'Kete Krachi', 'Rasipuram', 'Ennepetal', "Cassano d'Adda", 'Ronne', 'Olpe', 'Karlsruhe', 'Janpur', 'Wepener', 'Happy Valley', 'Bhadrakh', 'Gaz', 'Kayanza', 'St. Ann', 'Chitcani', 'Tafrant', 'Itabuna', 'Iracoubo', 'Mirzanagar', 'Livry-Gargan', 'Bequimao', 'Dazhangzi', 'Zhugang', 'Waalre', 'La Roda', 'Kamthi', 'Piggs Peak', 'Mahadeopur', 'Annakattumula', 'Patterson', 'Koriukivka', 'Altenberge', 'Rajula', 'Hayden', 'Yambio', 'Saram', 'Vereeniging', 'Beauharnois', 'La Ravoire', 'Kulhudhuffushi', 'Alatsinainy-Bakaro', 'Nedugula', 'Goaso', 'Araucaria', 'Fort Washington', 'Sargur', "Boula'wane", 'Hussepur', 'Frederick', 'An Nimas', 'Joao Pessoa', 'La Eliana', 'Samsun', 'Bang Phlat', 'Ketugram', 'Dhobauli', 'Bad Wildungen', 'Estancia Pozo Colorado', 'Roseller Lim', 'Skarzysko-Kamienna', 'Alba', 'Sengurichchi', 'Togitsu', 'Chas', 'Petlad', 'El Rama', 'Parkstone', 'Lorca', 'Western Bicutan', 'Harare', 'Fort Valley', 'Staryy Beyneu', 'Mainit', 'Rosales', 'Isafjordhur', 'Moreau', 'Kapra', 'Tarascon', 'Tubbergen', 'Santa Comba Dao', 'Fatikchari', 'Sumy', 'Ignacio de la Llave', "Sant'Elpidio a Mare", 'Pocono', 'Kakogawacho-honmachi', 'Guajara-Mirim', 'Beekman', 'Meymand', 'University of Virginia', 'Haines City', 'Simao Dias', 'Signal Hill', 'Makedonski Brod', 'Kamakurayama', 'Blackpool', 'Poxoreo', 'Kohir', 'Talata-Angavo', 'Bielefeld', 'Tomesti', 'Universal City', 'Kotka', 'Uttukkottai', 'Baruta', 'Solana', 'Ain Mediouna', 'Garoowe', 'Ap Tan Ngai', 'Sarigol', 'Muong Lay', 'Guryongpo', 'Moorestown', 'Nottampatti', 'Marienheide', 'Tiruppur', 'Bikkatti', 'Fereydunshahr', 'Ezhamkulam', 'Ajax', 'Buguda', 'Shijiazhuang', 'Pantanal', 'Meylan', 'Dobanovci', 'Bizerte', 'Gingin', 'Baroni Khurd', 'Cusseta', 'Alhambra', 'Chaoyang', 'Wakema', 'Holland', 'Prunedale', 'Akure', 'Huautla', 'Ghora Gali', 'Ayase', 'Ciudad de Loreto', 'Bittou', 'Andong', 'Halluin', 'Yuncos', 'Eurajoki', 'Morden', 'Gueret', 'Mutukula', 'Abovyan', 'Al Hufuf', 'Tapaua', 'Tegalbuleud', 'Brackenheim', 'Tanaina', 'Siloam Springs', 'Kuusankoski', 'Concordia', 'Parsippany', 'Lapanga', 'Manhuacu', 'Extrema', 'El Manteco', 'Setti Fatma', 'Guneysinir', 'Sagara', 'Kohima', 'Guacheta', 'Hidaj', 'Gollalagunta', 'Sawla', 'Tamazouzt', 'Qoryooley', 'Constantine', 'Boulsa', 'Bhataulia', 'Oued Athmenia', 'Villa Ballester', 'Leipzig', 'Khewra', 'Wondelgem', 'Somanya', 'Melissa', 'Wangdue Phodrang', 'Rietavas', 'Kirkby in Ashfield', 'Khadra', 'Whitemarsh', 'Palauig', 'Aleg', 'El Barrio de la Soledad', 'Rosario Oeste', 'Pappakudi', 'Zorneding', 'Grodzisk Mazowiecki', 'Osowa', 'Samayanallur', 'Manampizha', 'Bago', 'Country Club', "Lu'an", 'Santa Teresinha (2)', 'East Rockaway', 'Minja', 'Ilkhechi', 'Moreno', 'Novoazovsk', 'Seondha', 'Castleton', 'Hannover', 'Bhopal', 'Barnoi', 'Casaluce', 'Hola', "Trezzo sull'Adda", 'Minxiong', 'Eboli', 'Bairiya', 'Bad Waldsee', 'Montagu', 'Torres', 'Pelitli', 'Ararenda', 'Coronel Fabriciano', 'Monteria', 'Kavlinge', 'Grand Gosier', 'Tidjikja', 'Palayad', 'Kattipudi', 'Kibichuo', 'Guoxing', 'Viera West', 'Aw Dheegle', 'Ipanguacu', 'Oued Fodda', 'Yoju', 'Leesburg', 'Sathiala', 'Senkaya', 'Kucevo', 'Mandal', 'Dauis', 'Oyon', 'Kulmbach', 'Macul', 'Aysha', 'Ash Shajarah', 'Huishi', 'Chiyoda-ku', 'Ceccano', 'Massenya', 'Impasugong', 'Champlin', 'Saint Ives', 'Sycamore', 'Mahalgaon', 'Yanqi', 'Puren', 'Koronadal', 'Santa Catarina Ixtahuacan', 'Dulmial', 'Bourg-les-Valence', 'Du Yar', 'Marsa', 'Bootle', 'Swidnica', 'Saint-Avold', 'Goleta', 'Peshkopi', 'Ahenkro', 'Fort Lewis', 'Barra do Garcas', 'Greytown', 'Thouare-sur-Loire', 'Abdul Hakim', 'Pont-Sainte-Maxence', 'Malindi', 'Meridian', 'Katsuyama', 'Pigue', 'Iradan', 'Mukerian', 'Yuzhang', 'Quang Ngai', 'Vila Nova de Gaia', 'Tembagapura', 'California City', 'Dolores Hidalgo Cuna de la Independencia Nacional', 'Guazacapan', 'Tympaki', 'Zhuozhou', 'Tangub', 'Gelemso', 'Paal', 'Hassi Berkane', 'Plandiste', 'Eduttavaynattam', 'Alice', 'Hannibal', 'Novi Travnik', 'Kairana', 'Ilinden', 'Mmabatho', 'Hoyacho', 'Menzel Bourguiba', 'Tinsukia', 'Medina del Campo', 'Ramon', 'Pierrefitte-sur-Seine', 'Monroeville', 'Randaberg', 'Maple Shade', 'Grottaglie', 'Hisua', 'Endicott', 'Palagiano', 'Martigues', 'Huacho', 'Wangtang', 'Daiyue', 'Chiguayante', 'Einbeck', 'Tezonapa', 'Sousse', 'Vikravandi', 'Hellemmes-Lille', 'Manampaneva', 'Spresiano', 'San Jose La Arada', 'Salina Cruz', 'Pindamonhangaba', 'Bad Aibling', 'Zhangjiazhuangcun', 'Selestat', 'Ascencion de Guarayos', 'Hastinapur', 'Xangda', 'Trujillo Alto', "Ji'an", 'Dapaong', 'Kernersville', 'Conceicao do Coite', 'Aguas Santas', 'A Coruna', 'Yanshanbu', 'Boizenburg', 'Zachary', 'Lake Forest', 'La Paz', 'Malta', 'Alfreton', 'Halfeti', 'Paracuru', 'Valdivia', 'Belen', 'Cawayan', 'Oda', 'Ankililoaka', 'Pondaluru', 'Larkana', 'Polaia Kalan', 'Curua', 'Hayashima', 'Brigham City', 'Beverungen', 'Towcester', 'Nove Zamky', 'Palpa', 'Gollapudi', 'Gescher', 'Camboriu', 'El Plan', 'Sokolo', 'Titel', 'Vallentuna', 'Montreux', 'Delmas', 'Edirne', 'Ngolonianasso', 'Nsiika', 'Rowlett', 'Kuchlagh', 'La Palma del Condado', 'Senago', 'Antanandava', 'Steger', 'Thatta', 'Al Hamzah', 'Krolevets', 'Cabrera', 'Baia-Sprie', 'Santiago Suchilquitongo', 'Aabenraa', 'San Marco in Lamis', 'Kota Bharu', 'Guayabal', 'Dasaut', 'Kauhava', 'Sabie', 'Garrucha', 'Maungdaw', 'Kaduturutti', 'Castenedolo', 'Rio Rico', 'Varese', 'Chadchan', 'Seoni Chhapara', 'Rodeo', 'Saint-Esteve', 'Kierspe', 'Velke Mezirici', 'Khajuraho', 'Thuin', 'Douz', 'Bac Lieu', 'Differdange', 'Imqabba', 'Barao de Cocais', 'Bijar', 'Payerne', 'Janakammapeta', 'Takaishi', 'Los Alamos', 'Xianshuigu', 'San Giuliano Terme', 'Leigh', 'Sinjar', 'Chalastra', 'Souakene', 'Carmo do Cajuru', 'Tirumangalam', 'Nagardevla Budrukh', 'Aqchah', 'As Sars', "'Ain el Assel", 'Cusco', 'Bacarra', 'Maracacume', 'Tanhuato de Guerrero', 'Winston-Salem', 'Fukuchiyama', 'Grangemouth', 'Olive Branch', 'Zumpango del Rio', 'Mardan', 'Greenwood', 'Torre de Moncorvo', 'Nanthankulam', 'Gadzin Han', 'Manappakkam', 'San Vicente Pacaya', 'Suresnes', 'Piraziz', 'Anadia', 'Valle Nacional', 'Zhongdong Shequ', 'Howick', 'San Pedro de Uraba', 'Konaje', 'Rayappanpatti', 'Nanuque', 'Zhutailing', 'Bovec', 'Villa Riva', 'Sanza Pombo', 'Brossard', 'Olbia', 'Bredene', 'Kaliro', 'Joetsu', 'Kruszwica', 'Surajpura', 'Primeira Cruz', 'Hanumana', 'Ghattupal', 'Changamkari', 'Bistrita', 'Les Herbiers', 'As Suqaylibiyah', 'Quebec City', 'Wiehl', 'Waltrop', 'Kadiana', 'Tadapurambakkam', 'Delportshoop', 'Pinghu', 'Bad Endorf', 'Singarayakonda', 'Prestwich', 'Weesp', 'Itamukkala', 'Ad Dab`ah', 'Salvador', 'Weipa', 'Agdam', 'Tolu Viejo', 'Erzincan', 'Arfoud', 'Giulianova', 'El Paso de Robles', 'Pedda Nindrakolanu', 'Shawangunk', 'Flamanzi', 'Icod de los Vinos', 'Vatananto', 'Frattaminore', 'Sterling Heights', 'Zhongliao', 'Tirmalgiri', 'Deutsch-Wagram', 'Peddaboddepalle', 'Kilosa', 'Gulbaar', 'Malema', 'Novo Horizonte', 'Blairgowrie', 'Nieder-Olm', 'Pertuis', 'Xacmaz', 'Tenerife', 'Krishnapuram', 'Molndal', 'Luba', 'Puttalam', 'Jegunovce', 'Kauniainen', 'Zapatoca', 'Abong Mbang', 'Tiana', 'Ryki', 'Mitai', 'Mullingar', 'Dom Pedro', 'Simiti', 'Naguilian', 'Roccapiemonte', 'Lanaken', 'Svatove', 'Salto del Guaira', 'Liman', 'Paisley', 'Skwierzyna', 'Balagtas', 'Belma', 'Olathe', 'Kumil', 'Hamworthy', 'Solwezi', 'Fucheng', 'Lanivo', 'Dao', 'Putao', 'Rafaela', 'Yozgat', 'Sao Joao da Boa Vista', 'Sapanca', 'Prebold', 'Gwangyang', 'Imerintsiatosika', 'Munnalam', 'Tizayuca', 'Sorso', 'Anah', 'Locarno', 'Wageningen', 'Muhammadganj', 'Binangonan', 'San Gregorio de Nigua', 'Oneonta', 'Bole', 'Mannara', 'Darnah', 'Habo', 'Aubagne', 'Pinili', 'Ostuni', 'Jitwarpur Chauth', 'Mainaschaff', 'Inderbor', 'Westlake', 'Esher', 'Gaimersheim', 'Fort Knox', 'Tiruvambadi', 'Towada', 'Kasangulu', 'Chernihiv', 'Pardubice', 'Ekibastuz', 'Tramore', 'Luchenza', 'Asarganj', 'Mascalucia', 'Bartica', 'Ipixuna', 'Gainza', 'Buea', 'Puerto Concordia', 'Mohammadabad', 'Kindia', 'Kadingilan', 'Amboasary-Gara', 'Kanchanadit', 'Sabaneta', 'Amingaon', 'Los Polvorines', 'Catano', 'Alto Longa', 'Zharkent', 'Samba', 'Hempstead', 'Arandelovac', 'Ghogha', 'Cluses', 'Kumarapalaiyam', 'Warendorf', 'Ath', 'Thanh Xuan', 'Esik', 'Luneville', 'Burgstadt', 'Deggendorf', 'Kozlu', 'Abdullahnagar', 'Penarroya-Pueblonuevo', 'Curug', 'Cosmopolis', 'Schoningen', 'Desri', 'Tandubas', 'Masiu', 'Petawawa', 'Buxerolles', 'Wazirabad', 'Wath upon Dearne', 'Agios Athanasios', 'Novi Marof', 'Chotebor', 'Ciudad General Belgrano', 'Kitaibaraki', "'s-Gravenzande", 'Wellington North', 'Jiuduhe', 'Villingen-Schwenningen', 'Stockbridge', 'Wauconda', 'Kayyngdy', 'Sultandagi', 'Torshavn', 'Wadala Sandhuan', 'Nordkirchen', 'Nandikotkur', 'Geisenheim', 'Shirvan', 'Port Victoria', 'Santo Antonio de Jesus', 'Matulji', 'Bagaces', 'Thisted', 'Perur', 'Coreau', 'Chitose', 'Marka', 'Weener', 'Barrinha', 'Limburg', 'Qazax', 'Itapa-Ekiti', 'Cajetina', 'Bendougouba', 'Buriti do Tocantins', 'Uppur', 'Hikari', 'Parnarama', 'Hammerfest', 'Alcala la Real', 'Deolali', 'Arukutti', 'Dar Si Aissa', 'Ratia', 'Gangaura Behra', 'Mehdya', 'Vagos', 'Ramdeora', 'Saint Helena Bay', "'Ain Fekan", 'Saint-Germain-les-Arpajon', 'Farrukhabad', 'Palmeiras', 'Saint-Lo', 'Marwa', "Khmis Sidi al 'Aydi", 'San Joaquin', 'Koteshwar', 'Pandalkudi', 'Kukawa', 'Lydney', 'Kanchanpalli', 'Chlef', 'Moquegua', 'Yanguancun', 'Dalavaypattanam', 'Gundlupet', 'Sabinov', 'Praia Grande', 'Tamba-Sasayama', 'Dachengzicun', 'Biloziria', 'Sanaur', 'Rudehen', 'Palmares', 'Fairfax Station', 'Bangor', 'Mula', 'North Chicago', 'Koencho', 'Mahibadhoo', 'Vallejuelo', 'Cajola', 'Mvurwi', 'Jinku', 'Zgorzelec', 'Barnstable', 'San Ignacio de Velasco', 'Fuquan', 'Bayan Lepas', 'Clovis', 'Kappeln', 'Ivaipora', 'Piraquara', 'Capul', 'Sieradz', 'Mombasa', 'Bayawan', 'Kahrizak', 'Rohnert Park', 'Ouzera', 'Mampong', 'Mezokovesd', 'Briceni', 'Pedara', 'Koch Bihar', 'Thaba Nchu', 'Dehti', 'Sebnitz', 'Metz', 'Mansa', 'Sanwer', 'Bingawan', 'Vechur', 'Kirkja', 'Tororo', 'Miandasht', 'Peringom', 'Nambutalai', 'Zarafshon Shahri', 'Deer Park', 'Atalaia', 'Congleton', 'Srebrenik', 'Rauch', 'Slavuta', 'Vengattur', 'Bokoro', 'Pureparo de Echaiz', 'Osijek', 'Divrigi', 'East San Gabriel', 'Gapan', 'Comilla', 'Ropar', 'Oak Bay', 'San Felipe', 'Dinmanpur', 'Konigslutter am Elm', 'Nittenau', 'Ardahan', 'Kelaa Kebira', 'Gulcho', 'Sanzhou', 'Albinea', 'Acireale', 'Wadegaon', 'Australind', 'Yaopu', 'Ramnagar', 'Tummalacheruvu', 'Morretes', 'Askoy', 'Teziutlan', 'Tupiza', 'Monastir', 'Dumri', "Estrela d'Oeste", 'Ribeirao Pires', 'Holesov', 'Ghatal', 'Santa Maria Huatulco', 'Vleuten', 'Ambatomiady', 'Isale', 'Chalmette', 'Tirukkoyilur', 'Ishikari', 'Lunglei', 'Desuri', 'Kaikoura', 'Vaghodia', 'Al Fayd', 'Kahoku', 'Bettendorf', 'Palafolls', 'Hadzici', 'Jocoro', 'Nanto', 'Botelhos', 'St. Clair Shores', 'Rahon', 'Monte Santo', 'Watsonville', 'Shankar Saraiya', 'Zlin', 'Maner', 'Sauce', 'Pudupattanam', 'Midalt', 'Jarinu', 'Goma', 'Laqtah', 'Ghorbanki', 'Nehoiu', 'Comayaguela', 'Sardhana', 'Butiama', 'Novo Hamburgo', 'Isahaya', 'Rouiba', 'Minatitlan', 'Devikolam', 'Putatan', 'Ala', 'Sirsi', 'Lawrence', 'Sarapaka', 'Turmalina', 'Yaguara', 'Tongjin', 'Monte Alegre de Minas', 'Nesher', 'Morsand', 'Rafsanjan', 'Brisbane', 'Rheinberg', 'Assamannur', 'Unterageri', 'Sanquelim', 'Steinhaus', 'Banja Luka', 'Guaimaro', 'Luquembo', 'Ambatomarina', 'Kucove', 'Thilogne', 'Sangereng', 'Beni Saf', 'Itabela', 'Woodfield', 'Sandavagur', 'Drobak', 'Fuldabruck', 'San Luis Obispo', 'Chandralapadu', 'Kanp', 'Sainte-Sophie', 'Karakopru', 'Maple Grove', 'Yachiyo', 'Wuling', 'Foammulah', 'Mouvaux', 'Gorizia', 'Umarkot', 'Forestdale', 'Rawatbhata', 'Bongaree', 'Jalingo', 'Koog aan de Zaan', 'Lapy', 'Rayleigh', 'Khandsa', "Fontaine-l'Eveque", 'Den Helder', 'Nanmucun', 'Sakaraha', 'Flixton', 'Magenta', 'Muroto-misakicho', 'Panhar', 'Ait Ourir', 'Mawkanin', 'Cheadle', 'Kuusamo', 'Velivennu', 'Espoo', 'Kakhandiki', 'Kortenberg', 'Krasnodon', 'Hennigsdorf', 'Olmos', 'Maropaika', 'Mahadipur', 'Itatuba', 'Uckfield', 'Belle Glade', 'Acatlan de Osorio', 'Elfers', 'Al Jumayl', 'Hatsukaichi', 'At Tall', 'Uzynaghash', 'Beatrice', 'Frogn', 'Yao', 'Monfort Heights', 'El Mansouria', 'Sileby', 'Emerald', 'Alubijid', 'Llanquihue', 'Yerrapalem', 'Tuguegarao', 'Tiel', 'Mahallat', 'Varena', 'Cherukunnu', 'Lahat', 'Nevsehir', 'Suonan', 'Cloncurry', 'Chikhli Kalan', 'Vedasandur', 'Coyaima', 'Yinajia', 'Rewtith', 'Villa Adelina', 'Siur', 'Urucania', 'Petrinja', 'Bruntal', 'Wietmarschen', 'Tumkur', 'Qana', 'Darlowo', 'Qamdo', 'Nova Gradiska', 'Barhiya', 'Balighattam', 'St. Petersburg', 'Paglat', 'Nauhata', 'Derdara', 'Barai', "King's Lynn", 'San Pablo Tacachico', 'Presidente Bernardes', 'Elizabethton', 'Ezhou', 'Kallamalai', 'Estrela', 'Tuburan', 'Solapuram', 'Dohta', 'Yazihan', 'Monett', 'Mayang Imphal', 'Starse', 'Chilon', 'Libano', 'Raneswar', 'Raydah', 'Tanjungpandan', 'Puning', 'Cabreuva', 'Shoufeng', 'Ipecaeta', 'Lujan', 'Sultanhisar', 'Kaynarca', 'Mwanza', 'Newport', 'Beypazari', 'Ramshir', 'Trakai', 'Mohiuddinnagar', 'Geddes', 'Rinteln', 'Nagarur', 'Bagra', 'Polkowice', 'Ban Sathan', 'Cotonou', 'Trier', 'Odenthal', 'Oyim', 'Tajarhi', 'Birigui', 'Moravske-Toplice', 'Tatebayashi', 'Serravalle Pistoiese', 'Targuist', 'Datu Paglas', 'Aral', 'Mazarron', 'Dank', 'Elk City', 'Chandankiari', 'Shatiancun', 'Kakamas', 'Sikat', 'Bhawanandpur', 'Stone Ridge', 'Kushimoto', 'Great Wyrley', 'Tall Banat', 'Yunfu', 'Westville', 'Karkamis', 'Ingabu', 'Rensselaer', 'Tonbridge', 'Phai Sali', 'Vaiano', 'Bikramganj', 'Nawsari', 'Akcaabat', "Al Qa'im", 'Carambei', 'Boloso', 'Shijiazhuangnan', 'Sidi Lahsene', 'River Edge', 'Shuozhou', 'Walnut Park', 'Ben Chicao', 'Eggenfelden', 'Floriano', 'Sevilimedu', 'Vesoul', 'Chinna Kalaiyamputtur', 'Los Cordobas', 'Malimono', 'Eslamabad-e Gharb', 'Akersberga', 'Bhaktapur', 'Tokha', 'Egelsbach', 'Windham', 'Shantou', 'Joyo', 'Asni', 'Camacan', 'Lomas de Zamora', 'Pedernales', 'Presidente Medici', 'Itabirito', 'Heris', 'Zarraga', 'Korgas', 'Horti', 'Aksaray', 'Ghal Kalan', 'Apex', 'Tummapala', 'Vavveru', 'Maumee', 'Chicomba', 'La Madeleine', 'Mekra', 'Mataquescuintla', 'Sitio do Quinto', 'Khanah Sur', 'World Golf Village', 'Mambore', 'Limoux', 'Hinode', 'Spodnji Duplek', 'Yancheng', 'Patia', 'Caiguantun', 'Gualdo Tadino', 'Fulshear', 'Dacun', 'Nawabshah', 'Sannicolau Mare', 'Zontecomatlan de Lopez y Fuentes', 'Devanangurichchi', 'Kovur', 'Manikganj', 'Qal`ah-ye Zal', 'Schuttorf', 'Fushun', 'Tsuruno', 'Sikhio', 'Tomelloso', 'Israin Kalan', 'Dinajpur', 'Douar Oulad Mbarek', 'Afanyangan', 'North Branch', 'Pattukkottai', 'Malo Crnice', 'Taboao da Serra', 'Kathurah', 'Tottori', 'Gundlapelle', 'Dusseldorf', 'Guisser', 'Abuyog', 'Sabalgarh', 'Cabugao', 'Pata Kalidindi', 'Mountain Top', 'Limonar', 'Urupes', 'Mudhol', 'Peranampattu', 'Deyr', 'San Enrique', 'Santa Lucia', 'Ghoriyan', 'Pervomaiskyi', 'Obikiik', 'Jauli', 'Grevenbroich', 'Chupaca', 'Ban Bueng Kok', 'Rada`', 'Chapeltique', 'Ghazipur', 'Barcarena Nova', 'Kayaralam', 'Saint-Gilles', 'Crosby', 'Tatahuicapan', 'Nedumangad', 'Meitingen', 'Matomou', 'Balud', 'Yatou', 'Stephanskirchen', 'Same', 'Rome', 'Oud-Heverlee', 'Mau Dhaneshpur', 'Sparks', 'Kyotango', 'Hirokawa', 'Kahama', 'General Emilio Aguinaldo', 'Bozhou', 'Areiopolis', 'Izumisano', 'Karmauli', 'Itele', 'Ghedi', 'Champdani', 'Ringas', 'Istog', 'Madre de Deus', 'San Jose', 'Duraiswamipuram', 'Kele', 'Andujar', 'Erdemli', 'Iwakuni', 'Clarines', 'Saint-Martin-de-Crau', 'Kankipadu', 'Bantayan', 'Kilibo', 'Kerpen', 'Moultrie', 'Belaur', 'Juan Rodriguez Clara', "P'yongch'ang", 'Verukulambu', 'Esenyurt', 'Ban Muang Ngam', 'Wako', 'Yankou', 'Radeberg', 'Haddada', 'Mill Creek East', 'Szekesfehervar', 'Lower Saucon', 'Lijiacha', 'Xushan', 'Tumauini', 'Molakalumuru', 'Mayureswar', 'Gnarrenburg', 'Kefamenanu', 'Lockhart', 'Pulgaon', 'El Palmar', 'Champlain', 'Adalpur', 'Omboue', 'Yueyaquan', 'Melila', 'Potomac Park', 'Barranqueras', 'Santa Cruz de la Sierra', "Yan'an", 'Talwara', 'Kod', 'Hechi', 'Green Bay', 'Murzuq', 'Farnham', 'Roznava', 'Merate', 'Nathana', 'Malakwal', 'Buhi', 'Tadepallegudem', 'Monastyryshche', 'Sneek', 'Gaoshu', 'Pergine Valsugana', 'Fond des Blancs', 'College', 'Liuliang', 'Huilongcun', 'Ballymena', 'Fitampito', 'Herzogenaurach', 'Weilheim', 'Melegnano', 'Coribe', 'Thondiamannu', 'Bermejo', 'Granville', 'Retalhuleu', 'Mayari', 'Tall Qasab', 'Bisaria', 'Bellinzona', 'Klodzko', 'Dessalines', 'Zamosc', 'Village St. George', 'Sturgis', 'Sanyi', 'Vennandur', 'El Paso', 'Andurkonam', 'Zitsa', 'Dainyor', 'Kaysville', 'Tijucas do Sul', 'Cap-Haitien', 'Linslade', 'Banki', 'Santo Antonio do Amparo', 'Diapaga', 'Buqda Caqable', 'Cochabamba', 'Irthlingborough', 'Ena', 'Donostia', 'Dippoldiswalde', 'Gisenyi', 'As Sanamayn', "Capo d'Orlando", 'Bahcesaray', 'Yamanouchi', 'Balangkas', 'Mihona', 'Ambodimandresy', 'El Mirage', 'Gazojak', 'Marvast', 'Etzatlan', 'Sicklerville', 'Pocao', 'Nicoadala', 'Belo Campo', 'Maina', 'Piripiri', 'Bani Hasan ash Shuruq', 'McComb', 'Southeast', 'Cavriglia', 'Kole', 'Granger', 'Rivera', 'Ponferrada', 'St. Anthony', 'Queanbeyan', 'Rangsdorf', 'Rio Grande da Serra', 'Carrieres-sur-Seine', 'Brookhaven', 'Lauderhill', 'Puerto Jimenez', 'Casas Adobes', 'Aligudarz', 'Buchireddipalem', 'Mae Sai', 'Guarai', 'Sprimont', 'Huizucar', 'Teano', 'Liria', 'Zhemgang', 'Funing', 'Amorebieta', 'Irece', 'Bansang', 'Shizukuishi', 'Kummarapurugupalem', 'Buzovna', 'Vadakkumbagam', 'Nago', 'Toma', 'Barra do Ribeiro', 'Mentone', 'Chorhat', 'Norrkoping', 'Hohoe', 'Montefiascone', 'Dario Meira', 'Sopetran', 'Dras', 'Sadpur', 'Miryang', 'Jagoniguda', 'Naklo', 'Agcabadi', 'Kermanshah', 'Kauswagan', 'Barah', 'Araujos', 'Herculandia', 'Sironj', 'Varazdin', 'Macomb', 'Corcoran', 'Molfetta', 'River Vale', 'Ban Bang Krang', 'Cutrofiano', 'Muh Hasan', 'Warnes', 'Ottobeuren', 'Pendencias', 'Wood-Ridge', 'Guttikonda', "Saint John's", 'White Marsh', 'Vatomandry', 'Shrirampur', 'Ambahita', 'Olean', 'Hanno', 'Jask', 'Navelim', 'Camalaniugan', 'Boumahra Ahmed', 'Porto Real do Colegio', 'Geneseo', 'Risod', 'Rio Negro', 'Kendu Bay', 'Soro', 'Etawah', 'Khed Brahma', 'Fukuyoshi', 'Ikot Okoro', 'Marahra', 'Coimbra', 'Svitavy', 'Urdaneta', 'Takhatgarh', 'Khanjahanpur', 'Chatelet', 'Babile', 'Budrio', 'Samma', 'Konigsbach-Stein', 'Dobris', 'Rheine', 'Porkeri', 'Menzel Abderhaman', 'Jhonkar', 'Torrance', 'Ladan Kara', 'Lealman', 'Witzenhausen', 'Ribeira do Pombal', 'Madhopur', 'Villarrica', 'Makurdi', 'Pleasanton', 'Gullapuram', 'Aboisso', 'Gravatai', 'Chinnamanur', 'Longjia', 'Chinna Orampadu', 'Bangar', 'Summerlin South', 'Obiliq', 'Kurate', 'Wentzville', 'Vallet', 'Moalboal', 'Ambohidrapeto', 'Tecozautla', 'Passagem Franca', 'Armthorpe', 'Kuchai Kot', 'Lata', 'Kashasha', 'Auburndale', 'Ban Tom Klang', 'Shendurjana', 'Chikushino', 'Varandarapilli', 'Highlands', 'Harua', 'Namaacha', 'Presidente Dutra', 'Ipu', 'Bockhorn', 'Davangere', 'Saint-Michel-sur-Orge', 'Crowley', 'Tamganj', 'Aklera', 'Tiruppachur', 'Candelaria de La Frontera', 'Sidi Aoun', 'Verrettes', 'El Bolson', 'Itamaraju', 'Corbetta', 'Borger', 'Kodavatipudi', 'Poisy', 'Kakamigahara', 'Petite Riviere de Nippes', 'Hammanskraal', 'Tonami', 'Santa Maria Jalapa del Marques', 'Los Palacios', 'Rolim de Moura', 'Ambahatrazo', 'North Little Rock', 'Cheyyar', 'Masakkavundanchettipalaiyam', 'Moorhead', 'Khamir', "L'Assomption", 'Galliate', 'Bad Zwischenahn', 'Tiruvadi', 'Ferreira do Zezere', 'Brecht', 'Mampikony', 'La Pointe', 'Ulipuram', 'Rubizhne', "Sant'Angelo in Lizzola", 'Charkhari', 'Atascocita', 'Ternitz', 'Olfen', 'Hughenden', 'Getulina', 'Kewatgawan', 'Konza', 'Ocean', 'Koziatyn', 'Epanomi', 'Mattul', 'Meise', 'Hazrat Shiura', 'Plzen', 'Tokunoshima', 'Guelmim', 'Targu Neamt', 'Fugu', 'Petroupoli', 'Cueramaro', 'Karaman', 'Palmira', 'Rajasur', 'Calenzano', 'Puerto Cabello', 'Kaizu', 'Pavullo nel Frignano', 'Embalse', 'Pocatello', 'Milngavie', 'Cachoeira de Minas', 'Barra', 'Savanette', 'Surbo', 'Ronda Alta', 'Baghlia', 'Chanasma', 'Waterlooville', 'Stord', 'Kapiri Mposhi', 'Plombieres', 'Khutubi', 'Pobe', 'Trashi Yangtse', 'Cranston', 'Capinota', 'Itambe', 'Undi', 'South Lyon', 'St. George', 'Anlu', 'Anantavur', 'Lake Arrowhead', 'Kankuria', 'Mellacheruvu', 'Novy Bor', 'Palacaguina', 'La Corredoria', 'Somerset', 'Encrucijada', 'Rampur Tilak', 'Bay Point', 'Changhua', 'Ousseltia', "Finch'a'a", 'Ap Binh Thanh', 'Coonoor', 'Nachikatsuura', 'Shanhur', 'Proserpine', 'Melavayi', 'Celebration', 'Chalkari', 'Pedda Pendyala', 'Aravakkurichchi', 'Oranjemund', 'Langfang', 'Jinjiang', 'Ziracuaretiro', 'Misseni', 'El Ayote', 'Penamaluru', 'Kadinamkulam', 'Villa Hidalgo', 'Hallandale Beach', 'Periya Semur', 'Ifakara', 'Baucau', 'Cesa', 'Antiguo Cuscatlan', 'Barnstaple', 'Grottammare', 'Rottenburg an der Laaber', 'Befasy', 'Nusaybin', 'White Bear Lake', 'Joaquin V. Gonzalez', 'Dorval', 'Raghopur', 'La Garde', 'Laukaha', 'Chawalhati', 'Burnham', 'Padinska Skela', 'Bilehra', 'Rosemead', 'Grafton', 'Diemen', 'Centre de Flacq', 'Yomra', 'Attanur', 'Haspra', 'Liquica', 'Mala Vyska', 'Sanger', 'Ban Kat', 'Miami', 'Karmegh', 'Montmelo', 'Santa Cruz Verapaz', 'Sao Gabriel', 'Jose Marmol', 'Wallisellen', 'Izumiotsu', 'Mashhad Rizeh', 'Tiszafoldvar', 'Limoges', 'Kuzhippilli', 'Vergara', 'Bukavu', 'Awasa', 'Rusera', 'Baragoi', 'Coolidge', 'Karjan', 'Jagannathpur', 'Somireddipalle', 'Hamura', 'Domchanch', 'Salama', 'Charmahin', 'Ocotlan', 'Cajidiocan', 'Dembecha', 'Great Sankey', 'Katsepy', 'Casteel', 'Netapur Tanda', 'Vangviang', 'Lala Musa', 'Treia', 'Coon Rapids', 'Jaromer', 'Sociedad', 'Ganda', 'Belao', 'Una', 'Feijo', 'Halen', 'Courrieres', 'Rendsburg', 'Djangoa', 'Gotse Delchev', 'Pematangsiantar', 'City of Isabela', 'Traversetolo', 'Sahuria', 'Hardia', 'Bhagwanpur Desua', 'Karamay', 'Dammennu', 'Pedra Preta', 'Zakiyah', 'Wagner', 'Naka', 'Searcy', 'Pak Chong', 'Kreuzau', 'Ammur', 'Curimata', 'Oued Lill', 'Esplanada', 'Dagana', 'Villalba', 'Qorovulbozor', 'Mengdan', 'Kenosha', 'Lumbatan', 'Beyla', 'Billdal', 'Barao do Grajau', 'Antsatramidola', 'Moirang', 'Jalakati', 'Sakhipur', 'Tyler', 'Gornji Milanovac', 'Ootacamund', 'Luruaco', 'Cowra', 'Jenison', 'Dar`a', 'Guamare', 'Palatka', 'Brooklyn Center', 'Bondo', 'Paracuellos de Jarama', 'Troon', 'Passy', 'Laredo', 'Rubim', 'Brand-Erbisdorf', 'Pofadder', 'Itzehoe', 'Yuyao', 'Sao Carlos', 'Jiangdi', 'Avon', 'Mission Viejo', 'Mbaiki', 'Kakan', 'Aland', 'Marlborough', 'Mesetas', 'Shahrinav', 'Kourou', 'Kobilje', 'Kragujevac', 'Corigliano Calabro', 'Jalpan', 'Kongoussi', 'Sursand', 'Jiangjiadong', 'Ja`fariyeh', 'Fulham', 'Jhabrera', 'Tumberi', 'Masunga', 'Schellenberg', 'Binfield', 'Jose C. Paz', 'Padre Garcia', 'Tillor Khurd', 'Alta Gracia', 'Subotica', 'Zaandijk', 'Ochsenfurt', 'Cholpon-Ata', 'Veinticinco de Diciembre', 'Shuangtian', 'Madalum', 'Dianguirde', 'Pitangui', 'Patacamaya', 'Kosvik', 'Xiba', 'Debagram', 'Ban Na Kham', 'Sao Francisco do Conde', 'Stegen', 'Bulalacao', 'Pala Oua', 'Pak Kret', 'Mundahal Khurd', 'Befotaka', 'Gobindpura', 'Al Aaroui', 'Mwenga', 'Caturama', 'Pelengana', 'Kandel', 'Vila Real', 'Bechloul', 'Nykobing Falster', 'Mazabuka', 'Binka', 'Konongo', 'Wrzesnia', 'Vasiana', 'Khan Shaykhun', 'Tolna', 'Piedrahita', 'Wilsele', 'Ye', 'Acucena', 'Ames', 'Ventnor City', 'Elhovo', 'Bajiao', 'Torreon', 'Lana', 'Herrenberg', 'Pira', 'Pascagoula', 'Takeo', 'Nuevitas', 'Foothill Farms', 'Mogotes', 'Los Palacios y Villafranca', 'Tanuku', 'Oroszlany', 'Sanyo-Onoda', 'Eichenau', 'Sundapalaiyam', 'Kanoni', 'Dioungani', 'Evora', 'Funtua', 'Kawkareik', 'Coffs Harbour', 'Villiers', 'Caldiran', 'Palakkuzhi', 'Sassenburg', 'Oravita', 'Liaojiayuan', 'Nisshin', 'Sirkhandi Bhitha', 'Union de Tula', 'Chitila', 'Montecito', 'Bozuyuk', 'Sahiwal', 'Nidamangalam', 'Beixingzhuang', 'Alaminos', 'Kobiri', 'Drawsko Pomorskie', 'Manassas Park', 'Nu`ayjah', 'Ndjili', 'Hamidiyeh', 'Pleiku', 'Candon', 'Samokov', 'La Belleza', 'Kargil', 'Barugo', 'Leichlingen', 'Rizal', 'Chhabila', 'Sinanpasa', 'Kabudarahang', 'Yonghetun', 'Selden', 'Longchuan', 'Guanduqiao', 'Tinnanur', 'Mandsaur', 'Joso', 'Besarh', 'Decin', 'Kut Chap', 'Botolan', 'Grad', 'Dumaguete City', 'Mimoso do Sul', 'Tello', 'Iwate', 'Pordenone', 'Naihati', 'Painesville', 'Alatsinainy Ialamarina', 'Rinopolis', 'Mut', 'Shirahama', 'Port Alfred', 'Nantan', 'Catuipe', 'Isola Vicentina', 'Barahkurwa', 'Kladno', 'Biritiba-Mirim', 'Southwater', 'Kariya', 'Kearney', 'Botosani', 'Queen Creek', 'Tamanique', 'Putla Villa de Guerrero', 'Visbek', 'Apac', 'Grossbeeren', 'Chislehurst', 'Esposende', 'Winnipeg', 'Thermi', 'Mahires', 'Neshannock', 'Clearwater', 'Tallinn', 'Manegaon', 'Nabua', 'Baxter', 'Arlington Heights', 'Manafwa', 'Groenlo', 'Ngerengere', 'Lakato', 'Corner Brook', 'Palakodu', 'San Pietro Vernotico', 'Arouca', 'Bad Nenndorf', 'Ban Nong Hoi', 'Grand Island', 'Perris', 'Long My', 'Contai', 'Berlare', 'Hlinsko', 'Oum Hadjer', 'Kusatsu', 'Kamudi', "N'Zerekore", 'Minoo', 'Moulares', 'Taybad', 'Nayanagar', 'Barberena', 'Oromocto', 'Phirangipuram', 'Neosho', 'Leno', 'Fakirtaki', 'Noto', 'Phulpur', 'Karahal', 'Alur', 'Garcia Hernandez', 'Zawodzie', 'Gararu', 'Wallsend', 'Debre Birhan', 'Bargaon', 'Washougal', 'Wutong', 'Kajha', 'Tamilisan', 'Ambohimasina', 'Abha', 'Holguin', 'Epe', 'Owen Sound', 'Essen', 'Sermadevi', 'Naga', 'Kanjikkovil', 'Maracai', 'Kyongju', 'Oulad Friha', 'Camp Pendleton South', 'Govindapuram', 'Sligo', 'Cubal', 'Mamuju', 'Talca', 'Carcagente', 'Bagepalli', 'Liuba', 'Nao-Me-Toque', 'Punitaqui', 'Saint-Pierre-du-Perray', 'Manila', 'Alkmaar', 'Providence', 'Xinguara', 'Jangaon', 'Ibimirim', 'McAlester', 'Saalfeld', 'Palladam', 'Yacuanquer', 'Port Denison', 'Mama Khel', 'Stabroek', 'Lake Arbor', 'Killingworth', 'Ouled Chebel', 'Utnur', 'Kandangan', 'Soest', 'Tbeng Meanchey', 'Haoping', 'Sater', 'West Bromwich', 'Mandiraja Kulon', 'Karczew', 'Krems an der Donau', 'Osako', 'Budwan', 'Scalea', 'Gersthofen', 'Shiloh', 'Kopargo', 'Trotwood', 'Teltow', 'Annonay', 'Makwassie', 'Ouled Rached', 'Rankweil', 'Farmington Hills', 'Mbini', 'Horice', 'Jacqueville', 'Banbishancun', 'Tamanar', 'Konidena', 'Hengzhou', 'Liverpool', 'Marofototra', 'Cheruvannur', 'Iormughanlo', 'Tomar', 'Si Satchanalai', 'Guiseley', 'Bicske', 'Richland', 'Elizabeth', 'Linares', 'Narbonne', 'Agua Azul do Norte', 'Crateus', 'Luton', 'Bardstown', 'Amiawar', 'Dulhanganj', 'Senanga', 'Kobylka', 'Imlil', 'St. Catharines', 'Laziska Gorne', 'An Cabhan', 'Tarbes', 'Leutenbach', 'Frederiksvaerk', 'Sechura', 'Geldermalsen', 'Krishnarajasagara', 'Catarroja', 'Dugo Selo', 'Brieselang', 'Kosum Phisai', 'East Fishkill', 'Sendhwa', 'Ampasinambo', 'Urbano Santos', 'Myaydo', 'Premnagar', 'Andovoranto', 'Yaraka', 'Fuller Heights', 'Tarqui', 'Struer', 'Chintalapalle', 'Boali', 'Keswick', 'Deal', 'Tayasan', 'Sakha', 'Vandalur', 'Somma Vesuviana', 'Almaguer', 'Jacksonville Beach', 'Yuxi', 'Torhout', 'Buqkoosaar', "O'Hara", 'Dortmund', 'Freeport City', 'Poptun', 'Upper Grand Lagoon', 'Shendi', 'Cuautitlan Izcalli', 'Blagnac', 'Letpandan', 'Bhachhi', 'Dunavarsany', 'Lessogou', 'Elze', 'Clare', 'Harvey', 'Ain Feka', 'Wayne', 'Pohang', 'Pothia', 'Pokhraira', 'Pontalina', 'Amatura', 'Sao Joao das Lampas', 'Pinan', 'Sidi Lakhdar', 'Badami', 'Matadepera', "Sant'Agata di Militello", 'Laiyang', 'Selmana', 'Rothwell', 'Kamareddipet', 'Hobe Sound', 'Mong Tun', 'Armur', 'Islampur', 'Wadgassen', 'Selma', 'Momanpet', 'Aktepe', 'Mjolby', 'Patchogue', 'Nueva Loja', 'Kronjo', 'Turuvekere', 'Tejupilco', 'Vetraz-Monthoux', 'Rickmansworth', 'Cicuco', 'Chandigarh', 'Winslow', 'Amtala', 'Waingapu', 'Mayuge', 'Ruse', 'Sindangan', 'Xankandi', 'Al Qutayfah', 'Llorente', 'Naliya', "Az Zarqa'", 'Bomporto', 'Wadern', 'Cegled', 'Jalapa', 'Shizhaobi', 'Union de Reyes', 'Nahargarh', 'Massapequa Park', 'Kalvarija', 'Portes-les-Valence', 'Sidi ech Chahmi', 'Hope', 'Cumbal', 'Soalkuchi', 'Modling', 'Khujner', 'Biederitz', 'Laval', 'Mikkeli', 'Otopeni', 'Qashyr', 'Noisy-le-Grand', 'Titao', 'Koga', 'Tipasa', 'Bukoba', 'Pruszcz Gdanski', 'Tachiarai', 'Na Yung', 'Oignies', 'Fillmore', 'Tachilek', 'Lathrop', 'Binalbagan', 'Kallidaikurichi', 'Leova', 'Adazi', 'Caibarien', 'El Obeid', 'Pareo', 'Whitstable', 'Raymond', 'Centenario', 'Zhangziying', 'Satyavedu', 'Uspantan', 'Castelli', 'Zuitou', 'Chiradzulu', 'Tadif', 'Stains', 'Mathba', 'St. Clair', 'Beni Ounif', 'Aripuana', 'Cumana', 'Bhui', 'Jeddah', 'Settara', 'San Andres de Llevaneras', 'Fermo', 'Kinross', 'Sanjiaocheng', 'Serrita', 'Analapatsy', 'La Bruyere', 'Bac Giang', 'Manticao', 'Jaen', 'Uzumlu', 'Pauri', 'Fernandina Beach', 'Mullaittivu', 'Fairfield Glade', 'Dougouni', 'Shuangyashan', 'Ashford', 'Dalan', 'Longchamps', 'Berat', 'Mirangaba', 'Bhagta', 'Bodmin', 'Wittenbach', 'Capriolo', 'Plock', 'Petropolis', 'Nimmekal', 'Eisenberg', 'Minakshipuram', 'Fox Lake', 'Simunul', 'Zhangatas', 'Besikduzu', 'Diepenbeek', 'Castalla', 'Nakaseke', 'Daimiel', 'Aguascalientes', 'Bluefields', 'Weinheim', 'Khirhar', 'Vilkaviskis', 'Gorha', 'Condeixa-a-Nova', 'Shikokuchuo', 'Maghar', 'Khutha Baijnath', 'Kathmandu', 'Tornesch', 'Isernia', 'Koriapatti', 'Tharike', 'Hastings', 'Pindra', 'Eislingen', 'Bridgetown', 'Salt Lake City', 'Kashti', 'Hulyaypole', 'Sexmoan', 'Shrewsbury', 'Tha Yang', 'Waghai', 'Scone', 'Ritto', 'Sukrah', 'Strausberg', 'Kurhani', 'Nesarg', 'Ciudad Constitucion', 'Francavilla Fontana', 'Langenselbold', 'Soklogbo', 'Deinze', 'Zaanstad', 'Lafia', 'Prosperidad', 'Rumuruti', 'Savigliano', 'Hundested', 'Neuhausen auf den Fildern', 'Chitre', 'Sarrebourg', 'Ayanikkad', 'Dapoli', 'Centerton', 'Plettenberg', 'Skocjan', 'Oamishirasato', 'Penticton', 'Aurillac', 'Red Hook', 'Kiel', 'Mannadipattu', 'Ronchi dei Legionari', 'Az Zuwaytinah', 'Ulverston', 'Tholikuzhi', 'Phrai Bueng', 'St. Paul', 'Shenley Church End', 'Gourrama', 'Itiuba', 'Bena', 'Partinico', 'Fort Thomas', 'Marasesti', "Monte Sant'Angelo", 'Lubbecke', 'Grande-Synthe', 'Jincheng', 'Monteros', 'Randburg', 'Shakopee', 'Escuque', 'Bauko', 'Muscatine', 'Ralla', 'Usuppur', 'Iwaki', 'Compton', 'Isna', 'Suhbaatar', 'Igboho', 'San Rafael Cedros', 'Lapua', 'Zdzieszowice', 'Arcadia', 'Orotina', 'Bogande', 'Alpignano', 'Highland Park', 'Newberg', 'Veles', 'Scituate', 'Bellatti', 'Tempio Pausania', 'Buri Ram', 'Marquette', 'Niquinohomo', 'Cuddalore', 'Waimalu', 'Potiskum', 'Mohammedia', 'Bamiantong', 'La Trinitaria', 'Bekes', 'Cajabamba', 'Conyers', 'Malungun', 'Takehara', 'Aspen Hill', 'Suzuka', 'Ait Youssef Ou Ali', 'Klaipeda', 'Churriana de la Vega', 'Malpura', 'Romano di Lombardia', 'Jose de Freitas', 'Soc Trang', 'Brahmadesam', 'Chalons-en-Champagne', 'Guria', 'Talwandi Sabo', 'Makhmur', 'Podvelka', 'Satghara', 'Chimteppa', 'Yaypan', 'Kearsley', 'Qianwangcun', 'Suratgarh', 'Santa Helena de Goias', 'San Andres Itzapa', 'Zulakallu', 'Pinos Puente', 'El Carmen de Bolivar', 'Nahiyat al Iskandariyah', 'Kasterlee', 'Cabries', 'Dakhan', 'Senguio', 'Lachute', 'Bni Quolla', 'Indanan', 'Spennymoor', 'Pottireddippatti', 'Chaville', 'Goto', 'Somoto', 'Antaritarika', 'Chikkala', 'Maheswa', 'Schmallenberg', 'Itajuba', 'Udarband', 'Itanhaem', 'Sandiacre', 'Castlegar', 'Maler Kotla', 'Kuressaare', 'Huangxicun', 'Sand', 'Farrokh Shahr', 'Valandovo', 'Madakkathara', 'Kaurihar', 'Santa Lucia La Reforma', 'Gjirokaster', 'Indore', 'Thorigne-Fouillard', 'Fiorano Modenese', 'Drexel Hill', 'Ash Shamiyah', 'Baia Farta', 'Chakdaha', 'Dambulla', 'Muttukuru', 'Chiesanuova', 'Mehran', 'Metkovic', 'Portomaggiore', 'Spokane', 'Oldbury', 'Vilattikulam', 'Nasushiobara', 'San Martin de Loba', 'Campo Largo', 'Yenakiieve', 'Uropa', 'Kaleybar', 'Mit Salsil', 'Hedehusene', 'Gross-Enzersdorf', 'Germiston', 'Qal`eh Chan`an', 'Rocky Point', 'Cingoli', 'Garden Grove', 'Ouro Preto', 'Mead Valley', 'Liwonde', 'Kokologo', 'Monona', 'Funato', 'College Park', 'Tighedouine', 'Peringottukurusshi', 'Ankilimivory', 'Tabina', 'Bahagalpur', 'Huelva', 'Musashino', 'Igapora', 'San Antonio Huista', 'Cristais', 'Rajkot', 'Hasseh', 'Wyckoff', 'Rukungiri', 'Rio Pomba', 'Morteros', 'Camillus', 'Cholai', 'Alto Rio Doce', 'Imperatriz', 'Ross on Wye', 'Gairtganj', 'Sakubva', 'Porsgrunn', 'Oss', 'Dickson', 'Famailla', 'Napak', 'La Tebaida', 'Dhaka', 'Pulicat', 'Dayr Mawas', 'Rampur Kudarkatti', 'La Rochelle', 'Cowansville', 'Van Buren', 'Montevideo', "Sant'Antimo", 'IJmuiden', 'Urtaowul', 'Chtiba', 'Bavanat', 'Mooresville', 'Longbangcun', 'Marawi', 'Meppel', 'Ha Long', 'Matsubushi', 'Massakory', 'Jamsaut', 'Uzungoz', 'Phatthaya', 'Glew', 'Gavimane', 'Ionia', 'Hackney', 'Shanshan', 'Chilanga', 'Puerto El Triunfo', 'Egg Buckland', 'Ladner', 'Chapadinha', 'Mangualde', 'Aisai', 'Itamati', 'Kunkalagunta', 'Yucheng', 'Ozhur', 'Volochysk', 'Garliava', 'Ananindeua', 'Naugatuck', 'Toronto', 'Daoukro', 'Isperih', 'Sebaco', 'Apizaco', 'Curepto', 'Phimai', 'Melzo', 'Edd', 'Santo Agostinho', 'Chystiakove', 'Kobilo', 'Rafai', 'Potchefstroom', 'Ban Bang Sai', 'Makato', 'Mairinque', 'Suwalki', 'Vinh Yen', 'Odobesti', 'Rogerstone', 'Campos Belos', 'Nanguneri', 'Ban Mon Pin', 'Bududa', 'Devikapuram', 'Pietrasanta', 'Lienen', 'Ma`arrat an Nu`man', 'Jicome', 'Bockenem', 'Bossembele', 'Dongchuan', 'Tepetlan', 'Piddig', 'Cheektowaga', 'Vohipeno', 'Soubala', 'Xalapa', 'Peru', 'Middlesborough', 'Oral', 'Villa Celina', 'Bacacay', 'Gangapur', 'Loyola Heights', 'South Riding', 'Gurupi', 'Galapa', 'Bagor', 'Chitemo', 'Birnin Kebbi', 'Tagajo', 'Maychew', 'Worms', 'Tesanj', 'South El Monte', 'Kitcharao', 'Shorewood', 'Agen', 'Kargi', 'Oppicherla', 'Van', 'Port Augusta', 'Ahlen', 'Moudjbara', 'Ghaxaq', 'El Arrouch', 'Carson', 'Kitatajima', 'Kamp-Lintfort', 'Kachhari', 'Fort Myers', 'Imbituva', 'North Charleston', 'Unterfohring', 'Triangle', 'Tuntum', 'Dollard-des-Ormeaux', 'Eschweiler', 'Pomorie', 'Xewkija', 'Buenavista', 'Arsanjan', 'Demnat', 'Saharefo', 'Maragogi', 'Odivelas', 'Villaviciosa de Odon', 'Keszthely', 'Alcantarilla', 'Loutraki', 'Falou', 'Linfen', 'Azamnagar', 'Karvina', 'Shady Hills', 'Chopda', 'Vredefort', 'Westmont', 'Ulaanbaatar', 'Southbridge', 'North Druid Hills', 'Viseu', 'Le Thor', 'Tacaratu', 'Elur', 'Motupe', 'Trim', 'Chorzow', 'Kelle', 'Ljutomer', 'Kanbara', 'Maroviro', 'Zapotitlan', 'Forlimpopoli', 'Dhuusamarreeb', 'Bakwa-Kalonji', 'Bakeshiyingcun', 'Ngoulemakong', 'Nor Hachn', 'Ejmiatsin', 'Handlova', 'Pillaiyarkuppam', 'Miramar', 'Nanyo', 'Northampton', 'Heerenveen', 'Doiwala', 'Engenheiro Caldas', 'Undrajavaram', 'Mahudha', 'Ashtead', 'Kotagiri', 'Nieuw Amsterdam', 'Hamakita', 'Ipueiras', 'Mannargudi', 'Ramsey', 'La Mata', 'Strabane', 'Meaford', 'Truseni', 'Kanhangad', 'Matanga', 'Seohara', 'Hijuelas', 'Narvik', 'Alamada', 'Moslem Ebn-e `Aqil', 'Kabasalan', 'Mataram', 'Jagodina', 'Zifta', 'Jambughoda', 'Ledeberg', 'Melonguane', 'Okuizumo', 'Petion-Ville', 'Coweta', 'Chiniot', 'Ntara', 'Munuf', 'West Falls Church', 'Kanchanaburi', 'Nahiyat al Kifl', 'Aqsu', 'Ghaziabad', 'Katpadi', 'Midalam', 'Omegna', 'Noci', 'Taniyama-chuo', 'Patrocinio Paulista', 'Merseburg', 'Nashua', 'Lamut', 'Ankang', 'Ahor', 'Edakkunnam', 'South Shields', 'Liangwu', 'Sonbari', 'Shinjo', 'Patuvilayi', 'Ponte de Sor', 'Kawamata', 'Ulvila', 'Polomolok', 'Saila', 'Mahaiza', 'El Trebol', 'Cisterniga', 'Fossombrone', 'Nyala', 'Villa Gesell', 'Lapa', 'Nidamaluru', 'Peraiyur', 'Dodarasinakere', 'Pachino', 'Saint-Etienne-du-Rouvray', 'Gilching', 'Laurentian Valley', 'Garhshankar', 'Lambesc', 'Hanamaki Onsen', 'Seagoville', 'Rheda-Wiedenbruck', 'Sutton Coldfield', 'Safaja', 'Enns', 'Mutki', 'Villa de Leyva', 'Chauk', 'Aadorf', 'Lukow', 'Yuanquan', 'Muniz', 'Maxixe', 'Soquel', 'Hillcrest Heights', 'Mancheral', 'Featherstone', 'Chelak', 'Tondela', 'Hostomel', 'Alfonsine', 'Iskilip', 'Jaciara', 'Gidri', 'Tulare', 'Mushabani', 'Patharia', 'Coudekerque-Branche', 'Bastos', 'San Antonio Abad', 'Eagle Point', 'Fernando de la Mora', 'Mhajar', 'Longhua', 'Sevnica', 'Karian', 'Narsipatnam', 'Nuuk', 'Mount Juliet', 'Tome-Acu', 'Ferozepore', 'Sirakoro', 'Barra Velha', 'Zabok', 'Azandarian', 'Conewago', 'El Gara', 'Mamungan', 'Livorno', 'Ain Zaouia', 'Ilamatlan', "Anse d'Hainault", 'Voloina', 'Itaitinga', 'Odatturai', 'Vedaranniyam', 'Bretzfeld', 'Lom Sak', 'Yingyangcun', 'Manamodu', 'Cerqueira Cesar', 'Sorgun', 'Ban', 'Oyrarbakki', 'Mandaguacu', "Sered'", 'Ringkobing', 'Kalihati', 'Modi`in Makkabbim Re`ut', 'Malverne', 'Copenhagen', "L'Ancienne-Lorette", 'Dagenham', 'Marovandrika', 'Nossa Senhora dos Milagres', 'Sacueni', 'Santa Maria a Monte', 'Souq Jamaa Fdalate', 'Chinguar', 'Swiedbodzin', 'Mangalme', 'San Anselmo', 'Ban Patong', 'Bazar-e Yakawlang', 'Teresopolis', 'Baliangao', 'Hunsur', 'Idigny', 'Kose', 'Pedra Azul', 'Mount Vernon', 'Uzun', 'Hamadanak', 'As', 'Tounfit', 'Blumberg', 'Konstancin-Jeziorna', 'Ebino', 'Hull', 'Tajumulco', 'Castellaneta', 'Sidhapa', 'Slagelse', 'Dunbar', 'Nahariyya', 'Tsuyama', 'Selston', 'Brummen', 'Gigante', 'Caluquembe', 'Nirkunnam', 'Peabody', 'Braunstone', 'San Fernando de Henares', 'Sebt Bni Smith', 'Lakkampatti', 'Miami Gardens', 'Gluckstadt', 'Santa Maria de Palautordera', 'Bas Limbe', 'El Puerto de Santa Maria', 'Bundaberg', 'Giszowiec', 'Gweru', 'Belle-Anse', 'Bodupal', 'Inopacan', 'Pencoed', 'Zittau', 'Sialkot City', 'Obera', 'Naikankudi', "L'Aquila", 'Plan-les-Ouates', "Ch'ungmu", 'Corte Madera', 'Kaukauna', 'Tsararafa', 'Lembok', 'Altena', 'Gliwice', 'Bora', 'Hagen im Bremischen', 'Tsaramasoandro', 'Alto Rio Senguer', 'Inungur', 'Alor Setar', 'Al Hajar al Aswad', 'Kurunegala', 'Efringen-Kirchen', 'Little Egg Harbor', 'Itagi', 'Koppunur', 'Bemidji', 'Tantoyuca', 'Guiratinga', 'Barton', 'An Khe', 'Yamagata', "Lee's Summit", 'Nea Filadelfeia', 'Bagneux', 'Samtredia', 'Yingcheng', 'Shibata', 'Mount Pearl Park', 'Arendonk', 'Dhenkanal', 'Kapenguria', 'Saida', 'Gloucester', 'Bibai', 'Lebbeke', 'Dhusar Tikapatti', 'Rondon', 'Tamayo', 'Daksinkali', 'Plymouth', 'Scordia', 'Haywards Heath', 'Battle Ground', 'Fishers', 'Gonen', 'Xingren', 'Queenstown', 'Port Coquitlam', 'Raritan', 'Nagykovacsi', 'North Bend', 'Wilmot', 'Taal', 'Ruhengeri', 'Zhovkva', 'Sangtuda', 'Pont-Rouge', 'Urbandale', 'Reeuwijksebrug', 'Hamburg', 'Midsayap', 'Janai', 'Harrow on the Hill', 'Nakao', 'Kaoma', 'Verbania', 'Dancagan', 'Tecumseh', 'Luhansk', 'Elanjivaliseri', 'Dhariwal', 'Mahuva', 'Dabhaura', 'Pilisvorosvar', 'Ravne na Koroskem', 'As Suwayq', 'Semri', 'Siliana', 'Noria', 'Chabahar', 'Gimpo', 'Parempuyre', 'Tam Hiep', 'Bir', 'Miranda', 'Dhemaji', 'Menongue', 'Argao', 'Fuldatal', 'Gualeguaychu', 'Hoboken', 'Melgar', 'Bishunpura', 'Karabuk', 'Tirthahalli', 'Binmaley', 'Trajano de Morais', 'Villeneuve-Saint-Georges', 'Moribabougou', 'Al Mansurah', 'Palangavangudi', 'Nasugbu', 'Giugliano in Campania', 'Emsdetten', 'Satte', 'Prescot', 'Carles', 'Razkrizje', 'Djado', 'Wurzburg', 'Langhnaj', 'Kysucke Nove Mesto', 'Ar Rastan', 'Huehuetoca', 'Cerca la Source', 'Goris', 'Plon', 'Bad Lauterberg', 'Gasparillo', 'Sao Amaro das Brotas', 'Rangvasa', 'Kolokonde', 'Osmancik', 'Trofaiach', 'Seaford', 'Appley Bridge', 'Majadahonda', 'Sudley', 'Raman Mandi', 'Diyarbakir', 'Xiaoxita', 'Azacualpa', 'Mullassheri', 'Aruppukkottai', 'El Rosario', 'Puyo', 'Pomichna', 'Plaisance-du-Touch', 'Ghosrawan', 'Yuba City', 'Homs', 'Baunatal', 'Cape Coral', 'Umburanas', 'Kakegawa', 'Shahr-e Pir', 'Neiba', 'Thiers', 'Chorbog', 'Melton Mowbray', 'Ariyalur', 'Suarez', 'Manoel Vitorino', 'Sint-Katelijne-Waver', 'Nongzhangjie', 'Bayan', 'Ceduna', 'Grand Falls', 'Masallatah', 'Meppen', 'Bromley', 'Vanves', 'Novelda', 'Kaspi', 'Horseheads', 'Rath', 'Vadakarai Kil Pidagai', 'El Kansera', 'Tanmen', 'Henin-Beaumont', 'Fort Payne', 'Southlake', 'Kaeng Khoi', 'Broughton Astley', 'Oyamazaki', 'Rodolfo Sanchez Taboada', 'Devrukh', 'Kombai', 'Juvisy-sur-Orge', 'Aigua', 'Sandridge', 'Oum el Bouaghi', 'Marilandia', 'Pirbahora', 'Bauria', 'Matsushige', 'Ipatinga', 'Venkatagirikota', 'Hazel Crest', 'Codegua', 'North Castle', 'Talhar', 'Kirchberg', 'Achocalla', 'Apam', 'Cloverly', 'La Trinite', 'Zivinice', 'Thanh Hoa', 'Pocao de Pedras', 'Tiruvadanai', 'Koshu', 'Cuencame de Ceniceros', 'Tellar', 'Praia do Carvoeiro', 'Pursat', 'Cunupia', 'North Lakhimpur', 'Carlsbad', 'Breinigsville', 'Saint-Bruno-de-Montarville', 'Oro Valley', 'Msemrir', 'Papendrecht', 'Pedersore', 'Balasore', 'Cadale', 'Viera East', 'Kambila', 'Snaresbrook', 'Rockville', 'Culcheth', 'San Valentino Torio', 'Sharbaqty', 'Chainpura', 'Laligam', 'Huntsville', 'Pidigan', 'Hohenems', 'Rabinal', 'Campo do Brito', 'Les Lilas', 'Kudali', 'Binnaguri', "Land O' Lakes", 'Hatch End', 'Abiy Adi', 'Prijedor', 'Affton', 'Pilon', 'Boditi', 'Ferdows', 'Villa Dolores', 'Desenzano del Garda', 'Venaria Reale', 'Cuite', 'Nonthaburi', 'Lawrenceburg', 'Guasipati', 'Tottenham', 'Utica', 'Al Basrah', 'Gorukle', 'Caranavi', 'Douglass', 'Havirov', 'Sukheke Mandi', 'Audubon', 'Puerto del Rosario', 'Chilkuru', 'Burco', 'La Mornaghia', 'Amatitan', 'Songhuajiangcun', 'San Antonio de las Vueltas', 'Azadshahr', 'Bislig', 'Rampura', 'Ban Klang', 'Granadero Baigorria', 'Usuda', 'Montego Bay', 'Chornomorsk', 'Saint-Lys', 'Sabana Grande de Palenque', 'Xiluodu', 'Gigaquit', 'Carrollwood', 'Kigali', 'Lyman', 'Sao Luis', 'Patakakani', 'Mar de Ajo', 'Inverell', 'Neuville-en-Ferrain', 'Molde', 'Petrel', 'Indian Harbour Beach', 'Gahi Mammar', 'Alanya', 'Shazand', 'Leso', 'Auray', 'Panauti', 'Ovada', 'Tokoname', 'Junnar', 'Chahar Dangeh', 'Monte Aprazivel', 'Maida Babhangawan', 'Matias Romero', 'Nasu', 'Quata', 'Barracao', 'Dila', 'Piatykhatky', 'Sikeston', 'Luzzi', 'Diabougou', 'Wiesbaden', 'Chumphon', 'Cervia', 'South Dundas', 'Halfmoon', 'Jardim de Piranhas', 'Carol Stream', 'Coronel Du Graty', "L'Isle-sur-la-Sorgue", 'Kapolei', 'Bad Windsheim', 'Hitchin', 'Almeirim', 'Kurakhove', 'Mataro', 'Khao Yoi', 'Shihezi', 'Itaiopolis', 'Iskandar', 'Pinamungahan', 'Highland City', "Ping'an", 'Khe Sanh', 'Konigs Wusterhausen', 'Kottampatti', 'Tshabong', 'Gastonia', 'Fuensalida', 'Cabot', 'Celldomolk', 'Sakiai', 'Pirmasens', 'Bostonia', 'Ngathainggyaung', 'Nattappettai', 'Oulad Hamdane', 'Campo Belo', 'Portsmouth', 'Birmitrapur', 'La Fare-les-Oliviers', 'Biberach', 'Kadoli', 'Sremski Karlovci', 'Leon Valley', 'La Huacana', 'Cambados', 'Ban Bang Non', 'Ranai', 'Orumiyeh', 'Moyuta', 'Hansot', 'Pasewalk', 'Ukunda', 'Guarne', 'Bethal', 'Schwabach', 'Laventille', 'North Reading', 'Hithadhoo', 'Shubrakhit', 'Overath', 'Karikad', 'Nakatsu', 'Pancevo', 'Hamirpur', 'Pilar de la Horadada', 'Akdepe', 'Phillaur', 'Itaquaquecetuba', 'Bridport', 'Marotaolana', 'Kowary', 'Ibaan', 'Derazhnia', 'Baishan', 'Strzelce Krajenskie', 'Ampere', 'West Nipissing / Nipissing Ouest', 'Dashiqiao', 'Iguaraci', 'Minglanilla', 'Andalgala', 'Dili', 'Galena Park', 'Cinar', 'Akropong', 'Aston', 'Ire', 'Vrhnika', 'Randfontein', 'Rorvik', 'Sugbongkogon', 'Trancas', 'Dusti', 'Pernes-les-Fontaines', 'Gamharia', 'Rozdilna', 'Harchoune', 'Cherakara', 'Valantaravai', 'Todmorden', 'Rehau', 'Draguignan', 'Clifton', 'Murud', 'Jalna', 'Chhatarpur', 'Palavur', 'Balangiga', 'Canete', 'Stoneham-et-Tewkesbury', 'Puttanattam', 'Lisen', 'Carrefour', 'Qostanay', 'Triggiano', 'Chingleput', 'Sonupur', 'Spodnje Hoce', 'Oftersheim', 'Chom Thong', 'Nagod', 'Ecoporanga', 'Cowley', 'Schiltigheim', 'San Andres del Rabanedo', 'Velilla de San Antonio', 'Bad Neustadt', 'Timbo', 'Muskogee', 'Enid', 'Batouri', 'Kragero', 'Mirano', 'Alangayam', 'Cianorte', 'Crystal Lake', 'Sebt Gzoula', 'Kyazanga', 'Lendava', 'Bad Durrheim', 'Jiangguanchi', 'Gonzalez Catan', 'La Roche-sur-Foron', 'Itapiranga', 'Aktuluk', 'Uslar', 'Luis Correia', 'Anjahamana', 'Dracevo', 'Pavlikeni', 'Salvatierra de Mino', 'El Karimia', 'Heilsbronn', 'Honaz', 'Multai', 'Zarghun Shahr', 'Jesenice', 'Catanauan', 'Haaksbergen', 'Tiruvasaladi', 'Karvarakundu', 'Malay', 'Tamura', 'Loma Plata', 'Hamilton Square', 'Dewal Thal', 'Partur', 'Koilakh', 'Great Dunmow', 'Sinj', 'Bekily', 'Hercules', 'Dunstable', 'Kosgi', 'Xinsi', 'Tomboco', 'Southwick', 'Sangamner', 'Mossoro', 'Khon Kaen', 'Dhamnagar', 'Denain', 'Berezivka', 'Humaita', 'Malhipur', 'San Luis de Since', 'Martin', 'Rajapudi', 'Yucaipa', 'Ortigueira', 'City of Paranaque', 'Kabira', 'Kalandy', 'Uwajima', 'Fantino', 'San Juan Atitan', 'Kennesaw', 'Prattville', 'Showa', 'Hakmana', 'Maastricht', 'Pesqueria', 'Ban Ngao', 'Amparafaravola', 'Bhagwatpur', 'Landshut', 'Justo Daract', 'San Vicente', 'Hall in Tirol', 'Avare', 'Binxian', 'Llaillay', 'Hoskote', 'La Massana', 'Tucker', 'Pacasmayo', 'Ambato', 'Apiuna', 'Shaxi', 'Mpigi', 'Evere', 'Sakaki', 'Sampona', 'Puerto Leguizamo', 'Santiago del Teide', 'Wudalianchi', 'Blieskastel', 'Rutesheim', 'Phan Ri Cua', 'Itahri', 'Chilibre', 'Schalksmuhle', 'Askale', 'Vite', 'Ibipora', 'Haugesund', 'Valdemorillo', 'Iringal', 'Tariba', 'Hays', 'Annur', 'Mariental', 'Cieza', 'Kirkland', 'Nohar', 'Shiso', 'Yoshinogawa', 'La Motte-Servolex', 'Sippola', 'Bonthe', 'Rijswijk', 'Pontinia', 'Aarhus', 'Kitaotao', 'Rreshen', 'Mahinawan', 'Manrar', 'Pattanapuram', 'Baidoa', 'Zhongxiang', 'La Colonia Tovar', 'Amelia', 'Fuveau', 'Aguas Formosas', 'Trikonavattam', 'Chewara', 'Bhado Khara', 'Novyi Rozdil', 'Palepalli', 'Ellamanda', 'San Martin Totolan', 'Edinet', 'Weihai', 'Granadilla de Abona', 'Sulphur Springs', 'Astoria', 'Seyitgazi', 'Kingsteignton', 'Weybridge', 'Plumtree', 'Brugnera', 'Stony Point', 'Shikrapur', 'Canguaretama', 'Pedreira', 'Kot Kapura', 'Banyuwangi', 'Sueca', 'Afzala', 'Kenilworth', 'Stuhr', 'Riverbank', 'Jdour', 'Al Minshah', 'Warren', 'Kalundborg', 'Farroupilha', 'Walsrode', 'Ilheus', 'Heroica Ciudad de Tlaxiaco', 'Smoline', 'Cueto', 'Pingtang', 'Khorugh', 'Engandiyur', 'Canlaon', 'El Reno', 'Gadwal', 'Villa Verde', 'Pulluru', 'Talatona', 'Badepalli', 'Oltiariq', 'Aizumisato', 'Antsoso', 'Jianganj', 'Mullurkara', 'Taouloukoult', 'Chambray-les-Tours', 'Tabarre', 'Bamumkumbit', 'Maquela do Zombo', 'Widnau', 'Kurihara', 'Shumen', 'Kosatarosh', 'Villa Vasquez', 'Nampicuan', 'Waasmunster', 'Wilmington', 'Silao', 'Geseke', 'Jamsa', 'Forres', 'Tukh', 'Aversa', 'Sanpetru', 'Terra de Areia', 'Samahuta', 'North Kingstown', 'Chiplun', 'Sztum', 'Neustadt an der Donau', 'Alajuelita', 'Peyziwat', 'San Carlos Sija', "Ji'an Shi", 'Ciudad de Allende', 'Carmo do Paranaiba', 'Dakar', 'Paulista', "Rignano sull'Arno", 'Papenburg', 'Imanombo', 'Cabangan', 'Miedzychod', 'Guanabacoa', 'Djemmal', 'Alpedrete', 'Alcacer do Sal', 'Barhagarh', 'Nong Ki', 'Kot Shamir', 'Yugawara', 'Polangui', 'Villa Ygatimi', 'Mathura', 'Morley', 'Gokak', 'Red Hill', 'Bad Durrenberg', 'Yeghegnadzor', 'Muroran', 'Fair Lawn', 'Urai', 'Caiaponia', 'Chimbarongo', 'Buabidi', 'Erba', 'Elandsdoorn', 'Mongar', 'Basankusu', 'Longji', 'Sucun', 'Kakata', 'Altus', 'Elizabethtown', 'Kamlapur', 'Birkenau', 'San Mateo Atenco', 'Viqueque', 'Dibaya', 'Sao Benedito', 'Daphne', 'Irving', 'Randazzo', 'Cameron', 'Gasa', 'Salyan', 'Magadi', 'Pingzhen', 'Kibiito', 'Katherine', 'Sebekoro', 'Itamaraca', 'Changling', 'Tunduru', 'River Ridge', 'El Adjiba', 'Pardiguda', 'Guaranesia', 'Subaykhan', 'Dar Naim', 'Fulda', 'Meiti', 'Choybalsan', 'Granollers', 'Al Qays', 'Moldava nad Bodvou', 'El Molar', 'Bayt Saham', 'Coruripe', 'Leawood', "Kunp'o", 'Aracati', 'Soaserana', 'Ambatofotsy', 'Linganore', 'Rotterdam', 'Kumano', 'Shoeburyness', 'Palayan City', 'Oak Island', 'Saylac', 'Darwa', 'Heidelberg', 'Puerto Maldonado', 'Szazhalombatta', 'Fatwa', 'Kolattupuzha', 'Mahinog', 'Villa Rumipal', 'Somnaha', 'Veauche', 'Baheri', 'Sangzishi', 'Moser', 'Cologne', 'Rypin', 'Salinas da Margarida', 'Carini', 'Gloria do Goita', 'Tekes', 'Northport', 'Seven Oaks', 'Nemmara', 'Consolacion del Sur', 'El Calafate', 'Foumbot', 'Magnolia', 'Carangola', 'Caracas', 'Vallahbhapuram', 'Al `Aqiq', 'Sincan', 'Bebra', 'Malkapur', 'Valongo', 'Flawil', 'Piprahi', 'Cheney', 'Tarar', 'Gode', 'Tangkak', 'La Canada Flintridge', 'Schwanewede', 'Thomasville', 'Wolossebougou', 'Zunilito', 'Aristobulo del Valle', 'Staden', 'Bakhri', 'Chatillon', 'Puerto Varas', 'El Tarf', 'Goianinha', 'Bimgal', 'Abu Qurqas', "Aghbalou n'Kerdous", 'Tayakou', 'Johnson City', 'Stjordalshalsen', 'Kot Mumin', 'Orbassano', 'Carius', 'Dikwella South', 'Gallipoli', 'Wschowa', 'Sundarnagar', 'Gustrow', 'Pindai', 'Mit Ghamr', 'Xinyaoshang', 'Torbeck', 'Marrakech', 'Xiushuicun', 'Valasa', 'Tuttlingen', 'Avesta', 'Highgate', 'Osian', 'Kanniyambram', 'Malinao', 'Xingsha', 'Oltenita', 'Capiata', 'Malanville', 'Hunters Creek', 'Balwa', 'Muli', 'Lotte', 'Hikkaduwa', 'Pelahiivka', 'Chilapa de Alvarez', 'Farafenni', 'Tanbaichicho', 'Saks', 'Calilabad', 'Salvaterra de Magos', 'Chittayankottai', 'Popondetta', 'Synelnykove', 'Flying Fish Cove', 'Gikongoro', 'Baraki', 'Karebilachi', 'El Hachimia', 'Highland Heights', 'Chilecito', 'Kawashiri', 'Scotch Plains', 'Adi Keyh', 'Cheung Chau', 'Therwil', 'Tiruvalanjuli', 'Bagrami', 'San Juan Lalana', 'Nodinge-Nol', 'Gonzalez', 'Moshi', 'Zhangjiajie', 'Bafilo', 'Presidente Prudente', 'Sassenberg', 'Kot Addu', 'Nicholasville', 'Magitang', 'Glauchau', 'Bendrahalli', 'Mislinja', 'Nysa', 'Tomaz pri Ormozu', 'Tassin-la-Demi-Lune', 'Schubelbach', 'Juban', 'Epinay-sur-Orge', 'West Bradford', 'Genk', 'Miyakonojo', 'Eastpointe', 'Quispamsis', 'Gadda Madiral', 'Nouadhibou', 'Neuss', 'Yanagawa', 'Kirano', 'Sheldon', 'Florange', 'Albert', 'Vilcun', 'Mannheim', 'Qahramon', 'Joaquim Tavora', 'Amravati', 'Small Heath', 'Ancon', 'San Juan Chamelco', 'Neunkirchen', 'Timargara', 'Poole', 'Tenkodogo', 'Jidigunta', 'Agoncillo', 'Diosd', 'Kondaparti', 'Darauli', 'San Giuliano Milanese', 'Farkhana', 'Upernavik', 'Isehara', 'Ginsheim-Gustavsburg', "Port Saint John's", 'Shekhupura', 'Malvinas Argentinas', 'Mirbat', 'Kishangarh', 'Ibicui', 'Tangalla', 'Napa', 'Tavares', 'Koro', 'Edasseri', 'Negotin', 'Barcs', 'Keshwari', 'Depew', 'Kottaya', 'Khair Khan', 'Toviklin', 'New Port Richey East', 'Tarrega', 'Mariel', 'Krabi', 'Rinconada de Malloa', 'Felida', 'Sidi Ifni', 'Tobre', 'Yorba Linda', 'Casper', 'Hartswater', 'Guanambi', 'Winthrop', 'Marcacao', 'Bridlington', 'Feicheng', 'Pinhalzinho', 'Dubai', 'Periyakulam', 'Tranent', 'Hammam Sousse', 'Ampasimbe', 'Santa Mariana', 'San Antonio de Ibarra', 'Charlton Kings', 'Tecoanapa', 'Kamwenge', 'Arai', 'Jocotepec', 'Kiziltepe', 'Hadali', 'Bahadurpur', 'Thoen', 'Nakhon Phanom', 'Nayoro', 'Baramati', 'Lamas', 'Krupanj', "Ning'an", 'Monfalcone', 'Hitachi-Naka', 'Lower Burrell', 'Formoso do Araguaia', 'Vadigenhalli', 'Bansbaria', 'Tupanatinga', 'Dome', 'Niles', 'Siquirres', 'Boddikurapadu', 'Bryant', 'Serebryansk', "'Ain el Hammam", 'Aldo Bonzi', 'Gajhara', 'Tokyo', 'Karakthal', 'Cabildo', 'Buon Trap', 'Liushui', 'Echternach', 'Vagur', 'Bicester', 'Rheden', 'Asaka', 'Ulladulla', 'Titara', 'Haya', 'Francisville', 'Robat Karim', 'Shidong', 'Bhadohi', 'Otsuki', 'Barnaon', 'Valavanur', 'Kyenjojo', 'Mara Rosa', 'Markgroningen', 'Planegg', 'Cajvana', 'Tatabanya', 'Andemaka', 'Nazilli', 'Leusden', 'Kanada', 'Dasol', 'Al Wakrah', 'Lucan', 'Jelilyuzi', 'Puerto Quito', 'Cran-Gevrier', 'Damous', 'Gunungsitoli', 'Minamikarasuyama', 'Sandton', 'San Antonio del Tequendama', 'Paiporta', 'Guying', 'Fontenay-le-Fleury', 'Denov', 'Bulambuli', 'Yulu', 'Cajamar', 'Brvenica', 'Ocozocoautla de Espinosa', 'Panapur Langa', 'Memmingen', 'Ban Non Sombun', 'Oegstgeest', 'Aigio', 'Bhanuvalli', 'Rankovce', 'Bagchini', 'Voitsberg', 'Ban Nong Kathao', 'Canby', 'Cosne sur Loire', 'Nilothi', 'Ma`arratmisrin', 'Lyndon', 'Gurgentepe', 'Adjahome', 'Andippatti', 'West Manheim', 'Brunsbuttel', 'Grave', 'Zapotlan de Juarez', 'Yakkabog`', 'Tan Phuoc Khanh', 'Tinley Park', 'Baligaon', 'Bell Ville', 'Gwagwalada', 'Pie de Pato', 'Mount Fletcher', 'Waris Aliganj', 'Queens', 'Tamana', 'Craibas', 'Caivano', 'Grants Pass', 'Citrus Hills', 'Elko', 'El Abiodh Sidi Cheikh', 'Pathardi', 'Acatlan', 'Westwood', 'Cibinong', 'Dayr `Atiyah', 'Ja`ar', 'Archdale', 'Seguin', 'Vif', 'San Vicente de Canete', 'Limbe', 'Calumet City', 'Ain el Hadid', 'Sint-Andries', 'Skhirate', 'Assi-Ben Okba', 'San Ildefonso', 'Saint-Raphael', 'Dire Dawa', 'Koropi', 'Murata', 'Mongu', 'Villa Maria', 'Taqah', 'I-n-Amguel', 'Moloacan', 'Milford Haven', 'Campo Alegre', 'Atlantic Beach', 'Kalikapur', 'Fuefuki', 'Centralina', 'Sarafand', 'Chuangjian', 'Kheralu', 'Sulecin', 'Kirchhain', 'Coffeyville', 'Uji', 'Westport', 'Compostela', 'Stratford-upon-Avon', 'Tougue', 'Neves', 'Satiro Dias', 'Numbrecht', 'Galle', 'Ilion', 'Dasarhalli', 'Flat Rock', 'Old Bridge', 'Barbacena', 'Essa', 'Peddakurapadu', 'Chervonopartyzansk', 'Perth East', 'Margosatubig', 'Bossangoa', 'Ngorkou', 'Frankenthal', 'Reguengos de Monsaraz', 'Roverbella', 'Annecy-le-Vieux', 'Woodmoor', 'Imizucho', 'Villanueva de la Serena', 'Kinalur', 'Depok', 'Ahrensburg', 'Kingman', 'Magu', 'Guiping', 'Kopavogur', 'Hunasamaranhalli', 'Tora', 'Malvar', 'Lentini', 'Pont-du-Chateau', 'Laghouat', 'Jima Abajo', 'Garbahaarrey', 'Samsikapuram', 'Vinanitelo', 'Nong Wua So', 'Jawad', 'Tirunageswaram', 'Koure', 'Villabate', 'Rezekne', 'Nenton', 'Canovanas', 'Los Banos', "Berre-l'Etang", 'Al Qunfudhah', 'Buenopolis', 'Le Plessis-Robinson', 'Bilar', 'San Antonio de Los Altos', 'Elblag', 'Masamba', 'Czeladz', 'Tmassah', 'San Vicente del Raspeig', 'Tejutla', 'Albano Laziale', 'Sibkund', 'Goio-Ere', 'Kanoya', 'Dahibhat Madhopur', 'Santa Maria Ajoloapan', 'Srebrenica', 'Bacoor', 'Novopavlovka', 'Marcory', 'Argos', 'Ivandrika', 'Iqaluit', 'Kondazhi', 'Kos', 'Newhaven', 'Bakwa', 'Mirante da Serra', 'Limbach-Oberfrohna', 'Asadabad', 'Damulog', 'Bishops Stortford', 'Jaguaribara', 'Vadasikarambattu', 'Kairaki', 'Koidu-Bulma', 'Alto-Cuilo', 'Mondoro', 'Buon Ho', 'Laibin', 'Prymorskyi', 'Berane', 'Wulongpu', 'Bernalillo', 'Koturkolkara', 'Bellerive-sur-Allier', 'Jambe', 'Kerwada', 'Kadoma', 'Antanamalaza', 'Yildiz', 'Kamrawan', "Kan'onjicho", 'Sekiyado', 'Tartarugalzinho', 'Kudra', 'Mauban', 'El Tambo', 'Taquarana', 'Winneba', 'Damghan', 'Balikumbat', 'Ulundi', 'Salo', 'Nyakosoba', 'Jeffersontown', 'Lievin', 'Santeramo in Colle', 'Casale Monferrato', 'Palma del Rio', 'Yizhu', 'Ankafina Tsarafidy', 'Nisarpur', 'Tadpatri', 'Peebles', 'Nawalgarh', 'Ulliyil', 'Paittur', 'Chhajli', 'Concepcion Batres', 'Nkpor', 'DuPont', 'Burgum', 'Kaposvar', 'Labangan', 'Santany', 'Appingedam', 'Barton upon Irwell', 'Bagli', 'Hatta', 'Leeds and the Thousand Islands', 'Catende', 'Friedeburg', 'Udaipura', 'Aranzazu', 'Umga', 'Vize', 'Coriano', 'Janjgir', 'Brwinow', 'Teays Valley', 'Ban Kaeng', 'Huebampo', 'Oadby', 'Hovsan', 'Torre-Pacheco', 'Kolluru', 'Araban', 'Ishiki', 'Daganzo de Arriba', 'Jagannadapuram', 'Quinhamel', 'Nsukka', 'Midwest City', 'Alcala', 'Payson', 'Kita Chauhattar', 'Sao Luis Gonzaga do Maranhao', 'Quezon', 'Mani', 'Khammam', 'Belo Tsiribihina', 'Carmaux', 'Idar', 'Denison', 'Lucera', 'Buda', 'Ankofa', 'San Miguel', 'Isaka', 'Netherton', 'Conil de la Frontera', 'Gorazde', 'Manullahpatti', 'Preah Vihear', 'Coello', 'Sopo', 'Valadares', 'Suruc', 'Ros Comain', 'Alto Hospicio', 'Bargas', "Anzola dell'Emilia", 'Eichenzell', 'Lucelia', 'Betsiaka', 'Sidi Bibi', 'Thale', 'South Stormont', 'Kaladgi', 'Alpen', 'Zumbagua', 'Encruzilhada do Sul', 'Megrine', 'Panorama', 'Chitrada', 'Adamantina', 'Harsum', 'Unguturu', 'San Juan Capistrano', 'Klasterec nad Ohri', 'Fernley', 'Aldenhoven', 'Pien', 'Cansancao', 'Kuje', 'Valenca do Piaui', 'Fraga', 'Carleton Place', 'Castle Bromwich', 'Leichi', 'Armacao dos Buzios', 'Valenton', 'Myawadi', 'Leandro N. Alem', 'Merlo', 'Lugazi', 'Foster City', 'Shurugwi', 'Labe', 'Ruwa', 'Inongo', 'Coto Brus', 'Tiruchengodu', 'Dharmasagaram', 'Ommen', 'Khajawa', 'Urandi', 'Fayroz Koh', 'Bajpe', 'Epalinges', 'Longreach', 'Omdurman', 'Campos Altos', 'East Lampeter', 'Podilsk', 'Bemaharivo', 'Al Ahmadi', 'Vallapuram', 'Sirvel', 'Gaurdah', 'Cottica', 'Izmail', 'Orangeburg', 'Crystal Beach', 'Toumoukro', 'Chittarkottal', 'Klipphausen', 'Donji Vakuf', 'Tazah Khurmatu', 'Laur', 'Raiyam', "Saint-Jean-d'Illac", 'Dubrovnik', 'Mikkabi', 'Masasi', 'Artvin', 'Maisaram', 'Ostwald', 'Wuzhou', 'San Bartolome Milpas Altas', 'Zhunan', 'Pine', 'Susner', 'Sibalom', 'Ormoc', 'Dimataling', 'Maroantsetra', 'Trevignano', 'La Presa', 'North Dumfries', 'Munguia', 'Mangalam', 'Kraainem', 'Bathgate', 'Russell', 'Oliveira do Bairro', 'Vettur', 'Basavana Bagevadi', 'Ub', 'Yepocapa', 'Spearfish', 'Stassfurt', 'Irvine', 'Bratislava', 'Coatbridge', 'Hudli', 'Beni Amrane', 'Mansourah', 'Addanki', 'Volnovakha', 'Lagoa Real', 'Salua', 'Talata-Vohimena', 'Kyotera', 'Rovato', 'Vilankulo', 'Nanterre', 'Timahdit', 'Blandford Forum', 'Bombardopolis', 'eManzimtoti', 'Garges-les-Gonesse', 'Araceli', 'Weslaco', 'Catembe', 'Ta Khmau', 'Zempoala', 'Terracina', 'Sint-Amandsberg', 'Monte Santo de Minas', 'Umm Salal `Ali', 'Mudbidri', 'Dores do Indaia', 'Osilnica', 'Eger', 'Chi Linh', 'El Omaria', 'Itapui', 'Ban Bueng', 'Balad', 'Blooming Grove', 'Kivertsi', 'Bacabal', 'Fujin', 'Combs-la-Ville', 'Aschheim', 'Kettering', 'Chinggil', 'Nangandao', 'Avalepalli', 'London', 'Aguachica', 'Leidschendam', 'Sannicandro Garganico', 'Cambrils', 'Tenenkou', 'Lincoln City', 'San Gennaro Vesuviano', 'Dumaran', 'Yuki', 'Llanes', 'Ksar Hellal', 'Clanton', 'Khomeyn', 'Petrovaradin', 'Whitecourt', 'San Sebastian Salitrillo', 'Steubenville', 'Santiago Atitlan', 'Cecil', 'Barnegat', 'Alto Garcas', 'Xinfeng', 'Innsbruck', 'Santa Margarita de Mombuy', 'Goya', 'Inisa', 'Temascalapa', 'Ouake', 'Ambararatabe', 'Las Delicias', 'Zawal', 'Longkoucun', "Fil'akovo", 'Concepcion del Uruguay', 'Seixal', 'Empedrado', 'Meram', 'Matagob', 'Boufatis', 'Juanjui', 'Butha-Buthe', 'Mutterstadt', 'Koulikoro', 'Narhat', 'Hateg', 'Americana', 'Copacabana', 'Lajkovac', 'Beasain', 'Miyoshi', 'Mooka', 'Zhuangwei', 'Dobeln', 'Brest', 'Hartlepool', 'Jajpur', 'Moncagua', 'Tarnow', 'Holt', 'Hancheng', 'Ciudad Guzman', 'Boudouaou el Bahri', 'Suhiya', 'Oteapan', 'Zhoujia', 'Hohhot', 'Bulle', 'Helensburgh', 'San Jose Villanueva', 'Lethem', 'Landen', 'Gonaives', 'Mahopac', 'Tuyen Quang', 'Shaowu', 'Celbridge', 'Artsyz', 'Heshan', 'Boutilimit', 'Bure', 'Jangid', 'Doberlug-Kirchhain', 'Kawara', 'Meldola', 'Xinglong', 'Ntungamo', 'Shubra al Khaymah', 'Eldama Ravine', 'Sukuta', 'Ain Tedeles', 'Bara', 'Barei', 'Caballococha', 'Locorotondo', 'Pine Creek', 'Mechanicsville', 'Baxdo', 'Mysliborz', 'Androtsy', 'Newcastle under Lyme', 'San Jose de Chiquitos', 'Forchheim', 'Sabana Iglesia', 'Saint-Max', 'Cordenons', 'Dent', 'Verdejante', 'Enkoping', 'Paradise Valley', 'Arnaud', 'Janapul', 'Sintra', 'Misserete', 'Lapinig', 'Singhbari', 'Librazhd', 'Amberg', 'Pires do Rio', 'Kenora', 'Satosho', 'Anosivelo', 'Coos Bay', 'Pathum Thani', 'Curchorem', 'Bergambacht', 'Dahbed', 'Haslingden', 'Shahin Dezh', 'Faridabad', 'Patera', 'Saint-Cyr-sur-Loire', 'Ilhota', 'Chianki', 'Garcia', 'Meine', 'Kalamazoo', 'Spitalfields', 'Maralal', 'Aruvapalam', 'Pedro Juan Caballero', 'Antanifotsy', 'Suhl', 'Hancha', 'Sansepolcro', 'Villa Martin Colchak', 'Ksar Chellala', 'Voznesensk', 'Requinoa', 'Vemalwada', 'Athieme', 'Neuburg', 'Tikamgarh', 'Mazidagi', 'Florence-Graham', 'Ikeja', 'Dinara', 'Ampanihy', 'Woodinville', 'El Palomar', 'Shafinagar', 'Jandrapeta', 'Finglas', 'Pikesville', 'Sidney', 'Tiffin', 'Belvedere Park', 'Bad Breisig', 'Afega', 'Canilla', 'Hisarcik', 'Hertford', 'Chichkah', 'Santa Rosa', 'Palampur', 'Sanmu', 'Apuiares', 'Courtenay', 'Aiuaba', 'Hidirbey', 'Chikuma', 'Hiddenhausen', 'Shikohabad', 'Chilliwack', 'Tagalft', 'Chelmno', 'Colfontaine', 'Upper Dublin', 'Ungheni', 'Estiva', 'Sveta Ana', 'Watsa', 'Gilbert', 'At Turrah', 'Payariq Shahri', 'Ibipeba', 'San Juan de los Lagos', 'Barcelona', 'Ambalamanasy II', 'Homnabad', 'Bad Freienwalde', 'Chandauli', 'Comitancillo', 'Bad Konig', "Sint-Job-in-'t-Goor", 'Templin', "D'Iberville", 'Adelanto', 'Tepeapulco', 'Moreni', 'Blois', 'Moberly', 'Encruzilhada', 'Tokol', 'Kaneohe', 'Macomia', 'Tlalpujahua de Rayon', 'Kaglipur', 'Baiceng', 'Muisne', 'Fareham', 'Polistena', 'Razampeta', 'Ipojuca', 'Sandrakatsy', 'Dagiapara', 'Madhurapur', 'Zhuangyuan', 'Shkoder', 'Hillsboro', 'Vidin', 'Quatipuru', 'Dubrajpur', 'Capenda Camulemba', 'Jaisinghpur', 'Sonneberg', 'Gonesse', 'Toging am Inn', 'Laranjal Paulista', 'Tucuru', 'Arcueil', 'Francisco I. Madero', 'Tarumizu', 'Estrela de Alagoas', 'Charleston', 'Cowes', 'Sessa Aurunca', 'Fougeres', "Gricignano d'Aversa", 'New Germany', 'Paraparaumu', 'Pleasant Prairie', 'Alfena', 'Payakumbuh', 'Ramree', 'Maromme', 'Kovvur', 'Derince', 'Caledon', 'Jieshang', 'Navan', 'Kumbo', 'Santa Rita do Passa Quatro', 'Ridgefield', 'Kllokot', 'Niramaruthur', 'Despujols', 'Narammala', 'Oroqen Zizhiqi', 'San Luis del Palmar', 'Mallawi', 'Polzela', 'Troy', 'El Refugio', 'Marau', 'Thaton', 'Gerede', 'Lakhanapuram', 'Chinchina', 'Duenas', 'Devizes', 'Amarillo', 'Dakor', 'Brotas', 'North Auburn', 'Soamanonga', 'Villa Alemana', 'Sathamba', 'Kanazawa', 'Namayumba', 'Ngozi', 'Mavalli', 'Maxhutte-Haidhof', 'Ratnapuram', 'Midoun', 'Gostyn', 'Olopa', 'Fugangcun', 'Perungala', 'Mount Washington', 'Tekirdag', 'Cerro Azul', 'Lahra Muhabbat', 'Ontario', 'Santa Maria Chiquimula', 'North Hempstead', 'Arma', 'Awash', 'Felanitx', 'Olmaliq', 'Torremolinos', 'Gbawe', 'Sivrihisar', 'Preussisch Oldendorf', 'Herrin', 'Vellalur', 'Doboj', 'Guaramirim', 'Bafoulabe', 'Nolensville', 'Acomb', 'Balaungi', 'San Juan Ixcaquixtla', 'Cadereyta', 'Tarra', 'Ponta do Sol', 'Ikaruga', 'Kaboua', 'Inagawa', 'Jacaltenango', 'Nankana Sahib', 'Ciudad Melchor de Mencos', 'Braine-le-Chateau', 'Gerzat', 'Abaiara', 'Maricopa', 'Saint Neots', 'Sao Goncalo do Sapucai', 'Myebon', 'Markapur', 'Nova Zagora', 'Gombong', 'Gambela', 'Magsingal', 'Brezovica', 'Greifswald', 'Saint-Nazaire', 'Poinciana', 'Hombori', 'Tsivory', 'Weiz', 'Jaguaripe', 'Kibaya', 'Wallasey', 'Nangal Township', 'Chongzuo', 'Takamori', 'Pajo', 'Daboh', 'Jisr ez Zarqa', 'Tizi Rached', 'Nangal Chaudhri', 'North Huntingdon', 'Balilihan', 'San Josecito', 'Steinfeld', 'Velika Gorica', 'New Cassel', 'Kyaukme', 'Kafr Batna', 'Tepetzintla', 'Takaharu', 'Valdepenas', 'Old Harbour', 'Siruguppa', 'Arasur', 'Grajewo', 'Aci Catena', 'Itau de Minas', 'Zundert', 'Djakotome', 'Toulou', 'Sahar', 'Opfikon', 'Damanhur', 'Yengema', 'Subic', 'Kolambugan', 'Taougrite', 'Eriyodu', 'Kiskunmajsa', 'Germasogeia', 'Peresecina', 'Yoshikawa', "Coeur d'Alene", 'Mobaye', 'Borda da Mata', 'Vecses', 'Kui Buri', 'Newport Beach', 'Palo del Colle', 'Ripoll', 'Galsi', 'Aldama', 'La Grita', 'Paese', 'Sambir', 'Chapelle', 'Canteras', 'Vega Alta', 'Bhachau', 'Nanfengcun', 'Nelson', 'Chikuzen', 'Burrillville', 'Nambour', 'Sanganakallu', 'Punch', 'Borehamwood', 'Bagan Si Api-api', 'Tiogollo', 'Pratteln', 'Coleford', 'Huajing', 'Amoucha', 'Kannivadi', 'Winfield', 'Kimilili', 'Bugganipalle', 'Mont-Dore', 'Chaukhata', 'Long Lama', 'Taysan', 'Caico', 'Sakardih', 'Owando', 'Foleshill', 'Katlehong', 'Falkoping', 'Vellithiruthi', 'Pancas', 'Rojales', 'Juazeiro', 'Sluis', 'Matala', 'Banikoara', 'Ilicakoy', 'Montalban', 'La Algaba', 'Makeni', 'Mangrol', 'Shirdi', 'Kamphaeng Phet', 'Prelouc', 'Podujeve', 'Redencao', 'Isaka-Ivondro', 'Pollensa', 'Eemnes', 'Kogon Shahri', 'Asarhi', 'Bang Khla', 'San Andres de Sotavento', 'Shertallai', 'Fenglu', 'El Doncello', 'Prachatice', 'Biel/Bienne', 'Tsabit', 'Mirante do Paranapanema', 'Kotoura', 'Port St. Lucie', 'Minamichita', 'Arapiraca', 'Balcarce', 'Scionzier', 'Kharv-e Sofla', 'Redan', 'Rolandia', 'Turrialba', 'Tsaravary', 'Sayda', 'Banepa', 'Soller', 'Kaith', 'Mahdasht', 'Sandhausen', 'Redditch', 'Bemiss', 'Callaway', 'Rioblanco', 'Maesteg', 'Saronno', 'Fulgazi', 'Revuca', 'Lysychansk', 'Mandan', 'Wangjiazhai', 'Capela de Santana', 'Toukoto', 'Vwawa', 'Dublin', 'Newark upon Trent', 'Zacoalco de Torres', 'Pustomyty', 'Sangi', 'Zymohiria', 'Clinton', 'Nenjiang', 'Nova Ubirata', 'Cosautlan', 'Kibuye', 'Darfield', 'Cornaredo', 'Ciudad Choluteca', 'Tvoroyri', 'Giussano', 'Manambur', 'Benfreha', 'Medarametla', 'Meridianville', 'Al Qa`idah', 'Treinta y Tres', 'Nagar Nahusa', 'Kedainiai', 'Kursenai', 'Shunan', 'Dzolokpuita', 'Lyon', 'Kleve', 'Sadon', 'Fort Pierce', 'Sanski Most', 'Zedelgem', 'Paracho de Verduzco', 'Hattem', 'Cantel', 'Miary-Taheza', 'Maintirano', 'Denham Springs', 'Ap Khanh Hoa', 'Kaler', 'Le Pradet', 'Uxbridge', 'Cugnaux', 'Altdorf', 'Oberhausen', 'Caacupe', 'Moyo', 'Muchun', 'Roquevaire', 'Tubod', 'Bacolor', 'Nijgaon Paranpur', 'Sao Pedro da Aldeia', 'Adrian', 'Remedios de Escalada', 'Pehowa', 'Orlando', "Mosciano Sant'Angelo", 'Kasempa', 'Tomigusuku', 'Naic', 'Blanes', 'Iguacu', 'Ben Arous', 'Ben', 'Westborough', 'Parkent', 'Nokaneng', 'Goianesia', 'Raniwara Kalan', 'Lambersart', 'Handewitt', 'Beni Haoua', 'Pargas', 'Temascalcingo', 'Abano Terme', 'Petmanhalli', 'Karis', 'Dezhou', 'Guigue', 'Melitopol', 'Tajerouine', 'Malaimarpuram', 'Asti', 'Hanimciftligi', 'Isua', 'Enkesen', 'Birtouta', 'Relizane', 'Igrapiuna', 'Belle Chasse', 'Calubian', 'Ararat', 'Caucete', 'Hybla Valley', 'Wipperfurth', 'Bom Jesus da Lapa', 'North Salt Lake', 'Dayan', 'Zouar', 'Parang', 'Mandi Bahauddin', 'Rovereto', 'Jinta', 'Araua', 'Eugenopolis', 'Chota Mollakhali', 'Nabha', 'Mit Nama', 'Baia Mare', 'Katridih', 'Iwamuro-onsen', 'Sao Miguel do Iguacu', 'Suesca', 'Al Mudaybi', 'Nauen', 'Nansio', 'Nada', 'Camaligan', 'Catandica', 'Ostrov', 'Wargal', 'Simarbani', 'Santa Venera', 'Bad Gandersheim', 'Kumbadaje', 'Samrong', 'Higashiomi', 'Chandragiri', 'General Enrique Mosconi', 'Seregno', 'La Florida', 'Envigado', 'Landerneau', 'Sao Pedro da Agua Branca', 'Orion', 'Mudgere', 'Kentwood', 'Hirono', 'Kongen', 'Swamimalai', 'Tiruchendur', 'Piploda', 'Cajamarca', 'Upata', 'Lobau', 'Les Pavillons-sous-Bois', 'Karungu', 'Araruama', 'Fraser', 'Queven', 'Kibingo', 'Senden', 'Zarbdor Shaharchasi', 'Goner', 'West Athens', 'Butembo', 'Vadakku Valliyur', 'Huasco', 'Brandsen', 'Halikko', 'Sabinanigo', 'Niagara Falls', 'Sarikei', 'Okegawa', 'Reichenbach/Vogtland', 'Triuggio', 'Villa Hayes', 'Tiruttangal', 'Hasuda', 'Marpingen', 'Toms River', 'Belalcazar', 'Gembloux', 'Shuizhai', 'Ihumwa', 'Zacualpan de Amilpas', 'Doddappanayakkanur', 'Covina', 'Brikama Ba', 'Yerkoy', 'Essenbach', 'Xiushui', 'Obuase', 'Riga', 'Pokhara', 'Somoniyon', 'Itogon', 'Zelzate', 'Valence', 'Bourges', 'Samthar', 'Elavalli', 'Ibitinga', 'Chembrassheri', 'Pilao Arcado', 'Carignan', 'Sipilou', 'Colonia Nicolich', 'Valbom', 'Naogaon', 'Decan', 'Nellore', 'Makilala', 'Borgne', 'Santa Clarita', 'Vadso', 'Nandura Buzurg', 'Polavaram', 'Bike', 'Lapinlahti', 'Cambanugoy', 'Umri', 'Kribi', 'Dogachi', 'Kara-Kulja', 'Karibib', 'La Salvetat-Saint-Gilles', 'Sidi Ben Adda', 'Tinpiple', 'Santa Perpetua de Moguda', 'Dushanove', 'Plewiska', 'Filderstadt', 'Yerres', 'Verkhivtseve', 'Lop Buri', 'Saldana', 'Gujo', 'Dodge City', 'Belari', 'Minano', 'Hamira', 'Keles', 'Bichena', 'Lagoa da Prata', 'Ormskirk', 'Heide', 'Borj Hammoud', 'Zhuzhou', 'Heilbronn', 'Geertruidenberg', 'Salumbar', 'Kings Park', 'Neralakaje', 'Ciudad Apodaca', 'Amircan', 'Samalkha', 'Hvannasund', 'Lier', 'Skadovsk', 'Nong Khai', 'Soderhamn', 'Aghbala', 'Ban Wisit', 'Ahram', 'Dianopolis', 'North College Hill', 'Barrechid', 'Elandakuttai', 'Yatagan', 'Macae', 'Fukayacho', 'Camaqua', 'Ypacarai', 'Santa Paula', 'Manjo', 'Sarmada', 'La Seyne-sur-Mer', 'Heze', 'Auch', 'Nuvem', 'Lowicz', 'Erdokertes', 'Kothia', 'Bassenge', 'Bagh-e Malek', 'New Port Richey', 'Soisy-sous-Montmorency', 'Toluprpatti', 'Niimi', 'Huaniu', 'Marratxi', 'Schwerte', 'Kontich', 'Jaynagar', 'Saint-Fargeau', 'Tamm', 'Ain Sefra', 'Glassboro', 'Pitoa', 'Hohenhameln', 'Babati', 'Madnur', 'Chicopee', 'Tanza', 'Burke Centre', 'Guntur', 'Arvayheer', 'Ban Pa Hung', 'Santa Margarita', 'Manihari', 'Lamorlaye', 'Mengen', 'Pullach im Isartal', 'Orleaes', 'Patuakhali', 'Kharan', 'Berriozar', 'Arad', 'Bra', 'Bucimas', 'Lindas', 'Datca', 'Vlotho', 'Zabalj', 'San Jose del Cabo', 'Aflao', 'Joliette', 'Mamidipalli', 'Ganguvarpatti', 'Sitamarhi', 'Hockessin', 'Piamonte', 'Arroyohondo', 'Alapli', 'Conflans-Sainte-Honorine', 'San Lawrenz', 'Cangas de Narcea', 'Apaseo el Alto', 'Castelfranco Emilia', 'Boucan Carre', 'Mongat', 'Sutton', 'Koungheul', 'Capodrise', 'Manhumirim', 'Mohnyin', 'San Francisco de Macoris', 'Veraval', 'Sint-Martens-Lennik', 'Ban Bo Haeo', 'Colinas', 'Ajaigarh', 'Arroyito', 'Gravelines', 'Little Lever', 'Sayama', 'Vitoria do Mearim', 'Ariana', 'Thatcham', 'Chateaudun', 'Laranja da Terra', 'Sidi Azzouz', 'Lokoja', 'Chamgardan', 'Worcester Park', 'Bagnolo Mella', 'Dewas', 'Safo', 'Villanueva y Geltru', 'Shangxiao', 'Ayabe', 'Neo Psychiko', 'Appenzell', 'Pototan', 'Carlos Casares', 'Vandoeuvre-les-Nancy', 'Blagoevgrad', 'Amirli', 'Jalalpur Jattan', 'Bilis Qooqaani', 'Hemsbach', 'Woodway', 'Stow', 'Fukuyama', 'Geilenkirchen', 'Villa Isabela', 'Mosjoen', 'High Wycombe', 'Yumbel', 'Ampasimazava', 'Mulbagal', 'Payao', 'New Garden', 'Newburn', "Ak'ordat", 'Northborough', 'Pueblo Bello', 'Prijepolje', "Espigao D'Oeste", 'Horstel', 'Seropedica', 'Terrytown', 'Kinnelon', 'Menggala', 'Atimonan', 'Grand Baie', 'Dodoni', 'Engenheiro Coelho', 'Santa Croce Camerina', 'Mambajao', 'Fateh Nangal', 'Karimnagar', 'Oliveira dos Brejinhos', 'Neykkarappatti', 'Hooglede', 'Malacatan', 'Gbadolite', 'Bhai Rupa', 'Khallikot', 'Uiwang', 'Kamianka', 'Omuta', 'Okagaki', 'Stepanavan', 'Kisujszallas', 'Digar', 'Glenwood', 'Siqba', 'Olsztyn', 'Zargar', 'Leamington', 'Seymour', 'Ban Mae Ngon Khilek', 'Wyomissing', 'Anyang', 'Paranacity', 'Paderno Dugnano', 'Haselunne', 'Kampong Thum', 'Paldorak', 'Brandermill', 'Wunsiedel', 'Ribeirao do Largo', 'Shirud', 'Melong', 'Romeoville', 'Palma Soriano', 'Filadelfia', 'Taiyuan', 'Medicina', 'Kara', 'Perunturuttu', "Sant'Eufemia Lamezia", 'Villa Canales', 'Boankra', 'Dildarnagar', 'Arraijan', 'Sonzacate', 'Manghit', 'La Valette-du-Var', 'Loon op Zand', 'Mutis', 'Ewa Gentry', 'Congonhal', 'Tirua', 'Firuzoba', 'Mushin', 'Okazaki', 'Thornton', 'Torbali', 'Rumburk', 'Sgamna', 'Bel-Air', 'Cassia', 'Oliveira de Azemeis', 'Sangrur', 'Thongwa', 'Cunco', 'Bonn', 'Kalanadu', 'Magalia', 'Ficarazzi', 'Bemarivo', 'Hadim', 'Forster', 'Mahis', 'Axixa', 'Putignano', 'Claye-Souilly', 'Jaguariuna', 'Kilifi', 'Santo Cristo', 'Gandlapenta', 'Langenhagen', 'Orillia', 'Tshilenge', 'Donggou', 'West Lincoln', 'Victoria', 'Hoh Ereg', 'Queensbury', 'Snovsk', 'Dabrowa Gornicza', 'Rosenberg', 'Nayanakulam', 'Kreminna', 'Bissora', 'Brezno', 'Xireg', 'Wittlich', 'Kanyana', 'Ohrdruf', 'Libagon', 'Fot', 'Ban Ratchakrut', 'Xinnongcun', 'Sotik', 'Hindarx', 'Engelskirchen', 'Marcos', 'San Miguel Acatan', 'Mariano Escobedo', 'Capdepera', 'Camarate', 'Corinth', 'Mejicanos', 'Sirohi', 'Takhli', 'Hodonin', 'Dabat', 'Humansdorp', 'Doncaster', 'Ballitoville', 'Combita', 'Beilngries', 'Soanierana Ivongo', 'Mella', 'Poti', 'Dongola', 'Deptford', 'Azaourisse', 'Itabirinha de Mantena', 'Saluzzo', 'Dumbravita', 'Cacongo', 'Mechelen', 'Sablan', 'Doaba', 'Borre', 'Benghazi', 'Vasilevo', 'Nyuzen', 'Thames Centre', 'Chavara Gramam', 'Nivelles', 'Fairmont', 'Villa Luzuriaga', 'Carletonville', 'Varash', 'Montemurlo', 'Mukocho', 'Sa Kaeo', 'Gonegandla', 'Bebandem', 'Kulp', 'Plainsboro', 'Pedro Leopoldo', 'Mokolo', 'Mesolongi', 'Formosa do Rio Preto', 'Vellmar', 'Yoko', 'Sax', 'Sirvar', 'Boadilla del Monte', 'Ziama Mansouria', 'Brasov', 'Remedios', 'Rita', 'Thane', 'Fukusaki', 'Pontypridd', 'Beysehir', 'Uchquduq Shahri', 'Feilding', 'Silves', 'Galvan', 'Great Linford', 'Zimna Voda', 'Hirado', 'Edinburgh of the Seven Seas', 'Vellodu', 'Prayagraj', 'Clermont-Ferrand', 'La Crosse', 'San Cristobal Verapaz', 'Apollo Beach', 'Cerete', 'Leskovac', 'Poltava', 'Anan', 'Al Fujayrah', 'Leramatang', 'Guastatoya', 'Fort Wellington', 'Erumad', 'Genet', 'Jondor Shaharchasi', 'Pudong', 'Dip', 'Bystrc', 'Gakuch', 'Napoleon', 'Mathigeri', 'Hathapur', 'Kinh Mon', 'Eidsberg', 'Quincy-sous-Senart', 'Yardimli', 'San Juan Cancuc', 'Kolbuszowa', 'Conegliano', 'Eski Ikan', 'Hildburghausen', 'Tirubhuvane', 'Nikaido-kaminoshocho', 'Simbahan', 'Havixbeck', 'Para', 'Blomberg', 'Tionk Essil', 'Clearview', 'Imassogo', 'Whittlesey', 'Panchgani', 'Gumani', 'Kaikkudi', 'Amatepec', 'Klotze', 'Kohtla-Jarve', 'Vilanova del Cami', 'Belison', 'Kakinada', 'Monte Alto', 'Walldorf', 'Alangalang', 'Carson City', 'Moises Padilla', 'Ad Dir`iyah', 'Hlatikulu', 'Vigna di Valle', 'Mosta', 'Eastern Goleta Valley', 'Ballenger Creek', 'Tsukubamirai', 'Metu', 'Modachchur', 'Sukabumi', 'San Nicolas de los Arroyos', 'Umingan', 'Tinnevelly', 'San Juan Opico', 'San Vicente de Chucuri', 'Jacunda', 'Kaura Namoda', 'Lodi', 'Nakhon Sawan', 'Dunwoody', 'Katahra', 'Zhonghechang', 'Troyes', 'Goalundo Ghat', 'Saadatpur Aguani', 'Ji-Parana', 'Baroy', 'Scoresbysund', 'Santurce-Antiguo', 'Gagny', 'La Libertad', 'Beypore', 'Puerto Cumarebo', 'Rasol', 'Kamalia', 'Singhana', 'Vancouver', 'Angouleme', 'Cacapava', 'Laurinburg', 'Ilhavo', 'Rangamati', 'Leogane', 'Ilkeston', 'Luganville', 'Somerville', 'Biot', 'Nanyang', 'Sidi Yahya Zaer', 'Saltangara', 'Ban Doi Suthep', 'San Lorenzo de Esmeraldas', 'Srinagar', 'Alberton', 'Khenchela', 'Belcista', 'Chiramanangad', 'Ordu', 'Channubanda', 'Niandjila', 'Tellicherry', 'Steenokkerzeel', 'Conceicao do Rio Verde', 'Kusapin', 'Sanjat', 'Nanxicun', 'Cedros', 'Soanenga', 'La Orotava', 'Pasvalys', 'Verdal', 'Romans-sur-Isere', 'Katangi', 'Zhob', 'Frosinone', 'Plan-de-Cuques', 'Bree', 'Sveti Nikole', 'Ciudad Tecun Uman', 'Helsinge', 'Velez', 'Balcova', 'Beni Yakhlef', 'Arumuganeri', 'Condado', 'Xiangkhoang', 'Parsons', 'Rouissat', 'Cetraro', 'Ozieri', "Say'un", 'San Luis Jilotepeque', 'Vynohradiv', 'Nampula', 'Kunsan', 'Hounde', 'Robertson', 'Oarai', 'Sigus', 'Golborne', 'Richmond Heights', 'Gateshead', 'Zhenbeibu', 'Chesham', 'Canals', 'Bakau', 'Belas', 'Hurricane', 'Oyo', 'Ciudad Piar', 'Palmerston', 'Chik Ballapur', 'Bressuire', 'Mountain Park', 'Pantelimon', 'Gooik', 'Flitwick', 'Kulattur', 'Plumstead', 'Verzej', 'Kapasiawan', 'Olds', 'Nidgundi', 'Santo Antonio', 'Dimitrovgrad', 'Oued Zenati', 'Kahramanmaras', 'Chengtangcun', 'Gonikoppal', 'Skidaway Island', 'Moriguchi', 'Devonport', 'Samamea', 'Tsimafana', 'Lakeside', 'San Matias', 'Visconde do Rio Branco', 'Rivarolo Canavese', 'Normal', 'Bahharet Oulad Ayyad', 'Hurzuf', 'Allendale', 'Saint-Charles-Borromee', 'Netivot', 'Tunasan', 'Mazagao', 'Czarnkow', 'Ubatuba', 'Baco', 'Aralik', 'Rosamond', 'Masindi', 'Vargem da Roca', 'Mbeya', 'Byram', 'Court-Saint-Etienne', 'Karath', 'Sidcup', 'Anage', 'Midway', 'Mountain Ash', 'Agouna', 'Iakora', 'Adendorf', 'Warni', 'Nakhon Nayok', 'G`ozg`on', 'Achacachi', 'Soamanova', 'Sao Mamede de Infesta', 'Brumadinho', 'Kosamba', 'Sabuncu', 'Alicante', 'Youganning', 'Neviges', 'Jaro', 'Juangriego', 'Meshkin Dasht', 'Veinticinco de Mayo', 'Buyan', 'Khirbat Ghazalah', 'Dabola', 'Bundehra', 'Bierbeek', 'Gherla', 'Tammampatti', 'Avranches', 'Dubbo', 'Southport', 'Ban Om Noi', 'Koumra', 'Bridgeview', 'Madipakkam', 'Fiche', 'Gura Humorului', 'Satsumasendai', 'Nautan Dube', 'Ngaputaw', 'Erlin', 'Segorbe', 'Khandela', 'Bikrampur Bande', 'Diavata', 'Taurage', 'Tula de Allende', 'Minato', 'Tumba', "Trostyanets'", 'La Matanza de Acentejo', 'Sathmalpur', 'Coycoyan de las Flores', 'Sierra Madre', 'Masjed Soleyman', 'Yokoshibahikari', 'Trezzano sul Naviglio', 'Leon', 'Xinpu', 'Karacabey', 'Graaff-Reinet', 'Maputo', 'Bargoed', 'Ndali', 'Vettikkavala', 'Bat Yam', 'Boxmeer', 'Anao', 'Riccione Marina', 'Marilao', "Saint-Martin-d'Heres", 'Badamdar', 'Shaoguan', 'Asker', 'Benedito Novo', 'Az Zubayr', 'Nemyriv', 'Madanpur', 'Hakha', 'Bumba', 'Fushe Kosove', 'Shahrisabz', 'Clondalkin', 'Delvada', 'Waldwick', 'Pemagatshel', 'Tubingen', 'Ejido', 'Awankh', 'Shyorongi', 'Houghton le Spring', 'Albert Lea', 'Lazdijai', 'Sanando', 'Galugah', 'Nagcarlan', 'Huong Thuy', 'Nettuno', 'Baalbek', 'Kunnamkulam', 'Oiso', 'Barra do Sul', 'Bishunpur Hakimabad', 'North Adams', 'Shamsa', 'Madhuban', 'Tirumala', 'Ganthier', 'Guben', 'Lakamane', 'Preakness', 'Caraguatatuba', 'Ferrol', 'Partibanur', 'Venkatadripalem', 'Aquidaba', 'Sedico', 'Baixo Guandu', 'Navadwip', 'Zgharta', 'Morro Bay', 'Miramichi', 'Jose Batlle y Ordonez', 'Zuwarah', 'Espinal', 'Brcko', 'Cham', 'Kika', 'Helixi', 'Pillaro', 'Lam Tin', 'Trzebiatow', 'Nidda', 'Paulo Lopes', "Santa Barbara d'Oeste", 'San Pedro de Ycuamandiyu', 'Wenchang', 'Hovd', 'Scunthorpe', 'Maspalomas', 'Oskemen', 'Raibhir', 'Palmview', 'Herrsching am Ammersee', 'Frederikssund', 'Yachimata', 'Rosario de la Frontera', 'Sinimbu', 'Valdagno', 'Roseto degli Abruzzi', 'Sidi Allal Tazi', 'Gantt', 'Pulimel', 'Ambatomainty', 'Kumaripur', 'Haldwani', 'Zwiesel', 'Nanqiaotou', 'Jovellar', 'Mizhou', 'Asafabad', 'Kielczow', 'Ciying', 'Erice', "Palmeira d'Oeste", 'Comiso', 'Colac', 'St. Peters', 'Gutersloh', 'Chililabombwe', 'Joure', 'Axixa do Tocantins', 'Celeken', 'Khvansar', 'Shepperton', 'Baghdad', 'Darwin', 'Pasaco', 'Diamond Harbour', 'Tiruvallur', 'Hackettstown', 'Santo Antonio da Platina', 'General Eugenio A. Garay', 'St. Helens', 'Hollister', 'Patra', 'Casalmaggiore', 'Villarrubia de los Ojos', 'Brejo da Madre de Deus', 'Attapu', 'Bambous', 'Neuenhof', 'Massape', 'Hurlingham', 'Longxing', 'Xinjun', 'Yaofeng', 'Ipubi', 'Sarikaya', 'Pambadi', 'Qinggang', 'Pirai do Norte', 'Tha Mai', 'Scranton', 'Dang`ara', 'Murliganj', 'Villa Sola de Vega', 'Diedorf', 'Panchi', 'Pagadian', 'Unna', 'Dongyuancun', 'Joacaba', 'San Andres de Giles', 'Niigata', 'Bihat', 'Torqabeh', 'Rio do Fogo', 'Yogyakarta', 'Candido Sales', 'Barangka', 'Paramati', 'Imito', 'Sugar Land', 'Kyaliwajjala', 'Silappadi', 'Golfito', 'Kilcock', 'Moinesti', 'Gujrat', 'Keevallur', 'Culasi', 'Inanantonana', 'Sidi Ghiles', 'Cacolo', 'Nierstein', 'Guia Lopes da Laguna', 'Takieta', 'Lumaco', 'Gatteo', 'Weehawken', 'Logan', 'Olimpia', 'Sidi Zouine', 'Pardanjan', 'Celle', 'Santa Brigida', 'Kifisia', 'Leimen', 'Denizciler', 'Berezhany', 'Rangkasbitung', 'Pagidyala', 'Infanta', 'Hackensack', 'Nakhon Pathom', 'Tamsaout', 'Towson', 'San Teodoro', 'Mahmuda', 'Port-Gentil', 'Federal Way', 'Kondapalle', 'Missao Velha', 'Castres', "Ash Shuhada'", 'Almusafes', 'Limbiate', 'Jiguani', 'Amherstburg', 'Ryde', 'Nazare Paulista', 'Lqoliaa', 'Balua', 'Pathrajolhania', 'Ain Karma', 'Ekangar Sarai', 'Dunhua', 'Cherupulassheri', 'Vutukuru', 'Cesario Lange', 'Coevorden', 'Xiayang', 'Converse', 'Shivpuri', 'Lido di Iesolo', 'Capao do Leao', 'Yangliuqing', 'Kanata', 'Vientiane', 'Darhan', 'Pola', 'Bardai', 'Taskopru', 'Marcq-en-Baroeul', 'Chestnuthill', 'Tonse West', 'Ibirapitanga', 'Carmo de Minas', 'Nirala', 'Gardner', 'Chorozinho', 'Novo Selo', 'Koilkuntla', 'Ixmiquilpan', 'Pinole', 'Kirkby', 'Rokytne', 'Haivoron', 'Charana', 'Shreveport', 'Dunkerque', 'Businga', 'El Salvador', 'Teustepe', 'Begowal', 'Al `Ayn', 'Xiaguanying', 'Mhow', 'Vittorio Veneto', 'Savignano sul Rubicone', 'Bitola', 'Chomutov', 'Natividad', 'Pont-a-Mousson', 'Ballincollig', 'Ballymoney', 'Peixe', 'As Saff', 'San Antonio de Areco', 'Ibi', 'Hathwan', 'Esperance', 'Wangjiabian', 'Pokrovsk', 'Davao', 'Anniston', 'Rodental', 'Maple Ridge', 'Esneux', 'Ahwa', 'Monticello Conte Otto', 'Royal Leamington Spa', 'Carei', 'Real', 'Burjasot', 'Pingdingshan', 'Wumayingcun', 'Asane', 'Souto Soares', 'Arvada', 'Palu', 'Narasapur', 'Ivry-sur-Seine', "Saint-Ouen-l'Aumone", 'High Blantyre', 'Draganesti-Olt', 'Pattoki', 'Cologno Monzese', "Ville-d'Avray", 'Dornava', 'Ambatomanoina', 'Qinbaling', 'So', 'Villanchirai', 'Emet', 'Agatogba', 'Saint-Laurent-du-Var', 'Airway Heights', 'Brasil Novo', 'Fangting', 'Mutukuru', 'Erlensee', 'Mangur', 'Olivenca', "L'Isle-Jourdain", 'Kodmial', 'Manresa', 'Antsahadinta', 'Fort-Shevchenko', 'Eisenhuttenstadt', 'Nicastro', 'Widekum', 'Delmenhorst', 'San Gwann', 'Soalala', 'Soweto', 'Pachor', 'Naugachhia', 'Mount Isa', 'Afmadow', 'Suzukawa', 'Radcliffe', 'Boksburg', 'Hipperholme', 'Harim', 'Chinnampalaiyam', 'Tilari', 'Giporlos', 'Perleberg', 'Bankya', 'Capua', 'Lilio', 'Qinzhou', 'Aratuipe', 'Itaborai', 'Oldebroek', 'Tibbar', 'Douar Ain Chkef', 'Nueva Tolten', 'Tetela de Ocampo', 'Denby Dale', 'Viareggio', 'Gangadhar', 'Durgapur', 'Arcahaie', 'Kupiansk', 'Achhnera', 'Cherupazhasshi', 'Benalla', 'Labhgaon', 'Bou Fekrane', 'Chahbounia', 'Middlebury', 'Wallerfangen', 'Fribourg', 'Chilly-Mazarin', 'Tibu', 'Wachtersbach', 'Valparaiso', 'Vettavalam', 'Zamania', 'Farmers Branch', 'Neqab', 'Ayinikkad', 'Messkirch', 'Nagamangala', 'Joinvile', "M'diq", 'Schwaigern', 'Ban Bang Phun', 'Xudat', 'Silay', 'Thikriwala', 'Lake Wylie', 'Doukombo', 'Tual', 'Phu Quoc', 'Huludao', 'Jabonga', 'Uetze', 'San Mauro Pascoli', 'Sao Desiderio', 'Brignais', 'Rancho Cordova', 'Pelezi', 'Sangota', 'Ikhlaspur', 'Guaratuba', 'Mibu', 'Arris', 'Lysander', 'Kameda-honcho', 'Mzuzu', 'Shibukawa', 'Dongyuya', 'Allahpur', 'Harrodsburg', 'Brig-Glis', 'Villa San Giovanni', 'Zhangaozen', 'Alabang', 'Saint-Jacques-de-la-Lande', 'Rampur Rajwa', 'Te Anau', 'Ashwaraopeta', 'Fridley', 'Hukou', 'Yerere', 'Itajuipe', 'Toda Rai Singh', 'Oconomowoc', 'Hudiksvall', 'Ploiesti', 'Camilo Ponce Enriquez', 'Aheqi', 'Povoa de Santa Iria', 'Salaspils', 'Marti', 'Heswall', 'Barcelos', 'Axim', 'Datoda', 'Kandra', 'Montreuil', 'Lakhnadon', 'Tarnok', 'Brades', 'Sabana Yegua', 'Ed Daein', 'Mchinji', 'Jaral del Progreso', 'Bouskoura', 'Gradignan', 'Norresundby', 'Bairagnia', 'Chai Prakan', 'Cocos', 'Mwingi', 'Lucao', 'Matauna', 'Pihani', 'Taishan', 'Mocamedes', 'Didieni', 'Ankiabe-Salohy', 'Dammapeta', "La'tamna", 'Upi', 'Annakunnu', 'Iizuka', 'Pattani', 'Qiqihar', 'Maharlika Village', 'Harborne', 'Chamusca', 'Nanshuicun', 'Milledgeville', 'Seven Pagodas', 'Semirom', 'Candiba', 'Antofagasta', 'Tayug', 'Konobougou', 'Profesor Salvador Mazza', 'Perry Heights', 'Charouine', 'Unterschleissheim', 'Kyzyl-Kyya', 'Hadjout', 'Zhailuo', 'Bobigny', 'Champs-Sur-Marne', 'Amatan', 'Tadmait', 'Zao', 'Temperance', 'Grenaa', 'Zebala', 'Steynsrus', 'Buka', 'Itapororoca', 'Maranchi', 'Sikandra Rao', 'Neftcala', 'Nguekhokh', 'Pawtucket', 'Mechanicsburg', 'Mold', 'Tipitapa', 'Gobindgarh', 'Bemanevika', 'Sureshjan', 'Abomey-Calavi', 'Caracuaro', 'Vohimarina', 'Didim', 'Getulio Vargas', 'Hoogvliet', 'Denton', 'Rodgau', 'Abbiategrasso', 'Oum Drou', 'Sagwara', 'Huetor-Tajar', 'Canovellas', 'Calan', 'Droitwich', 'Gradacac', 'Czechowice-Dziedzice', 'Newton in Makerfield', 'Zabreh', 'Otavi', 'Molina', 'Duncanville', 'Kumaramputtur', 'Visnagar', 'Ichikawa', 'Shiojiri', 'Trinity', 'Shchuchinsk', 'Kemberg', 'Isla', 'Bedele', 'Cocorna', 'Pozo Colorado', 'Motobu', 'Rodinghausen', 'Osterhofen', 'Beckwith', 'Yafran', 'Buguey', 'Hawkesbury', 'Seria', 'New London', 'Agia Varvara', 'Talisayan', 'Pothuhera', 'Tiruppuvanam', 'Koumpentoum', 'Sesquile', 'Freehold', 'El Haouaria', 'Tall Rif`at', 'Nova Timboteua', 'Novi Iskar', 'Choghadak', 'Trinitapoli', 'Orimattila', 'Mussoorie', 'Kozan', 'Mahatsinjo', 'Guajiquiro', 'Bear', 'Zhoushan', 'Safford', 'M.A. Rasulzada', 'Kalaun', 'Grijo', 'San Cristobal Totonicapan', 'Marbach am Neckar', 'Panukulan', 'Salcininkai', 'Neptune', 'Cerro de Pasco', 'Pisac', 'Puttankulam', 'Vordingborg', 'Coto de Caza', 'Ahumada', 'Angicos', 'Jeonju', 'Detroit', 'La Gi', 'Puerto Ayacucho', 'La Reja', 'Mymensingh', 'Bondy', 'Altoona', 'Loberia', 'Novooleksiivka', 'Vriddhachalam', 'Tirumalaippatti', 'Kontcha', 'Chennai', 'Nagykata', 'Havre de Grace', 'Jimani', 'Beaune', 'Montornes del Valles', 'Dothan', 'Saiki', 'Indaial', 'Nantwich', 'Foshan', 'Abu Hamad', 'Prilly', 'Chelora', 'Santa Isabel Ishuatan', 'Urpaar', 'Highfields', 'Choix', 'Poway', 'Ormeau', 'Pulivalam', 'Maruim', 'Minobu', 'Circleville', 'Noksan', 'Almada', 'Sali', 'New Brunswick', 'Ottapparai', 'Sadabe', 'Tomi', 'Ramanayakkanpalaiyam', 'Golpazari', 'Grunheide', 'Punalur', 'Essex Junction', "Ibra'", 'Balupur', 'Collegno', 'Gzira', 'Santa Cruz', 'Les Irois', 'Lobos', 'Slubice', 'Ichinohe', 'Phonsavan', 'Had Oulad Issa', 'Diffun', 'Cuijk', 'Algete', 'Laascaanood', 'Sayo', 'Sayada', 'Xonqa', 'Numan', 'Benetuser', 'Ilaka Atsinanana', 'Debno', 'Nagtipunan', 'Chegutu', 'Yabu', 'Maskanah', 'Tiruvankod', 'Japaratinga', 'Horodok', 'Maevatanana', 'Lanester', 'Jequitinhonha', 'Gibraltar', 'Acqui Terme', 'Acasusso', 'Cumberland', 'Yildizeli', 'Coronel Bogado', 'Daruvar', 'Dupax del Sur', 'Gharb', 'Becerril', 'Peachtree City', 'Najrij', 'Pabna', 'Millstone', 'Blidet Amor', 'Kayapa', 'Progress', 'San Pedro del Pinatar', 'Ambakireny', 'Afranio', 'Salangaippalaiyam', 'Cupira', 'Elmas', 'Taunton', 'Bianyang', 'Piracaia', 'Pudsey', 'Qiryat Ata', 'Lusaka', 'Carterton', 'Robbinsville', 'Namorona', 'Vicam Pueblo', 'Baytown', 'Kapfenberg', 'Makoua', 'Sobral de Monte Agraco', 'Belek', 'Rio de Oro', 'Biga', 'Shiliguri', 'Agdas', 'Bougara', 'Mit Damsis', 'Alto Alegre dos Parecis', 'Cantillana', 'Waldfeucht', 'Urcos', 'Kishanpura Kalan', 'Buchloe', 'Suffolk', 'Gummersbach', 'Khandhar', 'Coracora', 'Mpondwe', 'Lalin', 'Xiulin', 'Chanwari', 'Kumba', 'Aware', 'Colgong', 'Zeven', 'Mariano Comense', 'Campos Lindos', 'Diu', 'Aligarh', 'Nova Pazova', 'Barhni', 'Djibo', 'Soddy-Daisy', 'Seneffe', 'Tarn Taran', 'Dragasani', 'Bannewitz', 'Badagabettu', 'Angichettippalaiyam', 'Heroica Nogales', 'Osecina', 'Kuhsan', 'Grimstad', 'Tufanbeyli', 'Sweetwater', 'Chandur', 'Novyi Buh', 'Alzey', 'Telsiai', 'Purwokerto', 'Tarakeswar', 'Slupca', 'Malone', 'Nowrangapur', 'Khorol', 'Gudofredo Viana', 'Od', 'Tavriisk', 'Shakhtarsk', 'Silago', 'Muskego', 'Kartarpur', 'Shofirkon Shahri', 'Zhangjiakou Shi Xuanhua Qu', 'Lloyd', 'Togane', 'Minabe', 'Paloma Creek South', 'Koelwar', 'Takestan', 'Cotorra', 'Lugang', 'Osaki', 'Adiyakkamangalam', 'Fatipura', 'Jiaoxiyakou', 'Lakkireddipalle', 'Calamar', 'Eastbourne', 'Manlleu', 'Santo Andre', 'Glowno', 'Ozorkow', 'Herzliyya', 'Zaladanki', 'Druzhkivka', 'Naron', 'Nasim Shahr', 'Penicuik', 'Kamianets-Podilskyi', 'Rezzato', 'Wandlitz', 'Eufaula', 'El Hamma', 'Kattirippulam', 'Tafo', 'Bad Sassendorf', 'Tuticorin', 'Venado Tuerto', 'Kanasanapalle', 'Paulistana', 'Tauberbischofsheim', 'Mino', 'Bapaura', 'Lao Cai', 'Winchendon', 'Akyazi', 'Baclayon', 'Radhanpur', 'Inkerman', 'Donsol', 'Vila Real de Santo Antonio', 'Bottrop', 'South Orange Village', 'Grezzana', 'Konkavaripalle', 'Nybro', 'Dharmastala', 'Waldbrol', 'Shimomura', 'Hangal', 'Ribeirao das Neves', 'Hachirud', 'Asmara', 'Mercogliano', 'Isparta', 'Choshi', 'Rosetta', 'Shimotsuma', 'Souagui', 'Nishigo', 'Duitama', 'Salkhad', 'Tenares', 'Paita', 'Zephyrhills', 'Nacaome', 'Lezignan-Corbieres', 'Moncks Corner', 'Maracena', 'Gutao', 'Coseley', 'Susurluk', 'Hoofddorp', 'Taufkirchen', 'Lebork', 'West Bridgford', 'Roda', 'Laatatra', 'Sin-le-Noble', 'Lake Morton-Berrydale', 'Junik', 'Gorey', 'Ferrara', 'Falla', 'Atotonilco el Grande', 'Barnala', 'Port Chester', 'Rutland', 'Gistel', 'Palestina de los Altos', 'Manujan', 'Koja', 'Vitre', 'Antsambalahy', 'Ewo', 'Silivri', 'Nitte', 'Riyadh', 'Ginir', 'Figueras', 'Porvoo', 'Belamoty', 'Vista Hermosa de Negrete', 'Heysham', 'Laoac East', 'Ad Dammam', 'Chisinau', 'Lanus', 'Lockport', 'Powder Springs', 'Panamarattuppatti', 'Bisingen', 'Cotacachi', 'Bayanan', 'Ozoir-la-Ferriere', 'Ouyen', 'Kuttuparamba', 'Velimese', 'Nabeul', 'Anisio de Abreu', 'Uozu', 'Cumaribo', 'Bhadravati', 'Kuroshio', "Barano d'Ischia", "Qian'an", 'Oulmes', 'Bethulie', 'Tebessa', 'Columbia City', 'Kalvarpatti', 'Nesconset', 'Quilali', 'Uddevalla', 'Teplice', 'Pindobacu', 'Xihu', 'Altonia', 'Asudapuram', 'North Fayette', 'Konstantynow Lodzki', 'Castellanza', 'Penalolen', 'Zimatlan de Alvarez', 'Minad', 'Martellago', 'Chitral', 'Yumbo', 'Dunajska Streda', 'Didy', 'Horquetas', 'Pinas', 'Farragut', 'Castro Alves', 'Saatli', 'Naranapuram', 'Pine Castle', 'Penzberg', 'Fomeque', 'Goodmayes', 'Aru', 'Dobrovo', 'Martinez de la Torre', 'West Glens Falls', 'Vaddadi', 'Barra da Estiva', 'Whitefield', 'Milford city', 'Guarulhos', 'Fangchenggang', 'Edwardsville', 'Broussard', 'Lieto', 'Quimili', 'Kaiyun', 'Mocha', 'Volkach', 'Vazante', 'Thu Duc', 'Penapolis', 'Nanchital de Lazaro Cardenas del Rio', 'Phayao', 'Talwat', 'Tineo', 'Opwijk', 'Diabali', 'Andranovorivato', 'Huron East', 'Poirino', 'Juquitiba', 'Tizi Gheniff', 'Rudolstadt', 'Kozhinjampara', 'Huangyoutang', 'Triel-sur-Seine', 'Bordj el Kiffan', 'Unwana', 'Vanthli', 'Gramado', 'Ogawara', 'Arbatache', 'Mubende', 'Itaperuna', 'Beccles', 'Balabagan', 'Jeomchon', 'Fountain Inn', 'Passaic', 'Apaxco de Ocampo', 'Bhairi', 'Kalayapuram', 'Tlahualilo de Zaragoza', 'Mangasamudram', 'Kvivik', 'Arnstadt', 'South Lebanon', 'Vinnytsia', 'Bhimbar', 'Panakkudi', 'Paraisopolis', 'Deux-Montagnes', 'Antarvedipalem', 'Pushing', 'Ichinomiya', 'Padre Bernardo', 'Jiblah', 'Bestwig', 'Tuba', 'Porto Amboim', 'Gonohe', 'Caernarfon', 'Oostakker', 'Ataq', 'General Panfilo Natera', 'Sabratah', 'Andira', 'Ichtegem', 'Coban', 'Sangli', 'Balhapur', 'Narayanraopet', 'Paimio', 'Alvorada do Sul', 'El Transito', 'Tonk', 'Methil', 'Ban Phonla Krang', 'Arac', 'Selydove', 'Pecinci', 'Benidorm', 'Binkolo', 'Kailaras', 'Quibdo', 'Nieuw Nickerie', 'Magstadt', 'Bhirua', 'Pallipalaiyam', 'Ampasimatera', 'Vila do Conde', 'Fariman', 'Darihat', 'Jurong', 'Zurrieq', 'Oschatz', 'Godella', 'Martano', 'Wuhu', 'Gafanha da Nazare', 'Elsmere', 'Malior', 'Sidi el Mokhfi', 'Mingxing', 'Elsen', 'Zinvie', 'Orange', 'Rakovnik', 'Courcouronnes', 'Eregli', 'Shengaocun', 'Badkulla', 'Zile', 'North Highlands', 'Bhubaneshwar', 'Deurne', 'Bassano del Grappa', 'Paonta Sahib', 'Chapantongo', 'Samannud', 'Narayanpet', 'Chintapalle', 'Myslenice', 'Pama', 'Pedro II', 'Imperial', 'Tuvagudi', 'Erftstadt', 'Ningbo', 'Burglengenfeld', 'Coatepec', 'Horr-e Riahi', 'Duayaw-Nkwanta', 'Unchagao', 'Sayram', 'Herouville-Saint-Clair', 'Nurhak', 'Joniskis', 'Fish Town', "Vranov nad Topl'ou", 'Thief River Falls', 'North Miami', 'Szarvas', 'Poing', 'Xanxere', 'Bulolo', 'Tin Shui Wai', 'Auriol', 'La Calera', 'Mannachchanellur', 'San Miguelito', 'Harsefeld', 'Zdunska Wola', 'Hellin', 'Tours', 'Uryzhar', 'Villebon-sur-Yvette', "'Ain Kihal", 'Ocean Acres', 'Paranagua', 'Coaraci', 'Saaminki', 'Ullur', 'Muttatodi', 'Abashiri', 'Tadikalapudi', 'Mahin', 'Duderstadt', 'Utazu', 'Oleiros', 'Bolekhiv', 'Beuvry', 'Ilagan', 'Lakhipur', 'Tepotzotlan', 'Vallendar', 'Quimavango', 'Cobh', 'Ayutla', 'Pau', 'Ngong', 'Doorn', 'Libenge', 'Bin-Houye', 'Shahr-e Kord', 'Santiago de Maria', 'Sabya', 'Sunninghill', 'Alhama de Murcia', 'Usakos', 'Boke', 'Ingichka', 'Montgomery Village', 'Almonte', 'Campagnano di Roma', 'Akbou', 'Andaingo Gara', 'Herzberg', 'Sande', 'Aracataca', 'Dipaculao', 'La Celle-Saint-Cloud', 'Soja', 'Semari', 'Bareilly', 'Mahna', 'Wandsworth', 'Danwan', 'Manganam', 'Dinant', 'Delran', 'Shangzhen', 'Mallampalli', 'Czluchow', 'San Borja', 'Sanya', 'Galion', 'Tinchlik', 'Show Low', 'Mbouda', 'Hong Kong', 'Cedar Hill', 'Baraawe', 'Rabaul', 'Matsukawa', 'Farum', 'Jishi', 'Rush', 'Ubay', 'Schleusingen', 'Hengken', 'Pawai', 'Margherita di Savoia', 'Togba', 'Faradabad', 'Soron', 'Ban Tha Mai I', 'Santa Marta de Tormes', 'Llantrisant', 'Huauchinango', 'Darmaha', 'Hadano', 'Porto Real', 'Pakri', "N'Djamena", 'Skuvoy', 'Novopokrovka', 'Dighirpar', 'Summerside', 'Young', 'Campagna', 'Chavuttahalli', 'Malalbergo', 'San Victor Abajo', 'Robertsganj', 'Toubakoro', 'Namakadu', 'Clayton', 'Xalqobod', 'Moerdijk', 'Bergerac', 'Linxi', 'La Barca', 'Hopewell', 'Riverview', 'Nanfang', 'Garwolin', 'Gannan', 'Cowell', 'Zhongguyue', 'Cacapava do Sul', 'Fenyang', 'Chimore', 'Bahadurganj', 'Dantewara', 'San Feliu de Guixols', 'Puente-Genil', 'Basmanpur', 'Baar', 'Chinautla', 'Iriga City', 'Gonglang', 'Pecs', 'La Paz Centro', 'Lienz', 'Japaratuba', 'Chiba', 'Havlickuv Brod', 'Ranchos', 'Corning', 'Zinder', 'Sankt Gallen', 'Al Lith', 'Kaminoyama', 'Morarano', 'Sokyriany', 'Untergruppenbach', 'Makapanstad', 'Hullahalli', 'Empoli', 'Melton', 'Abiramam', 'Villapinzon', 'Le Vesinet', 'Den Chai', 'Khasab', 'General Deheza', 'Rideau Lakes', 'Banora Point', 'Dillenburg', 'Acarape', 'Olympia', 'Zilina', 'Chuarrancho', 'Jaipur Chuhar', 'Scaggsville', 'Bat Khela', 'Villa Jaragua', 'Oosterzele', 'La Banda', 'West Pensacola', 'Cleckheaton', 'Gizo', 'Guaimaca', 'Chalkida', 'Namegata', 'Shibam', 'Duekoue', 'Bir Anzarane', 'Sidi Yahia', 'Port Hedland', 'Cisterna di Latina', 'Dolo', 'Castro Daire', 'Kolattur', 'Hachioji', 'Sakti', 'Stanford', 'Asakapalle', 'Mazhang', 'Claypole', 'Sokal', 'Byureghavan', 'Ghasri', 'Harbin', 'Renqiu', 'Bhaur', 'Canillo', 'Capellen', 'Kenton', 'Balasamudram', 'Sondershausen', 'Coburg', 'Makrana', 'Zvishavane', 'Petapa', 'Piparia', 'Kiamba', 'Meyerton', 'Yi Xian', 'Tewkesbury', 'Menomonee Falls', 'Wilkinsburg', 'Cildir', 'Sao Mateus do Maranhao', 'Belleville', 'Sandefjord', 'Santa Branca', 'Hinckley', 'Tomblaine', 'Chizhou', 'Hardenberg', 'Victor Harbor', 'Broadlands', 'Iskourane', 'Yuanli', 'Erattukulakkada', 'Selcuklu', 'Colesville', 'Port Hope', 'Sidi Amer El Hadi', 'Bowringpet', 'Kalpakathukonam', 'Adonara', 'Mesquite', 'Papara', 'Karambakkudi', 'El Aguila', 'West Derby', 'Bhanvad', 'Borj el Qoble', 'Kamikita-kita', 'Rio Pardo de Minas', 'Paso Canoas', 'Holalu', 'Owani', 'Katoya', 'Semdinli', 'Manaratsandry', 'San Martin Texmelucan de Labastida', 'Palo', 'Vestavia Hills', 'Deogarh', 'San Juan del Cesar', 'Teorama', 'Secaucus', 'Tepecoyo', 'Kadur', 'Belampona', 'Ghattu', 'Sao Jose do Calcado', 'Shinto', 'Al Hibah', 'Gateway', 'Werdohl', 'Lenzburg', 'Gjilan', 'Zory', 'Vodurivandlagudem', 'Hoddesdon', 'Rawmarsh', 'Yajalon', 'Taisheng', 'Kizhattur', 'Yongcong', 'Macomer', 'Iwamizawa', 'Bail-Hongal', 'Maglod', 'Jauja', 'Kadiyadda', 'Tiruvengadam', 'Alovera', 'Sao Goncalo do Rio Abaixo', 'Velike Lasce', 'Foxborough', 'Collo', 'Cincinnati', 'Manambolo', 'Rugeley', 'Kulob', 'Saparua', 'Plympton', 'Malekan', 'Calheta', 'Kalanak', 'Sines', 'San Ignacio Cohuirimpo', 'Grottaferrata', 'Skhira', 'Harker Heights', 'El Nido', 'Morangis', 'Belp', 'Estremoz', 'Baft', 'State College', 'Likak', 'Kanjiza', 'Schwabmunchen', 'Adzope', 'Balamban', 'Yaojiafen', 'Klosterneuburg', 'Coracao de Jesus', 'Pereshchepyne', 'Dayr Abu Sa`id', 'Tofol', 'Vasad', 'Rychnov nad Kneznou', 'Sontha', 'Mandza', 'Jatibonico', 'North Whitehall', 'Ringnod', 'Palamel', 'Pampa', 'Ahrensfelde', 'Barwat Pasrain', 'Haider Khel', 'Zuera', 'Yampil', 'Wyoming', 'El Outaya', 'Kikinda', 'Zefat', 'Naganuma', 'Chingford', 'Weisswasser/Oberlausitz', 'Stockton', 'Tournefeuille', 'Wolfenbuttel', 'Kieta', 'Manambaro', 'Abejorral', 'Tchitato', 'Horsforth', 'Kunithala', 'Yesilhisar', 'Gulshan', 'Sanuki', 'Laanoussar', 'Kerouane', 'Huaycan', 'Curtorim', 'Tamallalt', 'Coronado', 'Mata Verde', 'Charenton-le-Pont', 'Tamri', 'Villiersdorp', 'Miorimivalana', "Ain M'Lila", 'Siruvachchur', 'Valdez', 'Bou Arfa', 'Bonifacio', 'Valpoy', 'Jun Bel', 'Northcote', 'Macenta', 'Guira de Melena', 'Moranha', "Jem'at Oulad 'Abbou", 'Upminster', 'Bulawayo', 'Laguna', 'Adria', 'Qatlupur', 'Rumphi', 'Omaezaki', 'Ksar El Kebir', 'Cruzeiro', 'Semic', 'Moparipalaiyam', 'Quanzhou', 'Wisil', 'Meadow Lakes', 'Qrendi', 'Ang Thong', 'Nandi Hills', 'Grayslake', 'Teguise', 'Kakunodatemachi', 'Marupe', 'Mayen', 'Miguel Alves', 'Karnal', 'Owings Mills', 'Lewe', 'Dyykan-Kyshtak', 'Kentville', 'Ilijas', 'San Pedro de Coche', 'Yermal', 'Zipaquira', 'Linganaboyinacherla', 'Es Sebt', 'Luisiana', 'Tamahu', 'Kudowa-Zdroj', 'Susice', 'Saucillo', 'Castelletto sopra Ticino', 'Repentigny', 'Manganj', 'Algemesi', 'Methuen Town', 'Reota', 'Aiyampuzha', 'Nisia Floresta', 'Tukwila', 'Cayey', 'Phu Tho', 'Pishin', 'Manuel B. Gonnet', 'Moguer', 'Petit-Goave', 'Oulad Salmane', 'Kadikoy', 'Cocoa Beach', "Bulung'ur Shahri", 'Ambalavato', 'Qal`eh Ganj', 'Quierschied', 'Barrow in Furness', 'Beifan', 'Cubulco', 'Hallbergmoos', 'Porcari', 'Kombolcha', 'Kirkagac', 'Cacule', 'Daraga', 'Qandala', 'Bagaha', 'Isoanala', 'Baulia', 'Boultham', 'Samundri', 'Amangal', 'Perambalur', 'Gose', 'San Pedro La Laguna', 'Mahendragarh', 'Offenburg', 'Sagae', 'Uttamapalaiyam', 'Lesquin', 'Novohrad-Volynskyi', 'Takab', 'Shahmirpet', 'Dom Basilio', 'Witten', 'Lavandevil', 'Katowice', 'Valavakattumula', 'Mel Seval', 'Santa Catarina Otzolotepec', 'Engenheiro Paulo de Frontin', 'Alta', 'Uran', 'Santo Tirso', 'Hochstadt an der Aisch', 'Texistepeque', 'Stevens Point', 'Karadipara', 'Diafarabe', 'Travnik', 'Sibi', 'Bichkunda', 'Tarpon Springs', 'Cuicatlan', 'Bealanana', 'Nalhati', 'Surak', 'Kurumbalur', 'Whangarei', 'Hatten', 'Valrico', 'Johnstone', 'Enterprise', 'Olutanga', 'Aguelhok', 'Mezdra', 'Tanghin-Dassouri', 'Kamien Pomorski', 'Santo Domingo Tehuantepec', 'Ap Phu My', 'Stamboliyski', 'Kilis', 'Ho Nai', 'Taree', 'Teulada', 'Virac', 'Vigia', 'Shahrak-e Pars', 'Zhengzhou', 'Vikarabad', 'Liubotyn', 'Tremonton', 'Tirmitine', 'Genas', 'Evato', 'Pisa', 'Kathua', 'El Bordo', 'Surin', 'Bariri', 'Trelaze', 'Thatto Heath', 'Takon', 'Praya', 'Baoding', 'Nakashunbetsu', 'Eloy', 'Sao Miguel de Touros', 'Moulay Driss Zerhoun', 'El Pinar', 'Mongomo', 'Andranovelona', 'Malbork', 'Soatanana', 'Hillerod', 'Cape Elizabeth', 'Tirwa', 'Salatiga', 'Roncaglia', 'Selu', 'Borgo Maggiore', 'Shelbyville', 'Bandar-e `Asaluyeh', 'Ferizaj', 'Ufeyn', 'Devanhalli', 'Akyurt', 'Jawalgeri', 'Cambita Garabitos', 'Manalapan', 'Surendranagar', 'Tadinada', 'Heumen', 'Inzago', 'Bulach', 'Tinipuka', 'Manohisoa', 'Leisure City', 'Hilvan', 'La Estrella', 'Vulcan', 'Wallan', 'Ramareddi', 'Gava', 'Beroy Atsimo', 'Huejotzingo', 'Tucupita', 'Ouled Beni Messous', 'Bedzin', 'Bidston', 'Marilandia do Sul', 'Sechelt', 'Beernem', 'Palwal', 'Puerto Iguazu', 'Banchpar', 'Targu Ocna', 'Sacile', 'Lajeado', 'Razole', 'Peringalam', 'Cavaillon', 'Panaji', 'Kesennuma', 'Linhai', 'Silvi Paese', 'Bairnsdale', 'Mehidpur', 'Sivrice', 'Waynesville', 'Oulu', 'Mairena del Alcor', 'Omatjete', 'Mokena', 'Awlouz', 'Douar Bni Malek', 'Bonao', 'Al Karak', 'Pitimbu', 'Pandami', 'Monzon', 'Molbergen', 'Sahit', 'Castleford', 'Perungulam', 'Tonekabon', 'Jasdan', 'Dharmajigudem', 'El Cacao', 'Salavan', 'Plato', 'Fontenay-aux-Roses', 'San Luis de La Loma', 'Dinbela', 'Hawthorn Woods', 'Corabia', 'Arboledas', 'Utrecht', 'Douar Lehouifrat', 'Dois Vizinhos', 'Cabo Bojador', 'Amalou', 'Rock Falls', 'Sitebe', 'Vakkam', 'Dasnapur', 'Ankirondro', 'Rajupalem', 'El Cajon', 'Gunbarrel', 'Yulee', 'Chatan', 'Palmer', 'Fronteiras', 'Chicureo Abajo', 'Newberry', 'Severance', 'Sattenapalle', 'Koping', 'Muhlhausen', 'Sylvania', 'San Elizario', 'Hameenkyro', 'Confresa', 'Barbate de Franco', 'Ararangua', 'Boston', 'Frankfurt (Oder)', 'Ganshoren', 'Mehdipur', 'Ambodinonoka', 'Cajari', 'La Primavera', 'Brahmanandapuram', 'Oued Rhiou', 'Polegate', 'Dodoma', 'Mecheraa Asfa', 'Aracas', 'Rushden', 'Korosten', 'Deodora', 'Pell City', 'Beppu', 'Greenburgh', 'Analaiva', 'Haar', 'Cervantes', 'Uelzen', 'Budd Lake', 'Puyang', 'Aladag', 'Hajeb el Aioun', 'Camajuani', 'Savannah', 'Pithapuram', 'Eldersburg', 'San Policarpo', 'Mennecy', 'Ibirapua', 'Gholia Kalan', 'Pickerington', 'Qadsayya', 'Waldniel', 'Bagalur', 'Sao Pedro do Piaui', 'Bar-le-Duc', 'Nakhyaungcharipara', 'Tralee', 'Penukonda', 'Ghosai', 'Pasaquina', 'Gevgelija', 'Chokkampatti', 'Illzach', 'Huadian', 'Baryshivka', 'Siofok', 'Yutan', 'Njombe', 'Khlung', 'Dabouziya', 'Tumpat', 'Tangpingcun', 'Chinna Salem', 'Basaithi', 'Aranguez', 'Marand', 'Tanudan', "Bet She'an", 'Pingxiang', 'Westerlo', 'Jomasho`y', 'Brussels', 'Kempele', 'Kiranomena', 'Porto Esperidiao', 'Padre Burgos', 'Banda del Rio Sali', 'Cortlandt', 'Tepatitlan de Morelos', 'Eau Claire', 'Saint-Constant', 'Schermbeck', 'Cuautitlan', 'Kolnur', 'Ehden', 'Dazhuangzi', 'Pebble Creek', 'Meihekou', 'Qianjiang Shequ', 'Matamoros', 'Calama', 'Stoneham', 'Dassel', 'Khanna', 'Cankaya', 'Palos Hills', 'Elurpatti', 'Kallakkurichchi', 'Monte San Giovanni Campano', 'Orhaneli', 'Bungoono', 'Blankenberge', 'Cordele', 'Kure', 'Bolgatanga', 'Bansalan', 'Race', 'Morales', 'Courcelles', 'Evaz', 'Pantepec', 'Nyirbator', 'Bellegarde-sur-Valserine', 'Barahi', 'Espinosa', 'Fatehgarh', 'Tangcun', 'Brzeziny', 'Pardwal', 'Gonzales', 'San Juan y Martinez', 'Hirriyat Raznah', 'Rokhaty', 'Jiangyin', 'Andahuaylas', 'Pamiers', 'Pinarbasi', 'Rio Vermelho', 'Boca del Rio', 'Timana', 'Cambridge', 'Shancheng', 'San German', 'Mata Roma', 'Compiegne', 'Kadanganeri', 'Argelato', 'Torokszentmiklos', 'Ardesen', 'Cariamanga', 'Korahia', 'Saarwellingen', 'South Abington', 'Balete', 'Fuman', 'Amdel', 'Dedemsvaart', 'Plasencia', 'Sharonville', 'Villamarchante', 'Slobozia', 'Jinji', 'Malak Abad', 'Adakplame', 'Manoke', 'Companiganj', 'Ozu', 'Friedrichshafen', 'Loudi', 'Meerut', 'Sheohar', 'Sanjiang Nongchang', 'Jos', 'Zhangmu Touwei', 'Siuna', 'Clervaux', 'Nocera Inferiore', 'Borvayeh-ye Al Bu `Aziz', 'Uki', 'Humble', 'Valaparla', 'Ajim', 'Kok-Janggak', 'Etropole', 'Idukki', 'Olmue', 'Maracanau', 'Ar Rass', 'Anivorano Avaratra', 'Jonesboro', 'Sanlucar de Barrameda', 'Estahban', 'Sujapur', 'Yilong', 'Ciudad Altamirano', 'Rudnyy', 'Wladyslawowo', 'Hekinan', 'Cittanova', 'Schonebeck', 'Mangalore', 'Pehonko', 'Kacanik', 'Cauquenes', 'Chikni', 'Yongcheng', 'Town and Country', 'Assai', 'Liujiaxia', 'Harwich', 'Attur', 'Ganjam', 'Matar', 'Grovetown', 'Hazro', 'Findlay', 'Lower Merion', 'Hennef', 'Musikot-Khalanga', 'Vinjam', 'Sao Jose', 'Gengenbach', 'Salmon Creek', 'Katoomba', 'Arrapalli', 'Krasnystaw', 'Ban Chang', "Tai'an", 'Manucan', 'Bopolu', 'Tissint', 'Vohimasy', 'Ocana', 'Dickinson', 'Middelkerke', 'Jarajus', 'El Jadid', 'Altinyayla', 'Rongcheng', 'Puthupalli', 'Pedro Afonso', 'Muurame', 'Castro Valley', 'Cookstown', 'Santa Maria das Barreiras', 'Konand', 'Dilbeek', 'Meicheng', 'Hamma Bouziane', 'Rottweil', 'Sao Luis do Curu', 'Dowlatabad', 'South Strabane', 'Schenectady', 'Baihar', 'Blacklick Estates', 'Villa Nougues', 'Al Qurayyat', 'Sao Jose de Mipibu', 'Huddersfield', 'Chibuto', 'Groutville', 'Vert-Saint-Denis', 'Patzicia', 'Kankon', 'La Chaux-de-Fonds', 'Altenbeken', 'Blonie', 'Anda', 'Masterton', 'Wieringerwerf', 'Peruwelz', 'Ilawa', 'Dirba', 'Shankarpalli', 'Longquan', 'Bodippatti', 'Nasukarasuyama', 'Sam', 'Ban Nong Prue', 'Monreale', 'Barreira', 'Hetanpur', 'Shishgarh', 'Tafresh', 'Saltcoats', 'Gopalasamudram', 'Kafr Saqr', 'Espinho', 'Peine', 'Radevormwald', 'Pompano Beach', 'Chavinda', 'Yishi', 'Janauba', 'Ankalgi', 'Pluderhausen', 'Sopot', 'Varpalota', 'Sanming', 'Santander', 'Niskayuna', 'Gumushacikoy', 'Obalapuram', 'Bollate', 'Ghardaia', 'Davorlim', 'San Jacinto del Cauca', 'Ridderkerk', 'Athiringal', 'Jimenez', 'Ketrzyn', 'Grugliasco', 'Segoubougou', 'Lanji', 'Cocorote', 'Mizil', 'Francisco Beltrao', 'Toksun', 'Haka', 'Bilasuvar', 'Sabaneta de Yasica', 'Abqaiq', 'Hinundayan', 'King', 'Satun', 'Fujioka', 'Ankadinondry-Sakay', 'Sentani', 'Bady Bassitt', 'Concepcion Tutuapa', 'Woking', 'Manlius', 'Mutsamudu', 'Khuzdar', 'Czernica', 'Balzers', 'Saydnaya', 'Suthalia', 'Stevenson Ranch', 'Dar Ould Zidouh', 'South Salt Lake', 'Voerde', 'South San Francisco', 'Sarstedt', 'Neuwied', 'Northview', 'Diez', 'Cuscatancingo', 'Worth', 'Wrentham', 'Pueblo West', 'Bispham', 'Shillong', 'Goes', 'Talayazham', 'Kosk', 'Imus', 'Montigny-les-Cormeilles', 'El Guetar', 'Campinas', 'Muggio', 'Latsia', 'Leopoldsburg', 'Tagazhi', 'Tacaimbo', 'Karimkunnum', 'Olifantshoek', 'Cocal', 'Petrovac na Mlavi', 'Likasi', 'Marmaris', 'Qarabulaq', 'Dalin', 'Straseni', 'Tamluk', 'Lemon Grove', 'Shoreview', 'Ewa Beach', 'Ado-Ekiti', 'Rio Verde', 'Marina', "Xi'an", 'Jutai', "Quartu Sant'Elena", 'Santa Ursula', 'Padiala', 'Sigatoka', 'Bien Unido', 'Yotoco', 'Goundam', 'Iconha', 'Pathari', 'Kotra', 'Brookside', 'Palos de la Frontera', 'Ban Bang Phlap', 'Halwan', 'Arth', 'Jiaoxiling', 'Dicholi', 'Uruapan', 'Bromont', 'Yaprakli', 'Junagadh', 'Sirvintos', 'Nandayure', 'Karimama', 'Pangantocan', 'Kanpur', 'Sao Manuel', 'Schwelm', 'North Bergen', 'Curvelo', 'Zafargarh', 'Travilah', 'Arifiye', 'Eusebio', 'Ba`qubah', 'Nutakki', 'Hermantown', 'Kremenets', 'Enrile', 'Tavarede', 'Wiesmoor', 'Soke', 'Bandraboua', 'Dera Allahyar', 'Welzheim', 'Calbuco', 'Markovci', 'Dumka', 'Dumaria', 'Bogue', 'Manuel Tames', 'Longjumeau', 'Igny', 'Medenine', 'Govindapalle', 'Justice', 'Surappalli', 'Fara', 'Sao Francisco do Maranhao', 'Nandiyalam', 'Tarrasa', 'Amaliada', 'El Hadjira', 'Hickory', 'Mandramo', 'Celendin', 'Almansa', 'Yamatotakada', 'Rhaude', 'Lingtang', 'Kfar Kidde', 'Abadla', 'Nova Serrana', 'La Victoria', 'Adis Zemen', 'Nanticoke', 'Douar El Arbaa Bou Quorra', 'Passo Fundo', 'Ain el Mediour', 'Ban Lam Sam Kaeo', 'Hinwil', 'Moradabad', 'Douar Toulal', 'Hong', 'Lubbock', 'Higashikagura', 'Trentola', 'Prymorsk', 'Trzcianka', 'Borujerd', 'Elefsina', 'Teruel', 'Mugdampalli', 'Manampatrana', 'Sisak', 'Simpelveld', 'Jonava', 'Solola', 'Santo Anastacio', 'Nalayh', 'Dumarao', 'Imgarr', 'Puerto Triunfo', 'Dhubaria', 'San Jose de Aerocuar', 'Destrnik', 'Dasso', 'Calanogas', 'Ash Shaykh Badr', 'Klerksdorp', 'Sardasht', 'Udamalpet', 'Xihuangni', 'Agudos do Sul', 'Ciudad de Atlixco', 'Huntley', 'Leonberg', 'North Glengarry', 'Baciu', 'Hemmoor', "Montopoli in Val d'Arno", 'Settimo Torinese', 'Pul-e `Alam', 'Kizilcahamam', 'Bulnes', 'Koiridih', 'Ho', 'Heber', 'Bristol', 'Turbaco', 'Arbaa Laaounate', 'Ayyagarpet', 'Xiluo', 'Nordre Fale', 'Stara Pazova', 'Uchinada', 'Bhikkiwind Uttar', 'Guerou', 'Landeh', 'Lilburn', 'Calvizzano', 'San Jose del Rincon Centro', 'Bagh', 'Mau', 'Vincennes', 'Warrenville', 'Madiun', 'Boma', 'Vettam', 'Magione', 'Piscataway', 'Souk et Tnine Jorf el Mellah', 'Marly-le-Roi', 'Khansahibpuram', 'Otsu', 'Vitoria', 'Motril', 'Asheboro', 'Cobham', 'Kalilangan', 'Dahua', 'Pozo Almonte', 'Salem', 'Babhangaon', 'Zero Branco', 'Berberati', 'Laharpur', 'Nihal Singhwala', 'Pesnica', 'Santa Cruz da Baixa Verde', 'Newburyport', 'Alcala de Guadaira', 'Sodegaura', 'Al Mazar ash Shamali', 'Aransas Pass', 'Xuddur', 'Port-de-Paix', 'Kirkstall', 'Erd', 'Saint-Martin-Boulogne', 'Wulong', 'Nagdha Simla', 'Roeselare', 'Lubin', 'Barberino di Mugello', 'Cranberry', 'Oldenzaal', 'Mascouche', 'Ashgabat', 'Mtwara', 'Mankal', 'Este', 'Caririacu', 'Piranshahr', 'Xiaozhengzhuang', 'Khamis Mushayt', 'Prineville', 'Sidi Namane', 'Tapachula', 'Massawa', 'Ramewadi', 'Abancay', 'Biratnagar', 'Chilakhana', 'Tadepalle', 'Passau', 'Imerimandroso', 'Nilanga', 'Tsukuba-kenkyugakuen-toshi', 'Veldhoven', 'Longjiang', 'Armidale', 'Alcoy', 'Gabaldon', 'Klippansbruk', 'Yongyang', 'Canton', 'Hammonton', 'Pena Forte', 'Panamaram', 'Kankan', 'Sanaa', 'Kilchberg', 'Fulton', 'Prainha', 'Andranofasika', 'Barela', 'Bhogpur', 'Crayford', 'Glenshaw', 'San Lorenzo', 'Thiais', 'Bahce', 'Oliveira do Hospital', 'Bhander', 'Taitung', 'Caldas da Rainha', 'Decatur', 'Basaon', 'Matteson', 'Al Hammam', 'Tayyibat al Imam', 'Jhakhra', 'Stiring-Wendel', 'Vlissingen', 'Zhongling', 'Palm Springs', "Qacha's Nek", 'Kambaneri Pudukkudi', 'Khardah', 'Artashat', 'Bua Yai', 'Point Pedro', 'Tozeur', 'Luxitun', 'San Sebastian de Mariquita', 'Saint-Sauveur', 'Rumilly', 'Mortad', 'Kaithwar', 'Abertillery', 'Sulzbach', 'Tillaivilagam', 'Veroia', 'Zeitz', 'Graz', 'Kitakyushu', 'Picasent', 'Rahden', 'Mecatlan', 'Tehri', 'Mauganj', 'Burshtyn', 'Espargos', 'Travis Ranch', 'Chokkanathapuram', 'Bhalil', 'Pateros', 'Nambuangongo', 'Beckum', 'Sanary-sur-Mer', 'Budili', 'Akalapura', 'Belo Vale', 'Pavlohrad', 'Swinton', 'Cento', 'Nantou', 'Flora', 'Carnaiba', 'Piotrkow Trybunalski', 'Conceicao da Feira', 'Dohazari', 'Sugito', 'Si Sa Ket', 'Sirjan', 'Zaandam', 'Titisee-Neustadt', 'Rafelbunol', 'Abadou', 'Suhareke', 'Pipariya', 'Binidayan', 'Kunzelsau', 'Panjgur', 'Juana Diaz', 'Afzalpur', 'Porirua', 'Elbasan', 'Hardi', 'Forbesganj', 'Shah Alam', 'Tiruvegapra', 'Cat', 'Clichy-sous-Bois', 'Qumqo`rg`on', 'Ban Tha Thong', 'Auburn Hills', 'Castelo Branco', 'Borgo', 'Rich', 'Karukachal', 'Maltby', 'Notteroy', 'Glens Falls North', 'Cunha', 'Alebtong', 'Balneario de Camboriu', 'Anandapuram', 'Bambalang', 'Touggourt', 'Taihe', 'Saidoke', 'Jori Kalan', 'Golet', 'Sankaranayinar Kovil', 'Sao Sepe', 'Saugus', 'Capaci', 'Puerto Morazan', 'Fao Rai', 'Caracase', 'Tranas', 'San Giorgio del Sannio', 'Abeche', 'Pak Thong Chai', 'Sola', 'Bensalem', 'Talen', 'Cidreira', 'Phangnga', 'Kalavapudi', 'Kapsabet', 'Nawan Shahr', 'Pyu', 'Mitsinjo', 'Bradford', "N'Gaous", 'Mionica', 'Echelon', 'Antonio Carlos', 'La Tuque', 'Nairn', 'Steenbergen', 'Sidi Abdelaziz', 'Jijel', 'Manzanares', 'Zofingen', 'Ryuyo', 'Tigard', 'Belm', 'Piastow', 'Sebring', 'Vastervik', 'Hengshuicun', 'Riverside', 'Montecristi', 'Aprilia', 'Penistone', 'Mogeiro', 'Sao Joao de Ver', 'Robles', 'Saint-Germain-en-Laye', 'Bethelsdorp', 'Saka', 'Butia', 'Gudipallipadu', 'Pulivendla', 'Huntingdon', 'Vila Teixeira da Silva', 'Antsakanalabe', 'Amneville', 'Lummen', 'Dbaiye', 'Semmarikulan', 'Risalpur Cantonment', 'Acala del Rio', 'Porur', 'Amarchinta', 'Olamze', 'Kairouan', 'Qiantangcun', 'Montegrotto Terme', 'Bensheim', 'Lenvik', 'Pariquera-Acu', 'Sambre', 'Tekkebhagam', 'Blerick', 'Parobe', 'Clive', 'Davos', 'Devanakavundanur', 'Tupran', 'Rossano', 'Raman', 'Hosan', 'Erding', 'Ban Tha Kham', 'Huercal-Overa', 'Fairmount', 'Kamabougou', 'Alcaudete', 'El Banco', 'Saksohara', 'Ferkessedougou', 'Darsur', 'Gagnef', 'Mostaganem', 'Opatija', 'Cambira', 'Moninnpebougou', 'Pinia', 'Sillod', 'Santo Antonio do Sudoeste', 'Baxt', 'Yomitan', 'Porto Firme', 'Kalungu', 'Tarime', 'Dingxi', 'Garuva', 'Alton', 'Poprad', 'Comandante Fontana', 'Luanzhou', 'Cuitzeo del Porvenir', "Sant'Agata de' Goti", 'Tsinjoarivo', 'Tamiahua', 'Barnis', 'Minami-Alps', 'Jalam', 'Arandu', 'Spanish Fork', 'Tiruvennanallur', 'Mohammadia', 'Bayt Sahur', 'Wilhelmshaven', 'Claudio', 'Nelali', 'San Andres Villa Seca', 'North Dundas', 'Kilminnal', 'Alamogordo', 'Antsakabary', 'Ibicarai', 'Eral', 'Zdolbuniv', 'Drochia', 'Saikaicho-kobago', 'Coroneo', 'Gabasumdo', 'River Forest', 'Ecatepec', 'Bekopaka', 'Fort McMurray', 'Ban Mae Tuen', 'Dizicheh', 'Biggin Hill', 'Lower Bicutan', 'Tunceli', 'Kurim', 'Curiti', 'Seminole', 'Tori-Bossito', 'Lake Havasu City', 'San Benito Abad', "'Ain el Bell", 'Bocsa', 'Batuco', 'Ughara', 'Kakdwip', 'Nallajerla', 'Lumbang', 'Sarbogard', 'Bowral', 'Kristinehamn', 'Meiwa', 'Tetela del Volcan', 'Rupenaguntla', 'Palera', 'Attleboro', 'Vitomarci', 'Guraahai', 'Henley on Thames', 'Stanford le Hope', 'Cazones de Herrera', 'Middleborough', 'Port Perry', 'Saryaghash', 'Tay', 'Marlow', "Granarolo del l'Emilia", 'Whitby', 'Pahou', 'Santa Maria di Sala', 'Rani Sawargaon', 'Moore', 'Tizimin', 'Gabane', 'Lichtenau', 'Wetter (Ruhr)', 'Quva', 'Ipaucu', 'Tahla', 'Enschede', 'Padinjaremuri', 'Oued Essalem', 'Sanatikri', 'Razanj', 'Ban Bang Lamung', 'Camberley', 'Avignon', 'Sittingbourne', 'Maduraivayal', 'Onate', 'Arboga', 'Duluth', 'Iluppur', 'Tehachapi', 'Tijucas', 'Bahadarpur', 'Bugugoucun', 'Bayi', 'Nakuru', 'Fontenay-sous-Bois', 'Tachikawa', 'Amroha', 'Boo', 'Fochville', 'Idak', 'Campbellsville', 'Limonade', 'Lucea', 'Tetouan', 'Sesori', 'Criciuma', 'Bedford', 'Osvaldo Cruz', 'Karacoban', 'Rafael Castillo', 'Faranah', 'Paina', 'Santa Maria del Tule', 'Puno', 'Cove', 'Sanankoroba', 'Nowogard', 'Pallipattu', 'Bhalwal', 'Torre Annunziata', 'Gavinivaripalem', 'Harike', 'Kameyama', 'Amala', 'Struthers', 'Ilindu Kothi', 'Rio Grande', 'Coral Gables', 'San Francisco Libre', 'Esteban Echeverria', 'Than', 'Bere', 'Villa San Jose', 'Gaurihar Khaliqnagar', 'Yonabaru', 'Kretinga', 'Banolas', 'Kalocsa', 'Atlautla', 'Al Muzayrib', 'Tsitondroina', 'Picture Rocks', 'Ostbevern', 'Purwa Utar', 'Pontchateau', 'Bandiagara', 'Midar', 'Abasingammedda', 'Chinnamandem', 'Zhydachiv', 'Fayzobod', 'Bad Bevensen', 'Nedumpura', 'Azua', 'Earley', 'Sao Sebastiao do Maranhao', 'Nossa Senhora do Livramento', 'Desaguadero', 'The Village', 'Richfield', 'Medway', 'Panglao', 'Erraguntla', 'Tapilula', 'South Park', 'Shovot', 'Samaca', 'Fehrbellin', 'Pouytenga', 'Wenping', 'Mendefera', 'Matanao', 'Bataguacu', 'Kljuc', 'Saint-Malo', 'Hiriyur', 'Copan', 'Comodoro', 'Ampasimpotsy-Gara', 'Issum', 'Thundersley', 'Nandavaram', 'Ivisan', 'Uvinza', 'Bituruna', 'Salzhemmendorf', 'Oguchi', 'Maubin', 'Pullman', 'Lower Gwynedd', 'Kidal', 'Ghariyah al Gharbiyah', 'Sagarpur', 'Cottage Lake', 'Macon', 'Siayan', 'Fatick', 'Hinda', 'Betio', 'Ramos Arizpe', 'Mbuyapey', 'Tirana', 'Uzyn', 'Jolo', 'Knoxville', 'Durant', 'Khmelnytskyi', 'Mong Duong', 'Mujui dos Campos', 'Hendaye', 'Despatch', 'Borne', 'Goirle', 'Petnjica', 'Barda', 'Feliz', 'Ut Bulag', 'Fasano', 'Gtarna', 'Greensboro', 'Boskovice', 'Barrafranca', 'Estero', 'Osmangazi', 'Demre', 'Manjil', 'Temascaltepec de Gonzalez', 'Mendota Heights', 'Nalgora', 'Gwangju', 'Qitai', 'Zakhu', 'Krishnarajpet', 'Hayes', 'Vitthalapuram', 'Burla', 'Salay', 'Skuodas', 'Nagano', 'Dambal', 'Sarauni Kalan', 'Bocaiuva', 'Chimbas', 'South Pasadena', 'Dar El Kebdani', 'Matanzas', 'Jarocin', 'Sarioglan', 'Ibate', 'Kisenzi', 'Shibirghan', 'Viseu de Sus', 'Majhariya Sheikh', 'Qulicun', 'Aswan', 'Kowloon City', 'Yverdon-les-Bains', 'Sera', 'Mililani Mauka', 'Acworth', 'Tibubeneng', 'Saijo', 'Wetzikon', 'Americo Brasiliense', 'Nova Cruz', 'Pantao-Ragat', 'Chichibu', 'Kyankwanzi', 'Jabalya', 'Caparica', 'Rewa', 'Castiglione delle Stiviere', 'Demir Kapija', 'Ramat Gan', 'Porto-Novo', 'River Road', 'Canarana', 'Penagam', 'Maldah', 'Rapho', 'Vilvoorde', 'Gauting', 'Aschaffenburg', 'Samux', 'Ekamba', 'Kepsut', 'Esfahan', 'Andramy', 'Sarezzo', 'Miracatu', 'Kramatorsk', 'Daping', 'Tunuyan', 'Bonyhad', 'Sao Joao da Barra', 'Kidsgrove', 'San Jeronimo', 'Chinnachauku', 'Abai', 'Kaipram', 'Paleng', 'Devarshola', 'Utraula', 'Meadville', 'Satupa`itea', 'Pacatuba', 'Saint-Georges', 'Remigio', 'Puerto Suarez', 'Arden-Arcade', 'Kankanalapalle', 'Mankada', 'Willow Grove', 'Tamaki', 'Pijnacker', 'Naryai ka Puri', 'Al Mazyunah', 'Bharhopur', 'Ekero', 'Deh', 'El Jem', 'Curacavi', 'Ladue', 'Barki Ballia', 'Yecapixtla', 'Petaluma', 'Bradley Gardens', 'Caapora', 'Dinklage', 'Karad', 'Niuchangqiao', 'Mahalingpur', 'Walcz', 'Mannamturuttu', 'Les Clayes-sous-Bois', 'Garhara', 'Mumaradikop', 'Davutlar', 'Haripur', 'Sahsaul', 'Pachora', 'Testour', 'Ratchaburi', 'Marovoay', 'Valente', 'Pouso Alegre', 'Chinnavadampatti', 'Mariana', 'Unjha', 'Keighley', 'Detva', 'Allouez', 'Bamako', 'Adrasmon', 'Ramannapeta', 'Fort Lauderdale', 'Webuye', 'Peligros', 'Leeton', 'Bollene', 'Lianjiang', 'Antsirabe Afovoany', 'Ada', 'Tixkokob', 'Kamagaya', 'Cassa de la Selva', 'Takeocho-takeo', 'Butig', 'Algiers', 'Straelen', 'Mechelen-aan-de-Maas', 'Esmeralda', 'Nuevo Casas Grandes', 'Houilles', 'Itano', 'San Javier', 'Emiliano Zapata', 'Seattle', 'Narino', 'Tiruvannamalai', 'Clausthal-Zellerfeld', 'Asola', 'Antonina', 'Dun Dealgan', 'Rifle', 'Maharajgani', 'Las Cruces', 'Mossel Bay', 'Tinglayan', 'Arapgir', 'Locogahoue', 'White House', 'Havana', 'Zhetisay', 'Navinipatti', 'Duptiair', 'Abcoude', 'Capinopolis', 'Arucas', 'Nova Lima', 'Cunhinga', 'Haftkel', 'Andacollo', 'Kawachinagano', 'Mountlake Terrace', 'Kalynivka', 'Perintalmanna', 'Rio Real', 'Cottingham', 'Bas Goynuk', 'Jianshe', 'Wermelskirchen', 'Hariharpara', 'Jucuapa', 'Choa Saidan Shah', 'Agaram', 'Carmen', 'Anguo', 'Challakere', 'Rahimpur', 'Pitanga', 'Zumarraga', 'Gohna', 'Coyuca de Catalan', 'Matinhos', 'Torun', 'Taluqan', 'Aurad Shahjahani', 'Persembe', 'Bochil', 'Nadvirna', 'Niagadina', 'Gubeng', 'Sitalkuchi', 'Dashtobod', 'Lascano', 'Bergisch Gladbach', 'Dumalag', 'Wahga', 'Guarambare', 'Raniyah', 'Maddur', 'Buba', 'Mobarakeh', 'Ouaklim Oukider', 'Ito', 'Lonar', 'Okhargara', 'Esil', 'Devrek', 'Pefka', 'Chamonix-Mont-Blanc', 'Bad Bergzabern', 'Las Condes', 'Ptolemaida', 'Helena-West Helena', 'Chimboy Shahri', 'Franconia', 'Szentes', 'San Vito', 'Panasapadu', 'Ogden', 'Harpalpur', 'Road Town', 'Montague', 'Issoire', 'Bagac', 'Shuichecun', 'Aylesbury', 'Jijiga', 'Dettingen an der Erms', 'Thakraha', 'Worth am Rhein', 'Moissy-Cramayel', 'Possneck', 'Punjai Turaiyampalaiyam', 'Ustka', 'Velair', 'As Sukhnah', 'Carlisle', 'Alausi', 'Chantal', 'Ouro Fino', 'Korsun-Shevchenkivskyi', 'North Andover', 'Melekeok', 'Mbake', 'San Vito dei Normanni', 'Cheranallur', 'Whitewater', 'Port Moresby', 'Bryans Road', 'Vegachi', 'Yolombo', 'Kewanee', 'Mingjian', 'Thonotosassa', 'Isapur', 'Kostrzyn nad Odra', 'Friesoythe', 'Lalam', 'Maruturu', 'Pudu', 'Zahed Shahr', 'Dungu', 'Manica', 'Draa el Mizan', 'Tolmezzo', 'Farsley', 'Agame', 'Pachauth', 'Sittard', 'Mableton', 'Guaratingueta', 'Lucenec', 'Catchiungo', 'Pitalito', 'Bensville', 'Shimohata', 'Zawiat Moulay Brahim', 'Anilao', 'Eloi Mendes', 'Erlanger', 'Minamiaso', 'Diffa', 'Shark', 'Osterburg', 'Barwell', 'Ambalanur', 'Sigaboy', 'Iretama', 'Alucra', 'Sivandipuram', 'Opalenica', 'Fray Bentos', 'Kottur', 'Yasugicho', 'Anacortes', 'Sangola', 'Vanimel', 'Guatape', 'Zorbig', 'Pitogo', 'Busayra', 'Puerto Carreno', 'Nandigama', 'Ampasimanolotra', "Ma'ai", 'Mapiri', 'Serra', 'Santa Iria da Azoia', 'Pyeongtaek', 'Hartford', 'Hokuei', 'Caldicot', 'Tucacas', 'Santiago Texacuangos', 'Turvo', 'Torredonjimeno', 'Fontanafredda', 'Rhede', 'Agri', 'Mandawa', 'Sibate', 'Tutrakan', 'Dinga', 'San Andres Cholula', 'Sarpsborg', 'Palashi', 'Attili', 'Pilas', 'Libonik', 'Tulum', 'Tomarza', 'Pato Branco', 'Currumbin', 'Fourmies', 'Caxias', 'Okondja', 'Cantu', 'Jeremoabo', 'Ciudad General Escobedo', 'Amberomanga', 'Magdiwang', 'Baardheere', 'Ubala', 'Chakwai', 'Uibai', 'Achuapa', 'Nastola', 'Partapnagar', 'Naujan', 'Letchworth', 'Calceta', 'Jafarabad', 'Mamnur', 'Merthyr Tudful', 'Sept-Iles', 'Bekalta', 'Cambrai', 'Rokiskis', 'Pamukova', 'Bad Laasphe', 'Harper', 'Registro', 'Fern Down', 'Perkiomen', 'Lushar', 'Lautaro', 'Bhainsahi', 'Welling', 'Imaricho-ko', 'Caerphilly', 'Cunda dia Baze', 'Monte Quemado', 'Khipro', 'Reriutaba', 'DeBary', 'Talapalli', 'Taverny', 'Jarjanaz', 'Targu-Mures', 'Bhasawar', 'Siverek', 'Grenade', 'Sinincay', 'Soledade', 'Chattogram', 'Srikurmam', 'Daitocho', 'Rombas', 'Surabaya', 'Le Mans', 'Varzea Alegre', 'Jonuta', 'Batavia', 'Pulimakkal', 'Giaveno', 'Sarangpur', 'Septemes-les-Vallons', 'Dolynska', 'Targovishte', 'Aldridge', 'Abilene', 'Santa Maria Jacatepec', 'Khash', 'Reni', 'Denville', 'Caracal', 'Puebla', 'Paravakkottai', 'Great Bookham', 'Bodo', 'Mountougoula', 'Mogi Guacu', 'Yanagawamachi-saiwaicho', 'Datu Odin Sinsuat', 'Morbach', 'Chincholi', 'Cangzhou', 'Pichilemu', 'Saravan', 'Sainte-Adele', 'Rapar', 'Langford Station', 'Porthcawl', 'Hilvarenbeek', 'Jalarpet', 'Bellview', 'Initao', 'Antaretra', 'Iriona', 'Peonga', 'Vadavalli', 'Port Talbot', "Arbi'a Tighadwiyn", 'Ciudad Vieja', 'Newtown', 'Sarasota', 'Rohtak', "'s-Hertogenbosch", 'Ras Baalbek', 'Harinakunda', 'Calahorra', 'Beitbridge', 'Las Tablas', 'Zhongli', 'Dauin', 'Saraqib', 'Karranah', 'Solaro', 'Dehra Dun', 'Riolandia', 'Wai', 'Golubovci', 'Lomza', 'Nkurenkuru', 'Oudenburg', 'North Laurel', 'Ammi Moussa', 'Puruliya', 'Ayungon', 'Mailavaram', 'Ban Dong Mada', 'San Sebastian', 'Le Petit-Couronne', 'Bani Walid', 'Molins de Rey', 'Bekoratsaka', 'Luperon', 'Abakaliki', 'Vila Nova de Cerveira', 'Rocca Priora', 'Raurkela', 'Romny', 'Santa Marcela', 'Chinhoyi', 'Tanki Leendert', 'Dej', 'Berber', 'Perryton', 'Haarlem', 'Xique-Xique', 'Mirdoddi', 'El Rodeo', 'Truskavets', 'Zugdidi', 'Dharhwa', 'Esslingen', 'Kakching Khunou', 'Campos Sales', 'Saundhonwali', 'Hemei', 'Pindorama', 'Cospicua', 'Sevur', 'Fiume Veneto', 'South Hill', 'Charipara', 'Borio', 'Mount Barker', 'Soliera', 'San Clemente', 'Polonne', 'Barreiras', 'Cabrero', 'Sainte-Therese', 'Merredin', 'Doume', 'Guaynabo', 'Mohyliv-Podilskyi', 'Santiago de Chuco', 'Auriflama', 'Sao Jeronimo da Serra', 'Rowland Heights', 'Telkathu', 'Zhentang', 'Ifrane', 'Chailaha', 'Acoyapa', 'Grunberg', 'Illescas', 'Jacare', 'Grabels', 'Anshan', 'Caloundra', 'Ruyigi', 'Kalleribhagam', 'Rochester', 'Ban Rawai', 'Dera', 'Takanezawa', 'Palmer Ranch', 'Rampatti', 'Heyunkeng', 'Sittwe', 'Hiep Hoa', 'Gorlitz', 'Yanggezhuang', 'Ogawa', 'Zafarwal', 'Cachoeirinha', 'Ra`ananna', 'Raymore', 'Deneysville', 'Nurpur', 'Nosibe', 'Minamishiro', 'Northbrook', 'Colonia General Felipe Angeles', 'Acacias', 'G`uzor', 'Oberwil', 'Yanjiang', 'Iflissen', 'Vsetin', 'Nimes', 'Sangrampur', 'Germering', 'Coulsdon', 'Lebon Regis', 'Malaimachchampatti', 'Rarz', 'Clacton-on-Sea', 'Droylsden', 'Maroli', 'Xintian', 'Wangi', 'Blaine', 'Aliganj', 'Michendorf', 'Kalaikunda', 'Betigeri', 'Talipao', 'Bheja', 'Red Wing', 'Merrillville', 'Minamisatsuma', 'Ramsbottom', 'Catarina', 'Korb', 'Lingyuan', 'Rendon', 'Ambongo', 'Aliyabad', 'Millington', 'Brdovec', 'Gujar Khan', 'Heeze', 'Binondo', 'Aix-les-Bains', 'Civril', 'Aranyaprathet', 'Bargteheide', 'Ambodiangezoka', 'Talsint', 'Silvania', 'Jindayris', 'Pacuarito', 'Nuku`alofa', 'Imi Mokorn', 'Panjipara', 'Portales', 'Ksebia', 'Sodo', 'Hoppegarten', 'Amparo', 'Pirayu', 'Narat', 'Bagaura', 'Puerto Gaitan', 'Hoyerswerda', 'Beckley', 'Tsarazaza', 'Mari`', 'Panitan', 'Phoenixville', 'Kinkala', 'Pilachikare', 'Chellaston', 'Ranchuelo', 'Schuylkill', 'Can-Avid', 'Northam', 'Dedham', 'Bad Camberg', 'Oyten', 'Guaymas', 'Gaeta', 'Ankadinandriana', 'Holalagondi', 'North Guwahati', 'Saint-Gaudens', 'Rancho San Diego', 'Vicar', 'San Pedro Pochutla', 'Uhingen', 'Policoro', 'Nabilatuk', 'Bauyrzhan Momyshuly', 'Mulakad', 'Kesan', 'Asosa', 'Dewangarh', 'Ash Shaykh Zuwayd', 'Nunihat', 'Itki Thakurgaon', 'Korsor', 'Cenovi', 'Aroali', 'Schweinfurt', 'Hirpardangal', 'Nadisal', 'El Amim', 'Zeewolde', 'Plaisance', 'Mokronog', 'Xiaojiangcun', 'Muhradah', 'Santa Rosa de Lima', 'Halvad', 'Welkenraedt', 'Kronberg', 'Lagoa Santa', 'Liutuancun', 'Iguig', 'Blue Island', 'Codru', 'Caramoran', 'Stuart', 'Marino', 'Tacheng', 'Bandar-e Gaz', 'San Leandro', 'Athi River', 'Leirvik', 'Wattrelos', "As Suwayda'", 'Singampunari', 'Velampalaiyam', 'Semarang', 'Garland', 'Coronel Oviedo', 'Waldkirch', 'Gusau', 'Campton Hills', 'Kalangala', 'Palafrugell', 'Esfarayen', 'Qazaly', 'Chelsea', 'Gokce', 'Harrisonville', 'Weare', 'Shiji', 'Bakharia', 'Soanindrariny', 'Dhanbad', 'Jayaque', 'Tanque Verde', 'Northenden', 'Matias Cardoso', 'St. Charles', 'Gaomi', 'Moreira Sales', 'Bharatpur', 'Chauki', 'Gokcebey', 'Kishtwar', 'Hambantota', 'Ez Zahra', 'Ain Beida', 'Bauru', 'Chickasha', 'Simpsonville', 'Taman Johor Jaya', 'Skelleftea', 'Bellaa', 'Narlidere', 'Lanciano', 'Bellmawr', 'Tataltepec de Valdes', 'Molo', 'Kiskunfelegyhaza', 'Kaisiadorys', 'San Dionisio', 'Corpus Christi', 'Tutin', 'Betanty', 'Ambatomanjaka', 'Koprukoy', 'Matuga', 'South Normanton', 'Kalamata', 'Trieste', 'Balikesir', 'Magsaysay', 'Fuji', 'Dorado', 'Teyateyaneng', 'Foiano della Chiana', 'Borba', 'Gaborone', 'Puerto Armuelles', 'Edgware', 'Ahirauliya', 'Felpham', 'Amboaboa', 'Paso del Macho', 'Raymond Terrace', 'Kambam', 'Los Arabos', 'Johnston', 'Placentia', 'Kharkiv', 'Sault Ste. Marie', 'East Glenville', 'Tamza', 'Akalgarh', 'Nwoya', 'Ordubad', 'Bulan', 'Musashimurayama', 'Lipa City', 'Tirupporur', 'Kannudaiyampatti', 'Sendafa', 'Angelholm', 'Carneiros', 'Vellikulangara', 'Andanappettai', 'Mira', 'Wangqing', 'Asahikawa', 'Stryi', 'Kittery', 'Rosso', 'Ramsgate', 'Diabigue', 'Anjarkandi', 'Santiago Tulantepec', 'Kaul', 'Pruszkow', 'Karlskrona', 'Porto-Vecchio', 'Pasadena Hills', 'Ban Bung Kha', 'Wilmslow', 'San Juan de Limay', 'Puran Bigha', 'Paramagudi', 'Xima', 'Khashuri', 'San Lorenzo della Costa', 'Medikunda', 'Belpara', 'Mountain Home', 'Burnaby', 'Sapouy', 'Rhar el Melah', 'Pessac', 'Grevenmacher', 'Santo Antonio do Monte', 'Gros Islet', 'Tarabha', 'Wimauma', 'Kumage', 'Sivamalai', 'Salmas', 'Bad Salzdetfurth', 'Taohuajiang', 'Tsirang', 'Dera Ismail Khan', 'Westbrook', "'Ayn Bni Mathar", 'Veppattur', 'Nettappakkam', 'Saint-Herblain', 'Cassano delle Murge', 'San Jose Acatempa', 'Sripur', 'Rae Bareli', 'Dazaifu', 'Benguema', 'Dobni Para', 'Hajipur', 'Villars-sur-Glane', 'Tejen', 'Itabaiana', 'Exu', 'Ninove', 'Riposto', 'Baraki Barak', 'Surat Thani', 'Setlagode', 'Rajhanpur', 'Wancheng', 'Bom Conselho', 'Karkkila', 'Arden Hills', 'Jasim', 'Tangainony', 'Ganguru', 'Tamiami', 'Anjiajia', 'Angren', 'Balimbing', 'Vallejo', 'Sabae', 'Podgorze', 'Rio Pardo', 'Sarzeau', 'Koniz', 'El Segundo', 'Werther', 'Phalia', 'Sidi Abd el Moumene', 'Rajanagaram', 'Riachao das Neves', 'Betrandraka', 'Graben-Neudorf', "Ighrem n'Ougdal", 'Kourimat', 'Reno', 'Verwood', 'Naspur', 'Douar El Mellaliyine', 'Pichanal', 'Octeville', 'Hamadan', 'Rackeve', 'Khesht', 'Anew', 'South Fulton', 'Cabo Verde', 'Cooma', 'Unnao', 'Kandanati', 'Sainkhera', 'Hasbrouck Heights', 'Antananarivo', 'Puca Urco', 'Hebron', 'Qal`ah-ye Now', 'Bara Malehra', 'Vinales', 'Kotturu', 'Ainaro', 'Kawambwa', 'Santiago Ixcuintla', 'Donzdorf', 'Thong Pha Phum', 'West Carrollton', 'Imbau', 'Shatrana', 'Nalerigu', 'Namli', 'Cumbernauld', 'Al Musayfirah', 'Lulea', 'Jamaica', 'Vijayapati', 'Monte Porzio Catone', 'Carshalton', 'Gumia', 'Arvand Kenar', 'Karaund', 'Minamiaizu', 'Gronau', 'Jinxing', 'Sixaola', 'Oulad Hammou', 'Trongsa', 'Khenichet-sur Ouerrha', 'Naifaru', "K'ebri Dehar", 'Namagiripettai', 'East London', 'Neibu', 'Himi', 'Cameron Highlands', 'Amfilochia', 'Saranambana', 'Qingshan', 'Ishikawa', 'Monkseaton', "Al Jahra'", 'Simmerath', "At Ta'if", 'Bama', 'Meknes', 'Tiahounkossi', 'Tucuma', 'Lower Salford', 'Punto Fijo', 'East Greenbush', 'Ambodimanary', 'Nilambur', 'Jalalabad', 'Qingnian', 'Barberton', 'Atari', 'Jilikul', 'Cranbrook', 'Lobogo', 'Xiegang', 'La Habra', 'Alingsas', 'Onverwacht', 'Pimpri-Chinchwad', 'Yatangcun', 'Coueron', 'Yaren', 'Kazincbarcika', 'Tanta', 'Rasaunk', 'Campoalegre', 'Jurua', 'Kallur', 'Cekerek', 'Tarwara', 'Burketown', 'South St. Paul', 'Corgao', 'An Thanh B', 'Borgholzhausen', 'Tahlequah', 'Vierzon', 'Qagan Us', 'Baravat', 'Vrsac', "Za'roura", 'Bennington', 'Talamba', 'Nandyal', 'Laiwu', 'Tacuarembo', 'Cuncolim', 'Xiaping', 'Landazuri', 'Cuautla', 'Pires Ferreira', 'Banaz', 'Cote-Saint-Luc', 'Manacor', 'Tantangan', 'Emmaus', 'Sonora', 'Pontiac', 'Chinandega', 'Armutlu', 'Besni', 'Qingquan', 'Ceyu', 'Majuro', 'Herxheim', 'Bluefield', 'Oerlinghausen', 'Aigues-Mortes', 'Tosya', "L'Ile-Saint-Denis", 'Ban Ngio Ngam', 'Fortim', 'Rio Gallegos', 'Schleiz', 'Zoudjame', 'Ondangwa', 'Ampasimanjeva', 'Altamont', 'Wittelsheim', 'Jayapura', 'Arni ka Khera', 'Commack', 'Morombe', 'Akdagmadeni', 'Reidsville', 'Chinnakkampalaiyam', 'Girau do Ponciano', 'Ardrossan', 'Cherutana Tekku', 'Kondur', 'Gwelej', 'Traiguen', 'Sannois', 'Zarach', 'Lhuentse', 'Redondo Beach', 'Maynard', 'Carmopolis de Minas', 'Chino Hills', 'Sohta', 'Velddrif', 'Tarifa', 'Wheatfield', 'Lasko', 'Ho Chi Minh City', 'Osakarovka', 'Sirajganj', 'Waidhofen an der Ybbs', 'Lieusaint', 'Worsborough', 'Mandoto', 'Kochkor', 'Hindang', 'Wolmirstedt', 'Angri', 'Bagnolet', 'Hatogaya-honcho', 'Panelas', 'Campestre', 'Boras', 'Danderesso', 'Vila Verde', 'Amborondra', 'Itapemirim', 'Muthutala', 'Qeshm', 'Halawa', 'Dosso', 'Villazon', 'Breukelen', 'Pandi', 'Audenge', 'Ngou', 'Urucara', 'Baham', 'Mudukulattur', 'Tubarao', "Dek'emhare", 'Sidi Yakoub', 'Bandhi', 'Karsiyaka', 'Shibin al Qanatir', 'Ajas', 'Dunedin', 'Asturias', 'California', 'Catania', 'Vinh', 'Celano', 'Tierra Colorada', 'Dosquebradas', 'Dunblane', 'Brejao', 'Toguere-Koumbe', 'Lianshan', 'Roghun', 'Gokdepe', 'Aul', 'Gotvand', 'Harnes', 'Choloma', 'Mamanguape', 'Miguelturra', 'Karlovo', 'Goole', 'Emir Abdelkader', 'Sironko', 'Bokaro Steel City', 'Altos del Rosario', 'Saint-Jean-de-Vedas', 'Hyosha', 'Tamiang Layang', 'Scartho', 'Oltu', 'Sillanwali', 'Tadangam', 'Loures', 'Kalecik', 'Sao Jose do Egito', 'Morelia', 'Mizan Teferi', 'Dar Bel Hamri', 'Ingeniero Guillermo N. Juarez', 'Vendrell', 'Wembley', 'Fritissa', 'Narasingapuram', 'Hatvan', 'Gharyala', 'Cachoeira Paulista', 'Pata Putrela', 'Batang', 'Burnham-on-Sea', 'Stuarts Draft', 'Balzan', 'Pollachi', 'Rajpura', 'Drachten', 'Agadallanka', 'Habikino', 'Mabole', 'Katipunan', 'Pachhapur', 'Banaso', 'Grinnell', 'Euskirchen', 'Ahar', 'Iranshahr', 'Mostardas', 'Shimotsucho-kominami', 'Maharagama', 'Owego', 'Madan', 'Tomaszow Lubelski', 'Oisterwijk', 'Sao Francisco', 'San Martin Jilotepeque', 'Iperu', 'Balaoan', 'Dubno', 'Tall `Afar', 'Kaufungen', 'Sohwal', 'Guamuchil', 'Fosses', 'Casillas', 'Ban Noen Phoem', 'Cuellar', 'Aurelino Leal', 'Zeulenroda', 'Porto Tolle', 'Lower Hutt', 'Castelnau-le-Lez', 'Matabhanga', 'Ban Ko', 'Balsa Nova', 'Brentwood', 'Brumath', 'Luban', 'Mainaguri', 'Buwenge', 'Qabqa', 'Al `Aziziyah', 'Frutillar Alto', 'Vatluru', 'Casale', 'Ambararata', 'Studenicani', 'Lanskroun', 'Sveti Ivan Zelina', 'Tokamachi', 'Bishopstoke', 'Broxburn', 'Timizart', 'Maroharatra', 'Isselburg', 'Vandiyur', 'Los Llanos de Aridane', 'Winder', 'Banga', 'Trophy Club', 'Olmsted Falls', 'Zanesville', 'Zhudong', 'Bapatla', 'Peringanad', 'Cedar Falls', 'Leanja', 'Eydhafushi', 'Paoua', 'Andribavontsona', 'Pergamino', 'Kouri', 'Bellmore', 'Bondeno', 'Comines', 'Volendam', 'Pio IX', 'Cotija de la Paz', 'Koratla', 'Tallbisah', 'Hakubacho', 'Helsingborg', 'Braila', 'Samalpur', 'Afourar', 'Briceno', 'Kabanga', 'Ambohimahamasina', 'Quirima', 'Hakone', 'Nova Londrina', 'Nuneaton', 'Plaisir', 'Kirikhan', 'Eccles', 'West Little River', 'Nideggen', 'Gyor', 'Parchim', 'Hagere Hiywet', 'Mirinzal', 'Lupi Viejo', 'Buckeye', 'Ravutulapudi', 'Landupdih', 'Santa Barbara de Pinto', 'Haysville', 'Dimbokro', 'Toulouse', 'Mangdongshan', 'Vladicin Han', 'Soma', 'Springfield', 'Wedel', 'Dokuchaievsk', 'Thai Binh', 'Mbulu', 'Muconda', 'Pearl', 'Pleszew', 'Qualicum Beach', 'Harihans', 'Yangtangxu', 'Boa Esperanca do Sul', 'Saravia', 'Wepangandla', 'Morristown', 'Karaikandi', 'Elverum', 'Mexicaltzingo', 'Kamdoli', 'Tayabas', 'Hillsborough', 'Joao Monlevade', 'Syracuse', 'Elliot Lake', 'Ciudad Guadalupe Victoria', 'Shampur', 'Beverstedt', 'Miajadas', 'Erode', 'Sao Pedro do Ivai', 'Volkermarkt', 'Lagoa de Itaenga', 'Kokstad', 'Jinshui', 'Miesbach', 'Lakeland South', 'Dong Hoi', 'Illingen', 'San Dona di Piave', 'Liantang', 'Jujutla', 'Ad Darb', 'Osicala', 'Huehuetla', 'Yambol', 'Muthallath al Azraq', 'Asago', 'Periya Soragai', 'Anar', 'Velika Plana', 'Becej', 'Hengnan', 'Brad', 'Tudiyalur', 'Skhour Rehamna', 'Seonar', 'Muktagacha', 'Saffron Walden', 'Cilacap', 'Ankiliabo', 'Ramon Magsaysay', 'Sarenja', 'Yamanobe', 'Marcinelle', 'Abergele', 'Saraunja', 'Arankhola', 'Calexico', 'Dalaguete', 'Owatonna', 'Jalpa de Mendez', 'Villalbilla', 'Frameries', 'Lakewood Park', 'Erlun', 'Thandwe', 'Mons-en-Baroeul', 'Netishyn', 'Mindelo', 'Repelon', 'Sidon', 'Ouinhri', 'Miramar Beach', 'Cam Ranh', 'Certaldo', 'Vedurupavaluru', 'Alginet', 'Egil', 'Ruteng', 'Chesapeake Ranch Estates', 'Sao Jose do Jacuipe', 'Perafita', 'Barnsley', 'Hammond', 'Simav', 'South Euclid', 'Tiquipaya', 'Moorpark', 'Chernivtsi', 'Joigny', 'Cary', 'Pennadam', 'Yongping', 'Sagnay', 'Lalibela', 'Tazoult-Lambese', 'Orta Nova', 'Dumangas', 'Trujillo', 'Borovnica', 'Buttayagudem', 'Kegalle', 'Oshikango', 'San Juan de Arama', 'Hathiaundha', 'Eirunepe', 'Pakka Kalan', 'Kita', 'Msoga', 'Solothurn', 'Welkom', 'La Quinta', 'Stamford', 'Serpa', 'Cadelbosco di Sopra', "'Ain Tolba", 'Chaiwu', 'Kundiawa', 'Clorinda', 'Liushuquan', 'Pingquan', 'Ouesso', 'Jiaganj', 'Ibiraci', 'Varaklani', 'Whakatane', 'Brzesko', 'Fort Oglethorpe', 'Pervomaisk', 'Guiuan', 'Hadiaya', 'El Zulia', 'Mahates', 'Rio Segundo', 'Patos', 'Summerland', 'Zalantun', 'Khundawandpur', 'Soledad de Graciano Sanchez', 'Xocavand', 'Denizli', 'Mangai', 'Chedaopo', 'Rathfarnham', 'Sibanicu', 'Oxnard', 'Surubim', 'Bergama', 'Keta', 'Ochiai', 'Yellanda', 'Wilsonville', 'Iguidiy', 'Cordoba', 'Tsarahonenana', 'Flint', 'Valentim Gentil', 'Gumushane', 'Cherbourg', 'Hosir', 'Makariv', 'Pikine', 'Acayucan', 'Trebisov', 'Bagong Pag-Asa', 'Barr Elias', 'Santa Coloma de Farnes', 'Simao', 'Ballarat', 'Andoharanomaitso', 'Bethlehem', 'Mevani', 'Sao Miguel dos Campos', 'Heinsberg', 'Al Kufah', 'Pont Sonde', 'Turkmenbasy', 'Goldasht', 'Hamar', 'Pastores', 'Veruela', "Nong'an", 'Senica', 'Steinau an der Strasse', 'Wibsey', 'Long Khanh', 'Lubny', 'Aalsmeer', 'Citong', 'Charxin', 'Soyapango', 'Avanos', 'Milas', 'Padmanabhapuram', 'Les Palmes', 'Rangoon', 'Guichen', 'Vadakkum', 'Holon', 'Faraskur', 'Cifteler', 'Malente', 'Salgado', 'Udalguri', 'Broadwater', 'Olot', 'Kirkintilloch', 'Amares', 'West Drayton', 'Pliezhausen', 'Manicore', 'Kanniparamba', 'Larisa', 'Stans', 'Entraigues-sur-la-Sorgue', 'Bhelsi', 'Nuwara Eliya', 'Montignoso', 'Yavoriv', 'Thillangeri', 'Gajendragarh', 'Narva', 'Chirak', 'Forestville', 'Waitangi', 'Yicheng', 'Machagai', 'Rasivarai Tottam', 'Poienile de sub Munte', 'Dogubayazit', 'Abhia', 'Etampes', 'Puerto San Martin', 'Panama City Beach', 'Mittahalli', 'Rudravaram', 'Nassau', 'Bladel', 'Bauan', 'Bryne', 'Mandi Bamora', 'Schenefeld', 'Gosaingaon', 'Nallur', 'Soanierana', 'Akhnur', 'Pedda Adsarlapalli', 'Waalwijk', 'Sekondi', 'Kelo', 'Squinzano', 'Itikalapalle', 'Tattamangalam', 'Segue', 'Jaltocan', 'Coram', 'Entre Rios', 'Hafizabad', 'Konarak', 'Betsizaraina', 'Ulanhot', 'Augsburg', 'Modica', 'Nova Olinda', 'Eastvale', 'Lechang', 'Wanaka', 'Ormesby', 'Malegaon', 'Rincon de Romos', 'Bihta', 'Mukilteo', 'Codajas', 'Hindalgi', 'Radviliskis', 'Vasai-Virar', 'Salor', 'Eraura', 'Labrea', 'Nanuet', 'Maryborough', 'Bramhabarada', 'Rubeho', 'Scherpenzeel', 'Osan', 'Obando', 'San Antonio', 'Mamarappatti', 'Padangpanjang', 'Wau', 'Thiene', 'Avocado Heights', 'Lake Forest Park', 'Sallaumines', 'Nova Venecia', 'Varadero', 'Amalner', 'Debaltseve', 'Levoca', 'Beachwood', "Motta Sant'Anastasia", 'Kreuztal', 'Kirundo', 'Ingeniero White', 'Tasucu', 'Spelle', 'Radomsko', 'Baleyara', 'Wencheng', 'Ambohijanahary', 'Oullins', 'Ain Oulmene', 'Sakete', 'Alattur', 'Sobradinho', 'Hoki', 'Calgary', 'Praia', 'Atbasar', 'Kisslegg', 'Chapaev', 'Chipindo', 'Livramento de Nossa Senhora', 'Odemira', 'Molango', 'Mortugaba', 'Kuttanallur', 'Dahegam', 'Gardena', 'Round Lake', 'San Nicolas', 'Kinhalu', 'Osorio', 'Alanganallur', 'Santa Cruz del Norte', 'La Grange', 'Habra', 'Ciamis', 'Zhuji', 'Maduru', 'Manyas', "'Ain el Arbaa", 'Alampalaiyam', 'Ribera', 'Qorashina', 'Penafiel', 'Tiraspol', 'Pitou', 'Pallikondai', 'Kabuga', 'Westerland', 'Luckau', 'Saddlebrooke', 'Bijeljina', 'Villepreux', 'Karianga', 'Tomar do Geru', 'Guaraniacu', 'Moravska Trebova', 'Sunrise Manor', 'King of Prussia', 'Lemon Hill', 'Condeuba', 'Sikonge', 'Metlili Chaamba', 'Maplewood', 'Qasigiannguit', 'Banjar', 'Cucuta', 'Vazquez', 'Sour el Ghozlane', 'Oran', 'Long Thanh', 'Sao Domingos do Prata', 'Fungurume', 'Grado', 'Sezze', 'San Pedro Tapanatepec', 'Uvalde', 'Buenaventura', 'Kanhauli', 'Tadaoka-higashi', 'Svitlovodsk', 'Samut Songkhram', 'Scherpenheuvel', 'Filomeno Mata', 'Mallappulasseri', 'Royken', 'Westerville', 'Nioro', 'Devendranagar', 'Itabashi', 'Itapuranga', 'Chanteloup-les-Vignes', 'Santa Lucia di Piave', 'Satu Mare', 'Tuni', 'Chouafa', 'Sevilla La Nueva', 'Chaugain', 'Cuquio', 'Palm Coast', 'Antanimieva', 'Baltasar Brum', 'Harnaut', 'Lagoa do Carro', 'Arenys de Munt', 'Landecy', 'Tancitaro', 'Shanawan', 'Mangoli', 'Baharestan', 'Pia', 'Witney', 'Guillena', 'Namchi', 'Naryn', 'Ujhana', 'Itigi', 'Douar Oulad Sidi Moussa', 'Natuba', 'Hazel Park', 'Farias Brito', 'Corumba de Goias', 'Rhyl', 'Barkuhi', 'Piracuruca', 'Ranst', 'Koka', 'Yunshan', 'Jiran', 'Zanhuang', 'Grahamstown', 'Erandio', 'Podili', 'Neerpelt', 'Leczna', 'Richterich', 'Narsingdi', 'Arnhem', 'Vodil', 'Hakkari', 'Simbach am Inn', 'Panzos', 'Chichester', 'Driouch', 'Tezontepec', 'Palmeiras de Goias', 'Tizi-n-Bechar', 'Rosyth', 'Greenlawn', 'Mali', 'Sidi Chiker', 'Kwiha', 'Busaar', 'Gangammapeta', "Be'er Ya`aqov", 'Potangal', 'Murayama', 'Bang Bua Thong', 'Lerma', 'Santo Antonio do Taua', 'Fair Lakes', 'Creteil', 'Qigexingcun', 'Ymittos', 'Flemington', 'Northlake', 'Edlapadu', 'Angalakuduru Malepalle', 'Pila', 'Sinop', 'La Higuerita', 'Plymstock', 'Cateel', 'Augustow', 'Ianantsony', 'Bando', 'Sizhan', 'Uppalaguptam', 'Damu', 'Bordj Okhriss', 'Panjab', 'North Versailles', 'Adh Dhakhirah', 'Grove City', 'Le Poire-sur-Vie', 'Keshod', 'Corby', 'Inhassoro', 'Hinesville', 'Espartinas', 'Gien', 'Chennirkara', 'Time', 'Ziar nad Hronom', 'Ravels', 'Pakxan', 'Namin', 'Yaragol', 'Maimbung', 'Kominato', 'Bayghanin', 'South Bend', 'Manzanillo', 'Hartselle', 'Balarampuram', 'Cay', 'Estepa', 'Heliopolis', 'Analila', 'Korba', 'Gusinje', 'Lake Magdalene', 'Ambahikily', 'Rijen', 'Tuta', 'Jardinopolis', 'Cypress Gardens', 'Bijawar', 'Soeda', 'Buena Vista Tomatlan', 'Lalejin', 'Muro del Alcoy', 'Lincoln Park', 'Beecher', 'Tansandra', 'Ambohimanga Atsimo', 'Manjhi', 'Nahiyat al Karmah', 'Rainhill', 'Elizabethtown-Kitley', 'Palotina', 'Alfter', 'Meppayyur', 'Domoni', 'Abay', 'Nettetal', 'Paithan', 'Hafshejan', 'Dubliany', 'Balaka', 'Barauli', 'Kleinblittersdorf', 'Belpasso', 'Shopokov', 'Eugene', 'Navalyal', 'Cieszyn', 'Bom Retiro do Sul', 'Taylor', 'Perehinske', 'Montale', 'Attingal', 'Fresnes', 'Marofinaritra', 'Arumbakkam', 'Djebahia', 'Diai Koura', 'Candelaria Loxicha', 'Boituva', 'Broni', 'Orangetown', 'Pastos Bons', 'Derinkuyu', 'Stratford', 'Ocotlan de Morelos', 'Trbovlje', 'Kitui', 'Dhone', 'Mohdra', 'Zwijndrecht', 'Chocen', 'Hameenlinna', 'Londonderry', 'Ujre', 'Koko', 'Pomfret', 'Rosdorf', 'Monopoli', 'Lingig', 'Ravenna', 'Herenthout', 'Bovalino Marina', 'Wheaton', 'Almoradi', 'Estanzuelas', 'Sao Sebastiao do Passe', 'Pont-y-pwl', 'Puliyur', 'Lagoa da Confusao', 'Nighoj', 'Giannouli', 'Paivalike', 'Yalamakuru', 'Aplahoue', 'Kotabumi', 'Los Corrales de Buelna', 'Marumori', 'Sujnipur', 'Pleasant Valley', 'Buala', 'Downers Grove', 'Magallanes', 'Bayt al Faqih', 'Hualien', 'Edgewater Park', 'Pascani', 'Ukkayapalle', 'Cabrayil', 'Xizhou', 'Ravulapalem', 'Villefontaine', 'Al Fayyum', 'Ulukisla', 'Hargawan', 'Tummanatti', 'Pearl City', 'Mucuge', 'Selibe Phikwe', 'Itsukaichi', 'Seravezza', 'Gopalganj', 'Bodoco', 'Paderborn', 'Butare', 'Sandur', 'Zgornja Hajdina', 'Stilfontein', 'Xuan Trung', 'Alampur Gonpura', 'Thimiri', 'Buhriz', 'Ballston', 'Kotla Qasim Khan', 'Mannegudam', 'Marinka', 'Machakos', 'Mankoeng', 'Mirandola', 'Dinard', 'Komen', 'Highlands Ranch', 'Kanra', 'Boldaji', 'Kampenhout', 'Ardea', 'Long Branch', 'Katiola', 'Rancagua', 'Taima', 'Innsbrook', 'Fedosiia', 'Vohitrindry', 'Tres Pontas', 'Kharial', 'Belinta', 'Tsu', 'Zhucaoying', 'West Chicago', 'Exeter', 'Ovar', 'Fort Portal', 'Upper Merion', 'Tiruvarpu', 'Karmiel', 'Ivanec', 'Project Six', 'Luofeng', 'Muttayyapuram', 'Kawaminami', 'Bila Tserkva', 'Pfungstadt', 'Hebi', 'Kizugawa', 'Gumusova', 'Wellingborough', 'Shimencun', 'Ourense', 'Sollies-Pont', 'Jatani', 'Ushuaia', 'Drongen', 'Lagarto', 'Las Guaranas', 'Minas Novas', 'Ponnampatti', 'Hola Prystan', 'Cojutepeque', 'Randolph', 'Matsue', 'Adohoun', 'Divonne-les-Bains', 'Talsur', 'Auki', 'Obo', 'Spilamberto', 'Konodimini', 'Hanam', 'Gopavaram', 'Xambioa', 'Terra Rica', 'Pitea', 'Jussara', 'Ahuimanu', 'Hampstead', 'Bala Cynwyd', 'El Rosal', 'Labasa', 'Garner', 'Las Maravillas', 'Toowoomba', 'Kukshi', 'Buzau', 'Tahoua', 'Koboko', 'Carlopolis', 'Wildeshausen', 'Hershey', 'Landsmeer', 'Swarna', 'Wordsley', 'Gagarin Shahri', 'Bad Laer', 'Adigoppula', 'Kratovo', 'Desavilakku', 'Qalat', 'Senador Pompeu', 'Solonopole', 'Bainet', 'Mandurah', 'Kyaukpyu', 'Willistown', 'Pirri', 'Aweil', 'Akividu', 'Majdanpek', 'Qal`eh Tall', 'Garopaba', 'Sograha', 'Al Jizah', 'Shurobod', 'Warden', 'Danielskuil', 'Paidha', 'Vargem Alta', 'Chamestan', 'Kumasi', 'Kafr ash Shaykh', 'Chirundu', 'Arzachena', 'Dobrada', 'Arcot', 'Matlock', 'Usmanpur', 'Silute', 'Al Qunaytirah', 'Jablanica', 'Gaada', 'Tarazu', 'Lagoa do Ouro', 'Yuksekova', 'Mirfield', 'Kurmuk', 'Macia', 'Karuvakulam', 'Nice', 'Ignalina', 'East Franklin', 'Malhada', 'Imbituba', 'Kasumi', 'Tiangua', 'Leonforte', 'Enumulapalle', 'Pushpattur', 'Romford', 'El Talar de Pacheco', 'Osakasayama', 'Lanuza', 'Bordighera', 'Bennane', 'Japura', 'Yesilli', 'Marcos Paz', 'Hickam Housing', 'Ban Khamen', 'Shirguppi', 'Chemmaruthi', 'Woodlesford', 'Povazska Bystrica', 'Kotia', 'Gunri', 'Nawagarh', 'Judenburg', 'Hammam-Lif', 'Matina', 'Verkhnodniprovsk', 'San Lucas', 'Jacaraci', 'Glarus', 'Manoli', 'Monticello', 'Urziceni', 'Pryluky', 'Langhirano', 'Buriti Bravo', 'Skenderaj', 'Bari', 'Cho Moi', 'Kahla', 'Grojec', 'Berea', 'Wyandotte', 'Jaguapita', 'Modavandisatyamangalam', 'Danesfahan', 'Larache', 'North Potomac', 'Kharupatia', 'Kisangani', 'Nuoro', 'Janakkala', 'Bhatkhori', 'Mungaoli', 'Yalova', 'Qaratog', 'Yesilova', 'Tualatin', 'Sandviken', 'Bickenhill', 'Inwood', 'Meckenheim', 'Davidson', 'Kanhaipur', 'Longton', 'Sendurai', 'Paniem', 'Jujharpur', 'Sasolburg', 'Rambouillet', 'March', 'Ewing', 'Seogwipo', 'Huber Heights', 'Calabanga', 'Mallasamudram', 'Dharawat', 'Ngara', 'Pensacola', 'Tuzluca', 'Lauenburg', 'Kidangazhi', 'Ucu Seles', 'Enebakk', 'Nalua', 'Aubiere', 'Manfredonia', 'Nacunday', 'Street', 'Sao Bernardo do Campo', 'Sidhauli', 'Igarape-Acu', 'Teodoro Schmidt', 'Barra do Corda', 'Tangua', 'Yibin', 'Ozdere', 'Chak Habib', 'Medchal', 'Surab', 'Snyder', 'Teus', 'Oulad Slim', 'Ingurti', 'Manaure', 'Jaipur', 'Zlate Moravce', 'Castillejos', 'Kalakeri', 'Mahavelona', 'Iesi', 'Bedford Heights', 'Dinokana', 'Shimogamo', 'Mohammad Yar', 'Ban Kham Pom', 'Nannestad', 'Espiye', 'Jiji', 'Bulandshahr', 'Wetteren', 'Valattur', 'Hejian', 'Wete', 'Beverley', 'Marar', 'Olecko', 'Chalatenango', 'Dommasandra', 'Chilamatturu', 'Petit-Trou de Nippes', 'Ratnagiri', 'Crozet', 'Birganj', 'Dehiba', 'Vettweiss', 'Kucuk Dalyan', 'Nirmal', 'Simi Valley', 'Wels', 'Fredericton', 'Nykoping', 'Salgueiro', 'Naantali', 'Koliakkod', 'Cruz das Almas', 'Cardoso Moreira', 'Sao Paulo', 'Lugovoy', 'Sheffield Lake', 'Focsani', 'Saarijarvi', 'La Magdalena Chichicaspa', 'Clarkstown', 'Bongabong', 'Raisen', 'Aguadas', 'Pomaz', 'Shahpur Undi', 'Wervershoof', 'Cochoapa el Grande', 'Malaybalay', 'Mesudiye', 'Caravaggio', 'Moviya', 'Manay', 'Kitanakagusuku', 'Basi', 'Kant', 'Garmeh', 'Weeze', 'Kenieran', 'Epping', 'Serra do Salitre', 'Sabzevar', 'Timbiqui', 'Andilanatoby', 'West Point', 'Nneyi-Umuleri', 'Brembate', 'Bendigo', 'Pitrufquen', 'Mobara', 'Tuku', 'Herkimer', 'Rokunohe', 'Aliso Viejo', 'Shirataka', 'Huajiang', 'Imam Sahib', 'Toyoake', 'Cidade Velha', 'Bull Run', 'Leguevin', 'Sao Domingos', 'Sidi Barani', 'Ejeda', 'Xishrov', 'Ebebiyin', 'Oruvadalkotta', 'Gurwalia Biswas', 'Mariupol', 'Ulft', 'Necocli', 'Ambriz', 'Saltsjobaden', 'Oulad Dahmane', 'Adelaide River', 'Bikaner', 'Condoto', 'Shah Latif Town', 'Manantheri', 'Mossley', 'Piracununga', 'Parappanangadi', 'Revel', 'Atmakuru', 'Pullappalli', 'Shengli', 'Salto', 'Los Mochis', 'Pattittara', 'Dahuaishu', 'Yamasa', "Val-d'Or", 'Holic', 'Tarxien', 'Meishan', 'Baghant', 'Panrepatti', 'Edemissen', 'Rockport', 'Chavassheri', 'Swampscott', 'Torroella de Montgri', 'Chauk Azam', 'Moul El Bergui', 'Isla-Cristina', 'Oulad Ayyad', 'Qorveh', 'Yoshiicho-shimobaru', 'Jangipur', 'Port Alberni', 'Beauchamp', 'Ratba', 'Beni Douala', 'Kunduz', 'Seosaeng', 'Etaples', 'Bludenz', 'Golaghat', 'Sujanpur', 'Doctor Arroyo', 'Bugho', 'Borgarnes', 'Solanea', 'Boulogne-Billancourt', 'Khosrowshahr', 'Paithan Kawai', 'Kaltenkirchen', 'Jordan', 'Motiong', 'Daraj', 'Manhattan', 'Debica', 'Bina', 'Nanjanad', 'Campina', 'Antsahavaribe', 'Maizal', 'Saint-Nicolas', 'Round Lake Beach', 'Kamalasai', 'Yeola', 'Dangila', 'East Bradford', 'Noale', 'Germencik', 'De Aar', 'Taihecun', 'Kilinochchi', 'Karivalamvandanallur', 'Vinany', 'Pudtol', 'Visselhovede', 'Perigny', 'Izalco', 'Dhuri', 'Rudarpur', 'San Marzano sul Sarno', 'Kambar', 'Le Hochet', 'Skalavik', 'Dharhara', 'Agutaya', 'Montpelier', 'Santa Pola', 'Budelsdorf', 'Gyapekurom', 'Medan', 'Egilsstadhir', 'Somasso', 'Carsamba', 'Nepalganj', 'Bobil', 'San Guillermo', 'Hyrum', 'Dhauni', 'Al Hisn', 'Hengkou', 'Nenagh', 'Lake Monticello', 'Kokiladanga', 'Aves', 'Iwaizumi', 'Santa Quiteria do Maranhao', 'Kusugal', 'Misano Adriatico', 'Liangyi', "Qa'em Shahr", 'Okene', 'Dorsten', 'Inglewood', 'Waianae', 'Nerupperichchal', 'Ghandinagar', 'Le Portel', 'Nazare', 'Douar Lehgagcha', 'Boende', 'Aracruz', 'Atar', 'Golhisar', 'Niar', 'Kristiansund', "Mu'minobod", 'Ilchester', 'Jodoigne', 'Mudanjiang', 'Parapatti', 'Youssoufia', 'Has', 'Rzeszow', 'View Park-Windsor Hills', 'Redland', 'Bni Sidel', 'Cumbum', 'Sue', 'Ban Pae', 'Eghezee', 'Lebedinovka', 'Demerval Lobao', 'Teplodar', 'Moose Jaw', 'Rahika', 'Edwards', 'Imdina', 'Zuhres', 'Gillette', 'Tixter', 'Valenii de Munte', 'Zhangliangcun', 'Failsworth', 'Dormentes', 'Chowchilla', 'Osasco', 'Cheste', 'Elmadag', 'Timberwood Park', 'Tiruttani', 'Souba', 'Maripad', 'Tiltil', 'Piossasco', 'Comstock Park', 'Sagrada Familia', 'San Raimundo', 'Killeen', 'Miracema', 'Singapore', 'Olocuilta', 'Ribeirao do Pinhal', 'Itaquitinga', 'Haltom City', 'Chanderi', 'Jarabulus', 'Chennampatti', 'Izumo', 'Taruma', 'Sottaiyampalaiyam', 'Ichikawamisato', 'Brockton', 'Pandua', 'Willow Street', 'Truro', 'Topola', 'Blumenau', 'Braine-le-Comte', 'Duga Resa', 'Liesveld', 'Cairu', 'Kolongo-Bozo', 'Huanghua', 'La Oliva', 'Yokkaichi', 'Tres Lagoas', 'Westervoort', 'Michelstadt', 'Cavalcante', 'La Lima', 'Ampanotokana', 'Roma', 'Latacunga', 'Eckington', 'Cam Pha', 'Kangqiao', 'Remanso', 'Jipijapa', 'Dbarwa', 'Cody', 'Narsarsuaq', 'Windhoek', 'Razgrad', 'Xinhualu', 'Aosta', "M'Chouneche", 'Aydarken', 'Analamitsivalana', 'Boro', 'Yelur', 'Manteno', 'As Sarw', 'Ban Bueng Phra', 'Puliyankudi', 'Istaravshan', 'Pithaura', 'Onex', 'Chakdarra', 'Kareli', 'Dinshaway', 'Matalam', 'Yongju', 'Kumarapuram', 'Yidu', 'St. Joseph', 'Fort Bonifacio', 'Mecayapan', 'Saint-Dizier', 'Sharurah', 'Uruara', 'Macusani', 'Stollberg', 'Avelino Lopes', 'Eschenbach', 'Kamianka-Buzka', 'Merced', 'Ishizaki', 'Ratan', 'Majholi', 'Portimao', 'Guyancourt', 'Ampasimena', 'Dengshangcun', 'Teno', 'Cachira', 'Davaci', 'Tamamura', 'Clifton Park', 'Ma`bar', 'Aartselaar', 'Blundellsands', 'Richards Bay', 'Bussolengo', 'Ratnahalli', 'Bestensee', 'Manjacaze', 'Solano', 'Mercerville', 'Wujie', 'Luderitz', 'Campo Formoso', 'Pobiedziska', 'Ennamangalam', 'Claiborne', 'Elesvaram', 'Begogo', 'San Salvo', 'Tambura', 'Zumbo', 'Margate', 'Naini Tal', 'Mao', 'Castelfranco di Sotto', 'Guachaves', 'Artemisa', 'Ovalle', 'Piedras Blancas', 'Dhanauri', 'Vengapalli', 'Tnine Sidi Lyamani', 'Balindong', 'Busia', 'Codigoro', 'Shiqiao', 'Eidson Road', 'Malita', 'Kapatagan', 'Ervalia', 'Sibuco', 'Rattaphum', 'Wolfratshausen', 'Jucurucu', 'Lismore', 'Lagangilang', 'Ech Chaibat', 'Iguaba Grande', 'Bayan Hot', 'Senjan', 'Mihara', 'Sandwich', 'Huanglongsi', 'Hualaihue', 'Campos do Jordao', 'Radauti', 'Hersham', 'Rutherford', 'Kavallemmavu', 'Bindura', 'Niono', 'Hammam Dalaa', 'Varto', 'Qina', 'Aringay', 'Trofarello', 'Goshikicho-aihara-minamidani', 'Taozhou', 'Wickford', 'Casale sul Sile', 'Pointe a Raquettes', 'Dolianova', 'Morpeth', 'Safdarabad', 'Asamankese', 'Mifune', 'South Huntington', 'Antri', 'Ambohimahazo', 'Nangis', 'Lincolnwood', 'Vieira do Minho', 'Marigliano', 'Sankt Valentin', 'Ota-ku', 'Montes Claros', 'Scottsboro', 'Gulni', 'Bogovinje', 'Tlalnelhuayocan', 'Little Canada', 'Bni Bouayach', 'Porto Rafti', 'Pontal', 'Milha', 'Kigoma', 'Littleborough', 'Tarur', 'Myers Corner', 'Salamedu', 'Hernando', 'Sava', 'Rowley Regis', 'Qax', 'Mor', 'Jalalaqsi', 'Fereydun Kenar', 'Aubergenville', 'Puranattukara', 'Vondrozo', 'Pombal', 'Upper Providence', 'Huizhou', 'Mali Idos', 'Townsville', 'Bolbec', 'Nanpara', 'Tocantins', 'Oulgaret', 'Marmara Ereglisi', 'Pedra', 'Sahapur', 'Puerto Lleras', 'Soccorro', 'Enerhodar', 'Adams', 'Khetko', 'Bacobampo', 'Naxxar', 'Wilton', 'Manantenina', 'Montecatini Terme', 'Ocumare del Tuy', 'Kas', 'Nagarpara', 'Tagounite', 'Magnago', 'Paripueira', 'Guerra', 'Ecaussinnes-Lalaing', 'Tittachcheri', 'Cariacica', 'Missaglia', 'Niscemi', 'Ajdabiya', 'Santa Cruz das Palmeiras', 'Bogucice', 'Centurion', 'Aalst', 'Tupelo', 'Kochhor', 'Riihimaki', 'Sjobo', 'Dongguan', 'Melbourne', 'Goluchow', 'Devapur', 'Merimbula', 'North Port', 'Bou Izakarn', 'Cuisnahuat', 'Cunday', 'Sao Bernardo', 'Everett', 'Gyal', 'Cedar Grove', 'Piriapolis', 'Mamakating', 'Narathiwat', 'Wood River', 'Kebemer', 'Salzburg', 'Roebourne', 'Port-au-Prince', 'Ulliyeri', 'Simferopol', 'Navojoa', 'Kuna', 'Ikeda', 'Dinan', 'Champadanga', 'Kottayam', 'Itajai', 'Moulay Abdallah', 'Ponnuru', 'Pakribarawan', 'Canakkale', 'Mestre', 'Kannampalaiyam', 'Lang Suan', 'Libiaz', 'Naftalan', 'Guanzhai', 'Juruti', 'Hautmont', 'Coro', 'Navani', 'Cagdianao', 'Bauska', 'Waltershausen', 'Karippira', 'Kodiyeri', 'Huercal de Almeria', 'Lebane', 'Tando Allahyar', 'El Bordj', 'Polva', 'Asasa', 'Capilla de Guadalupe', 'Vadapalanji', 'Mekhe', 'Leigh-on-Sea', 'Velykyi Bychkiv', 'Sihali Jagir', 'Tepetitlan', 'Sincelejo', 'Caorle', 'Burrel', 'Ebetsu', 'Niagara-on-the-Lake', 'Tucson Mountains', 'Manawar', 'Uige', 'Ankaramy', 'Guinobatan', 'Andrakata', 'Patulul', 'Patnos', 'Bengkulu', 'Strakonice', 'Bairo', 'Sint Anthonis', 'Astorp', 'Chantepie', 'Stara Gora', 'Fierenana', 'Phenix City', 'Schmalkalden', 'Nadi', 'Vimodrone', 'Maramag', 'Satipo', 'El Oued', 'Dogbo', 'Needham', 'Mitu', 'North Vancouver', 'Belmonte', 'Turmanin', 'Beandrarezona', 'Shiraz', 'Edgewood', 'Eqbaliyeh', 'West Whiteland', 'Nchelenge', 'Sason', 'Suknah', 'Alhandra', 'Boghni', 'Versoix', 'Beverwijk', 'Satyun', 'Village Green-Green Ridge', 'Luuk', 'Speedway', 'Yanbu', 'Bhitarwar', 'Kluang', 'Sastamala', 'Mulangodi', 'Olivares', 'Kondhali', 'Inkollu', 'Piedras Negras', 'Consuelito', 'Uruguaiana', 'Nowy Targ', 'Puvali', 'Hulshout', 'Pleasant Grove', 'Armant', 'Viga', 'Igbanke', 'Plover', 'Bene Beraq', 'Yopougon', 'San Nicolas de los Ranchos', 'Piui', 'Gieraltowice', 'Tarana', 'Yanai', 'Chariyakulam', 'Bogota', 'Minamisanriku', 'Werl', 'Manuel Ribas', 'Heusenstamm', 'Grosseto', 'Villeparisis', 'Serramanna', 'Sudlohn', 'Mawatagama', 'Starokostiantyniv', 'Vaikam', 'Ipiales', 'New Baltimore', 'Quinta de Tilcoco', 'Borgaro Torinese', 'Tapiratiba', 'Cazzago San Martino', 'Esperanca', 'Yirol', 'Mila', 'Tangalan', 'Sardarshahr', 'Palmetto', 'Heddesheim', 'Grossburgwedel', 'Pradera', 'Bela Vista do Paraiso', 'Kilwinning', 'Dunn Loring', 'Bcharre', 'Bhagirathpur', 'Cabadbaran', 'Hino', 'Runcorn', 'Santiago de Compostela', 'Sotouboua', 'Tecate', 'Morasar', 'Vanono', 'Sankt Ingbert', 'Jajce', 'Xinyu', 'Hochst im Odenwald', 'Kawthoung', 'Cherry Hill', 'Riebeeckstad', 'Cacheu', 'Tranovaho', 'Douar Sgarta', 'Tlemcen', 'Mandabe', 'Idil', 'Bohicon', 'East Cocalico', 'Wekiwa Springs', 'Magdalena Milpas Altas', 'South Charleston', 'Dougba', 'Glenwood Springs', 'Dumfries', 'Moyale', 'Mabini', 'Santa Terezinha de Goias', 'Evergreen', 'Watford', 'Rabor', 'Castel Mella', 'Tanabe', 'Tafaraoui', 'Hilter', 'Shujaabad', 'Videm', 'Kecskemet', 'Obertraubling', 'Folomana', 'Novi Becej', 'Molalla', 'Hagta', 'Tisina', 'Raikal', 'Wendell', 'Canet-en-Roussillon', 'Kennedy', 'Herzberg am Harz', 'San Simon', 'Miyatoko', 'Pali', 'Copiapo', 'Muhldorf', 'Bambadinca', 'Igbaras', 'Kothi', 'Souq Sebt Says', 'San Rafael Abajo', 'Halisahar', 'Mamaroneck', 'Varisshiyakuni', 'Matsoandakana', 'Burgas', 'Bhanghi', 'Beverly', 'Raichur', 'Tahannawt', 'Tota', 'Lunel', 'Souk Tlet El Gharb', 'Racine', 'Stockerau', 'Mazamitla', 'Guachucal', 'Malepur', 'Sarikamis', 'Gothenburg', 'Suganwan', 'Kirikkale', 'Maghull', 'Chonchi', 'Ospitaletto', 'Maria da Fe', 'Chalandri', 'Bobon', 'Naqadah', 'Martinopole', 'Enghien', 'Savigny-le-Temple', 'Ramchandarpur', 'Caio', 'Belo', 'Bukowno', 'Tel Aviv-Yafo', 'Avabodji', 'Puerto America', 'Hanson', 'Huangshan', 'Bulusan', 'Domazlice', 'Linkenheim-Hochstetten', 'Suaza', 'Mangalagiri', 'Rocky River', 'Tanakoub', 'Canicatti', 'Las Rosas', 'Mahlaing', 'Jelenia Gora', 'Mathila', 'Aizawa', 'Itatiaia', 'Neuri', 'Chivacoa', 'Arvorezinha', 'Armanaz', 'Buritizeiro', 'Bien Hoa', 'Bruchkobel', 'Dolo Bay', 'Lefkada', 'Tasiusaq', 'Pallappatti', 'Kanchrapara', 'San Roque', 'Jefferson', 'Santiago de Baney', 'Kofu', 'Harchandpur', 'Moss Bluff', 'Vushtrri', 'Mithi', 'Smederevska Palanka', 'Apan', 'Deodha', 'Nallihan', 'Guapo', "Ghinda'e", 'Sar-e Pul', 'Christiansburg', 'Pont-a-Celles', 'Overijse', 'Trecastagni', 'Oulad Embarek', 'Dayr Hafir', 'Caldas de Reyes', 'Arkalochori', 'Gafsa', 'Cipolletti', 'Kaikaram', 'Shuangshuicun', 'Iquitos', 'Hazaribagh', 'Bedidy', 'Mayate', 'Chumbicha', 'Dilasag', 'Karapa', 'Petaling Jaya', 'Castel Gandolfo', 'Yevpatoriia', 'Kuchaiburi', 'Hanyu', 'Horn Lake', 'Kadiria', 'Zhongba', 'Godoy Cruz', 'Malappuram', 'East Peoria', 'Markranstadt', 'Livonia', 'Dongen', 'Golbasi', 'Bhagwangola', 'Elektrenai', 'Hat Yai', 'Saint John', 'Torihama', 'Galimuyod', 'Raikot', 'Bagre', 'Komijan', 'Ibia', 'Holyhead', 'Akwatia', 'Elavanasur', 'Tandag', 'Chilwell', 'Ciudad Ojeda', 'Ciudad Serdan', 'Radlett', 'Vembarpatti', 'Atripalda', 'Gourock', 'Anama', 'Amblecote', 'Excelsior Springs', 'Dadu', 'Offa', 'Brindisi', 'Kovilpatti', 'Chrudim', 'Cesson-Sevigne', 'Mediouna', 'Zengqiao', 'Aguilares', 'Siay', 'Mananjary', 'Kolarovo', 'Melissia', 'Stari Trg', 'Viadana', 'Stolberg', 'Berekum', 'Farako', 'Joyabaj', 'San Giorgio di Piano', 'Carbonera', 'Mackay', 'Meltham', 'Challapalle', 'Saricam', 'Tres Barras', 'Cannes', 'Upper Darby', 'Arlington', 'Sanxi', 'Sanankoro Djitoumou', 'Birur', 'Manne Ekeli', 'Regenstauf', 'Midsalip', 'Reggello', 'Corinda', 'Babatngon', 'Bungku', 'Zhangshu', 'Idiofa', 'Amboanana', 'Langley', 'Fishersville', 'Mateare', 'Linda', 'Helena', 'Hohenstein-Ernstthal', 'Korla', 'Bairuo', 'Limoeiro do Ajuru', 'Gandevi', 'Rogers', 'Dexing', 'Padugaipattu', 'Ilmenau', 'Litovel', 'Yasenivskyi', 'Boca da Mata', 'Sarkad', 'Pawayan', 'Avon Park', 'Kondarangi Kiranur', 'Novgorodskoye', 'Nanattuparai', 'Krishnapur', 'Mendez-Nunez', 'Alacam', 'Pelissanne', 'Colotlan', 'Raahe', 'Ridder', 'Jisr ash Shughur', 'Hoveyzeh', 'Rokkasho', 'Muquem de Sao Francisco', 'Tsuru', 'Ross-Betio', 'Escanaba', 'Tacana', 'Traverse City', 'Gounarou', 'Fontaine', 'Sahuarita', 'Bracciano', 'Araripina', 'Gambissara', 'Lumphat', 'Gwangmyeongni', 'Kirrayach', 'University Place', 'Unai', 'Paracin', 'Juara', 'Ware', 'Hilltown', 'Summit Park', 'Mislata', 'Rottofreno', 'Uniontown', 'Kulu', 'Bunia', 'Ciudad Bolivar', 'Gitagum', 'Uniao dos Palmares', 'Erumaippatti', 'Pichhor', 'Yuanlin', 'Bulungu', 'Kadakola', 'Harur', 'Raunheim', 'Mothihari', 'Malahide', 'Macapa', 'Mahabo', 'Andonabe Atsimo', 'Hita', 'Salesopolis', 'Tirkakara', 'Malhargarh', 'Oakengates', 'Agrinio', 'Whitehall', 'Lapuyan', 'Botticino Sera', 'Tinoc', 'Jiquilpan de Juarez', 'Rockville Centre', 'Ibatiba', 'Ballina', 'Broken Arrow', 'Ziniare', 'Clayton le Moors', 'Fredensborg', 'Kharod', 'Umm al `Abid', 'Goldenrod', 'Yacopi', 'Halsur', 'Opoczno', 'Kottapalle', 'Ubrique', 'Sodrazica', 'Irakleia', 'Sao Luis Gonzaga', 'Sao Paulo do Potengi', 'North Gates', 'Alsip', 'Choele Choel', 'Antsiatsiaka', 'Beocin', 'Tenancingo', 'Monselice', 'Marpalli', 'Yangqingcun', 'Columbine', 'Sahamadio', 'San Nicolas Buenos Aires', 'Tiruvur', 'Anosimparihy', 'Lariano', 'Chaval', 'Karavan', 'Kingsbury', 'Koduru', 'Bangolo', 'Heidenau', 'Petrolina', 'Archena', 'Natonin', 'Casalnuovo di Napoli', 'Nova Odessa', 'Kariz', 'Caen', 'Scugog', 'Bangalore', 'Zijinglu', 'Catford', 'Chakapara', 'Safita', 'Acquaviva', 'Satai', 'Ghabaghib', 'La Spezia', 'Iquique', 'Barhampur', 'Chalungalpadam', 'Kerur', 'Gulnar', 'Banigbe', 'Metamorfosi', "Ma'muniyeh", 'Tura', 'Seelze', 'White Plains', 'Reddippatti', 'Ticuantepe', 'Ntoroko', 'Nairobi', 'Glubczyce', 'Safi', 'Larne', 'Sebin Karahisar', 'Donegal', 'Pasto', 'Ceyhan', 'East Grand Forks', 'Qarqan', 'Ferney-Voltaire', 'Caybasi', 'Ayanavelikulangara Tekku', 'Derventa', 'Homburg', 'Zaouiet Says', 'Jethuli', 'Ketti', 'Rochdale', 'Zevio', 'Made', 'Eleskirt', 'Iijima', 'Rubirizi', 'Mombris', 'Santa Teresita', 'Pedappai', 'Vineland', 'Saint-Amable', 'Awara', 'Betton', 'Alsbach-Hahnlein', 'Tongzhou', 'Nopala de Villagran', 'Propria', 'Jawkatia', 'Morrovalle', 'Bella Vista', 'Lufeng', 'Lebach', 'Wanda', 'East Highland Park', 'Phu Yen', 'Masangshy', 'Bou Sfer', 'Kumirimora', 'Shibushi', 'Falticeni', 'Wallington', 'Bryn', 'Dornstadt', 'Sembe', 'Onan', 'Brandizzo', 'Voerendaal', 'Qaladizay', 'Butwal', 'Kunzell', 'Dindanko', 'Knezha', 'Half Moon Bay', 'Upleta', 'Cambuslang', 'Zupanja', 'Ronse', 'Seneca Falls', 'Karlshamn', 'Attippattu', 'Terzigno', 'Brumado', 'Bangaon', 'Szczecin', 'Jauharabad', 'Pisticci', 'Gukeng', 'Conceicao do Araguaia', 'Saltash', 'Parvomay', 'Ban Wat Phrik', 'Cabaiguan', 'Awa', 'Nemours', 'Yukuhashi', 'Bujari', 'Jiwachhpur', 'Rongwo', 'Puerto Nare', 'Anjomachi', 'Lagbe', 'Mickleover', 'Mianyang', 'Eshowe', 'Bonnievale', 'Radekhiv', 'Sagauli', 'Ankirihitra', 'University Park', 'Punjai Lakkapuram', 'Hamlin', 'Feni', 'Legnica', 'Gandhali', 'Deyang', 'Scotts Valley', 'North Ogden', 'Tocantinopolis', 'Princesa Isabel', 'Weert', 'Chos Malal', 'West Odessa', 'Mawanella', 'Aljaraque', 'Majhariya', 'Nagayalanka', 'Tring', 'Lenox', 'Kebila', 'Fatehpur Bala', 'Mucheln', 'Sangolqui', 'Gohadi', 'Komaki', 'Malaudh', 'Coquitlam', 'Florencia', 'Kalisizo', 'Eslamshahr', 'Tychy', 'Lazaro Cardenas', 'Pallarimangalam', 'Yang Talat', 'Arsuz', 'Bhavani', 'Maniwa', 'Santa Luzia do Itanhy', 'Oranienburg', 'Huangshi', 'Zaranj', 'Halver', 'Areado', 'Hadjadj', 'Balykchy', 'Effingham', 'Valley Stream', 'Pinhao', 'West New York', 'Heishanzuicun', 'Bordentown', 'Inirida', 'La Huerta', "Sant'Egidio alla Vibrata", 'Dharapuram', 'Ad Dulu`iyah', 'Kaarina', 'Rekovac', 'Manitowoc', 'Goroka', 'Motul', 'Narasingam', 'Beckenham', 'Kirkwood', 'Kolondieba', 'Pasadena', 'Qo`ng`irot Shahri', 'Winsen', 'Mers el Kebir', 'Poro', 'Knjazevac', 'Dajiecun', 'Tangjin', 'Karbinci', 'Beykoz', 'Amadeo', 'Manamelkudi', 'Tekanpur', 'Busolwe', 'Ac-cahrij', 'Coroico', 'Inhambane', 'Jacinto', 'Carlentini', 'Skippack', 'Federal', 'Valenzano', 'Zhangcun', 'Loenen', 'Muswellbrook', 'Tamazunchale', 'Moatize', 'Port Salerno', 'Lisbon', 'Santo Estevao', 'Irondequoit', 'Levallois-Perret', 'San Antonio Suchitepequez', 'Zvecan', 'Douar Lamjaara', 'Winter Springs', 'Bimawan', 'Pentapadu Kasba', 'Montijo', 'Sena Madureira', 'Virei', 'Varzaneh', 'Sariveliler', 'Vatakemuri', 'Machelen', 'Nabunturan', "Yong'ancun", 'Dalsingh Sarai', 'Etoumbi', 'Funadhoo', 'Antur', 'Superior', 'Matouying', 'Abucay', 'Kengarai', 'Paterna', 'Saqqez', 'Spring Creek', 'Garaimari', 'Nan Zhuang', 'Jones', 'Bou Khadra', 'Rosh Ha`Ayin', 'Adalhat', 'Capaya', 'Kuruman', 'Chaoyangdicun', 'Villacarrillo', 'South Portland', 'Bambui', 'Apalit', 'Biddupur', 'Baqiabad', 'Kostanjevica na Krki', 'Rechaiga', 'Morwell', 'Koundian', 'Higashimiyoshi', 'Castro', 'At Tur', 'Pirnagar', 'Vandalia', 'Nellipaka', 'Zerong', 'Goodlands', 'Hatay', 'Bielsko-Biala', 'Grosse Pointe Woods', 'Ajijic', 'Dhanupra', 'Pachrukha', 'Handwara', 'Ranopiso', 'Vallenar', 'Santa Maria do Suacui', 'Loyalist', 'Chinameca', 'Mahazoma', 'Silifke', 'Falakata', 'Ghordaur', 'Black Forest', 'Dale City', 'Ranbirsinghpura', 'Ibiassuce', 'Tetagunta', 'Pereiro', 'Marmagao', 'Amparihy', 'Ereymentau', 'Heishuikeng', 'As Salt', 'Los Patios', 'Masyaf', 'Ban Sai Yoi', 'Abre Campo', 'El Maknassi', 'Rosaryville', 'Dupax Del Norte', 'Kosai', 'La Queue-en-Brie', 'Rasulpur', 'Chelm', 'Caserta', 'Devmaudaldal', 'Biancavilla', 'Nilaiyur', 'Budhanilkantha', 'Weyburn', 'Schertz', 'Bela Simri', 'Kimje', 'Jerome', 'Rangapuram', 'Sandalpur', 'Nea Erythraia', 'Entebbe', 'San Bartolome', 'Al Karnak', 'Estepona', 'Lempdes', 'Zengjiaba', 'Lipkovo', 'Buckingham', 'Bevoay', 'Sokouhoue', 'Tecklenburg', 'Bochum', 'San Luis Talpa', 'Ijui', 'Woodlyn', 'Capivari', 'Purna', 'Bayugan', 'Imbert', 'Durgauti', 'Belen de los Andaquies', 'Menghan', 'Gangapur Athar', 'Inza', 'Tiaong', 'Cagliari', 'Inverness', 'Ciudad Sahagun', 'Gerakas', 'Vubatalai', 'Ratauli', 'Catalao', 'Telsen', 'Secovce', 'Sirugamani', 'Lake Norman of Catawba', 'Vevey', 'Quakers Hill', 'Struga', 'Amarwara', 'Mont-Saint-Martin', 'Nova Crnja', 'Hirehalli', 'Ablu', 'Llavallol', 'El Almendro', 'Ad Darwa', 'Bietigheim-Bissingen', 'Ul', 'Indianola', 'Ankisabe', 'Minster', "Wik'ro", 'Barapire', 'A Yun Pa', 'Retirolandia', 'Shahkot', 'Itaberaba', 'Pilappulli', 'Coesfeld', 'Azare', 'Dix Hills', 'Nazira', 'Velen', 'Mnichovo Hradiste', 'Cedar City', 'Bofete', 'Haiphong', 'Tokmok', 'Goianapolis', 'Texas City', 'Marakkara', 'Cameli', 'Lundazi', 'Antanambaobe', 'Akhmim', 'Abhayapuri', 'Saint-Philbert-de-Grand-Lieu', 'Lutterworth', 'Vinci', 'Kidlington', 'Sao Joao', 'Leopold', 'Vigevano', 'Croix-des-Bouquets', 'Zele', 'Ibateguara', 'West Lealman', 'Taran', 'Pingshang', 'Douar Ait Sidi Daoud', 'San Martin', 'Bajram Curri', 'Fouesnant', 'Hosbach', 'Jiamusi', 'Mendi', 'Gennevilliers', 'Suramala', 'Lambunao', 'Guatuba', 'Hassi Bahbah', 'Mineral del Monte', 'Menglie', 'Chitarpur', 'Cristopolis', 'Ad Diwem', 'Tocopilla', 'Calella', 'Bedwas', 'Sarur', 'Wuhai', 'Flero', 'Jork', 'Juma Shahri', 'Hawtat Sudayr', 'Viyapuram', 'La Nucia', 'Chalhuanca', '`Utaybah', 'Ilkley', 'Bloomfield', 'Dobhawan', 'Chempalli', 'Tipo-Tipo', 'Risch', 'Marosangy', 'Kirchheim bei Munchen', 'Ubaira', 'Maragondon', 'Al `Aydabi', 'Lamia', 'Chippewa Falls', 'Ora', 'Androy', 'Valle del Guamuez', 'Sykies', 'Barahari', 'Peters', 'Naxcivan', 'Anderlues', 'Doraville', 'Banaue', 'Eersel', 'Ganapatipalaiyam', 'Bernal', 'Tole Bi', 'Badiangan', 'Newry', 'Boom', 'Baguineda', 'Thulusdhoo', 'Kanniyakumari', 'Ramagiri Udayagiri', 'Tangjiacun', 'Naranattenvanpatti', 'Megarine', 'Angallu', 'Yeni Suraxani', 'Gudimulakhandrika', 'Port-Alfred', 'Bantval', 'Banska Bystrica', 'Ratanpur', 'Sihui', 'Ambohimangakely', 'Mineshita', 'San Rafael', 'Bhambia Bhai', 'Nauheim', 'Russellville', 'Naushahro Firoz', 'Sarvestan', 'Pagalungan', 'Carsibasi', 'Emirgazi', 'Ciudad Rio Bravo', 'Proddatur', 'Bruz', 'Bartlett', 'Yeadon', 'Timbedgha', 'Rishton', 'Thaba-Tseka', 'Pappampatti', 'Alianca', 'Skowhegan', 'Gongzhuling', 'Port Arthur', 'Budingen', 'Vallam', 'Agios Ioannis Rentis', 'Birine', 'Quirino', 'Honeygo', 'Usulutan', 'Abram', 'Taranto', 'Tyrnavos', 'Nagongera', 'Farsan', 'Santa Rita', 'Bueno Brandao', 'Amingarh', 'Port Loko', 'Hellevoetsluis', 'Surigao', 'Pattamadai', 'Balsas', 'Matipo', 'Onitsha', 'Strasshof an der Nordbahn', 'Rochefort', 'Doral', 'Limoeiro', 'Ambalanjanakomby', 'Cohoes', 'Jaguaruana', 'Vadakethara', 'Prospect Heights', 'Bonita', 'Jarvenpaa', 'Baj Baj', 'Alamadi', 'Nanzhou', 'Sanatoga', 'Arruda dos Vinhos', 'Malapatan', 'Soroti', 'Dargahan', 'Meze', 'Pemangkat', 'Dinagat', 'Villemomble', 'Donetsk', 'Viru', 'Dazhangzicun', 'Vigneux-sur-Seine', 'Amolatar', 'Nemili', 'Nagykanizsa', 'Kedgaon', 'Boumia', 'Guisa', 'Maur', 'Saint-Laurent-du-Maroni', 'Chenggong', 'Rengali', 'Bacau', 'Kaunra', 'Ballyfermot', 'Aurahi', 'Perito Moreno', 'Shetang', 'Suwon', 'Virappanchathiram', 'Celje', 'Turhal', 'Taquari', 'Ikot Ekpene', 'Allinagaram', 'Bolintin Vale', 'Maga', 'Kolnad', 'Sori', 'Totoro', 'San Rafael Obrajuelo', 'Alayaman', 'Ouani', 'Tadla', 'Pirque', 'Cuxhaven', 'Zhongzai', 'Aliwal North', 'Plum', 'Saint Helier', 'Dongcha', 'Gokarn', 'Sassandra', 'Balneario do Rincao', 'Kafr Buhum', 'Hlukhiv', 'Sandacho', 'Arizona City', 'Silamalai', 'Mohanpur', 'Diaobingshancun', 'Ratekau', 'Halgar', 'Albertville', 'Gadani', 'Ascheberg', 'Godda', 'Tangdong', 'Sholinghur', 'Zuidlaren', 'Jadopur Shukul', 'Ekalbehri', 'Puerto Plata', 'Ambatturai', 'Masanwa', 'Crystal', 'Dasungezhuang', 'Eenhana', 'Xiangtan', 'Arbon', 'Jerez de la Frontera', 'Barkagaon', 'Sumter', 'Maroochydore', 'Rach Gia', 'Mirna Pec', 'St. John', 'Medianeira', 'Muqui', 'Farah', 'Dedougou', 'Manduri', 'Monserrat', 'Myrnohrad', 'Segbana', 'Gelnhausen', 'Chuanliaocun', 'Reshetylivka', 'College Station', 'Wum', 'Imst', 'Kumar Khad', 'Bjarred', 'Ambatofisaka II', 'Nocatee', 'Kanel', 'Philipstown', 'Torrevieja', 'Frankfort Square', 'Gudur', 'Oak Grove', 'Santana', 'Polukallu', 'Kanhai', 'Trapani', 'Sanchez-Mira', 'Wangtuan', 'Punta del Este', 'Zevenaar', 'Ban Tha Pha', 'Sainte-Marie', 'Karonga', 'Cremlingen', 'Ramallah', 'Gatumba', 'Caowotan', 'Tomah', 'Lorraine', 'Odumase', 'Amritsar', 'Vlasim', 'Yangcunzai', 'Prataparampuram', 'Indurti', 'Tanjay', 'Brzeg', 'Manthani', 'Passa Quatro', 'Dallas', 'Chinnatadagam', 'Roxborough Park', 'Ban Wang Pradu', 'Were Ilu', 'Tourcoing', 'Shimokizukuri', 'Dehui', 'Ithaca', 'Hamtic', 'Saint-Zotique', 'Mossendjo', 'Ban Bo Phlap', 'Arar', 'Meekatharra', 'Ghouazi', 'Amora', 'Yongji', 'Nurtingen', 'Pantar', 'Yantai', 'Shamli', 'Sidi Kasem', 'League City', 'Ylojarvi', 'Chakpi Karong', 'Lehman', 'Gunzenhausen', 'Pirpirituba', 'Titz', 'Vynnyky', 'Nicolas Romero', 'Tekit', 'Prinzapolka', 'Nueva Esparta', 'Hedongcun', 'Lobougoula', 'Socuellamos', 'East Hemet', 'Sebiston', 'Ban Laem', 'Mukono', 'Mineiros do Tiete', 'Warrnambool', 'Normandia', 'Monte Alegre', 'Volda', 'Yasothon', 'Kearny', 'Kahan', 'Mihqan', 'Playa Vicente', 'Shimoga', 'Konseguela', 'Outapi', 'Layton', 'Brock Hall', 'Kilsyth', 'Al Mukalla', 'Dhamtari', 'Alijo', 'Alacuas', 'Higashiizu', 'Lopez', 'Ta`izz', 'Corato', 'Chuhuiv', 'Albacete', 'Gundumal', 'Sidi Embarek', 'Calais', 'Camapua', 'Portogruaro', 'Bad Langensalza', 'Tecolotlan', 'Roseburg', 'Saint-Jerome', 'Sandila', 'Coldwater', 'Zhongcun', 'Westonaria', 'Pattanam', 'Guihulngan', 'Phaphot', 'Huili Chengguanzhen', 'Mabinay', 'Bothell East', 'Banavar', 'Nallippalaiyam', 'Quetigny', 'Tsuno', 'Geiro', 'Zarrin Shahr', 'Balpyq Bi', 'San Antonio Oeste', 'Krasnohorivka', 'Bled', 'Ama', 'Lototla', 'Azzano', 'Ivai', 'Mitanty', 'Calicut', 'Chelghoum el Aid', 'Kitee', 'Koffiefontein', 'Fair Oaks Ranch', 'Sao Marcos', 'Disuq', 'Bouzeghaia', 'Hudson', 'Fortuna Foothills', 'Raska', 'Rafah', 'Birsfelden', 'Loon', 'Ivrindi', 'Pacho', 'Sasaguri', 'Oroville', 'Odiongan', 'Bagua Grande', 'Dera Ghazi Khan', 'Moorslede', 'Pilkha', 'Kalar', 'Odawara', 'Monte Belo', 'Ludwigsfelde', 'Lopary', 'Boriguma', 'Storrs', 'Khrystynivka', 'Villa Aldama', 'Sirali', 'Bulwell', 'Vadacheri', 'Aytre', 'Kwidzyn', 'Furukawa', 'Sukand', 'Esquimalt', 'Caraubas', 'Antsohimbondrona', 'Waltham Abbey', 'Sagua la Grande', 'Taulaha', 'Roslyn', 'Hale Dyamavvanahalli', 'Cortona', 'Minnal', 'Brunoy', 'Kunisakimachi-tsurugawa', 'Rincao', 'Golden Glades', 'Matsuda-soryo', 'Pirauba', 'Ghriss', 'Mopipi', 'Chateauroux', 'Chortkiv', 'Theniet el Had', 'Balkh', 'Casalecchio di Reno', 'Guacimo', 'Arona', 'Novaci', 'Diakon', 'Taunusstein', 'Sawangan', 'Jigani', 'Parabita', 'Candaba', 'Jardim Alegre', 'Berovo', 'Cascina', 'Ambohijanaka', 'Sao Tome', 'Cleethorpes', 'Kalmunai', 'Vaslui', 'Fort William', 'Bolobo', 'Sanlucar la Mayor', 'El Hajeb', 'Quesnel', 'Leuwiliang', 'Caraibas', 'Sugaon', 'Villers-Cotterets', 'Santamesa', 'Santa Teresinha', 'Anosibe-Ifanja', 'Agarpur', 'Kutavettur', 'Itapetinga', 'Lubsko', 'Banbalah', 'Tala', 'Arcachon', 'Muturkha', 'Miyazu', 'Akcadag', 'Castaic', 'Xindian', 'Los Hidalgos', 'Kekava', 'Jacala', 'Lapseki', 'Cartaxo', 'Londerzeel', 'Ermua', 'Ipiranga', 'Trat', 'Guiglo', "Sant'Antioco", 'Linton Hall', 'Huaiyin', 'San Salvador de Jujuy', 'Ksour Essaf', 'El Quisco', 'Central', 'Muttupet', 'Juan de Herrera', 'Qaryat Suluq', 'Kressbronn am Bodensee', 'Kall', 'Thingangyun', 'Lauf', 'Villa de Alvarez', 'Sipacate', 'Andirin', "Cournon-d'Auvergne", 'Bhayandar', 'Borongan', 'Meerane', 'Pekanbaru', 'Szczytno', 'Obu', 'Chinnalapatti', 'Angalakurichchi', 'Caldas de Montbuy', 'Altata', 'Sonepur', 'Benisa', 'San Lucas Toliman', 'Japeri', 'Ecublens', 'Kowloon', 'Ivancna Gorica', 'Bougzoul', 'Jelcz-Laskowice', 'Engenheiro Beltrao', 'Stonecrest', 'Cheltenham', 'Ospino', 'Qingdao', 'Port Lavaca', 'Weichanglu', 'Gudiyattam', 'Fujikawa', 'Bamhni', 'Porterville', 'El Khemis des Beni Chegdal', 'Surany', 'Knurow', 'Znamianka', 'Huntersville', 'El Eulma', 'Bungoma', 'Al Kharjah', 'Rivesaltes', 'Denzlingen', 'Pul-e Khumri', 'Niedernhausen', 'Nova Floresta', 'Cazin', 'Virginia Beach', 'Cassino', 'Finnentrop', 'Khajuri', 'Cotabato', 'Mahamaibe', 'Aksu', 'Campbellton', 'Panay', 'Otofuke', 'Sarria', 'Oklahoma City', 'Waggaman', 'Powdersville', 'Ambongamarina', 'Mirpur Khas', 'Jamhor', 'Florham Park', 'Seram', 'Senda', 'Mancha Real', 'Mint Hill', 'Eggenstein-Leopoldshafen', 'Bachhraon', 'Friern Barnet', 'Toboso', 'Caledonia', 'Dawley', 'Bataredh', 'Podenzano', 'Hemiksem', 'Pielisjarvi', 'Pursa', 'Marsabit', 'Malar', 'Tabubil', 'White Meadow Lake', 'Winter Garden', 'Ryuo', 'Jequie', 'Ravar', 'Gudalur', 'Yenisehir', 'Brookings', 'Hamtramck', 'Orizona', 'Saint-Ouen', 'Wietze', 'Hwange', 'Barentu', 'Yedappalli', 'Caucagua', 'Gudensberg', 'New Hope', 'Benaguacil', 'Aymangala', 'Waltham', 'Horndean', 'Torquay', 'Amulung', 'Woolwich', 'Sentjur', 'Gondauli', 'Krosno', 'Nagasaki', 'Luebo', 'Sogutlu', 'Valle de Santiago', 'Ranzan', 'Chachoengsao', 'Golpayegan', 'Mocuba', 'Warrington', 'Yercaud', 'Capinzal', 'Paullo', 'Ozark', 'Kargahia Purab', 'Villanueva', 'Jbabra', 'Chunar', 'Likiskiai', 'Al Husayniyah', 'Rockcreek', 'Chichicastenango', 'Siasi', 'Beni Zouli', 'Saint-Hilaire-de-Riez', 'Devgeri', 'Arniya', 'Dennis', 'Bopa', 'East Norriton', 'Marancheri', 'Kalghatgi', 'Andkhoy', 'Dinanagar', 'Kaifeng Chengguanzhen', 'Jieshangya', 'Yima', 'Gamu', 'Safashahr', 'Pervari', 'Agoue', 'Presidente Franco', 'Bad Homburg', 'Taveta', 'Croxley Green', 'Bani', 'Liberal', 'Santa Maria de Ipire', 'Hranice', 'Mery-sur-Oise', 'Ipupiara', 'Takanabe', "G'allaorol Shahri", 'Syke', 'Sembabule', 'Kenley', 'Bishnah', 'Santa Lucia Milpas Altas', 'Ahfir', 'Virudunagar', 'Cuapiaxtla de Madero', 'Erquelinnes', 'Galanta', 'Islington', 'Alaca', 'Ganca', 'Colcapirhua', 'Katano', 'White Oak', 'Burgos', 'Macalelon', 'Bullhead City', 'Obukhivka', 'Krupka', 'Rovaniemi', 'Vrapciste', 'Puerto Padre', 'Cristalina', 'Drug', 'Bocono', 'Abaji', 'Annoeullin', 'Savja', 'El Coco', 'Vauvert', 'Rumbek', 'Tettnang', 'Tatsuno', 'Olton', 'Kabinda', 'Miami Shores', 'Chinnasekkadu', 'Cotia', 'Sapiranga', 'Bad Rappenau', 'Sharunah', 'Cormeilles-en-Parisis', 'Dossenheim', 'Ozarow Mazowiecki', 'Pichucalco', 'Dowlaiswaram', 'Palm Harbor', 'Longba', 'Chinique', 'Roscoe', 'Douar Oulad Bouziane', 'Dayr az Zawr', 'French Valley', 'Kalasa', 'Cave Spring', 'Yakage', 'Phagu', 'Taishituncun', 'Sumisip', 'Littleport', 'Kulachi', 'Saravena', 'Aguai', 'Nederweert', 'Haskoy', 'Saguday', 'Shushtar', 'Cuilapa', 'Formigine', 'Kalateh-ye Mir Ab', 'Farsund', 'Ripon', 'Munich', 'Himora', 'Banamba', 'Mkokotoni', 'Anori', 'Hartsville', 'Thikri', 'Xiantao', 'Libmanan', 'Shahritus', 'Hayle', 'Arakkal', 'Hoa Thanh', 'Chetumal', 'Cormano', 'Witham', 'Cleveland Heights', 'Ludus', 'Tolanaro', 'Saint-Doulchard', 'Pradopolis', 'Lourdes', 'Siswar', 'Alabaster', 'Kasibu', 'Saint-Leu-la-Foret', 'Sakaddi', 'Entre Ijuis', 'Bilina', 'Boyovut', 'Ambohinihaonana', 'Patrasaer', 'North Augusta', 'Ciudad Nezahualcoyotl', 'Jenks', 'Shiyuan', 'San Carlos Yautepec', 'Kabarore', 'Durban', 'Dajabon', 'Leognan', "Chech'on", 'Guelma', 'Rossmoor', 'Cano Martin Pena', 'Standerton', 'Kaimuh', 'Bellflower', 'Sibutu', 'Guateque', 'Vejer de la Frontera', 'Gornalwood', 'Serhetabat', 'Kondayampalaiyam', 'Andal', 'Kingersheim', 'Ait Ben Daoudi', 'Ixhuatlancillo', 'Song Doc', 'Schoten', 'Gan Yavne', 'Koprivnice', 'Wadi', 'Belvidere', 'Haibach', 'Chishtian', 'Ras El Oued', 'New Haven', 'Alamuru', 'Qaminis', 'Saint-Eustache', 'Port Wentworth', 'Hashimoto', 'Thara', 'Qingzhen', 'Sancti Spiritus', 'Andiyappanur', 'Pozi', 'Songnam', 'Alken', 'Horsens', 'Al Hashimiyah', 'Vanadzor', 'Fatehpur', 'Necochea', 'Sagbayan', 'La Virgen', 'Villagran', 'Elyria', 'Perunkolattur', 'Talavadi', 'Melrose', 'Penaranda', 'Aroeiras', 'Caucasia', 'Maryland Heights', 'Ambatomifanongoa', 'Fairfield', 'Liuquancun', 'Siqueira Campos', 'Villa Carlos Paz', 'Blitta', 'Ciudad de Melilla', 'Arajpur', 'Pantano Grande', 'Central Signal Village', 'Tirumuruganpundi', 'Vilakkudi', 'Kavadarci', 'La Plata', 'Short Pump', 'Shivganj', 'Fangasso', 'Douar Ain Maatouf', 'Harlau', 'Egypt Lake-Leto', 'Greiz', 'Al Khums', 'Inabanga', 'Ylivieska', 'Halmstad', 'West St. Paul', 'Boudouaou', 'Banaybanay', 'Pinheiro Machado', 'Bongao', 'Parras de la Fuente', 'Tabhka Khas', 'Basingstoke', 'Ovejas', 'Bandwar', 'Abu Ghurayb', 'Bekescsaba', 'Arutla', 'Kadwa', 'Kyaukse', 'North Bethesda', 'Jilotepec', 'Kafr `Awan', 'Takua Pa', 'Aberystwyth', 'Bataipora', 'Timmarasanayakkanur', 'Aradippou', 'Khampat', 'Caln', 'Ambodimadiro', 'Paso de Ovejas', 'Manavalakurichi', 'Chajari', 'Jacarei', 'Bals', 'Bishkek', 'Ikela', 'Saladoblanco', 'Sirsilla', 'Dara', 'Wuchang', 'Haiyang', 'Arida', 'Kalaiyamputtur', "'Tlat Bni Oukil", 'Dixon', 'Mogi das Cruzes', 'Tizi Ouzou', 'Hopkinton', 'Buchanan', 'Amami', 'Juiz de Fora', 'Kargipinar', 'Saint-Orens-de-Gameville', 'Charlton', 'Dortyol', 'Parepare', 'Cadaado', 'Boryspil', 'Marina del Rey', 'Uchen', 'Betulia', 'Kerema', 'Chartoun', 'Shajapur', 'Brooks', 'Nova Odesa', 'Sogne', 'Sarahandrano', 'Nea Ionia', 'Sumidouro', 'Yayladagi', 'Cesky Krumlov', 'Tay Ninh', 'Ottweiler', 'Pedro Brand', 'Neka', 'Masse', 'Darcheh', 'Adelphi', 'Melur', 'Bergneustadt', 'Salempur', 'Avanigadda', 'Guruvarajukuppam', 'Chandwa', 'Saclepea', 'Kathanian', 'Kari', 'San Jacinto', 'Southall', 'Jinja', 'Sungaiselam', 'Sirmaur', 'Friedland', 'Alpine', 'Kerben', 'Lubaczow', 'Ambodimangavolo', 'Chuxiong', 'Ciudad Mante', 'Chorkuh', 'Indalvai', 'Fengruncun', 'Azpeitia', 'Sallisaw', 'Karanganyar', 'Tokmak', 'Baripada', 'St. Marys', 'Platon Sanchez', 'Bugembe', 'Nihtaur', 'Kharsod B', 'Shahriar', 'Jabbeke', 'Mogtedo', 'Mayorga', 'Rastatt', 'Prairie Village', 'Hato Corozal', 'Dubuque', 'Rocha', 'Pataskala', 'Blue Ash', 'Sarandi', 'Nkayi', 'Coldstream', 'Guru Har Sahai', 'Gardanne', 'Waukesha', 'Saran', 'Alfred and Plantagenet', 'Tiadiaye', 'Huchuan', 'Lujan de Cuyo', 'Fundao', 'Ben Tre', 'Balingasag', 'Safipur', 'Mitoyo', 'At Taji', 'Doany', 'Fuyuan', 'Hayesville', 'Schwerin', 'Phelan', 'Bethanie', 'Weymouth', 'Kibanseke Premiere', 'Buey Arriba', 'Ibotirama', 'Yamaguchi', 'Vadasseri', 'Wantage', 'Iaboutene', 'Karlivka', 'Manabo', 'San Kamphaeng', 'El Hermel', 'Corrente', 'Voisins-le-Bretonneux', 'Vemuluru', 'Jasauli Patti', 'Paraguacu Paulista', 'Umreth', 'Rakovski', 'Kottagudi Muttanad', 'Sao Felix do Araguaia', 'Huong Tra', 'Yukon', 'Miena', 'Datang', 'Petrich', 'Tholen', 'Mosopa', 'Fara in Sabina', 'Alesund', 'Penwortham', 'Poyo', 'Ruma', 'Scorze', 'Cedral', 'Forest Grove', 'Szeged', 'Firavahana', 'Bagno a Ripoli', 'Caravelas', 'Horn-Bad Meinberg', 'Porto Alegre do Norte', 'Shtime', 'Raba', 'Chisec', 'Lagunia Raghukanth', 'Ensenada Berisso', 'Do`stobod', 'Amarpur', 'Lunca Cetatuii', 'Dagupan City', 'Saumlaki', 'Karkamb', 'Sobrado de Paiva', 'Kalappalangulam', 'Joao Lisboa', 'The Dalles', 'Slateng Dua', 'Lingwala', 'Latiano', 'Orangeville', 'Borazjan', 'Pizarro', 'Pilich', 'Veintiocho de Noviembre', 'Huntington Park', 'Jesus Menendez', "'Ain Abessa", 'Descalvado', 'Krapina', 'Harwood Heights', 'Banco Filipino International Village', 'Guadarrama', 'Channagiri', 'Mokri', 'Sanjiang', 'Kokri', 'Tirkha', 'Zhangjiakou', 'Veresegyhaz', 'Zhuanghe', 'Balkonda', 'Siemiatycze', 'Ban Saeo', 'Torrijos', 'Faches-Thumesnil', 'Campo Alegre de Lourdes', 'Francofonte', 'Alarobia', 'Worthing', 'Rochelle', 'Ilvesheim', 'Billings', 'Batangas', 'Vochaiko', 'Zaragoza', 'Heppenheim', 'La Mana', 'Son Tay', 'Magilampupuram', 'Logrono', 'Chiavari', 'Gujan-Mestras', 'Katako-Kombe', 'Chaparral', 'Batie', 'Foligno', 'University', 'Iruttarakonam', 'Kotla', 'East Hanover', 'Jomboy Shahri', 'Changanacheri', 'Kensington', 'Dobje', 'Maisons-Laffitte', 'Oroquieta', 'Parambatt Kavu', 'Dois Riachos', 'Latina', 'Babhanganwa', 'Ambohimahasoa', 'Pandino', 'Chefchaouene', 'Teixeiras', 'Saveh', 'Glens Falls', 'Tonj', 'Bhulwal', 'Luozi', 'Adoni', 'Mahazony', 'Setagaya', 'Banning', 'Minamishimabara', 'Tuz Khurmatu', 'Sundarapandiyam', 'Kodaira', 'Santa Cruz de la Palma', 'Candiota', 'Pittsford', 'Yakeshi', 'Lengerich', "Da'an", 'Pacaembu', 'East Point', 'Miacatlan', 'Sendamangalam', 'Sidi Khaled', 'Landquart', 'Reinbek', 'Millau', 'Itaosy', 'Bellefontaine', 'Aracoiaba', 'Melipilla', 'Monor', 'Collinsville', 'Lenggries', 'Pio Duran', 'Rhondda', 'Abu Sir Bana', 'Cholavaram', 'Kamepalle', 'Yeppoon', 'Sangenjo', 'Hradec Kralove', 'Kibeho', 'Ahmetli', 'Yurihonjo', 'Pontassieve', 'Cahokia Heights', 'Ubaidullahganj', 'Brakel', 'Berezan', 'Noya', 'Reggio Emilia', 'Huesca', 'Bopfingen', 'Bhainsa', 'Dhupgari', 'Itaiba', 'Camenca', 'Chmistar', 'Sao Luis do Quitunde', 'Foix', 'Kond Rud', 'Laterza', 'Schopfheim', 'Kasuga', 'Ouadhia', 'Dala', 'Sonagazi', 'Songjiangcun', 'Mamidalapadu', 'Biarritz', 'Theodore', 'Ngudu', 'Ntorosso', 'Buxar', 'Kaliyaganj', 'Milanoa', 'Spisska Nova Ves', 'Fuli', 'Kisi', 'Jhargram', 'Zaporizhzhia', 'Gazipasa', 'Suyo', 'North Saanich', 'San Ricardo', 'Komatipoort', 'Mallapuram', 'Yitiaoshan', 'Kargilik', 'Kinoni', 'Qazyqurt', 'Cajazeiras', 'Semnan', 'Pandan', 'Koceljeva', 'Ambodimotso Atsimo', 'Valenza', 'Puerto de la Cruz', 'Tepetlaoxtoc', 'Adjala-Tosorontio', 'Zillah', 'Diamondhead', 'Hallstadt', 'Anseong', 'Libacao', 'Bharella', 'Yilan', 'Cadolzburg', 'Papantla de Olarte', 'Fengguangcun', 'Jhelum', 'Dalanping', 'Tegucigalpa', 'Batala', 'El Kerma', 'Yangzhou', 'Passira', 'Hagonoy', 'Kambaduru', 'Jambusar', 'Teddington', 'Murambi', 'Corsham', 'Arzew', 'Nittedal', 'Paingkyon', 'Maravilha', 'Edison', 'Montlhery', 'Mokameh', 'Canegrate', 'Tezoatlan de Segura y Luna', 'Iraucuba', 'Kattivakkam', 'Lino Lakes', 'Kalyani', 'El Playon', 'Kabbur', 'San Miguel Xoxtla', 'Zirndorf', 'Guama Abajo', 'Sevlievo', 'Kawai', 'Edingen-Neckarhausen', 'Chimalapadu', 'Huatan', 'Cascavel', 'Naravarikuppam', 'Pasighat', 'Chimthana', 'Priolo Gargallo', 'Diema', 'Yingshouyingzi', 'Hardoi', 'Shpola', 'Alipur Duar', 'Parambu', 'Igaratinga', 'Xinxing', 'Damboa', 'Edgemere', 'Cypress Lake', 'Lemont', 'Karedu', 'Puerto Villamil', 'Thornaby on Tees', 'Basarabeasca', 'Bolama', 'Islip', 'Sonkach', 'Cinco Ranch', 'Tibiao', 'Kuala Belait', 'Nahazari', 'Culion', 'Villa Alegre', 'Santiago Amoltepec', 'Edgewater', 'Sinuni', 'Posusje', 'Castellbisbal', 'Bhainsoda', 'Jindrichuv Hradec', 'Pattiyurgramam', 'Manlin', 'Kabayan', 'Boulder Hill', 'Pastpar', 'Mablethorpe', 'Kodumur', 'Kegen', 'Wugang', 'Cerquilho Velho', 'Bent Jbail', 'Bornova', 'Hoganas', 'Queretaro', 'Ar Rayyan', 'Bondues', 'Pierrelaye', 'Baiyin', 'La Sierpe', 'Selendi', 'Antony', 'Mathukumnel', 'Kingoonya', 'Nueva Gerona', 'Alitagtag', 'Simojovel de Allende', 'Kralendijk', 'Kanal', 'Umm al Qaywayn', 'Dodola', 'Risaralda', 'Lugoff', 'Steinheim am Albuch', 'Sussen', 'Mulavana', 'Loanda', 'Holiday', 'Catriel', 'Rotonda', 'North Codorus', 'Amanganj', 'Sahakevo', 'Cadca', "Ch'onan", 'Ponte San Giovanni', 'Villa Altagracia', 'Maywood', 'Saraburi', 'Tyldesley', 'Satuluru', 'Neufahrn bei Freising', 'Zigon', 'Calne', 'Serere', 'Cumberland Hill', 'Ivanjica', 'Konz', 'Perth Amboy', 'Alerce', 'Berikai', 'Foum Zguid', 'Had Dra', 'Cinere', 'Dalkeith', 'Fresnillo', 'Yaizu', 'Ciro Redondo', 'Despotovac', 'Kakkat', 'Bassum', 'Najran', 'Segrate', 'Mexico', 'Kinrooi', 'Moknine', 'Andreba', 'Bresso', 'Venadillo', 'Reoti', 'Baharly', 'Los Palmitos', 'Jordania', 'Great Missenden', 'Puerto Cortes', 'Los Alcazares', 'Agbangnizoun', 'Chausa', 'Capistrano', 'West Richland', 'Pingyuanjie', 'Coomera', 'Bteghrine', 'Occhiobello', 'Olagadam', 'Deerfield', 'Aroroy', 'Macedon', 'Spout Springs', 'East Patchogue', 'Ebina', 'Soria', 'Ouled Mimoun', 'Navapur', 'Balumath', 'Hennenman', 'Barkot', 'Denduluru', 'Raca', 'Kumbhari', 'Bayaram', 'Liuzhou', 'Belmonte Mezzagno', 'Harvard', 'Wandiwash', 'Cukurcayir', 'Dinkelsbuhl', 'Chero', 'Ngolobougou', 'Quilevo', 'Saint-Claude', 'Sonari', 'Zaventem', 'Venecia', 'Sharjah', 'Shoreham-by-Sea', 'Governador Archer', 'Cerese', 'Lazarivo', 'Angara-Debou', 'Kaele', 'Mahe', 'Leighton Buzzard', 'Erfelek', 'Willimantic', 'Sao Joaquim da Barra', 'Littau', 'Kayanna', 'Temperley', 'Jirwa', 'Moses Lake', 'Merriam', 'Cerreto Guidi', 'San Miniato', 'Tumby Bay', 'Wielun', 'Tirukkattuppalli', 'North Las Vegas', 'Mahemdavad', 'Nagaiyampatti', 'Atyrau', 'Chorley', 'Ravansar', 'Bettioua', 'Green River', 'Chicacao', 'Karukkalvadi', 'Dar el Beida', 'Langarivo', 'Tettu', 'Santa Barbara', 'Ban Pong', 'San Nicolas de los Garza', 'Tres Coroas', 'Muttattuteruvu', 'Coracao de Maria', 'Nieuwpoort', 'Laatzen', 'Kusumbe', 'Xiangyuncun', 'Coral Springs', 'Escaldes-Engordany', 'Durpalli', 'Kundian', 'Acatlan de Perez Figueroa', 'Mendes', 'Sansale', 'Batan', 'Paslek', 'San Severino Marche', 'Madinat `Isa', 'Youwarou', 'Paghman', 'Aruvikkara', 'Osthofen', 'Tianzhong', 'Marudur', 'Crensovci', 'Ceerigaabo', 'Brusciano', 'Sabana de La Mar', 'Aileu', 'Kutaisi', 'Maebashi', 'Wanggezhuang', 'Banska Stiavnica', 'Tola', 'Hovelhof', 'Johannesburg', "Sant Sadurni d'Anoia", 'Binalonan', 'Dingcheng', 'Henderson', 'Bloxwich', 'Gberouboue', 'Dharmaram', 'Abomsa', 'Imtarfa', 'Rancho Santa Margarita', 'Meuselwitz', 'Ad Dali`', 'Nilandhoo', 'Bayramaly', 'Lufkin', 'Talcher', 'Menaka', 'Rupauli', 'Ipanema', 'Tindwara', 'Piendamo', 'Gennep', 'Kachnar', 'Vitez', 'Oporapa', 'Litvinov', 'Lich', 'Hericourt', 'Sanghar', 'Sao Lourenco', 'Bir Ali Ben Khalifa', 'Hlucin', 'Erith', 'Ebejico', 'Qulsary', 'Pichor', 'Lubon', 'Lufilufi', 'Mayamankurichchi', 'Kulittalai', 'Marshall', 'Weil der Stadt', 'Mahabako', 'Lemoore', "Al M'aziz", 'Behat', 'Mauriti', 'Memphis', 'Antas', 'Novo Mesto', 'Zinkiv', 'Mi`rabah', 'Ksar Sghir', 'Mandapam', 'Cupang', 'Mina', 'Bytom', 'Kikuyo', 'Yankton', 'Spilimbergo', 'Gottmadingen', 'Vanersborg', 'Harduli', 'Wierden', 'Paloncha', 'Chikmagalur', 'Qubadli', 'Longwood', 'Nay Pyi Taw', 'Wald-Michelbach', 'Kafr al Battikh', 'Orai', 'Juchique de Ferrer', 'Swiebodzice', 'Behbahan', 'Sehore', 'Te Awamutu', 'Shacheng', 'Kano', 'Ciftlikkoy', 'Op', 'Yendi', 'Sun Valley', 'Guabari', 'Kalyanpur Bamaiya', 'Rauco', 'Pedro de Toledo', 'Kandhkot', 'Degollado', 'Kasai', 'Krasnik', 'Abu Khashab', 'Middlesbrough', 'Cifuentes', 'Ganxi', 'Mae Sot', 'Raubling', 'Myoko', 'McLean', 'Trail', 'Reggada', 'Migdal Ha`Emeq', 'Banni', 'La Grange Park', 'Ojiya', 'Youngstown', 'East Longmeadow', 'Perunkalattu', 'Guimaraes', 'Sete Lagoas', 'Bhandaria', 'Stretford', 'Corrientes', 'Hurst', 'Satuek', 'Suhum', 'Majiagoucha', 'Caltagirone', 'Asthal Bohar', 'Chipiona', 'Aravan', 'Kiboga', 'Ephrata', 'Lystrup', 'Nowa Deba', 'Vangaindrano', 'New Westminster', 'Cestas', 'Bithan', 'Saraiya', 'Bad Soden am Taunus', 'Cinnaminson', 'Mansidao', 'Heroldsberg', 'Opovo', 'Shinhidaka', 'Mildura', 'Sahatsiho-Ambohimanjaka', 'Dok Kham Tai', 'Szombathely', 'Nova Olinda do Norte', 'Brantford', 'Cobram', 'Sulzbach-Rosenberg', 'Key Biscayne', 'South Ubian', 'Varde', 'Scandiano', 'Santa Cruz do Sul', 'Cedar Park', 'Erstein', 'Matozinhos', 'Buxin', 'Chortiatis', 'Camrose', 'Ettimadai', 'Montagnana', 'Ivybridge', 'Middlewich', 'Tivat', 'Babai Kalan', 'Al Qusayr', 'Vatolatsaka', 'Cruz', 'Linstead', 'Lakkundi', 'Serrolandia', 'Shuibian', 'Llanelli', 'Six-Fours-les-Plages', 'Entre Rios de Minas', 'Saude', "Karbala'", 'Godinlabe', 'Ocatlan', 'Paranatama', 'Begampur', 'Khulm', 'Alegria', 'Partanna', 'Bothaville', 'Vaal Reefs', "Alvorada D'Oeste", 'Cotes de Fer', 'Sibirila', 'Heshancun', 'Adustina', 'Uberlingen', 'Shawano', 'Santa Fe', 'Bolivia', 'Maniche', 'Chikura', 'Amstelveen', 'Mwinilunga', 'Tetiiv', 'Losal', 'Ortakent', 'Mirassol', 'Yerba Buena', 'Kasongo-Lunda', 'Zongdi', 'Pirmed', 'Ashton', 'La Union', 'Anaco', 'Front Royal', 'Faro', 'Votorantim', 'Bengbu', 'Aldaya', 'Khem Karan', 'Dushanbe', 'Campulung Moldovenesc', 'Quixeramobim', 'Chabet el Ameur', 'Pingtung', 'Fukuoka', 'Itapiuna', 'Khiram', 'Lami', 'Morubagalu', 'South Kingstown', 'Chatham', 'Cefalu', 'Pattanakkad', 'Silvan', 'Souk Et-Tleta des Oulad Hamdane', "Mai'Adua", 'Ban Bang Khu Wat', 'Ardal', 'Collegedale', 'Irbid', 'Lanham', 'San Pablo Jocopilas', 'Santuario', 'Betma', 'Inezgane', 'Pout', 'Batcha', 'Irlam', 'Aldoar', 'Atakpame', 'Ban Bo Luang', 'Finchley', 'Tamalous', 'Bagnols-sur-Ceze', 'Matsushima', 'Yecuatla', 'Wuxue', 'Ocean Springs', 'Petilia Policastro', 'Benenitra', 'Elgin', 'Guntupalle', 'Itirucu', 'Chengde', 'Castelvetro di Modena', 'Ramkali', 'Marki', 'Loudima Poste', 'Usta Muhammad', 'Banfield', 'Patna', 'Lendinara', 'Toliara', 'Tioribougou', 'Mosbrough', 'Yenangyaung', 'Analanampotsy', 'Zgornje Gorje', 'Al Qubbah', 'Lakeshore', 'Kocaali', 'Sombor', 'Punta Arenas', 'Elmira', 'Serekunda', 'Ban Khlong', 'Ried im Innkreis', 'Olivehurst', 'Naruto', 'Plainville', 'Nakhal', 'Hampton Bays', 'Yaita', 'Ambatosia', 'Guindulman', 'Wommelgem', 'Jasper', 'Sao Joao da Madeira', 'Curiuva', 'Destin', 'Zigong', 'Kushijima', 'Fameck', 'Telmar', 'Noboribetsu', 'Guanare', 'Adis `Alem', 'Angatuba', 'Santa Cruz Zenzontepec', 'Dongguazhen', 'Castle Rock', 'Millbrook', 'Erlenbach am Main', 'Kamloops', 'South Jordan', 'Colombia', 'Baguio City', 'Turkaguda', 'Momchilgrad', 'Aiyappan Kovil', 'Strada', 'Rostock', 'Lichtenfels', 'Al Majaridah', 'Bagh-e Bahadoran', 'Irvington', 'Maheshwar', 'Jhagarua', 'Birchington', 'Iba', 'Wang Nam Yen', 'Heilbad Heiligenstadt', 'Meinerzhagen', 'Shandiz', 'Fallston', 'Landivisiau', 'San Juan de Aznalfarache', 'Ettaiyapuram', 'Kota Kinabalu', 'Roseira', 'Fusagasuga', 'Bovisio Masciago', 'Vilpatti', 'Old Forge', 'Kadiyam', 'Goraya', 'Almelo', 'Rescaldina', 'Bad Munstereifel', 'Atebubu', 'Panapakkam', 'Morag', 'Ferrier', 'Velaux', 'Sellersburg', 'Kambhampadu', 'Ubbergen', 'Quilandi', 'Digos', 'Slavutych', 'Terre Haute', 'Schlitz', '`Unayzah', 'Zaoyang', 'Beauvais', 'Allacapan', 'Shangjing', 'Nayagarh', 'Kanasin', 'Velasco Ibarra', 'Agan', 'Nouna', 'Puthiyangadi', 'Salar', 'Povoa de Lanhoso', 'Brotas de Macaubas', 'Donggang', 'Inuyama', 'Balakan', 'Camara de Lobos', 'Tanauan', 'Ribas do Rio Pardo', 'Fagundes', 'San Rafael Petzal', 'Iraci', 'Kadena', 'Ramos Mejia', 'Vaasa', 'Goribidnur', 'Changping', 'Oregon', 'Schiller Park', 'Mereeg', 'Toro', 'Lynnwood', 'Iiyama', 'Punta Alta', 'Cervello', 'Sariosiyo', 'Kunitachi', 'Thu Dau Mot', 'Viljandi', 'Kurichedu', 'Saugerties', 'Noveleta', 'Paripiranga', 'Odesa', 'Ibirama', 'Coamo', 'Smolyan', 'Montilla', 'Khan Bebin', 'Ambovonomby', 'Serinyol', 'Ayapel', 'San Antonio de los Cobres', 'Kadiri', 'Quatre Bornes', 'Bareh', 'Buritis', 'Los Cerrillos', 'Potenza', 'Sughrain', 'Nehbandan', 'Ampthill', 'Palmeira das Missoes', 'Dombovar', 'Angadanan', 'Yuanping', 'Martapura', 'Curumani', 'Benhao', 'Ban Bang Toei', 'Stadskanaal', 'Secanj', 'Renaico', 'Buckhall', 'Tirutturaippundi', 'Komotini', 'Suroth', 'Guene', 'Canudos', 'Bhagsar', 'Lubben (Spreewald)', 'La Ciotat', 'Stirling', 'Praia da Vitoria', 'Pakra', 'Harpanahalli', 'Tururu', 'Andranomanelatra', 'Sirnia', 'Steyr', 'Lake Stickney', 'Villa Ojo de Agua', 'Tuyserkan', 'Saint-Jean-de-la-Ruelle', 'Ranapur', 'Kheiredine', 'Villa El Carmen', 'Sherborne', 'Kerai', 'Barughutu', 'Tienen', 'Cherkasy', 'Turmero', 'Ahigbe Koffikro', 'Rio Azul', 'Baro', 'Koshanam', 'Malinalco', 'Afula', 'Perpignan', 'Timayy al Imdid', 'Kotgir', 'Forbach', 'Malka', 'Palm Beach', 'Falmouth', 'Beihai', 'Kalbarri', 'Ivato', 'Picos', 'Marcon', 'Pieksamaki', 'Carlet', 'Salobe', 'Yacimiento Rio Turbio', 'Omiyacho', 'Kodikkulam', 'Shorapur', 'Dniprorudne', 'Neutraubling', 'Kizhakkemanad', 'Banagi', 'Mount Gambier', 'Malkajgiri', 'Sokhumi', 'Douera', 'Chekfa', 'Mairi', 'Ortaca', 'Loria', 'San al Hajar al Qibliyah', 'Galeana', 'Aguadulce', 'Carepa', 'Jogipet', 'Tambacounda', 'Hingham', 'Huntington Beach', 'Yeniciftlik', 'Talupula', 'Naruar', 'Tudela', 'Ayyampalaiyam', 'Kolwara', 'Cisauk', 'Jettihalli', 'Surt', 'Allen Park', 'Casier', 'Zhaxi', 'Itu', 'Quilombo', 'Gharghur', 'Keerbergen', 'Gbarnga', 'Degana', 'Gretna', 'Cerro Maggiore', 'Santa Cruz Michapa', 'Diamou', 'Tiruppanandal', 'Bayji', 'Njeru', 'Sour', 'Martinengo', 'Rocklin', 'Saint-Vallier', 'Yabrud', 'Alfortville', 'Kopoky', 'Phuoc Long', 'Giarre', 'Kindu', 'Ain Cheggag', 'Babila', 'Busumbala', 'Basmenj', 'Tangjia', 'Karimunjawa', 'Bhagatpur', 'Murtazabad', 'Konganapuram', 'Kapelle', 'Sidi Lamine', 'Nezahualcoyotl', 'Prague', 'Chandreru', 'Nelliyalam', 'Kouhu', 'Bollullos par del Condado', 'Chiryu', 'Ain Defla', 'Trzin', 'Llandybie', 'Ushtobe', 'Mecitozu', 'Lys-les-Lannoy', 'Campo Ere', 'Antambohobe', 'Kanifing', 'Mulukuku', 'Micoud', 'Siran', 'Markt Schwaben', 'Porangatu', 'Manimala', 'Liege', 'Shiremoor', 'Itaperucu', 'Puerto Pilon', 'Bourg-la-Reine', 'Kadalur', 'Kodikuthi', 'Thame', 'Soahany', 'Kyritz', 'Longtoushan Jiezi', 'Meiktila', 'Centenario do Sul', 'Cunit', 'Engerwitzdorf', 'Ebreichsdorf', 'Newbury', 'Almoloya de Alquisiras', 'Krishnarayapuram', 'Kadamakudi', 'Yongzhou', 'Sartell', 'Madeley', 'Koszutka', 'Iacanga', 'Ekhari', 'Kratie', 'Nyeri', 'Qazigund', 'Langdu', 'Pong Nam Ron', 'Timon', 'Gyeongsan', 'Minerbio', 'Singia', 'East Niles', 'Mmopone', 'Masaurhi Buzurg', 'Liantangcun', 'Banino', 'Altopascio', 'Tillaberi', 'Riachao do Jacuipe', 'Nalikkalpatti', 'Romanshorn', 'Xinjing', 'Cha-am', 'Cumpana', 'Shenandoah', 'Pathanamthitta', 'Tata', 'Lake Elsinore', 'Vilavur', 'Ogdensburg', 'Santa Maria de Cayon', 'Rakvere', 'Baretha', 'Ankaraobato', 'Miguel Pereira', 'West Islip', 'Tekkekara Kizhakku', 'Chita', 'Futrono', 'Cordon', 'Ampefy', 'Santo Domingo de los Colorados', 'Mtskheta', 'Dassa-Zoume', 'San Cristobal', 'Corona', 'Telemaco Borba', 'Pedregulho', 'Walajapet', 'Kurnool', 'Bois-Colombes', 'Bitritto', 'Baia Formosa', 'Mopti', 'Sassari', 'General Pinedo', 'Capim Grosso', 'Sanok', 'Nawashahr', 'Radom', 'Esbjerg', 'Scheveningen', 'Woodland Park', 'Ljubno', 'Tirkarur', 'Mississauga', 'Oudewater', 'Plochingen', 'Villa Donato Guerra', 'Kuangfu', 'Mabuhay', 'Deh-e Shu', 'Anjur', 'Sung Noen', 'Fife', 'Mhangura', 'Chalchuapa', 'Herisau', 'Cesis', 'Flensburg', 'Prado', 'Fujisaki', 'Winterveld', 'Mlimba', 'Chintakunta', 'Marsaxlokk', 'Munakata', 'Corciano', 'Sibilia', 'Pullalacheruvu', 'Trescore Balneario', 'Querfurt', 'Palestine', 'Salta', 'Scafati', 'Atbara', 'Unhel', 'Rosenheim', 'Khambhat', 'Serafina Corea', 'Guayos', 'Ladson', 'Martinho Campos', 'Rondon do Para', 'Gandajika', 'Konigsbrunn', 'Dabrowa Tarnowska', 'Dudhpura', 'Hurdiyo', 'Vi Thanh', 'Blaubeuren', 'Berubari', 'Wujiaqu', 'Warrensville Heights', 'Duc Pho', 'Misungwi', 'Jeju', 'Diamond Springs', 'Rabot', 'Chateau-Gontier', 'Culver City', 'Chanhassen', 'Campamento', 'Venturosa', 'Bom Principio', 'Huquan', 'Odlabari', 'Burbage', 'Siladon', 'Nutley', 'Kinzau-Vuete', 'Ickenham', 'Belavabary', 'Kariyapatti', 'Dzierzoniow', 'North Cowichan', 'Mine', 'Srvanampatti', 'Uba', 'Monte Alegre de Sergipe', 'Espanola', 'Lenart v Slovenskih Goricah', 'Hirske', 'Nallikodur', 'Kisara', 'Salamanca', 'North New Hyde Park', 'Prudente de Morais', 'Peragamanna', 'Kaippakancheri', 'Venray', 'Kitakata', 'Napindan', 'Beinasco', 'Anyksciai', 'Piera', 'Morab', 'Charef', 'Fraserburgh', 'Maghalleen', 'Palatine', 'Batuan', 'Maldegem', 'Kalabahi', 'Hollola', 'Quimbele', 'Ippy', 'Oirschot', 'Hrib-Loski Potok', 'Ras el Metn', 'Dalaba', 'Thun', 'Caguas', 'Jadayampalaiyam', 'Biaora', 'Brno', 'Fino Mornasco', 'Lohagaon', 'Silale', 'Kouka', 'Tulchyn', 'Caimito', 'Teijlingen', 'Szczecinek', 'Oulad Rahmoun', 'Khat Azakane', 'Koupela', 'Giddalur', 'Francisco Sa', 'Adda-Doueni', 'Santa Catarina Mita', 'Nova Olimpia', 'Sabalito', 'Hire Vadvatti', 'Duvva', 'Iloilo', 'Catalca', 'Pribram', 'McCalla', 'Oleggio', 'Lajes', 'Entroncamento', 'Vidapanakallu', 'Moju', 'East Brunswick', 'Samalkot', 'Birnin Konni', 'Ipaba', 'Sebba', 'Joao Camara', 'Sho`rchi', 'De Witt', 'Ebolowa', 'Nandipeta', 'Bokaro', 'Port Glasgow', 'Vellakkinar', 'Goldenstedt', 'Al Jawf', 'Adliswil', 'Castelnaudary', 'Bula', 'Swan Hill', 'Brikcha', 'Valkurti', 'Lake Hopatcong', 'Ingeniero Jacobacci', 'Kemise', 'Obidos', 'Central Saanich', 'Shioya', 'Ulus', 'Chicago Heights', 'Tracy', 'Al Matariyah', 'Chelmza', 'Ban Wat Chan', 'Chelles', 'Basni', 'Moriya', 'Arzignano', 'Bharanikavu Tekku', 'Paikpar', 'Lake Elmo', 'Pottanikad', 'Gorakhpur', 'Rose Hill', 'Sallimedu', 'Kearns', 'Berehove', 'Kidderminster', 'Marieville', 'Lillers', 'Voreppe', 'Rovenky', 'Spittal an der Drau', 'Vohipaho', 'Selnica ob Dravi', 'Musile di Piave', 'Sado', 'El Ancer', 'Lamego', 'Kaeng Khro', 'Laguna Hills', 'Bag', 'Krotoszyn', 'Makhu', 'Kotha Guru', 'Bandar-e Bushehr', 'Borsbeek', 'Manbij', 'Mabaruma', 'Godohou', 'Landau an der Isar', 'Zamboanga City', 'Bendarhalli', 'Vanavasi', 'Karlovac', 'Saraland', 'Vaucresson', 'Clusone', 'Kumi', 'Turayf', 'Konakondla', 'Adet', 'Yuanhucun', 'Iramaia', 'Buttar Khurd', 'New Richmond', 'McDonough', 'Monte Cristo', 'Save', 'Khutaha', 'Erzin', 'Lovrenc na Pohorju', 'Tangail', 'Tadmur', 'Midyat', 'Guanajay', 'Colombes', 'Garbsen', 'Leribe', 'Gamail', 'Abu Hulayfah', 'Azcoitia', 'Abelardo Luz', 'Kamavarapukota', 'Bolongongo', 'Paraiso', 'Dum Duma', 'Auckland', 'Peterlee', 'Turiacu', 'Xaignabouli', 'Huangyan', 'Camarillo', 'Volksrust', 'San Jose de Guaribe', 'Roca Sales', 'Glogow Malopolski', 'Tototlan', 'Murrells Inlet', 'Oxford', 'Amodei', 'Harbel', 'Hue', 'Kajhi Hridenagar', 'Silvani', 'Voorschoten', 'Villeneuve-sur-Lot', 'Kavaklidere', 'Chibemba', 'Avellino', 'Brejinho', 'Oulad Cherif', 'Abra de Ilog', 'Kurtkoti', 'Tovala', 'Gqeberha', 'Almoloya', 'Sao Joao do Paraiso', 'Pella', 'Zabre', 'Higashimatsushima', 'Fairland', 'Arambakkam', 'Skvyra', 'Lamacaes', 'Lumbreras', 'Campbelltown', 'Anoviara', 'Jurawanpur Karari', 'Imatra', 'Medford', 'Ban Phan Don', 'Marietta', 'Pennagaram', 'Rotselaar', 'Tezze sul Brenta', 'Ponnada', 'Golmud', 'Ehningen', 'Amambai', 'Princeton', 'Tepeji del Rio de Ocampo', 'Mosonmagyarovar', 'Tianguistengo', 'Mongagua', 'Elkhorn', 'Langenthal', 'Mehrabpur', 'Obra', 'Kabo', 'Ban Mueang Nga', 'Nga Bay', 'Kannankurichchi', 'Bayou Blue', 'Et Tira', 'Solita', 'Bauchi', 'Uribia', 'Painkulam', 'Calkini', 'Sanchor', 'Zoersel', 'Tomiya', 'Huvin Hippargi', 'Teodoro Sampaio', 'Lambton Shores', 'Tissemsilt', 'Kolding', 'Quillacollo', 'Tenosique', 'Birnagar', 'Laeken', 'Talant', 'Richterswil', 'Kundgol', 'Vitry-sur-Seine', 'Conwy', 'Oqtosh Shahri', 'Yellapur', 'Kumbakonam', 'Jizhou', 'Visoko', 'Kassala', 'Bloemendaal', 'Perl', 'Kaniv', 'Ensenada', 'Armavir', 'Hirnyk', 'Turgutalp', 'Gaoual', 'Mazzarino', 'Batley', 'Trichinopoly', 'Ban Kang', 'Manado', 'Gerstetten', 'Nechmeya', 'Khan Yunis', 'Anatuya', 'Sosale', 'Nindiri', 'Uzwil', 'Kesli', 'Srbobran', 'Jacmel', 'Malibu', 'Mulongo', 'Balehonnur', 'Froyland', 'Vernal', 'Kollegal', 'San Juan', 'Wayland', 'Sirdala', 'Redwood City', 'Patalia', 'Lycksele', 'Alcantaras', 'Majdal Shams', 'Villa de San Diego de Ubate', "Sa-ch'on", 'Jalakandapuram', 'Husum', 'Coral Terrace', 'Chropaczow', 'Khalari', 'Talence', 'Arroyo Naranjo', 'Gundrajukuppam', 'Hinis', 'Luoyang', 'Argenteuil', 'Dnipro', 'Zajecar', 'Usgao', 'Serra Branca', 'Hunduan', 'Anosipatrana', 'Constantina', 'Sirur', 'Bandar Seri Begawan', 'Zavrc', 'Rampur Parhat', 'Saito', 'Jurupa Valley', 'Pedras de Fogo', 'Kalymnos', 'Jerissa', 'Beni Khiar', 'Ginebra', 'Recco', 'Kanajanahalli', 'Miani', 'Rionegro', 'Honavar', 'Bolu', 'Shepherdsville', 'Babura', 'Vilnohirsk', 'Saint-Loubes', 'Machados', 'Roselle', 'Valle de Bravo', 'Stoke Gifford', 'Settiyarpatti', 'Citrus Heights', 'Amityville', 'Muntinlupa City', 'Poco Branco', 'Ciudad Cortes', 'Muncie', 'Ballenstedt', 'Kipushi', 'Simri', 'Satrovka', 'Parasi', 'Dumra', 'Tha Luang', 'Galatina', 'Chiampo', 'Robinson', 'Chelsfield', 'Hersbruck', 'Ambodivoanio', 'Carache', 'Hagfors', 'Hazlet', 'Luchong', 'Maili', 'Kiangara', 'Dengtacun', 'Tuodian', 'Prudhoe', 'Codogno', 'Philippeville', 'Agsu', 'Onomichi', 'Kajiado', 'Louang Namtha', 'Sobral', 'Hermosa', 'Ouargla', 'Welwyn Garden City', 'Niel', 'San Ramon', 'Eutin', 'Patarra', 'Plainview', 'Iioka', 'Shamshernagar', 'Ravanusa', 'Madha', 'Mmadinare', 'Rawalpindi', 'Calcinaia', 'Chak Pahar', 'Sunbury', 'Cruzeiro do Oeste', 'Pemberton', 'Putian', 'Sapucaia', 'Worpswede', 'Anaheim', 'Tupancireta', 'Lyaskovets', 'Setouchi', 'Tanay', 'Takerbouzt', 'Tarabuco', 'Volpago del Montello', 'Al `Arish', 'Foley', 'Avion', 'Dina', 'Kazerun', 'Tuljapur', 'Tocuyito', 'Arakawa', 'Sao Jose do Rio Preto', 'Labuan', 'Kadama', 'Kollo', 'Nieuwkoop', 'Butalangu', 'Haeryong', 'Minbu', 'Nobeoka', 'Heunghae', 'Chaidari', 'Hoa Thuong', 'Oelde', 'Finote Selam', 'Valayam', "O'Fallon", 'Araquari', 'Nazaria', 'Hofgeismar', 'Masaya', 'Saarlouis', 'Schluchtern', 'Mankara', 'Mersa', 'Sioux Falls', 'Altinopolis', 'South Plainfield', 'Eccleston', 'Louangphabang', 'Kudachi', 'Outjo', 'Arluno', 'Almus', 'Zehak', 'Elsenfeld', 'Garag', 'Agano', 'Ferndale', 'Yui', 'Kasba Maker', 'Krakow', 'Aghbalou Aqourar', 'Zakynthos', 'Zhuchangba', 'Tan Van', 'Xiezhou', 'Cinisi', 'Drogheda', 'Balurghat', 'San Sebastian de los Reyes', 'Kavaratti', 'Egirdir', 'Tolentino', 'Iwanai', 'Bhongir', 'Nuzvid', 'Amaha', 'Jemaat Oulad Mhamed', 'Karlovy Vary', 'Kengri', 'Khawaspur', 'Palm Bay', 'Murungattoluvu', 'Autlan de Navarro', 'Bursa', 'Bruhl', 'Montemor-o-Velho', 'Berbera', 'Mattoon', 'Mboro', 'Texcaltitlan', 'Ellicott City', 'Banbridge', 'Bandipur', 'Talakkad', 'Ostroda', 'Imphal', 'Porto Belo', 'Chattanooga', 'Cogan', 'Oued el Kheir', 'Girard', 'Tongshan', 'Bhogapuram', 'Cabanglasan', 'Caleta Olivia', 'Lozovo', 'Hythe', 'Izra', 'Toba', 'Sipocot', 'Borgosatollo', 'Messina', 'Manadhoo', 'Jumri Tilaiya', 'Ollerton', 'Riverhead', 'Dinguiraye', 'Salehpur', 'Horizon City', 'Tettuppatti', 'Manorville', 'Gumaca', 'Barpathar', 'Ankasakasabe', 'Rio Caribe', 'Mutare', 'Kiramangalam', 'Voorst', 'El Ancor', 'Marsta', 'Wombourn', 'Bagong Silangan', 'Mogiyon', 'Maracaibo', 'Amay', 'Ostrow Wielkopolski', 'Herent', 'Yecla', 'Barrington', 'Cananeia', 'Crowborough', 'Kandori', 'Toconao', 'Cazombo', 'Basel', 'Halavagalu', 'Baile an Bhiataigh', 'Baraboo', 'Kambla', 'Hung Yen', 'Sun City Center', 'Panniperumthalai', 'Gwacheon', 'Hikawa', 'Zamora', 'Las Lajas', 'Dan Gorayo', 'Racale', 'La Paloma', 'Kon Tum', 'Oloron-Sainte-Marie', 'Piacabucu', 'Bourke', 'Piraeus', 'Ciudad Benito Juarez', 'Peje', 'Sagar', 'Sivagiri', 'Biyahmu', 'Vianopolis', 'Curitiba', 'Larkspur', 'Harenkarspel', 'Reghin', 'Kapchorwa', 'Hohenau', 'Villa Alsina', 'Tagami', 'We', 'Lac des Oiseaux', 'Pariharpur', 'Volodymyrets', 'Areraj', 'Chupinguaia', 'Anantpur', 'Gif-sur-Yvette', 'Bronkhorstspruit', 'Krimpen aan den IJssel', 'Rock Island', 'Ban Huai So Nuea', 'Barcellona-Pozzo di Gotto', 'Tumbippadi', 'Pontoise', 'Bandar Emam', 'Reconquista', 'Braniewo', 'Vettaikkaraniruppu', 'Ancuabe', 'Hitachi', 'Malolos', 'Ngaparou', 'Bayamon', 'Abyek', 'Tlahuiltepa', 'Lugano', 'Crispiano', 'Padre Hurtado', 'Wlodawa', 'Placheri', 'General Martin Miguel de Guemes', 'Yangyuhe', 'La Chapelle-Saint-Mesmin', 'Darmanesti', '`Ayn al `Arab', 'Yorkshire', 'Calcinato', 'Sadiqpur Maraul', 'Tadley', 'Nkokonjeru', 'Fatime', 'Selby', 'Kafr Ruma', 'South Brunswick', 'Yahotyn', 'North Grenville', 'Mahajjah', 'Wangyuanqiao', 'Hrastnik', 'Luckenwalde', 'Kalaiya', 'Matagalpa', 'Bulbula', 'Shirva', 'Nogoya', 'Leulumoega', 'San Diego Country Estates', 'Magong', 'Kourouma', 'Vorkadi', 'Shibuya', 'Madathapatti', 'Cilegon', 'Betun', 'Mantes-la-Jolie', 'Tokar', 'Medog', 'Medleri', 'El Viso del Alcor', 'Sao Joaquim de Bicas', 'Cyuve', 'Igarapava', 'Hamilton Township', 'Baldock', 'Evren', 'Sangarebougou', 'Sohagpur', 'Razua', 'Naurhiya', 'Tromsdalen', 'Mairipora', 'Yesilyurt', 'Malacatancito', 'Szydlowiec', 'Masiaboay', 'Taozhuangcun', 'Gongyefu', 'Qiziltepa', 'Pivnichne', 'Anandpur', 'Machinga', 'Irmo', 'Guntapalli', 'Indang', 'Baetov', 'Sarvar', 'Eberbach', 'Douar Oulad Driss', 'Tanagura', 'Ikare', 'Yumurtalik', 'Kapuvar', 'Margraten', 'General Las Heras', 'Yinhua', 'Badnawar', 'Grao Mogol', 'Dalja', 'Abensberg', 'Roque Perez', 'Rustampur', 'Cajati', 'Sarso', 'Samadh Bhai', 'Azazga', 'Kenar', 'Ealing', 'Versailles', 'Kalima', 'Stillorgan', 'Portao', 'Kwinana', 'Gemena', 'Oulad Chikh', 'Arslanbob', 'Sumenep', 'Kleppe', 'Anisoc', 'Chapel Hill', 'Timisoara', 'Saint-Denis', 'Temescal Valley', 'Alayor', 'Siuri', 'Puerto Colombia', 'San Andres Timilpan', 'Tadjenanet', 'La Londe-les-Maures', 'Tivaouane', 'Singaraja', 'Frisco', 'Pozzuoli', 'Robe', 'Kutina', 'Escoublac', 'Ginosa', 'Weil im Schonbuch', "Jeffrey's Bay", 'Emporia', 'Arbaoua', 'Saint-Augustin-de-Desmaures', 'Asslar', 'Bischofshofen', 'Vakhsh', 'Upper Macungie', 'Karpuzlu', 'Dattapara', 'Vaikuntam', 'Haverhill', 'Craiova', 'Santo Amaro da Imperatriz', 'Orpington', 'Tibri', 'Arlov', 'Luwuk', 'Pipra Naurangia', 'Yingkou', 'Sidi Okba', 'Torrox', 'Toki', 'Matera', 'Cristinapolis', 'Ivano-Frankivsk', 'Ramu', 'Mangawan', 'Ishioka', 'Leones', 'Mougins', 'Maco', 'Hanko', 'Backnang', 'Bakamune', 'Gubbi', 'Bazhou', 'Guangzhou', 'Badajoz', 'Masdi', 'Estcourt', 'Kale', 'Amaraji', 'Guire', 'Mesa', 'Utebo', 'Al Khawr', 'Pararia', 'Hapur', 'Maigo', 'Grimmen', 'Saratoga Springs', 'Villejuif', 'Tucapel', 'Zhoujiajing', 'Nicosia', 'Tirumalaiyampalaiyam', 'Tamorot', 'Radford', 'Ambarakaraka', 'Maragogipe', 'Mettlach', 'Nautla', 'Nuevo Paysandu', 'Kuppachchipalaiyam', 'Steinbach am Taunus', 'Calverton', 'Ainring', 'Olanchito', 'Calayan', 'Girardota', 'Salem Lakes', 'South Houston', 'Maharajpur', 'Saboya', 'San Jose de Maipo', 'Chennimalai', 'Kecioren', 'Auchi', 'Elk Plain', 'Galbois', 'Shuilin', 'Malabuyoc', 'Buyuk Cakirman', 'Rishra', 'Aruja', 'Piata', 'Uvira', 'Sohana', 'Garmdarreh', 'Wulingshancun', 'Nijoni', 'Kudlu', 'Braunfels', 'Attappampatti', 'Pedana', 'Guayaquil', 'Crotone', 'Anse a Pitre', 'Svishtov', 'Couzeix', 'Seinajoki', 'Schagen', 'Weatherford', 'Hidaka', 'Klazienaveen', 'Al Harah', 'Qabb Elias', 'Al Fashn', 'Berja', 'Prieska', 'Ban Son Loi', 'Togo', 'Asagiri', 'Zafferana Etnea', 'The Bottom', 'Kannapolis', 'Ussel', 'Nukus', 'Susehri', 'Yawata-shimizui', 'Bhayala', 'Leagrave', 'Hermitage', 'Seynod', 'Lubawa', 'Gran', 'Nazerabad', 'Shenzhen', 'Lone Tree', 'Birecik', 'Ikongo', 'Amudalavalasa', 'Rajgarh', 'Xalpatlahuac', 'Lunga-Lunga', 'Haubourdin', 'Ambolomoty', 'Nyamata', 'Sukhsena', 'Wanzleben', 'Brazopolis', 'Pupiales', 'Smederevo', 'Bischofsheim', 'Mato Verde', 'Jawhar', 'Shiraoi', 'Huanghuajing', 'San Rafael Arriba', 'Freienbach', 'Edegem', 'Sao Sebastiao de Lagoa de Roca', 'Yangirabot', 'Chhapera', 'Izu', 'East St. Louis', 'Chegur', 'Naawan', 'Shuangxianxiang', 'Kalamaria', 'Shangzhi', 'Bardiha Turki', 'Alfeld', 'Sacele', 'Pereira Barreto', 'Tiptree', 'Bala Koh', 'Buyende', 'Macuspana', 'Gediz', 'Tocancipa', 'Taneichi', 'Makabana', 'Richland Hills', 'Puerto Limon', 'Dighwara', 'Radovis', 'Hobbs', 'Quinchia', 'Santa Catarina Masahuat', 'Roquetas de Mar', 'Willowick', 'Chahana', 'Samai', 'Tasiilaq', 'Anao-aon', 'Kattanam', 'Derbisek', 'Changuinola', 'Schwalmstadt', 'Camposampiero', 'Mayumba', 'Raipur', 'Stanmore', 'Wesseling', 'Poldasht', 'Keora', 'Hettstedt', 'Kihoku', 'Cabimas', 'Stadthagen', 'Gorontalo', 'Cherial', 'Khonobod', 'Barntrup', 'Bhojpur Kadim', 'Jaltenco', 'Attendorn', 'Andranovory', 'Binche', 'Muscat', 'Ludwigsburg', 'Ain Zora', 'Dabutou', 'Landgraaf', 'Kaunas', 'Sandwa', 'Dingolfing', 'Rothenbach an der Pegnitz', 'Sheghnan', 'Mawai', 'Sunnyside', 'Esteio', 'Carnoustie', 'Sakura', 'Monte Compatri', 'Rize', 'Walvisbaai', 'Gouande', 'Bardipuram', 'Rockford', 'Pipra Dewas', 'Jagdishpur', 'Sonwan', 'Bridgewater', 'Akpassi', 'Zoumi', 'Tongchuanshi', 'Guagua', 'Ayacucho', 'Lom', 'Wang Sombun', 'Pran Buri', 'Rass el Djebel', 'Pita', 'Ait Hani', 'Saraikela', 'Cepagatti', 'Monte Patria', 'Bessemer', 'Santa Juliana', 'Obuse', 'Schilde', 'Varel', 'Almhult', 'Wesel', 'Xanten', 'Bastogne', 'Glyka Nera', 'Tasova', 'Rampur Shamchand', 'Maarssen', 'La Asuncion', 'Streaky Bay', 'Ridgefield Park', 'Tichi', 'Busuanga', 'Arakvaz-e Malekshahi', 'Ban Bong Tai', "Vel'ke Kapusany", 'Maracas', 'Rio das Flores', 'Moers', 'Bexleyheath', 'Savsat', 'Morsott', 'Germersheim', 'Pungulam', 'Sussex', 'Bilwi', 'Abare', 'El Tumbador', 'Guebwiller', 'Bourg-en-Bresse', 'Koskapur', 'Orvieto', 'Quartz Hill', 'Espita', 'Vechelde', 'Banting', 'Cancun', 'Humahuaca', 'Hockley', 'Barwan', 'Gravesend', 'Palhalan', 'Galway', 'Shotley Bridge', 'Guioyo', 'Xaxim', 'Dunkirk', 'Amreli', 'Balqash', 'Buruanga', 'Nachchandupatti', 'Nejo', 'Lumba-a-Bayabao', 'Harri', 'Gaoliying Ercun', 'Erumakkuzhi', 'Campina Grande', 'Palimbang', 'Al Jabayish', 'Gurdaspur', 'Spaichingen', 'Le Pontet', 'Villiers-sur-Marne', 'Doruma', 'Isa', 'Puerto San Jose', 'Gunupur', 'Salcea', 'Maruttuvakkudi', 'Nsanje', 'Mahamda', 'Pola de Siero', 'Radenci', 'Chincha Alta', 'Ed Damazin', 'Red Bank', 'Whitehaven', 'Oulad Driss', 'Edinburg', 'Floresta', 'Vili', 'Saghar Sultanpur', 'Salon-de-Provence', 'Batufah', 'Dalkola', 'Fostoria', 'Yauco', 'Maridi', 'Nueva Imperial', 'Saint-Jean-sur-Richelieu', 'Tupaciguara', 'Palmdale', "Notre-Dame-de-l'Ile-Perrot", 'Rio Largo', 'Ntossoni', 'Nagnur', 'Wallkill', 'Breda', 'Solindabad', 'Penkridge', 'Agui', 'George Mason', 'Aliartos', 'Itatinga', 'Tekkampatti', 'Turicato', 'Chorleywood', 'Grootfontein', 'Kassa', 'Kotli', 'Sayreville', 'Kottavalasa', 'North Plainfield', 'Madanapalle', 'Santa Eulalia', 'Farnborough', 'Lerwick', 'Rohar', 'Holalkere', 'Gavirate', 'Pace', 'Bharokhara', 'Oraiokastro', 'Senekane', 'Ambohitrarivo', 'Regente Feijo', 'Ambotaka', 'El Crucero', 'Gummudipundi', 'Romilly-sur-Seine', 'Tanggemu Nongchang', 'Chubek', 'Quattro Castella', 'Sai Buri', 'Piuma', 'Cerveteri', 'West Rancho Dominguez', 'Carmona', 'Santa Maria', 'Cayiralan', 'Wennigsen', 'Batalha', 'Vatican City', 'Bahrain', 'Tenente Ananias Gomes', 'Lusambo', 'Angra dos Reis', 'Pilkhua', 'San Marcelino', 'Kuching', 'Moche', 'Straza', 'Ban Ueam', 'Kibale', 'Shahdadpur', 'West Long Branch', 'Killai', 'Chandia', 'Los Teques', 'Hpa-An', 'Wenden', 'Sao Bento', 'Wentorf bei Hamburg', 'Farajok', 'Uppugunduru', 'Samkir', 'Samraong', 'Pind Dadan Khan', 'Rajmahal', 'Paz de Ariporo', 'Rincon de la Victoria', 'Vyshhorod', 'Khaniadhana', 'Fuxing', 'Xibang', 'Antanananivo', 'Peekskill', 'Kanhauli Manohar', 'Kengtung', 'Yahyali', 'Yecun', 'Albolote', 'Etrotroka', 'Robstown', 'Bacoli', 'Godfrey', 'Savastepe', 'Netphen', 'Cam', 'Moortebeek', 'Koufalia', 'Tabatinga', 'Qadirganj', 'Tlanchinol', 'Kokopo', 'Parelhas', 'Bergkamen', 'Crossville', 'Charallave', 'Mahugaon', 'Tazhakudi', 'Nawa Nagar Nizamat', 'Rizhao', 'Bilasipara', 'Kfar Aabida', "E'erguna", 'Rio Branco', 'Buique', 'Maesawa', 'Rabak', 'Noqui', 'Maxcanu', 'Otegen Batyr', 'Nellipoyil', 'Oakville', 'Serowe', 'Conselheiro Pena', 'Ban Fang Tuen', 'Santa Isabel', 'Sukhasan', 'Ocos', 'Cai Lay', 'Mangamila', 'Sikar', 'Ogoshi', 'Wimbledon', 'Islam Qal`ah', 'Iawar', 'Kuala Lipis', 'Gressier', 'South Laurel', 'Greendale', 'Ilfracombe', 'Prestonpans', 'Stonegate', 'Willich', 'Forssa', 'Enghien-les-Bains', 'Balintawak', 'Kitgum', 'Pucallpa', 'Bhilai Karanja', 'Uzundere', 'Boshof', 'Maradi', 'Bekasi Kota', 'Mission', 'Lecherias', 'Valverde del Camino', 'Bojaca', 'Khowai', 'Heerlen', 'Bucyrus', 'Ipameri', 'Kadaiyam', 'Guanagazapa', 'Lencois', 'Riosucio', 'Sredets', 'Rio de Janeiro', 'Peringuzha', 'Rabat', 'Kupang', 'Louis Trichardt', 'Tlalixtac de Cabrera', 'Quincy', 'Kerrville', 'Khandala', 'Waconia', 'Aventura', 'Mahabad', 'Ciudad Miguel Aleman', 'Al Hamdaniyah', 'Mankur', 'Dumbea', 'Sungailiat', 'Huitan', 'Naubatpur', 'Soubre', 'Kaniama', 'Dholbaja', 'Kuala Kapuas', 'Oplotnica', 'Rampur Hat', 'Sangaree', 'Fatoma', 'El Arba Des Bir Lenni', 'Huamantla', 'Enamadala', 'Alibunar', 'Madirovalo', 'Bampur', 'Karachi', 'Lehi', 'Cross Lanes', 'Abou el Hassan', 'Telwa', 'Ga-Kgapane', 'Navraftor', 'Janapadu', 'Garesnica', 'Awantipur', 'El Fanar', 'Ayyampettai', 'Shangzhuangcun', 'Gainesville', 'Santanopolis', 'La Goulette', 'Chiknayakanhalli', 'Murtosa', 'Tirano', 'Heckmondwike', 'Mantenopolis', 'Villa Corzo', 'Gacko', 'Etterbeek', 'Dargot', 'Kattamuru', 'Thazhamel', 'Armilla', 'Eisenstadt', 'Elbeuf', 'Metsamor', "'Ain el Melh", 'Karunagapalli', 'Dayr al Balah', 'Qazvin', 'Brokopondo', 'Keetmanshoop', 'Chopadandi', 'Shinas', 'Silverdale', 'Orcutt', 'La Concepcion', 'Kebili', 'Tecoluca', 'Kursaha', 'Laiyuan', 'Hilliard', 'Summit', 'Vadakku Viravanallur', 'Phanat Nikhom', 'Vila Rica', 'Vantaa', 'Saint-Amand-les-Eaux', 'Inole', 'Barrancas', 'Upala', 'Albania', 'Puerto Galera', 'Calulo', 'Mioveni', 'Yangiariq', 'Newport News', 'Puerto Villarroel', 'San Juan del Rio', 'Badarwas', 'Chertsey', 'Montalvo', 'Nulivedu', 'Ain Nouissy', 'Linhares', 'Sao Joao da Ponte', 'Palos Heights', 'Mondolfo', 'Vibo Valentia', "Colle di Val d'Elsa", 'Tache', 'Mixquiahuala de Juarez', 'Montegiardino', 'Franca', 'Iwo', 'Northfield', 'Neyyattinkara', 'Hirna', 'Acquaviva delle Fonti', 'Voiron', 'Iturama', 'Aklanpa', 'Raymondville', 'Ambohimandroso', 'Lakshmipur', 'Torgelow', 'Trento', 'Neiva', 'Umuarama', 'San Mauro Torinese', 'Pago Pago', 'Ulricehamn', 'San Cristobal Cucho', 'Owendo', 'Basht', 'Conthey', 'Damba', 'Senmayacho-senmaya', 'Gdansk', 'Antilla', 'Husavik', 'Frankfurt', 'Seaham', 'Zagan', 'Westerly', 'Sivasli', 'Nablus', 'El Wak', 'Saddle Brook', 'Tepelene', 'Vyskov', 'Tavagnacco', 'Ndende', 'Salanso', 'Laranjeira', 'Tranquebar', 'Battulapalle', 'Bhilwara', 'Cork', 'Ennis', 'Danjiangkou', 'North Logan', 'Viralimalai', 'Firoza', 'Bettiah', 'Atharga', 'Tallimarjon Shahri', 'Yangi Marg`ilon', 'Lohariandava', 'Opera', 'Gampaha', 'Teboulba', 'Solofra', 'Portoferraio', 'Daijiazhuang', 'Jiexiu', 'Sinkolo', 'Joutseno', 'Donauworth', 'South Glengarry', 'Nawabganj', 'Lodhran', 'Rio San Juan', 'Kriva Palanka', 'Rozaje', 'Manapla', 'Negresti-Oas', 'Plaine Magnien', 'Satsuma', 'Fatehabad', 'Rio do Antonio', 'Calafat', 'Preganziol', 'Spalding', 'Xinji', 'Kilkottai', 'Guding', 'Milici', 'Paranavai', 'Skopun', "Debre Mark'os", 'Isingiro', 'Port-Margot', 'Ludinghausen', 'Grossenluder', 'Fenyuan', 'Araras', 'Terme', 'Conneaut', 'Winnetka', 'Pelhrimov', 'Pir Bakran', 'Sukkampatti', 'Shrirangapattana', 'Vatra Dornei', 'Palmaner', 'Goose Creek', 'Hailsham', 'Douar Tabouda', 'Douar Echbanat', 'Adre', 'Quinto di Treviso', 'Lagoa Vermelha', 'Ambatomivary', 'Maidencreek', 'Huanuco', 'Sommerda', "Sant'Arcangelo di Romagna", 'Huancavelica', 'Erramvaripalem', 'Gyomaendrod', 'Szigetvar', "Xin'an", 'Behror', 'Antanimenabaka', 'Sonbarsa', 'Cuajinicuilapa', 'Lovosice', 'Carlos Barbosa', 'Mlada Boleslav', 'Wesley Chapel', 'Adelsdorf', 'Temascal', 'Novovolynsk', 'Brattleboro', 'Yapacani', 'Sao Sebastiao do Curral', 'Makinsk', 'Ojinaga', 'Shaoxing', 'Numazu', 'Edakkazhiyur', 'Riesi', 'Bagamanoc', 'Inkster', 'Sugarland Run', 'Savenay', 'Eidhi', 'Elvas', 'Pannimadai', 'Dorverden', 'Luanshya', 'Grays', 'Tenente Portela', 'Iuna', 'Okmulgee', 'Busra al Harir', 'Kil Perambalur', 'Urucurituba', 'Villa Aberastain', 'Sitakund', 'Ras el-Barr', 'Luwero', 'Itaguacu', 'Vadasinor', 'Sile', 'Inhapi', 'Tanguieta', 'Olney', 'Sonpur', 'Gillitts', 'Fond du Lac', 'Aboso', 'Oujda-Angad', 'Tiquisate', 'Chuqung', 'Jose Abad Santos', 'Holsbeek', 'Lioua', 'Wiang Haeng', 'Sirugudi', 'Gomparou', 'Kodangipatti', 'Jitwarpur Kumhra', 'Dhaula', 'Monte Carmelo', 'Zhytomyr', 'Copertino', 'Panggezhuang', "M'lang", 'Caconde', 'Sao Sebastiao', 'Rurrenabaque', 'Canaman', 'Vienna', 'Shichinohe', 'Catia La Mar', 'Pachchaimalaiyankottai', 'Bema', 'Guadalupe', 'Fouchana', 'Takatsuki', 'Nakrekal', 'Greenfield', 'Mirabel', 'Sharon', 'Valenzuela', 'Shiroi', 'Polotitlan de la Ilustracion', 'Skawina', 'Merrifield', 'Bitetto', 'Hinsdale', 'Kalafotsy', 'Reddigudem', 'El Colegio', 'Santa Ana Jilotzingo', 'Korkuteli', 'Xieqiaocun', 'Nikko', 'Purulha', 'Al Hasakah', 'Puigcerda', 'Diabugu', 'Sao Joao de Pirabas', 'Yegainnyin', 'Itasca', 'Italva', 'Olavarria', 'Dikirnis', 'Tantoucun', 'Al Khmissat', 'Woodland', 'Vasco Da Gama', 'Amalapuram', 'Los Amates', 'Gotha', 'Burscheid', 'Tanippadi', 'Xinyingheyan', 'St. Pete Beach', 'Chieri', 'Ahmadpur', 'Capela do Alto', 'Ebn Ziad', 'Moimenta da Beira', 'Schwarzenbek', 'Sasthankotta', 'Lannion', 'Oupeye', 'Digne-les-Bains', 'Kami', 'Widnes', 'Bala Cangamba', 'Edmonds', 'Qabr as Sitt', 'Lorient', 'Antsinanantsena', 'Liteni', 'Belsand', 'Tibau do Sul', 'Melpanaikkadu', 'Uranganpatti', 'Canasgordas', 'Totnes', 'King Edward Point', 'Ben Daoud', 'Aarau', 'San Sebastiano al Vesuvio', 'Bagam', 'Hobart', 'Shimada', 'Roelofarendsveen', 'Broomfield', 'Palenga', 'Rainham', 'Molepolole', 'Hayama', 'Guines', 'Gonubie', 'Filiasi', 'Gethaura', 'General Rodriguez', 'Guilderland', 'Acarigua', 'Lowell', 'Subulussalam', 'Boiro', 'Workington', 'Sebes', 'Konibodom', 'Grevesmuhlen', 'Mixco', 'St. Andrews', 'Tirora', 'Gubat', 'Edattirutti', 'Bad Wildbad', 'Newport Pagnell', 'Jadcherla', 'Mountain House', 'Pindare-Mirim', 'Hollins', 'Barddhaman', 'Castro-Urdiales', 'Bandora', 'Masandra', 'Charo', 'Qiryat Bialik', 'Turangi', 'Lavras da Mangabeira', 'Hanover', 'Middleburg', 'Umm Qasr', 'Kingswinford', 'Dachne', 'Surcin', 'Al Manaqil', 'Sorombo', 'Maruteru', 'Chaona', 'Morbi', 'Rani Shakarpura', 'Tiko', 'Picarras', 'Keratsini', 'West Deptford', 'Natividade', 'Kalale', 'Hohenkirchen-Siegertsbrunn', 'Huachipato', 'Ubaitaba', 'Shashijie', 'Nalambur', 'Monte Mor', 'Halikner', 'Pinczow', 'Vellavadanparappu', 'Ankli', 'Oakdale', 'Ar Rommani', 'Phulwaria', 'Berchem', 'Ban Bang Muang', 'Mangalapur', 'Maki', 'Puente Alto', 'Paoy Paet', 'Parsagarhi', 'Gobernador Galvez', 'Narpes', 'Kodumudi', 'Vostochnyy', 'Grabouw', 'Volketswil', 'Qabala', 'Falan', 'Sankt Veit an der Glan', 'Bargur', 'Zalishchyky', 'Paraiso do Norte', 'Guymon', 'Corsicana', 'Malargue', 'Cheran', 'Makhmalpur', 'Mitchellville', 'Ouamri', 'Kegeyli Shahar', 'Nalbari', 'Caazapa', 'Bebedouro', 'Crixas', 'Jaisinghnagar', 'Cuimba', 'Ankireddikuntapalem', 'Biknur', 'Parauli', 'Solec Kujawski', 'La Riviera', 'Sakae', 'Ville Bonheur', 'Galkot', 'Tessalit', 'Joao Alfredo', 'Vinkovci', 'Karuppur', 'Vedappatti', 'Palocabildo', 'Nanjing', 'Haidarnagar', 'Kabankalan', 'Acapetahua', 'Yashio', 'Wasaga Beach', 'Gorzow Wielkopolski', 'Erzurum', 'Jordbro', 'Ilmajoki', 'Cuauhtemoc', 'Soldanesti', 'Kontagora', 'Baleno', 'Umbertide', 'Cividale del Friuli', 'Maidenhead', 'Taupo', 'Potters Bar', 'Hubli', 'Kakching', 'Aguadilla', 'Dobrna', 'Souk Ahras', 'Giessen', 'Feriana', 'Simdega', 'Kadugli', 'Makati City', 'Harqalah', 'Flowery Branch', 'Spartanburg', 'Royal Oak', 'Fukuroi', 'Kishmat Dhanbari', 'Somerset East', 'Behisatse', 'Tummalapenta', 'Central District', 'Tubize', 'Issy-les-Moulineaux', 'Lezhe', 'Buriti dos Lopes', 'Krusevo', 'Shibuya-ku', 'Cachoeira', 'Takahata', 'Bozeman', 'Yongqing', 'Rio Vista', 'Primavera', 'Bainbridge Island', 'Lo Miranda', 'Sanharo', 'Sentjernej', 'Coral Hills', 'Serres', 'Kapan', 'Caruaru', 'Shijonawate', 'Para de Minas', 'Buco Zau', 'Shaker Heights', 'Zhaoyu', 'Koto-ku', 'Parvatgiri', 'Evergem', 'Asalem', "Mu'tah", 'Mahalleh-ye Shirinu', 'Douar Oulad Hssine', 'Villa Purificacion', 'Arras', "Sao Lourenco d'Oeste", 'Bad Krozingen', 'Olgiate Comasco', 'Hanumannagar', 'Echemmaia Est', 'Lennox', 'Kalimpong', 'Kitob', 'Las Palmas', 'Guacima', 'Urgut Shahri', 'Carnot', 'Lambertville', 'Sabara Bangou', 'Shahr-e Herat', 'Longford', 'Kambove', 'Cartagena', 'Odessa', 'Bani Suwayf', 'Toqsu', 'Bibbiano', 'Torres Vedras', 'Bareja', 'Maying', 'Gross-Umstadt', 'Anzoategui', 'Holstebro', 'Baturite', 'Neduvannur', "Terra Roxa d'Oeste", 'Cencheng', 'Marondera', 'Azumino', 'Sanha', 'Porto Grande', 'Ansermanuevo', 'Perigueux', 'Makronia', "Bek'oji", 'Los Blancos', 'Argeles-sur-Mer', 'Khorabar', 'Fushe-Kruje', 'Fotsialanana', 'Aomar', 'Guardamar del Segura', 'Ksibet el Mediouni', 'Georgian Bluffs', 'Stockport', 'Saray', 'Buckley', 'Daventry', 'Jaqma', 'Guifoes', 'Patpara', 'Bonhill', 'Presidente Figueiredo', 'Cutral-Co', 'Morgan City', 'Cernusco sul Naviglio', 'Beheloka', 'Bargny', 'Brits', 'Nakasato', 'San Jose de Feliciano', 'Lithia Springs', 'Kushtia', 'Nasipit', 'Alsdorf', 'Rockland', 'Homestead', 'Ziketan', 'Ludvika', 'Erbil', 'Meguro', 'Teolo', 'Lapovo', 'Streetly', 'Dar es Salaam', 'Middleton', 'Ayancik', 'Canoas', 'Kanrangana', 'Parczew', 'Puck', 'Mario Campos', 'Bedourie', 'Bohumin', 'Freha', 'Juventino Rosas', 'Kolaras', 'Oued Sebbah', 'Kinattukkadavu', 'Peravurani', 'Barahona', 'Kalabo', 'Kumluca', 'Saint-Priest', 'Tagkawayan', 'Durazno', 'Ouled Rabah', 'Susangerd', 'Reynosa', 'Araraquara', 'Ambinanynony', 'Puerto Madryn', 'Villa Juarez', 'Bletchley', 'Santo Tomas de Janico', 'El Arahal', 'Bijapur', 'Villeneuve-la-Garenne', 'Nantong', 'Semera', 'Dean Funes', 'Koblenz', 'Montebello', 'Sideradougou', 'Luzhou', 'Yuanchang', 'Tulbagh', 'Hitoyoshi', 'Wijk bij Duurstede', 'Damascus', 'Branchburg', 'Bahutal', 'Payyanpalli', 'Gjovik', 'Pudukkottai', 'Lakhdaria', 'Paracambi', 'El Bagre', 'Ban Cho Ho', 'Villamartin', 'Natogyi', 'Wildwood', 'Tall Salhab', 'Fort Erie', 'Kirtipur', 'Quirihue', 'Masamagrell', 'Medak', 'Futtsu', 'Cuamba', 'Zbarazh', 'Beniel', 'Azzaba', 'Banaruyeh', 'Denver', 'Slough', 'Khenifra', 'Nanbu', 'Ibarra', 'Galashiels', 'Malyn', 'Biei', 'Eranapuram', 'Miechow', 'San Benedetto del Tronto', 'St. Matthews', 'Muhlenberg', 'Cape Coast', 'Massa Lombarda', 'Raybag', 'Villeta', 'Jogbani', 'Agua Blanca', 'Kaluderica', 'Dinnington', 'Jepara', 'Bhimadolu', 'Ortona', 'Xincheng', 'Huancayo', 'Crewe', 'Koduvilarpatti', 'Eggertsville', 'Agege', 'Gharyan', 'Sucupira do Norte', 'Datian', 'Kaldsletta', 'Paillaco', 'Anjanazana', 'Sulphur', 'Kakrala', 'Walin', 'Francisco Caballero Alvarez', 'Frenda', 'Robertsport', 'Simanggang', 'Flers', 'Tindouf', 'Azna', 'Umrat', 'Chhimluang', 'Felixlandia', 'Tung Tau Tsuen', 'Soledar', 'Mejorada del Campo', 'Nijlen', 'Przeworsk', 'Upper Leacock', 'Machico', 'Arakkapadi', 'Gawan', 'Tracuateua', 'Tilbury', 'Indiaroba', 'Villa Constitucion', 'Gil', 'Soyaniquilpan', 'Nanchang', 'Guaxupe', 'Gardone Val Trompia', 'Lampang', 'Kahror Pakka', 'Kundurpi', 'Gallatin', 'Belper', 'Beruri', 'El Pinon', 'Chengjiao Chengguanzhen', 'Ad Darbasiyah', 'Neo Karlovasi', 'Mersin', 'Glyfada', 'Bastak', 'Wolfersheim', 'Albenga', 'Kuqa', 'Isanlu', 'Kaji', 'Barranquilla', 'Sai Kung Tuk', 'Hish', 'Overpelt', 'La Vista', 'Livingston', 'Bangassi-Nangou', 'Purcellville', 'Mashpee', 'Alushta', 'Tikar', 'Katima Mulilo', 'Duvvuru', 'Paragominas', 'Mosquera', 'Yuzhnoukrainsk', 'Senador Jose Porfirio', 'Mutuipe', 'Castenaso', 'Schmolln', 'Mporokoso', 'Tawsalun', 'Malo', 'Myedu', 'Orsta', 'Staten Island', 'Goch', 'Dulag', 'Bedlington', 'Mangatarem', 'Lubok Antu', 'Zongo', 'Toktogul', 'Sabana Grande de Boya', 'Mullheim', 'Mogilno', 'Abra Pampa', 'Sidi Baizid', 'Rostusa', 'Glen Allen', 'Barra de Sao Francisco', 'Jocotitlan', 'Pensilvania', 'Petropavl', 'Radlje ob Dravi', 'Chichaoua', 'Irondale', 'Namhkam', 'Liestal', 'Yazu', 'Douar Olad. Salem', 'Mecca', 'Fergus Falls', 'Shimotoba', 'Purificacion', 'Qal`at Sukkar', 'Zhezqazghan', 'Kenafif', 'Chak That', 'Roanoke Rapids', 'Manakondur', 'Beni Enzar', "Nefas Mewch'a", 'Kerap', 'Narsingi', 'Arys', 'Ajaccio', 'Lihue', 'Herentals', 'Tarakan', 'San Jose del Fragua', 'Sitio Novo de Goias', 'Jammu', 'Ballesteros', 'Bayreuth', 'Schererville', 'Kara-Suu', 'Mbandjok', 'Xinpi', 'Caudry', 'Jalhalli', 'Rosario de Mora', 'Hampden', 'Tevaram', 'Annapolis Neck', 'Minokamo', 'Ghataro Chaturbhuj', 'Neyshabur', 'North Mankato', 'Nea Makri', 'Matino', 'San Vito al Tagliamento', 'Shangchuankou', 'Maldon', 'Tirupparangunram', 'Chateauneuf-les-Martigues', 'Sumare', 'La Calamine', 'Plettenberg Bay', "Monteroni d'Arbia", 'Ban Charoen Mueang', 'Pukekohe East', 'Ban Sop Tia', 'Barbacoas', 'Plottier', 'Fleetwood', 'Fultondale', 'Karahia', 'Grefrath', 'Kalavai', 'Borsa', 'Bournville', 'Changwon', 'Chik Banavar', 'Kungsbacka', 'Yunoshima', 'Pithiviers', 'Zepce', 'Rosario do Sul', 'Nadiad', 'Besancon', 'Rupbas', 'Muong Theng', 'Dubrovytsya', 'Someren', 'Lianyuan', 'Huilongping', 'Zrenjanin', 'Ban Wiang Phan', 'Jucurutu', 'Alamos', 'Zetang', 'Lymm', 'Sarajevo', 'Saint-Sebastien-sur-Loire', 'Sereflikochisar', 'Suai', 'La Leonesa', 'Tequisquiapan', 'Faya', 'Fort Saskatchewan', 'Neuilly-Plaisance', 'Elenga', 'Dearborn', 'Gok', 'Ipora', 'Mierlo', 'Ottappalam', 'Mirandiba', 'Rongjiawan', 'Sarno', 'Volpiano', 'Ain Touta', 'Swietochlowice', 'San Dimas', 'Brenham', 'Birkenfeld', 'Sveti Jurij', 'Montfermeil', 'Bowen', 'Xiangcheng', 'Bariariya', 'Albufeira', 'Pantin', 'Ruffano', 'Buurhakaba', 'Kulai', 'Nordlingen', 'Alcanar', 'Ostfildern', 'San Juan del Puerto', 'Zunhua', 'Yuregir', 'Amha', 'San Antonio de los Banos', 'Pojuca', 'Cernavoda', 'Maria', 'Saky', 'Ciudad Victoria', 'Qasr-e Qand', 'North Lauderdale', 'Manahari', 'Kartikapalli', 'Herne', 'Zabki', 'Middle River', 'Moncion', 'Er Regueb', 'Ridge', 'Tezu', 'Djendel', 'Tynaarlo', 'Katra', 'Le Plessis-Trevise', 'Pernik', 'Grossostheim', 'Dharir', 'Kherrata', 'Guamal', 'Mangrawan', 'Haldibari', 'Yingzhou Linchang', 'Zgierz', 'Brownsburg', 'Radebeul', 'Novara', 'Markacho', 'Lochristi', 'Oyem', 'Porcia', 'Bogoso', 'Tapa', 'Carmagnola', 'Alto Parnaiba', 'Prosper', 'Nossombougou', 'Tohana', 'Turlock', 'Gualaquiza', 'Masrakh', "Arba Minch'", 'Moroto', 'Carcassonne', 'Paraipaba', 'Jajarm', 'Raparla', "Diao'ecun", 'Oued Laou', 'Sint-Niklaas', 'Masuda', 'Tilvalli', 'Tauranga', 'Upplands Vasby', 'Wabash', 'Tapas', 'Al `Alamayn', 'Huozhou', 'Tshikapa', 'Kennedy Town', 'Wisconsin Rapids', 'Yangiyul', 'Quezalguaque', 'Bougado', 'Cuautepec de Hinojosa', 'Mundo Novo', 'Verl', 'Maghnia', 'Dzouz', 'Principe da Beira', 'Bamessi', 'Soloma', 'Basay', 'Closter', 'Amatenango de la Frontera', 'Bazarak', 'Benairia', 'Mocharim', 'Kuchesfahan', 'Kurayoshi', 'Luohe', 'Kampot', 'Chaska', 'Lynden', 'Purushottampur', 'Konin', 'Long Xuyen', 'Kizhakkott', 'Peachtree Corners', 'Natshal', 'Saleaula', 'Bahia Blanca', 'Semey', 'Rianxo', 'Caridad', 'Hammam el Rhezez', 'Kudelstaart', 'Huizen', 'Narasapuram', 'Parkville', 'Kadungapuram', 'Garden City', 'Daulatkhan', 'Tectitan', 'Maroambihy', 'Pallini', 'Center Point', 'North Bay', 'Maihar', 'Lons', 'Huehuetenango', 'Qalqilyah', 'Koryo', 'Betafo', 'Rheinbach', 'Surprise', 'Sumbawanga', 'Thatha', 'Kurugodu', 'Neustadt in Holstein', 'Jupi', 'Santa Isabel do Para', 'Chandrawada', 'Tamra', 'Cusset', 'Le Cres', 'Tunduma', "Toyloq Qishlog'i", 'Southbourne', 'Hempfield', 'Dhilwan', 'Roseville', 'Sisimiut', 'Aizubange', 'Lleida', 'Muri', 'Phalombe', 'Antsoha', 'Saha', 'Muana', 'Djambala', 'Istanbul', 'Andoharanofotsy', 'Seligenstadt', 'Djougou', 'Boukadir', 'Ingeniero Pablo Nogues', 'Barwon Heads', 'Hazleton', 'Ciudad Tula', 'Northeim', 'Galela', 'Rehoboth', 'Bemanonga', 'Nawucun', 'Gallup', 'Malayer', 'Abu Za`bal', 'Seia', 'Samalsar', 'Melfi', 'Puduvayal', 'Namsos', "Bailey's Crossroads", 'Gardere', 'Bermo', 'Gaillard', 'Qiushanxiang', 'Rokycany', 'Cesena', 'Alto Piquiri', 'Mendoza', 'Rudrangi', 'Kotido', 'Turbihal', 'Comox', 'Shangpa', 'Tzintzuntzan', 'Nao Kothi', 'Cogolin', 'Nhan Trach', 'Royal Wootton Bassett', 'Suhar', 'Prestea', 'Zahle', 'Cherniakhiv', 'Thinadhoo', 'Phuthaditjhaba', 'Pasco', 'Madhavaram', 'Chuo-ku', 'Kishi', 'Tavistock', 'Jitpur', 'Legnano', 'Heusweiler', 'Punnila', 'Bonoufla', "Qiryat Mal'akhi", 'Jaguari', 'Erraguntlakota', 'Valdobbiadene', 'Lupao', 'Hille', 'Cosamaloapan', 'Tromso', 'Charkhi Dadri', 'Dolo Odo', 'Ebersbach an der Fils', 'Chojnow', 'Perkasie', 'Zhamog', 'Seiada', 'Bjelovar', 'Puerto Natales', 'Camaguey', 'Dabakala', 'Katagami', 'Panchla', 'Borgoricco', 'St. Albans', 'Campeche', 'Kontela', 'Broughty Ferry', 'Shihuajie', 'Alvinopolis', 'Parilla', 'Georgetown', 'North Palm Beach', 'Canela Baja', 'Darpa', 'Battipaglia', 'Upland', 'Bombinhas', 'Houten', 'Ibiracu', 'Paraopeba', 'Hokota', 'Whitpain', 'Nakoushi', 'Krasnoilsk', 'Guapimirim', 'Bom Jesus da Serra', 'Tesalia', 'Arohi', 'Pyay', 'Tryavna', 'Mufulira', 'Weligama', 'Gaillac', 'Rettanai', 'Marano di Napoli', 'Jefferson City', 'Tulagi', 'Nirmali', 'Passos', 'Htison', 'Harewa', 'Miren', 'Shika', 'Alfajayucan', 'San Pedro Masahuat', 'Galhinna', 'Silla', 'Dong Ha', 'Legazpi City', 'Hassi Messaoud', 'Bhadrapur', 'Anjahambe', 'Sirsaganj', 'Menglang', 'Qarchak', 'Gandhwani', 'Penedo', 'Peralta', 'Rappang', 'Bad Bramstedt', 'Weinfelden', 'Aliaga', 'Douar Sidi Laaroussi', 'Regedor Quissico', 'Sidi Dahbi', 'Joppatowne', 'Borgo a Buggiano', 'Tiqqi', 'Stakhanov', 'Basmakci', 'Oulad Fraj', 'Moron', 'Thunder Bay', 'Manoharpur', 'Kuniyamuttur', 'Onondaga', 'Lopare', 'Zumaia', 'Kahemba', 'Altenstadt', 'Cabuyao', 'Kafr az Zayyat', 'Burriana', 'Kulharia', 'Barinas', 'Nelmadur', 'Demirozu', "Bois-d'Arcy", 'Lahad Datu', 'Venganellur', 'Tsuruoka', 'Bordertown', 'Amrabad', 'Sala', 'Coronel Freitas', 'Bassin Bleu', 'Porto Santo Stefano', 'Ait Yazza', 'Thanhlyin', 'Tournai', 'Valea Adanca', 'Saint-Cloud', 'Ginowan', "Welk'it'e", 'Siilinjarvi', 'Inhangapi', 'Huazhou', 'Eski Yakkabog`', 'Tobelo', 'Horsholm', 'Springs', 'Lorengau', 'Manwat', 'Khmilnyk', 'Hrubieszow', 'Santa Teresa di Riva', 'Portel', 'El Dorado Hills', 'Pattiswaram', 'Bad Reichenhall', 'Dourdan', 'Dakhram', 'Rio de Contas', 'Pagudpud', 'Sao Felix', 'Kosjeric', 'Lebanon', 'Fargo', 'Mont-Saint-Aignan', 'Wald', 'Eckernforde', 'Chicholi', 'Goldap', 'Kurashiki', 'Santa Maria Colotepec', 'Kawaii', 'Narapala', 'Maimanah', 'Ponda', "'Ain Roua", 'Manyoni', 'Mineiros', 'Moussoro', 'Olhao', 'Ridley', 'Sidi Ouassay', 'Nancagua', 'Sagay', 'Majurwa', 'Okotoks', 'Chengannur', 'Middletown', 'Heredia', 'Mbanza Kongo', 'Texenna', 'Urania', 'Cantemir', 'Albertirsa', 'Qala', 'Molinella', 'Pahrump', 'Padugupadu', 'Utiel', 'Harbatpur', 'Orestiada', 'Tiachiv', 'Kahta', 'Cesenatico', 'Omaruru', 'Siquijor', 'America Dourada', 'Kilkis', 'Ascot', 'Zhujiagua', 'Bagheria', 'Puerto Wilches', 'Torre Maggiore', 'Tomuk', 'Huanggang', 'Trovagunta', 'Devanakonda', 'Sa Pa', 'Narashino', 'Koganei', 'West Memphis', 'Pendleton', 'Edenvale', 'Dieburg', 'Ankavandra', 'Gondalga', 'Quang Ha', 'Xochistlahuaca', 'Niiyama', 'Viedma', 'Sukhothai', 'Cuorgne', 'Janzur', 'Dhansura', 'Rio Maior', 'Penuganchiprolu', 'Jaora', 'Kanzakimachi-kanzaki', 'Hunfeld', 'Yabayo', 'Torre del Mar', 'Shenyang', 'Odacho-oda', 'Lagdo', 'Otuzco', 'Hagi', 'Pompeu', 'Repalle', 'Majagual', 'Yulinshi', 'Periyakoduveri', 'Ubon Ratchathani', 'Al `Amadiyah', 'Saint-Andre', 'Omis', 'Arcozelo', 'Doujing', 'Vuhledar', 'Hindley', 'Aphaur', 'Oliveira', 'Nasir', 'Palm Desert', 'Khategaon', 'Esmoriz', 'Belmont', 'Koani', 'Gyomro', 'Mackworth', 'Rubio', 'Zafra', 'San Juan Evangelista', 'Qoubaiyat', 'Lautoka', 'Cerkes', 'Villeneuve-le-Roi', 'Xinhua', 'Touros', 'Nyaungu', 'Chakand', 'Aluksne', 'Yishui', 'Timperley', 'Oakwood', 'Chakwal', 'Tarauna', 'Khandrauli', 'Huaquechula', 'Bhatinda', 'Puerto Williams', 'Tongliao', 'Staffanstorp', 'Kisumu', 'Mangueirinha', 'Leopoldina', 'Luchow', 'Harrismith', 'Indija', 'Campo Redondo', 'Lastra a Signa', 'Akhisar', 'Alnif', 'Tega Cay', 'Foughala', 'Jinjicun', 'Kangning', 'Treze Tilias', 'Landau', 'Vaires-sur-Marne', 'Udiyavara', 'Boquim', 'Hekou', 'Kirchheim unter Teck', 'Kondakindi Agraharam', 'Sao Mateus', 'Simonesia', 'Pinerolo', 'Ptuj', 'Mezobereny', 'Gex', 'Unnan', 'Dayr Abu Hinnis', 'Sanampudi', 'Simon Bolivar', 'Oteiza', 'Gragnano', 'Khartsyzk', 'Beramanja', 'Richmond Hill', 'Juru', 'Padavedu', 'Ekwari', 'Brescia', 'La Cruz', 'Pulimaddi', 'Pleasant Hill', 'Hozin', "Sant'Ambrogio di Valpolicella", 'Telua', 'Bilozerske', 'Adami Tulu', 'Manteca', 'Delmiro Gouveia', 'Mindelheim', 'Fate', 'Hokur Badasgom', 'Sidi Kada', 'Talayolaparambu', 'Connersville', 'Gurinhem', 'Skopje', 'Fairless Hills', 'Debrecen', 'El Realejo', 'Gadhada', 'Schwalmtal', 'Yamaga', 'Bihar', 'Pakil', 'Loni', 'Chilgazi', 'Varzobkala', 'Samandag', 'Portela', 'Wanghong Yidui', 'Qianxi', 'Dese', 'Hathidah Buzurg', 'Periyapuliyur', 'Bilston', 'Laja', 'Makhdumpur', 'Les Pennes-Mirabeau', 'Himmatpura', 'Denia', 'Keimoes', 'Castillo de Teayo', 'Conselve', 'Longmeadow', 'Herne Bay', 'Sao Miguel do Guama', 'Sunland Park', 'Israna', 'Bassian', 'Qift', 'Behshahr', 'Kotli Ablu', 'Hidrolandia', 'Torre Santa Susanna', 'Kirdasah', 'Tanjungpinang', 'Reshuijie', 'Wappinger', 'Perungudi', 'Goworowo', 'Knin', 'Vardhamankota', 'Campo Limpo', 'Sermoneta', 'Limoeiro de Anadia', 'Otsuchi', 'Niamina', 'Kasumigaura', 'West Puente Valley', 'Sungai Petani', 'East Windsor', 'Pedro Celestino Negrete', 'Yorkton', 'Gueznaia', 'Telica', 'Mong Cai', 'El Paisnal', 'Provo', 'Stefan Voda', 'Vargem', 'Treillieres', 'Procida', 'Sens', 'Aperibe', 'Comasagua', 'Hulbuk', 'Talakag', 'Puduva', 'Hashikami', 'Randallstown', 'Sahline', 'Bejar', 'Isnapuram', 'Teghra', 'Eschwege', 'Carver', 'Franklin Park', 'Tremedal', 'Itahara', 'Lancut', 'Vettikattiri', 'Meyzieu', 'Al Abraq', 'Basista', 'Charlotte Amalie', 'Palembang', 'Esparraguera', 'Villacidro', 'Berettyoujfalu', 'Peravali', 'Mastic Beach', 'Perundurai', 'San Francisco', 'Bangassou', 'Kauriya', 'Carinhanha', 'Otjiwarongo', 'Minot', 'Covilha', 'Shahe', 'Petrila', 'Portchester', 'Ban Lueak', 'Aminpur', 'Calenga', 'Bafata', 'Cumnock', 'Clichy', 'Tabernes Blanques', 'Siloe', 'Qiaotouyi', 'Candido Mota', 'Barbosa', 'Borcka', 'Maryland City', 'Lacarak', 'Crestview', 'Jounie', 'Hochdorf', 'Montelibano', 'Tundhul', 'Osku', 'Monsenhor Tabosa', 'Anosiala', 'Ban Pa Sak', 'Kanakir', 'Wakayama', 'Muttenz', 'Makale', 'Medesano', 'Lamerd', 'Bakarpur Ogairah', 'Wetherby', 'Nakagawa', 'Sao Jose dos Pinhais', "Nova Brasilandia d'Oeste", 'Piquet Carneiro', 'Valmadrera', 'Pua', 'Chak Forty-one', 'Sandvika', 'Stansted Mountfitchet', 'Snoqualmie', 'Pereiras', 'Joaquim Nabuco', 'Mondragon', 'Huixcolotla', 'Minamiuonuma', 'Gap', 'General Villegas', 'Matane', 'El Monte', 'Minamiminowa', 'Saint-Andre-les-Vergers', 'Ibb', 'Oudenaarde', 'Lainate', 'Box Elder', 'Karanchedu', 'Guanhaes', 'Calderara di Reno', 'Itainopolis', 'Gattaran', 'Muhlacker', 'Rengo', 'Winkler', 'Marathalli', 'Cambuquira', 'Ilulissat', 'Kalyandrug', 'Kesabpur', 'Santiago de Anaya', 'Ano Liosia', 'Vung Tau', 'Monaco', 'Cuesmes', "Ra's al `Ayn", 'Rasebetsane', 'Tagudin', 'Basopatti', 'Mack', 'Pedra Badejo', "Les Sables-d'Olonne", 'Hosakote', 'Barbastro', 'Tutoia', 'Manpur', 'Higashiyamato', 'Varvarin', 'Manhica', 'Kachchippalli', 'Velugodu', 'Palin', 'Xinchangcun', 'Alchevsk', 'Anzegem', 'Doi Saket', 'Amersham', 'Namburu', 'Union Hill-Novelty Hill', 'Apopa', 'Zafarabad', 'Nanminda', 'Burlington', 'Hibbing', 'Galloway', 'Union City', 'Dukinfield', 'Montecristo', 'Shrigonda', 'Uchoa', 'Tizi-n-Tleta', 'Stellenbosch', 'Olaine', 'Isabel', 'Luqa', 'Matinha', 'Bni Rzine', 'Attimarappatti', 'Circasia', 'Ban Krot', 'Selimpasa', 'Xangongo', 'Simiganj', 'Coacoatzintla', 'Nueva Helvecia', 'Chenzhou', 'Lota', 'Lawrenceville', 'Vayanur', 'Tobe', 'Ende', 'Haslemere', 'Calabayan', 'Pakxe', 'Pajapan', 'Udhampur', 'Tsararivotra', 'Telerghma', 'Haydock', 'Surmon Chogga Grong', 'Trebic', 'Alafaya', 'Kafr Sajnah', 'Conceicao', 'Vellore', 'Usingen', 'Terenure', 'Bouca', 'Ghat Borul', 'Baisuhalli', 'Daulatpur', 'Grossrohrsdorf', 'Lorica', 'Hagaribommanahalli', 'Ban Thoet Thai', 'Loznica', 'Ponmala', 'Padiyur', 'Tigoa', 'Budva', 'Mistassini', "'Ain Babouche", 'El Limon', 'Diouna', 'Rieti', 'Kuttyadi', 'Xinyang', 'Killiney', 'Pop', 'Guaymango', 'Springbok', 'Presque Isle', 'Timra', 'Abuja', 'Miyakojima', 'Payyannur', 'Xunyang', 'Bailen', 'Chambellan', 'Bukit Mertajam', 'Cattolica', 'Serdar', 'Sarare', 'Coronel Vivida', 'Herriman', 'Guliston', 'Rouvroy', 'Garfield', 'Elmina', 'Sarreguemines', 'Milwaukie', 'Ershui', 'Garot', 'Czestochowa', 'Kirklareli', 'La Jagua de Ibirico', 'Stroud', 'Kocaeli', 'Calimaya', 'Savalou', 'Manalalondo', 'Cikupa', 'Cide', 'Idylwood', 'East Massapequa', 'Andijon', 'Ibusuki', 'Brahmapur', 'Mount Prospect', 'St. James', 'Vila Franca de Xira', 'Bhadreswar', 'Khandwa', 'Amporoforo', 'Celic', 'Gomec', 'Fort Carson', 'Kosiv', 'Fria', 'Cliftonville', 'Narvacan', 'Ramanathapuram', 'Jawasa', 'Guclukonak', 'Samobor', 'Ceska Trebova', "Anan'evo", 'Zinjibar', 'Indianapolis', 'Yanggok', 'Aberdare', 'Palm River-Clair Mel', 'Diari', 'Chilmil', 'Callosa de Segura', 'Kummersbruck', 'Sambava', 'Pritzwalk', 'Karai', 'Lachhmangarh Sikar', 'Wade Hampton', 'Rapla', 'Assa', 'Malavalli', 'Trinec', 'La Verne', 'Selibaby', 'Zagnanado', 'Commerce City', 'Tredyffrin', 'Salaberry-de-Valleyfield', 'Longtan', 'Karatsu', 'Badian', 'Sao Sebastiao do Paraiso', 'Satyamangala', 'Balsta', 'Gamagori', 'Chinampa de Gorostiza', "Ambinanin'i Sakaleona", 'Itiquira', 'Center', 'Umargam', 'Ozurgeti', 'Montereau-faut-Yonne', 'Amau', 'Nova Esperanca', 'Venezuela', 'Kafia Kingi', 'Tanguturu', 'Vinto', 'Sonosari', 'Sturovo', 'Date', 'Niska Banja', 'An Nhon', 'Littlehampton', 'Fuente Palmera', 'Sokode', 'Donghua', 'South Holland', 'Frimley', 'Bachchannapet', 'Junnardev', 'Dole', 'Neuotting', 'Monforte del Cid', 'Baguley', 'Brimington', 'Calauan', 'Durmersheim', 'Wittenberge', 'Echizen', 'Dobropillia', 'Ayuquitan', 'Viramgam', 'Matsuura', 'Abu Dis', 'Winsum', 'Gentbrugge', 'Villa Victoria', 'Yovon', 'Boaz', 'Kotta Kalidindi', 'Great Bend', 'Tarcento', 'Limbazi', 'Bajos de Haina', 'Karakax', 'Ibira', 'Yakushima', 'Goito', 'Nabari', 'Sabanalarga', 'Tiou', 'Renens', 'Dolores', 'Agago', 'Madakalavaripalli', 'Mapastepec', 'La Lucila', 'Cherakhera', 'Sadaseopet', 'Neerijnen', 'Krathum Baen', 'Gansbaai', 'Mahalandi', 'Chanaur', 'La Argentina', 'Jesup', 'Bela Crkva', 'Soteapan', 'Siechnice', 'Lwakhakha', 'Uspallata', 'Imbatug', 'Altavas', 'Jaglot', 'Colangute', 'Kumatori', 'Andapa', 'Choconta', 'Sarhari', 'Santiago Jocotepec', 'Arauca', 'Hailin', 'Shahr-e Majlesi', 'Guyuan', 'Murfatlar', 'Chon Buri', 'Bacnotan', 'Kutna Hora', 'Xiva', 'Mizque', 'West Ham', 'Roatan', 'Doylestown', 'Tarichar Kalan', 'Foz do Iguacu', 'Garching an der Alz', 'Tifariti', 'Longwy', 'Tarikere', 'Alameda', 'Gbanhi', 'Catunda', 'Kirchhundem', 'Cuenca', 'Delareyville', 'Cliffside Park', 'Orting', 'Kapadvanj', 'Nigran', 'Fengyicun', 'Pazardzhik', 'Yeddumailaram', 'Longkeng', 'Nikaho', 'Altofonte', 'Douar Oulad Youssef', 'Lathi', 'Osh', 'Daytona Beach', 'Atharan Hazari', 'Ami', 'Balaguer', 'Malnate', 'Quanzhang', 'Rio Claro', 'Hajdusamson', 'Gouvea', 'Kunming', 'Kulasegaram', 'Pori', 'Braintree', 'Bijelo Polje', 'Mateur', 'Kozova', 'Fenoarivo Atsinanana', 'Leisure World', 'Taman Senai', 'Nerinjippettai', 'Washington', 'Basapatna', 'Mwene-Ditu', 'Norak', 'South Upi', 'Yokadouma', 'Abbigeri', 'Kouoro', 'Freilassing', 'Nymburk', 'Cangandala', 'Dihri', 'Arakli', 'Huaihua', 'Turkauliya', 'Ramainandro', 'Aytos', 'Lihe', 'Raseborg', 'Tomioka', 'Sakrand', 'Belur', 'Kaseda-shirakame', 'Mettingen', 'Bugalagrande', 'Annandale', 'Mambere', 'Oume', 'Al Buwaydah', 'Eagle', 'Ustron', 'Pegnitz', 'Massaguet', 'Bibbiena', 'Saint-Vith', 'Geelong', 'Glen Rock', 'Miarinavaratra', 'Sao Simao', 'Gurlan', 'La Escala', 'Mian Channun', 'Tono', 'Kevelaer', 'Jiaozhou', 'Einsiedeln', 'Udaypur Gadhi', 'Molagavalli', 'Sira', 'Ospina', 'Kuduru', 'Komarolu', 'Arnprior', 'Breclav', 'Qornet Chahouane', 'Okaya', 'Montesarchio', 'Tecoh', 'Dezful', 'Quiculungo', 'Tam Ky', 'Qaracuxur', 'Daxin', 'Horsham', 'Tafalla', 'Yehud', 'Abertawe', 'Darien', 'Shodoshima', 'El Arenal', 'Litomerice', 'Caboolture', 'Norwalk', 'Mananara Avaratra', 'Herten', 'Kihihi', 'Aira', 'Kuttampala', 'Noumea', 'Phon-Hong', 'Thair', 'Herzogenrath', 'Planalto', 'Arrecifes', 'Fuente-Alamo de Murcia', 'Gilbues', 'Miltenberg', 'Bubong', 'Henstedt-Ulzburg', 'Rayavaram', 'Alberique', 'Pimpalgaon Raja', 'Palencia', 'Lalganj', "L'Isle-Adam", 'Abasolo', 'Sirikonda', 'Devadanappatti', 'Pokrovka', 'Baliguian', 'Corum', 'Tlacolula de Matamoros', 'Jeremie', 'Myaungmya', 'Tissa', 'Capljina', 'Ambariokorano', 'Gemert', 'Akayu', 'Bhiwandi', 'Chishui', 'Abbotsford', 'La Carlota', 'Antalya', 'Yongjing', 'Tres Marias', 'Haa', 'Meerhout', 'Sibu', 'Astara', 'Gar', 'Ndola', 'Sao Tiago de Custoias', 'Karrapur', 'Chartiers', 'Songea', 'Mouila', 'Streator', 'Al Qamishli', 'Nikaweratiya', 'Delbruck', 'Batonyterenye', 'Odzak', 'Agudos', 'Gatineau', 'Vineyard', 'Saint-Rambert', 'Kesath', 'Sugar Grove', 'Tuensang', 'Los Reyes de Juarez', 'Sardinal', 'Eksjo', 'Safety Harbor', 'Kilkenny', 'Tsiroanomandidy', 'Agartala', 'Agrate Brianza', 'Nomimachi', 'North Tonawanda', 'Stafford', 'Paramoti', 'Cavdir', 'Houmt Souk', 'Domagnano', 'Kaset Wisai', 'Piedmont', 'Nazareth', 'Anjialava', 'Stebnyk', 'Golyaka', 'Zehdenick', 'Birstonas', 'Chaklasi', 'Carira', 'Sapatgram', 'Bankura', 'Rosignano Marittimo', 'Quchan', 'Hoeilaart', 'Angul', 'Erechim', 'Xinshi', 'Jima', 'Upper Southampton', 'Baramula', 'Joao Neiva', 'Vanukuru', 'Shiwan', 'White Horse', 'Morong', 'Frascati', 'Schrobenhausen', 'Luvungi', 'Itirapina', 'Nari Bhadaun', 'Tarnaveni', 'Mytilini', 'Destrehan', 'Eybens', 'Dianga', 'Pasarkemis', 'Najafabad', 'Swiecie', 'Cobourg', 'San Jorge', 'Pfastatt', 'Kusmaul', 'Fallersleben', 'Pershotravensk', 'Mahambo', 'Sigli', 'Annaba', 'Turku', 'Pedda Vegi', 'Mesa Geitonia', 'Charam', 'Charentsavan', 'Imielin', 'Orvault', 'Lodz', 'Sam Ko', 'Foz', 'Alexandroupoli', 'Demmin', 'Castelleone', 'Burauen', 'Capilla del Monte', 'Ettlingen', 'Srirangapur', 'Camamu', 'Saruhanli', 'Ferfer', 'Aurangabad', 'Taromske', 'Hinigaran', 'San Juan de Dios', 'Saint-Maur-des-Fosses', 'Bunkyo-ku', 'Renfrew', 'Mus', 'Lake Jackson', 'Xirdalan', 'Samayac', 'Spanish Lake', 'Izegem', 'Nema', 'Dongducheon', 'Hecelchakan', 'Seoni', 'Wood Green', 'Castelfranco Veneto', 'Bostanici', 'Itajiba', 'Inagi', 'Oakham', 'Boa Vista', 'Puquio', 'Chittaranjan', 'Tibro', 'Pitangueiras', 'Rouached', 'Nouakchott', 'Nuevo San Juan Parangaricutiro', 'Sao Jose do Campestre', 'Heyin', 'Vacaria', 'Lukavac', 'Katy', 'Caibiran', 'Villasis', 'Pontianak', 'Pallijkarani', 'Rayapalle', 'San Pedro Carcha', 'Kabirwala', 'Idhna', 'Kiama', 'Armadale', 'Braunschweig', 'Lathasepura', 'Bajo Boquete', 'Shal', 'Balyqshy', "Long'e", 'Felgueiras', 'Arteijo', 'Dhanaula', 'Kaoni', 'Telford', 'Laukaria', 'Druskininkai', 'Sayhat', 'Skanderborg', 'Lindome', 'Paramanandal', 'Quebrangulo', 'Bida', 'Narragansett', 'Valle Hermoso', 'Santa Catarina Pinula', 'Muriedas', 'Tawnza', 'Arnouville-les-Gonesse', 'Ouaoula', 'Shahrixon', 'Alcamo', 'Naujoji Akmene', 'Pappakurichchi', 'Toyonaka', 'Uttukkuli', 'Ilave', 'Villupuram', 'Bhojpur Jadid', 'Ischia', 'Rio Verde Arriba', 'Radolfzell am Bodensee', 'Llica', 'Erjie', 'Qiaomaichuan', 'Lamont', 'Bezliudivka', 'Five Corners', 'Niksic', 'Hnivan', 'Tamale', 'Khanapur', 'Ichnia', 'Sesvete', 'Reeuwijk', 'Qutur', 'Moana', 'San Pedro Atocpan', 'Osny', 'Toda Bhim', 'Zaouiet Sousse', 'Lavras', 'Ouardenine', 'Katakos', 'Hetton le Hole', 'Iseo', 'Lehigh', 'Hallim', 'Bonou', 'Moita Bonita', 'Shakargarh', 'Chiapa de Corzo', 'Zhanggu', 'Meudon', 'Ayikudi', 'Cunduacan', 'Jingling', 'Jiantang', 'Laojiezi', 'Bazid Chak Kasturi', 'Elliniko', 'Rifu', 'Duanshan', 'Busselton', 'Liubashivka', 'Maniyur', 'Singuilucan', 'Erkrath', 'Mucari', 'Tumwater', 'Yaoquan', 'Santa Maria Ixhuatan', 'Sidi Bel Abbes', 'Namanga', 'Afsin', 'An Phu', 'Bamyan', 'Mannanchori', 'Shahr-e Qods', 'Bierun', 'Vijapur', 'Boli', 'Hirni', 'Ibitiara', 'Pachchampalaiyam', 'Hutto', 'Talibon', 'Bilbao', 'Readington', 'Pasivedalajimma', 'Sariaya', 'Lappersdorf', 'Jodhpur', 'Opava', 'Shelton', 'Ledegem', 'Cabedelo', 'Tiptur', 'Narsapur', 'Jamira', 'Kaufbeuren', 'Ricany', 'Ansonia', 'Mirandopolis', 'Gondizalves', 'Brilon', 'Tarumirim', 'Leek', 'Sheopuria', 'Dongfeng', 'Pozanti', 'Fuyingzicun', 'Sartrouville', 'Vigonovo', 'Tatakan', 'Kountouri', 'Gharbia', 'Figuig', 'Shire', 'Swabi', 'Wismar', 'Chachagui', 'Nedumudi', 'Carrickfergus', 'St. Thomas', 'Jaicos', 'Tiruvaduturai', 'Bolinao', 'Eumseong', 'Edina', 'Essey-les-Nancy', 'Saint-Julien-en-Genevois', 'Grosuplje', 'Grand Haven', 'Al Faw', 'Androrangavola', 'Hassi Maameche', 'Rubi', 'Busesa', 'Villa Tapia', 'Divo', 'Benalmadena', 'Redmond', 'Rajaori', 'Kraaipan', 'Nisporeni', 'Wandan', 'Aguia Branca', 'Lake Park', 'Hoquiam', 'Jan Kempdorp', 'Lentvaris', 'Waikanae', 'Reboucas', "Pomigliano d'Arco", 'Meycauayan', 'Saugor', 'Pasacao', 'Schinnen', 'Glan', 'Lavagna', 'Betnoti', 'Ban Wat Sala Daeng', 'Romsey', 'Campi Salentina', 'Glinde', 'Gamprin', 'Chandanais', 'Taiping', 'Marivorahona', 'Store', 'Vardenis', 'Salima', 'Kadanadu', 'Jarqo`rg`on', 'Barlad', 'Bethpage', 'Boscotrecase', 'Dhamua', 'Yelm', 'Tlacotepec', 'Parame', 'Haapsalu', 'Diadi', 'Huetor Vega', 'Jozefow', 'Diessen am Ammersee', 'Meyrin', 'Crofton', 'Galvez', 'Boulder', 'Masi-Manimba', 'Aubervilliers', 'Unguia', 'Caslav', 'El Centro', 'El Fasher', 'Beersheba', 'Hujra Shah Muqim', 'Bajil', 'Tomas Oppus', 'Liancheng', 'Arkivan', 'Jasidih', 'Marturu', 'Evry', 'Easttown', 'Lebedyn', 'Garhpura', 'Chinaur', 'Jean-Rabel', 'Chichigalpa', 'Talukkara', 'Kakuda', 'Bozoum', 'Tuzdybastau', 'Pudimadaka', 'Iraquara', 'Blanquefort', 'Batman', 'Iserlohn', 'Castano Primo', 'Uttaradit', 'Bartlesville', 'Sapna', 'Humpolec', 'Avanashipalaiyam', 'Rawajaya', 'Gadarwara', 'Mascote', 'Narkatpalli', 'Kendale Lakes', 'White City', 'Moss', 'Nnewi', 'Douar Oulad Aj-jabri', 'Pata Uppal', 'Dhiban', 'Salitre', 'West Babylon', 'Khulays', 'Teplohirsk', 'Bannu', 'Mudki', 'Agar', 'Al Madad', 'Chokwe', 'Udala', 'Chichli', 'Lemington', 'Khatima', 'Shangrao', 'Chiriguana', 'Baliuag', 'Balua Rampur', 'Nafplio', 'Kudat', 'Cabusao', 'Acatic', 'Madougou', 'Tiszafured', 'Akurana', 'Kingston', 'Pariaman', 'Seacroft', 'Tecoman', 'Alavus', 'Kanteru', 'Jaramana', 'Schodack', 'Eniwa', 'Fuzhou', 'Klobuck', 'Budhma', 'Lelydorp', 'Great Yarmouth', 'Missoes', 'Porto Velho', 'Muhanga', 'Porto Ingles', 'Harderwijk', 'Ubungo', 'Phopnar Kalan', 'Chrysoupoli', 'Zarzis', 'Nanning', 'Tekeli', 'Drammen', 'Montivilliers', 'Gualeguay', 'Nishinomiya-hama', 'Laguna de Duero', 'Moraga', 'Alzano Lombardo', 'Miki', 'Janin', 'Frederico Westphalen', 'Moulins', 'Kuningan', 'Castelnuovo di Porto', 'Irimbiliyam', 'Kharar', 'Cholavandan', 'Csomor', 'Amurrio', 'Tiruchchuli', 'Steffisburg', 'Parksville', 'Anontsibe-Sakalava', 'Nueva Concepcion', 'Le Locle', 'Harsewinkel', 'Kaonke', 'Sao Joao Batista', 'Hokuto', 'Aubange', 'Bni Boufrah', 'Sirmatpur', 'Gignac-la-Nerthe', 'Igarata', 'Kharki', 'Pamarru', 'Bhimunipatnam', 'Perdoes', 'Qarqaraly', 'Iisalmi', 'Koussane', 'Pittalavanipalem', 'Tilakpur', 'Okuchi-shinohara', 'Alfenas', 'Mindouli', 'Chokkalingapuram', 'Webster', 'Rorschach', 'Mascali', 'Stauceni', 'Mucaba', 'Heemskerk', 'Tari', 'Bitam', 'Ullal', 'Qari Kola-ye Arateh', 'Brownwood', 'Santee', 'Akbarpur', 'Mega', 'Motosu', 'Csorna', 'Cavarzere', 'Omallur', 'Virarajendrapet', 'Wixom', 'Murak', 'Niederkruchten', 'Aldine', 'Jalihalli', 'Batajnica', 'Bardibas', 'Madina do Boe', 'Kallar', 'Rhennouch', 'East Providence', 'Oras', 'Vittuone', 'Corbelia', 'San Pedro Jicayan', 'Telnal', 'Parmanpur', 'Morena', 'Olindina', 'Sumpango', 'Bielsk Podlaski', 'Mogoditshane', 'Gulsehir', 'Kumari', 'Yantzaza', 'Ambalavao', 'Vohitany', 'Qianxucun', 'Pragatinagar', 'Siaya', 'Pombos', 'Thuan Tien', 'Minas de Matahambre', 'Haaren', 'Concarneau', 'Al Marj', 'Desborough', 'Brewster', 'San Miguel Ocotenco', 'Toba Tek Singh', 'Quito', 'Pushkar', 'Bagasra', 'Snihurivka', 'Nsawam', 'Raonta', 'Pishbar', 'Hanamsagar', 'Lemay', 'Douliu', 'Linlithgow', 'Mamqan', 'Phulaut', 'Colwyn Bay', 'East Leake', 'Bargara', 'Phan Rang-Thap Cham', 'Jouy-le-Moutier', 'Whyalla', 'La Rioja', 'Ferrenafe', 'Yenmangandla', "Ben 'Aknoun", 'Fier', 'Baharampur', 'Kin', 'Staphorst', 'Gondar', 'Pantnagar', 'Barnagar', 'Campobello di Licata', 'Rukan', 'Masatepe', 'Barroso', 'Savnik', 'Huilong', 'Porto Seguro', 'Tanjungbalai', 'Sembedu', 'Lunenburg', 'Kampung Tengah', 'Tatsunocho-tominaga', 'Oosterwolde', 'Aver-o-Mar', 'Poughkeepsie', 'Anjozorobe', 'Zengcun', 'Soavimbahoaka', 'Warr Acres', 'Donacarney', 'Galesburg', 'Alma', 'Fuchu', 'Lehrte', 'Angelopolis', 'Ventanas', 'Barro Alto', 'Santana do Matos', 'Patnongon', 'East Dereham', 'Tibagi', 'Rambilli', 'Punturin', 'Balzar', 'Beaufort West', 'Sendai', 'Szekszard', 'Kaizuka', 'Motatan', 'Smiths Falls', 'Litherland', 'Maisenhausen', 'Gandara', 'Ban Mae Kham Lang Wat', 'Madrid', 'Pasrur', 'Shahganj', 'Escazu', 'Pakruojis', 'Shiogama', 'Kallad', 'Matinilla', 'Delemont', 'Bilga', 'Kasur', 'Khattab', 'Palmi', 'Sariz', 'Didymoteicho', 'Pallippuram', 'Wagrowiec', 'Munro', 'Aschersleben', 'Ndalatando', 'Cecchina', 'Meizichong', 'Elamattur', 'Umred', 'Bonita Springs', 'Cujubim', 'Red Oak', 'Montlouis-sur-Loire', 'Santa Rosa de Osos', 'Dehmoi', 'Pesochin', 'Sadao', 'Nagyatad', 'Khusropur', 'Klagenfurt', 'Bou Merdes', 'Acatenango', 'Demiryol', 'Sinnuris', 'Juazeirinho', 'Sivakasi', 'Hunxe', 'Lamu', 'Ravensthorpe', 'Streetsboro', 'Marugame', 'Nizampur', 'Salina', 'Baglung', 'Aketao', 'Schwedt (Oder)', 'Medrissa', 'Nordwalde', 'Rielasingen-Worblingen', 'Santiago Tianguistenco', 'Dahuk', 'Lubutu', 'Ambohipihaonana', 'Felino', 'Kitwe', 'Langarud', 'San Rafael Las Flores', 'Kurwa Mathia', 'Deuil-la-Barre', 'Sitampiky', 'Westchase', 'Korkut', 'Aracariguama', 'Verneuil-sur-Seine', 'Sucy-en-Brie', 'Vulcanesti', 'Berkeley Heights', 'Suhut', 'Saratamata', 'Owk', 'Holzgerlingen', 'Neckargemund', 'Piacenza', 'Kakkalapalle', 'Tendo', 'Atlanta', 'Kamsar', 'East Bridgewater', 'Krizevci', 'Mandapeta', 'Kusumha', 'South Valley', 'Bhargaon', 'Birkerod', 'Olintla', 'Jiabong', 'Winona', 'Ambinanitelo', 'Tlajomulco de Zuniga', 'Monument', 'Matay', 'Jingping', 'Gadoon', 'Yingmen', 'Bondoukou', 'Sirpanandal', 'Materi', 'Englewood', 'Nea Moudania', 'Shanglingcun', 'Sutton in Ashfield', 'Maule', 'Djidja', 'Long Eaton', 'Wasco', 'Satao', 'Idrija', 'Catacamas', 'Oga', 'Canelli', 'Seydunganallur', 'Sidi Ettiji', 'Civitavecchia', 'Bouc-Bel-Air', 'Itapetim', 'Shizuishan', 'Ron', 'San Sebastian Tutla', 'Lupeni', 'Buenaventura Lakes', 'Morsang-sur-Orge', 'Adamstown', 'Daraina', 'Penugonda', 'Pachkhal', 'Genemuiden', 'Supe', 'Sahri', 'Ingraj Bazar', 'Poloros', 'Chalil', 'Collecchio', 'Ipiranga do Piaui', 'Hove', 'Ahmadli', 'Karahalli', 'Matosinhos', 'Navalmoral de la Mata', 'Bialogard', 'Radcliff', 'Padakanti', 'Malasiqui', 'Mendicino', 'Fredericksburg', 'Javanrud', 'Tegal', 'Bailesti', 'Beeston', 'Huaixiangcun', 'Puranpur', 'Manlio Fabio Altamirano', 'Roddam', 'Celakovice', 'El Espinar', 'Plattling', 'Juan de Acosta', 'Sidi Rahal', 'Zrnovci', 'Montepuez', 'Guruvayur', 'Kouti', 'Sipalakottai', 'Vuzenica', 'Fontenay-le-Comte', 'Jianshi', 'Soignies', 'Tewksbury', 'Montelupo Fiorentino', 'Lacin', 'Kibaha', 'Minakami', 'Bedburg', 'Cuttack', 'Had Zraqtane', 'Mandamari', 'Sadabad', 'Azhikkal', 'Huaiyang', 'Ovruch', 'Khetia', 'Monte Azul Paulista', 'Amparihitsokatra', 'Luling', 'Ayaviri', 'Pekin', 'Dhihdhoo', 'Sylacauga', 'Venkatagiri', 'Piro', 'Rasauli', 'Verde Village', 'Ostrow Mazowiecka', 'Bahon', 'Teixeira Soares', 'Qasr al Farafirah', 'Ambohimalaza', 'Tabuk', 'Tubungan', 'Norseman', 'Fresno', 'Lingamparti', 'Burgau', 'Coatepec Harinas', 'Haridwar', 'Itaugua', 'Cruz del Eje', 'Hidalgo', 'Nellaya', 'Mandishah', 'Digboi', 'Darlington', 'Solim', 'Kintampo', 'Rancho Arriba', 'San Isidro', 'Atlapexco', 'Morada Nova de Minas', 'Dwarka', 'Ambatofinandrahana', 'Tha Chang', 'Wangguanzhuang Sicun', 'Chatellerault', 'Benicasim', 'Norwell', 'Moanda', 'Balsamo', 'Cesson', 'Mels', 'Lokbatan', 'Karlapalem', 'Otar', 'Gainrha', 'Panajachel', 'Mulhouse', 'Rafard', 'Tabora', 'Barroquinha', 'Newport East', 'Smarjeske Toplice', 'Mallaram', 'Qutubpur', 'El Tortuguero', 'Obernai', 'Alella', 'Susquehanna', 'Caxambu', 'Sibulan', 'Kaleyanpur', 'Asheville', 'Albal', 'Seshambe', 'Sanga', 'Furstenau', 'Nea Artaki', 'Flores', 'Hualane', 'Altun Kawbri', 'McCandless', 'Essau', 'Andoany', 'Navodari', 'Cherry Creek', 'Quitilipi', 'Ermelo', 'Arco', 'Rosario del Tala', 'Ravensburg', 'Seveso', 'Puerto Barrios', 'Lucon', 'Tadworth', 'Banzhuangcun', 'Sinnar', 'Ladenburg', 'Changzhou', 'Futog', 'Veenoord', 'Pardes Hanna Karkur', 'Palhoca', 'Schaarbeek', 'Jarabacoa', 'Nazca', 'Zurich', 'Pacoti', 'Lemmer', 'Weizhou', 'Nagpur', 'Hueyotlipan', 'Araria', 'Kulat', 'Horodenka', 'Irupi', 'Cinco Saltos', 'Bhainsdehi', 'Wamena', 'West Haverstraw', 'Ellon', 'Fasa', 'Turkan', 'Tuzi', 'Conjeeveram', 'Imperial Beach', 'Simria', 'Golden Valley', 'Tafas', 'Hohenbrunn', 'Harsin', 'Mulundo', 'Delaware', "Sant'Angelo Lodigiano", 'El Reten', 'Madukkur', 'Padalarang', 'Paraiba do Sul', 'Toboali', 'Bagcilar', 'Dejen', 'Carthage', 'Yusufeli', 'Ponduru', 'Conceicao dos Ouros', 'Finnkolo', 'San Francisco Solano', 'Vetralla', 'Kheda', 'Chauddagram', 'Vinaroz', 'Galatge', 'Bradwell', 'Orlova', 'Korostyshiv', 'San Adrian de Besos', 'Bernburg', 'Galatone', 'Paradip Garh', 'Kukraun', 'Bruges', 'Karuzi', 'Hemet', 'Vadnagar', 'Bernardino de Campos', 'Pansol', 'Keystone', 'Satuba', 'Weinbohla', 'Morris', 'Piazzola sul Brenta', 'Shalingzicun', 'Falkensee', 'Uberlandia', 'Norco', 'Narala', 'Widhwidh', 'Yahualica', 'Mokrisset', 'Bandalli', 'Nizwa', 'Seal Beach', 'Shahpur Chaumukhi', 'Bhangar', 'Soldado Bartra', 'Paratdiha', 'Esme', 'Goulmima', 'Waiyuanshan', 'Fukagawa', 'Brookdale', 'Leiktho', 'West University Place', 'Kaarst', 'Sausar', 'Pivijay', 'Paraibuna', 'Saint Bernard', 'Ponta Grossa', 'Ushiku', 'Greenville', 'Peduasi', 'Le Loroux-Bottereau', 'Sturbridge', '`Ibri', 'Bururi', 'Itarana', 'Santol', 'Saint-Amand-Montrond', 'Palod', 'Erdenet', 'Bangramanjeshvara', 'Schongau', 'Tirumayam', 'Barigarh', 'Kinna', 'Sarmera', 'Middlesex', 'Xixucun', 'Anatoli', 'Santa Cruz Naranjo', 'Tocoa', 'Cabo San Lucas', 'Ban Dan Na Kham', 'Martigny', 'Morganton', 'Caseros', 'Ljubuski', 'Itzer', 'Januario Cicco', 'Derzhavinsk', 'Timoteo', 'Cloppenburg', 'Chirilagua', 'Ordino', 'Birsinghpur', 'Nea Michaniona', 'Sisophon', 'Setif', 'Porto San Giorgio', 'Lakheri', 'Gobernador Gregores', 'Mazapil', 'Arsikere', 'Leon Postigo', 'Ambovombe Afovoany', 'Corbeil-Essonnes', 'Ouda-yamaguchi', 'Schloss Holte-Stukenbrock', 'Ujjain', 'Komorowice', 'Pueblo Juarez', 'Cookeville', 'Caombo', 'Heiligenhafen', 'Lexington', 'Maassluis', 'Allschwil', 'Ogulin', 'Galveston', 'Tepalcatepec', 'Jericoacoara', 'Gurupa', 'Combarbala', 'Poulton le Fylde', 'Brandon', 'Famy', 'Fenggang', 'Leping', 'Recica', 'Founougo', 'San Diego', 'Athar', 'Teresina', 'Xinbocun', 'Piduguralla', 'Buyukorhan', "'Ain Merane", 'Bolvadin', 'Carpenedolo', 'Onna', 'Nalegaon', 'Grandola', 'Frondenberg', 'Hagerstown', 'Lalmunia Munhara', 'Shelburne', 'Boulia', 'Bni Drar', 'Courbevoic', 'Cocieri', 'Lentate sul Seveso', 'Claveria', 'Margny-les-Compiegne', 'Leoben', 'Pavugada', 'Werlte', 'Turori', 'Puerto Montt', 'Hauppauge', 'Nalas', 'Daxiang', 'Polygyros', 'Asahni', 'Dzhankoi', 'Sanliurfa', 'Motema', 'Gadag', 'Negele', 'Cuevas del Almanzora', 'David', 'Singhanakhon', 'Siwan', 'Kalush', 'Carnaubeira da Penha', 'Odenton', 'Caldono', 'Ampahana', 'Mozzate', 'Hemer', 'Barcaldine', 'Kanan', 'Itapecerica da Serra', 'Toftir', 'Peterhead', 'Xishancun', 'Worgl', 'Hermanus', 'Derby', 'Aranda de Duero', "Aci Sant'Antonio", 'Fetesti', 'Masantol', 'Vyara', 'Puerto Berrio', 'Norton', 'St. Augustine Shores', 'East Moline', 'Shahbazpur', 'Congonhas', 'Naples', 'Coromandel', 'Aguas Zarcas', 'Ceu Azul', 'Sabanagrande', 'Ranti', 'Palapye', 'Jitauna', 'Brejo', 'Abrisham', 'Placilla de Penuelas', 'Worcester', 'Graneros', 'Ras el Oued', 'Tominian', 'Kamatgi', 'Tonakkal', 'Dranesville', 'Stepnogorsk', 'Suchteln', 'Casorate Primo', 'Lajedo', 'Chichihualco', 'Loos', 'Chust', 'Winchester', 'Tifton', 'Deblin', 'Armagh', 'Mahonda', 'Cheval', 'Dzitbalche', 'New Bern', 'Le Teich', 'Lamosina', 'Oatfield', 'Autazes', 'Kikuchi', 'Bni Gmil', 'Ligonha', 'Nepi', 'Vohilengo', 'Uruburetama', 'Bajestan', 'Al Kharj', 'Cuito', 'Vadakku Ariyanayakipuram', 'Itaqui', 'Cerro', 'Acajutla', 'Adjaouere', 'Christchurch', 'Miantso', 'Damaturu', 'Teluknaga', 'Shaoyang', 'Parappukara', 'Buyukcekmece', 'Pontefract', 'Sangju', 'Posoltega', 'Longshan', 'Scottsbluff', 'Yate', 'Rossville', 'Khed', 'Lokomby', 'Pocking', 'Duverge', 'Mundamala', 'Arivonimamo', 'Bhimphedi', 'Akiruno', 'Toufen', 'Civitanova Marche', 'Chandlers Ford', 'Titagarh', 'Bengkalis', 'Roth', 'Lugoj', "Yong'an", 'Sandusky', 'Karuvambram', 'Chambersburg', 'Perivale', 'Dayalpur', 'Gafour', 'Zeboudja', 'Bee Cave', 'El Amria', 'Agioi Anargyroi', 'Biberist', 'Kerikeri', 'Lequile', 'DeSoto', 'Waddan', 'Sopur', 'Ambattur', 'Guayama', 'Pontes e Lacerda', 'Vellur', 'Thebes', 'Cedarburg', 'Dzialdowo', 'Mestrino', 'Valeggio sul Mincio', 'San Ignacio', 'Dafni', 'Burbach', 'Milton Keynes', 'Moman Barodiya', 'Rantau Prapat', 'Silver Spring', 'Goldbach', 'Bottesford', 'Peruvancha', 'Bad Ischl', 'Olivar Bajo', 'Shitan', 'Vohilava', 'Pinukpuk', 'Dashao', 'Pocharam', 'Daireaux', 'Putaparti', 'Eysines', 'Hasanparti', 'Gumdag', 'Chicago Ridge', 'Quissama', 'Colonia Leopoldina', 'General Mamerto Natividad', 'Steamboat Springs', 'Walpole', 'Mockern', 'Sheboygan', 'Korolevo', 'Wichita Falls', 'Chepes', 'Airmont', 'El Geneina', 'Fruitville', 'Hernani', 'Casinhas', 'Neuried', 'Ain Aicha', 'Correntes', 'Loxstedt', 'Yakoma', 'Aqtau', 'Aubenas', 'Cedar Mill', 'Drensteinfurt', 'Bankheri', 'Comarnic', 'San Ignacio de Moxo', 'Kattagan', 'Podcetrtek', 'Armenia', 'Albina', 'Ferraz de Vasconcelos', 'Kibawe', 'Navegantes', 'Panna', 'Sebt Ait Saghiouchen', 'Nuevo Arraijan', 'Milicz', 'Alleppey', 'Mondlo', 'Tallmadge', 'Birni', 'Langrucun', 'Maria Enzersdorf', 'Birdaban', 'Kalingalan Caluang', 'Ormond Beach', 'Pontedera', 'Jilava', 'Guduru', 'Ashikita', 'Port Antonio', 'Termini Imerese', 'Stranraer', 'Muheza', 'Sonapur', 'Rothenburg ob der Tauber', 'De Doorns', 'Libreville', 'Vargaur', 'Madona', 'Kirlampudi', 'Usworth', 'Ichchapuram', 'Groairas', 'Raipura', 'Thanesar', 'Gracanica', 'Bukkarayasamudram', 'Brunico', 'Spremberg', 'Sidi El Hattab', 'Imeni Chapayeva', 'Tandwa', 'Ap Khanh Hung', 'Bagulin', 'Ziyodin Shaharchasi', 'Catanzaro', 'Kavak', 'Nalchiti', 'Pannaikkadu', 'Vysoke Myto', 'Punnappatti', 'Kanchika', 'Sibut', 'Marblehead', 'Mayenne', 'Concepcion Huista', 'Birch Bay', 'Mulakaluru', 'Lynchburg', 'Titay', 'Trou du Nord', 'Murraysville', 'Kalladai', 'Ipoh', 'Santa Maria Chilchotla', 'Wervik', 'Casa de Oro-Mount Helix', 'Jamnagar', 'Caotan', 'Dewsbury', 'La Pintana', 'Betong', 'Jhundo', 'Saint-Gregoire', 'Channarayapatna', 'Lennestadt', 'Bir Kasdali', 'Kissidougou', 'Palghat', 'Winterberg', 'San Jose Tenango', 'Mays Chapel', 'Barreiro', 'Latakia', 'Loudeac', 'Periyapodu', 'Bad Schussenried', 'Himeji', 'Ulaan-Uul', 'Qormi', 'Wamba', 'Bovolone', 'Limavady', 'Kara-Bak', 'Wilrijk', 'Bissegem', 'Richmond', 'Quzhou', 'Chamarru', 'Arhavi', 'Tartus', 'Mboki', 'Gramsh', 'Jizhuang', 'Capitola', 'Ashby de la Zouch', 'Saga', 'Mage', 'Manvi', 'Baud', 'Hincesti', 'Santiago Sacatepequez', 'Astley', 'Moramanga', 'Le Mars', 'Mahinathpur', 'Fort Mohave', 'Turda', 'Kadiolo', 'Kannan', 'Grossenkneten', 'Tsiately', 'Zhipingxiang', 'Al `Amarah', 'Florestopolis', 'Mazara del Vallo', 'Antonivka', 'Laconia', 'Bouka', 'Vemulanarva', 'Budakalasz', 'Kelkheim (Taunus)', 'Ventania', 'Castanos', 'Ganzhou', 'Lauingen', 'Borbon', 'Lodwar', 'Ober-Ramstadt', 'Qardho', 'Kathevaram', 'Colorado', 'Sierra Vista', 'Walnut Creek', 'Tainan', 'Guaraciaba', 'Beni Hassane', 'Tabuse', 'Isfisor', 'Long Beach', 'Mahraurh', 'Langerwehe', 'An', 'Nueve de Julio', 'Kastamonu', 'Harmanli', 'Welver', 'Binisalem', "Imi n'Oulaoun", 'Lizzanello', 'Itapema', 'Colon', 'Andolofotsy', 'Ilafy', 'Chivasso', 'Al Ghayzah', 'Weno', 'Hostotipaquillo', 'Talugai', 'Tomohon', 'Saint-Remy-de-Provence', 'Takikawa', 'Ramgundam', 'Shamaldy-Say', 'Buram', 'Curtea de Arges', 'Labo', 'Chahatpur', 'Upington', 'Andorinha', 'Saymayl', 'Nkheila', 'Lindi', 'Rio Tercero', 'Grevena', 'Peschiera del Garda', 'Las Terrenas', 'Holzwickede', 'Mallet', 'Ambara', 'San Gregorio di Catania', 'Chiramba', 'Marudi', 'Donji Kakanj', 'Strand', 'Azzano Decimo', 'San Remo', 'Brasschaat', 'Barokhar', 'Torghay', 'Soledad', 'Alfredo Chaves', 'Dongsheng', 'Enumclaw', 'Bouafle', 'Navotas', 'Ambohitoaka', 'Marotandrano', 'Sao Miguel do Tapuio', 'Wevelgem', 'Pio XII', 'Vigodarzere', 'Gargzdai', 'Rennes', 'Lucala', 'Piney Green', 'Stokke', 'Al `Ayyat', 'Agoura Hills', 'Akabira', 'Cabiao', 'Sevilla de Niefang', 'Coconuco', 'Yaxley', 'Morondava', 'Francistown', 'Shush', 'Papillion', 'Morawa', 'Halstead', 'Lower Pottsgrove', 'Rende', 'Opelika', 'Tanakpur', 'Bolsward', 'Majene', 'Tullinge', 'Ifield', 'Calumpit', 'Brajarajnagar', 'Laon', 'Tamagawa', 'Avcilar', 'Manikpur', 'Portet-sur-Garonne', 'East Chicago', 'Reddiyapatti', 'Bananal', 'Somero', 'Sabara', 'Ragay', 'Cupertino', 'Kuala Pembuang', 'Andranomamy', 'Kavar', 'Gommern', 'Oxelosund', 'Cheraro', 'Repala', 'Sannar', 'Blaj', 'Clemmons', 'Kakarati', 'Valenciennes', 'Batad', 'Kyeintali', 'Mengjiacun', 'Agdz', 'Ula', 'Parede', 'Buguias', 'Palleja', 'Urubamba', 'Bobbili', 'Damalcheruvu', 'Wantagh', 'Wailuku', 'La Piedad', 'Texarkana', 'Isangel', 'Rosario', 'Tublay', 'Zapote', 'Albox', 'Kandulapuram', 'Vrindavan', 'Amudalapalle', 'Cabo Rojo', 'Sternberk', 'Telde', 'Kreuzlingen', 'Dudu', 'Vouzela', 'Hullatti', 'Yellandu', 'Suzaka', 'Ouled Abbes', 'Ankilizato', 'Pocklington', 'Lescar', 'Belgrave', 'Az Zabadani', 'Battambang', 'Lehigh Acres', 'Magurele', 'Kato', 'Bailin', 'Kobarid', 'Aketi', 'Pokrovske', 'Pecica', 'Coringa', 'Dosemealti', 'Oued Amlil', 'Gaotan', 'Warburg', 'Virgem da Lapa', 'Amdjarass', 'Duijiang', 'Bagado', 'Pujali', 'Nan', 'Ibrahimpatan', 'Tornio', 'Mount Vista', 'Sanso', 'Nam Dinh', 'Martinsburg', 'Eshkanan', 'Maringa', 'Alamosa', 'Suzzara', 'Portachuelo', 'Vempalle', 'San Antonio La Paz', 'Stafa', 'Basey', 'Sabaudia', 'Veliki Preslav', 'West Manchester', 'Weissenburg', 'Le Havre', 'Togamalai', 'Omitama', 'Bihariganj', 'Redange-sur-Attert', 'Saint-Chamond', 'Anklam', 'Perwez', 'Qivraq', 'Chieti', 'Gossau', 'Tutzing', 'Bacaadweyn', 'Kohat', 'Durbuy', 'Clarence', 'Olesno', 'Uberaba', 'Andorra la Vella', 'Ranong', 'Meshgin Shahr', 'Senigallia', 'Siahkal', 'Apatin', 'West Park', 'Fallsburg', 'Bom Lugar', 'Chassieu', 'Chauki Hasan Chauki Makhdum', 'Hejamadi', 'Santa Maria do Para', 'Al Jumayliyah', 'Harij', 'Reedley', 'Wafangdian', 'Suzak', 'Hirekerur', 'Muar', 'Xarardheere', 'Tando Muhammad Khan', 'Uchturpan', 'Kalba', 'Clearlake', 'Aulendorf', 'Parur', 'Joanopolis', 'Belagola', 'Chapeltown', 'Shildon', 'Uruoca', 'Gevelsberg', 'Hindoria', 'Przasnysz', 'Vigasio', 'Alamedin', 'Namrup', 'Luenoufla', 'Teofilo Otoni', 'Gopalapuram', 'Shahhat', 'Mesagne', 'Maisaka', 'San Pedro Nonualco', 'Panevezys', 'Quibor', 'Frutal', 'Casa Branca', 'Furukawamen', 'Presidente Vargas', 'Vadugappatti', 'Nefta', 'Maipu', 'Opa-locka', 'Maidstone', 'Santo Tomas La Union', 'Wheeling', 'Kamakhyanagar', 'Rozzano', 'Boljoon', 'Aconibe', 'Huzhou', 'Kongolo', 'Tacloban', 'Escalante', 'Wadowice', 'Machala', 'Lagonoy', 'Carnaubais', 'Lubuk Sikaping', 'Yupiltepeque', 'Gwoza', 'Padilla', 'Dhari', 'Refahiye', 'Sevierville', 'Fellbach', 'Raub', 'Plaine du Nord', 'Tepexi de Rodriguez', 'Garango', 'Nakapiripirit', 'Grosse Pointe Park', 'Aerzen', 'Lingcheng', 'Mogliano Veneto', 'Wiesloch', 'Tokatippa', 'Bezou', 'Bela', 'Wahlstedt', 'Huichang', 'Raghudebbati', 'El Sobrante', 'Badarpur', 'Nakskov', 'Kurwar', 'Siquinala', 'Berga', 'Jamalpur', 'Sidi Taibi', 'Crema', 'Tharad', 'Colomba', 'Marostica', 'Temiskaming Shores', 'Karwar', 'Pallavaram', 'Dokolo', 'Nawai', 'Cullman', 'Waldkraiburg', 'Yoichi', 'Nizip', 'Litomysl', 'Huong Canh', 'Appenweier', 'Gimbi', 'Hulst', 'Beshkent Shahri', 'Ban Wat Lak Hok', 'San Martin De Porres', 'Chuy', 'Fallon', 'Domerat', 'Ruggell', 'Subang', 'Kapalong', 'Shahrak-e Enqelab', 'Finneytown', 'Casamassima', 'Forks', 'Silvassa', 'Pharr', 'Ait Bouziyane', 'Sirari', 'Saint-Etienne', 'Halol', 'Aregua', 'Calatagan', 'Palmas', 'Milajerd', 'Ar Riqqah', 'Waghausel', 'Tlaxcala', 'Pellezzano', 'Segovia', 'Villeneuve-les-Maguelone', 'Narrogin', 'Vila Pouca de Aguiar', 'Madagh', 'Tanambe', 'Rheinau', 'Tsubame', 'Kozluk', 'El Ghaziye', 'Ciudad Insurgentes', 'Zuchwil', 'Shangzhou', 'Morton', 'Ieper', 'Hefei', 'Qiryat Ono', 'West Vero Corridor', 'Rumonge', 'Suohe', 'Juprelle', 'Airmadidi', 'Pecel', 'Pa Sang', 'Phra Nakhon Si Ayutthaya', 'Castrolibero', 'Koprivnica', 'Formiga', 'Petatlan', 'Xuanzhou', 'Navirai', 'Weilburg', 'Belgaum', 'Sovetskoe', 'Mwaline al Oued', 'Fort-de-France', 'Belkheir', 'Sarapiqui', 'Manandona', 'Ernagudem', 'Dunaivtsi', 'Perama', 'Eastmont', 'Zeerust', 'Sao Miguel', 'La Crescenta-Montrose', 'Kardzhali', 'Faisalabad', 'Asan', 'Trenton', 'Kuttyattur', 'Ban Thap Kwang', 'Sahtah', 'Tila', 'Ait Tamlil', 'Tomball', 'Pathiyanikunnu', 'Markdorf', 'Kitakami', 'Zacatecoluca', 'Thouars', 'Belem', 'Albuquerque', 'Veenendaal', 'Namutumba', 'Chalette-sur-Loing', 'Shilan', 'Consolacion', 'Brahmanpara', 'Pola de Lena', 'Gislaved', 'Arlesheim', 'Inuma', 'Royton', 'Sawadah', 'La Magdalena Tlaltelulco', 'Wenlan', 'Portoviejo', 'Jaguarao', 'Atlit', 'Yellareddi', 'Chima', 'Ribnica', 'Loves Park', 'Vera Cruz', 'Sape', 'Alauli', 'Antargangi', 'DeForest', 'Lethbridge', 'Fuerte Olimpo', 'Kiskunlachaza', 'Mahagama', 'Ulubey', 'Cacimba de Dentro', 'Thames', 'Junqueiropolis', 'Ganeshpur', 'Avrille', 'Guediawaye', 'Flores de Goias', 'Nevele', 'Osisioma', 'Valtoha', 'Alfonso Castaneda', 'Embu-Guacu', 'Ballia', 'Elmont', 'Adampur', 'Montalto Uffugo', 'Locate di Triulzi', 'Jinju', 'Xenia', 'Douar Jwalla', 'Zanzibar', 'Ansbach', 'Cafarnaum', 'Riacho de Santana', 'Magburaka', 'Hampton', 'Kumo', 'Adana', 'Salisbury', 'Begoro', 'Rehlingen-Siersburg', 'Tambulig', 'Howli', 'Port Hueneme', 'Saire', 'Lishanpu', 'Mykhailivka', 'Xiazhai', 'Sochaczew', 'Minbya', 'Vienne', 'Shegaon', 'Hajnowka', 'Alberobello', 'Karatas', 'Nyamti', 'Tatarikan', 'Puqiancun', 'Andrews', 'Kraluv Dvur', 'Calnali', 'Anna Regina', 'Voss', 'Rietberg', 'Baracoa', 'Caglayancerit', 'Larkhall', 'Fairwood', 'Sardoba', 'Beni Fouda', 'Lakshminarayanapuram', 'Kandy', 'Pigcawayan', 'Yuchengcun', 'Barru', 'Wulflingen', 'Koktokay', 'Santa Rosa de Cabal', 'Ramona', 'Rucphen', 'Jinghai', 'Twist', 'Alberdi', 'Izola', 'Bom Jardim', 'Benito Juarez', 'Grinon', 'Mundgod', 'Manito', 'Ban Tha Luang Lang', 'Colesberg', 'Hinche', 'Sidmant al Jabal', 'Vilnius', 'Mount Laurel', 'Dyero', 'Madari Hat', 'Patian', 'Guimbal', 'Linnei', 'Astaneh-ye Ashrafiyeh', 'Delfzijl', 'Walla Walla', 'Saint-Louis du Sud', 'Shilou', 'Puchov', 'Trossingen', 'Spring Valley', 'May Pen', 'Hadleigh', 'Hoffman Estates', 'Hirschaid', 'Dorridge', 'Ampohibe', 'Amapa', 'Temirtau', 'Lakhsetipet', 'Raeren', 'Liffre', 'Hassi Fedoul', 'West Grey', 'Jaggayyapeta', 'Akita', 'Burien', 'Kranjska Gora', 'Urla', 'Apapa', 'Tucano', 'Stouffville', 'Bayt Ummar', 'Chamrajnagar', 'Cagwait', 'Sorada', 'Saoula', 'Cavan Monaghan', 'Somotillo', 'Kabarnet', 'Ash Shatrah', 'Forest Hill', 'Colleyville', 'Nzega', 'Kaufering', 'Bikou', 'Ksar Sbahi', 'Muramvya', 'Impfondo', 'Dalachi', 'Janze', 'Baruun-Urt', 'Scornicesti', 'Caem', 'Darayya', 'Foya Tangia', 'Robertsville', 'Ban Ho Mae Salong', "Bi'r al `Abd", 'Giyani', 'Dhamar', 'Bni Frassen', 'Brownhills', 'Buckie', 'Majagua', 'Gainsborough', 'Ayotoxco de Guerrero', 'East Milton', 'Sanchi', 'Pogradec', 'Yurimaguas', 'Xingyi', 'Tripunittura', 'Lady Lake', 'Safotu', 'Gabu', 'Pitt Meadows', 'Sahalanona', 'Vilaseca de Solcina', 'Shinjuku', 'Phnom Penh', 'Espumoso', 'Tiantoujiao', 'Haukipudas', 'Cuilapan de Guerrero', 'Bafoussam', 'Sunjiayan', 'Bhanpura', 'Sarangapuram', 'Frontera', 'Roh', 'Prince Albert', 'San Bernardino', 'Maranello', 'Ales', 'Shenley Brook End', 'Morayra', 'Maba', 'Pelileo', 'Majiadiancun', 'Broadstone', 'Eski-Nookat', 'Axapusco', 'Najasa', 'Iuiu', 'Morieres-les-Avignon', 'Capilla del Senor', 'Amarante', 'Bretigny-sur-Orge', 'Jhaua', 'Deulgaon Mahi', 'Waterbury', 'Euriapolis', 'Thika', 'Rothrist', 'Tinja', 'Umarizal', 'Vihari', 'Tepatlaxco', 'Tan Son', 'Anjangoveratra', 'Yelimane', 'Poulsbo', 'Hutchinson', 'Ittikelakunta', 'Villena', 'Mositai', 'Roi Et', 'Tulsipur', 'Munhall', 'Staufen im Breisgau', 'Ciudad Hidalgo', 'Naama', 'Vratsa', 'Sales Oliveira', 'Xingtai', 'Monteriggioni', 'Grey Highlands', 'Camoluk', 'Ochsenhausen', 'Winnenden', 'Bogen', 'Ambinanisakana', 'Rolla', 'New Philadelphia', 'Varzelandia', 'Can Tho', 'Tepetlixpa', 'Gursarai', 'Grumo Appula', 'Rudraprayag', 'Kelheim', 'Pionki', 'Kedavur', 'Frankfort', 'Bishamagiri', 'Shingbwiyang', 'Montes Altos', 'Longtangwan', 'Veszprem', 'Vadnais Heights', 'Pozarevac', 'Zyrardow', 'Nis', 'Julianadorp', 'Beigang', 'Wuhuang', 'Ambalakindresy', 'Yeosu', 'Hisar', 'Chithara', 'Caldwell', 'San Remigio', 'Budamangalam', 'Tamani', 'Shetpe', 'Velyki Luchky', 'Jaramijo', 'East Finchley', 'Siedlce', 'Tadigadapa', 'Umuahia', 'Jalgaon', 'Schneeberg', 'Chilonga', 'Zhdanivka', 'Bananeiras', 'Nasaud', 'Karlsdorf-Neuthard', 'Culemborg', 'Luanco', 'Analamary', 'Hinabangan', 'Borgomanero', 'Don Sak', 'Newark', 'Salqin', 'Tafersit', 'Siyang', 'Goussainville', 'Cermenate', 'Gurlapeta', 'Kiri', 'Luis Gomes', 'Partizanske', 'Puducherry', 'Strzegom', 'Sai Mai', 'Petlawad', 'Vicenza', 'Debbache el Hadj Douadi', 'Amarpura', 'Val-des-Monts', 'Haicheng', 'Kempton Park', 'Raspur Patasia', 'Killimangalam', 'Thanatpin', 'Oji', 'Neno', 'Kato Polemidia', 'Anapurus', 'Shakiso', 'Diepholz', 'Altrincham', 'Nowa Ruda', 'Sirnak', 'Pettaivayttalai', 'Schmelz', 'Velur', 'Wolverton', 'Goygol', 'Groesbeek', 'Kannavam', 'Bujumbura', 'Ihorombe', 'Hasami', 'Revur', 'Nosiarina', 'Ciudad Guayana', 'Economy', 'Rayachoti', 'Naciria', 'Rompicherla', 'Fairbanks', 'Timbio', 'Santo Amaro', 'Vares', 'Mafeteng', "G'oliblar Qishlog'i", 'Kumul', 'Kline', 'Alampur', 'Moda', 'Lhasa', 'Sirhali Kalan', 'Lower Providence', 'Fuengirola', 'Naigarhi', 'Cottonwood Heights', 'Aldeias Altas', 'Sibinal', 'Sulaco', 'Pintuyan', 'Sombrio', 'Negrete', 'Tsau', 'Casa Nova', 'Garupa', 'Pencheng', 'Langnau', 'Gokinepalle', 'General Conesa', 'Bandamurlanka', 'Morarano-Gara', 'Tartu', 'Hombrechtikon', 'Wegberg', 'Argelia', 'Neubrandenburg', 'Bardmoor', 'Benipati', 'Saint-Laurent-de-la-Salanque', 'Sungai Penuh', 'Iklod', 'Alem Paraiba', 'Uitgeest', 'Kiranur', 'Tinambac', 'Ait Yaich', 'Teniet el Abed', 'Middlesex Centre', 'Grand-Bassam', 'Redhill', 'Zhaotong', 'Mohammad Shahr', 'Cranendonck', 'Vanzago', 'Wiener Neustadt', 'Telavi', 'Fonte Boa', 'Svalyava', 'Valea Lupului', 'Kitale', 'Campos Gerais', 'Dayr as Sanquriyah', 'Atmakur', 'Boynton Beach', 'Can', 'Kalongo', 'Bad Voslau', 'Trutnov', 'Sunagawa', 'Bandarbeyla', 'San Pedro Ixtlahuaca', 'Lonquimay', 'Kangal', 'Bornem', 'Luanza', 'Tacurong', 'Totteridge', 'Kumcati', "Saint George's", 'Uttoxeter', 'Neath', 'Cairns', 'Kelkit', 'Banlung', 'Minneola', 'Nedre Eiker', 'Vembaditalam', 'Ramanayyapeta', 'New Ross', 'Szamotuly', 'Lapao', 'Sakkamapatti', 'Anjuna', 'Lower Southampton', 'Uch-Korgon', 'Salovci', 'Bhawanigarh', 'Venceslau Bras', 'Hosahalli', 'Sibsagar', 'Pazarcik', 'Grigoriopol', 'Shencottah', 'Trenque Lauquen', 'Oak Hills', 'Magangue', 'Beharona', 'Hirakawacho', 'Katsuura', 'Belpukur', 'Xingang', 'Topoloveni', 'Auxerre', 'Caba', 'Amioun', 'Lake Oswego', 'Nicoya', 'Piprai', 'Takhemaret', 'Jula Buzarg', 'Nam Som', 'Wad Medani', 'Dubasari', 'Ricaurte', 'Kocevje', 'Torit', 'Kanamadi', 'Tasso', 'Hermiston', 'Seven Hills', 'Ahmed Rachedi', 'Maryville', 'Lunen', 'Marondry', 'Ridgewood', 'Chajul', 'Norosi', 'Amatitlan', 'Williamstown', 'Uliastay', 'Lorsch', 'Tekkekara', 'Manisa', 'Piatra Neamt', 'Serta', 'Padarti', 'Agaro', 'Boa Vista do Tupim', 'Hariana', 'Kobayashi', 'Jessup', 'Chahe', 'Bournemouth', 'Anajatuba', 'Tugatog', 'Vavur', 'Ksar el Boukhari', 'Masku', 'Sungandiancun', 'Candoni', 'Mori', 'Saint-Omer', 'Alhaurin el Grande', 'Atasu', 'Rodos', 'Bordj el Bahri', 'Iheddadene', 'Golden Hills', 'Vista', 'Cabrican', 'Guara', 'Sendarappatti', 'Djouab', 'Altea', 'Omagari', 'Firou', 'Guinguineo', 'Poytug`', 'Chennamangalam', 'Ramantali', 'Penonome', 'Tindivanam', 'New River', 'Pedda Tumbalam', 'Hendersonville', 'East Ridge', 'The Colony', 'Velizy-Villacoublay', 'Koheda', 'Ouaregou', 'Sint-Truiden', 'Zonnebeke', 'Cartersville', 'Serro', 'Smorumnedre', 'Sebt Bni Garfett', 'Budhlada', 'Malmesbury', 'Kranj', 'Union de San Antonio', 'Batu Pahat', 'Lian', 'Baldeogarh', 'Katsushika-ku', 'Kiho', 'Decines-Charpieu', 'Jangy-Nookat', 'Mudgal', 'Pocoes', 'Nanping', 'Mota', 'Maoussa', 'Wolnzach', 'Deysbrook', 'Bad Essen', 'Edogawa', 'Cromer', 'Brejetuba', 'Wasilkow', 'Beni Rached', 'Balch Springs', 'Folsom', 'Nakonde', 'Priboj', 'Sigma', 'Monywa', 'Raismes', 'Patut', 'Kangar', 'Kostrzyn', 'Settsu', 'Nandongcun', 'Moosburg', 'Vohitrandriana', 'Sidi Bou Othmane', 'Had Sahary', 'Prescott Valley', 'Burghausen', 'Meco', 'Bourbonnais', 'San Maurizio Canavese', 'Tequixquiac', 'Wanyuan', 'Hungund', 'Huambo', 'Sopelana', 'Madaoua', 'Mottola', 'North Valley Stream', 'Schaafheim', 'Mataas Na Kahoy', 'Maracay', 'Deh Bakri', 'Turkistan', 'Spiesen-Elversberg', 'Lansdale', 'Sinha', 'Haddonfield', 'Mechanicstown', 'San Vicente del Caguan', 'Middleburg Heights', 'New Lenox', 'Mehlville', 'Ambalakirajy', 'Tak', 'Willemstad', 'Yato', 'Carbondale', 'Wakiso', 'Farsala', 'Gudarah', 'Dorchester', 'Ban Pang Mu', 'Mosgiel', 'Grafing bei Munchen', 'Domkonda', 'Challans', 'Sulmona', 'Ayirurpara', 'Kaman', 'Kumaramangalam', 'Jiaojiazhuang', 'Kelsterbach', 'Saratoga', 'Haledon', 'Apodi', 'Kuchinda', 'Slidell', 'Miamisburg', 'Ponmana', 'Icatu', 'Oliva', 'Cliza', 'Ellenabad', 'Pikit', 'Fengning', 'Erbaa', 'Koraput', 'Bhiloda', 'Thenia', 'Chinoz', 'Zaysan', 'Sarmastpur', 'Lerici', 'Wachtberg', 'Kunjatturu', 'Mzimba', 'Oficina Maria Elena', 'Paco do Lumiar', 'Swansea', 'Showt', 'Batroun', 'El Hadjar', 'Childwall', 'Xekong', 'Anta', 'Khem Karan Saray', 'Panruti', 'Yatton', 'Avalpundurai', 'Kudayattur', 'Al Miqdadiyah', 'Humanes de Madrid', 'Tako', 'As Sa`diyah', 'Santana do Livramento', 'Terrebonne', 'Caninde de Sao Francisco', 'Dombachcheri', 'Podlehnik', 'Andergrove', 'Mima', 'San Vendemiano', 'Kindi', 'Mengdingjie', 'Xindi', 'Kembhavi', 'Verden', 'Tarboro', 'Manga', 'Santa Rosa Jauregui', 'Eiras', 'Sapporo', 'Puerto Octay', 'Bothell West', 'Almolonga', 'Asperg', 'Nanchong', 'Honnali', 'Orange City', 'Ommangi', 'Marilia', 'Tobyl', 'Coatesville', 'Laualagaon', 'Chinacota', 'Breuillet', 'Sinnamary', 'Sun Lakes', 'Tandur', 'La Uruca', 'Ann Arbor', 'Boumerdes', 'Cizre', 'Bicheno', 'Ebersberg', 'Diangoute Kamara', 'Havanur', 'Barra Mansa', 'Santa Margherita Ligure', 'Bruchhausen-Vilsen', 'Daet', 'Dumai', 'Lanta', 'Cahama', 'Rajgadh', 'Kapurthala', "Sidi Smai'il", 'Bir el Ater', 'Yesilkoy', 'Yanshuiguan', 'Arantangi', 'Gaza', 'Alajarvi', 'Kondalahalli', 'Salmanshahr', 'Campo Maior', 'Lago Vista', 'Savignano sul Panaro', 'Nong Kung Si', 'Barnia', 'Puttige', 'Bracebridge', 'Hipparga', 'Bramsche', 'Yangi Mirishkor', 'Kimyogarlar', 'Khemis el Khechna', 'Maizieres-les-Metz', 'Beyneu', 'Eden Isle', 'South Burlington', 'Ramnagar Farsahi', 'Vught', 'Hexiwu', 'Bhatpalli', 'Kokoszki', 'Rocky Mount', 'Ano Syros', 'Lutayan', 'Ridgecrest', 'Ranpur', 'Ituango', 'Kapelle-op-den-Bos', 'Ladol', 'Ambalabe', 'Taramangalam', 'Kumanovo', 'Funyan Bira', "Itaporanga d'Ajuda", 'Samana', 'Nij Khari', 'Kafr Nabudah', 'Dombasle-sur-Meurthe', 'Jurbise', 'Ciudad del Carmen', 'Toon', 'Big Spring', 'Talpa de Allende', 'Hinton', 'Araouane', 'Morungaba', 'Stolac', 'Lobez', 'Toulon', 'San Cesareo', 'Three Lakes', 'Jacareacanga', 'Marne', 'Fredericia', 'Convencion', 'Tarquinia', 'Tepe-Korgon', 'El Ghiate', 'Tashkent', 'Kirchlengern', 'Roseau', 'Nahan', 'Givors', 'Ikom', 'Bumpe', 'Shirhatti', 'Reggiolo', 'San Blas', 'Cholet', 'Dhirwas', 'Bata', 'Taito', 'Jind', 'Sahil', 'La Fleche', 'Ardakan', 'Batangafo', 'Kaza', 'Bac', 'Rio Jimenez', 'Caycuma', 'Kippax', 'Denyekoro', 'Ganganagar', 'Mahalpur', 'Sarpavaram', 'Punarakh', 'Cessnock', 'San Fausto de Campcentellas', 'Scandicci', 'Valle Vista', 'Manaoag', 'Noe', 'Penha', 'Couvin', 'Bakun', 'Porto Uniao', 'Bolanos de Calatrava', 'Sun City West', 'Orzinuovi', 'Carrara', 'Tilhar', 'Prudentopolis', 'Ciudad Real', 'Ermont', 'Sulop', 'Brive-la-Gaillarde', 'El Estor', 'Tirschenreuth', 'Foum el Anser', 'Ban Pong Yaeng Nai', 'Lissegazoun', 'Gucheng', 'Louth', 'Minamiise', 'Janow Lubelski', 'Zacualpan', 'Bobrynets', 'Bel Imour', 'El Valle del Espiritu Santo', 'Vodiriana', 'Ad Dujayl', 'Puerto La Cruz', 'Cadiz', 'Basse Santa Su', 'Longuenesse', 'Kalamula', 'Ponca City', 'Lebowakgomo', 'Chittandikavundanur', 'Villarrobledo', 'Settivaripalle', 'Maksi', 'Whitnash', 'Parintins', 'Mykolaiv', 'Somavarappatti', 'Az Zaydiyah', 'Chop', 'Pingcha', 'Gojo', 'Anicuns', "Vaprio d'Adda", 'Ailan Mubage', 'Gernsheim', 'Ozamiz City', 'Bamendjou', 'Mantingan', 'Centre Wellington', 'Somolu', 'Ijevan', 'Gurun', 'Sonhauli', 'Hoshoot', 'Lippstadt', 'Payshamba Shahri', 'Pirapora', 'Mariyadau', 'Touba', 'Cavallino', 'Broomall', 'Saint-Paul-Trois-Chateaux', 'Pirangut', 'Monte San Savino', 'Addis Ababa', 'Wenzhou', 'Hongshui', 'Bamora', 'Dadeldhura', 'Barrhead', 'Qizilhacili', 'Cenon', 'Yany Kapu', 'Lauderdale Lakes', 'Franklin Farm', 'Licheng', 'Lacchiarella', 'Oshwe', 'Blantyre', 'Birpur', 'Bucha', 'San Manuel', 'Laguna Beach', 'Donna', 'Clute', 'Achchampeta', 'Rajapalaiyam', 'Koszeg', 'Venancio Aires', 'Romang', 'Four Corners', 'Sturgeon Bay', 'Tuscaloosa', 'Goianira', 'Chattamangalam', 'Tadotsu', 'Sindhnur', 'Ouda-daito', 'Harmah', 'Nordhausen', 'Had Laaounate', 'Dolny Kubin', 'Penarth', 'Androka', 'Cajibio', 'Molesey', 'Zhanjia', 'Abuzar-e Ghaffari', 'Sayaxche', 'Austin', 'Pipalrawan', 'Chillan', 'Baena', 'Langenberg', 'Mykolaivka', 'Ferentino', 'Biddulph', 'Harnosand', 'Koekelberg', 'Little Hulton', 'Al Qitena', 'Lewisville', 'Turaiyur', 'Chakkuvarakal', 'Tres Rios', 'Banabuiu', 'Muzaffargarh', 'Erragondapalem', 'Ghambiraopet', 'Okara', 'Ilioupoli', 'Paddhari', 'Nariar', 'Longjing', "M'Sila", 'Bongor', 'Viterbo', 'Vlasenica', 'Montalvania', 'Parali', 'Spa', 'Westford', 'Muttam', 'Raamsdonksveer', 'Garoua Boulai', 'Batarasa', 'Vasylkivka', 'Estevan', 'Luanda', 'MacArthur', 'Zulpich', 'Phulbani', 'Ifarantsa', 'Umbita', 'Korogwe', 'Ilhabela', 'Baronissi', 'Kalyvia Thorikou', 'Lahfayr', 'Ba Ria', 'Bronderslev', 'Kaynasli', 'Usmate Velate', 'Hirosaki', 'Quedgeley', 'Zawyat ech Cheikh', 'Soledad Atzompa', 'Ambohitromby', 'Joao Teves', 'Siddipet', 'Bitkine', 'Bellshill', 'Feke', 'Grass Valley', 'Ashton in Makerfield', 'Punjai Puliyampatti', 'Lobito', 'Bernalda', 'Loudonville', 'Asbury Lake', 'Gyoda', 'Artik', 'Rio Rancho', 'Roissy-en-Brie', 'Garches', 'Chemini', ]; export const maxStringLength = 49; ================================================ FILE: drizzle-seed/src/datasets/companyNameSuffixes.ts ================================================ export default [ 'LLC', 'Ltd', 'Inc.', 'Corp.', 'PLC', 'GmbH', 'AG', 'S.A.', 'S.p.A.', 'SARL', 'B.V.', 'N.V.', 'Oy', 'AB', 'AS', 'Pty Ltd', 'K.K.', 'JSC', 'Ltda.', 'Pvt Ltd', 'Sdn Bhd', 'A/S', 'SAOG', 'Co.', 'SCC', ]; export const maxStringLength = 7; ================================================ FILE: drizzle-seed/src/datasets/countries.ts ================================================ /** * The original source for countries data was taken from https://www.kaggle.com/datasets/manusmitajha/countrydatacsv * We've excluded a few countries and their cities from this list because we don't think they should ever appear in any list */ export default [ 'Afghanistan', 'Albania', 'Algeria', 'Angola', 'Antigua and Barbuda', 'Argentina', 'Armenia', 'Australia', 'Austria', 'Azerbaijan', 'Bahamas', 'Bahrain', 'Bangladesh', 'Barbados', 'Belgium', 'Belize', 'Benin', 'Bhutan', 'Bolivia', 'Bosnia and Herzegovina', 'Botswana', 'Brazil', 'Brunei', 'Bulgaria', 'Burkina Faso', 'Burundi', 'Cambodia', 'Cameroon', 'Canada', 'Cape Verde', 'Central African Republic', 'Chad', 'Chile', 'China', 'Colombia', 'Comoros', 'Congo, Dem. Rep.', 'Congo, Rep.', 'Costa Rica', "Cote d'Ivoire", 'Croatia', 'Cyprus', 'Czech Republic', 'Denmark', 'Dominican Republic', 'Ecuador', 'Egypt', 'El Salvador', 'Equatorial Guinea', 'Eritrea', 'Estonia', 'Fiji', 'Finland', 'France', 'Gabon', 'Gambia', 'Georgia', 'Germany', 'Ghana', 'Greece', 'Grenada', 'Guatemala', 'Guinea', 'Guinea-Bissau', 'Guyana', 'Haiti', 'Hungary', 'Iceland', 'India', 'Indonesia', 'Iran', 'Iraq', 'Ireland', 'Israel', 'Italy', 'Jamaica', 'Japan', 'Jordan', 'Kazakhstan', 'Kenya', 'Kiribati', 'Kuwait', 'Kyrgyz Republic', 'Lao', 'Latvia', 'Lebanon', 'Lesotho', 'Liberia', 'Libya', 'Lithuania', 'Luxembourg', 'Macedonia, FYR', 'Madagascar', 'Malawi', 'Malaysia', 'Maldives', 'Mali', 'Malta', 'Mauritania', 'Mauritius', 'Micronesia, Fed. Sts.', 'Moldova', 'Mongolia', 'Montenegro', 'Morocco', 'Mozambique', 'Myanmar', 'Namibia', 'Nepal', 'Netherlands', 'New Zealand', 'Niger', 'Nigeria', 'Norway', 'Oman', 'Pakistan', 'Panama', 'Paraguay', 'Peru', 'Philippines', 'Poland', 'Portugal', 'Qatar', 'Romania', 'Rwanda', 'Samoa', 'Saudi Arabia', 'Senegal', 'Serbia', 'Seychelles', 'Sierra Leone', 'Singapore', 'Slovak Republic', 'Slovenia', 'Solomon Islands', 'South Africa', 'South Korea', 'Spain', 'Sri Lanka', 'St. Vincent and the Grenadines', 'Sudan', 'Suriname', 'Sweden', 'Switzerland', 'Tajikistan', 'Tanzania', 'Thailand', 'Timor-Leste', 'Togo', 'Tonga', 'Tunisia', 'Turkey', 'Turkmenistan', 'Uganda', 'Ukraine', 'United Arab Emirates', 'United Kingdom', 'United States', 'Uruguay', 'Uzbekistan', 'Vanuatu', 'Venezuela', 'Vietnam', 'Yemen', 'Zambia', ]; export const maxStringLength = 30; ================================================ FILE: drizzle-seed/src/datasets/emailDomains.ts ================================================ export default [ 'gmail.com', 'yahoo.com', 'outlook.com', 'msn.com', 'hotmail.com', 'aol.com', 'hotmail.co.uk', 'hotmail.fr', 'yahoo.fr', 'wanadoo.fr', 'orange.fr', 'comcast.net', 'yahoo.co.uk', 'yahoo.com.br', 'yahoo.co.in', 'live.com', 'rediffmail.com', 'free.fr', 'gmx.de', 'web.de', 'ymail.com', 'libero.it', ]; export const maxStringLength = 14; ================================================ FILE: drizzle-seed/src/datasets/firstNames.ts ================================================ /** * The original source for first names data was taken from https://www.kaggle.com/datasets/kaggle/us-baby-names?select=StateNames.csv */ export default [ 'Robert', 'John', 'Michael', 'David', 'James', 'Richard', 'Christopher', 'William', 'Daniel', 'Mark', 'Thomas', 'Jose', 'Joseph', 'Matthew', 'Jason', 'Andrew', 'Joshua', 'Steven', 'Anthony', 'Jonathan', 'Angel', 'Ryan', 'Kevin', 'Jacob', 'Nicholas', 'Brandon', 'Justin', 'Charles', 'Gary', 'Paul', 'Scott', 'George', 'Christian', 'Eric', 'Brian', 'Alexander', 'Ronald', 'Jayden', 'Juan', 'Edward', 'Noah', 'Diego', 'Donald', 'Ethan', 'Kyle', 'Peter', 'Jeffrey', 'Luis', 'Timothy', 'Nathan', 'Tyler', 'Frank', 'Stephen', 'Dennis', 'Larry', 'Jesus', 'Kenneth', 'Austin', 'Adrian', 'Adam', 'Sebastian', 'Gregory', 'Carlos', 'Aiden', 'Gabriel', 'Isaac', 'Zachary', 'Julian', 'Benjamin', 'Liam', 'Billy', 'Miguel', 'Mason', 'Aaron', 'Mike', 'Dylan', 'Sean', 'Alejandro', 'Bryan', 'Jordan', 'Cody', 'Jeremy', 'Samuel', 'Harry', 'Victor', 'Joe', 'Eduardo', 'Isaiah', 'Jorge', 'Logan', 'Elijah', 'Bruce', 'Patrick', 'Jerry', 'Jesse', 'Lawrence', 'Steve', 'Walter', 'Harold', 'Arthur', 'Lucas', 'Francisco', 'Douglas', 'Oscar', 'Craig', 'Alexis', 'Todd', 'Randy', 'Alan', 'Raymond', 'Damian', 'Willie', 'Albert', 'Ricardo', 'Louis', 'Luke', 'Edgar', 'Travis', 'Evan', 'Ricky', 'Aidan', 'Jack', 'Jeff', 'Jimmy', 'Manuel', 'Oliver', 'Mateo', 'Johnny', 'Henry', 'Cristian', 'Terry', 'Dominic', 'Cameron', 'Gerald', 'Caleb', 'Christop', 'Bobby', 'Alex', 'Gavin', 'Shawn', 'Jackson', 'Ivan', 'Keith', 'Antonio', 'Vincent', 'Philip', 'Chad', 'Alfred', 'Eugene', 'Erik', 'Martin', 'Omar', 'Chris', 'Stanley', 'Sergio', 'Francis', 'Mario', 'Fernando', 'Taylor', 'Herbert', 'Santiago', 'Nathaniel', 'Cesar', 'Barry', 'Trevor', 'Dustin', 'Hunter', 'Roger', 'Andres', 'Javier', 'Bernard', 'Jim', 'Ian', 'Wayne', 'Leonardo', 'Giovanni', 'Josiah', 'Jeremiah', 'Glenn', 'Hector', 'Roberto', 'Rodney', 'Howard', 'Eli', 'Xavier', 'Jaxon', 'Levi', 'Derek', 'Danny', 'Jared', 'Landon', 'Ralph', 'Ruben', 'Gael', 'Connor', 'Tommy', 'Tony', 'Marc', 'Wyatt', 'Rick', 'Carter', 'Ayden', 'Tim', 'Roy', 'Owen', 'Greg', 'Joel', 'Leonard', 'Frederick', 'Russell', 'Jon', 'Jaden', 'Jeffery', 'Irving', 'Erick', 'Darren', 'Dale', 'Carl', 'Brayden', 'Ronnie', 'Gerardo', 'Pedro', 'Raul', 'Elias', 'Chase', 'Alberto', 'Troy', 'Tom', 'Axel', 'Julio', 'Emmanuel', 'Edwin', 'Norman', 'Marcus', 'Fred', 'Bill', 'Jake', 'Marco', 'Leo', 'Rafael', 'Armando', 'Jace', 'Garrett', 'Jaime', 'Earl', 'Shane', 'Cole', 'Phillip', 'Seth', 'Corey', 'Nicolas', 'Randall', 'Hayden', 'Abraham', 'Grayson', 'Tristan', 'Cory', 'Josue', 'Andy', 'Warren', 'Roman', 'Devin', 'Salvador', 'Shaun', 'Spencer', 'Infant', 'Ryder', 'Dillon', 'Max', 'Salvatore', 'Bradley', 'Seymour', 'Arturo', 'Iker', 'Dean', 'Milton', 'Sidney', 'Gustavo', 'Alfredo', 'Blake', 'Clarence', 'Brody', 'Enrique', 'Brett', 'Colton', 'Dan', 'Brendan', 'Charlie', 'Darrell', 'Hudson', 'Ezra', 'Emiliano', 'Ashton', 'Darryl', 'Dave', 'Nolan', 'Theodore', 'Casey', 'Colin', 'Easton', 'Caden', 'Marcos', 'Cooper', 'Mitchell', 'Morris', 'Don', 'Eddie', 'Jay', 'Marvin', 'Kaden', 'Curtis', 'Lance', 'Gerard', 'Israel', 'Ramon', 'Rickey', 'Alec', 'Carson', 'Ernesto', 'Riley', 'Kai', 'Ezekiel', 'Yahir', 'Dakota', 'Ron', 'Bob', 'Saul', 'Kayden', 'Adan', 'Fabian', 'Maxwell', 'Allen', 'Micheal', 'Parker', 'Micah', 'Miles', 'Gilbert', 'Grant', 'Malik', 'Abel', 'Darrin', 'Johnathan', 'Jase', 'Kaleb', 'Ray', 'Jaxson', 'Brent', 'Wesley', 'Tanner', 'Chester', 'Bryce', 'Lincoln', 'Preston', 'Maximiliano', 'Jerome', 'Sam', 'Ernest', 'Bentley', 'Colby', 'Elmer', 'Moises', 'Joaquin', 'Arnold', 'Stuart', 'Murray', 'Asher', 'Andre', 'Neil', 'Allan', 'Brady', 'Brad', 'Maximus', 'Dalton', 'Jonah', 'Kim', 'Kirk', 'Bryson', 'Kurt', 'Angelo', 'Rene', 'Jimmie', 'Emilio', 'Damien', 'Harvey', 'Moshe', 'Derrick', 'Kelly', 'Franklin', 'Rodrigo', 'Woodrow', 'Leon', 'Esteban', 'Hugo', 'Clayton', 'Guadalupe', 'Darin', 'Pablo', 'Luca', 'Ken', 'Ismael', 'Leroy', 'Guillermo', 'Tracy', 'Melvin', 'Lorenzo', 'Clifford', 'Hugh', 'Mathew', 'Jameson', 'Billie', 'Nelson', 'Herman', 'Ira', 'Jamie', 'Alexande', 'Lester', 'Glen', 'Damon', 'Emanuel', 'Maverick', 'Braxton', 'Zayden', 'Dominick', 'Irwin', 'Rudy', 'Calvin', 'Julius', 'Jermaine', 'Jakob', 'Donovan', 'Lee', 'Shaquille', 'Gordon', 'Peyton', 'Duane', 'Declan', 'Jalen', 'Jude', 'Tyrone', 'Bret', 'Gene', 'Felix', 'Guy', 'Devon', 'Cruz', 'Rylan', 'Clinton', 'Jonathon', 'Kaiden', 'Kingston', 'Kristopher', 'Felipe', 'Collin', 'Alfonso', 'Rodolfo', 'King', 'Everett', 'Chance', 'Johnnie', 'Clyde', 'Weston', 'Karl', 'Camden', 'Maddox', 'Bryant', 'Gage', 'Dwayne', 'Shannon', 'Gilberto', 'Braden', 'Lewis', 'Greyson', 'Rudolph', 'Floyd', 'Jayce', 'Harrison', 'Brayan', 'Cayden', 'Reginald', 'Jaiden', 'Brantley', 'Hyman', 'Perry', 'Kent', 'Alvin', 'Cade', 'Doug', 'Romeo', 'Jax', 'Silas', 'Ty', 'Emmett', 'Jackie', 'Leslie', 'Vernon', 'Jessie', 'Lloyd', 'Cecil', 'Roland', 'Ted', 'Amir', 'Cash', 'Gregg', 'Uriel', 'Donnie', 'Noel', 'Mauricio', 'Dana', 'Osvaldo', 'Sawyer', 'Rogelio', 'Terrence', 'Conner', 'Darius', 'Chaim', 'Maurice', 'Male', 'Malachi', 'Issac', 'Ramiro', 'Zane', 'Jaylen', 'Dawson', 'Willard', 'Randolph', 'Wilbur', 'Noe', 'Huey', 'Sammy', 'Lonnie', 'Morton', 'Chandler', 'Elliot', 'Geoffrey', 'Robin', 'Muhammad', 'Wallace', 'Matt', 'Drew', 'Bailey', 'Orlando', 'Jasper', 'Tyrese', 'Matteo', 'Leonel', 'Simon', 'Braylon', 'Corbin', 'Jayceon', 'Gunner', 'Dante', 'Daryl', 'Bennett', 'Ulises', 'Efrain', 'Drake', 'Rolando', 'Lukas', 'Arian', 'Trenton', 'Humberto', 'Ryker', 'Aldo', 'Landen', 'Xander', 'Dwight', 'Alvaro', 'Sheldon', 'Freddie', 'Vicente', 'Avery', 'Marty', 'Irvin', 'Ariel', 'Lane', 'Nestor', 'Chuck', 'Dominique', 'Baby', 'Kerry', 'Enzo', 'Nick', 'Yosef', 'Edmund', 'Oswaldo', 'Kobe', 'Aden', 'Clair', 'Freddy', 'Karter', 'Stacy', 'Byron', 'Roosevelt', 'Claude', 'Marion', 'Thiago', 'Colt', 'Sol', 'Lamont', 'Neal', 'August', 'Cason', 'Kason', 'Reynaldo', 'Malcolm', 'Beau', 'Ignacio', 'Kareem', 'Laurence', 'Finn', 'Domingo', 'Rigoberto', 'Solomon', 'Aaden', 'Case', 'Horace', 'Griffin', 'Rocco', 'Pete', 'Ross', 'Skyler', 'Kenny', 'Tucker', 'Morgan', 'Forrest', 'Timmy', 'Clint', 'Garry', 'Elwood', 'Knox', 'Elian', 'Zion', 'Trey', 'Vito', 'Jamel', 'Junior', 'Roderick', 'Brooks', 'Isidore', 'Kelvin', 'Ali', 'Octavio', 'Luther', 'Milo', 'Jett', 'Unknown', 'Milan', 'Nickolas', 'German', 'Terence', 'Virgil', 'Conor', 'Isaias', 'Cristopher', 'Jayson', 'Brenden', 'Joey', 'Tevin', 'Branden', 'Arjun', 'Carmine', 'Wendell', 'Judah', 'Nikolas', 'Izaiah', 'Dick', 'Jairo', 'Giovani', 'Ervin', 'Graham', 'Trent', 'Tyson', 'Cedric', 'Elliott', 'Myles', 'Kameron', 'Jaylon', 'Hubert', 'Grady', 'Homer', 'Quinn', 'Payton', 'Bennie', 'River', 'Dexter', 'Emil', 'Jamal', 'Orion', 'Alonzo', 'Paxton', 'Ashley', 'Desmond', 'Waylon', 'Patsy', 'Agustin', 'Shimon', 'Jarrod', 'Rex', 'Pat', 'Rhett', 'Benny', 'Adriel', 'Moses', 'Daquan', 'Johan', 'Adolfo', 'Otis', 'Kadeem', 'Jody', 'Wilson', 'Pasquale', 'Kendrick', 'Alonso', 'Ben', 'Ezequiel', 'Jair', 'Tomas', 'Zackary', 'Dane', 'Nasir', 'Alton', 'Tristen', 'Wilfredo', 'Lyle', 'Rowan', 'Deandre', 'Mordechai', 'Mohamed', 'Khalil', 'Maximilian', 'Devante', 'Wade', 'Norbert', 'Yehuda', 'Dallas', 'Menachem', 'Anderson', 'Jonas', 'Zachery', 'Zaiden', 'Giovanny', 'Clifton', 'Tommie', 'Ronaldo', 'Major', 'Barrett', 'Darnell', 'Keegan', 'Randal', 'Aarav', 'Burton', 'Terrance', 'Reid', 'Fredrick', 'Bobbie', 'Ace', 'Kyler', 'Yoel', 'Earnest', 'Toby', 'Merle', 'Archer', 'Santos', 'Nico', 'Beckett', 'Yisroel', 'Nehemiah', 'Lynn', 'Holden', 'Matias', 'Rufus', 'Mohammed', 'Hayes', 'Marshall', 'Trinidad', 'Valentin', 'Heath', 'Weldon', 'Ed', 'Lionel', 'Jaret', 'Aron', 'Bernardo', 'Zander', 'Devonte', 'Meyer', 'Ulysses', 'Myron', 'Lowell', 'Linwood', 'Rocky', 'Phoenix', 'Antoine', 'Cyrus', 'Demarcus', 'Bruno', 'Titus', 'Madison', 'Jarod', 'Caiden', 'Kash', 'Jarvis', 'Clay', 'Notnamed', 'Doyle', 'Dallin', 'Atticus', 'Orville', 'Nixon', 'Loren', 'Wilbert', 'Karson', 'Brennan', 'Brittany', 'Marlon', 'Gonzalo', 'Carlton', 'Cary', 'Marquis', 'Amari', 'Rohan', 'Terrell', 'Gianni', 'Johnathon', 'Jan', 'Boston', 'Ibrahim', 'Yitzchok', 'Jean', 'Camron', 'Ronny', 'Porter', 'Adonis', 'Alessandro', 'Stefan', 'Giancarlo', 'Clark', 'Lupe', 'Edgardo', 'Scotty', 'Messiah', 'Benito', 'Zachariah', 'Kristian', 'Bodhi', 'Ronan', 'Emerson', 'Wilfred', 'Heriberto', 'Davis', 'Stewart', 'Efren', 'Brock', 'Christophe', 'Sammie', 'Kade', 'Denis', 'Ernie', 'Kayson', 'Quincy', 'Abe', 'Estevan', 'Jamari', 'Mohammad', 'Kendall', 'Demetrius', 'Walker', 'Shlomo', 'Louie', 'Kody', 'Valentino', 'Jaheim', 'Vince', 'Frankie', 'Aubrey', 'Quinton', 'Royce', 'Ari', 'Abram', 'Jessica', 'Curt', 'Bart', 'Daren', 'Braylen', 'Alexandro', 'Lamar', 'Kasen', 'Willis', 'Vihaan', 'Delbert', 'Triston', 'Yakov', 'Courtney', 'Niko', 'Pierre', 'Jaquan', 'Braulio', 'Santino', 'Quentin', 'Dario', 'Dusty', 'Neymar', 'Bridger', 'Tyrell', 'Bertram', 'Raymundo', 'Isiah', 'Reed', 'Archie', 'Prince', 'Rory', 'Davon', 'Stacey', 'Bradford', 'Nikolai', 'Kian', 'Kase', 'Casen', 'Dion', 'Isai', 'Armand', 'Percy', 'Emily', 'Leland', 'Sterling', 'Yandel', 'Olin', 'Sanford', 'Marlin', 'Denzel', 'Mekhi', 'Elbert', 'Braydon', 'Dewey', 'Dudley', 'Shmuel', 'Jadon', 'Braeden', 'Yair', 'Rob', 'Mickey', 'Monty', 'Hannah', 'Luciano', 'Remington', 'Akeem', 'Julien', 'Carmen', 'Jensen', 'Johnie', 'Mack', 'Rickie', 'Javon', 'Misael', 'Elvis', 'Eden', 'Jess', 'Phil', 'Malakai', 'Melvyn', 'Rod', 'Arnulfo', 'Cohen', 'Fidel', 'Levar', 'Dominik', 'Grover', 'Yaakov', 'Landyn', 'Colten', 'Dorian', 'Keaton', 'Loyd', 'Brodie', 'Otto', 'Eliezer', 'Ahmed', 'Shelby', 'Hernan', 'Odin', 'Regis', 'Jaydon', 'Uriah', 'Remy', 'Tariq', 'Sonny', 'Carroll', 'Xavi', 'Christia', 'Marcel', 'Brendon', 'Kellan', 'Bowen', 'Unnamed', 'Scottie', 'Justice', 'Kurtis', 'Stephan', 'Daxton', 'Coby', 'Jadiel', 'Dashawn', 'Amare', 'Cannon', 'Blaine', 'Tate', 'Talmadge', 'Nathanael', 'Adolph', 'Talan', 'Tobias', 'Sylvester', 'Tadeo', 'Darrel', 'Winston', 'Garland', 'Meir', 'Kory', 'Joseluis', 'Wilburn', 'Rusty', 'Avraham', 'Ayaan', 'Theo', 'Mathias', 'Marcelo', 'Dino', 'Kolby', 'Cael', 'Tzvi', 'Davion', 'Aryan', 'Rhys', 'Cain', 'Duke', 'Pierce', 'Landry', 'Tristin', 'Emma', 'Zackery', 'Antone', 'Rayan', 'Hendrix', 'Lucca', 'Luka', 'Jarrett', 'Miguelangel', 'Rodger', 'Kevon', 'Jacoby', 'Damion', 'Maximo', 'Robbie', 'Jovanny', 'Trace', 'Gunnar', 'Kieran', 'Cristobal', 'Kris', 'Ellis', 'Matthias', 'Eloy', 'Sarah', 'Donny', 'Donte', 'Ronin', 'Reece', 'Alijah', 'Zayne', 'Jamarion', 'Laverne', 'Gregorio', 'Kellen', 'Nathen', 'Gideon', 'Rosario', 'Erwin', 'Jakub', 'Normand', 'Rey', 'Trevon', 'Stetson', 'Carmelo', 'Rashad', 'Tod', 'Elizabeth', 'Harley', 'Darian', 'Scot', 'Tavon', 'Keven', 'Merlin', 'Nash', 'Deangelo', 'Raiden', 'Jahir', 'Isidro', 'Davian', 'Raekwon', 'Alphonse', 'Reese', 'Abigail', 'Deshawn', 'Ahmad', 'Conrad', 'Truman', 'Kolton', 'Ryland', 'Jamaal', 'Abdiel', 'Aditya', 'Keenan', 'Brycen', 'Thaddeus', 'Austen', 'Leonidas', 'Raphael', 'Jovani', 'Brenton', 'Jasmine', 'Thurman', 'Russel', 'Emory', 'Cornelius', 'Roel', 'Xzavier', 'Jovanni', 'Zev', 'Eldon', 'Deven', 'Kamden', 'Eliseo', 'Franco', 'Duncan', 'Anton', 'Amarion', 'Deron', 'Sage', 'Babyboy', 'Fredy', 'Russ', 'Omarion', 'Ryne', 'Jovany', 'Camilo', 'Stan', 'Cullen', 'Armani', 'Adrien', 'Royal', 'Kane', 'Ishaan', 'Spenser', 'Antwan', 'Stephon', 'Juanpablo', 'Tiffany', 'Garret', 'Jagger', 'Will', 'Nigel', 'Chadwick', 'Casimir', 'Abdirahman', 'Odell', 'Keanu', 'Josh', 'Mortimer', 'Raheem', 'Jordon', 'Nery', 'Monte', 'Jaxton', 'Deacon', 'Reuben', 'Carlo', 'Skylar', 'Jamarcus', 'Robby', 'Jaycob', 'Kristofer', 'Buddy', 'Korbin', 'Arlo', 'Jennifer', 'Rodrick', 'Juwan', 'Latrell', 'Chaz', 'Lawson', 'Mendel', 'Jordy', 'Dirk', 'Finnegan', 'Eason', 'Atlas', 'Eddy', 'Mitch', 'Reagan', 'Clement', 'Jamar', 'Kamari', 'Jarred', 'Lauren', 'Roscoe', 'Jefferson', 'Devan', 'Elton', 'Cortez', 'Leandro', 'Finley', 'Cordero', 'Dov', 'Eliyahu', 'Princeton', 'Avrohom', 'Hassan', 'Dangelo', 'Shamar', 'Gino', 'Yusuf', 'Jaylin', 'Martez', 'Shad', 'Keyshawn', 'Nikhil', 'Yael', 'Harlan', 'Jeffry', 'Frederic', 'Derick', 'Dondre', 'Vance', 'Markus', 'London', 'Arman', 'Marley', 'Van', 'Jaeden', 'Krish', 'Benson', 'Marquise', 'Cristofer', 'Dewayne', 'Gannon', 'Genaro', 'Crew', 'Rashawn', 'Rayden', 'Raylan', 'Jaxen', 'Fredric', 'Beckham', 'Tripp', 'Mckay', 'Deonte', 'Johann', 'Johnpaul', 'Santo', 'Hakeem', 'Federico', 'Bert', 'Flynn', 'Edison', 'Enoch', 'Shulem', 'Jovan', 'Art', 'Isadore', 'Hal', 'Cristiano', 'Urijah', 'Dilan', 'Benicio', 'Kingsley', 'Aydan', 'Syed', 'Nicole', 'Rachel', 'Tyree', 'Maximillian', 'Branson', 'Davin', 'Layton', 'Joan', 'Darien', 'Deion', 'Augustus', 'Dariel', 'Oziel', 'Juancarlos', 'Pranav', 'Danielle', 'Rubin', 'Jerald', 'Wilmer', 'Deegan', 'Teddy', 'Mariano', 'Nathanie', 'Stevie', 'Dorsey', 'Maxim', 'Jaron', 'Coty', 'Damarion', 'Semaj', 'Maria', 'Jamison', 'Domenick', 'Emile', 'Armaan', 'Arnav', 'Mackenzie', 'Everardo', 'Aurelio', 'Cayson', 'Edwardo', 'Charley', 'Geovanni', 'Vincenzo', 'Yadiel', 'Francesco', 'Koby', 'Joziah', 'Jasiah', 'Makai', 'Long', 'Cassius', 'Omari', 'Ferdinand', 'Samir', 'Cleveland', 'Olivia', 'Lanny', 'Sincere', 'Hyrum', 'Christina', 'Lucian', 'Margarito', 'Osiel', 'Kinsler', 'Sydney', 'Slade', 'Lazaro', 'Sal', 'Lipa', 'Hobert', 'Coy', 'Elroy', 'Tatum', 'Katherine', 'Chloe', 'Kyrie', 'Amanda', 'Buford', 'Kymani', 'Kacper', 'Elmo', 'Alphonso', 'Ramses', 'Homero', 'Sherman', 'Reinaldo', 'Yechiel', 'Jonatan', 'Mychal', 'Gustave', 'Paris', 'Zain', 'Markanthony', 'Dimitri', 'Mamadou', 'Apollo', 'Bronson', 'Hamza', 'Samson', 'Madden', 'Tylor', 'Jacquez', 'Garth', 'Giuseppe', 'Stephanie', 'Darion', 'Yurem', 'Antony', 'Rico', 'Rich', 'Dontavious', 'Erin', 'Kannon', 'Isaak', 'Dovid', 'Coleman', 'Monroe', 'Bryon', 'Asa', 'Patricio', 'Arnoldo', 'Alexandra', 'Jessy', 'Jules', 'Alexzander', 'Jerrod', 'Talon', 'Elvin', 'Chace', 'Amos', 'Galen', 'Kenji', 'Rahul', 'Delmar', 'Nakia', 'Abdullah', 'Deon', 'Brice', 'Osbaldo', 'Favian', 'Mauro', 'Tristian', 'Leopoldo', 'Hans', 'Hank', 'Tou', 'Demond', 'Jemal', 'Ladarius', 'Kylan', 'Braiden', 'Darwin', 'Kamron', 'Millard', 'Dax', 'Shaquan', 'Aloysius', 'Tyshawn', 'Westley', 'Marquez', 'Shayne', 'Kasey', 'Usher', 'Ares', 'Killian', 'Maynard', 'Jeshua', 'Vaughn', 'Shia', 'Naftali', 'Zaire', 'Taj', 'Edmond', 'Zechariah', 'Ollie', 'Hoyt', 'Donnell', 'Soren', 'Isac', 'Tyquan', 'Legend', 'Devyn', 'Shon', 'Gerry', 'Ellsworth', 'Naftuli', 'Johnson', 'Haywood', 'Aydin', 'Junius', 'Wiley', 'Lennox', 'Siddharth', 'Odis', 'Zaid', 'Lacy', 'Hussein', 'Nicklas', 'Callen', 'Izayah', 'Jaziel', 'Claud', 'Horacio', 'Cyril', 'Jariel', 'Shemar', 'Rebecca', 'Reyes', 'Denny', 'Dereck', 'Marcelino', 'Najee', 'Mac', 'Hollis', 'Korey', 'Addison', 'Jordi', 'Eleazar', 'Lisandro', 'Dayton', 'Ammon', 'Reymundo', 'Erich', 'Tenzin', 'Mitchel', 'Kristoffer', 'Jerrold', 'Kristoph', 'Refugio', 'Erasmo', 'Samantha', 'Simcha', 'Abdullahi', 'Booker', 'Quadir', 'Kyson', 'Hoover', 'Gus', 'Azael', 'Mervin', 'Yoshio', 'Jorje', 'Jesiah', 'Shirley', 'Brigham', 'Memphis', 'Reyansh', 'Flavio', 'Lavern', 'Rosendo', 'Dantrell', 'Devonta', 'Forest', 'Alden', 'Lyndon', 'Luiz', 'Elisha', 'Al', 'Bentlee', 'Eriberto', 'Marques', 'Alexandre', 'Fidencio', 'Jabari', 'Arsenio', 'Kaysen', 'Ethen', 'Cleo', 'Blaze', 'Aryeh', 'Dequan', 'Denver', 'Luc', 'Delmas', 'Javion', 'Gauge', 'Martell', 'Ever', 'Gavyn', 'Aldair', 'Okey', 'Carey', 'Geovanny', 'Kalel', 'Layne', 'Hiroshi', 'Ayan', 'Akiva', 'Clare', 'Sigmund', 'Furman', 'Claudio', 'Garrison', 'Draven', 'Aidyn', 'Vern', 'Andreas', 'Kwame', 'Imanol', 'Jorden', 'Glynn', 'Adalberto', 'Varun', 'Dashiell', 'Baron', 'Jasen', 'Child', 'Earle', 'Izaac', 'Vivaan', 'Koa', 'Lennon', 'Marcoantonio', 'Gaetano', 'Sumner', 'Barney', 'Demarion', 'Abner', 'Delonte', 'Val', 'Jacky', 'Demario', 'Zavier', 'Kale', 'Wilton', 'Jordyn', 'Tatsuo', 'Boyd', 'Zayn', 'Darron', 'Moe', 'Dillan', 'Naquan', 'Ned', 'Kaylee', 'Kelton', 'Sahil', 'Kermit', 'Abelardo', 'Sullivan', 'Crosby', 'Hagen', 'Tyreek', 'Jaquez', 'Andrea', 'Kyan', 'Jeremias', 'Tracey', 'Ward', 'Brixton', 'Seamus', 'Cedrick', 'Enrico', 'Emmitt', 'Ford', 'Travon', 'Felton', 'Blair', 'Rio', 'Dandre', 'Kaeden', 'Tiger', 'Orval', 'Castiel', 'Yousef', 'Anson', 'Callan', 'Jamey', 'Darrius', 'Tre', 'Michel', 'Mcarthur', 'Rasheed', 'Jamir', 'Herschel', 'Anibal', 'Kinnick', 'Hilario', 'Shea', 'Jencarlos', 'Darrick', 'Rishi', 'Shaya', 'Haden', 'Ean', 'Jaylan', 'Rolland', 'Leobardo', 'Fermin', 'Keon', 'Lucio', 'Keagan', 'Savion', 'Masao', 'Damari', 'Aarush', 'Nunzio', 'Anakin', 'Mayson', 'Westin', 'Norberto', 'Tavares', 'Gorge', 'Tavaris', 'Joesph', 'Sylas', 'Huy', 'Gerson', 'Augustine', 'Buster', 'Jelani', 'Haley', 'Filip', 'Shmiel', 'Lucius', 'Rojelio', 'Gale', 'Quintin', 'Channing', 'Brayton', 'Keshawn', 'Osmar', 'Otha', 'Eder', 'Mary', 'Eusebio', 'Matheus', 'Randell', 'Brennen', 'Trae', 'Paolo', 'Caesar', 'Estill', 'Camren', 'Dhruv', 'Cutter', 'Rayyan', 'Jeramiah', 'Anish', 'Donavan', 'Sunny', 'Hershel', 'Salvator', 'Jedidiah', 'Romario', 'Hershy', 'Anders', 'Trevion', 'Murphy', 'Kanye', 'Jionni', 'Bradyn', 'Cordell', 'Alek', 'Luisangel', 'Norris', 'Nevin', 'Jaleel', 'Lenny', 'Judson', 'Tayshaun', 'Aedan', 'Rhyder', 'Domenic', 'Santana', 'Rahsaan', 'Sebastien', 'Corban', 'Rowdy', 'Kiyoshi', 'Armen', 'Efraim', 'Vladimir', 'Callum', 'Abdul', 'Gianluca', 'Mayer', 'Mustafa', 'Demarco', 'Neyland', 'Vidal', 'Marshawn', 'Rudolfo', 'Nazir', 'Azariah', 'Shoji', 'Worth', 'Levern', 'Jai', 'Antione', 'Dickie', 'Yehoshua', 'Cliff', 'Kaison', 'Kye', 'Jaren', 'Emir', 'Henrik', 'Maxx', 'Kainoa', 'Athan', 'Cletus', 'Jasir', 'Dejon', 'Jadyn', 'Houston', 'Kadin', 'Erubiel', 'Hadi', 'Jaydin', 'Brianna', 'Alyssa', 'Marcello', 'Omer', 'Ikaika', 'Ramel', 'Arron', 'Bently', 'Daron', 'Avi', 'Jerod', 'Shelton', 'Winfred', 'Mendy', 'Ryu', 'Nikko', 'Arley', 'Kamdyn', 'Bo', 'Erica', 'Faustino', 'Fletcher', 'Dionte', 'Boyce', 'Kennedy', 'Reyli', 'Paulo', 'Baruch', 'Bernie', 'Mohamad', 'Kahlil', 'Kong', 'Baldemar', 'Murry', 'Rogers', 'Sandy', 'Bodie', 'Ivory', 'Youssef', 'Kee', 'Jahiem', 'Isabella', 'Keoni', 'Michelle', 'Luigi', 'Marcanthony', 'Jericho', 'Achilles', 'Everette', 'Americo', 'Edson', 'Hiram', 'Jeramy', 'Metro', 'Davi', 'Hezekiah', 'Harper', 'Kiel', 'Brandan', 'Said', 'Noam', 'Tarik', 'Raquan', 'Zeb', 'Broderick', 'Arath', 'Emery', 'Kip', 'Tymir', 'Garrick', 'Anfernee', 'Khalid', 'Jamil', 'Demian', 'Amador', 'Oran', 'Franklyn', 'Porfirio', 'Delano', 'Justyn', 'Aharon', 'Karol', 'Alva', 'Nicky', 'Zack', 'Jerimiah', 'Josef', 'Errol', 'Hideo', 'Tahj', 'Ilan', 'Kennith', 'Nathanial', 'Kyron', 'Merton', 'Danial', 'Tuan', 'Hung', 'Massimo', 'Krew', 'Arya', 'Jedediah', 'Nosson', 'Jakobe', 'Eitan', 'Edmundo', 'Olen', 'Benedict', 'Quintavious', 'Shalom', 'Akash', 'Maxton', 'Anna', 'Julia', 'Melissa', 'Victoria', 'Kekoa', 'Konner', 'Kirby', 'Heyward', 'Davonte', 'Magnus', 'Zeus', 'Neel', 'Franky', 'Isael', 'Gaylon', 'Kole', 'Axton', 'Brando', 'Mateusz', 'Lucien', 'Marquavious', 'Lon', 'Gian', 'Savannah', 'Trinity', 'Harris', 'Kamarion', 'Aydenn', 'Cale', 'Neo', 'Justus', 'Mose', 'Tiago', 'Saverio', 'Eligh', 'Mikel', 'Eliot', 'Alvis', 'Argenis', 'Musa', 'Lonny', 'Thad', 'Guido', 'Ceasar', 'Obed', 'Pinchas', 'Barton', 'Durell', 'Johnatha', 'Aric', 'Geovany', 'Fransisco', 'Jaheem', 'Jarett', 'Yeshua', 'Karim', 'Aayden', 'Merrill', 'Michele', 'Jaydan', 'Octavius', 'Jermiah', 'Alexavier', 'Brandyn', 'Arvid', 'Brentley', 'Sutton', 'Coen', 'Ameer', 'Giovany', 'Ishan', 'Blaise', 'Bayron', 'Kamil', 'Brooklyn', 'Catherine', 'Akira', 'Briggs', 'Damani', 'Rasheen', 'Rayford', 'Moishe', 'Ephraim', 'Esequiel', 'Kenyon', 'Constantine', 'Silvio', 'Brain', 'Daylon', 'Raymon', 'Ayush', 'Lazer', 'Telly', 'Elan', 'Stone', 'Marland', 'Donn', 'Shamel', 'Silvestre', 'Zephyr', 'Merrick', 'Fausto', 'Dedrick', 'Cornell', 'Whitney', 'Derrell', 'Mitsuo', 'Lucious', 'Tad', 'Lyric', 'Darrion', 'Dannie', 'Gayle', 'Burl', 'Jayquan', 'Carrol', 'Laquan', 'Tyrek', 'Natividad', 'Casimer', 'Jael', 'Aven', 'Arnaldo', 'Yovani', 'Laura', 'Dejuan', 'Dimitrios', 'Yash', 'Esai', 'Zavion', 'Ora', 'Durward', 'Bradly', 'Hazel', 'Che', 'Richie', 'Diana', 'Alois', 'Lynwood', 'Luverne', 'Zeke', 'Dash', 'Cairo', 'Delvin', 'Kawika', 'Josemanuel', 'Devean', 'Sameer', 'Seneca', 'Presley', 'Jed', 'Malaki', 'Dominque', 'Dontae', 'Dev', 'Darey', 'Reggie', 'Izaak', 'Manny', 'Jere', 'Minh', 'Ryden', 'Montana', 'Kaleo', 'Jacorey', 'Ignatius', 'Filiberto', 'Cache', 'Yitzchak', 'Yaseen', 'Kentrell', 'Basil', 'Ivy', 'Migel', 'Jalon', 'Lenwood', 'Ellwood', 'Zakary', 'Haiden', 'Dontrell', 'Braedon', 'Lorne', 'Trever', 'Mikael', 'Kenzo', 'Javaris', 'Ambrose', 'Alain', 'Columbus', 'Leif', 'Jerold', 'Anwar', 'Gabino', 'Dillion', 'Kelby', 'Denzil', 'Ulisses', 'Sami', 'Jahmir', 'Elimelech', 'Dock', 'Zahir', 'Hardy', 'Florian', 'Jewel', 'Tobin', 'Curley', 'Mahdi', 'Mccoy', 'Jaquavious', 'Justen', 'Lino', 'Teodoro', 'Kazuo', 'Lenard', 'Robb', 'Takashi', 'Maison', 'Merlyn', 'Brecken', 'Ricki', 'Jet', 'Lars', 'Ulices', 'Dereon', 'Fox', 'Ajay', 'Geraldo', 'Maksim', 'Jullian', 'Kalani', 'Andrei', 'Jaidyn', 'Maxie', 'Javen', 'Gail', 'Ely', 'Caroline', 'Amber', 'Crystal', 'Kiara', 'Megan', 'Reilly', 'Eugenio', 'Fisher', 'Langston', 'Gavriel', 'Abhinav', 'Dee', 'Kace', 'Axl', 'Isabel', 'Uziel', 'Sabastian', 'Rylee', 'Eliazar', 'Renato', 'Harland', 'Lavar', 'Stefano', 'Mayra', 'Valentine', 'Bud', 'Hasan', 'Zaden', 'Truett', 'Korbyn', 'Toshio', 'Stockton', 'Edd', 'Trystan', 'Daylan', 'Jayven', 'Dewitt', 'Kraig', 'Wilford', 'Celestino', 'Jacobo', 'Patryk', 'Hailey', 'Nainoa', 'Haskell', 'Sharif', 'Jerad', 'Raynaldo', 'Jacques', 'Jessi', 'Geary', 'Gaige', 'Garnett', 'Jakari', 'Yonatan', 'Eino', 'Phong', 'Jerel', 'Benzion', 'Quinten', 'Amado', 'Blas', 'Kimberly', 'Cuauhtemoc', 'Aayan', 'Catarino', 'Jeromy', 'Kyree', 'Apolonio', 'Boy', 'Antwon', 'Hakim', 'Creed', 'Shiloh', 'Shepherd', 'Garett', 'Oakley', 'Miller', 'Dajuan', 'Mattias', 'Titan', 'Immanuel', 'Lamarcus', 'Devontae', 'Reef', 'Brayson', 'Grey', 'Deante', 'Yariel', 'Makhi', 'Jayse', 'Corbyn', 'Domenico', 'Sedrick', 'Deontae', 'Kou', 'Shant', 'Willy', 'Austyn', 'Shloime', 'Masen', 'Linus', 'Florentino', 'Gionni', 'Boden', 'Torrey', 'Minoru', 'Daulton', 'Kolten', 'Jennings', 'Noble', 'Hersh', 'Kelsey', 'Nicholaus', 'Florencio', 'Nam', 'Juelz', 'Kainalu', 'Destin', 'Damarcus', 'Jacolby', 'Nikita', 'Artis', 'Bilal', 'Kendell', 'Alexsander', 'Parth', 'Esau', 'Glennon', 'Kohen', 'Isacc', 'Aleksander', 'Vinh', 'Trenten', 'Koen', 'Candelario', 'Connie', 'Aram', 'Wolfgang', 'Amit', 'Om', 'Shyheim', 'Raven', 'Kendra', 'Eliel', 'Viet', 'Kenyatta', 'Sky', 'Binyomin', 'Deanthony', 'Lachlan', 'Tory', 'Kenton', 'Tamir', 'Kramer', 'Deshaun', 'Javian', 'Haruo', 'Rupert', 'Jevon', 'Shlome', 'Danilo', 'Vanessa', 'Fernand', 'Daveon', 'Les', 'Marko', 'Delmer', 'Marlyn', 'Winfield', 'Wes', 'Rosevelt', 'Rayshawn', 'Tai', 'Kalvin', 'Jerardo', 'Sarkis', 'Bertrand', 'Kaimana', 'Kaitlyn', 'Summer', 'Veer', 'Waymon', 'Evin', 'Andrey', 'Iain', 'Kimi', 'Foster', 'Servando', 'Mychael', 'Derik', 'Ryon', 'Rowen', 'Mel', 'Ibn', 'Werner', 'Jameel', 'Avrum', 'Nachman', 'Jomar', 'Rudolf', 'Tyrique', 'Rayburn', 'Khalif', 'Rondal', 'Bijan', 'Rohit', 'Jeremie', 'Kain', 'Nicola', 'Bode', 'Brogan', 'Trayvon', 'Turner', 'Dwain', 'Konnor', 'Lev', 'Zayd', 'Finnley', 'Brantlee', 'Deonta', 'Demetrio', 'Ajani', 'Arther', 'Bianca', 'Takeo', 'Harding', 'Jareth', 'Rigo', 'Epifanio', 'Nahum', 'Carleton', 'Cosmo', 'Shigeru', 'Josias', 'Takeshi', 'Jacobi', 'Michal', 'Dorris', 'Treveon', 'Jaxx', 'Aren', 'Tejas', 'Beverly', 'Geoff', 'Maddux', 'Camryn', 'Burt', 'Norwood', 'Sholom', 'Ahron', 'Macario', 'Carol', 'Camdyn', 'Gennaro', 'Leeroy', 'Pinchus', 'Kaito', 'Burnell', 'Frantz', 'Laron', 'Clemente', 'Chasen', 'Neri', 'Jerrell', 'Kashawn', 'Keola', 'Alvan', 'Amar', 'Ubaldo', 'Roque', 'Zalmen', 'Daylen', 'Kadyn', 'Gil', 'Bernice', 'Yosgart', 'Shaan', 'Yahel', 'Elon', 'Levon', 'Kit', 'Brodrick', 'Gaven', 'Kaidyn', 'Ansel', 'Jewell', 'Mikhail', 'Derian', 'Elam', 'Tye', 'Leigh', 'Wayde', 'Rian', 'Artemio', 'Ibrahima', 'Noa', 'Autumn', 'Kylie', 'Pernell', 'Britton', 'Deondre', 'Arlen', 'Aman', 'Kelley', 'Eliud', 'Dijon', 'Imran', 'Eulalio', 'Juvenal', 'Agapito', 'Brant', 'Nima', 'Yisrael', 'Yerik', 'Ewan', 'Lathan', 'Adair', 'Gentry', 'Kyren', 'Lian', 'Tayshawn', 'Alejandra', 'Jeancarlos', 'Keyon', 'Jade', 'Shayan', 'June', 'Christos', 'Adrain', 'Jarom', 'Kathryn', 'Thor', 'Haven', 'Duy', 'Enmanuel', 'Montavious', 'Cortney', 'Teagan', 'Blayne', 'Anselmo', 'Leyton', 'Jonny', 'Braylin', 'Albaro', 'Pascual', 'Gasper', 'Waldo', 'Tyreke', 'Dylon', 'Narciso', 'Ebony', 'Hilton', 'Margaret', 'Brighton', 'Martavious', 'Demetrios', 'Kishan', 'Ansh', 'Treyton', 'Albin', 'Rashon', 'Rony', 'Krystian', 'Amrom', 'Korver', 'Richardo', 'Kayla', 'Katelyn', 'Milford', 'Bishop', 'Ottis', 'Emmet', 'Codey', 'Ayub', 'Isreal', 'Karas', 'Kendarius', 'Isamu', 'Kunta', 'Jermey', 'Arvin', 'Kayleb', 'Sione', 'Taurean', 'Tyron', 'Mihir', 'Rami', 'Vincente', 'Zayan', 'Mahlon', 'Clovis', 'Kirt', 'Dyllan', 'Ramsey', 'Jeramie', 'Nikolaus', 'Edsel', 'Asael', 'Andrik', 'Lisa', 'Sandro', 'Desean', 'Narek', 'Kiran', 'Elzie', 'Jered', 'Arlie', 'Yahya', 'Lizandro', 'Rollin', 'Khiry', 'Yuvraj', 'Jeancarlo', 'Anay', 'Freeman', 'Stevan', 'Keller', 'Ledger', 'Jasiel', 'Jacinto', 'Sherwin', 'Beaux', 'Campbell', 'Sherwood', 'Torrence', 'Daryle', 'Chevy', 'Adiel', 'Patricia', 'Jameer', 'Bilbo', 'Jayvon', 'Early', 'Boruch', 'Jadarius', 'Alpha', 'Amadou', 'Reino', 'Betty', 'Moussa', 'Wolf', 'Jenna', 'Grace', 'Natalie', 'Javonte', 'Crawford', 'Damir', 'Mckinley', 'Elden', 'Jhon', 'Lemuel', 'Colston', 'Donta', 'Pearl', 'Taquan', 'Salman', 'Palmer', 'Muhammed', 'Brennon', 'Cashton', 'Ysidro', 'Salomon', 'Ocean', 'Anirudh', 'Aksel', 'Cal', 'Ishmael', 'Brenda', 'Abran', 'Rome', 'Leighton', 'Canyon', 'Kael', 'Amin', 'Antoni', 'Tiara', 'Heather', 'Christine', 'Brittney', 'Angela', 'Johathan', 'Cipriano', 'Coltin', 'Verne', 'Darrien', 'Eamon', 'Oskar', 'Mikah', 'Matix', 'Kooper', 'Antonino', 'Duwayne', 'Dagoberto', 'Kolt', 'Sanjay', 'Tayden', 'Waverly', 'Abrahan', 'Diamond', 'West', 'Jefferey', 'Shigeo', 'Kabir', 'Jamell', 'Jaedyn', 'Malcom', 'Gadiel', 'Manav', 'Audie', 'Hipolito', 'Theron', 'Codie', 'General', 'Lindy', 'Carver', 'Nat', 'Jacari', 'Khamari', 'Wally', 'Kay', 'Anastacio', 'Jaymes', 'Skip', 'Cheyne', 'Dameon', 'Geronimo', 'Kevyn', 'Toney', 'Arden', 'Dontavius', 'Rasheem', 'Geovani', 'Gaspar', 'Baltazar', 'Bladimir', 'Rashan', 'Rulon', 'Karan', 'Jory', 'Chet', 'Abiel', 'Lazarus', 'Britt', 'Rodriquez', 'Akil', 'Zuriel', 'Rylen', 'Aston', 'Graysen', 'Jaysen', 'Hillel', 'Alford', 'Tyriq', 'Cassidy', 'Rahiem', 'Juanmanuel', 'Demetri', 'Jayton', 'Timoteo', 'Infantof', 'Braedyn', 'Corde', 'Bee', 'Valente', 'Gildardo', 'Feliciano', 'Dalvin', 'Tadashi', 'Claudie', 'Teng', 'Genesis', 'Tayler', 'Joeangel', 'Teruo', 'Tylan', 'Markel', 'Linda', 'Taven', 'Pierson', 'Newton', 'Keandre', 'Jayvion', 'Donavon', 'Encarnacion', 'Melton', 'Ritchie', 'Erika', 'Edgard', 'Christoper', 'Rocio', 'Alvie', 'Josedejesus', 'Dashaun', 'Travion', 'Johny', 'Marcell', 'Monique', 'Caitlin', 'Durwood', 'Gustav', 'Rosalio', 'Farhan', 'Benuel', 'Lashawn', 'Shakeem', 'Ocie', 'Yasir', 'Szymon', 'Aaryan', 'Hansel', 'Slater', 'Samarth', 'Kiyan', 'Storm', 'Ava', 'Yassin', 'Dayquan', 'Sherrill', 'Khari', 'Anas', 'Cheskel', 'Kamryn', 'Zyaire', 'Cristo', 'Christofer', 'Akhil', 'Shreyas', 'Ryley', 'Gibson', 'Haziel', 'Talen', 'Bracken', 'Dallen', 'Rashard', 'Rockwell', 'Colie', 'Del', 'Jihad', 'Simeon', 'Jahmari', 'Ashwin', 'Shraga', 'Cian', 'Alistair', 'Cartier', 'Stoney', 'Verlyn', 'Kavon', 'Konrad', 'Conrado', 'Colon', 'Randel', 'Christ', 'Jeremey', 'Raleigh', 'Lauro', 'Dionicio', 'Kauan', 'Piotr', 'Cleon', 'Malique', 'Rand', 'Fritz', 'Cordaro', 'Pietro', 'Faris', 'Ezio', 'Atharv', 'Karthik', 'Jahsir', 'Saleem', 'Abdoulaye', 'Jiovanni', 'Ezrah', 'Everest', 'Bronx', 'Kruz', 'Viktor', 'Yasiel', 'Thatcher', 'Michelangelo', 'Alaric', 'Oneal', 'Sahib', 'Osiris', 'Teo', 'Joseangel', 'Nate', 'Walton', 'Yousif', 'Ezzard', 'Yamil', 'Angus', 'Jhonny', 'Fabio', 'Darold', 'Junious', 'Atreyu', 'Beck', 'Adriano', 'Amani', 'Trevin', 'Rudra', 'Parsa', 'Breon', 'Umar', 'Taha', 'Cormac', 'Yossi', 'Jaison', 'Saad', 'Shloimy', 'Chesky', 'Ayman', 'Alicia', 'Chadd', 'Broc', 'Cynthia', 'Reynold', 'Ismail', 'Gaylord', 'Saburo', 'Kao', 'Masato', 'Alfonzo', 'Joshue', 'Earvin', 'Patric', 'Robinson', 'Serjio', 'Gavino', 'Stanford', 'Thanh', 'Kamren', 'Vikram', 'Roan', 'Jeronimo', 'Zahid', 'Anjel', 'Jayro', 'Skye', 'Baylor', 'Drayden', 'Pheng', 'Yeng', 'Wilber', 'Meng', 'Arik', 'Jamarius', 'Avigdor', 'Ladarrius', 'Nicklaus', 'Gatlin', 'Boone', 'Jacen', 'Antonia', 'Kyran', 'Quintavius', 'Estil', 'Casimiro', 'Prentice', 'Jodie', 'Rashaad', 'Konstantinos', 'Allison', 'Sophia', 'Makayla', 'Lillian', 'Zymir', 'Canaan', 'Delfino', 'Benton', 'Apolinar', 'Winford', 'Dayne', 'Shivam', 'Fredi', 'Yves', 'Jarrell', 'Ignazio', 'Gamaliel', 'Young', 'Kiefer', 'Juanjose', 'Rehan', 'Kegan', 'Davante', 'Naim', 'Lyman', 'Erskine', 'Toivo', 'Darrian', 'Jad', 'Ender', 'Remi', 'Rishaan', 'Shaurya', 'Viaan', 'Chelsea', 'Molly', 'Sara', 'Leib', 'Azriel', 'Howell', 'Briar', 'Korben', 'Manning', 'Job', 'Brandt', 'Jaedon', 'Ozzy', 'Cordarius', 'Lannie', 'Stanton', 'Radames', 'Blease', 'Zyon', 'Chadrick', 'Watson', 'Kentavious', 'Taurus', 'Adin', 'Jordin', 'Bryden', 'Susumu', 'Tamotsu', 'Yukio', 'Granville', 'Ashby', 'Tristyn', 'Devaughn', 'Deric', 'Cecilio', 'Pershing', 'Noboru', 'Rashaun', 'Masaichi', 'Juventino', 'Norton', 'Serafin', 'Windell', 'Cris', 'Curtiss', 'Boris', 'Elio', 'Williams', 'Trung', 'Torao', 'Karon', 'Canon', 'Tyrik', 'Naythan', 'Michaelangelo', 'Kavin', 'Akshay', 'Broden', 'Quran', 'Rishabh', 'Hilbert', 'Abbas', 'Damoni', 'Dillard', 'Tigran', 'Romel', 'Chip', 'Aeden', 'Deagan', 'Treyson', 'Brannon', 'Tremaine', 'Fay', 'Bryton', 'Lucky', 'Izak', 'Edan', 'Casper', 'Koda', 'Saquan', 'Alcide', 'Quinlan', 'Maddex', 'Hoyle', 'Sandra', 'Joshuah', 'Lindsay', 'Donato', 'Jancarlos', 'Kalin', 'Zigmund', 'Kalen', 'Jalil', 'Bonifacio', 'Gabrielle', 'Destiny', 'Cheyenne', 'Ulyses', 'Rueben', 'Markell', 'Jermel', 'Corwin', 'Justine', 'Idris', 'Pilar', 'Torrance', 'Raeford', 'Olan', 'Octavious', 'Quantavious', 'Modesto', 'Kashton', 'Librado', 'Bonnie', 'Lois', 'Justo', 'Mahmoud', 'Divine', 'Baylen', 'Rakeem', 'Diesel', 'Kyng', 'Daisy', 'Armon', 'Joseantonio', 'Montel', 'Gearld', 'Cloyd', 'Lindell', 'Nile', 'Kashif', 'Johnmichael', 'Aramis', 'Leopold', 'Kamal', 'Jerrad', 'Jadin', 'Mykel', 'Jahlil', 'Cheng', 'Ezriel', 'Aria', 'Dajon', 'Holt', 'Chauncey', 'Karsen', 'Stryker', 'Olaf', 'Reno', 'Colter', 'Schuyler', 'Orvil', 'Auden', 'Eyan', 'Tyce', 'Barbara', 'Zamir', 'Alexi', 'Braelyn', 'Brook', 'Marchello', 'Tyrel', 'Oracio', 'Jalin', 'Verlon', 'Raj', 'Lindsey', 'Andon', 'Devlin', 'Brysen', 'Harman', 'Treyvon', 'Foy', 'Arash', 'Cuong', 'Torin', 'Rommel', 'Lorenza', 'Vishal', 'Kenya', 'Heber', 'Victoriano', 'Shay', 'Tremayne', 'Natanael', 'Zachry', 'Eros', 'Veronica', 'Wayland', 'Rayquan', 'Ana', 'Jaceon', 'Yida', 'Rahmel', 'Alter', 'Lamarion', 'Tavion', 'Javin', 'Lawerence', 'Alessio', 'Kristen', 'Jacqueline', 'Oren', 'Aahil', 'Adyan', 'Augustin', 'Coleton', 'Wilfrid', 'Dezmond', 'Keelan', 'Ike', 'Kanoa', 'Kedrick', 'Chue', 'Danniel', 'Jowell', 'Micahel', 'Yonathan', 'Finnian', 'Garfield', 'Joao', 'Ezell', 'Masaru', 'Yoshito', 'Pasco', 'Yechezkel', 'Shloma', 'Adnan', 'Jaythan', 'Laith', 'Greysen', 'Maddix', 'Alfonse', 'Ernst', 'Hobart', 'Tavin', 'Dajour', 'Cy', 'Estel', 'Osman', 'Vedant', 'Rolf', 'Ova', 'Colson', 'Kelan', 'Oumar', 'Olivier', 'Seichi', 'Tayson', 'Roshan', 'Blane', 'Baxter', 'Vu', 'Tam', 'Pao', 'Wardell', 'Davonta', 'Montrell', 'Ravi', 'Durrell', 'Bastian', 'Aj', 'Ren', 'Loki', 'Kairo', 'Rock', 'Mylo', 'Lavell', 'Bjorn', 'Arvil', 'Reinhold', 'Yesenia', 'Carsen', 'Zephaniah', 'Renzo', 'Willem', 'Unique', 'Elmore', 'Kalob', 'Payne', 'Leeland', 'Naseem', 'Yusef', 'Aboubacar', 'Ioannis', 'Bohdan', 'Javien', 'Jakobi', 'Dempsey', 'Xavian', 'Antavious', 'Jc', 'Dara', 'Obie', 'Celso', 'Tyrin', 'Eian', 'Elgin', 'Jaylyn', 'Brandin', 'Adyn', 'Gabriela', 'Jaidon', 'Zavian', 'Lonzo', 'Elwin', 'Tsutomu', 'Jeanluc', 'Caeden', 'Auston', 'Jasson', 'Omid', 'Gray', 'Vang', 'Nancy', 'Nader', 'Kylen', 'Jarell', 'Prentiss', 'Tahir', 'Ahmir', 'Terell', 'Ludwig', 'Biagio', 'Douglass', 'Nafis', 'Harlem', 'Phineas', 'Lochlan', 'Hermon', 'Wilder', 'Aniello', 'Attilio', 'Shiv', 'Montgomery', 'Bowie', 'Aries', 'Itzae', 'Isa', 'Huxley', 'Elwyn', 'Advik', 'Mahamadou', 'Grayden', 'Landin', 'Decker', 'Dakotah', 'Ella', 'Md', 'Shayaan', 'Isidor', 'Joahan', 'Tillman', 'Jafet', 'Panagiotis', 'Jajuan', 'Cristhian', 'Demetric', 'Zaylen', 'Kacen', 'Sloan', 'Shedrick', 'Denilson', 'Buck', 'Dyland', 'Aris', 'Demonte', 'Telvin', 'Raynard', 'Quantavius', 'Neftali', 'Alma', 'Kadarius', 'Philippe', 'Laurel', 'Vadhir', 'Juandiego', 'Alekzander', 'Napoleon', 'Fabrizio', 'Abisai', 'Yasin', 'Kamran', 'Ole', 'Nicolai', 'Erling', 'Jathan', 'Zen', 'Shiven', 'Keshaun', 'Nikola', 'Loy', 'Usman', 'Concepcion', 'Verlin', 'Dedric', 'Derwin', 'Graig', 'Serge', 'Merritt', 'Kervin', 'Maleek', 'Baldomero', 'Germaine', 'Hampton', 'Shan', 'Alvino', 'Davy', 'Arlington', 'Brandy', 'Timmie', 'Andrae', 'Terrion', 'Quang', 'Jeb', 'Clem', 'Judd', 'Severo', 'Woody', 'Toan', 'Alonza', 'Gardner', 'Delton', 'Vinny', 'Vilas', 'Welton', 'Sabian', 'Dell', 'Randolf', 'Tyren', 'Glenwood', 'Antwain', 'Savon', 'Lesley', 'Rashid', 'Tavian', 'Marvens', 'Aleksandr', 'Vivek', 'Maximino', 'Pavel', 'Renee', 'Charly', 'Donell', 'Shariff', 'Ennis', 'Menashe', 'Ygnacio', 'Hoke', 'Lebron', 'Hillard', 'Xavion', 'Nicolaus', 'Kemari', 'Sammuel', 'Jessiah', 'Virgle', 'Niklas', 'Allante', 'Keenen', 'Albino', 'Rivaldo', 'Jospeh', 'Broadus', 'Trequan', 'Finis', 'Sabas', 'Abdoul', 'Tyronne', 'Tyreik', 'Tyriek', 'Linton', 'Jashawn', 'Ivey', 'Janiel', 'Jayme', 'Lamarr', 'Tiernan', 'Meilech', 'Fitzgerald', 'Jonnathan', 'Tashawn', 'Verl', 'Nichoals', 'Urban', 'Marquan', 'Montez', 'Akshaj', 'Syrus', 'Nehemias', 'Nova', 'Makaio', 'Joselito', 'Armin', 'Monica', 'Natasha', 'Leonce', 'Corby', 'Doris', 'Chancellor', 'Yonah', 'Gaston', 'Alston', 'Tyreese', 'Gaither', 'Donna', 'Graeme', 'Frances', 'Earlie', 'Oral', 'Ruby', 'Krishna', 'Berkley', 'Viraj', 'Jame', 'Judge', 'Denim', 'Guilherme', 'Salim', 'Rondell', 'Marek', 'Zac', 'Seven', 'Stellan', 'Calder', 'Eithan', 'Eliam', 'Gareth', 'Auther', 'Theodis', 'Denzell', 'Octave', 'Destry', 'Bartholomew', 'Rajiv', 'Jaxxon', 'Maxson', 'Adler', 'Tyran', 'Carnell', 'Alben', 'Saif', 'Merwin', 'Binyamin', 'Hayward', 'Arav', 'Berry', 'Daunte', 'Arvo', 'Gerhard', 'Selmer', 'Davie', 'Courtland', 'Athanasios', 'Ori', 'Aadi', 'Kamar', 'Jeremih', 'Jayvian', 'Doyne', 'Macarthur', 'Elza', 'Harden', 'Soham', 'Alder', 'Josemaria', 'Iziah', 'Jin', 'Woodie', 'Alfie', 'Stefon', 'Oswald', 'Talmage', 'Leander', 'Jancarlo', 'Sasha', 'Lorin', 'Roby', 'Juanmiguel', 'Johannes', 'Allie', 'Demetris', 'Sharod', 'Mynor', 'Lex', 'Tito', 'Domonique', 'Seferino', 'Jourdan', 'Marcial', 'Herminio', 'Mikal', 'Alegandro', 'Makana', 'Bb', 'Jarret', 'Jemel', 'Kareen', 'Sierra', 'Michale', 'Jalyn', 'Meredith', 'Gracie', 'Dawud', 'Raylon', 'Avan', 'Dayshawn', 'Livan', 'Kendal', 'Otho', 'Dung', 'Reuven', 'Karmelo', 'Myer', 'Tadao', 'Bentzion', 'Tex', 'Jamin', 'Clois', 'Sadao', 'Tetsuo', 'Izrael', 'Avion', 'Katsumi', 'Gerrit', 'Jamauri', 'Kunal', 'Nickolaus', 'Hoang', 'Bernabe', 'Khristian', 'Arne', 'Javeon', 'Vasilios', 'Noach', 'Ruger', 'Kutter', 'Kyden', 'Marshal', 'Jaelon', 'Raffi', 'Rito', 'Parrish', 'Duvid', 'Jamario', 'Verle', 'Harmon', 'Thai', 'Claire', 'Daiquan', 'Didier', 'Jonnie', 'Arlan', 'Taggart', 'Henri', 'Rogan', 'Woodford', 'Maceo', 'Nyjah', 'Smith', 'Syncere', 'Ballard', 'Kenichi', 'Khaled', 'Dwaine', 'Mathieu', 'Ousmane', 'Emmit', 'Aayush', 'Elyas', 'Taysom', 'Azaiah', 'Axle', 'Ander', 'Azaan', 'Vic', 'Terrel', 'Alen', 'Fabricio', 'Yeshaya', 'Greggory', 'Derrik', 'Esgar', 'Selwyn', 'Binh', 'Tarun', 'Quoc', 'Corry', 'Wylie', 'Jadan', 'Aamir', 'Barron', 'Ciaran', 'Melville', 'Bronislaus', 'Fong', 'Hakop', 'Jashua', 'Stanislaus', 'Keion', 'Timmothy', 'Kenan', 'Banks', 'Ammar', 'Maxfield', 'Tyre', 'Chistian', 'Son', 'Shaka', 'Jahmal', 'Jerell', 'Beckam', 'Zakariya', 'Jayshawn', 'Orvel', 'Yona', 'Derrek', 'Warner', 'Rollie', 'Adelbert', 'Von', 'Kathleen', 'April', 'Nikolaos', 'Alika', 'Barrington', 'Inez', 'Len', 'Arsh', 'Elyjah', 'Eshaan', 'Shayden', 'Jaykob', 'Raziel', 'Makoa', 'Cornelio', 'Rufino', 'Leamon', 'Terrill', 'Hai', 'Jonerik', 'Hamilton', 'Lindbergh', 'Enos', 'Sabino', 'Ara', 'Raudel', 'Jones', 'Cedar', 'Yohan', 'Janet', 'Archibald', 'Boaz', 'Cleotha', 'Dontez', 'Eldridge', 'Abhay', 'Butch', 'Jayvien', 'Rowland', 'Kimo', 'Gurney', 'Virgilio', 'Alfonza', 'Perley', 'Silverio', 'Amilcar', 'Kapena', 'Issak', 'Josemiguel', 'Mikey', 'Camille', 'Gershon', 'Mehki', 'Carsten', 'Lavelle', 'Jamere', 'Natale', 'Elya', 'Antwone', 'Pedrohenrique', 'Kyjuan', 'Shakim', 'Evaristo', 'Lionell', 'Helen', 'Aariz', 'Paige', 'Jaquavius', 'Adolphus', 'Faith', 'Breanna', 'Martavius', 'Armondo', 'Yobani', 'Missael', 'Marcellus', 'Rishab', 'Jaxsen', 'Jahleel', 'Bernell', 'Woodroe', 'Breck', 'Paden', 'Trumaine', 'Rogerio', 'Cleve', 'Ameen', 'Jermain', 'Shakir', 'Berl', 'Conley', 'Vinson', 'Andru', 'Andrue', 'Suraj', 'Ruvim', 'Rodriguez', 'Benji', 'Kylon', 'Matheo', 'Kellin', 'Karsyn', 'Izan', 'Caysen', 'Caison', 'Witten', 'Issa', 'Audrey', 'Sekou', 'Januel', 'Christpher', 'Octaviano', 'Jereme', 'Basilio', 'Kaine', 'Jayvyn', 'Vishnu', 'Umberto', 'Keondre', 'Delroy', 'Herve', 'Rakim', 'Denton', 'Donavin', 'Elder', 'Ger', 'Jazmin', 'Schneider', 'Ethyn', 'Davien', 'Cross', 'Reginal', 'Maksymilian', 'Rahim', 'Ridge', 'Ved', 'Bartosz', 'Kaye', 'Quamir', 'Jasmin', 'Diante', 'Codi', 'Khamani', 'Juliocesar', 'Lydell', 'Dakari', 'Eluzer', 'Daniyal', 'Isidoro', 'Yousuf', 'Rider', 'Winthrop', 'Diogo', 'Kejuan', 'Micaiah', 'Ransom', 'Rolla', 'Leibish', 'Ilyas', 'Arham', 'Adham', 'Abdulrahman', 'Lateef', 'Rahmir', 'Kollin', 'Jamaine', 'Khary', 'De', 'Jabbar', 'Hardin', 'Deryl', 'Yanky', 'Aviel', 'Boubacar', 'Eshan', 'Hanley', 'Hussain', 'Tylon', 'Leldon', 'Raoul', 'Braheem', 'Kaseem', 'Tyshaun', 'Rashaan', 'Kordell', 'Anil', 'Devion', 'Mervyn', 'Shaquil', 'Shaquill', 'Shaul', 'Musab', 'Muad', 'Tomasz', 'Madeline', 'Delante', 'Jahari', 'Leah', 'Tamika', 'Britney', 'Jeriel', 'Yidel', 'Jarad', 'Oneil', 'Fransico', 'Shamir', 'Carmello', 'Abdulahi', 'Shneur', 'Yehudah', 'Brown', 'Sylvan', 'Dontay', 'French', 'Griffen', 'Faisal', 'Dru', 'Demitri', 'Faron', 'Deloy', 'Juston', 'Charleston', 'Farrell', 'Tab', 'Donaciano', 'Candido', 'Joyce', 'Marquel', 'Lamonte', 'Raheen', 'Dashon', 'Hieu', 'Tyus', 'Ciro', 'Naeem', 'Rush', 'Keifer', 'Christion', 'Bladen', 'Kobie', 'Darell', 'Mouhamed', 'Jia', 'Shepard', 'Price', 'Kasyn', 'Truitt', 'Jenson', 'Aizen', 'Markeith', 'Braylan', 'Jonmichael', 'Damond', 'Jaycion', 'Platon', 'Amaury', 'Amaan', 'Daven', 'Tobey', 'Hymen', 'Altariq', 'Jacory', 'Ashtin', 'Domonic', 'Demari', 'Denise', 'Abimael', 'Izaya', 'Jovon', 'Harout', 'Caelan', 'Donal', 'Martel', 'Jaskaran', 'Alante', 'Bradon', 'Deborah', 'Harrell', 'Kaipo', 'Klayton', 'Danthony', 'Justino', 'Kamuela', 'Barrie', 'Argelis', 'Dolores', 'Jahaziel', 'Iram', 'Adian', 'Rance', 'Karsten', 'Christain', 'Jamarian', 'Yee', 'Adriana', 'Jamichael', 'Waino', 'Anh', 'Casmer', 'Ronnell', 'Tong', 'Vicent', 'Jarius', 'Tiburcio', 'Burdette', 'Amadeo', 'Kevan', 'Arlyn', 'Derald', 'Waleed', 'Jabez', 'Khoa', 'Neville', 'Susan', 'Leandre', 'Jorgeluis', 'Angelica', 'Regan', 'Froylan', 'Tevita', 'Sagar', 'Drayton', 'Zade', 'Karriem', 'Townes', 'Ram', 'Jaceyon', 'Keng', 'Isao', 'Unkown', 'Vivian', 'Mamoru', 'Dyllon', 'Hagop', 'Masami', 'Shoichi', 'Landan', 'Cadence', 'Yanixan', 'Xzavion', 'Javan', 'Avian', 'Cadyn', 'Collier', 'Clarance', 'Karen', 'Christy', 'Toriano', 'Diallo', 'Mateus', 'Caio', 'Larue', 'Gilmer', 'Rhyan', 'Elijiah', 'Curren', 'Souleymane', 'Deklan', 'Zakaria', 'Hayk', 'Ric', 'Briley', 'Oval', 'Lovell', 'Daryn', 'Franz', 'Spurgeon', 'Giacomo', 'Orrin', 'Vester', 'Taran', 'Salem', 'Naveen', 'Linkin', 'Kallen', 'Kongmeng', 'Patrice', 'Bibb', 'Arjan', 'Fateh', 'Clive', 'Pharaoh', 'Subhan', 'Rayaan', 'Zebulon', 'Webster', 'Raghav', 'Zakai', 'Ekam', 'Caspian', 'Atom', 'Athen', 'Esdras', 'Vihan', 'Ronav', 'Arrow', 'Izek', 'Gaines', 'Trajan', 'Onofrio', 'Romello', 'Ramone', 'Symir', 'Kanyon', 'Shomari', 'Christo', 'Anthoney', 'Giovonni', 'Gurshan', 'Nathon', 'Zach', 'Jhonatan', 'Shakur', 'Favio', 'Imani', 'Asad', 'Brien', 'Aureliano', 'Fischer', 'Yadier', 'Marino', 'Kimball', 'Saleh', 'Greco', 'Helmer', 'Sai', 'Khai', 'Marius', 'Joy', 'Amauri', 'Tegan', 'Darl', 'Cosimo', 'Armond', 'Yecheskel', 'Natan', 'Shabazz', 'Devine', 'Fabrice', 'Tarek', 'Renaldo', 'Jarrel', 'Gamal', 'Rajesh', 'Lavon', 'Ahnaf', 'Cono', 'Gaspare', 'Chas', 'Jaspreet', 'Tevon', 'Kush', 'Nuchem', 'Jostin', 'Wm', 'Darnel', 'Thurston', 'Maliek', 'Shakeel', 'Coolidge', 'Shaheed', 'Anastasios', 'Wesson', 'Humza', 'Kofi', 'Jamelle', 'Davey', 'Llewellyn', 'Nashawn', 'Odie', 'Jun', 'Jahmere', 'Bienvenido', 'Safwan', 'Mordche', 'Demarius', 'Cillian', 'Alexandros', 'Nochum', 'Shareef', 'Pawel', 'Theadore', 'Dorothy', 'Geno', 'Haris', 'Dayvon', 'Lemarcus', 'Rayvon', 'Laird', 'Zayvion', 'Dennie', 'Dwane', 'Orvis', 'Chalmer', 'Adil', 'Zamari', 'Kodi', 'Braxtyn', 'Fahim', 'Merl', 'Name', 'Aaiden', 'Dyson', 'Westyn', 'Wells', 'Niles', 'Nabil', 'Kaelan', 'Dmitri', 'Demitrius', 'Arlis', 'Reco', 'Glendon', 'Abhishek', 'Jammie', 'Grabiel', 'Jerson', 'Gerhardt', 'Kyrin', 'Kipton', 'Bear', 'Jaciel', 'Dakoda', 'Kaelin', 'Keilan', 'Brendyn', 'Fortino', 'Diondre', 'Arin', 'Cleophus', 'Dimas', 'Caine', 'Jakoby', 'Hagan', 'Layden', 'Calen', 'Nils', 'Cisco', 'Jerrick', 'Gevork', 'Mckenzie', 'Justis', 'Coltyn', 'Brazos', 'Jaycen', 'Kemauri', 'Tyrus', 'Zaidyn', 'Lenin', 'Karlos', 'Shrey', 'Edric', 'Tino', 'Macklin', 'Nevan', 'Lawrance', 'Arno', 'Irby', 'Namir', 'Chayse', 'Ronit', 'Clemens', 'Giorgio', 'Khriz', 'Khang', 'Zidane', 'Nomar', 'Glade', 'Doyce', 'Kaya', 'Surya', 'Jaelen', 'Vernell', 'Issiah', 'Henderson', 'Jessejames', 'Gaylen', 'Aldahir', 'An', 'Asencion', 'Garner', 'Treston', 'Evans', 'Salome', 'Cyle', 'Sang', 'Isaih', 'Kirkland', 'Loyal', 'Jonpaul', 'Cindy', 'Bao', 'Laurie', 'Monico', 'Kiptyn', 'Toribio', 'Cresencio', 'Ruperto', 'Dat', 'Rustin', 'Kendric', 'Miquel', 'Hasani', 'Caron', 'Jarron', 'Enrigue', 'Evelyn', 'Paulino', 'Eligio', 'Melchor', 'Deshon', 'Johndavid', 'Cliffton', 'Ovidio', 'Jacorian', 'Laken', 'Aedyn', 'Ichiro', 'Derion', 'Sharon', 'Yasuo', 'Masayuki', 'Andrez', 'Dustyn', 'Toua', 'Jossue', 'Zakkary', 'Bernardino', 'Deward', 'Joanthan', 'Sandeep', 'Hercules', 'Claudia', 'Sampson', 'Jacobe', 'Hulon', 'Ventura', 'Blade', 'Jayzen', 'Jarren', 'Nakoa', 'Chan', 'Jerrel', 'Isamar', 'Artie', 'Amy', 'Meghan', 'Rockey', 'Sixto', 'Ascencion', 'Damonte', 'Golden', 'Bubba', 'Randle', 'Adelard', 'Rumaldo', 'Nieves', 'Marshaun', 'Kavion', 'Mikolaj', 'Brees', 'Gayland', 'Herb', 'Quenton', 'Flint', 'Lennie', 'Tramaine', 'Nadir', 'Timur', 'Keshav', 'Malek', 'Ozzie', 'Dresden', 'Eliah', 'Benaiah', 'Muhsin', 'Walt', 'Damen', 'Enoc', 'Giancarlos', 'Darsh', 'Maximilliano', 'Yaniel', 'Jeevan', 'Malakhi', 'Viggo', 'Karlo', 'Yosgar', 'Xavior', 'Frazier', 'Orin', 'Payson', 'Tonatiuh', 'Amando', 'Angad', 'Gibran', 'Eben', 'Deaundre', 'Rajon', 'Anand', 'Andree', 'Dany', 'Kayvon', 'Joell', 'Jahsiah', 'Rosaire', 'Kc', 'Page', 'Salvadore', 'Arjen', 'Torey', 'Manraj', 'Lyam', 'Mazen', 'Autry', 'Coopar', 'Ranveer', 'Santhiago', 'Ronen', 'Remmy', 'Kamauri', 'Andra', 'Sohan', 'Cayetano', 'Jarrad', 'Fortunato', 'Magdaleno', 'Dorman', 'Cesario', 'Doroteo', 'Roddy', 'Matilde', 'Lafayette', 'Edelmiro', 'Higinio', 'Yancy', 'Zvi', 'Pascal', 'Timm', 'Dickey', 'Spiros', 'Georgios', 'Jarid', 'Johnatho', 'Nachum', 'Efrem', 'Stafford', 'Pajtim', 'Amelia', 'Jada', 'Lily', 'Lydia', 'Sherrod', 'Stedman', 'Ardis', 'Levy', 'Ulysse', 'Zalman', 'Marquette', 'Gabe', 'Blaize', 'Ashanti', 'Shaheem', 'Hervey', 'Abbott', 'Boleslaw', 'Tyshon', 'Kimani', 'Beecher', 'Diquan', 'Eulogio', 'Arvel', 'Kennth', 'Benigno', 'Luz', 'Dionisio', 'Eustacio', 'Trino', 'Eldred', 'Primitivo', 'Perfecto', 'Delma', 'Cosme', 'Milburn', 'Shameek', 'Quayshaun', 'Evert', 'Green', 'Brylan', 'Crit', 'Haskel', 'Ancil', 'Rayhan', 'Rose', 'Gianfranco', 'Matan', 'Derin', 'Artem', 'Abhiram', 'Yovanni', 'Stevenson', 'Crue', 'Krue', 'Jethro', 'Jakai', 'Mattix', 'Daxon', 'Dallan', 'Murl', 'Harsh', 'Uzziel', 'Kemarion', 'Jashaun', 'Rodman', 'Elie', 'Desi', 'Malikai', 'Angello', 'Amogh', 'Advaith', 'Adryan', 'Nazareth', 'Adolf', 'Bosco', 'Arshan', 'Abdulaziz', 'Theseus', 'Riaan', 'Reza', 'Radley', 'Mars', 'Kirin', 'Kiaan', 'Evander', 'Indiana', 'Hanson', 'Viliami', 'Jaydenn', 'Ilya', 'Draco', 'Riyan', 'Onyx', 'Xian', 'Khristopher', 'Ayrton', 'Aurelius', 'Crosley', 'Obadiah', 'Nihal', 'Rithvik', 'Constantino', 'Jeyden', 'Jaycee', 'Bane', 'Aakash', 'Aniket', 'Mathis', 'Maximos', 'Kohl', 'Fuquan', 'Rahman', 'Aziel', 'Alexys', 'Iverson', 'Marck', 'Criss', 'Arsen', 'Angelgabriel', 'Ronak', 'Selvin', 'Ibraheem', 'Yordi', 'Taylen', 'Javari', 'Jairus', 'Hamzah', 'Sacha', 'Nayan', 'Marciano', 'Aneesh', 'Manfred', 'Adal', 'Bernhard', 'Jeovanny', 'Satvik', 'Nicolo', 'Julious', 'Weyman', 'Roswell', 'Brevin', 'Amedeo', 'Deforest', 'Barnett', 'Braydin', 'Italo', 'Adrienne', 'Anne', 'Jr', 'Krystal', 'Brion', 'Wilberto', 'Detrick', 'Bucky', 'Kristin', 'Christohper', 'Laddie', 'Creighton', 'Gust', 'Darby', 'Shanon', 'Darious', 'Josua', 'Thang', 'Demarkus', 'Chistopher', 'Ehren', 'Marlo', 'Matas', 'Augusto', 'Diamonte', 'Maciej', 'Jamon', 'Marcin', 'Valdemar', 'Nickey', 'Niam', 'Ambrosio', 'Crispin', 'Lukasz', 'Yazan', 'Romell', 'Darryle', 'Renard', 'Ewald', 'Quint', 'Andrzej', 'Vittorio', 'Keonte', 'Lavonte', 'Cordale', 'Darvin', 'Marvell', 'Krzysztof', 'Corben', 'Keylan', 'Haydon', 'Ociel', 'Zeth', 'Ahmari', 'Texas', 'Yutaka', 'Isami', 'Adarius', 'Juaquin', 'Jaydn', 'Jaidan', 'Exavier', 'Steffan', 'Vahe', 'Crystian', 'Edilberto', 'Jaquavion', 'Xavien', 'Delvon', 'Otoniel', 'Demontae', 'Collins', 'Keoki', 'Nolberto', 'Leng', 'Karina', 'Grigor', 'Isrrael', 'Kaoru', 'Hisao', 'Masayoshi', 'Satoru', 'Satoshi', 'Nobuo', 'Michaelanthony', 'Lucero', 'Jocelyn', 'Yovany', 'Joangel', 'Jaelyn', 'Caedmon', 'Granger', 'Heston', 'Rhodes', 'Kanon', 'Judith', 'Montavius', 'Antron', 'Xaiden', 'Burhanuddin', 'Stratton', 'Kadence', 'Jhett', 'Jacion', 'Aiyden', 'Journey', 'Jaziah', 'Thien', 'Travious', 'Carsyn', 'Quindarius', 'Masyn', 'Jalan', 'Jaelin', 'Dorien', 'Aarron', 'Dmarcus', 'Ramin', 'Christan', 'Blain', 'Rosa', 'Christoher', 'Vadim', 'Martha', 'Osher', 'Laakea', 'Chayton', 'Keahi', 'Johnatan', 'Juanantonio', 'Kahiau', 'Sheridan', 'Samual', 'Luisalberto', 'Zacharias', 'Phi', 'Marquice', 'Chong', 'Harpreet', 'Fue', 'Derrion', 'Eber', 'Kevion', 'Beryl', 'Gavan', 'Liliana', 'Fernie', 'Sulo', 'Jayren', 'Lior', 'Ruth', 'Carlie', 'Thierno', 'Davontae', 'Jamier', 'Arye', 'Kiernan', 'Hanad', 'Huston', 'Winson', 'Hobson', 'Yates', 'Kaua', 'Einar', 'Berish', 'Annie', 'Mahir', 'Amr', 'Sabir', 'Ewell', 'Orland', 'Dujuan', 'Harvie', 'Dahmir', 'Hosea', 'Haneef', 'Wei', 'Nello', 'Fishel', 'Amere', 'Rafi', 'Charlton', 'Colden', 'Hughes', 'Laurier', 'Blong', 'Shimshon', 'Jahmel', 'Steward', 'Milbert', 'Buel', 'Hallie', 'Comer', 'Tafari', 'Iver', 'Evangelos', 'Jaquarius', 'Azan', 'Braedan', 'Jadarrius', 'Vernie', 'Andi', 'Darry', 'Jawad', 'Uri', 'Kennard', 'Yishai', 'Kijana', 'Brekken', 'Rajan', 'Stevens', 'Sunil', 'Siddhant', 'Sir', 'Sire', 'Jansen', 'Theodor', 'Kaedyn', 'Tymere', 'Zyair', 'Tron', 'Sanchez', 'Amaru', 'Anastasio', 'Agastya', 'Hawk', 'Honor', 'Sotero', 'Saeed', 'Ziggy', 'Conan', 'Arie', 'Gloria', 'Onesimo', 'Wellington', 'Alexei', 'Tavarus', 'Cayleb', 'Arion', 'Amadeus', 'Bryer', 'Jeter', 'Merced', 'Kaylon', 'Lakendrick', 'Nolen', 'Niccolo', 'Halston', 'Deontre', 'Ash', 'Arush', 'Artur', 'Bidwell', 'Tomie', 'Author', 'Izik', 'Jeriah', 'Edwyn', 'Zhi', 'Gilman', 'Jawan', 'Bryar', 'Giles', 'Talha', 'Gill', 'Abelino', 'Kwasi', 'Stavros', 'Juanita', 'Tri', 'Consuelo', 'Khambrel', 'Peterson', 'Brantly', 'Brently', 'Vitaliy', 'Hashim', 'Rain', 'Quintus', 'Matthieu', 'Kayne', 'Icker', 'Valen', 'Nels', 'Josephus', 'Nasario', 'Romulo', 'Kaisen', 'Sulaiman', 'Selim', 'Mahad', 'Steele', 'Stryder', 'Cristina', 'Thornton', 'Girard', 'Prudencio', 'Ethaniel', 'Laurent', 'Jayvin', 'Jayveon', 'Eladio', 'Ellison', 'Caius', 'Christiano', 'Navid', 'Gerold', 'Sven', 'Advay', 'Cabell', 'Marcio', 'Luisalfredo', 'Ryatt', 'Elijio', 'Pax', 'Neev', 'Mehtab', 'Eluterio', 'Tahmir', 'Davit', 'Eliott', 'Keane', 'Kysen', 'Rafe', 'Legacy', 'Erie', 'Orlin', 'Dawn', 'Calum', 'Adithya', 'Adarsh', 'Ulysee', 'Thurmond', 'Christen', 'Thayne', 'Sriram', 'Yoav', 'Lawton', 'Kemar', 'Duston', 'Jatavious', 'Luisfernando', 'Maxime', 'Rithik', 'Dior', 'Phuong', 'Roni', 'Manu', 'Esteven', 'Hazen', 'Farris', 'Leverne', 'Ryen', 'Tanay', 'Seaborn', 'Cicero', 'Gianmarco', 'Isak', 'Lige', 'Burke', 'Authur', 'Javarius', 'Jeromie', 'Jerred', 'Silvano', 'Keyan', 'Briant', 'Arun', 'Jeremi', 'Decarlos', 'Jeanpierre', 'Haydn', 'Ab', 'Anmol', 'Shaye', 'Nana', 'Mateen', 'Maurisio', 'Nitin', 'Dustan', 'Srikar', 'Arlin', 'Burnett', 'Johnathen', 'Wyman', 'Aleksandar', 'Agustine', 'Ronney', 'Marisol', 'Dmarion', 'Keir', 'Demetrice', 'Jawon', 'Ricci', 'Javontae', 'Armoni', 'Alto', 'Dawid', 'Zakir', 'Jarek', 'Lary', 'Dez', 'Kaydon', 'Henley', 'Adonai', 'Zahmir', 'Youssouf', 'Oisin', 'Deniz', 'Antonios', 'Netanel', 'Shlok', 'Ranger', 'Uzziah', 'Eryk', 'Sid', 'Andersen', 'Daylin', 'Naftoli', 'Lyn', 'Orvin', 'Kesean', 'Hanif', 'Adael', 'Maury', 'Ronn', 'Carlyle', 'Ankur', 'Takumi', 'Piero', 'Jeanpaul', 'Hoa', 'Jacarri', 'Jakhi', 'Zyion', 'Jeovany', 'Eoin', 'Etienne', 'Amrit', 'Dang', 'Juliano', 'Blakely', 'Tauno', 'Edin', 'Dmitriy', 'Lambert', 'Roderic', 'Felice', 'Zaki', 'Debra', 'Teegan', 'Tosh', 'Nicholai', 'Erickson', 'Atharva', 'Aaditya', 'Anuj', 'Diane', 'Sachin', 'Elazar', 'Torian', 'Tan', 'Cristoval', 'Jonathen', 'Kobi', 'Yuki', 'Jacori', 'Eduard', 'Keron', 'Tysean', 'Deshun', 'Hewitt', 'Kaulana', 'Jaydyn', 'Sebastia', 'Shamell', 'Trysten', 'Treshawn', 'Samer', 'Burnice', 'Da', 'Parris', 'Royer', 'Tien', 'Tj', 'Andranik', 'Nino', 'Luisenrique', 'Andrick', 'Graydon', 'Pookela', 'Nevaeh', 'Zoe', 'Hanna', 'Joniel', 'Jamarious', 'Hurley', 'Avante', 'Iban', 'Isaiha', 'Chee', 'Kealii', 'Irbin', 'Maynor', 'Wendy', 'Germain', 'Shamus', 'Zygmunt', 'Garnet', 'Lopaka', 'Damar', 'Ramy', 'Everton', 'Raylen', 'Tryston', 'Kullen', 'Therman', 'Khaliq', 'Alon', 'Arch', 'Tylen', 'Kalan', 'Zacharia', 'Dalen', 'Bedford', 'Lou', 'Tsuneo', 'Kalub', 'Dadrian', 'Jiro', 'Fahad', 'Quashawn', 'Hisashi', 'Fumio', 'Carlito', 'Ewing', 'Zarek', 'Leron', 'Cardell', 'Westen', 'Hogan', 'Payden', 'Chazz', 'Jarryd', 'Sedric', 'Homar', 'Tylar', 'Keone', 'Dasean', 'Lake', 'Joeanthony', 'Haroon', 'Adonys', 'Grayling', 'Braelon', 'Loras', 'Jontavious', 'Nesanel', 'Carlisle', 'Camillo', 'Mandeep', 'Yang', 'Blayden', 'Niall', 'Evelio', 'Zaragoza', 'Shlomie', 'Percell', 'Baylee', 'Garold', 'Eriq', 'Ozell', 'Benjiman', 'Wayman', 'Saturnino', 'Moody', 'Deandra', 'Estanislado', 'Curvin', 'Demonta', 'Crimson', 'Scout', 'Daequan', 'Izael', 'Trine', 'Demontre', 'Rexford', 'Fenix', 'Raheim', 'Rivers', 'Cobe', 'Jeron', 'Yanuel', 'Naftula', 'Dwan', 'Kanai', 'Nicco', 'Kaeson', 'Shadman', 'Cobi', 'Raequan', 'Shae', 'Osama', 'Ernan', 'Dennys', 'Aquil', 'Tierra', 'Sabrina', 'Mia', 'Melanie', 'Marissa', 'Carolyn', 'Arielle', 'Zaine', 'Macen', 'Shahin', 'Casyn', 'Osmin', 'Alphonsus', 'Carrington', 'Chayce', 'Opal', 'Taylon', 'Koy', 'Ebenezer', 'Amarii', 'Keshun', 'Kolin', 'Aspen', 'Cort', 'Zaylon', 'Zaedyn', 'Zaydyn', 'Tuff', 'Holton', 'Ashtyn', 'Lathen', 'Hershell', 'Jerre', 'Tsugio', 'Josealberto', 'Adien', 'Acen', 'Maurilio', 'Ashten', 'Wataru', 'Keontae', 'Donaven', 'Javonta', 'Jacobie', 'Peng', 'Ector', 'Ankit', 'Ann', 'Kasim', 'Parley', 'Mizael', 'Maxon', 'Kylar', 'Jjesus', 'Kaven', 'Curran', 'Edvin', 'Enrrique', 'Donovin', 'Godfrey', 'Xayden', 'Xzavian', 'Carlosmanuel', 'Ladainian', 'Keithan', 'Azrael', 'Jae', 'Marlow', 'Aviv', 'Orson', 'Zamarion', 'Chason', 'Henrry', 'Gevorg', 'Dartagnan', 'Zakee', 'Giovannie', 'Halen', 'Vinay', 'Wilfrido', 'Winton', 'Garet', 'Josafat', 'Manjot', 'Juandaniel', 'Manley', 'Oshea', 'Wali', 'Reymond', 'Harjot', 'Sidharth', 'Amer', 'Camari', 'Quincey', 'Dawan', 'Newell', 'Sigurd', 'Logen', 'Rafiq', 'Delonta', 'Katrina', 'Kristina', 'Octavia', 'Sade', 'Ziyad', 'Tovia', 'Malachai', 'Briana', 'Alison', 'Ashleigh', 'Jerick', 'Benedetto', 'Fiore', 'Mikail', 'Qasim', 'Yochanan', 'Ettore', 'Ferris', 'Aziz', 'Naseer', 'Jabril', 'Brodey', 'Alvah', 'Kalman', 'Ziyon', 'Zakery', 'Sedale', 'Jevin', 'Kalmen', 'Moishy', 'Shai', 'Zakari', 'Bradlee', 'Kenley', 'Pratham', 'Izeah', 'Ilias', 'Emari', 'Race', 'Zacarias', 'Yuri', 'Kleber', 'Kailer', 'Jhovany', 'Iven', 'Issaiah', 'Hosie', 'Dixon', 'Massiah', 'Remo', 'Pinchos', 'Mahki', 'Gunther', 'Irene', 'Jamarie', 'Kaan', 'Jayon', 'Moroni', 'Jkwon', 'Barack', 'Alastair', 'Fares', 'Zackariah', 'Yoshua', 'Tanish', 'Iann', 'Linden', 'Avinash', 'Willam', 'Iman', 'Domanic', 'Lenton', 'Samad', 'Aimar', 'Buddie', 'Jozef', 'Josmar', 'Mercer', 'Collie', 'Nephi', 'Kenai', 'Alquan', 'Cezar', 'Verbon', 'Aeneas', 'Jeremyah', 'Eren', 'Tej', 'Jahad', 'Deep', 'Augusta', 'Yaqub', 'Yahye', 'Vashon', 'Kristoff', 'Penn', 'Loukas', 'Kaydin', 'Kaius', 'Perseus', 'Mykah', 'Joab', 'Cylus', 'Emrys', 'Mikko', 'Jaxsyn', 'Sudais', 'Tiberius', 'Rooney', 'Yuvan', 'Cletis', 'Liev', 'Ester', 'Harlow', 'Shreyan', 'Samar', 'Saharsh', 'Ruhaan', 'Zyler', 'Yuma', 'Dwyane', 'Yanni', 'Dutch', 'Rajveer', 'Tayton', 'Kasir', 'Luster', 'Tage', 'Damarius', 'Elihu', 'Heinz', 'Manolo', 'Makhai', 'Madhav', 'Sohum', 'Omri', 'Egbert', 'Marie', 'Keshon', 'Jahmier', 'Nachmen', 'Mckade', 'Moise', 'Ames', 'Iden', 'Benard', 'Yannick', 'Pasha', 'Sherrick', 'Jordany', 'Fenton', 'Tytan', 'Dashel', 'Daksh', 'Juliani', 'Jhonathan', 'Broxton', 'Essie', 'Devontay', 'Maksym', 'Park', 'Dasani', 'Severiano', 'Kamel', 'Chayanne', 'Jarel', 'Yolanda', 'Tylik', 'Marquell', 'Jamarr', 'Micky', 'Socorro', 'Waymond', 'Michial', 'Yoseph', 'Lumir', 'Placido', 'Asif', 'Needham', 'Claiborne', 'Tennis', 'Burley', 'Raffaele', 'Shavar', 'Atanacio', 'Jahmar', 'Arben', 'Nabeel', 'Cordarryl', 'Danyal', 'Bryston', 'Lemont', 'Elston', 'Kerwin', 'Riccardo', 'Danzel', 'Waldemar', 'Ledarius', 'Omarr', 'Wilho', 'Alger', 'Raymie', 'Kenney', 'Abdallah', 'Aristides', 'Avram', 'Tayvion', 'Urbano', 'Deontay', 'Darcy', 'Robbin', 'Bartlomiej', 'Dann', 'Tyjuan', 'Khaleel', 'Winifred', 'Claron', 'Linford', 'Hilliard', 'Arlon', 'Yong', 'Malvin', 'Zymere', 'Newborn', 'Eleuterio', 'Glyn', 'Koltyn', 'Serapio', 'Pius', 'Ines', 'Harrold', 'Caitlyn', 'Rajeev', 'Constantinos', 'Abid', 'Calvert', 'Parnell', 'Aubry', 'Damone', 'Akim', 'Adem', 'Othel', 'Joaopedro', 'Tomer', 'Brentlee', 'Melquan', 'Elpidio', 'Jenny', 'Alejos', 'Romie', 'Ardell', 'Doctor', 'Virginia', 'Makenzie', 'Maggie', 'Tywan', 'Elisaul', 'Luby', 'Teofilo', 'Jermell', 'Gumesindo', 'Harless', 'Croix', 'Obinna', 'Traveon', 'Coley', 'Tu', 'Brylon', 'Carlin', 'Daneil', 'Garen', 'Ronell', 'Chesley', 'Tyrece', 'Arville', 'Eamonn', 'Kayshawn', 'Wilkie', 'Zacchaeus', 'Rapheal', 'Cordaryl', 'Quan', 'Nhan', 'Vann', 'Franciscojavier', 'Kinte', 'Rui', 'Chuong', 'Chao', 'Chai', 'Linh', 'Cirilo', 'Ky', 'Gwyn', 'Hearl', 'Tray', 'Carmon', 'Phuc', 'Neiman', 'Ladon', 'Moua', 'Eulises', 'Jonte', 'Yusuke', 'Vinnie', 'Seanpatrick', 'Pearson', 'Daemon', 'Reyn', 'Daekwon', 'Garron', 'Sequan', 'Zavien', 'Geovanie', 'Jessee', 'Richmond', 'Osualdo', 'Artin', 'Devone', 'Makoto', 'Hitoshi', 'Shinichi', 'Samari', 'Saxon', 'Glennis', 'Fadi', 'Bronislaw', 'Estuardo', 'Shaheen', 'Saman', 'Lue', 'Djuan', 'Cord', 'Linville', 'Landis', 'Cameren', 'Herson', 'Ellie', 'Seanmichael', 'Froilan', 'Delon', 'Jestin', 'Mattew', 'Toni', 'Kelii', 'Maribel', 'Jadrian', 'Traylon', 'Kaiea', 'Kaeo', 'Taft', 'Dameion', 'Darryn', 'Dondi', 'Clell', 'Corbett', 'Lyndell', 'Avenir', 'Seldon', 'Jakwon', 'Jacque', 'Deane', 'Cheikh', 'Carmel', 'Kieth', 'Tahmid', 'Lillard', 'Tasheem', 'Jens', 'Christobal', 'Delos', 'Lashon', 'Jaimie', 'Kary', 'Kendarious', 'Johnell', 'Harlen', 'Terron', 'Corliss', 'Liston', 'Seng', 'Phu', 'Rasean', 'Sung', 'San', 'Babak', 'Adel', 'Gillermo', 'Avon', 'Harlon', 'Allyn', 'Clary', 'Orry', 'Nazario', 'Jamail', 'Daeshawn', 'Tal', 'Moustafa', 'Tarell', 'Jahquan', 'Jian', 'Lazar', 'Adama', 'Benyamin', 'Tayvon', 'Lamel', 'Davonne', 'Tayquan', 'Jusitn', 'Shjon', 'Leotis', 'Kasheem', 'Ilir', 'Ravon', 'Parish', 'Ehan', 'Daishawn', 'Islam', 'Pinches', 'Ovadia', 'Mechel', 'Berlin', 'Deryk', 'Tymel', 'Vijay', 'Dyquan', 'Agron', 'Tarrell', 'Itamar', 'Mordcha', 'Chrisotpher', 'Alban', 'Stephane', 'Tanvir', 'Demetriu', 'Yan', 'Asim', 'Ahsan', 'Mackenzi', 'Kristofe', 'Kenrick', 'Cuahutemoc', 'Tavis', 'Audric', 'Deaven', 'Nicanor', 'Mick', 'Geoffery', 'Timofey', 'Dolphus', 'Franciso', 'Gorje', 'Jobany', 'Abdelrahman', 'Clenton', 'Yohance', 'Milad', 'Juanluis', 'Luismario', 'Marvyn', 'Rushil', 'Tenoch', 'Trentin', 'Fardeen', 'Shashank', 'Yuta', 'Ritvik', 'Akili', 'Aleksei', 'Gaurav', 'Iran', 'Caillou', 'Borach', 'Keisuke', 'Kaushik', 'Hari', 'Izac', 'Josejulian', 'Juanangel', 'Kasra', 'Anthonie', 'Daivd', 'Dain', 'Toren', 'Sesar', 'Eldor', 'Pieter', 'Yu', 'Cloyce', 'Dusten', 'Aquiles', 'Aslan', 'Sevastian', 'Siddarth', 'Tysen', 'Johncarlo', 'Idan', 'Daymian', 'Domanick', 'Arnie', 'Bartley', 'Newman', 'Akram', 'Abdulla', 'Lew', 'Geremy', 'Jehu', 'Josejuan', 'Jailen', 'Etai', 'Fabien', 'Victormanuel', 'Ossie', 'Egan', 'Eldin', 'Shamari', 'Nussen', 'Arda', 'Sina', 'Tytus', 'Pranay', 'Dylen', 'Juandavid', 'Kalil', 'Kushal', 'Hazael', 'Lecil', 'Belton', 'Aleczander', 'Terance', 'Faizan', 'Naithan', 'Koji', 'Akshat', 'Andruw', 'Bram', 'Dieter', 'Saahil', 'Saulo', 'Arnel', 'Demarea', 'Farhad', 'Joeseph', 'Alondra', 'Belal', 'Antoniodejesus', 'Anival', 'Choua', 'Cha', 'Bryn', 'Xiong', 'Aristeo', 'Mehmet', 'Moustapha', 'Jandel', 'Asante', 'Yunus', 'Schneur', 'Steffen', 'Leovardo', 'Kacey', 'Payam', 'Salbador', 'Nicholes', 'Neema', 'Clarke', 'Marqus', 'Araceli', 'Jerman', 'Marioalberto', 'Joseguadalupe', 'Emigdio', 'Krishan', 'Jessey', 'Arcadio', 'Zong', 'Yoni', 'Tirso', 'Thompson', 'Damarea', 'Everado', 'Edy', 'Edder', 'Nikki', 'Clemmie', 'Willian', 'Marquese', 'Perris', 'Miriam', 'Shelly', 'Bulmaro', 'Jasdeep', 'Irvine', 'Hue', 'Gurpreet', 'Donaldo', 'Jonthan', 'Geroge', 'Francois', 'Duc', 'Jerico', 'Avedis', 'Chang', 'Damario', 'Kenta', 'Nikkolas', 'Khoi', 'Garren', 'Norma', 'My', 'Lam', 'Sahir', 'Yer', 'Jaskarn', 'Jeric', 'Maximillion', 'Elson', 'Marin', 'Loc', 'Lemar', 'Kristofor', 'Nai', 'Takoda', 'Tung', 'Thong', 'Rayshaun', 'Derreck', 'Regino', 'Nadav', 'Luismiguel', 'Josede', 'Hao', 'Rayce', 'Zacary', 'Nareg', 'Khyree', 'Chi', 'Joanna', 'Sevag', 'Garin', 'Juluis', 'Petros', 'Berel', 'Abubakar', 'Jorel', 'Kazi', 'Jaiceon', 'Haider', 'Feynman', 'Muhammadali', 'Jassiel', 'Morrison', 'Nakai', 'Oden', 'Odysseus', 'Quest', 'Kaidan', 'Kilian', 'Kirill', 'Thorin', 'Tru', 'Xzander', 'Taniela', 'Roen', 'Sho', 'Aarin', 'Gracen', 'Gurfateh', 'Gurman', 'Hiro', 'Edrick', 'Esaias', 'Johncarlos', 'Sidi', 'Cataldo', 'Noor', 'Philbert', 'Eyad', 'Arber', 'Abrar', 'Ladislaus', 'Serafino', 'Mannie', 'Daevon', 'Haseeb', 'Yale', 'Spiro', 'Emre', 'Daryan', 'Camrin', 'Kavi', 'Doran', 'Vaibhav', 'Rayne', 'Derric', 'Orbie', 'Reily', 'Gio', 'Gurnoor', 'Jaasiel', 'Naman', 'Josaiah', 'Josiyah', 'Kasper', 'Filippo', 'Sigfredo', 'Joesiah', 'Rei', 'Nahom', 'Ojas', 'Vladislav', 'Hilary', 'Rinaldo', 'Even', 'Gautam', 'Cornel', 'Julyan', 'Inaki', 'Iseah', 'Itai', 'Laurance', 'Garey', 'Lawerance', 'Quindon', 'Levin', 'Leviticus', 'Link', 'Glenford', 'Avyan', 'Dmitry', 'Eiden', 'Advait', 'Ahaan', 'Arhaan', 'Kassius', 'Hendrick', 'Jaiveer', 'Nirvaan', 'Reeve', 'Torsten', 'True', 'Iwao', 'Jahvon', 'Paxson', 'Kali', 'Kwesi', 'Yaron', 'Ami', 'Dashiel', 'Meliton', 'Sylus', 'Mika', 'Jireh', 'Selig', 'Adi', 'Brenner', 'Breyden', 'Mitsuru', 'Farley', 'Montrel', 'Kyland', 'Jadakiss', 'Tadarius', 'Brooke', 'Alexandria', 'Alexa', 'Abby', 'Hayley', 'Mallory', 'Madelyn', 'Layla', 'Kirsten', 'Quayshawn', 'Deadrick', 'Hobby', 'Eunice', 'Macon', 'Ysabel', 'Secundino', 'Hulen', 'Estle', 'Tolbert', 'Baker', 'Tilford', 'Shyheem', 'Orbin', 'Ruel', 'Hurshel', 'Jailyn', 'Dequincy', 'Jamall', 'Draper', 'Kenric', 'Aime', 'Cam', 'Connell', 'Treylon', 'Bethel', 'Rommie', 'Alphonza', 'Gussie', 'Elridge', 'Hillery', 'Ruffin', 'Farrel', 'Wendall', 'Gerome', 'Ferrell', 'Uvaldo', 'Marshon', 'Jawaun', 'Trevell', 'Tyvon', 'Telesforo', 'Ellery', 'Cordae', 'Loran', 'Travell', 'Lamari', 'Errick', 'Antwoine', 'Starsky', 'Chirag', 'Donzell', 'Tierre', 'Ketan', 'Crespin', 'Orris', 'Bawi', 'Wanda', 'Canuto', 'Aniceto', 'Braxten', 'Audry', 'Bartolo', 'Brigido', 'Garvin', 'Vergil', 'Olegario', 'Thelma', 'Crecencio', 'Eleno', 'Wright', 'Burtis', 'Dicky', 'Avelino', 'Norval', 'Cirildo', 'Darwyn', 'Delwin', 'Henery', 'Beauford', 'Little', 'Ameir', 'Arland', 'Verner', 'Taron', 'Undra', 'Khasir', 'Kymir', 'Aleem', 'Ordean', 'Carmino', 'Lucus', 'Jodeci', 'Linn', 'Sinclair', 'Delorean', 'Chalmers', 'Kentavius', 'Jarious', 'Lajuan', 'Narada', 'Hussien', 'Alonte', 'Damarco', 'Benjamen', 'Randon', 'Jabree', 'Lawyer', 'Wanya', 'Samie', 'Sim', 'Washington', 'Isom', 'Keyton', 'Quin', 'Mahamed', 'Liban', 'Ramir', 'Samaj', 'Kipp', 'Prentis', 'Jibril', 'Kyaire', 'Buell', 'Nasim', 'Adell', 'Mohamedamin', 'Abdiaziz', 'Harun', 'Amire', 'Eligah', 'Parks', 'Colonel', 'Joaovictor', 'Vinicius', 'Mcdonald', 'Manly', 'Phares', 'Geza', 'Kemp', 'Alphonzo', 'Loring', 'Haig', 'Joaquim', 'Craven', 'Bynum', 'Parke', 'Ignatz', 'Hebert', 'Berton', 'Ayomide', 'Kidus', 'Ayven', 'Aziah', 'Banner', 'Barret', 'Blayze', 'Braddock', 'Javoris', 'Cortland', 'Antavius', 'Amaziah', 'Santonio', 'Slate', 'Sylis', 'Thierry', 'Joanthony', 'Rhylan', 'Pryce', 'Riggin', 'Dequavious', 'Bakari', 'Marquavius', 'Artavious', 'Desmon', 'Rajohn', 'Faheem', 'Kage', 'Arkeem', 'Jaquon', 'Dontavis', 'Quentavious', 'Braysen', 'Bricen', 'Traevon', 'Caidyn', 'Collyn', 'Joah', 'Patton', 'Coleson', 'Eythan', 'Hadley', 'Jaaziel', 'Johntavious', 'Quadarius', 'Rafeal', 'Karam', 'Krishiv', 'Majd', 'Yeray', 'Whitten', 'Johnluke', 'Demani', 'Easten', 'Ediel', 'Tellis', 'Delvecchio', 'Aleks', 'Rylie', 'Osmel', 'Lelan', 'Tamarion', 'Cayman', 'Hajime', 'Akio', 'Takao', 'Seiji', 'Ah', 'Mitsugi', 'Koichi', 'Ikenna', 'Tyquavious', 'Brannen', 'Slayde', 'Sultan', 'Cage', 'Jillian', 'Kara', 'Simone', 'Theresa', 'Julie', 'Alisha', 'Candace', 'Candice', 'Jazmine', 'Domani', 'Tiana', 'Jeovanni', 'Khaleb', 'Copeland', 'Dathan', 'Deleon', 'Jakori', 'Jayke', 'Kadon', 'Camdon', 'Shandon', 'Mylan', 'Jaxin', 'Beverley', 'Dallon', 'Jakeem', 'Tallon', 'Vraj', 'Welford', 'Jadarian', 'Yancarlos', 'Omkar', 'Jamaree', 'Alix', 'Trevyn', 'Orestes', 'Trevis', 'Refoel', 'Roddrick', 'Tarvis', 'Tamarick', 'Denard', 'Kerem', 'Treyden', 'Stephano', 'Shubh', 'Carston', 'Utah', 'Treven', 'Reshard', 'Yerachmiel', 'Osmany', 'Vansh', 'Samaad', 'Shakil', 'Saford', 'Doyal', 'Cai', 'Alexey', 'Cruze', 'Masiah', 'Kitai', 'Fedor', 'Algie', 'Worley', 'Jakhari', 'Brison', 'Lanier', 'Eston', 'Qadir', 'Lonzie', 'Rayfield', 'Chirstopher', 'Eron', 'Deontray', 'Zoltan', 'Christon', 'Byford', 'Mikeal', 'Talyn', 'Stormy', 'Laramie', 'Chrisopher', 'Breckin', 'Kennon', 'Json', 'Deiondre', 'Heron', 'Mykal', 'Kalai', 'Ervey', 'Brayam', 'Alakai', 'Maika', 'Kelson', 'Trevaughn', 'Aundre', 'Eathan', 'Keylon', 'Kolbe', 'Sebastion', 'Kalib', 'Jermy', 'Jarrid', 'Gumaro', 'Maliq', 'Armstead', 'Stephone', 'Oris', 'Hassel', 'Antwine', 'Lorraine', 'Budd', 'Irfan', 'Kamrin', 'Araf', 'Affan', 'Leiby', 'Sruly', 'Peretz', 'Mildred', 'Louise', 'Ryken', 'Ryler', 'Tayven', 'Taysen', 'Brexton', 'Zayaan', 'Oronde', 'Firman', 'Collen', 'Letcher', 'Clearence', 'Braydan', 'Yasser', 'Jeferson', 'Yahsir', 'Cavan', 'Ivor', 'Hasker', 'Kodie', 'Lori', 'Jaysean', 'Cadin', 'Breydon', 'Amaree', 'Nyeem', 'Menno', 'Orlo', 'Nassir', 'Sylar', 'Drevon', 'Burech', 'Lenox', 'Shloima', 'Daris', 'Diontae', 'Aidin', 'Brydon', 'Jasean', 'Nasier', 'Johney', 'Gabrial', 'Fate', 'Colyn', 'Kaleem', 'Capers', 'Rembert', 'Jquan', 'Legrand', 'Kirubel', 'Gaberiel', 'Thaddaeus', 'Rece', 'Dymir', 'Tylil', 'Remigio', 'Ahad', 'Melquiades', 'Ethel', 'Euel', 'Harvy', 'Margarita', 'Jakeb', 'Kagan', 'Trinton', 'Faiz', 'Iliyan', 'Emeterio', 'Ferman', 'Keeton', 'Decorian', 'Hadyn', 'Rashaud', 'Davontay', 'Brallan', 'Benancio', 'Espiridion', 'Seledonio', 'Estefan', 'Chanse', 'Dade', 'Sisto', 'Herbie', 'Janson', 'Eusevio', 'Loye', 'Leocadio', 'Kaelon', 'Trevian', 'Christien', 'Chrystian', 'Daegan', 'Rosbel', 'Romero', 'Kylin', 'Treyvion', 'Ezekial', 'Jaice', 'Jantzen', 'Aadyn', 'Tennyson', 'Kaedan', 'Kaiser', 'Kanin', 'Jerron', 'Jonaven', 'Elija', 'Amon', 'Valton', 'Derwood', 'Atilano', 'Jovanie', 'Kaemon', 'Oluwatobi', 'Atlee', 'Tadd', 'Tammy', 'Lem', 'Hilmar', 'Foch', 'Clenard', 'Jd', 'Jiovanny', 'Ladarion', 'Lleyton', 'Adrik', 'Webb', 'Toddrick', 'Jerrett', 'Omero', 'Wendel', 'Teresa', 'Cass', 'Kedric', 'Heraclio', 'Rainier', 'Lakota', 'Sanjuan', 'Daymon', 'Rodd', 'Yancey', 'Trampas', 'Viviano', 'Heith', 'Bj', 'Trevante', 'Ildefonso', 'Jaeger', 'Jamarkus', 'Remijio', 'Desiderio', 'Ausencio', 'Alejo', 'Keldrick', 'Sigifredo', 'Treavor', 'Britain', 'Macedonio', 'Kourtney', 'Gerrick', 'Jousha', 'Klinton', 'Montreal', 'Catlin', 'Danner', 'Eliberto', 'Eliodoro', 'Lonnell', 'Michiel', 'Hermilo', 'Jackey', 'Todrick', 'Irineo', 'Wenceslao', 'Duaine', 'Cleto', 'Gaylan', 'Derrel', 'Nabor', 'Huck', 'Hoy', 'Antwaun', 'Hoyte', 'Flournoy', 'Mayford', 'Harlie', 'Hansford', 'Cutler', 'Amerigo', 'Teague', 'Griffith', 'Emidio', 'Kenna', 'Cru', 'Arnett', 'Gay', 'Dencil', 'Carman', 'Doy', 'Trevan', 'Jahziel', 'Rodricus', 'Copper', 'Dael', 'Aydon', 'Ricco', 'Judas', 'Kessler', 'Romelo', 'Slayton', 'Marico', 'Leevi', 'Xadrian', 'Jceon', 'Kross', 'Chancey', 'Bayne', 'Brylen', 'Eidan', 'Olvin', 'Pearce', 'Zak', 'Jaiven', 'Dani', 'Bairon', 'Cordarious', 'Jaxyn', 'Rylin', 'Avin', 'Bransen', 'Eastyn', 'Eyden', 'Brenham', 'Chaston', 'Horatio', 'Dakarai', 'Jencarlo', 'Jevan', 'Jhayden', 'Tracen', 'Peggy', 'Wynn', 'Bennet', 'Milas', 'Ronal', 'Kadrian', 'Jhase', 'Callahan', 'Hays', 'Braidyn', 'Ezana', 'Chidubem', 'Virat', 'Maxemiliano', 'Ozias', 'Pace', 'Mordecai', 'Tabor', 'Phillipe', 'Maritza', 'Ricahrd', 'Jeanette', 'Sundeep', 'Tyric', 'Mina', 'Nasser', 'Nhia', 'Giuliano', 'Farid', 'Ryo', 'Delmont', 'Klaus', 'Traquan', 'Dawayne', 'Broward', 'Drequan', 'Cagney', 'Shellie', 'Torre', 'Deepak', 'Janmichael', 'Lan', 'Quentavius', 'Quantez', 'Markevious', 'Melbourne', 'Melford', 'Xue', 'Samnang', 'Jarquez', 'Montrez', 'Dao', 'Luvern', 'Vue', 'Jenaro', 'Wacey', 'Lorena', 'Ly', 'Casmere', 'Marsean', 'Marinus', 'Shiro', 'Shizuo', 'Knowledge', 'Baudelio', 'Cher', 'Christiaan', 'Adriane', 'Wilgus', 'Gustabo', 'Barnet', 'Xeng', 'Priscilla', 'Sou', 'Sumeet', 'Vartan', 'Herschell', 'Montell', 'Illya', 'Flem', 'Marwan', 'Johnrobert', 'Boleslaus', 'Christie', 'Ericberto', 'Esmeralda', 'Cecilia', 'Purvis', 'Benjie', 'Sutter', 'Sufyan', 'Viraaj', 'Sathvik', 'Quitman', 'Liborio', 'Humbert', 'Zakariah', 'Yichen', 'Seward', 'Alf', 'Sebastiano', 'Guiseppe', 'Stanislaw', 'Tyrice', 'Lenell', 'Kewon', 'Bahe', 'Recardo', 'Paola', 'Ronson', 'Naveed', 'Karla', 'Lamberto', 'Leoncio', 'Sandor', 'Diamante', 'Woodson', 'Hargis', 'Kelcey', 'Daquon', 'Estell', 'Christapher', 'Jalal', 'Tania', 'Tramell', 'Victoralfonso', 'Kento', 'Kiet', 'Krystopher', 'Shaine', 'Bejamin', 'Virgel', 'Toxie', 'Goebel', 'Tyon', 'Norvin', 'Savalas', 'Othmar', 'Jakaiden', 'Reis', 'Pratik', 'Ashish', 'Hutson', 'Karmello', 'Dacari', 'Katsuji', 'Sadamu', 'Masatoshi', 'Kiyoto', 'Carols', 'Waylen', 'Shain', 'Alexandru', 'Jomo', 'Kalei', 'Shyam', 'Zyan', 'Tamar', 'Prem', 'Jamiyl', 'Remmel', 'Harlin', 'Novak', 'Fynn', 'Gonsalo', 'Maliki', 'Loghan', 'Cauy', 'Kassem', 'Jitsuo', 'Itsuo', 'Atsushi', 'Sunao', 'Sueo', 'Hiromu', 'Toshiyuki', 'Osamu', 'Mena', 'Aldin', 'Leticia', 'Darick', 'Kawan', 'Rajahn', 'Asmar', 'Emarion', 'Hilmer', 'Dameyune', 'Rondarius', 'Brinson', 'Trason', 'Chung', 'Eran', 'Khanh', 'Javarious', 'Makel', 'Zyquan', 'Quintarius', 'Duran', 'Veasna', 'Thao', 'Gracin', 'Eberardo', 'Ming', 'Lusiano', 'Kaveh', 'Truong', 'Ying', 'Kentravious', 'Dillen', 'Jamonte', 'Arthuro', 'Camarion', 'Avett', 'Mehdi', 'Nishant', 'Bartek', 'Aarnav', 'Jeffory', 'Deen', 'Dayshaun', 'Kemonte', 'Petar', 'Yug', 'Donat', 'Sylvio', 'Magdiel', 'Christianjames', 'Lessie', 'Sander', 'Rajvir', 'Nahuel', 'Pearlie', 'Aaren', 'Dimitry', 'Aravind', 'Aristotle', 'Jeury', 'Naji', 'Tysheem', 'Alcee', 'Gustaf', 'Jamarrion', 'Zollie', 'Malick', 'Navin', 'Juwon', 'Usama', 'Walid', 'Quamel', 'Sadiq', 'Tamarcus', 'Merwyn', 'Ferdie', 'Kalif', 'Latif', 'Davidson', 'Aahan', 'Shahid', 'Min', 'Kieren', 'Oz', 'Oryan', 'Madox', 'Kota', 'Gurshaan', 'Gagik', 'Finnigan', 'Finlay', 'Exodus', 'Kaileb', 'Jullien', 'Jiovani', 'Maryland', 'Weaver', 'Williard', 'Keyondre', 'Kailen', 'Kanan', 'Luisantonio', 'Izack', 'Daniela', 'Colm', 'Raja', 'Keeshawn', 'Adhemar', 'Hillary', 'Abdimalik', 'Roark', 'Kolston', 'Cheryl', 'Richardson', 'Arif', 'Jahkeem', 'Kumar', 'Raywood', 'Jaiquan', 'Earley', 'Buren', 'Rossie', 'Jakayden', 'Ruffus', 'Zaquan', 'Tamer', 'Devonne', 'Ikeem', 'Dhruva', 'Georges', 'Kwabena', 'Yeriel', 'Glover', 'Sanders', 'Adonay', 'Gillis', 'Yomar', 'Ediberto', 'Antwane', 'Isahi', 'Haidyn', 'Elizandro', 'Markjoseph', 'Jezreel', 'Isayah', 'Zedekiah', 'Nikolay', 'Jenner', 'Uriyah', 'Taiga', 'Daniele', 'Zacharie', 'Joanne', 'Manpreet', 'Mohan', 'Eliu', 'Faraz', 'Robah', 'Isham', 'Omarian', 'Gagandeep', 'Zeno', 'Waddell', 'Plato', 'Quavon', 'Talib', 'Bascom', 'Mayo', 'Tequan', 'Teron', 'Anatole', 'Tajh', 'Algenis', 'Liridon', 'Kervens', 'Yunior', 'Kenson', 'Wesly', 'Antwann', 'Zelig', 'Demetrious', 'Johnbenedict', 'Josecarlos', 'Kona', 'Cj', 'Atul', 'Asaf', 'Aleck', 'Anthoni', 'Anuar', 'Gedalya', 'Rafay', 'Eyal', 'Andry', 'Natanel', 'Nissim', 'Jahdiel', 'Jophy', 'Rehaan', 'Jhovani', 'Maxximus', 'Nain', 'Yomtov', 'Sheikh', 'Demir', 'Markos', 'Mouhamadou', 'Ousman', 'Izreal', 'Hadrian', 'Aldrin', 'Conlan', 'Degan', 'Toi', 'Finneas', 'Latroy', 'Adon', 'Antuan', 'Elchonon', 'Uzair', 'Mohid', 'Nazier', 'Eliab', 'Roc', 'Pavan', 'Yovanny', 'Sinjin', 'Tavoris', 'Asiel', 'Brayant', 'Alexsandro', 'Adrean', 'Darel', 'Olajuwon', 'Corderro', 'Tynan', 'Xaiver', 'Travaris', 'Yonis', 'Gerren', 'Demon', 'Furnell', 'Juel', 'Harish', 'Raiyan', 'Elia', 'Elijha', 'Gautham', 'Arvind', 'Audel', 'Almer', 'Djimon', 'Jahi', 'Gehrig', 'Avant', 'Arnell', 'Eliaz', 'Kaedon', 'Jaedin', 'Voshon', 'Malachy', 'Gilad', 'Gabriele', 'Riku', 'Cameran', 'Yoskar', 'Jahfari', 'Alexiz', 'Javante', 'Gregor', 'Izel', 'Donnovan', 'Nikos', 'Kodey', 'Eytan', 'Betzalel', 'Dimitrius', 'Chananya', 'Graylin', 'Samvel', 'Yi', 'Wassillie', 'Kelechi', 'Erroll', 'Ardit', 'Rahn', 'Delaine', 'Jule', 'Idus', 'Dessie', 'Juda', 'Levester', 'Kiante', 'Earnie', 'Ihor', 'Kapono', 'Akoni', 'Koamalu', 'Sholem', 'Howie', 'Dariusz', 'Hall', 'Kekai', 'Onix', 'Ozie', 'Liem', 'Collis', 'Lemon', 'Hinton', 'Guss', 'Ronda', 'Siddhartha', 'Owyn', 'Rye', 'Riot', 'Vander', 'Selena', 'Barnie', 'Lewie', 'Jaxiel', 'Kaizen', 'Haloa', 'Dermot', 'Misha', 'Mister', 'Nicholis', 'Kevork', 'Kia', 'Houa', 'Huriel', 'Jesu', 'Dionta', 'Silvino', 'Ivery', 'Iokepa', 'Geo', 'Dex', 'Izaan', 'Jasraj', 'Jakson', 'Niel', 'Avelardo', 'Arjay', 'Aran', 'Alanzo', 'Aidric', 'Lomax', 'Rawn', 'Simmie', 'Tonnie', 'Yuto', 'Mataio', 'Nicodemus', 'Maximilien', 'Raider', 'Ridley', 'Orest', 'Ramzi', 'Kaikea', 'Kamahao', 'Kyrillos', 'Mace', 'Lyrik', 'Lyon', 'Lux', 'Ashkan', 'Jurgen', 'Khachik', 'Maher', 'Jaccob', 'Jagdeep', 'Wash', 'Simpson', 'Macy', 'Haylee', 'Hope', 'Katie', 'Thurmon', 'Savanna', 'Zoey', 'Atiba', 'Dylann', 'Kaylen', 'Helio', 'Geovannie', 'Praneel', 'Kamau', 'Rhamel', 'Knoah', 'Harm', 'Nyle', 'Maveric', 'Neithan', 'Niklaus', 'Lejon', 'Wai', 'Indigo', 'Sayed', 'Abdias', 'Daniil', 'Rashod', 'Wren', 'Chico', 'Jamarri', 'Leiland', 'Ranvir', 'Mavrick', 'Matai', 'Deveon', 'Teyon', 'Ramell', 'Haik', 'Dupree', 'Emon', 'Jermal', 'Bayley', 'Marshell', 'Blouncie', 'Larson', 'Lorenz', 'Jhovanny', 'Jeffie', 'Portia', 'Adron', 'Calogero', 'Mathews', 'Aundra', 'Aariv', 'Keniel', 'Jameis', 'Konstantin', 'Khayden', 'Manford', 'Polo', 'Chanel', 'Brittani', 'Kazuki', 'Kaelen', 'Alice', 'Maya', 'Madeleine', 'Kiana', 'Latasha', 'Felicia', 'Gabriella', 'Bolivar', 'Eileen', 'Alister', 'Aidenn', 'Nina', 'Ellington', 'Alecsander', 'Ja', 'Jarmaine', 'Kyriakos', 'Apostolos', 'Leshawn', 'Shondell', 'Matvey', 'Savino', 'Zakariye', 'Dozier', 'Holland', 'Haruto', 'Hendrik', 'Allah', 'Johnanthony', 'Eliyah', 'Champ', 'Dastan', 'Caliph', 'Manish', 'Agostino', 'Kaio', 'Avyaan', 'Gerasimos', 'Refujio', 'Munir', 'Abdurrahman', 'Selso', 'Epimenio', 'Suhayb', 'Jock', 'Larwence', 'Saadiq', 'Lilburn', 'Selestino', 'Randi', 'Nysir', 'Harlyn', 'Basir', 'Kathy', 'Teddie', 'Luqman', 'Tyhir', 'Mubarak', 'Ridwan', 'Filemon', 'Bergen', 'Danney', 'Eual', 'Melburn', 'Esiquio', 'Cree', 'Dorwin', 'Naasir', 'Ysmael', 'Nirav', 'Chuckie', 'Lashaun', 'Darris', 'Blase', 'Kiley', 'Demarko', 'Taiwan', 'Lamon', 'Corrie', 'Feras', 'Excell', 'Cornelious', 'Martinez', 'Marvel', 'Climmie', 'Martrell', 'Valley', 'Lonie', 'Jovante', 'Lavante', 'Lugene', 'Cordarro', 'Lacey', 'Derrius', 'Tedd', 'Levell', 'Linas', 'Taras', 'Toma', 'Klint', 'Gualberto', 'Feliberto', 'Tarrance', 'Theran', 'Lakeith', 'Mearl', 'Karry', 'Denarius', 'Dontarius', 'Nikia', 'Rakesh', 'Not', 'Darek', 'Gery', 'Ontario', 'Jimi', 'Shamarion', 'Kedarius', 'Jermarcus', 'Amarie', 'Kordae', 'Montie', 'Haleem', 'Inocencio', 'Brockton', 'Yoshiaki', 'Ponciano', 'Silvester', 'Derron', 'Davaughn', 'Urie', 'Juanito', 'Corky', 'Pasqual', 'Marilyn', 'Morley', 'Ayoub', 'Eliasar', 'Mickel', 'Skylor', 'Kewan', 'Teon', 'Rafal', 'Devanta', 'Rosco', 'Tywon', 'Evon', 'Cleven', 'Hardie', 'Tori', 'Trayvond', 'Maaz', 'Masashi', 'Neno', 'Kahari', 'Terri', 'Toru', 'Jalynn', 'Avonte', 'Satchel', 'Tanya', 'Kalab', 'Avetis', 'Miko', 'Kodiak', 'Lang', 'Leondre', 'Purnell', 'Harutyun', 'Gorman', 'Vong', 'Shervin', 'Soloman', 'Sue', 'Amandeep', 'Amritpal', 'Leonides', 'Melecio', 'Mikhael', 'Estaban', 'Arius', 'Calix', 'Gurtaj', 'Dilraj', 'Dillinger', 'Aidden', 'Shivansh', 'Shravan', 'Saud', 'Yarel', 'Riker', 'Yareth', 'Zeppelin', 'Ladarious', 'Lucan', 'Terren', 'Tustin', 'Nicolaas', 'Rakan', 'Johnjoseph', 'Hovanes', 'Navjot', 'Henrique', 'Marsalis', 'Karanveer', 'Jeffren', 'Khairi', 'Haruki', 'Jadden', 'Iliya', 'Hansen', 'Srihan', 'Sartaj', 'Rishik', 'Rishan', 'Octavian', 'Ranbir', 'Padraic', 'Tanush', 'Tlaloc', 'Cadarius', 'Yared', 'Vahan', 'Lakai', 'Fionn', 'Eziah', 'Emillio', 'Hakob', 'Gryphon', 'Harsha', 'Hiroto', 'Nivaan', 'Radin', 'Nicasio', 'Mael', 'Lysander', 'Rees', 'Roemello', 'Bretton', 'Christoph', 'Eliceo', 'Armany', 'Axell', 'Bogdan', 'Luan', 'Aldon', 'Aeson', 'Adhvik', 'Jese', 'Blanca', 'Crisanto', 'Dietrich', 'Tarin', 'Yama', 'Yia', 'Omeed', 'Arbie', 'Shayn', 'Ranferi', 'Ricard', 'Farmer', 'Goble', 'Herald', 'Hager', 'Elva', 'Carlis', 'Evertt', 'Ledford', 'Dequarius', 'Hughie', 'Burgess', 'Kourosh', 'Jaun', 'Nicko', 'Victorhugo', 'Roverto', 'Shadi', 'Sopheak', 'Acie', 'Demar', 'Carolina', 'Vinal', 'Earland', 'Sergey', 'Dayon', 'Kwamaine', 'Kerney', 'Ola', 'Welby', 'Kyon', 'Tyion', 'Kiyon', 'Neng', 'Raquel', 'Nadeem', 'Terran', 'Tin', 'Rudi', 'Murad', 'Murrell', 'Lenville', 'Rondall', 'Han', 'Hovhannes', 'Karapet', 'Hamed', 'Alasdair', 'Agam', 'Areg', 'Ariston', 'Askari', 'Ayansh', 'Byran', 'Dolan', 'Devonn', 'Edith', 'Christoffer', 'Alaa', 'Ashraf', 'Rondle', 'Tavarius', 'Michaeljames', 'Nichols', 'Sonia', 'Ryanchristopher', 'Garo', 'Hien', 'Corin', 'Dillin', 'Jerid', 'Jesusalberto', 'Zeferino', 'Gobel', 'Tykeem', 'Miking', 'Juno', 'Jiraiya', 'Kailash', 'Madix', 'Lucciano', 'Llewyn', 'Leone', 'Knight', 'Dorse', 'Oak', 'Irie', 'Brodi', 'Hridhaan', 'Coda', 'Dekker', 'Evren', 'Eisen', 'Eddison', 'Donatello', 'Happy', 'Devron', 'Suleiman', 'Siddhanth', 'Zorawar', 'Zadkiel', 'Waylan', 'Valor', 'Triton', 'Govanni', 'Angelus', 'Ashvin', 'Matthews', 'Elver', 'Brendin', 'Rhea', 'Jyron', 'Matisse', 'Karanvir', 'Kenshin', 'Saketh', 'Trigo', 'Wil', 'Tyrick', 'Trejon', 'Manvir', 'Sascha', 'Samay', 'Prabhjot', 'Piers', 'Arshia', 'Karo', 'Makani', 'Ludwin', 'Kean', 'Nikoli', 'Garlin', 'Georgio', 'Jyren', 'Ledell', 'Jayceion', 'Wiltz', 'Elgie', 'Jediah', 'Izzac', 'Izeyah', 'Jeyson', 'Hamid', 'Jalani', 'Rohin', 'Shiva', 'Ramces', 'Claudell', 'Daymien', 'Aeron', 'Aadan', 'Alesandro', 'Aleksey', 'Galileo', 'Esvin', 'Indy', 'Graden', 'Gor', 'Vlad', 'Kendrell', 'Saket', 'Asahel', 'Blue', 'Arshdeep', 'Adain', 'Keneth', 'Jacy', 'Dasan', 'Haniel', 'Ethin', 'Ericson', 'Izick', 'Elisandro', 'Coltrane', 'Kemani', 'Josearmando', 'Josealfredo', 'Alias', 'Anurag', 'Carlitos', 'Ceaser', 'Sukhraj', 'Severin', 'Nishanth', 'Mattox', 'Rhiley', 'Dareon', 'Danyel', 'Calan', 'Nithin', 'Donivan', 'Taye', 'Trustin', 'Igor', 'Jayr', 'Kayin', 'Pleas', 'Aadit', 'Balam', 'Jovannie', 'Quintrell', 'Japheth', 'Hero', 'Edu', 'Duvan', 'Anden', 'Anshul', 'Ailton', 'Raybon', 'Rabon', 'Kendry', 'Manases', 'Damyan', 'Braven', 'Dhani', 'Isaia', 'Hovik', 'Sonnie', 'Wolfe', 'Banyan', 'Hiroki', 'Matin', 'Sequoia', 'Acelin', 'Aarya', 'Arsalan', 'Carlosdaniel', 'Jaryd', 'Ariana', 'Kylee', 'Mariah', 'Serenity', 'Kailey', 'Delaney', 'Emilee', 'Isabelle', 'Jayla', 'Drue', 'Emani', 'Juandedios', 'Kedar', 'Baily', 'Daijon', 'Daman', 'Kentaro', 'Damaria', 'Mareco', 'Valmore', 'Theophile', 'Winslow', 'Ugo', 'Cainan', 'Finian', 'Keiji', 'Issack', 'Blanchard', 'Domingos', 'Jarin', 'Giovan', 'Ovila', 'Lovelace', 'Albion', 'Curry', 'Christophr', 'Nolton', 'Unborn', 'Torry', 'Yoshi', 'Perrion', 'Nathyn', 'Syler', 'Sheila', 'Jaedan', 'Cobey', 'Bashar', 'Ehsan', 'Daryll', 'Seann', 'Niels', 'Nazar', 'Frederico', 'Esther', 'Bobie', 'Loyce', 'Heberto', 'Bentura', 'Jafar', 'Keigan', 'Bertil', 'Aloys', 'Janie', 'Paz', 'Damacio', 'Oiva', 'Ingvald', 'Walfred', 'Jakeob', 'Georgie', 'Alcuin', 'Raynold', 'Josey', 'Lasaro', 'Jo', 'Hjalmer', 'Philemon', 'Paula', 'Christophor', 'Estanislao', 'Angelita', 'Anacleto', 'Alfons', 'Lawayne', 'Delrico', 'Clemson', 'Jaleen', 'Jerimy', 'Javaughn', 'Tiofilo', 'Hubbard', 'Abundio', 'Derl', 'Keagen', 'Aymen', 'Freedom', 'Venancio', 'Pauline', 'Gorden', 'Hani', 'Pharrell', 'Jager', 'Nyair', 'Azeem', 'Khyir', 'Jabriel', 'Yandiel', 'Zaahir', 'Laine', 'Xai', 'Vernard', 'Augie', 'Sostenes', 'Darryll', 'Asir', 'Lindon', 'Jearl', 'Peder', 'Rudolpho', 'Clancy', 'Yue', 'Ronnald', 'Onofre', 'Kysir', 'Helmuth', 'Marlowe', 'Derk', 'Demetrick', 'Jefrey', 'Burrell', 'Robie', 'Marlan', 'Thane', 'Jamire', 'Donnel', 'Syaire', 'York', 'Asaad', 'Kyair', 'Devere', 'Wing', 'Yaniv', 'Mathhew', 'Silvia', 'Chia', 'Bren', 'Cavin', 'Aldrich', 'Judy', 'Erron', 'Butler', 'Carole', 'Almon', 'Gilles', 'Christin', 'Renald', 'Sony', 'Chavis', 'Nghia', 'Mercedes', 'Real', 'Josejesus', 'Ryman', 'Kori', 'Ichael', 'Jabier', 'Nguyen', 'Angeldejesus', 'Bobak', 'Brittan', 'Shaunt', 'Karlton', 'Jerin', 'Gerado', 'Raymund', 'Kerolos', 'Rolan', 'Wilbern', 'Sipriano', 'Hermes', 'Robyn', 'Ynes', 'Vernice', 'Pink', 'Jevonte', 'Jerame', 'Tajuan', 'Mingo', 'Jeremia', 'Edmon', 'Castulo', 'Cleofas', 'Arlee', 'Oather', 'Larkin', 'Mcarther', 'Ryann', 'Hong', 'Jamieson', 'Enedino', 'Gerad', 'Lenord', 'Alireza', 'Hollie', 'Gilford', 'Lajuane', 'Izell', 'Trenidad', 'Shelley', 'Ulysees', 'Juana', 'Coalton', 'Remer', 'Raiford', 'Caydon', 'Dalyn', 'Wilhelm', 'Lenzy', 'Bartow', 'Tibor', 'Cebert', 'Elizar', 'Ellen', 'Uchenna', 'Toy', 'Curlee', 'Ralf', 'Giulio', 'Conway', 'Ngai', 'Chaka', 'Engelbert', 'Auburn', 'Socrates', 'Kostas', 'Kamalei', 'Kupono', 'Carrell', 'Lister', 'Mattie', 'Thermon', 'Tina', 'Kennan', 'Adison', 'Dalon', 'Ephram', 'Jaylynn', 'Zabdiel', 'Kaidon', 'Juvencio', 'Havis', 'Dagan', 'Dacorian', 'Donavyn', 'Evyn', 'Issai', 'Zenon', 'Inman', 'Hason', 'Lehman', 'Afton', 'Clayborn', 'Abrahm', 'Neill', 'Conard', 'Mutsuo', 'Seikichi', 'Wetzel', 'Masaji', 'Masanobu', 'Shigeto', 'Edgel', 'Goro', 'Lovett', 'Seiko', 'Sakae', 'Roshawn', 'Antjuan', 'Erby', 'Jobe', 'Ladarian', 'Cyler', 'Edel', 'Hartsel', 'Jill', 'Jami', 'Rabun', 'Fulton', 'Dreddy', 'Corrado', 'Harald', 'Alterick', 'Hala', 'Powell', 'Lesly', 'Kalon', 'Theodoros', 'Etan', 'Trev', 'Javiel', 'Jusiah', 'Joncarlos', 'Jhamari', 'Rasheim', 'Raysean', 'Kreg', 'Rahmell', 'Kerby', 'Eliga', 'Clemon', 'Aneudy', 'Keiran', 'Kensley', 'Ludie', 'Jorell', 'Can', 'Demondre', 'Cierra', 'Maurizio', 'Tacuma', 'Ryzen', 'Jabar', 'Tara', 'Reign', 'Jashon', 'Lasean', 'Artavius', 'Akbar', 'Un', 'Kaikane', 'Tanisha', 'Elena', 'Bridget', 'Asia', 'Latisha', 'Rachael', 'Latoya', 'Elisabeth', 'Janelle', 'Ikea', 'Kobey', 'Kamaehu', 'Keona', 'Calixto', 'Theotis', 'Worthy', 'Galo', 'Holly', 'Sevyn', 'Petr', 'Cerrone', 'Tedrick', 'Kymari', 'Gerrard', 'Eldo', 'Alcides', 'Derrian', 'Eulas', 'Leodis', 'Akai', 'Dalonte', 'Pantelis', 'Sheron', 'Tommaso', 'Treg', 'Shirl', 'Abrian', 'Brewer', 'Yamir', 'Zadok', 'Holdyn', 'Jayanthony', 'Eh', 'Dayson', 'Khaden', 'Quintez', 'Rontavious', 'Markese', 'Quintavis', 'Daveion', 'Tonny', 'Jaevon', 'Ahkeem', 'Hy', 'Adams', 'Marian', 'Huner', 'Jarmarcus', 'Treyon', 'Tullio', 'Oreste', 'Oleg', 'Xzavien', 'Atzel', 'Brenan', 'Abriel', 'Braylyn', 'Chidera', 'Lebaron', 'Jameir', 'Kameryn', 'Shade', 'Koltin', 'Cordarrius', 'Amelio', 'Demarquez', 'Tarus', 'Calob', 'Dmarco', 'Creek', 'Amen', 'Cylas', 'Davyn', 'Haygen', 'Godric', 'Garn', 'Renardo', 'Locke', 'Lexington', 'Mazin', 'Othniel', 'Kruze', 'Jaxston', 'Jaxten', 'Jeziah', 'Jettson', 'Zebastian', 'Sarim', 'Jawuan', 'Tremain', 'Hassell', 'Quartez', 'Hawkins', 'Riggs', 'Rebel', 'Nael', 'Kaycen', 'Kamsiyochukwu', 'Kagen', 'Jrue', 'Jaydeen', 'Azazel', 'Ayson', 'Cheston', 'Aarian', 'Chavez', 'Void', 'Zacariah', 'Keena', 'Antwuan', 'Labarron', 'Quamere', 'Mikell', 'Prestyn', 'Savian', 'Dayden', 'Jaivion', 'Geremiah', 'Aidon', 'Bralyn', 'Gianncarlo', 'Jarquavious', 'Muriel', 'Akshar', 'Kadir', 'Najir', 'Neko', 'Jahaad', 'Jdyn', 'Kashon', 'Jaquil', 'Wah', 'Delmos', 'Masuo', 'Nobuichi', 'Kiichi', 'Jerone', 'Tatsumi', 'Damarian', 'Elier', 'Lansing', 'Heinrich', 'Hasson', 'Larrie', 'Phyllis', 'Jamoni', 'Zylen', 'Demoni', 'Harrel', 'Levie', 'Zaryan', 'Orazio', 'Seymore', 'Florence', 'Kolter', 'Kemper', 'Daelyn', 'Haddon', 'Syon', 'Sair', 'Filadelfio', 'Marquavion', 'Breylon', 'Filimon', 'Abie', 'Cortavious', 'Achille', 'Dontrez', 'Matty', 'Darshawn', 'Overton', 'Bashir', 'Kavan', 'Caidan', 'Braelen', 'Param', 'Kani', 'Percival', 'Hartley', 'Erminio', 'Candler', 'Ulyssee', 'Damontae', 'Ellijah', 'Cesare', 'Eleanor', 'Eustace', 'Joachim', 'Tarique', 'Altin', 'Tyleek', 'Posey', 'Awais', 'Daivon', 'Zi', 'Hammad', 'Meshulem', 'Nickie', 'Brehon', 'Dacoda', 'Kwamane', 'Rafuel', 'Mikai', 'Hensel', 'Thelbert', 'Valerio', 'Trevonte', 'Koran', 'Cheick', 'Shahzaib', 'Tahsin', 'Derry', 'Mustapha', 'Chucky', 'Osborne', 'Daquarius', 'Marque', 'Raquon', 'Cherokee', 'Keyshaun', 'Mohamadou', 'Keishawn', 'Jahmeek', 'Junaid', 'Amil', 'Mckoy', 'Zackry', 'Nyheim', 'Nkosi', 'Kweli', 'Tydarius', 'Umer', 'Demorris', 'Demarquis', 'Hersch', 'Luzer', 'Li', 'Aly', 'Quade', 'Quamaine', 'Markee', 'Jhoan', 'Mert', 'Supreme', 'Tyheem', 'Gomer', 'Taseen', 'Yousaf', 'Yonason', 'Gifford', 'Ashar', 'Sender', 'Salah', 'Saifan', 'Raihan', 'Nizar', 'Abrahim', 'Kunga', 'Javis', 'Evens', 'Bayard', 'Kaysan', 'Padraig', 'Ney', 'Ahmet', 'Misty', 'Ayyan', 'Saint', 'Fern', 'Wasil', 'Nolie', 'Zarif', 'Younis', 'Eesa', 'Ketrick', 'Veryl', 'Refael', 'Motty', 'Naftuly', 'Waseem', 'Yusif', 'Brigg', 'Zaheer', 'Shiya', 'Karma', 'Meilich', 'Mihran', 'Javares', 'Efe', 'Abubakr', 'Avrumi', 'Nayshawn', 'Mostafa', 'Hinckley', 'Jahmeir', 'Fintan', 'Sollie', 'Amiel', 'Abu', 'Yaro', 'Josha', 'Jermane', 'Bertis', 'Hernando', 'Gerrod', 'Haim', 'Frandy', 'Andrews', 'Dayle', 'Fleming', 'Volvi', 'Savior', 'Shuford', 'Plummer', 'Ralston', 'Dayvion', 'Muhamed', 'Naheem', 'Nataniel', 'Kaeleb', 'Billyjoe', 'Able', 'Fareed', 'Purcell', 'Trayson', 'Mackay', 'Moyer', 'Haynes', 'Domnick', 'Burnie', 'Gleen', 'Leavy', 'Lennart', 'Breken', 'Arlind', 'Clarnce', 'Nissen', 'Josiel', 'Alvester', 'Jaquay', 'Nickolaos', 'Ruddy', 'Berkeley', 'Flamur', 'Sherif', 'Shateek', 'Ayodele', 'Davone', 'Meshach', 'Kinston', 'Algernon', 'Dvonte', 'Jawara', 'Zamar', 'Dayron', 'Jaequan', 'Tyrelle', 'Jazion', 'Tamel', 'Torris', 'Marguis', 'Yuniel', 'Saige', 'Gottlieb', 'Cori', 'Dre', 'Yordan', 'Shaquell', 'Jonel', 'Kashaun', 'Arjenis', 'Tashan', 'Fitzroy', 'Francisc', 'Kwaku', 'Jakyri', 'Trayton', 'Jarrick', 'Reginaldo', 'Facundo', 'Elma', 'Dardan', 'Dreshawn', 'Demontray', 'Chaddrick', 'Roper', 'Taariq', 'Ausitn', 'Jachai', 'Duval', 'Braun', 'Taylan', 'Dionis', 'Samy', 'Armistead', 'Alize', 'Tayshon', 'Ainsley', 'Kaheem', 'Jaire', 'Kyshawn', 'Nahshon', 'Aaliyah', 'Shanard', 'Azion', 'Alana', 'Alexia', 'Breyon', 'Trigg', 'Wylder', 'Zaydin', 'Ziaire', 'Zixuan', 'Yanis', 'Zair', 'Zaven', 'Alanmichael', 'Viyan', 'Vivan', 'Klay', 'Erico', 'Tycho', 'Waris', 'Winter', 'Aliyah', 'Kamilo', 'Kei', 'Glendell', 'Lilly', 'Lauryn', 'Jovian', 'Shayla', 'Tessa', 'Jupiter', 'Aaric', 'Aadhav', 'Jetson', 'Abir', 'Adhrit', 'Alexandr', 'Brooklynn', 'Callie', 'Ashlee', 'Ashlyn', 'Haleigh', 'Heaven', 'Jahkari', 'Izaiyah', 'Troyce', 'Bige', 'Hayze', 'Neldon', 'Marven', 'Beckem', 'Dvante', 'Navarro', 'Neiko', 'Noeh', 'Jen', 'Torrian', 'Helios', 'Macallan', 'Lio', 'Wilkens', 'Merrik', 'Ameal', 'Mujtaba', 'Iktan', 'Kavious', 'Monterrio', 'Hughey', 'Calin', 'Cali', 'Jamaar', 'Kenith', 'Rihaan', 'Deaglan', 'Kelso', 'Lavaris', 'Ashot', 'Marshun', 'Rainer', 'Rivan', 'Talal', 'Taiyo', 'Minor', 'Yvon', 'Stonewall', 'Merril', 'Okie', 'Trevino', 'Imari', 'Ithan', 'Izmael', 'Jayan', 'Harut', 'Harshaan', 'Kainen', 'Kalyan', 'Kanishk', 'Kotaro', 'Josyah', 'Vola', 'Omarie', 'Dmari', 'Mannix', 'Elad', 'Shun', 'Andriy', 'Angelino', 'Ary', 'Axcel', 'Becker', 'Daxten', 'Daemian', 'Cypress', 'Jakhai', 'Warnie', 'Maikel', 'Davinci', 'Calloway', 'Vernal', 'Tyrome', 'Mont', 'Ovie', 'Hester', 'Arvis', 'Corbit', 'Tarvaris', 'Audra', 'Cloud', 'Taveon', 'Balian', 'Bodi', 'Brodee', 'Kainan', 'Dezi', 'Devesh', 'Emad', 'Esa', 'Massie', 'Moir', 'Markavious', 'Veachel', 'Dalan', 'Carles', 'Antawn', 'Jermichael', 'Talin', 'Sy', 'Murrel', 'Elster', 'Kru', 'Okley', 'Maverik', 'Diangelo', 'Burns', 'Jamaris', 'Jayshaun', 'Dantae', 'Rahil', 'Renny', 'Rohith', 'Strummer', 'Birchel', 'Astor', 'Nolyn', 'Neeko', 'Reyan', 'Kailan', 'Jaideep', 'Manveer', 'Maeson', 'Khris', 'Lancelot', 'Shaunak', 'Shubham', 'Siaosi', 'Ruslan', 'Sajan', 'Renwick', 'Yann', 'Vitali', 'Zealand', 'Vyom', 'Xabi', 'Yazid', 'Terrelle', 'Oaks', 'Kache', 'Arjuna', 'Cephas', 'Holmes', 'Rockie', 'Elray', 'Doc', 'Mell', 'Tyresse', 'Maguire', 'Sheddrick', 'Loney', 'Helaman', 'Andrus', 'Asberry', 'Love', 'Clebert', 'Cashius', 'Egypt', 'Devansh', 'Elige', 'Tobe', 'Taten', 'Arias', 'Leandrew', 'Dekota', 'Varian', 'Lehi', 'Colbert', 'Ignace', 'Suhas', 'Syris', 'Ahan', 'Aithan', 'Aiven', 'Akshath', 'Hamp', 'Kato', 'Leeon', 'Reubin', 'Lukah', 'Wilmon', 'Tait', 'Theophilus', 'Sion', 'Maysen', 'Nicoli', 'Nason', 'Mykell', 'Montae', 'Laszlo', 'Lestat', 'Prithvi', 'Maxi', 'Mekhai', 'Hammond', 'Atiksh', 'Aldean', 'Aldine', 'Jedi', 'Almond', 'Edahi', 'Hisham', 'Clide', 'Cosby', 'Hayato', 'Harnoor', 'Gurjot', 'Ethridge', 'Dublin', 'Daimian', 'Derreon', 'Hansell', 'Mae', 'Semisi', 'Ulysess', 'Render', 'Eschol', 'Rodell', 'Atzin', 'Alik', 'Amiri', 'Keyvon', 'Noland', 'Terius', 'Isauro', 'Harshith', 'Pledger', 'Tilman', 'Lennis', 'Jovin', 'Jaymin', 'Jaydee', 'Asbury', 'Lovie', 'Mcclinton', 'Grayton', 'Cardin', 'Jacey', 'Gurveer', 'Ethanmatthew', 'Aaronjames', 'Ascher', 'Aarion', 'Windle', 'Jahan', 'Jayen', 'Jatin', 'Jedrek', 'Anthonyjr', 'Dabney', 'Galvin', 'Ilijah', 'Gohan', 'Quaid', 'Teancum', 'Chael', 'Chetan', 'Cylis', 'Manas', 'Logun', 'Karston', 'Mickeal', 'Iskander', 'Isaah', 'Aryaman', 'Juvens', 'Joncarlo', 'Gurkirat', 'Laymon', 'Salesi', 'Rion', 'Tao', 'Tadhg', 'Stephens', 'Terryl', 'Jacquan', 'Zubin', 'Yul', 'Yadriel', 'Dolph', 'Keiden', 'Koston', 'Demetre', 'Kameren', 'Kaedin', 'Zebedee', 'Tyrie', 'Truth', 'Lanorris', 'Tilden', 'Tidus', 'Thelonious', 'Tennessee', 'Sirius', 'Pervis', 'Saatvik', 'Adley', 'Amarian', 'Numa', 'Bronco', 'Zian', 'Zephan', 'Yaziel', 'Ajit', 'Arick', 'Ollin', 'Kort', 'Tayvin', 'Grason', 'Leonid', 'Nihaal', 'Koah', 'Southern', 'Kavish', 'Joeziah', 'Javi', 'Kaiyan', 'Kyro', 'Ziad', 'Maxen', 'Xion', 'Mica', 'Mansour', 'Matteus', 'Renner', 'Jonan', 'Shilo', 'Josedaniel', 'Kaj', 'Robel', 'Krithik', 'Lautaro', 'Evann', 'Carden', 'Nathaneal', 'Wirt', 'Kile', 'Kevonte', 'Jazz', 'Vardan', 'Tanav', 'Tamim', 'Ojani', 'Raydel', 'Rigel', 'Sheamus', 'Cameryn', 'Jedd', 'Dalessandro', 'Daejon', 'Zacheriah', 'Jt', 'Valeria', 'Treshon', 'Martynas', 'Markeese', 'Ladislado', 'Fidensio', 'Cincere', 'Amonte', 'Erion', 'Emin', 'Tayten', 'Zachory', 'Ysidoro', 'Treshaun', 'Franciszek', 'Adit', 'Neftaly', 'Kaylan', 'Dezmon', 'Joby', 'Terrick', 'Irma', 'Isiaha', 'Micha', 'Sylvia', 'Dejan', 'Kippy', 'Tyreece', 'Corie', 'Martese', 'Senovio', 'Lexus', 'Freddrick', 'Jemarcus', 'Kuba', 'Corion', 'Andrian', 'Romualdo', 'Lyndal', 'Kalem', 'Laderrick', 'Jobin', 'Chaise', 'Naren', 'Reynol', 'Ohm', 'Trashawn', 'Danyell', 'Diron', 'Kameran', 'Dunte', 'Ikechukwu', 'Trendon', 'Visente', 'Valeriano', 'Dillian', 'Chantz', 'Bacilio', 'Crescencio', 'Policarpio', 'Janice', 'Kem', 'Rutilio', 'Jaqualin', 'Kendon', 'Keevin', 'Adelaido', 'Coltan', 'Theodoro', 'Devondre', 'Dekendrick', 'Deionte', 'Taz', 'Jimmey', 'Cristan', 'Chancelor', 'Ascension', 'Kemon', 'Makari', 'Cordel', 'Colbey', 'Ambrocio', 'Marselino', 'Dewain', 'Graciano', 'Gumecindo', 'Lorenso', 'Quaylon', 'Halbert', 'Celedonio', 'Terrin', 'Zuri', 'Sherod', 'Ermal', 'Elisa', 'Larnell', 'Tully', 'Wenceslaus', 'Lashun', 'Duan', 'Correy', 'Wilburt', 'Antwoin', 'Lynell', 'Ramond', 'Victorio', 'Antion', 'Dragan', 'Priest', 'Marice', 'Laroy', 'Ninos', 'Byrl', 'Ebert', 'Keita', 'Dimitris', 'Zoran', 'Khaalis', 'Rollo', 'Alwin', 'Loraine', 'Jerard', 'Lyndle', 'Quirino', 'Ramey', 'Jarian', 'Marky', 'Adlai', 'Shamon', 'Treyshawn', 'Shaft', 'Gumercindo', 'Rita', 'Derryl', 'Chancy', 'Kacy', 'Jonothan', 'Ruston', 'Ranulfo', 'Talik', 'Johntae', 'Kendale', 'Diandre', 'Reginold', 'Tyris', 'Davell', 'Ladell', 'Raymone', 'Mariusz', 'Edvardo', 'Joh', 'Lavontae', 'Markie', 'Laquinton', 'Alexandar', 'Divante', 'Jabin', 'Shawon', 'Jawann', 'Ladd', 'Khali', 'Gilmore', 'Oliverio', 'Thuan', 'Daiel', 'Kierre', 'Javar', 'Stevon', 'Derius', 'Chadley', 'Manual', 'Johnaton', 'Lc', 'Erek', 'Jakaden', 'Jden', 'Drayke', 'Dawsen', 'Jadarion', 'Shriyans', 'Raylin', 'Kaydan', 'Lynden', 'Duard', 'Elo', 'Amarius', 'Cleburne', 'Dailen', 'Brance', 'Braycen', 'Daiden', 'Cruzito', 'Caedyn', 'Aizik', 'Abyan', 'Felisiano', 'Taevion', 'Zaeden', 'Zadrian', 'Fredie', 'Burnis', 'Cleave', 'Ki', 'Quandarius', 'Quavion', 'Makell', 'Myrl', 'Tae', 'Melik', 'Samarion', 'Branton', 'Vollie', 'Reynolds', 'Draylon', 'Keivon', 'Kevontae', 'Deundre', 'Zaydrian', 'Zaydan', 'Jotham', 'Janthony', 'Sahid', 'Keilon', 'Lain', 'Kenechukwu', 'Kanaan', 'Kamdon', 'Ahmod', 'Dong', 'Nnamdi', 'Jontavius', 'Kelijah', 'Searcy', 'Wheeler', 'Francisca', 'Burrel', 'Zyquavious', 'Kortez', 'Tres', 'Tranquilino', 'Guinn', 'Hiawatha', 'Jasiyah', 'Arlos', 'Celestine', 'Deadrian', 'Chinedu', 'Cane', 'Caedon', 'Gabryel', 'Garon', 'Solon', 'Udell', 'Medardo', 'Chon', 'Zakk', 'Trip', 'Somtochukwu', 'Shooter', 'Frutoso', 'Laurencio', 'Izayiah', 'Franko', 'Izzak', 'Braelan', 'Dryden', 'Wilborn', 'Newt', 'Petronilo', 'Nathanel', 'Jatavius', 'Locadio', 'Tyquez', 'Laiden', 'Allister', 'Javarion', 'Demarrio', 'Shenouda', 'Rodriques', 'Jenard', 'Azarias', 'Axxel', 'Ariyan', 'Pate', 'Raidyn', 'Saylor', 'Kreed', 'Kayce', 'Bray', 'Zyren', 'Zayvien', 'Yeiden', 'Kinta', 'Trampus', 'Lofton', 'Zayvian', 'Zaydon', 'Zaidan', 'Weslee', 'Robben', 'Rook', 'Roston', 'Trigger', 'Steel', 'Rustyn', 'Jaeceon', 'Hutton', 'Hatcher', 'Kartier', 'Kallan', 'Daxtyn', 'Corvin', 'Deklyn', 'Kaveon', 'Leviathan', 'Leelan', 'Lael', 'Prynce', 'Korban', 'Khyren', 'Omran', 'Oluwademilade', 'Orenthal', 'Dequavius', 'Quinterrius', 'Quantavis', 'Astin', 'Asaiah', 'Dace', 'Brylee', 'Kenyan', 'Jeovani', 'Kolson', 'Dreyden', 'Jujuan', 'Gregoria', 'Abdon', 'Javious', 'Latravious', 'Nanya', 'Kaleel', 'Elsie', 'Iris', 'Javarus', 'Hunner', 'Ebubechukwu', 'Ashaz', 'Huntley', 'Montravious', 'Argelio', 'Amaar', 'Abdulmalik', 'Deronte', 'Ramirez', 'Travius', 'Xavius', 'Rashamel', 'Martice', 'Oshay', 'Jamerson', 'Derico', 'Benino', 'Otilio', 'Palani', 'Trystin', 'Domonick', 'Jayron', 'Josephine', 'Dora', 'Larence', 'Feliz', 'Tereso', 'Natalio', 'Olga', 'Bralen', 'Temple', 'Keala', 'Anita', 'Eathen', 'Lamond', 'Jakobie', 'Johnthan', 'Elnathan', 'Edris', 'Alcario', 'Cornie', 'Ival', 'Pantaleon', 'Deavion', 'Daevion', 'Dorance', 'Jailon', 'Ragene', 'Kaena', 'Kaimipono', 'Keaka', 'Kiai', 'Babygirl', 'Aukai', 'Kaitlin', 'Kaikoa', 'Jedadiah', 'Pono', 'Layth', 'Kolbie', 'Naaman', 'Pacey', 'Jearld', 'Corinthian', 'Bryceson', 'Kayzen', 'Mana', 'Janee', 'Janae', 'Kelli', 'Tamara', 'Nora', 'Sophie', 'Rashida', 'Princess', 'Lakeisha', 'Nadia', 'Monet', 'Meaghan', 'Marquita', 'Chiquita', 'Charlotte', 'Chantelle', 'Cassandra', 'Cara', 'Brandi', 'Asha', 'Tatiana', 'Haaheo', 'Valerie', 'Valencia', 'Shoso', 'Yoshimi', 'Bristol', 'Mikio', 'Nobuyuki', 'Tomio', 'Kazumi', 'Kunio', 'Yoshiharu', 'Balentin', 'Paublo', 'Nobuyoshi', 'Toshiaki', 'Matsuo', 'Hachiro', 'Tokio', 'Eichi', 'Manabu', 'Masanori', 'Yoshiyuki', 'Tokuo', 'Eustolio', 'Petra', 'Fukuichi', 'Haruyoshi', 'Eastin', 'Keygan', 'Kelin', 'Lalo', 'Ramona', 'Felis', 'Rodgers', 'Deigo', 'Guerin', 'Arrington', 'Bradin', 'Aurora', 'Calistro', 'Ervie', 'Velma', 'Whit', 'Adarian', 'Jakevion', 'Jadrien', 'Calub', 'Kaegan', 'Jamorian', 'Milam', 'Usiel', 'Drayven', 'Orange', 'Daelon', 'Jatavion', 'Vastine', 'Preciliano', 'Floyce', 'Billye', 'Burney', 'Consepcion', 'Dason', 'Osie', 'Tashaun', 'Sajid', 'Umair', 'Tymarion', 'Jakorian', 'Ginobili', 'Areeb', 'Jonovan', 'Jonavan', 'Jaqualyn', 'Billey', 'Luisgustavo', 'Lamario', 'Telford', 'Lekendrick', 'Brinton', 'Lebarron', 'Marrio', 'Tyshun', 'Kendarrius', 'Zylan', 'Jarrius', 'Kadarrius', 'Marvis', 'Orie', 'Kimber', 'Jakevious', 'Shawndale', 'Jakel', 'Jaquarious', 'Deakon', 'Brevan', 'Rochester', 'Lemmie', 'Athony', 'Rosie', 'Lillie', 'Mozell', 'Aubert', 'Kimble', 'Jaymon', 'Gaza', 'Lysle', 'Wasco', 'Zigmond', 'Addie', 'Erastus', 'Claudius', 'Audley', 'Thadeus', 'Exum', 'Caldwell', 'Emmert', 'Teagen', 'Royden', 'Mykale', 'Lindberg', 'Elmon', 'Norfleet', 'Radford', 'Romulus', 'Thedore', 'Cor', 'Ledarrius', 'Cyncere', 'Hurbert', 'Pearly', 'Jobie', 'Garvey', 'Meade', 'Casmir', 'Bertie', 'Belvin', 'Lynford', 'Verdun', 'Junie', 'Dover', 'Harlee', 'Romolo', 'Sirr', 'Bradey', 'Kingsten', 'Manuelito', 'Leno', 'Primo', 'Antonie', 'Jane', 'Halsey', 'Mujahid', 'Quron', 'Cleophas', 'Amedio', 'Gildo', 'Norvel', 'Livingston', 'Norvell', 'Fard', 'Khaleef', 'Dorr', 'Chaquille', 'Giro', 'Verdell', 'Concetto', 'Taevon', 'Amato', 'Hasaan', 'Burr', 'Payten', 'Baden', 'Abdirizak', 'Emran', 'Abdurahman', 'Greig', 'Sabree', 'Shymir', 'Haakon', 'Aasim', 'Abdifatah', 'Cheemeng', 'Yuepheng', 'Hamsa', 'Abdalla', 'Samatar', 'Joshawa', 'Leeman', 'Hershal', 'Fayette', 'Patty', 'Thom', 'Yaaseen', 'Alven', 'Hillis', 'Bard', 'Nymir', 'Imir', 'Mohamud', 'Muaad', 'Mickael', 'Hermann', 'Varner', 'Norm', 'Suheyb', 'Eivin', 'Jamy', 'Taro', 'Caydin', 'Masaharu', 'Cassie', 'Virgie', 'Oddie', 'Pamela', 'Emmette', 'Rayshon', 'Vardaman', 'Ruble', 'Clance', 'Rigdon', 'Osborn', 'Gina', 'Rozell', 'Marcy', 'Farron', 'Bartolomeo', 'Dierre', 'Demetrus', 'Yoneo', 'Blayke', 'Decarlo', 'Sebert', 'Quayon', 'Nihar', 'Segundo', 'Ritik', 'Aljaquan', 'Lealon', 'Opie', 'Darshan', 'Trapper', 'Ladarrion', 'Thaine', 'Abanoub', 'Filipe', 'Oley', 'Zaylan', 'Rushi', 'Watie', 'Cleatus', 'Harshil', 'Alferd', 'Carthel', 'Ogden', 'Carmin', 'Hiren', 'Harl', 'Drexel', 'Shadeed', 'Malvern', 'Argus', 'Sharief', 'Almalik', 'Audy', 'Terral', 'Nuno', 'Verna', 'Alim', 'Sherron', 'Terek', 'Clardie', 'Shadee', 'Clendon', 'Johnpatrick', 'Chritopher', 'Taheem', 'Jahid', 'Waitman', 'Jabraylen', 'Quasim', 'Azim', 'Eulis', 'Wladyslaw', 'Delmus', 'Minter', 'Kharter', 'Zavhary', 'Taji', 'Hoskie', 'Colsen', 'Orlanda', 'Shawntez', 'Obryan', 'Emanual', 'Silviano', 'Chrishawn', 'Rayon', 'Martino', 'Fairley', 'Lenward', 'Autzen', 'Selby', 'Odus', 'Redell', 'Seavy', 'Dennison', 'Jamiere', 'Rondy', 'Donold', 'Lindwood', 'Laudie', 'Obert', 'Jahki', 'Braidon', 'Zalen', 'Zymier', 'Jahzir', 'Nahsir', 'Vikrant', 'Shourya', 'Eliyohu', 'Tyheim', 'Keyshon', 'Kaydence', 'Ekin', 'Tresean', 'Quendarius', 'Shammond', 'Malakye', 'Findlay', 'Ashrith', 'Elfego', 'Jalik', 'Nyzir', 'Boe', 'Abdikadir', 'Jameek', 'Gyasi', 'Khyri', 'Mohit', 'Shayquan', 'Sivan', 'Steffon', 'Lord', 'Leor', 'Kujtim', 'Haaris', 'Rafid', 'Nechemia', 'Nyles', 'Khalik', 'Tysheen', 'Shaheim', 'Starling', 'Taiquan', 'Takeem', 'Teshawn', 'Tuvia', 'Shu', 'Schyler', 'Indalecio', 'Edouard', 'Alverto', 'Alexio', 'Aurash', 'Fabiola', 'Firas', 'Fredis', 'Guthrie', 'Babacar', 'Ayinde', 'Khallid', 'Shadrach', 'Rikki', 'Prescott', 'Saam', 'Perla', 'Michell', 'Markis', 'Nou', 'Sher', 'Tor', 'Kyre', 'Shykeem', 'Jilberto', 'Klye', 'Jeramey', 'Herber', 'Kue', 'Mainor', 'Macaulay', 'Jequan', 'Bond', 'Hykeem', 'Husam', 'Catalina', 'Danh', 'Aaronmichael', 'Anthonyjames', 'Jerrid', 'Jobani', 'Kenia', 'Oshae', 'Michaelvincent', 'Mong', 'Dawit', 'Dabid', 'Daisuke', 'Geddy', 'Ehab', 'Jarmal', 'Caelin', 'Barak', 'Gurtej', 'Geordan', 'Jacobb', 'Estefani', 'Esaul', 'Karandeep', 'Jevaughn', 'Kassim', 'Kion', 'Vikas', 'Infinite', 'Yekusiel', 'Zohaib', 'Yaw', 'Sakib', 'Shah', 'Zeshan', 'Hassaan', 'Masai', 'Mattheus', 'Jeniel', 'Martine', 'Maalik', 'Jeanclaude', 'Stirling', 'Trayveon', 'Paymon', 'Ajai', 'Habib', 'Enis', 'Grafton', 'Nissan', 'Oshane', 'Mirza', 'Malike', 'Yianni', 'Zachari', 'Tadeh', 'Patrik', 'Richy', 'Riki', 'Yao', 'Yadira', 'Nylan', 'Lennard', 'Roldan', 'Admir', 'Oniel', 'Addam', 'Itzel', 'Ivann', 'Shabab', 'Honorio', 'Hrag', 'Harutun', 'Keano', 'Kayvan', 'Takahiro', 'Juanfrancisco', 'Eri', 'Ermon', 'Ramzy', 'Selma', 'Kasean', 'Obrian', 'Jonatha', 'Jonahtan', 'Davione', 'Chandara', 'Chantha', 'Lo', 'Loreto', 'Derell', 'Ganesh', 'Janathan', 'Alejandr', 'Rodolphe', 'Isaul', 'Bejan', 'Doron', 'Yvette', 'Erlon', 'Erland', 'Yuji', 'Milagro', 'Ndrew', 'Pedram', 'Thinh', 'Vandy', 'Vi', 'Ryanjoseph', 'Richar', 'Hosey', 'Adeel', 'Nicholos', 'Michaeljohn', 'Philipe', 'Bravlio', 'Anup', 'Davide', 'Daquann', 'Lequan', 'Raymel', 'Rahsean', 'Woodley', 'Jarmel', 'Wiliam', 'Joseh', 'Somnang', 'Colvin', 'Jenkins', 'Jaquawn', 'Javonne', 'Javed', 'Joelle', 'Lameek', 'Kishawn', 'Krikor', 'Christipher', 'Ghassan', 'Essa', 'Hovig', 'Nayquan', 'Shawndell', 'Rawle', 'Marwin', 'Record', 'Dmario', 'Crist', 'La', 'Access', 'Shaquel', 'Tyrrell', 'Tiquan', 'Shavon', 'Shatique', 'Yochanon', 'Keontay', 'Shaquelle', 'Kshawn', 'Armend', 'Eliazer', 'Diony', 'Saddam', 'Takayuki', 'Sukhdeep', 'Shahan', 'Valon', 'Orel', 'Tremell', 'Chayim', 'Jaquille', 'Ayodeji', 'Bekim', 'Besnik', 'Oluwanifemi', 'Stalin', 'Sadam', 'Aniel', 'Laureat', 'Dyrell', 'Jhony', 'Barkim', 'Ludger', 'Mahendra', 'Kadeen', 'Jovaughn', 'Khadeem', 'Ardian', 'Ravindra', 'Harpal', 'Jatinder', 'Erving', 'Gerrell', 'Sylvestre', 'Luismanuel', 'Pharell', 'Jahziah', 'Salif', 'Jakyrin', 'Idrissa', 'Daoud', 'Swan', 'Pryor', 'Polk', 'Rameses', 'Prateek', 'Lelon', 'Ebrima', 'Ezechiel', 'Tevan', 'Sohail', 'Luiseduardo', 'Clearance', 'Brayn', 'Alexsis', 'Edwar', 'Johnmark', 'Hikaru', 'Edon', 'Chezkel', 'Dinari', 'Ahmadou', 'Jadien', 'Ismaeel', 'Heshy', 'Jhan', 'Dejohn', 'Ajdin', 'Damier', 'Cashmere', 'Amitai', 'Alp', 'Avrahom', 'Hooper', 'Daichi', 'Dariush', 'Bryen', 'Oseas', 'Moyses', 'Alderic', 'Dickson', 'Joon', 'Justinkyle', 'Jassiah', 'Jaidin', 'Lexie', 'Mieczyslaw', 'Joffre', 'Augustino', 'Adelino', 'Tadeusz', 'Humphrey', 'Lonas', 'Avry', 'Tylin', 'Dixie', 'Goldman', 'Yissachar', 'Toure', 'Yafet', 'Siraj', 'Nasiah', 'Maor', 'Roniel', 'Kerim', 'Danieljr', 'Django', 'Lion', 'Baruc', 'Cervando', 'Akul', 'Abdi', 'Ameya', 'Arhan', 'Aliou', 'Arcangel', 'Avrumy', 'Deandrea', 'Dontreal', 'Yossef', 'Walden', 'Tameem', 'Kenderick', 'Yassine', 'Zeyad', 'Riyad', 'Kashmere', 'Tevis', 'Malichi', 'Malakhai', 'Yulian', 'Clearnce', 'Esco', 'Fabrizzio', 'Gianpaolo', 'Jaskirat', 'Termaine', 'Daouda', 'Abba', 'Aaban', 'Chanoch', 'Raynell', 'Ihsan', 'Djibril', 'Cassiel', 'Ishaq', 'Azlan', 'Behruz', 'Amirjon', 'Anisjon', 'Asadbek', 'Dhilan', 'Dream', 'Daviel', 'Mosha', 'Rayane', 'Shabsi', 'Olie', 'Vinicio', 'Yuda', 'Shohjahon', 'Kylematthew', 'Kien', 'Matthewjames', 'Giorgi', 'Konstantine', 'Jibreel', 'Jadriel', 'Lliam', 'Travonte', 'Taiki', 'Rendell', 'Wyland', 'Arafat', 'Tajon', 'Loic', 'Shaw', 'Sukhman', 'Randiel', 'Stefanos', 'Lukus', 'Majesty', 'Massimiliano', 'Burach', 'Jansel', 'Ismaila', 'Henoch', 'Daelin', 'Giordano', 'Huber', 'Rontrell', 'Simran', 'Majid', 'Rayjon', 'Pharoah', 'Lamine', 'Hanoch', 'Chidi', 'Jahmani', 'Javid', 'Kamani', 'Endrit', 'Endy', 'Nasean', 'Danyael', 'Cinque', 'Akaash', 'Zeeshan', 'Amel', 'Adib', 'Aboubakar', 'Artan', 'Burak', 'Serigne', 'Samin', 'Hovsep', 'Jomari', 'Cesareo', 'Dajohn', 'Charbel', 'Bakary', 'Camerin', 'Jaquel', 'Pape', 'Jahrel', 'Jahrell', 'Khadim', 'Jeison', 'Yobany', 'Zaul', 'Taryn', 'Abou', 'Besim', 'Abdur', 'Ebrahim', 'Albi', 'Haadi', 'Saba', 'Wen', 'Felipedejesus', 'Dragon', 'Jamiel', 'Alecxis', 'Ashkon', 'Tejon', 'Meelad', 'Renan', 'Brailyn', 'Harel', 'Abdou', 'Amier', 'Jonathanjoseph', 'Juanalberto', 'Larenz', 'Nerses', 'Emmanuelle', 'Jasmeet', 'Jahred', 'Elsworth', 'Nyshawn', 'Alexes', 'Cranford', 'Trenell', 'Cephus', 'Costas', 'Rama', 'Nickalas', 'Moultrie', 'Deklin', 'Saafir', 'Alexie', 'Kajuan', 'Jamahl', 'Robet', 'Antoin', 'Turhan', 'Mart', 'Richrd', 'Ante', 'Bransyn', 'Dargan', 'Levan', 'Milledge', 'Ollis', 'Morey', 'Jeromey', 'Ebon', 'Nicholus', 'Yvonne', 'Gladstone', 'Kwan', 'Sherry', 'Romney', 'Nicolaos', 'Oded', 'Koty', 'Mandy', 'Adger', 'Esaw', 'Shaunte', 'Nimesh', 'Ahren', 'Marcellino', 'Attila', 'Pinkney', 'Reinhard', 'Deanna', 'Shanti', 'Calmer', 'Reda', 'Darral', 'Monserrate', 'Levert', 'Harce', 'Ayham', 'Breslin', 'Dom', 'Darrow', 'Haidar', 'Willaim', 'Shann', 'Regina', 'Einer', 'Zui', 'Shonn', 'Skipper', 'Henning', 'Jacek', 'Wendelin', 'Wilmar', 'Algot', 'Marlen', 'Dquan', 'Emanuele', 'Erol', 'Boby', 'Elbin', 'Londell', 'Bradd', 'Malo', 'Mohamadali', 'Toussaint', 'Roald', 'Trini', 'Stace', 'Erubey', 'Labron', 'Kyseem', 'Duong', 'Rande', 'Siegfried', 'Mamon', 'Va', 'Quy', 'Raman', 'Ramil', 'Jasai', 'Carla', 'Belen', 'Lawernce', 'Jemar', 'Markham', 'Kym', 'Jemaine', 'Baldwin', 'Damany', 'Timonthy', 'Tesfa', 'Vinod', 'Albertus', 'Yupheng', 'Danie', 'Tashiem', 'Uno', 'Onnie', 'Juliana', 'Duff', 'Doua', 'Orman', 'Kamaal', 'Godwin', 'Ulric', 'Darrold', 'Rennie', 'Lory', 'Jamile', 'Terril', 'Gable', 'Hanh', 'Grisel', 'Jimmylee', 'Mikkel', 'Victorino', 'Jaymere', 'Rayn', 'Duriel', 'Ceferino', 'Autrey', 'Durant', 'Kolsen', 'Abayomi', 'Azell', 'Spyros', 'Ato', 'Damin', 'Diogenes', 'Barnaby', 'Pinckney', 'Keno', 'Sherard', 'Chukwuemeka', 'Akin', 'Harvel', 'Marv', 'Kenyetta', 'Huel', 'Royzell', 'Luddie', 'Olden', 'Ardith', 'Branch', 'Bertha', 'Hillman', 'Namon', 'Donnis', 'Fitzhugh', 'Lavaughn', 'Lucille', 'Amanuel', 'Carvin', 'Minnie', 'Tivis', 'Birt', 'Bronner', 'Vaden', 'Joenathan', 'Alphonsa', 'Elvie', 'Alpheus', 'Clausell', 'Clayburn', 'Demetrias', 'Avis', 'Garlon', 'Romaine', 'Jamorris', 'Swanson', 'Perez', 'Hurschel', 'Virge', 'Rutherford', 'Lelton', 'Tarris', 'Denson', 'Benjaman', 'Rashun', 'Keino', 'Cedarius', 'Keanthony', 'Blakeley', 'Burwell', 'Kasai', 'Euell', 'Eldrick', 'Ashford', 'Demetruis', 'Wood', 'Blanton', 'Daniell', 'Robt', 'Lamorris', 'Waller', 'Devoris', 'Herley', 'Jermery', 'Jamicheal', 'Horton', 'Gradie', 'Etheridge', 'Millie', 'Jammy', 'Karey', 'Rodregus', 'Cordera', 'Embry', 'Forney', 'Sims', 'Gergory', 'Rosser', 'Benjamine', 'Erskin', 'Heflin', 'Torrie', 'Norville', 'Arvie', 'Bessie', 'Keonta', 'Tarrence', 'Chapman', 'Limmie', 'Tavius', 'Reynard', 'Lonza', 'Detroit', 'Camauri', 'Clanton', 'Obbie', 'Mizell', 'Marshel', 'Tollie', 'Jondarius', 'Therion', 'Antoino', 'Beatrice', 'Keyonte', 'Littleton', 'Hozie', 'Atwell', 'Ottie', 'Pelham', 'Vickie', 'Cederick', 'Zaykeese', 'Jadarious', 'Shin', 'Tizoc', 'Mischa', 'Tycen', 'Jubal', 'Kito', 'Sabin', 'Brannan', 'Baltasar', 'Hilda', 'Orasio', 'Bassel', 'Ameet', 'Talus', 'Renne', 'Reuel', 'Saro', 'Kam', 'Heliodoro', 'Hodari', 'Mondo', 'Damaso', 'Damein', 'Thunder', 'Ravinder', 'Remberto', 'Rodel', 'Yvan', 'Marcelle', 'Kiril', 'Shem', 'Bardo', 'Carlson', 'Jebediah', 'Austreberto', 'Hannibal', 'Shawnn', 'Kenyatte', 'Geoffry', 'Hadden', 'Natnael', 'Edurdo', 'Errik', 'Eva', 'Gaelan', 'Gilverto', 'Antwaine', 'Barclay', 'Rithy', 'Sarath', 'Sasan', 'Stefen', 'Susana', 'Le', 'Mai', 'Marquies', 'Neeraj', 'Galdino', 'Cuitlahuac', 'Griselda', 'Jerret', 'Filbert', 'Travone', 'Lizette', 'Lourdes', 'Ratana', 'Sarith', 'Ku', 'Jocob', 'Jushua', 'Shaughn', 'Sophal', 'Sophana', 'Stepan', 'Tramel', 'Veniamin', 'Ha', 'Halley', 'Hiep', 'Maclain', 'Alberta', 'Alejando', 'Eliana', 'Chay', 'Esmond', 'Frisco', 'Dai', 'Marta', 'Man', 'Kha', 'Kin', 'Sun', 'Paulmichael', 'Rj', 'Jeoffrey', 'Custodio', 'Herberth', 'Gerrad', 'Seanpaul', 'Sten', 'Nereida', 'Jasun', 'Micharl', 'Robbert', 'Ronnel', 'Rosio', 'Othon', 'Chau', 'Hart', 'Atthew', 'Angelito', 'Debbie', 'Randol', 'Jeffrie', 'Kern', 'Rohn', 'Raef', 'Arleigh', 'Jef', 'Reg', 'Vinton', 'Perrin', 'Parry', 'Sally', 'Hoby', 'Vint', 'Dagmawi', 'Mat', 'Gregrey', 'Darol', 'Merik', 'Rickard', 'Clete', 'Fredrik', 'Darrol', 'Lyall', 'Jamare', 'Duffy', 'Barre', 'Shawnee', 'Tige', 'Whittaker', 'Tyrion', 'Jamas', 'Jud', 'Spence', 'Dione', 'Erinn', 'Bron', 'Ackley', 'Dal', 'Monti', 'Paco', 'Kjell', 'Gabor', 'Davinder', 'Shonte', 'Maximiano', 'Heshimu', 'Jassen', 'Jerami', 'Jermon', 'Keefe', 'Keri', 'Daric', 'Christropher', 'Johnney', 'Dodd', 'Wilferd', 'Raymondo', 'Keary', 'Orlan', 'Gerhart', 'Clemence', 'Pepe', 'Whitaker', 'Vaughan', 'Wess', 'Abenezer', 'Miroslav', 'Kurk', 'Helmut', 'Timothey', 'Annette', 'Cruise', 'Jahel', 'Itay', 'Isaiahs', 'Isack', 'Eagan', 'Finbar', 'Famous', 'Ethanjoseph', 'Ethanjames', 'Edi', 'Isais', 'Albeiro', 'Abhijot', 'Joshuajames', 'Amine', 'Edwardjames', 'Donyae', 'Danieljohn', 'Avaneesh', 'Aryav', 'Andoni', 'Yeison', 'Lowen', 'Obi', 'Mycah', 'Moksh', 'Miliano', 'Maxamillion', 'Lazlo', 'Jocsan', 'Jibran', 'Jerimyah', 'Jefte', 'Korde', 'Kanav', 'Tavita', 'Taesean', 'Yoltzin', 'Xzavior', 'Vibhav', 'Romen', 'Rocket', 'Rai', 'Orian', 'Rumi', 'Shota', 'Shaheer', 'Sadrac', 'Semaje', 'Sohrob', 'Yuval', 'Yuren', 'Yannis', 'Vineet', 'Yarden', 'Jesusjr', 'Kartik', 'Jairon', 'Millen', 'Nahun', 'Krisna', 'Kyrese', 'Mher', 'Mayan', 'Kais', 'Joshuan', 'Jometh', 'Keawe', 'Siris', 'Sinai', 'Shuban', 'Shian', 'Sneijder', 'Sota', 'Uday', 'Sevak', 'Royale', 'Yuuki', 'Reyhan', 'Seena', 'Moisses', 'Nayib', 'Sumit', 'Dayveon', 'Christianpaul', 'Garrin', 'Edgerrin', 'Edrees', 'Estephan', 'Assael', 'Azad', 'Tydus', 'Yosuf', 'Zekiel', 'Strider', 'Senai', 'Edmar', 'Dmorea', 'Eman', 'Darran', 'Keston', 'Keny', 'Hardeep', 'Heladio', 'Hernesto', 'Hovannes', 'Sankalp', 'Brenten', 'Navraj', 'Mavrik', 'Nilmar', 'Rishit', 'Edwing', 'Eswin', 'Flabio', 'Jasn', 'Romar', 'Sevan', 'Shahab', 'Justinmichael', 'Joseandres', 'Marcelus', 'Mariana', 'Andhy', 'Angeles', 'Tannor', 'Tristain', 'Joshuaray', 'Luisdavid', 'Damaris', 'Daymond', 'Anthonyjohn', 'Dezhon', 'Emelio', 'Eulices', 'Maclean', 'Jaeson', 'Ethanjohn', 'Ethanjacob', 'Jasiri', 'Kaisei', 'Khyle', 'Jona', 'Jeren', 'Jeramyah', 'Jesusantonio', 'Jguadalupe', 'Joseeduardo', 'Elkin', 'Prashant', 'Anguel', 'Anant', 'Aisea', 'Abhimanyu', 'Daelen', 'Dylin', 'Dodge', 'Nazaret', 'Mikie', 'Matthewjoseph', 'Maximillan', 'Savir', 'Dhillon', 'Donoven', 'Ebin', 'Edrei', 'Elek', 'Nykolas', 'Nikash', 'Nik', 'Reyly', 'Razi', 'Presten', 'Arul', 'Avo', 'Yandell', 'Wynston', 'Tallen', 'Suhaib', 'Joshuajohn', 'Jesusmanuel', 'Malacai', 'Kethan', 'Londen', 'Larenzo', 'Kriss', 'Kohei', 'Hamlet', 'Martinjr', 'Mansoor', 'Archit', 'Aniketh', 'Kincaid', 'Lunden', 'Masaki', 'Salam', 'Sahith', 'Nour', 'Miqueas', 'Estefano', 'Hatim', 'Gurvir', 'Adeeb', 'Tobiah', 'Torrin', 'Tushar', 'Tyee', 'Sulayman', 'Takai', 'Tayo', 'Yoan', 'Vegas', 'Duilio', 'Dyami', 'Greko', 'Harim', 'Ioane', 'Ashmit', 'Bora', 'Alekxander', 'Alexanderjames', 'Amanpreet', 'Anthonny', 'Brandom', 'Daimon', 'Sirus', 'Seananthony', 'Vignesh', 'Vir', 'Wisdom', 'Rameen', 'Kenzie', 'Joshuamichael', 'Josejr', 'Joseenrique', 'Jacksen', 'Jeriko', 'Jesua', 'Myka', 'Naithen', 'Saurav', 'Shalim', 'Puneet', 'Denali', 'Daveyon', 'Sohil', 'Edilson', 'Jafeth', 'Nathin', 'Maurion', 'Mekai', 'Nadim', 'Jamani', 'Jamisen', 'Gared', 'Gahel', 'Emron', 'Hanzel', 'Xaviar', 'Yohann', 'Alam', 'Brasen', 'Ashlan', 'Rury', 'Ralphie', 'Robertanthony', 'Tomoki', 'Zamuel', 'Urian', 'Vinayak', 'Wilberth', 'Jazziel', 'Mizraim', 'Mosiah', 'Muneeb', 'Lennin', 'Chaitanya', 'Cyrille', 'Dilpreet', 'Bhargav', 'Captain', 'Camil', 'Jaion', 'Eithen', 'Dominyk', 'Domenik', 'Imad', 'Dabin', 'Ceejay', 'Avishek', 'Anoop', 'Aaronjoshua', 'Billal', 'Euan', 'Eion', 'Beauregard', 'Fouad', 'Chriss', 'Daimien', 'Cyan', 'Conall', 'Inigo', 'Jashan', 'Jaicob', 'Arek', 'Benjaminjoseph', 'Bodey', 'Andrewjames', 'Abdel', 'Alian', 'Artyom', 'Anik', 'Angeljesus', 'Shriyan', 'Sosaia', 'Shabd', 'Tayveon', 'Samik', 'Josephanthony', 'Kaushal', 'Gerardojr', 'Haile', 'Henok', 'Imer', 'Izaiha', 'Vedanth', 'Rishav', 'Praveen', 'Kenner', 'Juanjr', 'Kinan', 'Maven', 'Neven', 'Niccolas', 'Raynav', 'Rani', 'Noahjames', 'Nirvan', 'Nevaan', 'Naythen', 'Rhythm', 'Samyak', 'Sahas', 'Roczen', 'Kroy', 'Johanna', 'Miro', 'Mayank', 'Masson', 'Yamato', 'Xaden', 'Vin', 'Tyden', 'Gaudencio', 'Garreth', 'Toryn', 'Jaswinder', 'Stiles', 'Graciela', 'Rutger', 'Razmig', 'Keo', 'Kavir', 'Kalev', 'Kal', 'Kabeer', 'Jianni', 'Terrace', 'Vicken', 'Westly', 'Pardeep', 'Lizeth', 'Lucia', 'Mandela', 'Maricela', 'Joshus', 'Kayle', 'Klyde', 'Djavan', 'Wang', 'Aljandro', 'Belisario', 'Cristino', 'Yihan', 'Carina', 'Chritian', 'Juanramon', 'Khan', 'Jaiver', 'Nefi', 'Murtaza', 'Raciel', 'Marlene', 'Maira', 'Chima', 'Cheenou', 'Bijon', 'Dorion', 'Elber', 'Emeka', 'Ge', 'Ratha', 'Jaxxson', 'Ryanjames', 'Shannen', 'Shue', 'Sia', 'Romaldo', 'Zareh', 'Tomy', 'Vanna', 'Xao', 'Bertin', 'Dhyan', 'Dexton', 'Esiah', 'Ayce', 'Avyukt', 'Avner', 'Caspar', 'Cove', 'Ciel', 'Yen', 'Yessenia', 'Yony', 'Fin', 'Ezrael', 'Ezel', 'Ilay', 'Harveer', 'Hamad', 'Asiah', 'Ashwath', 'Arcenio', 'Aroldo', 'Awet', 'Alexx', 'Arihant', 'Arihaan', 'Apolo', 'Aero', 'Advith', 'Arren', 'Beatriz', 'Jony', 'Joseramon', 'Justinray', 'Jamaul', 'Tarren', 'Cristal', 'Dinh', 'Chantra', 'Dshawn', 'Geraldine', 'Fuad', 'Edlin', 'Jerren', 'Jerrin', 'Josje', 'Chrystopher', 'Darriel', 'Takuya', 'Vannak', 'Zenas', 'Miklos', 'Marten', 'Rondale', 'Rothana', 'Randeep', 'Ryle', 'Eduardoluis', 'Christepher', 'Davionne', 'Eriverto', 'Farbod', 'Chauncy', 'Charle', 'Bayardo', 'Ashneel', 'Shoua', 'Redmond', 'Ustin', 'Johnnathan', 'Josephmichael', 'Marisela', 'Markandrew', 'Michaeljoseph', 'Marcua', 'Nidal', 'Phat', 'Pritesh', 'Seaver', 'Ryananthony', 'Tyan', 'Vatche', 'Thoren', 'Othoniel', 'Nicandro', 'Rajdeep', 'Tulio', 'Soua', 'Jovonte', 'Kalyn', 'Jamesryan', 'Navdeep', 'Maxmillian', 'Kayon', 'Koua', 'Aaryn', 'Wilver', 'Zubair', 'Ankush', 'Andie', 'Adonnis', 'Jacobanthony', 'Izekiel', 'Izacc', 'Escher', 'Elijahjames', 'Edrik', 'Drayson', 'Dj', 'Giordan', 'Dejaun', 'Davidmichael', 'Deshone', 'Auron', 'Auguste', 'Athos', 'Cutberto', 'Hairo', 'Anvay', 'Adrick', 'Aydeen', 'Bassam', 'Basem', 'Kyrell', 'Rjay', 'Ozil', 'Taisei', 'Samanyu', 'Marvion', 'Mykael', 'Mukund', 'Namish', 'Naoki', 'Nishan', 'Aideen', 'Aalijah', 'Hassani', 'Harkirat', 'Exzavier', 'Hudsen', 'Hrach', 'Caelum', 'Caeleb', 'Destan', 'Jaspal', 'Huan', 'Marcellous', 'Mehran', 'Luisfelipe', 'Gelacio', 'Eris', 'Eneas', 'Terin', 'Sohrab', 'Ravneet', 'Uziah', 'Vedansh', 'Peni', 'Nethaniel', 'Niraj', 'Odilon', 'Kalden', 'Mariela', 'Levonte', 'Elih', 'Ej', 'Eames', 'Jarome', 'Jishnu', 'Gurtaaj', 'Hamish', 'Gryffin', 'Jayin', 'Trong', 'Sebastain', 'Sargon', 'Wa', 'Cheveyo', 'Ariv', 'Aum', 'Caellum', 'Bayan', 'Balthazar', 'Sagan', 'Rowyn', 'Sehaj', 'Ivon', 'Stavro', 'Shrihan', 'Noey', 'Oswin', 'Abrham', 'Adalid', 'Aldric', 'Zayed', 'Vonn', 'Vaishnav', 'Urias', 'Yahshua', 'Yago', 'Darith', 'Mantej', 'Kyo', 'Khyler', 'Marcjacob', 'Nayden', 'Morrissey', 'Benedicto', 'Kendrix', 'Xang', 'Ranjit', 'Raymar', 'Milos', 'Rayansh', 'Rawley', 'Paxon', 'Krishang', 'Leeam', 'Yerick', 'Yegor', 'Viren', 'Saathvik', 'Shailen', 'Sahaj', 'Rydan', 'Rollins', 'Rivaan', 'Soul', 'Aerick', 'Aladdin', 'Catalino', 'Berenice', 'Branndon', 'Kyleanthony', 'Maclovio', 'Kiven', 'Johnchristopher', 'Jonh', 'Kassandra', 'Jobanny', 'Pastor', 'Michaela', 'Montre', 'Morgen', 'Gerber', 'Danish', 'Haroutun', 'Duron', 'Adrion', 'Evrett', 'Reegan', 'Haskie', 'Quamane', 'Derrike', 'Haydyn', 'Glenville', 'Dearl', 'Deroe', 'Dewell', 'Lundy', 'Cleaster', 'Jeral', 'Delontae', 'Delford', 'Argie', 'Loise', 'Elmar', 'Donley', 'Ferrel', 'Carrel', 'Athel', 'Rector', 'Cledith', 'Dail', 'Donzel', 'Lenoard', 'Winferd', 'Birl', 'Dorsie', 'Olee', 'Erman', 'Dorsel', 'Roma', 'Othell', 'Herold', 'Chaffee', 'Trygve', 'Aubra', 'Opha', 'Dionne', 'Colleen', 'Ciara', 'Cleotis', 'Alissa', 'Alesha', 'Elise', 'Emilie', 'Tiera', 'Tia', 'Suzanne', 'Jaleesa', 'Jaclyn', 'Ingrid', 'India', 'Georgia', 'Francesca', 'Female', 'Fatima', 'Rochelle', 'Precious', 'Nichelle', 'Martina', 'Lucy', 'Latonya', 'Cline', 'Ott', 'Ona', 'Otmer', 'Ersel', 'Olufemi', 'Gordy', 'Marne', 'Jahquez', 'Daeshaun', 'Nashaun', 'Seiichi', 'Shigeki', 'Kazuto', 'Shozo', 'Alhaji', 'Lonn', 'Tevion', 'Kaige', 'Darlene', 'Braydyn', 'Masaaki', 'Graeson', 'Bernerd', 'Lynne', 'Dewaine', 'Shig', 'Junichi', 'Toshiro', 'Azavion', 'Michio', 'Yoshiro', 'Heraldo', 'Epitacio', 'Mas', 'Taequan', 'Trindon', 'Tirrell', 'Dmonte', 'Jaquante', 'Yeeleng', 'Maleik', 'Airam', 'Noname', 'Shyhiem', 'Tyquon', 'Damonta', 'Undray', 'Shadrick', 'Durwin', 'Lataurus', 'Corneall', 'Dantonio', 'Tilmon', 'Mackie', 'Ebbie', 'Eligha', 'Beth', 'Barth', 'Hezzie', 'Artha', 'Darrie', 'Frederi', 'Benford', 'Elves', 'Theodia', 'Jaye', 'Fran', 'Khylan', 'Berwyn', 'Constance', 'Markevion', 'Martavion', 'Jashun', 'Jermarion', 'Taylin', 'Breland', 'Franchot', 'Chrishun', 'Davarius', 'Dearius', 'Tredarius', 'Jayland', 'Cortavius', 'Deyonta', 'Tradarius', 'Kemarrion', 'Markavion', 'Jmarion', 'Jacarius', 'Kairi', 'Rasool', 'Jarreau', 'Khayree', 'Brahin', 'Hameed', 'Rolen', 'Cleason', 'Cartez', 'Nicholad', 'Brahim', 'Bryheem', 'Khalief', 'Anel', 'Mcgwire', 'Lula', 'Gaddis', 'Lowery', 'Odies', 'Rannie', 'Artee', 'Aurther', 'Bookert', 'Lenon', 'Oree', 'Gennie', 'Emitt', 'Sedgie', 'Claudy', 'Coyt', 'Lieutenant', 'Zannie', 'Kenn', 'Roosvelt', 'Vertis', 'Elex', 'Eula', 'Abron', 'Perkins', 'Emersyn', 'Lakin', 'Dravin', 'Other', 'President', 'Carrie', 'Cleother', 'Estus', 'Tee', 'Raymont', 'Woodard', 'Ras', 'Zennie', 'Versie', 'Mansfield', 'Atha', 'Bossie', 'Smiley', 'Kenard', 'Jermie', 'Vardell', 'Kadan', 'Roney', 'Furney', 'Caroll', 'Benjy', 'Shamond', 'Tyrease', 'Dontre', 'Raekwan', 'Raequon', 'Chrishon', 'Jahmez', 'Jaques', 'Zaveon', 'Zaccheus', 'Demaris', 'Shaquile', 'Shiheem', 'Santario', 'Monterio', 'Jawaan', 'Lavere', 'Levere', 'Guerino', 'Lisle', 'Fraser', 'Grier', 'Gurnie', 'Lattie', 'Wassil', 'Domer', 'Melio', 'Zolton', 'Haines', 'Gervase', 'Fermon', 'Geneva', 'Trask', 'Linward', 'Colen', 'Dossie', 'Zygmund', 'Teofil', 'Talbert', 'Mosby', 'Elworth', 'Garvie', 'Jiles', 'Mallie', 'Flay', 'Stokes', 'Bernis', 'Gardiner', 'Deno', 'Algerd', 'Handy', 'Flake', 'Hallet', 'Coyte', 'Wingate', 'Burlie', 'Sigmond', 'Myrle', 'Stiney', 'Americus', 'Claxton', 'Acy', 'Hill', 'Fenner', 'Festus', 'Linnie', 'Guilford', 'Artice', 'Constant', 'Faber', 'Jb', 'Pleasant', 'Dallis', 'Vestal', 'Terez', 'English', 'Allard', 'Ingram', 'Beaufort', 'Chene', 'Dequante', 'Bubber', 'Jamone', 'Zebulun', 'Daqwan', 'Delshawn', 'Jamond', 'Dacota', 'Wilmot', 'Prue', 'Wister', 'Kenyata', 'Darik', 'Sumter', 'Hovie', 'Tallie', 'Diontay', 'Dontaye', 'Brentt', 'Felder', 'Chappell', 'Ralpheal', 'Wofford', 'Stclair', 'Aiken', 'Hashem', 'Daire', 'Grahm', 'Jaivon', 'Davarion', 'Arnez', 'Ryer', 'Mousa', 'Jahlon', 'Leyland', 'Maizen', 'Zadyn', 'Zein', 'Amarri', 'Hady', 'Keegen', 'Taeshawn', 'Jontae', 'Radwan', 'Jsean', 'Hartwell', 'Roddey', 'Arend', 'Marjorie', 'Clements', 'Rae', 'Pressley', 'Saintclair', 'Derrill', 'Joann', 'Cote', 'Philo', 'Urho', 'Evart', 'Vada', 'Deo', 'Tonie', 'Irven', 'Stjulian', 'Durand', 'Diarra', 'Burnet', 'Steed', 'Demont', 'Burris', 'Donyell', 'Gjon', 'Demone', 'Jodi', 'Boban', 'Brunson', 'Mackey', 'Delwyn', 'Gordie', 'Owens', 'Efton', 'Uel', 'Ancel', 'Zafir', 'Kyeem', 'Vencil', 'Irl', 'Tymeer', 'Dymere', 'Kier', 'Murel', 'Hale', 'Lorn', 'Tahjir', 'Sufyaan', 'Trig', 'Yacqub', 'Khadir', 'Najib', 'Ayuub', 'Hamse', 'Yassir', 'Yussuf', 'Abdihafid', 'Abdinasir', 'Abdiqani', 'Tayon', 'Abdirahim', 'Abdishakur', 'Mukhtar', 'Bauer', 'Damere', 'Rashee', 'Kalief', 'Shyheed', 'Dejour', 'Kuran', 'Qaadir', 'Aldor', 'Jasyah', 'Hajj', 'Ordell', 'Gradyn', 'Ayyub', 'Atley', 'Mahkai', 'Lochlann', 'Sakai', 'Saamir', 'Bernhardt', 'Willmer', 'Swen', 'Hilding', 'Knute', 'Wael', 'Thorvald', 'Erle', 'Melroy', 'Valerian', 'Jorgen', 'Dacotah', 'Shaydon', 'Lamir', 'Kahseem', 'Jihaad', 'Tylee', 'Sakariye', 'Qalid', 'Syair', 'Syire', 'Safi', 'Zaakir', 'Sahmir', 'Saahir', 'Karlin', 'Kowen', 'Kahne', 'Azir', 'Tysir', 'Maki', 'Zekhi', 'Pater', 'Louden', 'Jandiel', 'Khaseem', 'Livio', 'Pellegrino', 'Loretta', 'Lothar', 'Morty', 'Harvard', 'Jeris', 'Arlene', 'Salvotore', 'Erasmus', 'Canio', 'Heywood', 'Ivar', 'Maitland', 'Neale', 'Gladys', 'Ethelbert', 'Fergus', 'Arcangelo', 'Sigismund', 'Fremont', 'Stillman', 'Egidio', 'Pincus', 'Sabatino', 'Solly', 'Bela', 'Stanly', 'Faust', 'Gesualdo', 'Adolphe', 'Ladislav', 'Mandel', 'Philander', 'Catello', 'Fordyce', 'Brownie', 'Darnley', 'Alfio', 'Emerito', 'Darrly', 'Delfin', 'Chiam', 'Beril', 'Albie', 'Roberts', 'Ferdinando', 'Maureen', 'Herberto', 'Lamark', 'Philipp', 'Uwe', 'Dermott', 'Amalio', 'Sandford', 'Shawnta', 'Shannan', 'Sheppard', 'Jerauld', 'Antoinne', 'Oleh', 'Tobie', 'Thoms', 'Valice', 'Thurnell', 'Deamonte', 'Kendel', 'Trevone', 'Kaylob', 'Carder', 'Antrell', 'Traven', 'Jaymir', 'Joni', 'Keisean', 'Krishawn', 'Marquelle', 'Dearis', 'Delvonte', 'Jamez', 'Zebadiah', 'Kreig', 'Teran', 'Resean', 'Zackory', 'Lamontae', 'Albieri', 'Albiery', 'Chen', 'Alexy', 'Arslan', 'Taliek', 'Nakhi', 'Naphtali', 'Papa', 'Pesach', 'Michoel', 'Salih', 'Harshdeep', 'Elhadj', 'Izzy', 'Jahkai', 'Tyliek', 'Vasilis', 'Yaacov', 'Sohaib', 'Yissochor', 'Mir', 'Jasin', 'Jensy', 'Rehman', 'Nazeer', 'Jahmil', 'Enson', 'Nasif', 'Rizwan', 'Samiul', 'Rahat', 'Angelos', 'Avroham', 'Abdulai', 'Adir', 'Enes', 'Yishay', 'Doyt', 'Gal', 'Shoaib', 'Quaron', 'Ishraq', 'Nazaire', 'Nyzaiah', 'Mattia', 'Javone', 'Mahesh', 'Mamady', 'Johnattan', 'Jorman', 'Kaliq', 'Devendra', 'Burhan', 'Zishe', 'Zeandre', 'Arel', 'Shalik', 'Shameer', 'Nisson', 'Ralik', 'Agim', 'Amauris', 'Atif', 'Samory', 'Shatiek', 'Taner', 'Rafat', 'Zhen', 'Radhames', 'Raliek', 'Ronel', 'Sabbir', 'Saqib', 'Jeudy', 'Hesham', 'Hyun', 'Lakeem', 'Mishael', 'Ivo', 'Tajay', 'Taleek', 'Tishawn', 'Tyreem', 'Samori', 'Nickholas', 'Pearse', 'Mamadi', 'Elhadji', 'Dawood', 'Dilon', 'Ishmel', 'Yiannis', 'Jahquel', 'Jahquell', 'El', 'Equan', 'Ho', 'Delno', 'Dinesh', 'Damel', 'Temitayo', 'Tenzing', 'Wahab', 'Alisher', 'Adonijah', 'Bradan', 'Efrayim', 'Elnatan', 'Elmin', 'Hossain', 'Eliav', 'Azimjon', 'Dovber', 'Sheya', 'Yahia', 'Jasani', 'Liav', 'Kamare', 'Kaysean', 'Kinsley', 'Nikoloz', 'Nyrell', 'Wyeth', 'Jeremaih', 'Mahin', 'Matis', 'Oriel', 'Mourad', 'Shmeil', 'Messi', 'Jonibek', 'Jeyren', 'Keyden', 'Temur', 'Tanveer', 'Zyir', 'Zidan', 'Zayyan', 'Varick', 'Wesam', 'Abdoulie', 'Aqib', 'Asani', 'Bless', 'Hasnain', 'Hamdan', 'Getzel', 'Fatin', 'Huzaifa', 'Jarif', 'Jahlani', 'Davier', 'Chuna', 'Eashan', 'Rafan', 'Rakin', 'Ngawang', 'Mouhamad', 'Rohaan', 'Vanness', 'Volvy', 'Javel', 'Jabir', 'Jaevion', 'Fahd', 'Lean', 'Machai', 'Juniel', 'Kaylin', 'Jeremiyah', 'Matisyahu', 'Menasha', 'Mikaeel', 'Gaspard', 'Lorik', 'Shuaib', 'Seif', 'Shlomy', 'Shneor', 'Sonam', 'Volf', 'Yussef', 'Ziv', 'Krrish', 'Machi', 'Endi', 'Frederik', 'Abdo', 'Alif', 'Elchanan', 'Yordy', 'Shafin', 'Siam', 'Furkan', 'Fallou', 'Devyne', 'Chaskel', 'Arbi', 'Younes', 'Ziare', 'Tanyon', 'Terique', 'Nicholaos', 'Nickita', 'Mordchai', 'Saifullah', 'Saliou', 'Savier', 'Jahmiere', 'Jahson', 'Javoni', 'Jayel', 'Jie', 'Kwadwo', 'Kahmani', 'Johansel', 'Murat', 'Nasire', 'Nezar', 'Seydou', 'Jamair', 'Jahmeer', 'Chanina', 'Chezky', 'Zyire', 'Yoscar', 'Alassane', 'Aitan', 'Dannon', 'Donelle', 'Harrington', 'Sha', 'Shamal', 'Josph', 'Torrell', 'Ralphy', 'Sharron', 'Eleftherios', 'Gedalia', 'Kasheen', 'Manoj', 'Nuri', 'Daran', 'Devanand', 'Evagelos', 'Fatmir', 'Haralambos', 'Biju', 'Nilson', 'Wane', 'Tarig', 'Rober', 'Sharone', 'Lezer', 'Odalis', 'Glenston', 'Josip', 'Kostantinos', 'Rahshawn', 'Osei', 'Shariyf', 'Sotirios', 'Aneudi', 'Marios', 'Biff', 'Damiano', 'Shean', 'Rajendra', 'Mare', 'Richad', 'Jaja', 'Efstathios', 'Nephtali', 'Kowan', 'Rhonda', 'Pasqualino', 'Confesor', 'Linc', 'Safet', 'Sharrieff', 'Kiron', 'Damain', 'Aurohom', 'Kariem', 'Tiheim', 'Dushawn', 'Kindu', 'Aswad', 'Kwane', 'Oba', 'Jermayne', 'Dakeem', 'Babatunde', 'Ackeem', 'Alvi', 'Adetokunbo', 'Akeel', 'Kedwin', 'Kayron', 'Mergim', 'Wilkins', 'Wojciech', 'Omair', 'Kushtrim', 'Kwamel', 'Saiquan', 'Naquon', 'Quandell', 'Veton', 'Shaune', 'Daguan', 'Duquan', 'Jency', 'Ka', 'Waqas', 'Xiao', 'Mahlik', 'Kasiem', 'Navindra', 'Sayquan', 'Shaquon', 'Shiquan', 'Rameek', 'Jerelle', 'Devaun', 'Jakim', 'Jaquell', 'Eury', 'Shaiquan', 'Shakeal', 'Shakiem', 'Shaleek', 'Ramesh', 'Suhail', 'Tylique', 'Jawanza', 'Jonell', 'Hamdi', 'Jaimeson', 'Kerven', 'Demetreus', 'Giselle', 'Aikeem', 'Akiem', 'Rondel', 'Dow', 'Gregroy', 'Darnelle', 'Naguan', 'Tyronn', 'Ricke', 'Dishawn', 'Rishawn', 'Tarick', 'Tynell', 'Japhet', 'Francesc', 'Maximili', 'Herby', 'Jaqwan', 'Kemal', 'Akeen', 'Azeez', 'Devindra', 'Deryck', 'Deval', 'Alessand', 'Masood', 'Uladimir', 'Cadon', 'Quanah', 'Zimere', 'Chatham', 'Koi', 'Zymire', 'Jamaury', 'Jahmire', 'Ziyan', 'Cowen', 'Jamaurie', 'Nyquan', 'Jayleen', 'Zymiere', 'Zymarion', 'Kahmari', 'Langdon', 'Zymari', 'Jymir', 'Kamaree', 'Nycere', 'Sayvion', 'Jahmarion', 'Justyce', 'Tuck', 'Thayer', 'Mung', 'Graison', 'Delane', 'Lemoyne', 'Cinch', 'Nevada', 'Dhairya', 'Jyaire', 'Yazir', 'Tahjmir', 'Sequoyah', 'Quention', 'Tanmay', 'Shreyansh', 'Ahyan', 'Aaryav', 'Zaylin', 'Laksh', 'Basheer', 'Bhavik', 'Orley', 'Vestel', 'Altus', 'Choice', 'Bufford', 'Quasir', 'Emry', 'Tressel', 'Eppie', 'Jayvier', 'Prestin', 'Haydin', 'Caydan', 'Corday', 'Camdin', 'Brodyn', 'Liberato', 'Trayon', 'Telesfor', 'Jayko', 'Lavi', 'Procopio', 'Rubel', 'Karder', 'Jaymar', 'Bryor', 'Gottlob', 'Saladin', 'Tunis', 'Saheed', 'Alsexander', 'Davonn', 'Jaquill', 'Shakeil', 'Krunal', 'Tashon', 'Doyel', 'Odes', 'Thoams', 'Rasul', 'Wendyl', 'Glendale', 'Ahmid', 'Altarik', 'Amish', 'Jaquis', 'Dashan', 'Salaam', 'Bhavin', 'Nashid', 'Tauheed', 'Jamill', 'Cordney', 'Derly', 'Jamale', 'Hristopher', 'Camaron', 'Domanique', 'Desmund', 'Keenon', 'Paulanthony', 'Demarques', 'Meryl', 'Medard', 'Erbey', 'Adrin', 'Evo', 'Pal', 'Deke', 'Glendal', 'Tramayne', 'Aloysuis', 'Berthal', 'Ashly', 'Arien', 'Teodulo', 'Johsua', 'Kelwin', 'Quintan', 'Mauel', 'Quisto', 'Gaylin', 'Trayvion', 'Tracer', 'Ramsay', 'Verlan', 'Kyndal', 'Donovon', 'Samuell', 'Treyveon', 'Nereo', 'Areli', 'Dashun', 'Devontre', 'Stran', 'Zarian', 'Pacen', 'Kamakani', 'Alii', 'Chidozie', 'Cobie', 'Acxel', 'Jatavian', 'Kelvon', 'Keldon', 'Giezi', 'Gavon', 'Virtus', 'Burdell', 'Dorrance', 'Naail', 'Lantz', 'Travian', 'Cleland', 'Arish', 'Elyan', 'Chukwudi', 'Shahrukh', 'Coulter', 'Karver', 'Seeley', 'Wynton', 'Detric', 'Quenten', 'Joemichael', 'Daruis', 'Tyeler', 'Montray', 'Hermenegildo', 'Donathan', 'Mckenna', 'Kijuan', 'Braijon', 'Vashawn', 'Darvell', 'Kennie', 'Rejino', 'Vickey', 'Lyndall', 'Reynoldo', 'Malyk', 'Armarion', 'Brit', 'Trayshawn', 'Contrell', 'Eutimio', 'Dantrel', 'Darrious', 'Dawon', 'Richey', 'Arrion', 'Zohair', 'Randale', 'Keyshone', 'Kiwane', 'Jibri', 'Devell', 'Beto', 'Jaymz', 'Ritchey', 'Tremel', 'Keante', 'Vontrell', 'Guadlupe', 'Esiquiel', 'Erasto', 'Dub', 'Augustas', 'Panfilo', 'Vuk', 'Mickie', 'Javonni', 'Riddick', 'Nikodem', 'Marrion', 'Kamareon', 'Maks', 'Eliverto', 'Cresenciano', 'Jerrol', 'Joakim', 'Maddax', 'Kayvion', 'Khizar', 'Haze', 'Aveon', 'Amjad', 'Audwin', 'Almir', 'Vicky', 'Lonell', 'Jabarie', 'Jaylun', 'Damarrion', 'Mantas', 'Dannye', 'Aadarsh', 'Caelen', 'Tilton', 'Kimmie', 'Josgar', 'Oleksandr', 'Keyontae', 'Fidelio', 'Wiktor', 'Maxymilian', 'Cayce', 'Rodric', 'Manrique', 'Kestutis', 'Donnald', 'Grayland', 'Lavance', 'Medgar', 'Chaney', 'Monta', 'Lemond', 'Medford', 'Mareo', 'Camerino', 'Ronold', 'Lancer', 'Credell', 'Elbridge', 'Stony', 'Dvid', 'Hilberto', 'Erineo', 'Jerrald', 'Antawan', 'Cordario', 'Levelle', 'Ramsen', 'Jigar', 'Laroyce', 'Lazerrick', 'Artez', 'Cordelro', 'Creon', 'Lonzell', 'Shanton', 'Orpheus', 'Terris', 'Renauld', 'Deondra', 'Fontaine', 'Airrion', 'Branko', 'Enemencio', 'Antiono', 'Caprice', 'Danyale', 'Valdez', 'Oswell', 'Tahitoa', 'Fannie', 'Estes', 'Herchel', 'Seabron', 'Bunyan', 'Thelmon', 'Agnew', 'Broughton', 'Harwell', 'Mather', 'Quillie', 'Hardwick', 'Phinizy', 'Pope', 'Addis', 'Seals', 'Thelman', 'Summie', 'Romano', 'Zacari', 'Kortney', 'Makye', 'Graycen', 'Kavari', 'Kamarri', 'Ajahni', 'Dayan', 'Sharrod', 'Pheonix', 'Trentyn', 'Jacai', 'Jamesley', 'Destyn', 'Maddon', 'Gianlucas', 'Aydrian', 'Bader', 'Jaise', 'Godson', 'Gleb', 'Jatniel', 'Yaxiel', 'Marvins', 'Miron', 'Yaroslav', 'Legrande', 'Lonzy', 'Merrell', 'Flemming', 'Guerry', 'Kimothy', 'Remus', 'Wyndell', 'Barnard', 'Denorris', 'Edna', 'Bevan', 'Warnell', 'Josie', 'Arthor', 'Theartis', 'Kimsey', 'Wymon', 'Duglas', 'Reshawn', 'Natrone', 'Treysen', 'Davaris', 'Jocqui', 'Traivon', 'Trevonne', 'Tavarious', 'Monson', 'Kevis', 'Ladonte', 'Mackenson', 'Bodee', 'Chayden', 'Dequon', 'Keiondre', 'Dewan', 'Taige', 'Renel', 'Jasher', 'Bayler', 'Dodger', 'Tyke', 'Jarvin', 'Edner', 'Travonn', 'Traxton', 'Malosi', 'Lavonta', 'Janard', 'Kyzer', 'Packer', 'Travoris', 'Frantzy', 'Makay', 'Tamari', 'Kanard', 'Dairon', 'Gabriell', 'Kemaury', 'Jshaun', 'Karel', 'Jakarri', 'Rubens', 'Zamauri', 'Winsley', 'Giulian', 'Yosbel', 'Kevaughn', 'Jimson', 'Kendly', 'Dishon', 'Dallyn', 'Jephthe', 'Luccas', 'Kemuel', 'Eddrick', 'Ahmarion', 'Amariyon', 'Artavis', 'Dewin', 'Jacarie', 'Jahn', 'Janari', 'Geordy', 'Mardochee', 'Jimari', 'Yoshinobu', 'Eiji', 'Yasunobu', 'Koon', 'Hidemi', 'Norio', 'Kiyomi', 'Shuichi', 'Kazuyoshi', 'Yoshitaka', 'Kanji', 'Tetsuro', 'Asao', 'Dominador', 'Shogo', 'Jakye', 'Braelin', 'Chrisangel', 'Calab', 'Morio', 'Seiki', 'Tsuyoshi', 'Soichi', 'Masakatsu', 'Tadayoshi', 'Tokuichi', 'Yoshikatsu', 'Matsuichi', 'Lorrin', 'Javeion', 'Kail', 'Jvon', 'Joshwa', 'Keylen', 'Rylon', 'Oved', 'Kraven', 'Koben', 'Klever', 'Nedved', 'Dago', 'Cortlen', 'Reeves', 'Yhair', 'Xane', 'Jamori', 'Jayshon', 'Jaiveon', 'Joseth', 'Drelynn', 'Haldrin', 'Keelyn', 'Nathanuel', 'Kvon', 'Jayln', 'Khyrie', 'Zayveon', 'Braxston', 'Jaceion', 'Jonavon', 'Jesaiah', 'Gaddiel', 'Tobyn', 'Becket', 'Aydyn', 'Arinze', 'Dacian', 'Aadin', 'Fender', 'Brysun', 'Demarious', 'Kaimi', 'Ryson', 'Jarrin', 'Maleko', 'Kamakana', 'Kamalani', 'Johnavon', 'Kawena', 'Aadil', 'Blayde', 'Garyn', 'Izaih', 'Bryndon', 'Drelyn', 'Demarian', 'Kupaa', 'Nalu', 'Makena', 'Lawaia', 'Kaimalu', 'Kanaloa', 'Oshen', 'Mj', 'Kahekili', 'Koalii', 'Makua', 'Promise', 'Keylin', 'Kevondrick', 'Tobenna', 'Infantboy', 'Oluwatimilehin', 'Nathanal', 'Zakkery', 'Shariq', 'Sadler', 'Rockne', 'Drelon', 'Ethon', 'Catcher', 'Clayten', 'Kaniela', 'Isaack', 'Josten', 'Zarius', 'Tayte', 'Ugochukwu', 'Aiman', 'Eduar', 'Basel', 'Canton', 'Dyron', 'Keaden', 'Kayceon', 'Kyrian', 'Kree', 'Jj', 'Iaan', 'Hudsyn', 'Graceson', 'Gatlyn', 'Eydan', 'Jak', 'Townsend', 'Owais', 'Nandan', 'Rayland', 'Ridhaan', 'Dantavious', 'Lavoris', 'Maricus', 'Rodrigus', 'Aayansh', 'Chasten', 'Durante', 'Johnta', 'Detavious', 'Donterrius', 'Rilyn', 'Rilee', 'Marquize', 'Quinterius', 'Jamarco', 'Quinnton', 'Deston', 'Aceson', 'Britten', 'Adric', 'Tabias', 'Lajarvis', 'Corderius', 'Romon', 'Que', 'Nord', 'Lerone', 'Skylan', 'Tobi', 'Mccrae', 'Mathayus', 'Marcuz', 'Levii', 'Lander', 'Oluwadarasimi', 'Miklo', 'Nijah', 'Nero', 'Quavis', 'Zailyn', 'Whitman', 'Zavior', 'Zlatan', 'Crixus', 'Cotton', 'Chukwuebuka', 'Draden', 'Caston', 'Aceyn', 'Caeson', 'Brax', 'Azel', 'Kaisyn', 'Hunt', 'Gaius', 'Gabrian', 'Falcon', 'Iyan', 'Jayjay', 'Altonio', 'Woodruff', 'Tavare', 'Kawaski', 'Dontravious', 'Gabreil', 'Holten', 'Dayvian', 'Brennyn', 'Chayson', 'Dailon', 'Keyshun', 'Jaryn', 'Jamyron', 'Jakavion', 'July', 'Jonanthony', 'Trenden', 'Tobechukwu', 'Yostin', 'Casin', 'Kaydyn', 'Jshawn', 'Keaghan', 'Khalen', 'Haylen', 'Jamarques', 'Alyjah', 'Baylon', 'Kemontae', 'Taysean', 'Slaton', 'Saxton', 'Yadir', 'Tramon', 'Traevion', 'Raydon', 'Raahim', 'Olamide', 'Oreoluwa', 'Zyien', 'Zayde', 'Marqavious', 'Marquavis', 'Trevious', 'Zyshonne', 'Quindarrius', 'Quintarious', 'Quinterious', 'Rodarius', 'Deontavious', 'Champion', 'Decklan', 'Daxx', 'Pecos', 'Jovonni', 'Jaydrian', 'Montravius', 'Gunter', 'Zerrick', 'Quontavious', 'Ayeden', 'Audi', 'Bentlie', 'Brek', 'Travonne', 'Daquavious', 'Jartavious', 'Keldric', 'Alezander', 'Kamen', 'Taytum', 'Siler', 'Yavuz', 'Zaniel', 'Yuriel', 'Draiden', 'Axzel', 'Castin', 'Keeland', 'Jrake', 'Jonhatan', 'Jeziel', 'Javery', 'Severino', 'Olavi', 'Benoit', 'Phillips', 'Lothrop', 'Konstanty', 'Mato', 'Carney', 'Keithen', 'Easley', 'Chanler', 'Erbie', 'Ephriam', 'Kentravion', 'Kesan', 'Ladamien', 'Treshun', 'Jakyron', 'Burch', 'Kaston', 'Kyndall', 'Jarden', 'Shields', 'Jontrell', 'Thales', 'Minnis', 'Ida', 'Hildred', 'Helder', 'Fernell', 'Shone', 'Laterrance', 'Tuyen', 'Roshun', 'Vincient', 'Ory', 'Hilman', 'Calton', 'Clydell', 'Vick', 'Derrin', 'Silton', 'Tandy', 'Emeal', 'Rual', 'Cardarius', 'Jylan', 'Hodge', 'Charls', 'Jacobey', 'Jaqualon', 'Jyrin', 'Calib', 'Fowler', 'Kalep', 'Osco', 'Treylan', 'Paschal', 'Lowry', 'Tydrick', 'Ladavion', 'Roe', 'Jarmall', 'Josuha', 'Quindell', 'Tra', 'Jamaria', 'Jermicheal', 'Hobie', 'Oluwaseun', 'Trimayne', 'Kaire', 'Katrell', 'Tradd', 'Yohannes', 'Oluwaseyi', 'Tyski', 'Lansana', 'Tion', 'Delontay', 'Tavone', 'Quante', 'Taavon', 'Daquane', 'Burleigh', 'Eyoel', 'Cung', 'Khodee', 'Emilien', 'Laurien', 'Leonide', 'Loomis', 'Antrone', 'Sewall', 'Nicollas', 'Vitor', 'Jaythian', 'Jasaun', 'Tighe', 'Colman', 'Antionne', 'Nygel', 'Garnell', 'Jamareon', 'Alvey', 'Carvel', 'Carville', 'Carlester', 'Rutledge', 'Mills', 'Rayner', 'Doil', 'Gregario', 'Aniseto', 'Audon', 'Brevyn', 'Pio', 'Tanis', 'Jasinto', 'Jaxtin', 'Nugent', 'Eldredge', 'Egon', 'Jong', 'Pancho', 'Lionardo', 'Susano', 'Trueman', 'Braxtin', 'Delphine', 'Harroll', 'Goree', 'Manuela', 'Epigmenio', 'Laureano', 'Josefina', 'Tiodoro', 'Silbestre', 'Patrocinio', 'Corando', 'Maciah', 'Quintyn', 'Wrigley', 'Onie', 'Noal', 'Duward', 'Filomeno', 'Cleburn', 'Garvis', 'Bisente', 'Cedell', 'Jap', 'Rube', 'Mavis', 'Jarold', 'Hijinio', 'Dewie', 'Trinida', 'Jung', 'Byrd', 'Mcadoo', 'Floy', 'Eldie', 'Volney', 'Saragosa', 'Derward', 'Francico', 'Genovevo', 'Lindley', 'Lasalle', 'Borden', 'Bonny', 'Claudis', 'Silberio', 'Asuncion', 'Rolly', 'Doak', 'Luvender', 'Thurl', 'Garl', 'Arvine', 'Johnnye', 'Emiterio', 'Crisoforo', 'Eulojio', 'Edell', 'Infboy', 'Ural', 'Natalia', 'Delia', 'Acencion', 'Joas', 'Keagon', 'Reice', 'Esperanza', 'Velton', 'Eufemio', 'Frumencio', 'Dominga', 'Eutiquio', 'Dois', 'Gean', 'Odaniel', 'Lyndel', 'Kreigh', 'Bobbye', 'Rogue', 'Deundra', 'Cambron', 'Kaitlynn', 'Kayleigh', 'Hailee', 'Piper', 'Sofia', 'Carly', 'Abigayle', 'Angelina', 'Tavish', 'Christophere', 'Anterrio', 'Thimothy', 'Montarius', 'Marquarius', 'Labradford', 'Lawless', 'Lenis', 'Camile', 'Tonya', 'Hersey', 'Abbie', 'Loveless', 'Aristide', 'Ovey', 'Ovide', 'Robley', 'Elward', 'Leory', 'Earlis', 'Gaynell', 'Printes', 'Elzy', 'Aswell', 'Waver', 'Wilma', 'Minos', 'Euclide', 'Aster', 'Demarrion', 'Selbert', 'Stoy', 'Brack', 'Strother', 'Osa', 'Ovel', 'Custer', 'Keveon', 'Lenvil', 'Hargus', 'Kline', 'Goldie', 'Warfield', 'Wavy', 'Carless', 'Proctor', 'Holston', 'Philopateer', 'Loman', 'Vernis', 'Forster', 'Jakie', 'Martavis', 'Louard', 'Corbet', 'Waldon', 'Cluster', 'Lafe', 'Tayshun', 'Browder', 'Moss', 'Rudell', 'Loyde', 'Glendel', 'Elby', 'Shafter', 'Camila', 'Elaine', 'Scarlett', 'Gertrude', 'Bella', 'Penelope', 'Cathy', 'Lizbeth', 'Arianna', 'Agnes', 'Vicki', 'Mila', 'Ximena', 'Delilah', 'Stella', 'Miranda', 'Valentina', 'Rosemary', 'Khloe', 'Heidi', 'Desiree', 'Violet', 'Gianna', 'Nayeli', 'Luna', 'Doreen', 'Jennie', 'Roberta', 'Sheri', 'Jeanne', 'Alina', 'Celeste', 'Rosalie', 'Naomi', 'Teri', 'Maryann', 'Glenda', 'Lynda', 'Annabelle', 'Antoinette', 'Stephani', 'Marcia', 'Sherri', 'Clara', 'Julissa', 'Becky', 'Marianne', 'Melody', 'Sadie', 'Sienna', 'Marsha', 'Belinda', 'Jaylah', 'Harriet', 'Kristine', 'Elizabet', 'Paisley', 'Genevieve', 'Melinda', 'Leilani', 'Aubree', 'Keira', 'Kristy', 'Sheryl', 'Fernanda', 'Tami', 'Daleyza', 'Rosemarie', 'Francine', 'Kristi', 'Jaqueline', 'Meagan', 'Nichole', 'Athena', 'Anahi', 'Marisa', 'Yaretzi', 'Lena', 'Serena', 'Miley', 'Izabella', 'Kate', 'Joselyn', 'Margie', 'Krystle', 'Dulce', 'Pam', 'Traci', 'Mikayla', 'Shari', 'Delores', 'Nellie', 'Gisselle', 'Blanche', 'Clarissa', 'Dianne', 'Maxine', 'Janis', 'Carmela', 'Mabel', 'Estrella', 'Emely', 'Viola', 'Penny', 'Viviana', 'Estelle', 'Krista', 'Adalynn', 'Julianna', 'Danna', 'Marina', 'Sheena', 'Shawna', 'Mya', 'Leona', 'Leila', 'Isla', 'Charlene', 'Mindy', 'Bernadette', 'Audrina', 'Tricia', 'Adele', 'Myrtle', 'Nataly', 'Kimberley', 'Gwendolyn', 'Emilia', 'Janine', 'Paulina', 'Stefanie', 'Marguerite', 'Dayanara', 'Katina', 'Brielle', 'Vera', 'Jimena', 'Aileen', 'Bethany', 'America', 'Kellie', 'Shanice', 'Roxanne', 'Darla', 'Mamie', 'Jocelyne', 'Katherin', 'Lyla', 'Sonya', 'Allyson', 'Debora', 'Chaya', 'Jaslene', 'Malia', 'Daniella', 'Alessandra', 'Aimee', 'Dina', 'Arabella', 'Juliet', 'Laila', 'Rhoda', 'Angie', 'Everly', 'Adrianna', 'Shelia', 'Jana', 'Analia', 'Kamila', 'Rebekah', 'Myrna', 'Concetta', 'Amaya', 'Juliette', 'Litzy', 'Marely', 'Londyn', 'Patti', 'Adalyn', 'Marla', 'Tammie', 'Cora', 'Angelique', 'Fiona', 'Kari', 'Jaylene', 'Lucile', 'Rubi', 'Vivienne', 'Hattie', 'Noemi', 'Celina', 'Dena', 'Sherlyn', 'Selina', 'Bonita', 'Paulette', 'Aisha', 'Susie', 'Adeline', 'Elsa', 'Shania', 'Yasmin', 'Dalia', 'Jacquelyn', 'Thalia', 'Trina', 'Allisson', 'Chana', 'Olive', 'Helene', 'Nelda', 'Mireya', 'Chelsey', 'Cheri', 'Kira', 'Karissa', 'Lynette', 'Deneen', 'Ivette', 'Roslyn', 'Kinley', 'Rosalinda', 'Lila', 'Kaylie', 'Dayana', 'Melany', 'Carissa', 'Aniyah', 'Kyla', 'Yulissa', 'Trisha', 'Camilla', 'Ansley', 'Sarai', 'Lola', 'Arline', 'Lara', 'Stacie', 'Annika', 'Christi', 'Brisa', 'Gia', 'Therese', 'Abril', 'Angeline', 'Isabela', 'Marcella', 'Shanna', 'Stephany', 'Henrietta', 'Tasha', 'Brianne', 'Rosanne', 'Luann', 'Frieda', 'Renata', 'Dianna', 'Celia', 'Sondra', 'Aylin', 'Melba', 'Catina', 'Alayna', 'Mollie', 'Nathalie', 'Tabitha', 'Tracie', 'Scarlet', 'Jayne', 'Rachelle', 'Jeannette', 'Addyson', 'Cecelia', 'Annabella', 'Dahlia', 'Dorothea', 'Annmarie', 'Marlys', 'Deirdre', 'Evangeline', 'Melina', 'Erma', 'Jeanine', 'Roxana', 'Yaritza', 'Montserrat', 'Lizzie', 'Kerri', 'Yoselin', 'Migdalia', 'Rivka', 'Cathleen', 'Lorene', 'Yareli', 'Bette', 'Kyra', 'Janette', 'Beulah', 'Danica', 'Arely', 'Lexi', 'Shana', 'Sherrie', 'Alexus', 'Mable', 'Citlalli', 'Nadine', 'Shauna', 'Ryleigh', 'Jeri', 'Phoebe', 'Jazlyn', 'Noreen', 'Keisha', 'Lora', 'Brynlee', 'Alivia', 'Lottie', 'Monserrat', 'Giuliana', 'Adelyn', 'Deana', 'Jacqueli', 'Makenna', 'Jeannie', 'Noelle', 'Imogene', 'Daphne', 'Reyna', 'Katelynn', 'Bettie', 'Carmella', 'Estefania', 'Cassandr', 'Betsy', 'Brianda', 'Iliana', 'Bryanna', 'Aranza', 'Rihanna', 'Anissa', 'Alisa', 'Azul', 'Milagros', 'Gemma', 'Freda', 'Ada', 'Bettye', 'Nia', 'Oralia', 'Alaina', 'Anabelle', 'Destinee', 'Sallie', 'Sonja', 'Willow', 'Staci', 'Lia', 'Breana', 'Eliza', 'Mikaela', 'Mona', 'Cataleya', 'Jeannine', 'Lilah', 'Anabel', 'Ashlynn', 'Aleena', 'Estella', 'Ayla', 'Adelaide', 'Lilliana', 'Kristie', 'Nettie', 'Cherie', 'May', 'Myra', 'Nicolette', 'Lissette', 'Siena', 'Ivanna', 'Christa', 'Caylee', 'Roseann', 'Anastasia', 'Karin', 'Corinne', 'Ginger', 'Flora', 'Bria', 'Gretchen', 'Maryellen', 'Lana', 'Harmony', 'Elvira', 'Ilene', 'Iesha', 'Celine', 'Faye', 'Khadijah', 'Elyse', 'Joana', 'Sharyn', 'Leia', 'Catherin', 'Corina', 'Sheree', 'Salma', 'Deja', 'Liz', 'Aracely', 'Roselyn', 'Samara', 'Lorrie', 'Frida', 'Tessie', 'Talia', 'Rosalind', 'Jailene', 'Lisette', 'Raelynn', 'Yetta', 'Catharine', 'Adelynn', 'Odalys', 'Jolene', 'Charity', 'Aniya', 'Sanjuanita', 'Norah', 'Terrie', 'Yuliana', 'Lorie', 'Yazmin', 'Eleanore', 'Anika', 'Elida', 'Valery', 'Matilda', 'Nannie', 'Eloise', 'Gillian', 'Tatyana', 'Kimora', 'Brynn', 'Maliyah', 'Madilyn', 'Jenifer', 'Maddison', 'Colette', 'Nanette', 'Ayleen', 'Winnie', 'Jayda', 'Deloris', 'Tillie', 'Kizzy', 'Galilea', 'Janessa', 'Brenna', 'Amelie', 'Marybeth', 'Lorna', 'Kaia', 'Sarahi', 'Viridiana', 'Rebeca', 'Ericka', 'Mareli', 'Anaya', 'Nathaly', 'Candy', 'Larissa', 'Elle', 'Yasmine', 'Claudine', 'Kyleigh', 'Paloma', 'Lenore', 'Citlali', 'Rosanna', 'Misti', 'Kasandra', 'Zara', 'Isis', 'Alisson', 'Cheyanne', 'Reba', 'Ariella', 'Lavonne', 'Miah', 'Roxanna', 'Anabella', 'Suzette', 'Kiera', 'Gitty', 'Farrah', 'Helena', 'Shaniqua', 'Maryanne', 'Liana', 'Arleen', 'Belle', 'Katy', 'Anya', 'Selene', 'Maura', 'Chantel', 'Keyla', 'Maryjane', 'Tisha', 'Kisha', 'Kaelyn', 'Malka', 'Maci', 'Evelin', 'Julianne', 'Magdalena', 'Kimberlee', 'Ernestine', 'Alyson', 'Kaley', 'Danika', 'Kecia', 'Leanne', 'Tonia', 'Nyla', 'Ivonne', 'Madelynn', 'Ofelia', 'Lakisha', 'Adilene', 'Wendi', 'Susanne', 'Katharine', 'Faigy', 'Raizy', 'Tawny', 'Jackeline', 'Ariadne', 'Giovanna', 'Janiyah', 'Alani', 'Nayely', 'Lilian', 'Saundra', 'Jazlynn', 'Jaelynn', 'Elliana', 'Gayla', 'Deena', 'Earnestine', 'Margo', 'Herlinda', 'Elinor', 'Salina', 'Casandra', 'Nathalia', 'Kaila', 'Deanne', 'Desirae', 'Liza', 'Bobbi', 'Briella', 'Gilda', 'Averie', 'Charlize', 'Azalea', 'Sanjuana', 'Yajaira', 'Brandie', 'Aleah', 'Della', 'Elaina', 'Yahaira', 'Aja', 'Bernadine', 'Lela', 'Annabel', 'Xiomara', 'Kassidy', 'Nohely', 'Aubrie', 'Angelia', 'Macie', 'Shelbi', 'Chelsie', 'Lilyana', 'Jazlene', 'Amina', 'Dorthy', 'Noelia', 'Addisyn', 'Dalilah', 'Clarisa', 'Chrystal', 'Oleta', 'Georgina', 'Adelina', 'Edythe', 'Lucinda', 'Jannie', 'Minerva', 'Kelsie', 'Madisyn', 'Aida', 'Katlyn', 'Julieta', 'Violeta', 'Heidy', 'Lea', 'Leola', 'Chasity', 'Nell', 'Felicity', 'Kathi', 'Karyn', 'Hana', 'Micaela', 'Chandra', 'Liberty', 'Cielo', 'Tameka', 'Maude', 'Malky', 'Coraima', 'Haylie', 'Vanesa', 'Sloane', 'Karyme', 'Evelynn', 'Batsheva', 'Nallely', 'Tamra', 'Maricruz', 'Paislee', 'Kynlee', 'Marcela', 'Marci', 'Vonda', 'Cinthia', 'Amiyah', 'Breanne', 'Lisbeth', 'Leanna', 'Anais', 'Flor', 'Annemarie', 'Amie', 'Estela', 'Tammi', 'Rhiannon', 'Denisse', 'Leyla', 'Iridian', 'Dariana', 'Romina', 'Yamileth', 'Lidia', 'Sybil', 'Elvia', 'Debby', 'Philomena', 'Jacklyn', 'Charlee', 'Kathie', 'Aryanna', 'Katarina', 'Elianna', 'Zariah', 'Andreina', 'Filomena', 'Xochitl', 'Mariam', 'Myla', 'Janiya', 'Kristal', 'Estefany', 'Debi', 'Miracle', 'Shaindy', 'Evangelina', 'Naya', 'Maeve', 'Judi', 'Effie', 'Lilia', 'Dayami', 'Kierra', 'Vincenza', 'Cari', 'Lauri', 'Bethzy', 'Trudy', 'Deidre', 'Melisa', 'Luciana', 'Chantal', 'Laisha', 'Kennedi', 'Ayanna', 'Madalyn', 'Dania', 'Jaliyah', 'Madilynn', 'Citlaly', 'Lolita', 'Drema', 'Iva', 'Kailee', 'Grecia', 'Kailyn', 'Ladonna', 'Latanya', 'Maia', 'Jaquelin', 'Alanna', 'Etta', 'Marlee', 'Reina', 'Aiyana', 'Carolann', 'Gizelle', 'Greta', 'Lynnette', 'Cecile', 'Shayna', 'Savanah', 'Annalise', 'Nylah', 'Lesa', 'Jolie', 'Arleth', 'Laraine', 'Selah', 'Alysha', 'Bridgette', 'Madyson', 'Marylou', 'Adela', 'Shaina', 'Trista', 'Katia', 'Kayleen', 'Lilianna', 'Tamera', 'Millicent', 'Eugenia', 'Myrtice', 'Baila', 'Charmaine', 'Maegan', 'Ruthie', 'Jovanna', 'Julisa', 'Mayte', 'Latrice', 'Priscila', 'Glenna', 'Yitty', 'Tawana', 'Yessica', 'Ina', 'Brittni', 'Johana', 'Tess', 'Caryn', 'Natalee', 'Barb', 'Journee', 'Malaysia', 'Yulisa', 'Alta', 'Shaila', 'Maurine', 'Amira', 'Tiffani', 'Danette', 'Fanny', 'Justina', 'Leann', 'Dafne', 'Ima', 'Azucena', 'Braylee', 'Amaris', 'Bailee', 'Giana', 'Josette', 'Raegan', 'Gena', 'Luella', 'Nita', 'Laney', 'Gisela', 'Alexandrea', 'Rosalia', 'Odessa', 'Laci', 'Yamilex', 'Tamia', 'Astrid', 'Luanne', 'Gwen', 'Tabatha', 'Rivky', 'Laureen', 'Zina', 'Amara', 'Itzayana', 'Adamaris', 'Laylah', 'Luisa', 'Georgette', 'Joselin', 'Yamilet', 'Nilda', 'Luisana', 'Coleen', 'Cecily', 'Jocelynn', 'Mirella', 'Jessika', 'Moriah', 'Halle', 'Caren', 'Earline', 'Shantel', 'Aliana', 'Keila', 'Maryam', 'Marianna', 'Magaly', 'Sariah', 'Marnie', 'Kiersten', 'Janeth', 'Lyndsey', 'Shelli', 'Jaylee', 'Ashlie', 'Tianna', 'Bree', 'Isela', 'Krystina', 'Yaretzy', 'Evelina', 'Sarina', 'Tyra', 'Eloisa', 'Maite', 'Leilah', 'Marcie', 'Imelda', 'Alena', 'Juniper', 'Shelbie', 'Shakira', 'Ember', 'Emmalyn', 'Elissa', 'Skyla', 'Lylah', 'Xitlali', 'Gisele', 'Polly', 'Ernestina', 'Sandi', 'Emmy', 'Josefa', 'Magali', 'Ashely', 'Eve', 'Jayde', 'Rosella', 'Yuridia', 'Sheyla', 'Raelyn', 'Domenica', 'Valarie', 'Herminia', 'Katalina', 'Shaquana', 'Nelly', 'Rosalyn', 'Denice', 'Saanvi', 'Cambria', 'Joseline', 'Tomasa', 'Milana', 'Harriett', 'Devorah', 'Jackelyn', 'Jacquelin', 'Yadhira', 'Antonella', 'Shreya', 'Janay', 'Betzy', 'Kaiya', 'Terra', 'Roseanne', 'Karime', 'Lina', 'Macey', 'Vilma', 'Shaniya', 'Deyanira', 'Cindi', 'Mandi', 'Sanaa', 'Lakesha', 'Essence', 'Faviola', 'Brinley', 'Kirstie', 'Brissa', 'Alia', 'Janney', 'Kaylynn', 'Kamilah', 'Kianna', 'Adrianne', 'Yasmeen', 'Jerri', 'Anayeli', 'Ambar', 'Lorri', 'Hailie', 'Demetria', 'Awilda', 'Isabell', 'Leonor', 'Florine', 'Tennille', 'Deann', 'Nyah', 'Jolette', 'Xitlaly', 'Vienna', 'Lenora', 'Keily', 'Syble', 'Ciera', 'Milania', 'Lainey', 'Nyasia', 'Carley', 'Kelsi', 'Blossom', 'Maranda', 'Ally', 'Serina', 'Charli', 'Taraji', 'Jena', 'Natalya', 'Hortencia', 'Ila', 'Kailani', 'Mira', 'Evie', 'Ione', 'Briseyda', 'Aryana', 'Yarely', 'Susanna', 'Amya', 'Kaleigh', 'Qiana', 'Juli', 'Mckayla', 'Suzan', 'Fallon', 'Jacalyn', 'Ileana', 'Yesica', 'Willa', 'Fatoumata', 'Arly', 'Jakayla', 'Chyna', 'Jaida', 'Sunshine', 'Beyonce', 'Lawanda', 'Flossie', 'Lupita', 'Demi', 'Keely', 'Aliya', 'Jeanie', 'Tamiko', 'Gigi', 'Brissia', 'Mariel', 'Lluvia', 'Jasleen', 'Lizet', 'Brittanie', 'Kaci', 'Alycia', 'Madalynn', 'Milena', 'Coraline', 'Kaela', 'Soraya', 'Mozelle', 'Jessenia', 'Wilhelmina', 'Jazmyn', 'Stefani', 'Natali', 'Christiana', 'Ivana', 'Eiza', 'Zaria', 'Zaira', 'Lorelei', 'Cherry', 'Aline', 'Briseida', 'Siani', 'Yara', 'Rhianna', 'Kalia', 'Destiney', 'Hindy', 'Arlette', 'Shyanne', 'Joceline', 'Janell', 'Vianey', 'Elnora', 'Zoie', 'Elba', 'Jamila', 'Rena', 'Mari', 'Chava', 'Scarlette', 'Shyla', 'Corine', 'Kaliyah', 'Ailyn', 'Liv', 'Freya', 'Diya', 'Myrtis', 'Aliah', 'Margery', 'Gracelyn', 'Shira', 'Riya', 'Breann', 'Siobhan', 'Rochel', 'Tiffanie', 'Mirna', 'Nilsa', 'Tenley', 'Aliza', 'Celena', 'Vianney', 'Janel', 'Toccara', 'Dayna', 'Rona', 'Alba', 'Althea', 'Josselyn', 'Karlie', 'Alyce', 'Erlinda', 'Kadijah', 'Rosalba', 'Tangela', 'Marlena', 'Delois', 'Chastity', 'Coral', 'Braelynn', 'Dalila', 'Rosetta', 'Lu', 'Venessa', 'Kayley', 'Barbra', 'Jesica', 'Dona', 'Mitzi', 'Catrina', 'Gracelynn', 'Ophelia', 'Ayana', 'Mara', 'Calista', 'Adyson', 'Marilynn', 'Tomeka', 'Britni', 'Whitley', 'Karly', 'Verenice', 'Raylee', 'Dayanna', 'Shonda', 'Felecia', 'Betzaida', 'Kaylani', 'Shaylee', 'Jazzlyn', 'Giavanna', 'Vivianna', 'Jesusa', 'Lashonda', 'Maile', 'Suzy', 'Vania', 'Giada', 'Maisie', 'Venus', 'Emerald', 'Wilda', 'Saniya', 'Naydelin', 'Enid', 'Leilany', 'Jesenia', 'Maliah', 'Dortha', 'Dalary', 'Chany', 'Amia', 'Amalia', 'Khaleesi', 'Taina', 'Abbey', 'Dollie', 'Joslyn', 'Sommer', 'Lilibeth', 'Charleigh', 'Sydell', 'Shoshana', 'Nechama', 'Jamya', 'Jeanmarie', 'Albertha', 'Akeelah', 'Aanya', 'Destini', 'Kacie', 'Maleah', 'Cayla', 'Bryana', 'Zelma', 'Anjanette', 'Kaylah', 'Tonja', 'Amairani', 'Karli', 'Elina', 'Aurelia', 'Judie', 'Letha', 'Brittnee', 'Yanira', 'Ariza', 'Kataleya', 'Berta', 'Soleil', 'Marleen', 'Desteny', 'Gissel', 'Suri', 'Anjelica', 'Lilith', 'Breeanna', 'Krysta', 'Alysia', 'Chrissy', 'Lailah', 'Cathryn', 'Dawna', 'Myah', 'Lelia', 'Aviana', 'Xena', 'Pansy', 'Jazleen', 'Kaylyn', 'Mariann', 'Celene', 'Berniece', 'Anjali', 'Benita', 'Reanna', 'Sydnee', 'Taliyah', 'Raylene', 'Kristyn', 'Latonia', 'Pa', 'Nola', 'Lyanne', 'Danae', 'Sharla', 'Chanelle', 'Aleyda', 'Deb', 'Sofie', 'Shameka', 'Emelia', 'Miya', 'Latricia', 'Claribel', 'Lacie', 'Taisha', 'Queen', 'Breeana', 'Ilana', 'Erna', 'Neha', 'Melodie', 'Ariah', 'Ursula', 'Janna', 'Cienna', 'Maryjo', 'Vannessa', 'Saniyah', 'Mariajose', 'Malaya', 'Abbigail', 'Elin', 'Emi', 'Shanaya', 'Zahra', 'Lorine', 'Karrie', 'Johnna', 'Marni', 'Karis', 'Shelba', 'Omayra', 'Claudette', 'Anitra', 'Jenelle', 'Zelda', 'Alyse', 'Alethea', 'Jannet', 'Myranda', 'Corinna', 'Pattie', 'Jemma', 'Avah', 'Joycelyn', 'Loriann', 'Kirstin', 'Davina', 'Clementine', 'Arantza', 'Esme', 'Vida', 'Samira', 'Alysa', 'Ananya', 'Cherish', 'Jocelin', 'Renae', 'Jalisa', 'Elease', 'Salena', 'Zhane', 'Zulema', 'Rubye', 'Amerie', 'Leatrice', 'Geralyn', 'Brigitte', 'Sibyl', 'Corrina', 'Phylicia', 'Karlee', 'Kerrie', 'Addilyn', 'Alayah', 'Jacquely', 'Mirian', 'Jovana', 'Katelin', 'Marielena', 'Libby', 'Aditi', 'Nalani', 'Lilyanna', 'Mylee', 'Goldy', 'Melia', 'Audriana', 'Lillyana', 'Enriqueta', 'Tasia', 'Debbi', 'Ani', 'Elyssa', 'Yamile', 'Bridgett', 'Taniya', 'Britany', 'Latosha', 'Shanda', 'Estephanie', 'Maudie', 'Mariyah', 'Tana', 'Neva', 'Kalea', 'Oma', 'Jazelle', 'Neveah', 'Leonora', 'Miesha', 'Corrine', 'Jordynn', 'Cornelia', 'Ronni', 'Malinda', 'Janeen', 'Neriah', 'Brigette', 'Windy', 'Cassondra', 'Klarissa', 'Lizzette', 'Tanika', 'Izamar', 'Tera', 'Arianny', 'Florene', 'Evalyn', 'Poppy', 'Deisy', 'Jannette', 'Thania', 'Kelsea', 'Taniyah', 'Geri', 'Allyssa', 'Zariyah', 'Averi', 'Leeann', 'Kallie', 'Loni', 'Bryleigh', 'Rosina', 'Carlee', 'Preslee', 'Alexsandra', 'Adamari', 'Saray', 'Yaneli', 'Raina', 'Lianna', 'Keilani', 'Tamela', 'Ninfa', 'Ireland', 'Shante', 'Racheal', 'Zainab', 'Blima', 'Yocheved', 'Gema', 'Sayra', 'Aretha', 'Nya', 'Criselda', 'Anai', 'Bracha', 'Amirah', 'Sury', 'Twila', 'Arissa', 'Livia', 'Jacquline', 'Chiara', 'Anneliese', 'Quiana', 'Monika', 'Charisse', 'Emerie', 'Rosalva', 'Halie', 'Jenesis', 'Zaylee', 'Pricilla', 'Ouida', 'Felipa', 'Latifah', 'Kalley', 'Clarice', 'Nona', 'Jaunita', 'Hermelinda', 'Analy', 'Jizelle', 'Theda', 'Yoselyn', 'Dottie', 'Brittaney', 'Meghann', 'Azeneth', 'Richelle', 'Peggie', 'Brittny', 'Jaci', 'Marietta', 'Gissell', 'Evolet', 'Abbygail', 'Naima', 'Noelani', 'Jaslyn', 'Katheryn', 'Ruthann', 'Shelva', 'Ashli', 'Alianna', 'Felicitas', 'Delfina', 'Rayna', 'Christal', 'Leta', 'Tawnya', 'Zaniyah', 'Cathie', 'Antonette', 'Bethann', 'Nannette', 'Vita', 'Santa', 'Dejah', 'Patience', 'Alessia', 'Ahuva', 'Karely', 'Anette', 'Alfreda', 'Cyndi', 'Cami', 'Shirlee', 'Roxann', 'Alvina', 'Sima', 'Star', 'Tatianna', 'Krissy', 'Dreama', 'Diann', 'Birdie', 'Yoshiko', 'Violette', 'Mylah', 'Rosita', 'Eartha', 'Miabella', 'Shanika', 'Gricel', 'Ariyah', 'Emmalee', 'Nidia', 'Gladis', 'Roxie', 'Zoraida', 'Kandace', 'Annamarie', 'Alannah', 'Abrielle', 'Mercy', 'Lesli', 'Sydni', 'Kathrine', 'Jiselle', 'Anisa', 'Felisha', 'Kayli', 'Nanci', 'Ria', 'Cailyn', 'Melani', 'Alyna', 'Bambi', 'Avril', 'Amberly', 'Towanda', 'Malissa', 'Kaleena', 'Kinsey', 'Andria', 'Emogene', 'Milani', 'Milah', 'Hadassah', 'Avianna', 'Aubri', 'Pessy', 'Dori', 'Tea', 'Keshia', 'Adina', 'Esha', 'Magnolia', 'Moesha', 'Elana', 'Vikki', 'Lakendra', 'Ilse', 'Sydnie', 'Laquita', 'Hortense', 'Elouise', 'Tarah', 'Shamika', 'Genoveva', 'Margot', 'Aubrielle', 'Aya', 'Aleta', 'Shantell', 'Angelle', 'Lakeshia', 'Leota', 'Stormie', 'Caryl', 'Cristy', 'Sydelle', 'Analisa', 'Earlene', 'Syreeta', 'Paityn', 'Citlally', 'Nikole', 'Leandra', 'Elda', 'Lizbet', 'Blimy', 'Lorelai', 'Gittel', 'Jasmyn', 'Verania', 'Zoya', 'Anyssa', 'Jeniffer', 'Dorene', 'Makaila', 'Earlean', 'Ysabella', 'Brandee', 'Nailea', 'Stefany', 'Amiya', 'Carolee', 'Kassie', 'Theodora', 'Merissa', 'Skylah', 'Alesia', 'Leela', 'Madge', 'Shanta', 'Soledad', 'Sharonda', 'Thea', 'Capri', 'Amparo', 'Concha', 'Karolina', 'Keitha', 'Harriette', 'Evette', 'Mylie', 'Isha', 'Suzie', 'Carlene', 'Brunilda', 'Annamae', 'Ariadna', 'Sanai', 'Gisell', 'Danelle', 'Dovie', 'Lani', 'Shavonne', 'Janiah', 'Kora', 'Jessa', 'Melva', 'Yehudis', 'Analee', 'Enedina', 'Oaklee', 'Aubrianna', 'Velia', 'Zooey', 'Dolly', 'Shanae', 'Lyndsay', 'Allene', 'Kamya', 'Tedra', 'Yecenia', 'Nyree', 'Shyann', 'Kandice', 'Edwina', 'Aiyanna', 'Carli', 'Sariyah', 'Gwyneth', 'Roseanna', 'Charla', 'Nereyda', 'Yides', 'Helaine', 'Evita', 'Alanis', 'Starr', 'Rosalee', 'Yaire', 'Risa', 'Kristel', 'Greidys', 'Lillianna', 'Khushi', 'Triniti', 'Lilyan', 'Myesha', 'Kala', 'Moira', 'Neida', 'Gisel', 'Myriam', 'Anali', 'Izabel', 'Savana', 'Sanjana', 'Willodean', 'Briza', 'Lyra', 'Merry', 'Cheryle', 'Porsha', 'Kaili', 'Buffy', 'Deidra', 'Everleigh', 'Gardenia', 'Italia', 'Novella', 'Sahara', 'Sirena', 'Elide', 'Madisen', 'Katerina', 'Ashlea', 'Rianna', 'Samatha', 'Diandra', 'Shanell', 'Annalee', 'Samiyah', 'Joselyne', 'Maylin', 'Jazmyne', 'Terese', 'Nydia', 'Stasia', 'Saira', 'Carlota', 'Kathia', 'Katya', 'Elodie', 'Priya', 'Malena', 'Aadhya', 'Meera', 'Tayla', 'Jovita', 'Rafaela', 'Faiga', 'Jaquelyn', 'Elisheva', 'Debbra', 'Melyssa', 'Chelsi', 'Gricelda', 'Tawanda', 'Sharlene', 'Mellissa', 'Alene', 'Amayah', 'Nicolle', 'Yanet', 'Zissy', 'Candi', 'Hedwig', 'Leyna', 'Nichol', 'Reva', 'Fraidy', 'Esty', 'Kaily', 'Mimi', 'Shani', 'Hadlee', 'Naomy', 'Kinslee', 'Emmalynn', 'Alverta', 'Anushka', 'Tinsley', 'Armida', 'Cleta', 'Analise', 'Ahtziri', 'Anakaren', 'Tracee', 'Glynda', 'Kaelynn', 'Carie', 'Avalon', 'Eboni', 'Shameeka', 'Letitia', 'Enola', 'Rasheeda', 'Taylee', 'Jerrica', 'Janely', 'Taya', 'Xochilt', 'Rosana', 'Doretha', 'Henny', 'Shaniece', 'Charleen', 'Abigale', 'Marylyn', 'Retha', 'Keren', 'Elly', 'Ailani', 'Aarna', 'Starla', 'Maren', 'Nan', 'Marivel', 'Georgianna', 'Era', 'Kirra', 'Maisha', 'Caydence', 'Dinah', 'Noemy', 'Tamatha', 'Madonna', 'Kristan', 'Keana', 'Kloe', 'Maribeth', 'Sana', 'Korina', 'Irania', 'Izabelle', 'Roxy', 'Mariaguadalupe', 'Sulema', 'Vivien', 'Tatia', 'Holli', 'Debrah', 'Kattie', 'Kaidence', 'Cathey', 'Anniston', 'Refugia', 'Renita', 'Aubriella', 'Kaleah', 'Zuleyka', 'Sherie', 'Tomika', 'Charisma', 'Caridad', 'Kailynn', 'Gertie', 'Jaslynn', 'Agatha', 'Avani', 'Hennessy', 'Pamala', 'Malak', 'Raizel', 'Kami', 'Rosalina', 'Ferne', 'Cloe', 'Jeryl', 'Louann', 'Jacie', 'Tais', 'Johnsie', 'Brittnie', 'Collette', 'Lettie', 'Jeanna', 'Kyara', 'Renada', 'Abrianna', 'Nayelli', 'Alda', 'Yuna', 'Cristi', 'Yazmine', 'Marlie', 'Milly', 'Anastacia', 'Daria', 'Caitlynn', 'Shriya', 'Vianca', 'Sayuri', 'Dennise', 'Aleyna', 'Jenni', 'Tanesha', 'Suzanna', 'Zaniya', 'Kesha', 'Edie', 'Ansleigh', 'Emmie', 'Marjory', 'Lanette', 'Babette', 'Alaya', 'Palma', 'Tamie', 'Nelle', 'Haydee', 'Zeinab', 'Stephania', 'Biridiana', 'Yoshie', 'Mayme', 'Michaele', 'Marimar', 'Winona', 'Christene', 'Meadow', 'Ariya', 'Daleysa', 'Thuy', 'Nautica', 'Hadleigh', 'Aliyana', 'Annabell', 'Stacia', 'Leonore', 'Albina', 'Daira', 'Rhona', 'Lisbet', 'Alizae', 'Aminata', 'Samanta', 'Jerilyn', 'Darci', 'Sudie', 'Kynleigh', 'Marva', 'Karie', 'Marbella', 'Franchesca', 'Kylah', 'Lillyanna', 'Melony', 'Abygail', 'Yulianna', 'Sahana', 'Velvet', 'Michelina', 'Treva', 'Iona', 'Adilynn', 'Milla', 'Teressa', 'Coretta', 'Venita', 'Evalynn', 'Chynna', 'Janett', 'Nohemi', 'Symone', 'Kaycee', 'Racquel', 'Jerica', 'Chanda', 'Vannesa', 'Deasia', 'Alanah', 'Dasha', 'Dian', 'Iyana', 'Katlin', 'Shizue', 'Mitsuko', 'Shara', 'Shanelle', 'Sinead', 'Jacinda', 'Alecia', 'Tanvi', 'Genese', 'Crissy', 'Niki', 'Shanequa', 'Trish', 'Shalonda', 'Darleen', 'Magda', 'Annalisa', 'Lashanda', 'Carin', 'Nahomi', 'Londynn', 'Alaysia', 'Annaliese', 'Valorie', 'Naidelyn', 'Abbe', 'Karley', 'Cinda', 'Marilu', 'Azaria', 'Kitty', 'Mechelle', 'Jazzmin', 'Malina', 'Cianna', 'Leesa', 'Nahla', 'Dorotha', 'Jaeda', 'Tinley', 'Kelis', 'Ayesha', 'Cinthya', 'Shawnte', 'Fawn', 'Calleigh', 'Mittie', 'Aide', 'Lisset', 'Tyesha', 'Devora', 'Analeigh', 'Anahy', 'Donnamarie', 'Nala', 'Haruko', 'Lesia', 'Aideliz', 'Emme', 'Mitsue', 'Jamiya', 'Joleen', 'Missy', 'Shawanda', 'Chastelyn', 'Jaleah', 'Eulalia', 'Elvera', 'Kalina', 'Adrina', 'Nicolasa', 'Belia', 'Elodia', 'Kazuko', 'Ixchel', 'Leena', 'Yoseline', 'Yocelin', 'Jamiyah', 'Mariama', 'Audrianna', 'Dasia', 'Ieshia', 'Malorie', 'Toniann', 'Genessis', 'Makeda', 'Cherise', 'Tarsha', 'Karri', 'Romayne', 'Beronica', 'Nubia', 'Shasta', 'Cristin', 'Cristine', 'Eryn', 'Jazzmine', 'Alyssia', 'Verona', 'Divya', 'Beatrix', 'Chiyoko', 'Destinie', 'Hali', 'Myisha', 'Sabina', 'Chante', 'Brea', 'Aundrea', 'Harmoni', 'Iyanna', 'Rosaria', 'Hettie', 'Bronte', 'Constanza', 'Heavenly', 'Georgiana', 'Coco', 'Eleni', 'Brylie', 'Ajee', 'Jerrie', 'Zella', 'Xenia', 'Djuana', 'Bianka', 'Lizett', 'Destany', 'Bettina', 'Pennie', 'Ciji', 'Ciani', 'Tosha', 'Roxane', 'Tenisha', 'Pepper', 'Ayva', 'Dynasty', 'Krysten', 'Maud', 'Janene', 'Yomaira', 'Kizzie', 'Oriana', 'Antionette', 'Kamille', 'Candis', 'Kimberlie', 'Britta', 'Malika', 'Khalilah', 'Louisa', 'Maiya', 'Shanay', 'Kellye', 'Gaye', 'Rosangelica', 'Breonna', 'Jenae', 'Kaylene', 'Rileigh', 'Linnea', 'Tawanna', 'Harleen', 'Tamya', 'Makaylah', 'Annabeth', 'Alysson', 'Adella', 'Adalee', 'Karisa', 'Rosangela', 'Ema', 'Dayra', 'Tena', 'Mathilda', 'Magan', 'Dayanira', 'Annelise', 'Takisha', 'Rosamaria', 'Shifra', 'Vianna', 'Daysi', 'Jalissa', 'Samaya', 'Aubriana', 'Alora', 'Emmeline', 'Elora', 'Laylani', 'Willene', 'Cathrine', 'Ginny', 'Lashunda', 'Mikalah', 'Kiyoko', 'Wynter', 'Zuleima', 'Alease', 'Louella', 'Jubilee', 'Allegra', 'Karmen', 'Emiliana', 'Jianna', 'Eisley', 'Emmaline', 'Teresita', 'Mackenna', 'Lauretta', 'Krystin', 'Kalene', 'Aviva', 'Zena', 'Shanique', 'Glynis', 'Toya', 'Linsey', 'Denisha', 'Marysol', 'Marcelina', 'Makiyah', 'Masako', 'Cintia', 'Sharen', 'Lahoma', 'Magen', 'Alyvia', 'Shaniyah', 'Anamaria', 'Shivani', 'Hannia', 'Chavy', 'Hayleigh', 'Jaycie', 'Mayah', 'Delila', 'Danita', 'Modesta', 'Arcelia', 'Deedee', 'Monserrath', 'Angelie', 'Mellisa', 'Leisa', 'Melannie', 'Mafalda', 'Kinlee', 'Annetta', 'Freida', 'Anisha', 'Mayrin', 'Dajah', 'Delylah', 'Hortensia', 'Joretta', 'Lexy', 'Laysha', 'Anessa', 'Jesusita', 'Pearline', 'Caleigh', 'Liset', 'Leilene', 'Jaya', 'Haily', 'Tatyanna', 'Desire', 'Lisha', 'Mindi', 'Ivelisse', 'Amariah', 'Blythe', 'Treasure', 'Latarsha', 'Emelda', 'Latavia', 'Debanhi', 'Brynleigh', 'Gala', 'Jurnee', 'Joslynn', 'Harleigh', 'Trang', 'Audree', 'Brande', 'Genea', 'Carri', 'Kandy', 'Kenisha', 'Georgene', 'Kamora', 'Anabell', 'Meranda', 'Renesmee', 'Rosaura', 'Linette', 'Rosamond', 'Candida', 'Crista', 'Keeley', 'Mykayla', 'Rina', 'Jonna', 'Lorinda', 'Wynona', 'Kylene', 'Kellee', 'Elayne', 'Chela', 'Zykeria', 'Shawnna', 'Jaimee', 'Zuleyma', 'Britnee', 'Mikala', 'Coletta', 'Morelia', 'Isadora', 'Anayah', 'Amiah', 'Ailin', 'Jordana', 'Casie', 'Shakia', 'Cordelia', 'Analeah', 'Janelly', 'Adelita', 'Yoana', 'Lizabeth', 'Latoria', 'Pricila', 'Margaretta', 'Fumiko', 'Lura', 'Toshiko', 'Marge', 'Luana', 'Marilee', 'Jeana', 'Tallulah', 'Zia', 'Betsabe', 'Delanie', 'Jenicka', 'Kensington', 'Navya', 'Golda', 'Kambree', 'Orpha', 'Rayleigh', 'Kinleigh', 'Karleigh', 'Avalynn', 'Addilynn', 'Cambree', 'Brinlee', 'Liba', 'Zendaya', 'Farah', 'Oumou', 'Aislinn', 'Karena', 'Erendira', 'Mariaelena', 'Temperance', 'Angelic', 'Khadija', 'Jonelle', 'Aniah', 'Aleigha', 'Samaria', 'Dedra', 'Sammantha', 'Bernardine', 'Leilanie', 'Makaela', 'Samiya', 'Porsche', 'Krystel', 'Simona', 'Catarina', 'Joi', 'Etty', 'Jannat', 'Rubie', 'Waneta', 'Shaquita', 'Shaindel', 'Alida', 'January', 'Riana', 'Jamilet', 'Jala', 'Gearldine', 'Iola', 'Tiesha', 'Ariyana', 'Josslyn', 'Verla', 'Gerri', 'Emili', 'Jennyfer', 'Halo', 'Raya', 'Asusena', 'Jessalyn', 'Anaiah', 'Sabine', 'Dorinda', 'Andriana', 'Charissa', 'Cambrie', 'Daija', 'Danyelle', 'Maricarmen', 'Melania', 'Glinda', 'Jaretzy', 'Keesha', 'Lucie', 'Persephone', 'Veda', 'Avalyn', 'Odilia', 'Teena', 'Daisha', 'Shianne', 'Nadya', 'Peighton', 'Shawana', 'Lateefah', 'Geena', 'Aixa', 'Magdalene', 'Estefana', 'China', 'Tamekia', 'Audrie', 'Angely', 'Charline', 'Britny', 'Quanisha', 'Erykah', 'Kenzi', 'Carleigh', 'Kamiyah', 'Zayra', 'Abagail', 'Sulay', 'Shelita', 'Cattleya', 'Ariela', 'Yalitza', 'Marleigh', 'Colbie', 'Lavergne', 'Pyper', 'Tawni', 'Kasie', 'Kati', 'Cinnamon', 'Trana', 'Verda', 'Romana', 'Merrily', 'Landri', 'Bruchy', 'Irlanda', 'Lanie', 'Kendyl', 'Sanvi', 'Akshara', 'Aneesa', 'Giulia', 'Ruchy', 'Giulianna', 'Zahara', 'Sumaya', 'Guillermina', 'Araseli', 'Jackelin', 'Norine', 'Ariane', 'Naidelin', 'Gwenyth', 'Kya', 'Liyah', 'Danya', 'Sujey', 'Grayce', 'Honey', 'Assunta', 'Aleksandra', 'Almeda', 'Devany', 'Spring', 'Patrica', 'Delisa', 'Fantasia', 'Cydney', 'Laquisha', 'Lynsey', 'Stephenie', 'Cassaundra', 'Elisabet', 'Echo', 'Juliann', 'Micayla', 'Iridiana', 'Antonietta', 'Rosaisela', 'Bayleigh', 'Candelaria', 'Zaida', 'Mercedez', 'Kindra', 'Malayah', 'Stephaine', 'Nayla', 'Tameeka', 'Kiesha', 'Pooja', 'Sahar', 'Paisleigh', 'Kynslee', 'Idella', 'Arelis', 'Shizuko', 'Leslee', 'Acacia', 'Elexis', 'Violetta', 'Sailor', 'Marceline', 'Una', 'Kamilla', 'Aulani', 'Aracelis', 'Kikue', 'Kasi', 'Elwanda', 'Brookelyn', 'Kellyann', 'Shaquanna', 'Marielle', 'Isel', 'Agustina', 'Vergie', 'Arriana', 'Perel', 'Maylee', 'Navy', 'Lanell', 'Rosann', 'Carmelita', 'Deisi', 'Alyza', 'Nailah', 'Somaya', 'Kiarra', 'Tatiyana', 'Nelida', 'Demetra', 'Thais', 'Syriana', 'Nicki', 'Tyanna', 'Idaly', 'Ramonita', 'Zuzanna', 'Aiza', 'Larae', 'Alyanna', 'Aleyah', 'Elayna', 'Blaire', 'Laniyah', 'Rilynn', 'Kandi', 'Sherryl', 'Marti', 'Cherri', 'Kimberli', 'Carma', 'Trena', 'Darcie', 'Evelyne', 'Allissa', 'Meliza', 'Regine', 'Adalina', 'Siya', 'Seraphina', 'Calliope', 'Jiya', 'Talisa', 'Mistie', 'Ignacia', 'Crysta', 'Lona', 'Voncile', 'Rutha', 'Kamiya', 'Anslee', 'Janya', 'Berenise', 'Sonji', 'Yaeko', 'Nika', 'Queena', 'Yatziri', 'Aiko', 'Lisamarie', 'Evalina', 'Alline', 'Alejandrina', 'Trula', 'Hinda', 'Delinda', 'Brisia', 'Aminah', 'Mariella', 'Nayzeth', 'Sherlin', 'Idalia', 'Madaline', 'Shenika', 'Janaya', 'Fabiana', 'Aleeah', 'Lasonya', 'Jania', 'Breindy', 'Mitzy', 'Yaquelin', 'Tzipora', 'Serene', 'Mikaila', 'Aicha', 'Brucha', 'Myrka', 'Kaaren', 'Meg', 'Lise', 'Suhani', 'Liane', 'Celisse', 'Jasmyne', 'Sharde', 'Dannielle', 'Crystle', 'Jenniffer', 'Shaneka', 'Leslye', 'Hedy', 'Tashina', 'Letisia', 'Carys', 'Antonetta', 'Tamisha', 'Kaniya', 'Darline', 'Alizay', 'Minna', 'Raelene', 'Rebecka', 'Martika', 'Makiya', 'Idalis', 'Keasia', 'Breeann', 'Vlasta', 'Ellianna', 'Caelyn', 'Kaytlin', 'Cathi', 'Jamia', 'Tahnee', 'Zulma', 'Mallorie', 'Katlynn', 'Mahi', 'Carleen', 'Honesty', 'Rasheedah', 'Ronna', 'Lissa', 'Sherika', 'Carolynn', 'Romona', 'Jamesha', 'Shakiyla', 'Mccall', 'Joanie', 'Makala', 'Brionna', 'Shaunna', 'Hawa', 'Marylin', 'Baylie', 'Preslie', 'Aaralyn', 'Pia', 'Beatris', 'Adria', 'Arianne', 'Carmina', 'Sebrina', 'Malani', 'Lovely', 'Jahaira', 'Miyah', 'Sylvie', 'Cassi', 'Kaniyah', 'Cailin', 'Santina', 'Nariah', 'Calandra', 'Georgine', 'Serafina', 'Keyana', 'Amethyst', 'Tehya', 'Avni', 'Alessa', 'Novalee', 'Mayleen', 'Aadya', 'Jacquelynn', 'Izetta', 'Sumiko', 'Irasema', 'Annamaria', 'Niya', 'Latrina', 'Cicely', 'Kristiana', 'Kimiko', 'Keara', 'Mazie', 'Najah', 'Evelia', 'Tiarra', 'Jaela', 'Montine', 'Mandie', 'Lavada', 'Dimple', 'Emiko', 'Yocelyn', 'Issabella', 'Rowena', 'Tanja', 'Velda', 'Chantell', 'Gretel', 'Jacelyn', 'Kambri', 'Zayla', 'Anasofia', 'Atiana', 'Dulcemaria', 'Zulay', 'Tari', 'Sahasra', 'Rayleen', 'Greydis', 'Shiela', 'Florinda', 'Samya', 'Shakima', 'Shakeema', 'Yanely', 'Lavina', 'Azalee', 'Oneta', 'Tammye', 'Kelsy', 'Kalie', 'Keanna', 'Daniya', 'Antonina', 'Katharin', 'Tiare', 'Yorley', 'Jeslyn', 'Emeli', 'Zakia', 'Massiel', 'Latesha', 'Jenessa', 'Jayna', 'Raylynn', 'Ainslee', 'Aralynn', 'Khloee', 'Ily', 'Emeri', 'Jeni', 'Kassi', 'Nakita', 'Lakia', 'Ariyanna', 'Addalyn', 'Keyanna', 'Bibiana', 'Genna', 'Kathya', 'Leana', 'Trane', 'Yomira', 'Brigid', 'Dionna', 'Jerilynn', 'Sarita', 'Altha', 'Laniya', 'Zakiya', 'Akilah', 'Celestina', 'Priyanka', 'Taliah', 'Donya', 'Soila', 'Quetzalli', 'Cristel', 'Naia', 'Kailah', 'Zitlaly', 'Tonda', 'Cate', 'Lizzet', 'Vesta', 'Sherilyn', 'Teruko', 'Aldona', 'Armandina', 'Ciana', 'Amairany', 'Elysia', 'Samarah', 'Janyla', 'Skylee', 'Rolanda', 'Sapphire', 'Setsuko', 'Miyoko', 'Contina', 'Imogen', 'Jailine', 'Vanellope', 'Leora', 'Jennah', 'Perl', 'Analiyah', 'Hellen', 'Tyasia', 'Symphony', 'Amada', 'Otilia', 'Avigail', 'Tzivia', 'Fradel', 'Mariadelcarmen', 'Ilona', 'Dyan', 'Zahraa', 'Patrisia', 'Jersey', 'Lilla', 'Lossie', 'Somer', 'Deserie', 'Jaila', 'Briseis', 'Aniston', 'Idell', 'Raeleigh', 'Gracyn', 'Everlee', 'Laurene', 'Sherita', 'Pinkie', 'Nakisha', 'Olevia', 'Corene', 'Loreen', 'Sandie', 'Keosha', 'Kenleigh', 'Alli', 'Alyana', 'Prisha', 'Brookelynn', 'Thaily', 'Maddie', 'Grettel', 'Kinzley', 'Jailynn', 'Kalli', 'Jazzlynn', 'Klaudia', 'Blanch', 'Mariafernanda', 'Makenzi', 'Shonna', 'Lita', 'Karima', 'Rebeccah', 'Isaura', 'Kalee', 'Jori', 'Allysa', 'Tonisha', 'Neda', 'Jenine', 'Chanell', 'Jamaya', 'Lorrayne', 'Birtha', 'Kanisha', 'Nicollette', 'Desiray', 'Kaity', 'Shamya', 'Kathlene', 'Jann', 'Sari', 'Lucila', 'Tressie', 'Charise', 'Kalista', 'Jamileth', 'Kalena', 'Sakura', 'Blondell', 'Thomasina', 'Aila', 'Mossie', 'Tamala', 'Siri', 'Gertha', 'Reta', 'Easter', 'Tala', 'Vivianne', 'Nila', 'Merida', 'Ahana', 'Lanelle', 'Hilaria', 'Arlys', 'Inell', 'Rylynn', 'Cosette', 'Penne', 'Jenevieve', 'Jenilee', 'Carlotta', 'Ziva', 'Hildegard', 'Aleshia', 'Nedra', 'Madelaine', 'Lisandra', 'Pang', 'Sindy', 'Zenaida', 'Lulu', 'Shanya', 'Shakema', 'Katiria', 'Raffaela', 'Solange', 'Illiana', 'Chelsy', 'Shanee', 'Adriene', 'Tyla', 'Cailey', 'Daijah', 'Melonie', 'Courteney', 'Deysi', 'Makinley', 'Brynna', 'Hildegarde', 'Fiorella', 'Kenadee', 'Ellyn', 'Ebonie', 'Thu', 'Charde', 'Kaytlyn', 'Kenadie', 'Georgeann', 'Analicia', 'Emalee', 'Shatara', 'Lucerito', 'Mckell', 'Atiya', 'Stormi', 'Maleny', 'Nariyah', 'Steffanie', 'Kirstyn', 'Zayda', 'Mariadejesus', 'Deeann', 'Abcde', 'Eleanora', 'Pearle', 'Seana', 'Denine', 'Presleigh', 'Keziah', 'Queenie', 'Henchy', 'Merari', 'Joscelyn', 'Celest', 'Mirel', 'Sania', 'Maryah', 'Angelena', 'Emelyn', 'Gissele', 'Fanta', 'Gaylene', 'Adelaida', 'Madie', 'Maja', 'Nashaly', 'Christel', 'Rachele', 'Raniyah', 'Rashel', 'Kavya', 'Callista', 'Elmira', 'Rifky', 'Syeda', 'Tresa', 'Detra', 'Jarely', 'Prisila', 'Enedelia', 'Trany', 'Lainie', 'Yisel', 'Alynna', 'Allysson', 'Tamica', 'Velva', 'Nancee', 'Breleigh', 'Shanita', 'Orelia', 'Patrici', 'Daja', 'Shardae', 'Abriana', 'Halee', 'Dorcas', 'Kathey', 'Rosia', 'Princesa', 'Lezly', 'Dawnmarie', 'Gaby', 'Ania', 'Denae', 'Jahzara', 'Jaymie', 'Bari', 'Suzann', 'Alnisa', 'Fatimah', 'Zakiyyah', 'Yana', 'Naimah', 'Tyisha', 'Kathaleen', 'Sameerah', 'Chesney', 'Shanteria', 'Pamella', 'Rayven', 'Romelia', 'Lucretia', 'Tova', 'Aura', 'Chelsee', 'Roizy', 'Manha', 'Nisha', 'Tierney', 'Girl', 'Taelor', 'Litzi', 'Sneha', 'Natisha', 'Alliyah', 'Sully', 'Twyla', 'Daisey', 'Sarahy', 'Shemeka', 'Lexis', 'Shalanda', 'Kelcie', 'Natacha', 'Amyah', 'Byanka', 'Kymberly', 'Navil', 'Britani', 'Karolyn', 'Emelie', 'Zana', 'Vernita', 'Leigha', 'Romy', 'Arlet', 'Jazlin', 'Laynie', 'Jesslyn', 'Adilyn', 'Karoline', 'Nyomi', 'Maycee', 'Nicol', 'Daliah', 'Lillyann', 'Shawnda', 'Dede', 'Wiktoria', 'Liah', 'Liya', 'Emmerson', 'Aarohi', 'Aribella', 'Brayleigh', 'Sumie', 'Elke', 'Taja', 'Ahsley', 'Tisa', 'Dannette', 'Gidget', 'Misao', 'Adelle', 'Jamiah', 'Joselynn', 'Jalyssa', 'Marnita', 'Trinitee', 'Bev', 'Aleida', 'Cloey', 'Tahlia', 'Melodee', 'Anaiya', 'Clover', 'Prudence', 'Kalynn', 'Dezirae', 'Solana', 'Reena', 'Mariko', 'Tiffiny', 'Elinore', 'Madelyne', 'Anela', 'Bess', 'Perri', 'Loree', 'Cyndy', 'Yolonda', 'Jolee', 'Tequila', 'Sumer', 'Cherilyn', 'Ela', 'Kenlee', 'Alexxis', 'Larisa', 'Nevaeha', 'Nira', 'Shaquasia', 'Shanel', 'Medina', 'Rifka', 'Sable', 'Atara', 'Aissatou', 'Mecca', 'Anastasi', 'Falon', 'Holley', 'Yuliza', 'Lili', 'Siara', 'Kiarah', 'Tiffaney', 'Alyah', 'Annalia', 'Naila', 'Analiah', 'Aymar', 'Tambra', 'Elna', 'Eola', 'Tkeyah', 'Zola', 'Francheska', 'Aidee', 'Alexzandra', 'Cianni', 'Myasia', 'Carisa', 'Ilah', 'Yenifer', 'Veronika', 'Nahomy', 'Madysen', 'Elsy', 'Lilli', 'Belva', 'Steffie', 'Kaylea', 'Ginamarie', 'Sharman', 'Latia', 'Shakeria', 'Audelia', 'Odette', 'Shaniah', 'Diamantina', 'Lorayne', 'Ciarra', 'Wilhelmena', 'Zaina', 'Niesha', 'Kanesha', 'Turquoise', 'Tziporah', 'Timi', 'Fatou', 'Karna', 'Matsue', 'Vina', 'Ronisha', 'Layan', 'Viktoria', 'Lilyann', 'Maliya', 'Jamilex', 'Epifania', 'Fidela', 'Delphia', 'Starasia', 'Glennie', 'Teodora', 'Hatsue', 'Margarett', 'Margarette', 'Laronda', 'Vicenta', 'Cotina', 'Meilani', 'Mannat', 'Leylani', 'Lailani', 'Seerat', 'Reya', 'Amilia', 'Avary', 'Brocha', 'Daneen', 'Kimie', 'Trudi', 'Margret', 'Djuna', 'Charis', 'Izzabella', 'Brionne', 'Elenora', 'Lakeitha', 'Jacki', 'Beckie', 'Guinevere', 'Inara', 'Landrie', 'Nicoletta', 'Ayari', 'Zaniah', 'Merlene', 'Keli', 'Maricella', 'Leonela', 'Donita', 'Tehani', 'Susannah', 'Journi', 'Machelle', 'Tammara', 'Cherrie', 'Nelva', 'Destanie', 'Neyda', 'Tabetha', 'Wilhelmenia', 'Brieanna', 'Turkessa', 'Ameera', 'Avital', 'Marycruz', 'Zoila', 'Tressa', 'Joellen', 'Raisa', 'Bethanie', 'Ermelinda', 'Asiyah', 'Monifa', 'Samia', 'Adamary', 'Anahit', 'Rania', 'Miri', 'Ether', 'Desirea', 'Chimere', 'Erla', 'Karisma', 'Nalleli', 'Larhonda', 'Darlyn', 'Anaisa', 'Suellen', 'Kamaria', 'Nashla', 'Yuriko', 'Tzirel', 'Tehila', 'Myriah', 'Frimet', 'Cesilia', 'Marika', 'Frady', 'Deloise', 'Saleen', 'Betsey', 'Merri', 'Laurette', 'Sharita', 'Shena', 'Porscha', 'Aerial', 'Florrie', 'Ayah', 'Anusha', 'Jeanelle', 'Lessly', 'Mahogany', 'See', 'Hang', 'Karinna', 'Leighann', 'Elexus', 'Markayla', 'Kaneesha', 'Barbie', 'Aurea', 'Kaeli', 'Arwen', 'Angelyn', 'Jaclynn', 'Tesla', 'Maritsa', 'Madelin', 'Alisia', 'Tyana', 'Kimberlyn', 'Dejanae', 'Dalena', 'Blessing', 'Courtnie', 'Amaria', 'Micki', 'Safa', 'Jadah', 'Mele', 'Maryssa', 'Channel', 'Lianne', 'Alea', 'Chyanne', 'Addelyn', 'Aaleyah', 'Michela', 'Torri', 'Indira', 'Kanani', 'Lashundra', 'Mikaylah', 'Zoee', 'Taelyn', 'Noheli', 'Sarena', 'Dariela', 'Adalie', 'Meggan', 'Daniyah', 'Sela', 'Shaelyn', 'Maylen', 'Giovana', 'Ayvah', 'Arabelle', 'Adaline', 'Isyss', 'Melanny', 'Margaux', 'Klara', 'Janey', 'Idolina', 'Georgetta', 'Amaiya', 'Sianna', 'Rebeka', 'Meleny', 'Kelle', 'Angelika', 'Malerie', 'Latara', 'Niamh', 'Yevette', 'Yomayra', 'Karizma', 'Nayelie', 'Shantal', 'Latoyia', 'Jenee', 'Shandra', 'Magdalen', 'Yatzari', 'Jettie', 'Charlsie', 'Idy', 'Inaya', 'Yitta', 'Reem', 'Basya', 'Skylynn', 'Elyana', 'Brynley', 'Amor', 'Amberlee', 'Eternity', 'Niyah', 'Emiley', 'Madeleyn', 'Korie', 'Sanaya', 'Meira', 'Chevonne', 'Sabra', 'Uma', 'Kaira', 'Isobel', 'Elli', 'Gurleen', 'Berneice', 'Alvera', 'Ambrosia', 'Roya', 'Bettyann', 'Alverda', 'Tinamarie', 'Tanasia', 'Lavonda', 'Jorja', 'Heide', 'Marwa', 'Annaly', 'Aaliah', 'Ileen', 'Lamonica', 'Enjoli', 'Ninel', 'Milissa', 'Dawne', 'Joie', 'Ashlei', 'Elidia', 'Maybelle', 'Getsemani', 'Gisella', 'Mariya', 'Adisyn', 'Adia', 'Caterina', 'Bettyjane', 'Kaydee', 'Rasheda', 'Camisha', 'Chassidy', 'Sadia', 'Aislyn', 'Ngoc', 'Mirka', 'Lanita', 'Lashawnda', 'Liridona', 'Tynisa', 'Arnelle', 'Librada', 'Marita', 'Makyla', 'Raniya', 'Kandis', 'Ethelyn', 'Divina', 'Genevie', 'Jadelyn', 'Ashleen', 'Saya', 'Marli', 'Calli', 'Anyla', 'Sheng', 'Vasiliki', 'Yelena', 'Darya', 'Clarabelle', 'Shirlene', 'Tommye', 'Julieann', 'Jennefer', 'Rana', 'Raeann', 'Suleima', 'Lilyanne', 'Jelisa', 'Jaymee', 'Rhylee', 'Keyli', 'Brooklin', 'Meta', 'Shakirah', 'Loria', 'Sharyl', 'Sharday', 'Manuelita', 'Debera', 'Lera', 'Jacquie', 'Ardella', 'Jameria', 'Winnifred', 'Rossana', 'Shemika', 'Sedona', 'Arvilla', 'Samaira', 'Aitana', 'Daiana', 'Cassia', 'Lucianna', 'Tama', 'Shigeko', 'Sueko', 'Hatsuko', 'Hazle', 'Lida', 'Wylene', 'Sachiko', 'Tahiry', 'Renea', 'Janina', 'Becki', 'Vy', 'Cherryl', 'Arionna', 'Marrissa', 'Randee', 'Norita', 'Sonali', 'Susann', 'Rachell', 'Natashia', 'Aspyn', 'Malaika', 'Nuvia', 'Safiya', 'Contessa', 'Julyssa', 'Analiese', 'Jacee', 'Kathlyn', 'Gracey', 'Chassity', 'Kady', 'Tytiana', 'Katiana', 'Eneida', 'Teela', 'Roiza', 'Alaura', 'Giuseppina', 'Randa', 'Danisha', 'Mariza', 'Marquisha', 'Sharese', 'Deseree', 'Inaaya', 'Rivkah', 'Tawnie', 'Miriah', 'Shereen', 'Candra', 'Tomiko', 'Whittney', 'Aziza', 'Ayala', 'Hafsa', 'Zaynab', 'Kaileigh', 'Yarima', 'Danitza', 'Maram', 'Shakeya', 'Emmer', 'Kareema', 'Maayan', 'Rheanna', 'Jaritza', 'Marleny', 'Zitlali', 'Vanity', 'Apryl', 'Zully', 'Tashia', 'Courtnee', 'Laporsha', 'Luvenia', 'Batya', 'Ayelet', 'Quetcy', 'Tiny', 'Felicita', 'Omaira', 'Nyssa', 'Krystine', 'Stevi', 'Michiko', 'Tennie', 'Tomekia', 'Billiejo', 'Yohana', 'Krystyna', 'Kacee', 'Naja', 'Charmayne', 'Twana', 'Jeane', 'Brittnay', 'Cherelle', 'Raechel', 'Temeka', 'Jasmeen', 'Zuria', 'Zailey', 'Saydee', 'Renatta', 'Neta', 'Bg', 'Italy', 'Terrica', 'Goldia', 'Monae', 'Yelitza', 'Ryanne', 'Samirah', 'Breckyn', 'Nicolina', 'Olympia', 'Almeta', 'Tamesha', 'Zora', 'Emmaleigh', 'Loralei', 'Kennadi', 'Julieanna', 'Jenavieve', 'Shylah', 'Akemi', 'Fonda', 'Nizhoni', 'Iqra', 'Gaetana', 'Coreen', 'Evonne', 'Sadako', 'Angele', 'Macel', 'Alinna', 'Avneet', 'Jannah', 'Nickole', 'Lotus', 'Yukie', 'Laiyah', 'Kynzlee', 'Mailen', 'Nobuko', 'Annaleigh', 'Otila', 'Shona', 'Kimberely', 'Delcie', 'Zula', 'Roselynn', 'Aleeyah', 'Bellarose', 'Damya', 'Cammie', 'Treena', 'Chanie', 'Kaliah', 'Abella', 'Aahana', 'Mileena', 'Adaleigh', 'Keiry', 'Journie', 'Myrtie', 'Tsuruko', 'Lyda', 'Fernande', 'Julee', 'Estephany', 'Louvenia', 'Monserat', 'Meena', 'Jayline', 'Brie', 'Elicia', 'Suzana', 'Dusti', 'Odile', 'Hilma', 'Katarzyna', 'Jenise', 'Hiromi', 'Huong', 'Deolinda', 'Pamelia', 'Awa', 'Odelia', 'Mattison', 'Gwenda', 'Sera', 'Yuritzi', 'Karishma', 'Kaina', 'Henna', 'Norene', 'Brina', 'Chyenne', 'Moncerrat', 'Keilah', 'Saphira', 'Marylee', 'Meri', 'Lajuana', 'Lindsy', 'Shanise', 'Sugey', 'Jaimi', 'Viki', 'Ceola', 'Naiya', 'Adysen', 'Shantavia', 'Amberlyn', 'Brihanna', 'Laela', 'Kenadi', 'Hermine', 'Bernita', 'Deziree', 'Anja', 'Lawana', 'Aletha', 'Nella', 'Irelyn', 'Jakira', 'Wynema', 'Janai', 'Keondra', 'Venice', 'Zenobia', 'Jaelene', 'Ammy', 'Alizah', 'Lakiesha', 'Azure', 'Aysha', 'Saniah', 'Mahnoor', 'Ananda', 'Asma', 'Aissata', 'Jaileen', 'Yailin', 'Xiana', 'Laiza', 'Liseth', 'Marykate', 'Daizy', 'Neoma', 'Tykeria', 'Shamiya', 'Nykeria', 'Addalynn', 'Kenzley', 'Ardyce', 'Anylah', 'Vallie', 'Darlyne', 'Makiah', 'Neela', 'Naraly', 'Danni', 'Jolina', 'Ailene', 'Lyndia', 'Erminia', 'Asiya', 'Alexius', 'Mc', 'Maylene', 'Signe', 'Adelfa', 'Yusra', 'Keyonna', 'Yasuko', 'Yukiko', 'Augustina', 'Leen', 'Fumie', 'Amora', 'Annaleah', 'Anvi', 'Indie', 'Haya', 'Emmarie', 'Enya', 'Chieko', 'Kinsleigh', 'Shiann', 'Eufemia', 'Fusae', 'Akiko', 'Hosanna', 'Alitzel', 'Araya', 'Anaiyah', 'Rosy', 'Nishka', 'Gao', 'Tamiya', 'Lillyan', 'Eudelia', 'Kamea', 'Berlyn', 'Kahlan', 'Alinah', 'Mahealani', 'Leeah', 'Rosalynn', 'Zadie', 'Aolanis', 'Esta', 'Maisy', 'Chevelle', 'Jalayah', 'Yatziry', 'Alyne', 'Jodell', 'Sariya', 'Yashica', 'Jissel', 'Letty', 'Mariaisabel', 'Lizzeth', 'Yovana', 'Dyanna', 'Tamyra', 'Monzerrat', 'Seanna', 'Eldora', 'Mattea', 'Zahira', 'Jeanetta', 'Aysia', 'Ashlin', 'Tenika', 'Lezlie', 'Kailie', 'Jariyah', 'Jovie', 'Kiyah', 'Lynlee', 'Abriella', 'Adleigh', 'Ranada', 'Vertie', 'Flonnie', 'Kynnedi', 'Lya', 'Acelynn', 'Emalyn', 'Emberly', 'Yalexa', 'Izabela', 'Sadye', 'Kamyla', 'Jayleigh', 'Cayleigh', 'Ceil', 'Inger', 'Cindee', 'Nena', 'Loan', 'Kiya', 'Laure', 'Cristen', 'Celenia', 'Fredda', 'Ravyn', 'Mee', 'Graci', 'Azalia', 'Latina', 'Hassie', 'Dinorah', 'Virna', 'Autum', 'Michala', 'Macayla', 'Franca', 'Corissa', 'Alysse', 'Monisha', 'Jessyca', 'Nisa', 'Jacqulyn', 'Makaylee', 'Ellin', 'Jameelah', 'Shalon', 'Jlynn', 'Kennady', 'Brinkley', 'Providence', 'Phylis', 'Eugenie', 'Clementina', 'Kadynce', 'Yuvia', 'Mailyn', 'Taneisha', 'Samone', 'Aurore', 'Brienne', 'Tritia', 'Fayth', 'Jayci', 'Jorie', 'Loreal', 'Taylar', 'Maryn', 'Melissia', 'Midori', 'Hisako', 'Hulda', 'Bobbijo', 'Bulah', 'Nancye', 'Melvina', 'Sherree', 'Kierstin', 'Merrilee', 'Lonna', 'Judyth', 'Nancie', 'Lark', 'Candyce', 'Kadejah', 'Kenda', 'Fara', 'Estephania', 'Cady', 'Marilin', 'Kadie', 'Suleyma', 'Jacquelyne', 'Vonetta', 'Tanairi', 'Charlott', 'Shannel', 'Zenia', 'Alise', 'Takara', 'Lyndsie', 'Ivett', 'Letisha', 'Idania', 'Lacee', 'Candie', 'Camelia', 'Brittanee', 'Mariaeduarda', 'Geovanna', 'Kirsti', 'Michaella', 'Kelsee', 'Cheryll', 'Cyrstal', 'Keriann', 'Latrisha', 'Exie', 'Deborha', 'Verdie', 'Zahava', 'Zuleika', 'Dorla', 'Dominiqu', 'Sharina', 'Ardeth', 'Alethia', 'Starlene', 'Shamira', 'Shantelle', 'Marilou', 'Kyah', 'Kyana', 'Clemencia', 'Cordie', 'Meagen', 'Xitlalic', 'Gaia', 'Ellia', 'Elani', 'Jaylani', 'Krisha', 'Khalia', 'Aaradhya', 'Aeris', 'Avamarie', 'Artemis', 'Sheana', 'Jolynn', 'Sandee', 'Wendie', 'Willia', 'Loriene', 'Apolonia', 'Eusebia', 'Kazue', 'Synthia', 'Harue', 'Siomara', 'Nhi', 'Maleni', 'Toyoko', 'Freeda', 'Hideko', 'Sherrye', 'Bethanne', 'Merrie', 'Peri', 'Ozella', 'Venetia', 'Revonda', 'Breauna', 'Arika', 'Annissa', 'Leeza', 'Siera', 'Jakiyah', 'Kamaya', 'Lashay', 'Elvina', 'Laquinta', 'Faren', 'Harmonie', 'Brianny', 'Jama', 'Johna', 'Sharalyn', 'Aziyah', 'Hadassa', 'Shantinique', 'Treasa', 'Penni', 'Shakera', 'Carolyne', 'Shaunda', 'Talya', 'Karyna', 'Natosha', 'Vivica', 'Pauletta', 'Laverna', 'Danasia', 'Shakita', 'Sharee', 'Yajayra', 'Karlene', 'Reatha', 'Laiba', 'Zamiyah', 'Shirleen', 'Bettylou', 'Nakiya', 'Eryka', 'Bailie', 'Janiece', 'Keisa', 'Kiah', 'Jennica', 'Niasia', 'Hildy', 'Jacquel', 'Mahina', 'Eshal', 'Khloey', 'Emelin', 'Eesha', 'Kaylei', 'Aymee', 'Alona', 'Catelyn', 'Arushi', 'Ameerah', 'Regenia', 'Brailey', 'Sparkle', 'Giavonna', 'Ashunti', 'Naudia', 'Kyrsten', 'Emmalina', 'Neve', 'Aolani', 'Gizella', 'Tameika', 'Leocadia', 'Nidhi', 'Wende', 'Eshaal', 'Cherice', 'Lakeysha', 'Menucha', 'Ameena', 'Kloey', 'Nayelly', 'Kathryne', 'Lashawna', 'Kristle', 'Zaylie', 'Keylee', 'Landree', 'Wynell', 'Dezarae', 'Angelli', 'Haddie', 'Ilyana', 'Jaleigh', 'Brilee', 'Lakeya', 'Kanika', 'Lavinia', 'Marykay', 'Ruthanne', 'Tenille', 'Dorine', 'Esabella', 'Genavieve', 'Zarah', 'Mileidy', 'Solara', 'Yamila', 'Amaia', 'Season', 'Cheree', 'Luise', 'Tracye', 'Christeen', 'Florida', 'Breona', 'Kathe', 'Jakyra', 'Zury', 'Lakeesha', 'Yaneth', 'Keandra', 'Agnieszka', 'Markita', 'Mariska', 'Zada', 'Breasia', 'Aaniyah', 'Kambria', 'Lilit', 'Sheilah', 'Cherisse', 'Hermione', 'Angeli', 'Britnie', 'Lisett', 'Joette', 'Nga', 'Ruthe', 'Anamarie', 'Mayeli', 'Takia', 'Lien', 'Tenaya', 'Kera', 'Meah', 'Mei', 'Anoushka', 'Annalyse', 'Persia', 'Sheccid', 'Kyndra', 'Susy', 'Steffany', 'Jennavecia', 'Briannah', 'Kynlie', 'Alexxa', 'Paizlee', 'Jesika', 'Kinzlee', 'Ishani', 'Freyja', 'Julietta', 'Raynette', 'Nely', 'Zayleigh', 'Amberlynn', 'Journei', 'Eimy', 'Delany', 'Emarie', 'Brilynn', 'Audri', 'Abilene', 'Saoirse', 'Naveah', 'Ayelen', 'Emeline', 'Loryn', 'Mykaela', 'Skarlett', 'Tionne', 'Rashelle', 'Jerline', 'Ofilia', 'Rufina', 'Phillis', 'Jenica', 'Dineen', 'Glory', 'Camellia', 'Alane', 'Angelyna', 'Amalie', 'Kina', 'Kateri', 'Neyva', 'Malisa', 'Saida', 'Jerika', 'Bayli', 'Crystale', 'Silvana', 'Inga', 'Lyndi', 'Leeanna', 'Cheyanna', 'Fayrene', 'Torie', 'Latashia', 'Baleigh', 'Fidencia', 'Rori', 'Lorianne', 'Catrice', 'Cherrelle', 'Lashaunda', 'Sammi', 'Thomasena', 'Roshanda', 'Alazae', 'Enza', 'Mairead', 'Pandora', 'Kortni', 'Timber', 'Chasidy', 'Danesha', 'Camry', 'Charlette', 'Kaneshia', 'Shadae', 'Keturah', 'Randye', 'Kiyana', 'Charlean', 'Delana', 'Tomasita', 'Lilliam', 'Bibi', 'Marguita', 'Maryalice', 'Iraida', 'Tyhessia', 'Makeba', 'Tanaya', 'Keiara', 'Madlyn', 'Jelissa', 'Shakayla', 'Mickayla', 'Aleisha', 'Keyara', 'Mekayla', 'Mykala', 'Riva', 'Inaara', 'Paulita', 'Lashae', 'Destynee', 'Christianna', 'Rise', 'Larraine', 'Luetta', 'Merna', 'Francena', 'Diedre', 'Georgiann', 'Rubbie', 'Denita', 'Dyani', 'Laticia', 'Ludivina', 'Suanne', 'Hareem', 'Nava', 'Florie', 'Sherly', 'Vidhi', 'Camie', 'Sharell', 'Chole', 'Jolin', 'Polina', 'Brittiany', 'Delicia', 'Brystol', 'Beaulah', 'Leatha', 'Jamilah', 'Zona', 'Elliette', 'Joye', 'Aashi', 'Kerriann', 'Xin', 'Michaelene', 'Senaida', 'Emaan', 'Nakayla', 'Aine', 'Jadalyn', 'Maimouna', 'Klaire', 'Macee', 'Shandi', 'Heily', 'Braylynn', 'Aislynn', 'Chevon', 'Henretta', 'Bellamy', 'Icie', 'Draya', 'Darianna', 'Zyana', 'Zaelynn', 'Story', 'Kambrie', 'Ranae', 'Florencia', 'Porfiria', 'Elianny', 'Karren', 'Yachet', 'Euna', 'Naoma', 'Stefania', 'Liora', 'Zlaty', 'Margene', 'Denna', 'Isidra', 'Faustina', 'Bintou', 'Arbutus', 'Kelci', 'Evelena', 'Maudine', 'Agapita', 'Olyvia', 'Loma', 'Veola', 'Mckinlee', 'Lamya', 'Nashali', 'Baileigh', 'Josselin', 'Kaydance', 'Paiton', 'Myleigh', 'Jazlyne', 'Indya', 'Siham', 'Aryn', 'Madalene', 'Nefertiti', 'Meridith', 'Kamesha', 'Peg', 'Shelbey', 'Pearlean', 'Jamika', 'Maryama', 'Sabria', 'Taniqua', 'Maraya', 'Joely', 'Karys', 'Charolette', 'Orly', 'Jennipher', 'Kimbra', 'Krislynn', 'Kenlie', 'Karrington', 'Kambry', 'Alasia', 'Carletta', 'Maija', 'Nadirah', 'Gladyce', 'Shevy', 'Jalessa', 'Mushka', 'Cyre', 'Mabry', 'Arihanna', 'Brithany', 'Ilianna', 'Jozlynn', 'Zandra', 'Serinity', 'Passion', 'Lacresha', 'Jeraldine', 'Henriette', 'Elenore', 'Nastassia', 'Ruchel', 'Amal', 'Madina', 'Rosaline', 'Liyana', 'Pasqualina', 'Keyra', 'Kaycie', 'Lyanna', 'Naina', 'Gennesis', 'Aarushi', 'Lariah', 'Jakiya', 'Kareena', 'Rhiana', 'Emilly', 'Yeimi', 'Matsuko', 'Makhia', 'Alin', 'Addisen', 'Lanae', 'Oceana', 'Laquanda', 'Coralie', 'Arina', 'Harini', 'Emy', 'Emmarose', 'Ellyana', 'Eila', 'Havana', 'Dagny', 'Leylah', 'Shawanna', 'Aleenah', 'Adalia', 'Aaliya', 'Zyanya', 'Felisa', 'Tameca', 'Sama', 'Ripley', 'Nayomi', 'Quanesha', 'Shequita', 'Shanik', 'Savina', 'Nalah', 'Magaby', 'Cattaleya', 'Calla', 'Lillia', 'Kaida', 'Izabell', 'Francia', 'Caylin', 'Bianey', 'Hanah', 'Julienne', 'Viva', 'Xochil', 'Staphany', 'Rayanne', 'Marialuisa', 'Devina', 'Sabryna', 'Estefanie', 'Dinora', 'Clarisse', 'Josephina', 'Milca', 'Anjolie', 'Akayla', 'Malea', 'Mea', 'Meghana', 'Briceida', 'Betsaida', 'Roselin', 'Anuhea', 'Megha', 'Azusena', 'Nandini', 'Prisilla', 'Shelsy', 'Olene', 'Kaneisha', 'Onalee', 'Jadore', 'Monteen', 'Trudie', 'Leisha', 'Drucilla', 'Tamiyah', 'Ashante', 'Daysha', 'Caliyah', 'Sabella', 'Emoni', 'Jakyla', 'Reginae', 'Anyah', 'Kierstyn', 'Sharleen', 'Doretta', 'Harlene', 'Gerrie', 'Zofia', 'Albertine', 'Bronwyn', 'Terresa', 'Delta', 'Anijah', 'Mathilde', 'Cindie', 'Dalene', 'Cyndee', 'Eulah', 'Ayako', 'Beverlee', 'Nicholle', 'Kaitlan', 'Yeraldin', 'Tawney', 'Tawnee', 'Britteny', 'Alishia', 'Shireen', 'Byanca', 'Rebekka', 'Annel', 'Krizia', 'Kerstin', 'Shera', 'Uyen', 'Ligia', 'Dallana', 'Itsel', 'Karine', 'Sharmaine', 'Noely', 'Marrisa', 'Karah', 'Rayann', 'Oksana', 'Stephannie', 'Brynne', 'Alixandra', 'Dyana', 'Emilce', 'Delmy', 'Jamee', 'Caitlan', 'Marily', 'Kiani', 'Jennafer', 'Nissa', 'Jenell', 'Jennette', 'Marquitta', 'Chimene', 'Justyna', 'Jenette', 'Cassy', 'Temika', 'Katrice', 'Brandis', 'Consuela', 'Chavon', 'Angella', 'Shantrell', 'Christiane', 'Shenna', 'Donia', 'Angelise', 'Janyah', 'Damiyah', 'Luzmaria', 'Meghna', 'Natally', 'Nerissa', 'Kaori', 'Laya', 'Analyssa', 'Teya', 'Alizon', 'Jasline', 'Lavette', 'Emmi', 'Kamisha', 'Taleah', 'Shenita', 'Kaytlynn', 'Azariyah', 'Dominica', 'Malvina', 'Skyy', 'Shondra', 'Lorina', 'Donielle', 'Kaisley', 'Katalyna', 'Jesslynn', 'Yasmina', 'Glada', 'Maliha', 'Irina', 'Hiba', 'Trinette', 'Oona', 'Aleeza', 'Arisha', 'Janean', 'Cristie', 'Syd', 'Lavona', 'Kennia', 'Kyanna', 'Lovenia', 'Julieanne', 'Launa', 'Taunya', 'Tytianna', 'Becca', 'Deonna', 'Jihan', 'Jomaira', 'Shantay', 'Talitha', 'Shyra', 'Alverna', 'Chere', 'Kamela', 'Phaedra', 'Stacee', 'Gretta', 'Kathyrn', 'Shalee', 'Beautiful', 'Lissett', 'Georgann', 'Corrin', 'Chelsa', 'Cera', 'Layna', 'Lizanne', 'Mariellen', 'Lashandra', 'Sophya', 'Shruti', 'Janea', 'Rheta', 'Jezebel', 'Alizee', 'Delaila', 'Dayani', 'Arieanna', 'Amarah', 'Janyia', 'Makalah', 'Dorie', 'Tynisha', 'Tran', 'Prisma', 'Shirin', 'Tonette', 'Suzi', 'Alajah', 'Lurline', 'Adelia', 'Tani', 'Cassey', 'Maha', 'Cheyann', 'Keyona', 'Yezenia', 'Vaness', 'Stephine', 'Cyndie', 'Jaylanie', 'Jeannemarie', 'Mammie', 'Sherice', 'Delynn', 'Aoife', 'Kadiatou', 'Sherese', 'Trenyce', 'Anaiz', 'Anaiza', 'Dajanae', 'Lisaann', 'Keiko', 'Martiza', 'Elysa', 'Petrina', 'Dierdre', 'Davida', 'Falyn', 'Briona', 'Maryjean', 'Lanisha', 'Marlenne', 'Nawal', 'Ethelene', 'Alya', 'Ariannah', 'Jacinta', 'Alaia', 'Sindee', 'Jalaya', 'Mellanie', 'Lasya', 'Kyrah', 'Mirabella', 'Renay', 'Seren', 'Hiliana', 'Kinzie', 'Isra', 'Hanan', 'Kaleia', 'Melynda', 'Marine', 'Twanna', 'Lekisha', 'Jamecia', 'Penney', 'Tiwanna', 'Rylea', 'Shekinah', 'Mckenzi', 'Abigael', 'Patrizia', 'Jamillah', 'Caris', 'Karmyn', 'Kyli', 'Princessa', 'Sakinah', 'Deserae', 'Patrina', 'Carmelina', 'Mayela', 'Sherise', 'Ilda', 'Florentina', 'Nelwyn', 'Jennine', 'Aleeya', 'Kynsley', 'Arlett', 'Tarra', 'Lakyn', 'Tyeisha', 'Temima', 'Mallori', 'Ingeborg', 'Elizaveta', 'Jentry', 'Kymber', 'Maddisyn', 'Allana', 'Anistyn', 'Emberlynn', 'Faithlynn', 'Arianah', 'Tionna', 'Lenda', 'Laveta', 'Alayla', 'Aisling', 'Miryam', 'Marena', 'Aneta', 'Yzabella', 'Mihika', 'Raine', 'Samiah', 'Raveena', 'Elfrieda', 'Niccole', 'Tatanisha', 'Medha', 'Katharina', 'Jazmen', 'Cally', 'Louanne', 'Caress', 'Naylea', 'Avarie', 'Madelynne', 'Dayla', 'Shanterria', 'Tesha', 'Thanya', 'Jalia', 'Josalyn', 'Ailey', 'Brooklynne', 'Dodie', 'Champagne', 'Taneka', 'Tenesha', 'Tinisha', 'Deeanna', 'Shelvia', 'Chenoa', 'Darcel', 'Kailea', 'Jatziry', 'Merryl', 'Sharlyn', 'Harolyn', 'Rilla', 'Ayisha', 'Jacklynn', 'Chloee', 'Makynzie', 'Leyah', 'Aalyiah', 'Tynlee', 'Statia', 'Tyronda', 'Tsuyako', 'Casimira', 'Kehaulani', 'Ragan', 'Lorissa', 'Abelina', 'Cuca', 'Sachi', 'Evany', 'Elektra', 'Sianni', 'Raychel', 'Natassia', 'Vermell', 'Sharifa', 'Everley', 'Ivanka', 'Arisbeth', 'Aleyza', 'Bay', 'Deedra', 'Zarina', 'Regena', 'Kitana', 'Latoshia', 'Virgia', 'Aili', 'Breslyn', 'Ishika', 'Jhoana', 'Dorrace', 'Chanice', 'Sheniqua', 'Tashana', 'Joetta', 'Sanya', 'Altamese', 'Pari', 'Niah', 'Ysabelle', 'Lisseth', 'Parisa', 'Aislin', 'Leiah', 'Atziri', 'Anvita', 'Jaydah', 'Gabby', 'Ashia', 'Dymond', 'Marah', 'Uniqua', 'Blimie', 'Anny', 'Dalinda', 'Wauneta', 'Gionna', 'Rabia', 'Jayanna', 'Anica', 'Maybell', 'Kathern', 'Amrita', 'Mayerli', 'Irais', 'Kemberly', 'Vena', 'Kamri', 'Destine', 'Adreanna', 'Seleste', 'Claretha', 'Brynnlee', 'Anquette', 'Komal', 'Lysette', 'Michayla', 'Zamya', 'Sierrah', 'Felica', 'Otelia', 'Rihana', 'Doloris', 'Alanie', 'Angelly', 'Kassandr', 'Rosemari', 'Shaday', 'Annemari', 'Marlana', 'Clorinda', 'Oneida', 'Shaunta', 'Alexcia', 'Takesha', 'Amiracle', 'Sharion', 'Joline', 'Jaziyah', 'Teal', 'Sueann', 'Sora', 'Kamiah', 'Caressa', 'Eleana', 'Bernetha', 'Alexyss', 'Sharda', 'Aishwarya', 'Suhaill', 'Radhika', 'Wonda', 'Renda', 'Janny', 'Ardelle', 'Malory', 'Jossie', 'Anaid', 'Mitsuye', 'Shizuye', 'Fariha', 'Aiesha', 'Nitya', 'Nadiya', 'Katerin', 'Bruna', 'Varsha', 'Yaretsi', 'Xitlalli', 'Leshia', 'Eda', 'Sheronda', 'Malikah', 'Tayah', 'Briann', 'Tasnim', 'Jayonna', 'Kenedy', 'Anarosa', 'Zaya', 'Kerline', 'Brinda', 'Amna', 'Desarae', 'Sarrah', 'Silva', 'Steffani', 'Almarosa', 'Alyshia', 'Ariell', 'Breeanne', 'Alyxandra', 'Juliane', 'Jesseca', 'Janisha', 'Donisha', 'Darnisha', 'Jakeria', 'Kirsty', 'Markeisha', 'Breena', 'Selin', 'Nikisha', 'Adreana', 'Elois', 'Arrianna', 'Melenie', 'Rayanna', 'Kaelee', 'Shakyra', 'Clotee', 'Jakeline', 'Kalysta', 'Cesia', 'Ankita', 'Cristela', 'Shunta', 'Mozella', 'Chrissie', 'Adora', 'Ashanty', 'Ashna', 'Lehua', 'Nohealani', 'Shruthi', 'Metzli', 'Jakelin', 'Jisel', 'Mikenna', 'Miroslava', 'Mansi', 'Daphney', 'Amisha', 'Adara', 'Alexzandria', 'Alliah', 'Yuriana', 'Nanea', 'Kahealani', 'Ritika', 'Arica', 'Amayrani', 'Kealani', 'Dorina', 'Lucienne', 'Estrellita', 'Kimberlin', 'Lai', 'Yovanna', 'Rebekkah', 'Azra', 'Nada', 'Gabryella', 'Avigayil', 'Binta', 'Devoiry', 'Raeanna', 'Arlena', 'Briauna', 'Itati', 'Grabiela', 'Noella', 'Teaghan', 'Tzippy', 'Faiza', 'Zaara', 'Tehilla', 'Miki', 'Sendy', 'Kassondra', 'Katherina', 'Lissete', 'Livier', 'Lauran', 'Dandrea', 'Chelse', 'Lizmarie', 'Sunday', 'Haidee', 'Carrissa', 'Nicholette', 'Katey', 'Katheryne', 'Katty', 'Kimia', 'Leeanne', 'Lizmary', 'Jani', 'Emmanuella', 'Jahniya', 'Talar', 'Sintia', 'Narda', 'Chriselda', 'Candance', 'Delorise', 'Daysy', 'Lusine', 'Raeanne', 'Cherylann', 'Ayat', 'Halima', 'Zissel', 'Courtni', 'Adahli', 'Der', 'Emree', 'Brynlie', 'Cherlyn', 'Bostyn', 'Francie', 'Oaklie', 'Shakeerah', 'Hertha', 'Haneefah', 'Taheerah', 'Nikkia', 'Sheryll', 'Donnabelle', 'Teddi', 'Jodee', 'Tammera', 'Janylah', 'Laquesha', 'Penina', 'Gracee', 'Thomasine', 'Janyce', 'Randie', 'Mela', 'Alka', 'Cordia', 'Shaquetta', 'Mi', 'Jaquetta', 'Yoshiye', 'Haruye', 'Yoneko', 'Fumi', 'Wava', 'Congetta', 'Denee', 'Kandyce', 'Soraida', 'Triana', 'Kenedi', 'Abena', 'Talisha', 'Rochell', 'Sharisse', 'Tijuana', 'Amiee', 'Nyesha', 'Towana', 'Lore', 'Melodye', 'Hayli', 'Joyelle', 'Shareen', 'Amarilis', 'Takiyah', 'Takiya', 'Keysha', 'Feige', 'Diahann', 'Kloie', 'Laynee', 'Mariely', 'Rainey', 'Alizabeth', 'Alyssandra', 'Cambry', 'Jadelynn', 'Marylynn', 'Keoka', 'Jamaica', 'Lus', 'Shonta', 'Kameelah', 'Danell', 'Evamarie', 'Francoise', 'Beata', 'Caylie', 'Elexa', 'Joscelin', 'Hessie', 'Alazay', 'Robena', 'Texie', 'Clarine', 'Makennah', 'Arletha', 'Willette', 'Amee', 'Jetaun', 'Anyia', 'Aryssa', 'Bonni', 'Graciella', 'Haileigh', 'Sharae', 'Shanea', 'Ieisha', 'Porche', 'Teanna', 'Ashanta', 'Taiya', 'Nicolett', 'Naisha', 'Sharice', 'Madelein', 'Kimberle', 'Monifah', 'Cameo', 'Evelynne', 'Edlyn', 'Porcha', 'Maricel', 'Waleska', 'Shakeena', 'Shavone', 'Ashlynne', 'Yahira', 'Shamecca', 'Yashira', 'Sherell', 'Fiorela', 'Nansi', 'Shawntae', 'Poonam', 'Shala', 'Kellyn', 'Jazzmyn', 'Asya', 'Shatoya', 'Yury', 'Weronika', 'Dawnette', 'Lorita', 'Michaelle', 'Tomi', 'Abbi', 'Maudry', 'Jaylinn', 'Kynzie', 'Lynnlee', 'Madisson', 'Denese', 'Devona', 'Sharika', 'Sharilyn', 'Zayna', 'Janalee', 'Sherril', 'Timika', 'Lynelle', 'Rolayne', 'Lubertha', 'Jariah', 'Kamala', 'Taffy', 'Marquetta', 'Honora', 'Frederica', 'Monalisa', 'Rashonda', 'Francene', 'Diedra', 'Ceara', 'Marylouise', 'Kenesha', 'Aisley', 'Donnalee', 'Genisis', 'Debroah', 'Helayne', 'Raelee', 'Maryrose', 'Yalonda', 'Chyla', 'Edelmira', 'Roselle', 'Alyssah', 'Brenley', 'Gaynelle', 'Shelvie', 'Mackayla', 'Linley', 'Allizon', 'Alonna', 'Kendalyn', 'Jozlyn', 'Gwenn', 'Jina', 'Zariya', 'Rosabella', 'Emrie', 'Tamu', 'Senta', 'Myia', 'Emberlyn', 'Emorie', 'Arantxa', 'Richele', 'Christianne', 'Lashan', 'Koren', 'Buffie', 'Ronnette', 'Marna', 'Tuesday', 'Helga', 'Emilyn', 'Cailee', 'Shaquilla', 'Dyamond', 'Gerda', 'Mckynzie', 'Khloie', 'Kendyll', 'Maryfrances', 'Khadejah', 'Annalie', 'Adaya', 'Akia', 'Markia', 'Iyla', 'Kaely', 'Rafaella', 'Tali', 'Sukhmani', 'Mili', 'Kaylanie', 'Maribelle', 'Zharia', 'Georgeanne', 'Shamekia', 'Siyona', 'Layah', 'Maylani', 'Elianah', 'Ellena', 'Elyanna', 'Yanilen', 'Jashanti', 'Lakita', 'Juanell', 'Caley', 'Annella', 'Vinita', 'Zakiyah', 'Sherian', 'Palmira', 'Delpha', 'Creola', 'Veta', 'Sheneka', 'Ameria', 'Keonna', 'Nathali', 'Vaishnavi', 'Zurisadai', 'Mily', 'Aalyah', 'Hasini', 'Irelynn', 'Taneshia', 'Lashanti', 'Shatavia', 'Shantoria', 'Avelina', 'Vanya', 'Erline', 'Surina', 'Maribella', 'Julieana', 'Jazel', 'Kalissa', 'Marlis', 'Hadasa', 'Iveth', 'Miliani', 'Leiana', 'Devynn', 'Ahtziry', 'Shilah', 'Sicily', 'Ashari', 'Yarenis', 'Tamiah', 'Annis', 'Azzie', 'Sedalia', 'Maebell', 'Empress', 'Fairy', 'Najma', 'Loreta', 'Suhayla', 'Sundus', 'Vayda', 'Doshia', 'Ahlam', 'Lashondra', 'Ryanna', 'Lala', 'Merline', 'Severa', 'Kymora', 'Fae', 'Jameka', 'Othella', 'Wyoma', 'Ailee', 'Aishani', 'Fransisca', 'Noma', 'Meztli', 'Miliana', 'Navaeh', 'Swara', 'Malillany', 'Jaina', 'Dia', 'Ivyanna', 'Jamira', 'Jazaria', 'Oletha', 'Julieth', 'Avia', 'Elizebeth', 'Yareni', 'Korra', 'Miraya', 'Bernetta', 'Helyn', 'Suhaylah', 'Laina', 'Lassie', 'Anyae', 'Maleena', 'Nirvana', 'Danely', 'Keilana', 'Hildur', 'Mariaclara', 'Toshie', 'Maniyah', 'Hanako', 'Asako', 'Hiroko', 'Hisae', 'Suraya', 'Kaileen', 'Pearla', 'Layal', 'Batoul', 'Johannah', 'Gizel', 'Venecia', 'Yanelly', 'Atianna', 'Apple', 'Arizbeth', 'Sriya', 'Natania', 'Mayline', 'Emmagrace', 'Meriam', 'Laree', 'Tempie', 'Sedonia', 'Evalee', 'Laquana', 'Sheli', 'Liesl', 'Hazeline', 'Blanchie', 'Samyra', 'Keelie', 'Krislyn', 'Yanelis', 'Addysen', 'Inis', 'Tammra', 'Johnette', 'Amery', 'Alayza', 'Alaiyah', 'Abree', 'Amri', 'Anapaula', 'Jacelynn', 'Kenzleigh', 'Kenzlee', 'Jaelah', 'Brenlee', 'Avalee', 'Paizley', 'Columbia', 'Benedetta', 'Daeja', 'Myeshia', 'Jeanene', 'Terina', 'Ethyl', 'Oliwia', 'Taniah', 'Yaiza', 'Eveline', 'Monnie', 'Margherita', 'Jayana', 'Macil', 'Leontine', 'Catera', 'Wynelle', 'Eldana', 'Sallyann', 'Yolande', 'Marybelle', 'Leanore', 'Clothilde', 'Tonita', 'Kimaya', 'Sumayah', 'Latrenda', 'Kelleen', 'Deatrice', 'Madelon', 'Phyliss', 'Argelia', 'Mellie', 'Emmah', 'Jorley', 'Muna', 'Daphine', 'Darina', 'Bliss', 'Karyl', 'Taelynn', 'Blenda', 'Tonika', 'Jerrilyn', 'Sahra', 'Keilyn', 'Pearlene', 'Arrie', 'Ellene', 'Fredericka', 'Ladawn', 'Maudell', 'Rahma', 'Jaylie', 'Jaidah', 'Vernetta', 'Aleya', 'Aubreigh', 'Alaysha', 'Adena', 'Jacara', 'Elfriede', 'Maysel', 'Munira', 'Mumtaz', 'Dorathy', 'Chanin', 'Ronette', 'Maymie', 'Providencia', 'Mirta', 'Loida', 'Blakelyn', 'Bentleigh', 'Alliana', 'Aleen', 'Daliyah', 'Jodene', 'Johanne', 'Timeka', 'Ilhan', 'Aloma', 'Maris', 'Arlyne', 'Jene', 'Hazelene', 'Shakela', 'Maida', 'Maycie', 'Makynlee', 'Kawanda', 'Consuella', 'Sephora', 'Andrianna', 'Joshlyn', 'Hollyn', 'Kyliee', 'Adaly', 'Dailyn', 'Averee', 'Berklee', 'Marly', 'Gianella', 'Ekaterina', 'Colene', 'Dayonna', 'Shareka', 'Roshni', 'Latifa', 'Merilyn', 'Vernelle', 'Marlyce', 'Sabrena', 'Jeneen', 'Genie', 'Lawanna', 'Tashara', 'Kayzlee', 'Skylie', 'Iyonna', 'Honesti', 'Cherylene', 'Tahira', 'Chizuko', 'Aneesah', 'Helmi', 'Katrena', 'Shyanna', 'Zeola', 'Lempi', 'Arliss', 'Madgie', 'Verlie', 'Ardys', 'Twanda', 'Kareemah', 'Chardae', 'Arlinda', 'Darlena', 'Karee', 'Lorry', 'Rolande', 'Marlane', 'Lelah', 'Zahria', 'Michalene', 'Nayelis', 'Abbigale', 'Lorretta', 'Sheril', 'Priscille', 'Cleda', 'Kerrigan', 'Wanita', 'Ambria', 'Wanetta', 'Ebone', 'Georgianne', 'Karleen', 'Laural', 'Jonette', 'Sharie', 'Francina', 'Yarelis', 'Tempestt', 'Kamie', 'Julene', 'Londa', 'Haniya', 'Kristeen', 'Classie', 'Nakiyah', 'Valinda', 'Kamree', 'Micheline', 'Mckaylee', 'Prescilla', 'Shaylynn', 'Donelda', 'Fayetta', 'Terrye', 'Dorthey', 'Azilee', 'Juanda', 'Eustolia', 'Nakeisha', 'Hira', 'Tarrah', 'Jamyra', 'Azaleah', 'Aveline', 'Chanae', 'Andreana', 'Banesa', 'Berenis', 'Brittini', 'Orianna', 'Reet', 'Rayah', 'Sofi', 'Japji', 'Kensie', 'Roshonda', 'Agripina', 'Blasa', 'Anevay', 'Akari', 'Krissi', 'Maily', 'Kitzia', 'Keilly', 'Raveen', 'Kaiah', 'Juliett', 'Jocelynne', 'Eowyn', 'Calie', 'Ebonee', 'Chelcie', 'Kayci', 'Lauralee', 'Trenity', 'Deborrah', 'Imagene', 'Akasha', 'Analaura', 'Liani', 'Lizania', 'Lucina', 'Melaine', 'Sanah', 'Stepanie', 'Zabrina', 'Janaye', 'Jelena', 'Kaylina', 'Diavian', 'Tasnia', 'Nusrat', 'Ashleymarie', 'Maheen', 'Ndeye', 'Yumi', 'Vittoria', 'Amyra', 'Yakelin', 'Yudith', 'Yumalay', 'Juliza', 'Daila', 'Daenerys', 'Calissa', 'Tahirah', 'Laquasia', 'Jenay', 'Crystina', 'Eleonore', 'Inessa', 'Irine', 'Vennie', 'Oda', 'Laurine', 'Lavera', 'Saraya', 'Kerin', 'Itzia', 'Jennessa', 'Katerine', 'Rosselyn', 'Leidy', 'Adamariz', 'Adylene', 'Aylen', 'Aniela', 'Aleesha', 'Alyssamarie', 'Ainara', 'Emalie', 'Darlin', 'Inna', 'Emmely', 'Eriana', 'Esbeidy', 'Chenelle', 'Janise', 'Sherrell', 'Basilia', 'Malayna', 'Hilinai', 'Mardell', 'Romi', 'Rosena', 'Violett', 'Zaylah', 'Taia', 'Anisah', 'Esli', 'Cleopatra', 'Carisma', 'Dezaray', 'Swayze', 'Raeven', 'Neiva', 'Myeisha', 'Shelsea', 'Yissel', 'Velinda', 'Josseline', 'Denasia', 'Digna', 'Keiana', 'Clytee', 'Vernette', 'Cheyene', 'Roshunda', 'Telisha', 'Nilah', 'Ayda', 'Zykia', 'Isabellamarie', 'Melanee', 'Laylanie', 'Ajah', 'Guiliana', 'Oliva', 'Mikela', 'Mirabelle', 'Nabiha', 'Jasmina', 'Hendy', 'Ita', 'Elif', 'Reola', 'Jamyah', 'Tempest', 'Arletta', 'Keaira', 'Ibeth', 'Jerolyn', 'Nelta', 'Alishba', 'Crisol', 'Sabreena', 'Silver', 'Toba', 'Yunuen', 'Rishika', 'Naomie', 'Brittanya', 'Annasophia', 'Ayumi', 'Jayleene', 'Emmily', 'Lyssa', 'Natoya', 'Vallerie', 'Andee', 'Annastasia', 'Mazzy', 'Zinnia', 'Sheran', 'Sumaiya', 'Tasneem', 'Aniylah', 'Dua', 'Tausha', 'Jabria', 'Lanora', 'Janeli', 'Mileydi', 'Mikaella', 'Ryah', 'Rolonda', 'Ajanae', 'Ianna', 'Xaria', 'Winni', 'Marializ', 'Aidel', 'Jonae', 'Sanam', 'Mao', 'Tesia', 'Yanina', 'Brieana', 'Genova', 'Lashanae', 'Anneke', 'Siarra', 'Sharhonda', 'Zeldy', 'Saron', 'Johnisha', 'Katelynne', 'Janneth', 'Corayma', 'Helvi', 'Asucena', 'Lachelle', 'Solmayra', 'Tavia', 'Marlina', 'Rachal', 'Sunni', 'Nycole', 'Aliannah', 'Nafisa', 'Simi', 'Suki', 'Jadalynn', 'Kezia', 'Athziri', 'Huda', 'Evy', 'Jailah', 'Jaselle', 'Jaslyne', 'Dalyla', 'Emeraude', 'Mahika', 'Yoanna', 'Fraida', 'Tannia', 'Selenne', 'Analiz', 'Angelene', 'Anacristina', 'Kylea', 'Naydelyn', 'Lecia', 'Gitel', 'Shareese', 'Cassady', 'Diem', 'Perlita', 'Monigue', 'Marisha', 'Emillee', 'Kareli', 'Shandreka', 'Kerrin', 'Tram', 'Nohelani', 'Monic', 'Brandice', 'Johnetta', 'Evangelia', 'Shakina', 'Shunda', 'Robbi', 'Ariatna', 'Shantae', 'Sorangel', 'Valene', 'Aletta', 'Libbie', 'Marifer', 'Deitra', 'Despina', 'Hayle', 'Kassidi', 'Dayrin', 'Anjelina', 'Gimena', 'Llesenia', 'Rainbow', 'Muskaan', 'Judit', 'Kyley', 'Tanna', 'Luci', 'Altagracia', 'Kilee', 'Kamry', 'Kalyssa', 'Jadeyn', 'Virgen', 'Damita', 'Leinaala', 'Illeana', 'Nneka', 'Onika', 'Aralyn', 'Mahalia', 'Marelyn', 'Jalene', 'Bobbiejo', 'Apollonia', 'Anjuli', 'Ricarda', 'Fusako', 'Michie', 'Janira', 'Citlalic', 'Jannelle', 'Tiffini', 'Elisia', 'Racine', 'Marybel', 'Xitlally', 'Tynesha', 'Sharay', 'Shamara', 'Aleene', 'Rayssa', 'Carlyn', 'Falisha', 'Lasandra', 'Trinh', 'Seema', 'Tonianne', 'Destani', 'Nairobi', 'Tomica', 'Raena', 'Ivania', 'Odaliz', 'Lilybeth', 'Sheyenne', 'Tereza', 'Yuka', 'Baleria', 'Ayiana', 'Floree', 'Jhoanna', 'Shakila', 'Meleah', 'Monserath', 'Lelani', 'Conception', 'Zowie', 'Teah', 'Takayla', 'Teaira', 'Karyssa', 'Delina', 'Kamaile', 'Rut', 'Reanne', 'Zamantha', 'Ellyse', 'Jisela', 'Latonja', 'Eiko', 'Aylene', 'Atziry', 'Avila', 'Andreya', 'Delyla', 'Aashna', 'Dacia', 'Shavonda', 'Desirey', 'Matea', 'Makailah', 'Henessy', 'Naliyah', 'Charlise', 'Keirsten', 'Ressie', 'Halia', 'Gweneth', 'Manda', 'Lilinoe', 'Mariselda', 'Tajuana', 'Mahima', 'Noeli', 'Yanelli', 'Sole', 'Saloni', 'Annistyn', 'Marcille', 'Thresa', 'Cerenity', 'Samnatha', 'Alexah', 'Analie', 'Aryah', 'Jazline', 'Evony', 'Erandy', 'Jezelle', 'Kamara', 'Emelina', 'Kadance', 'Masae', 'Davonna', 'Shamaya', 'Shalynn', 'Rima', 'Toria', 'Zamira', 'Cerina', 'Fujiko', 'Armine', 'Morganne', 'Gicela', 'Desree', 'Khaila', 'Nikayla', 'Kennedie', 'Marylu', 'Ilyssa', 'Jatziri', 'Shianna', 'Dharma', 'Resa', 'Abra', 'Neely', 'Imo', 'Betzabeth', 'Briceyda', 'Karenna', 'Jakhia', 'Ramiyah', 'Khaliyah', 'Tocarra', 'Milee', 'Athina', 'Maleigha', 'Shalyn', 'Syliva', 'Roseline', 'Claira', 'Jisselle', 'Kiely', 'Marisabel', 'Maryanna', 'Melena', 'Mylene', 'Mariangela', 'Mailey', 'Sonora', 'Siana', 'Shreeya', 'Sevana', 'Samhita', 'Jackelyne', 'Kyrstin', 'Anslie', 'Samella', 'Jewelia', 'Sammye', 'Ayline', 'Navneet', 'Charlesetta', 'Raye', 'Yulonda', 'Esmerelda', 'Gianina', 'Danessa', 'Calia', 'Everlena', 'Sadaf', 'Analucia', 'Meriah', 'Gwendalyn', 'Disha', 'Katana', 'Kalaya', 'Kaeley', 'Tyonna', 'Rozella', 'Marjean', 'Conchita', 'Kylynn', 'Aasiyah', 'Maelynn', 'Kahla', 'Prachi', 'Tajanae', 'Megumi', 'Micheala', 'Yanitza', 'Geselle', 'Reather', 'Annalicia', 'Bonna', 'Lilliann', 'Callia', 'Brigit', 'Quintina', 'Fujie', 'Jolanda', 'Nanami', 'Yosselin', 'Jakelyn', 'Kadeja', 'Eveny', 'Emaly', 'Ciena', 'Julliana', 'Jareli', 'Jaretzi', 'Kailin', 'Kimiye', 'Ammie', 'Kiona', 'Sumayyah', 'Terre', 'Laryssa', 'Marleni', 'Kamira', 'Yulanda', 'Jonda', 'Lania', 'Pippa', 'Jazariah', 'Takeya', 'Shatima', 'Ysenia', 'Mikki', 'Necole', 'Etha', 'Williemae', 'Margurite', 'Leonarda', 'Inocencia', 'Dominika', 'Laisa', 'Haylea', 'Annamay', 'Azia', 'Mckynlee', 'Maddilyn', 'Scotlyn', 'Lillith', 'Mertie', 'Kynzee', 'Joshlynn', 'Maelee', 'Daleiza', 'Xyla', 'Royalty', 'Railynn', 'Patrycja', 'Dotty', 'Leda', 'Toshiba', 'Nelma', 'Yeni', 'Ottilie', 'Lyna', 'Leslieann', 'Onita', 'Darcey', 'Marya', 'Africa', 'Seferina', 'Theola', 'Ysidra', 'Zita', 'Cing', 'Zailynn', 'Jennilee', 'Sharmon', 'Tyechia', 'Irmgard', 'Shameika', 'Jemima', 'Jazzelle', 'Adlee', 'Aliyanna', 'Acelyn', 'Catalaya', 'Brileigh', 'Braylie', 'Angelin', 'Arianni', 'Ariani', 'Kennya', 'Maelyn', 'Lillee', 'Maripaz', 'Laikyn', 'Kenslee', 'Ileane', 'Puja', 'Oanh', 'Jakara', 'Shawntay', 'Cendy', 'Erianna', 'Chloie', 'Birtie', 'Korin', 'Jannett', 'Shawntel', 'Markisha', 'Nastassja', 'Shalene', 'Alexya', 'Cloie', 'Exa', 'Jentri', 'Modena', 'Veronique', 'Daina', 'Mechele', 'Lakesia', 'Kawanna', 'Clotilde', 'Diamonique', 'Teyana', 'Rheagan', 'Shanece', 'Yanique', 'Taysha', 'Ulyssa', 'Jadzia', 'Kadija', 'Towanna', 'Lurlene', 'Sharri', 'Rosenda', 'Daphna', 'Hermina', 'Shaquanda', 'Saachi', 'Sena', 'Yazaira', 'Yatzil', 'Anam', 'Sparrow', 'Anetra', 'Nalayah', 'Jaylenne', 'Joya', 'Kensi', 'Khylee', 'Lilyrose', 'Iasia', 'Jaliah', 'Melda', 'Armella', 'Zyasia', 'Nazia', 'Shanasia', 'Krystie', 'Dorothe', 'Thora', 'Adelene', 'Avaya', 'Aurielle', 'Ailany', 'Andromeda', 'Loa', 'Cleora', 'Darling', 'Caliana', 'Keniyah', 'Crystel', 'Dimitra', 'Renate', 'Zyriah', 'Taegan', 'Marygrace', 'Mckinzie', 'Nivea', 'Rhian', 'Amarissa', 'Kadee', 'Devani', 'Khara', 'Aishia', 'Annell', 'Jaslin', 'Jaide', 'Briahna', 'Merary', 'Lauraine', 'Tywana', 'Athanasia', 'Chantay', 'Loretha', 'Anyiah', 'Marvine', 'Jennelle', 'Hiedi', 'Sunnie', 'Panagiota', 'Lanesha', 'Amity', 'Denyse', 'Nataleigh', 'Amyia', 'Avrie', 'Analysa', 'Ameris', 'Ambrielle', 'Kynnedy', 'Gracy', 'Kaelie', 'Heydi', 'Latrese', 'Lavonia', 'Latrelle', 'Lynetta', 'Graceann', 'Susette', 'Sarabeth', 'Arnetta', 'Shelonda', 'Myiesha', 'Shila', 'Pascale', 'Zenja', 'Madelene', 'Lalena', 'Doria', 'Dagmar', 'Griselle', 'Nitza', 'Moraima', 'Miguelina', 'Brittania', 'Emmalin', 'Novie', 'Chavonne', 'Lashana', 'Quyen', 'Gennifer', 'Zaryah', 'Paytin', 'Keeli', 'Kolbi', 'Maddyson', 'Jackqueline', 'Arnita', 'Brynnley', 'Edelyn', 'Arial', 'Yaneliz', 'Ena', 'Barbaraann', 'Glendora', 'Heavyn', 'Neomi', 'Rebbecca', 'Laketa', 'Renetta', 'Carline', 'Nezzie', 'Shaneeka', 'Desaray', 'Hiromy', 'Hallee', 'Halli', 'Sheba', 'Tahisha', 'Paetyn', 'Katisha', 'Joyell', 'Joyel', 'Zoei', 'Zamiya', 'Raygan', 'Clydie', 'Missouri', 'Debany', 'Kalisha', 'Niurka', 'Beverlyn', 'Bell', 'Zuly', 'Lakayla', 'Lainee', 'Kynli', 'Lundyn', 'Erynn', 'Braleigh', 'Allena', 'Lashanna', 'Shaunya', 'Tykia', 'Leeba', 'Bassheva', 'Kandra', 'Breyana', 'Geovana', 'Joandra', 'Jessyka', 'Analilia', 'Charna', 'Josefita', 'Laurin', 'Casi', 'Jeniah', 'Koraima', 'Vivi', 'Merlina', 'Marinna', 'Soriya', 'Sarayu', 'Ma', 'Adali', 'Abbygale', 'Avonlea', 'Bellah', 'Makeyla', 'Maanya', 'Hania', 'Ellah', 'Esmee', 'Jaylean', 'Verlene', 'Kendria', 'Kasondra', 'Kadesha', 'Kadedra', 'Reizel', 'Reizy', 'Sheryle', 'Elka', 'Caileigh', 'Meya', 'Rondi', 'Janetta', 'Dwana', 'Yakira', 'Donetta', 'Laurissa', 'Jordann', 'Jenice', 'Hasmik', 'Mychelle', 'Shabnam', 'Sarahann', 'Shaylene', 'Zuleica', 'Verenise', 'Dejanee', 'Alyx', 'Breyanna', 'Anum', 'Jamesia', 'Asheley', 'Keya', 'Lyzette', 'Rossy', 'Terilyn', 'Rahaf', 'Anabia', 'Neala', 'Payal', 'Taheera', 'Nakhia', 'Shaela', 'Krupa', 'Suriya', 'Victory', 'Viviane', 'Habiba', 'Fortune', 'Farida', 'Erina', 'Ranya', 'Tifani', 'Surie', 'Aastha', 'Joella', 'Sherida', 'Vonnie', 'Bluma', 'Gianny', 'Naziyah', 'Taylie', 'Jakia', 'Timia', 'Farren', 'Skylin', 'Sabiha', 'Nashley', 'Blimi', 'Annita', 'Kristianna', 'Delena', 'Dalina', 'Kyasia', 'Cathlene', 'Karalee', 'Merilee', 'Monette', 'Asharia', 'Jacquelina', 'Nishat', 'Charlcie', 'Sukanya', 'Celines', 'Rashell', 'Nadja', 'Lamiyah', 'Najae', 'Zipporah', 'Rawan', 'Tailor', 'Denesha', 'Masiel', 'Nida', 'Assata', 'Infiniti', 'Cresencia', 'Omega', 'Meher', 'Maneh', 'Noura', 'Yanine', 'Maral', 'Malori', 'Safia', 'Saori', 'Vesper', 'Audrinna', 'Dea', 'Kahlia', 'Eliora', 'Isley', 'Laurinda', 'Mignon', 'Debie', 'Denette', 'Jolyn', 'Casondra', 'Donnisha', 'Elysse', 'Lazaria', 'Aleia', 'Shelbee', 'Ivone', 'Mazal', 'Sherley', 'Shantia', 'Christelle', 'Tatjana', 'Roselia', 'Pebbles', 'Cleotilde', 'Erendida', 'Chardonnay', 'Brittiny', 'Brittanny', 'Scarleth', 'Mehar', 'Neila', 'Sofiya', 'Lakshmi', 'Lilianne', 'Akeiba', 'Shabreka', 'Joannie', 'Samiha', 'Fatma', 'Itzell', 'Envy', 'Maybelline', 'Nashly', 'Rya', 'Kaelani', 'Kailana', 'Aylah', 'Bellamarie', 'Marizol', 'Malyssa', 'Madai', 'Neelam', 'Ysamar', 'Sulma', 'Sueling', 'Song', 'Sharayah', 'Melisha', 'Ashliegh', 'Melodi', 'Belem', 'Chrystina', 'Tonantzin', 'Setareh', 'Valeri', 'Yaffa', 'Niara', 'Mame', 'Janasia', 'Flo', 'Gustavia', 'Lanya', 'Nanie', 'Velta', 'Dot', 'Luberta', 'Ledora', 'Olean', 'Abbigayle', 'Hadeel', 'Rayma', 'Mayola', 'Nonnie', 'Voncille', 'Heloise', 'Nolia', 'Victorine', 'Yola', 'Vella', 'Terrilyn', 'Noelie', 'Alean', 'Allean', 'Lorean', 'Josiephine', 'Heba', 'Kerrianne', 'Odeal', 'Aigner', 'Anaclara', 'Gudrun', 'Valborg', 'Trenice', 'Ardath', 'Aune', 'Teresia', 'Lesha', 'Dewanna', 'Arlyce', 'Jayliana', 'Orene', 'Paralee', 'Jamyia', 'Kemiyah', 'Fredia', 'Amyiah', 'Doreatha', 'Lashanta', 'Cerissa', 'Kawana', 'Arizona', 'Shanetta', 'Jalesa', 'Asmaa', 'Garnette', 'Clella', 'Artemisa', 'Liliya', 'Oretha', 'Adna', 'Amyri', 'Tyshae', 'Maryan', 'Santanna', 'Bushra', 'Jamyla', 'Earma', 'Delsie', 'Verlean', 'Sherena', 'Carmelite', 'Chari', 'Darlean', 'Shamia', 'Audryna', 'Genevia', 'Avie', 'Tamora', 'Lavonna', 'September', 'Sharolyn', 'Athziry', 'Alyiah', 'Aleina', 'Alesandra', 'Amoreena', 'Nykia', 'Drea', 'Galilee', 'Ainslie', 'Ishita', 'Jenavie', 'Jezabel', 'Erandi', 'Evana', 'Jiana', 'Laniah', 'Britanny', 'Sanika', 'Solash', 'Laasya', 'Nairi', 'Leighla', 'Kaiyah', 'Suhana', 'Taliya', 'Maleia', 'Candee', 'Ninette', 'Eugena', 'Lateisha', 'Salvatrice', 'Quaneisha', 'Mertis', 'Bebe', 'Rida', 'Takyra', 'Floye', 'Christell', 'Ozelle', 'Juanice', 'Genia', 'Shaundra', 'Shanin', 'Wendee', 'Cynde', 'Adalynne', 'Adelin', 'Hayven', 'Ayra', 'Chimamanda', 'Kenzlie', 'Taylynn', 'Zerenity', 'Kynsleigh', 'Dorthea', 'Alley', 'Melrose', 'Keyondra', 'Anglia', 'Lynnea', 'Tamira', 'Terisa', 'Tona', 'Isaly', 'Jeimy', 'Giannah', 'Leilanni', 'Leya', 'Quetzali', 'Naylene', 'Misaki', 'Amely', 'Donette', 'Charlayne', 'Selia', 'Kittie', 'Tamaya', 'Lenna', 'Zykerria', 'Teisha', 'Terrea', 'Alita', 'Bunny', 'Deniece', 'Inge', 'Takira', 'Monesha', 'Mahala', 'Donica', 'Fortunata', 'Valrie', 'Zayah', 'Ziyah', 'Vela', 'Vassie', 'Omie', 'Nadean', 'Annalynn', 'Adah', 'Edmae', 'Aalayah', 'Yuritzy', 'Ytzel', 'Svetlana', 'Soha', 'Alfredia', 'Kylei', 'Landrey', 'Lariyah', 'Rozlyn', 'Sakina', 'Greer', 'Bula', 'Eura', 'Harmonee', 'Pecola', 'Noreta', 'Laveda', 'Retta', 'Rozlynn', 'Skarlet', 'Snow', 'Zoha', 'Sophiarose', 'Anglea', 'Itzabella', 'Elanie', 'Calirose', 'Adhya', 'Amaiyah', 'Lavender', 'Leylanie', 'Kaliana', 'Quetzaly', 'Helon', 'Nalia', 'Cipriana', 'Martyna', 'Pola', 'Dierra', 'Maximina', 'Sherica', 'Murlene', 'Berna', 'Bernarda', 'Ettie', 'Laiken', 'Hensley', 'Fontella', 'Modelle', 'Timotea', 'Venora', 'Lakelyn', 'Licia', 'Laury', 'Loralee', 'Kamyah', 'Verba', 'Angelee', 'Adalind', 'Adaliz', 'Ailynn', 'Airi', 'Alany', 'Avika', 'Avleen', 'Leoni', 'Saisha', 'Savvy', 'Philippa', 'Jasneet', 'Izabellah', 'Elienai', 'Kalayah', 'Eureka', 'Dionicia', 'Zylah', 'Zosia', 'Yetzali', 'Tigerlily', 'Dorena', 'Nakesha', 'Lakenya', 'Margarete', 'Margarite', 'Cloteal', 'Adline', 'Willadeen', 'Anselma', 'Marcheta', 'Havyn', 'Ilyanna', 'Idalie', 'Fallyn', 'Emori', 'Anzal', 'Kalila', 'Ellisyn', 'Maddalyn', 'Roslynn', 'Hodan', 'Emalynn', 'Addy', 'Adelyne', 'Aizah', 'Dalayza', 'Cambri', 'Annali', 'Angelynn', 'Caidence', 'Auriana', 'Azlynn', 'Blakelee', 'Brenleigh', 'Tailynn', 'Zyla', 'Verline', 'Pierina', 'Panhia', 'Valda', 'Shela', 'Uldine', 'Vibha', 'Wednesday', 'Porshia', 'Shabria', 'Palmina', 'Khristine', 'Lannette', 'Sandhya', 'Janalyn', 'Floreine', 'Marchelle', 'Minette', 'Tawnia', 'Wynne', 'Sada', 'Windi', 'Clydene', 'Shundra', 'Joycie', 'Delories', 'Alvena', 'Edmonia', 'Denean', 'Dhana', 'Marjie', 'Alicja', 'Cammy', 'Aryam', 'Leonie', 'Adrielle', 'Felisita', 'Tinnie', 'Marinda', 'Lamia', 'Conchetta', 'Naylah', 'Sarayah', 'Nataliya', 'Delani', 'Eknoor', 'Ellee', 'Maiah', 'Mayumi', 'Meara', 'Kalliope', 'Jewels', 'Lanaya', 'Yui', 'Maxcine', 'Yaqueline', 'Yoceline', 'Marilynne', 'Maple', 'Ronesha', 'Marili', 'Reema', 'Rayana', 'Aggie', 'Talina', 'Doristine', 'Romelle', 'Shaqueena', 'Sharelle', 'Caira', 'Gelsey', 'Tashawna', 'Takeisha', 'Jerlean', 'Sunita', 'Shalini', 'Michaeline', 'Audria', 'Ronnisha', 'Leonia', 'Monna', 'Ambra', 'Corena', 'Taren', 'Alexiss', 'Kajal', 'Jordanne', 'Kasia', 'Brienna', 'Gayane', 'Deija', 'Cidney', 'Tabytha', 'Raeleen', 'Mkayla', 'Harli', 'Jassmin', 'Ilo', 'Lasheena', 'Keianna', 'Kally', 'Makenzy', 'Angelea', 'Natasia', 'Shaneequa', 'Monay', 'Moet', 'Marcelline', 'Shatia', 'Sarafina', 'Kaisha', 'Tiffney', 'Shenequa', 'Sheretta', 'Floria', 'Alacia', 'Kavita', 'Kerianne', 'Tameshia', 'Jamye', 'Shanese', 'Latiqua', 'Jesscia', 'Johanny', 'Daniqua', 'Geneviev', 'Bernadet', 'Annice', 'Megann', 'Katee', 'Nikeya', 'Stavroula', 'Tawna', 'Sindia', 'Marlaina', 'Jury', 'Tovah', 'Shivonne', 'Nekia', 'Yvonnie', 'Kyna', 'Railey', 'Xandria', 'Genine', 'Tashima', 'Marycarmen', 'Kiahna', 'Jadynn', 'Akua', 'Eather', 'Fatema', 'Aiysha', 'Allisa', 'Ashleynicole', 'Bobette', 'Shandrika', 'Hollace', 'Chandni', 'Cayley', 'Brenae', 'Areisy', 'Annahi', 'Anallely', 'Klarisa', 'Ayssa', 'Jatavia', 'Nohemy', 'Mikyla', 'Mariadelosang', 'Shatina', 'Kazandra', 'Elsi', 'Teryl', 'Yennifer', 'Destyni', 'Damariz', 'Areanna', 'Everlean', 'Lesslie', 'Margrette', 'Tuyet', 'Jacquelene', 'Grissel', 'Walterine', 'Shterna', 'Gila', 'Nabila', 'Liel', 'Sani', 'Djeneba', 'Angeliz', 'Anari', 'Amyrie', 'Aissa', 'Tichina', 'Amariana', 'Xiara', 'Yamiles', 'Isatou', 'Airiana', 'Carrigan', 'Aldea', 'Aarika', 'Bryanne', 'Alegandra', 'Carrisa', 'Andrina', 'Casaundra', 'Breanda', 'Biviana', 'Irena', 'Denielle', 'Lizzett', 'Shaunice', 'Sigourney', 'Sona', 'Paradise', 'Lashanique', 'Melaina', 'Zoua', 'Vaneza', 'Tyresha', 'Shyasia', 'Tiyana', 'Youa', 'Zaneta', 'Muskan', 'Talissa', 'Kennisha', 'Lizandra', 'Akosua', 'Jaymi', 'Chelby', 'Chelci', 'Aeriel', 'Isamara', 'Payge', 'Hadja', 'Fruma', 'Fiza', 'Fatumata', 'Kabrina', 'Feigy', 'Zanaya', 'Yanette', 'Teairra', 'Talor', 'Kathrina', 'Justeen', 'Maryelizabeth', 'Jannete', 'Chantalle', 'Haide', 'Genelle', 'Esthela', 'Emilse', 'Maegen', 'Lyndsi', 'Cristiana', 'Clio', 'Breindel', 'Briyana', 'Jamyria', 'Jameshia', 'Kadeshia', 'Jamisha', 'Faige', 'Aishah', 'Lorette', 'Nandi', 'Nastasia', 'Shada', 'Shakeia', 'Shaneice', 'Yanel', 'Teryn', 'Shaylyn', 'Karimah', 'Fabienne', 'Shaianne', 'Saleena', 'Raychelle', 'Pahoua', 'Justyne', 'Fransheska', 'Katilyn', 'Shadaya', 'Quanasia', 'Shantasia', 'Nyasha', 'Minahil', 'Shahd', 'Chani', 'Bassy', 'Zunairah', 'Lynsie', 'Charnelle', 'Jaquana', 'Taquana', 'Shaasia', 'Idelle', 'Rogene', 'Udy', 'Devory', 'Evanna', 'Keisy', 'Hadiya', 'Brittainy', 'Cortni', 'Erikka', 'Lindsie', 'Mayraalejandra', 'Topacio', 'Elky', 'Yita', 'Sura', 'Tiani', 'Sadiya', 'Kaitlen', 'Jessicca', 'Linna', 'Stephy', 'Hadia', 'Jaiyana', 'Aldina', 'Frimy', 'Tywanda', 'Renarda', 'Mardelle', 'Alaijah', 'Antoinetta', 'Amyria', 'Sheyanne', 'Jackee', 'Bina', 'Khole', 'Selenia', 'Seidy', 'Albertina', 'Yoandra', 'Yarelyn', 'Kassaundra', 'Lynzee', 'Haneen', 'Marshay', 'Sharona', 'Shanygne', 'Nigeria', 'Nechy', 'Jhane', 'Chrisette', 'Gypsy', 'Drusilla', 'Milta', 'Ranee', 'Yvett', 'Mykenzie', 'Aracelia', 'Vernessa', 'Chekesha', 'Cadance', 'Moria', 'Tsurue', 'Yarisbel', 'Verena', 'Tomoe', 'Breezy', 'Swannie', 'Tsuyuko', 'Hisayo', 'Gerianne', 'Cailynn', 'Adrionna', 'Lillianne', 'Eduarda', 'Melinna', 'Sanaiya', 'Nohelia', 'Zarela', 'Yarethzy', 'Sruthi', 'Josefine', 'Kiela', 'Kersten', 'Syriah', 'Emaleigh', 'Jazlynne', 'Aeryn', 'Danelly', 'Dalylah', 'Lexa', 'Kherington', 'Nivia', 'Carolanne', 'Sharlotte', 'Vanda', 'Deirdra', 'Ilyse', 'Judyann', 'Venezia', 'Mailee', 'Latishia', 'Ajla', 'Lucine', 'Shontell', 'Rosiland', 'Celinda', 'Aanika', 'Felicidad', 'Denia', 'Natsuko', 'Analyse', 'Angellina', 'Brizeida', 'Jazira', 'Terah', 'Reana', 'Jennalyn', 'Jenaya', 'Kelani', 'Miyuki', 'Aracelie', 'Dannika', 'Danity', 'Cadie', 'Breelyn', 'Kayra', 'Mayli', 'Malarie', 'Tequilla', 'Gerilyn', 'Mieko', 'Belynda', 'Shamiyah', 'Reaghan', 'Ziya', 'Rozanne', 'Joyanne', 'Zamaria', 'Luiza', 'Tamanika', 'Kimya', 'Patriciaann', 'Eilene', 'Bryna', 'Yena', 'Yarelly', 'Maddyn', 'Khylie', 'Khyla', 'Margueritte', 'Ramya', 'Jenea', 'Jennavie', 'Jazzlene', 'Marelly', 'Manya', 'Lillyanne', 'Gyselle', 'Niyati', 'Moana', 'Kenosha', 'Ezmeralda', 'Anvitha', 'Avelyn', 'Dahlila', 'Emmaly', 'Dayamy', 'Anajulia', 'Mandee', 'Valli', 'Sharan', 'Leasia', 'Shiquita', 'Malana', 'Nadeen', 'Parneet', 'Lynna', 'Saskia', 'Samaiya', 'Saffron', 'Vianka', 'Evey', 'Ebelin', 'Anishka', 'Aneth', 'Addelynn', 'Kayly', 'Alyzae', 'Anniyah', 'Ayme', 'Alexsa', 'Aidsa', 'Elyn', 'Illianna', 'Greenlee', 'Tinesha', 'Sherline', 'Yvanna', 'Joslin', 'Estee', 'Lusia', 'Nhung', 'Janielle', 'Smithie', 'Yohanna', 'Shanette', 'Marilena', 'Blannie', 'Meleana', 'Malie', 'Jannine', 'Kuulei', 'Kawehi', 'Velna', 'Kuuipo', 'Keani', 'Tiffeny', 'Billi', 'Conni', 'Elexia', 'Sheily', 'Mehak', 'Ardelia', 'Phung', 'Aleasha', 'Toyia', 'Kalliopi', 'Carrieann', 'Shayal', 'Brandye', 'Shatisha', 'Neola', 'Pallavi', 'Symantha', 'Mackenzee', 'Shalawn', 'Krimson', 'Jaquelinne', 'Sonal', 'Calysta', 'Kaylamarie', 'Kirah', 'Belicia', 'Anicia', 'Aerin', 'Marisel', 'Priscella', 'Lei', 'Imaan', 'Haruka', 'Kila', 'Jerusha', 'Deva', 'Charon', 'Leida', 'Deadra', 'Areana', 'Iriana', 'Drenda', 'Saadia', 'Danne', 'Jossalyn', 'Kennadie', 'Makaya', 'Daelynn', 'Daffne', 'Galia', 'Naida', 'Yaira', 'Latania', 'Damarys', 'Mireille', 'Maribell', 'Luzelena', 'Anacani', 'Sahira', 'Shaylin', 'Sejal', 'Subrina', 'Julaine', 'Saby', 'Zoraya', 'Atalie', 'Deseray', 'Nacole', 'Jennell', 'Laneisha', 'Ivie', 'Darnella', 'Lashone', 'Lekeisha', 'Puanani', 'Uilani', 'Donyale', 'Terriann', 'Marianela', 'Josalynn', 'Avari', 'Blonnie', 'Makya', 'Seriah', 'Nori', 'Roselee', 'Verbie', 'Borghild', 'Marcene', 'Syretta', 'Bama', 'Eulene', 'Chantale', 'Shontae', 'Mabell', 'Hellon', 'Shantanique', 'Janki', 'Dhara', 'Buna', 'Naeemah', 'Tacara', 'Shirleyann', 'Tshwanda', 'Nadege', 'Georganne', 'Leondra', 'Fredricka', 'Margaree', 'Quincee', 'Oaklynn', 'Arlean', 'Judee', 'Nyoka', 'Khia', 'Kendia', 'Mahek', 'Anasia', 'Jenin', 'Gerline', 'Elwillie', 'Annsley', 'Juhi', 'Zettie', 'Shacara', 'Shantique', 'Marijo', 'Shakara', 'Ersie', 'Bionca', 'Kolleen', 'Ertha', 'Chioma', 'Roneisha', 'Courtenay', 'Altie', 'Arla', 'Delainey', 'Rainelle', 'Lockie', 'Rayonna', 'Nasiyah', 'Zori', 'Carollee', 'Mima', 'Irja', 'Willadean', 'Sigrid', 'Myong', 'Khaliah', 'Sakeenah', 'Saleemah', 'Emmersyn', 'Miyeko', 'Brooksie', 'Brailynn', 'Raghad', 'Nadira', 'Hassana', 'Toshiye', 'Fumiye', 'Kelise', 'Angelis', 'Earla', 'Dilia', 'Arwa', 'Shaylie', 'Synai', 'Tanijah', 'Jalaysia', 'Charnita', 'Marit', 'Gaelle', 'Shandiin', 'Janelis', 'Gatha', 'Alahna', 'Aniyla', 'Mikelle', 'Skai', 'Merlinda', 'Tariyah', 'Arietta', 'Terrika', 'Elenor', 'Ruthanna', 'Evaline', 'Abigaelle', 'Alayjah', 'Naysa', 'Camya', 'Pachia', 'Kamia', 'Sylvania', 'Ambree', 'Oakleigh', 'Zania', 'Murielle', 'Charlyn', 'Zykira', 'Jestine', 'Simonne', 'Willodene', 'Lyndee', 'Sophonie', 'Saddie', 'Darlis', 'Lynnda', 'Marysa', 'Seleena', 'Raevyn', 'Lilikoi', 'Maiyer', 'Kymberli', 'Shayda', 'Cassidee', 'Jadira', 'Delora', 'Afsheen', 'Adira', 'Amena', 'Canary', 'Humaira', 'Derricka', 'Fatiha', 'Xia', 'Jaquelyne', 'Aurianna', 'Sarahjane', 'Sanaz', 'Taleen', 'Teara', 'Taiz', 'Sharai', 'Magally', 'Manon', 'Maizie', 'Manisha', 'Marisleysis', 'Anjela', 'Youlanda', 'Jermani', 'Elysha', 'Claritza', 'Gissela', 'Icela', 'Alixandria', 'Asley', 'Analuisa', 'Maddalena', 'Cortnee', 'Coretha', 'Audreanna', 'Manal', 'Kadijatou', 'Pollie', 'Mysti', 'Tiffiany', 'Corean', 'Amiree', 'Anner', 'Cleone', 'Lavone', 'Fredna', 'Konnie', 'Robbyn', 'Alica', 'Bessy', 'Aleesa', 'Analleli', 'Mischelle', 'Bethani', 'Baillie', 'Odessie', 'Erlene', 'Marcile', 'Edona', 'Tylah', 'Tyrah', 'Rainell', 'Precilla', 'Genever', 'Ajanee', 'Chera', 'Amye', 'Monserratt', 'Moorea', 'Richa', 'Willetta', 'Shawne', 'Trisa', 'Lasonia', 'Cleona', 'Alizea', 'Anayely', 'Emelly', 'Fionna', 'Cerena', 'Julyana', 'Kaile', 'Jacklin', 'Brianca', 'Ashleyann', 'Richardine', 'Kelcee', 'Keyaira', 'Mabelle', 'Brecklyn', 'Samyah', 'Ayonna', 'Mesha', 'Tyeshia', 'Tiffiney', 'Tyara', 'Azuri', 'Merideth', 'Hermie', 'Leaner', 'Mendi', 'Kanoelani', 'Kadeidra', 'Akeela', 'Lin', 'Mindel', 'Lashell', 'Meegan', 'Ia', 'Ellamae', 'Jasmen', 'Nechuma', 'Romilda', 'Hiilei', 'Osmara', 'Keidy', 'Rianne', 'Afia', 'Teylor', 'Raquelle', 'Grizelda', 'Tasfia', 'Laquasha', 'Tandra', 'Maeghan', 'Kameshia', 'Alara', 'Emina', 'Delaina', 'Jacquetta', 'Christena', 'Topanga', 'Viviann', 'Eboney', 'Kasha', 'Sativa', 'Secilia', 'Niomi', 'Neena', 'Tanji', 'Shandy', 'Corryn', 'Esly', 'Silka', 'Sanaii', 'Annais', 'Kaitlynne', 'Epiphany', 'Maniya', 'Mali', 'Madigan', 'Sanii', 'Jaeleen', 'Faria', 'Maralyn', 'Johnae', 'Lekesha', 'Sharry', 'Latecia', 'Kimberl', 'Charita', 'Modean', 'Marrie', 'Lielle', 'Zeina', 'Pessel', 'Sameera', 'Eleonora', 'Jannatul', 'Coryn', 'Dustie', 'Demitria', 'Jacqlyn', 'Nekisha', 'Latrecia', 'Rabecca', 'Malaysha', 'Lugenia', 'Elese', 'Myrissa', 'Lucrecia', 'Lysandra', 'Tarryn', 'Tammey', 'Bonnita', 'Shiffy', 'Shirel', 'Clariza', 'Analis', 'Rechy', 'Nusaiba', 'Manahil', 'Chamisa', 'Almetta', 'Moncia', 'Leba', 'Jeilyn', 'Earnesteen', 'Mennie', 'Kieara', 'Sheina', 'Yo', 'Sharnice', 'Ravin', 'Daisi', 'Britini', 'Carlina', 'Arisa', 'Margy', 'Whitnee', 'Krysti', 'Odean', 'Darlys', 'Janita', 'Donnetta', 'Guynell', 'Neomia', 'Loyalty', 'Serra', 'Kaysie', 'Preciosa', 'Earleen', 'Shatoria', 'Kourtnie', 'Kana', 'Jahnavi', 'Kyarra', 'Licet', 'Railyn', 'Delisha', 'Flordia', 'Arsema', 'Kena', 'Kaelah', 'Kashia', 'Emonie', 'Izola', 'Linsay', 'Naibe', 'Natallie', 'Rosi', 'Taline', 'Cortina', 'Annett', 'Kadi', 'Lindsi', 'Lasasha', 'Tamre', 'Yenny', 'Yasaman', 'Shawnice', 'Thi', 'Jannel', 'Kaleen', 'Demitra', 'Meisha', 'Mahira', 'Emmanuela', 'Janaiya', 'Rechel', 'Nazifa', 'Zeynep', 'Shalena', 'Hila', 'Ailish', 'Altovise', 'Anabeth', 'Anavictoria', 'Averey', 'Berlynn', 'Alitza', 'Adelynne', 'Aiva', 'Alenna', 'Harlowe', 'Camrynn', 'Daphnie', 'Ezri', 'Lanna', 'Lua', 'Maddilynn', 'Maeva', 'Maytte', 'Jovi', 'Karalyn', 'Kataleah', 'Kaylana', 'Milliana', 'Surveen', 'Veera', 'Nimrat', 'Nimrit', 'Radha', 'Roisin', 'Senna', 'Ruhi', 'Saja', 'Glenice', 'Damiana', 'Mikeria', 'Lakeria', 'Yulia', 'Zanna', 'Lynnae', 'Illa', 'Buelah', 'Novis', 'Johnye', 'Valree', 'Santiaga', 'Modell', 'Maydell', 'Elfida', 'Charlyne', 'Argentina', 'Terica', 'Kiandra', 'Tangi', 'Pascuala', 'Narcisa', 'Macaria', 'Thomasa', 'Verta', 'Eulogia', 'Trellis', 'Tavaria', 'Dakayla', 'Oneita', 'Kimberlynn', 'Aslee', 'Jenascia', 'Shamaria', 'Lakely', 'Etna', 'Gilberte', 'Glena', 'Delorse', 'Margrett', 'Endia', 'Buena', 'Alvilda', 'Domitila', 'Jasmaine', 'Jaquita', 'Shontavia', 'Roneshia', 'Leasa', 'Feliciana', 'Allyana', 'Anaia', 'Annalyn', 'Ayane', 'Belladonna', 'Adanely', 'Akshaya', 'Aleiyah', 'Tereasa', 'Antonisha', 'Darlah', 'Dhalia', 'Dianelly', 'Elika', 'Camillia', 'Leonila', 'Manreet', 'Jazzlin', 'Kaiulani', 'Kashvi', 'Talayah', 'Viana', 'Ximenna', 'Shaylah', 'Quorra', 'Anagha', 'Annalea', 'Jaleyah', 'Bethanny', 'Zophia', 'Alegria', 'Advika', 'Taneika', 'Marye', 'Latorya', 'Sayler', 'Nara', 'Nithya', 'Phoenyx', 'Saiya', 'Mellany', 'Yazlin', 'Adalena', 'Adya', 'Aliviah', 'Aalia', 'Rickia', 'Eliyana', 'Arella', 'Audris', 'Auria', 'Avantika', 'Aylani', 'Beya', 'Camilah', 'Kaede', 'Laylonie', 'Jayani', 'Katara', 'Hera', 'Audrea', 'Nataley', 'Nazli', 'Neyla', 'Noya', 'Srinidhi', 'Pranavi', 'Sareen', 'Satya', 'Terika', 'Zamora', 'Jimmye', 'Brigida', 'Shereka', 'Widline', 'Natori', 'Dorthie', 'Berit', 'Aretta', 'Svea', 'Wenona', 'Amera', 'Nayah', 'Lollie', 'Genice', 'Fabianna', 'Nazaria', 'Edra', 'Jamariah', 'Willine', 'Madolyn', 'Wanell', 'Lucetta', 'Eudora', 'Adda', 'Shariah', 'Jaelle', 'Jalena', 'Annelle', 'Solveig', 'Autherine', 'Nobie', 'Izora', 'Eudell', 'Wyolene', 'Mariangel', 'Mayar', 'Luevenia', 'Eniyah', 'Lilie', 'Eliany', 'Ivyonna', 'Beadie', 'Zeta', 'Merita', 'Valjean', 'Delbra', 'Alanys', 'Camiyah', 'Edyth', 'Kanya', 'Perina', 'Catelynn', 'Angelisse', 'Relda', 'Eathel', 'Kerrington', 'Lyriq', 'Brita', 'Meda', 'Zanya', 'Emileigh', 'Aracelys', 'Lisania', 'Evalena', 'Traniya', 'Janiyla', 'Syesha', 'Ahmya', 'Camora', 'Armonie', 'Beula', 'Veva', 'Kateria', 'Harumi', 'Kimiyo', 'Tangie', 'Amayrany', 'Alexiah', 'Alyn', 'Tokie', 'Masayo', 'Makenzee', 'Arieana', 'Asayo', 'Seirra', 'Elfrida', 'Ariona', 'Masue', 'Mizuki', 'Liliane', 'Malanie', 'Sabreen', 'Yuritza', 'Shanautica', 'Kateleen', 'Montanna', 'Tiona', 'Theresia', 'Vernia', 'Mahayla', 'Glynna', 'Shaelynn', 'Isabelly', 'Aileth', 'Ailie', 'Melvia', 'Sherrel', 'Ivah', 'Himani', 'Marayah', 'Melane', 'Evanie', 'Atalia', 'Athalia', 'Bethsy', 'Betzi', 'California', 'Bryonna', 'Yaretsy', 'Zamara', 'Sanyah', 'Gaylynn', 'Vitoria', 'Yoshino', 'Hatsumi', 'Tatsuko', 'Samika', 'Maili', 'Charnae', 'Jamilla', 'Vieno', 'Rylei', 'Vanita', 'Hydia', 'Carmyn', 'Kenslie', 'Maryhelen', 'Lamees', 'Lilley', 'Haunani', 'Pualani', 'Mikiyah', 'Lovina', 'Janith', 'Kanoe', 'Anouk', 'Mayerly', 'Kiele', 'Lexia', 'Janani', 'Berlinda', 'Belma', 'Inayah', 'Saloma', 'Anely', 'Anjolina', 'Devonna', 'Nikhita', 'Nayana', 'Naidely', 'Hina', 'Ismerai', 'Daisie', 'Sitlaly', 'Yahayra', 'Trinidy', 'Vallery', 'Ceaira', 'Floretta', 'Lavena', 'Shawntavia', 'Dessa', 'Tareva', 'Iyanla', 'Kania', 'Shakiya', 'Latora', 'Hermila', 'Clora', 'Tiyanna', 'Saydie', 'Sherlene', 'Trixie', 'Nadiyah', 'Zarria', 'Saidy', 'Sabriya', 'Keirra', 'Leeana', 'Leianna', 'Jaia', 'Ishanvi', 'Ailed', 'Fathima', 'Hansika', 'Delailah', 'Caliah', 'Dayleen', 'Jolisa', 'Sallye', 'Levonia', 'Tula', 'Kristene', 'Alanni', 'Aleiah', 'Aeva', 'Ilean', 'Annet', 'Lateshia', 'Markesha', 'Nikol', 'Nadolyn', 'Kimyatta', 'Ercilia', 'Sheliah', 'Heiley', 'Metztli', 'Teyla', 'Saranya', 'Tanishka', 'Kayana', 'Donnamae', 'Lajoyce', 'Kemya', 'Kemora', 'Jozelyn', 'Keili', 'Jaydy', 'Linzy', 'Marelin', 'Melaney', 'Aleksa', 'Alynah', 'Elyza', 'Emmery', 'Angeleen', 'Annica', 'Bindi', 'Demya', 'Nayleen', 'Sadee', 'Samah', 'Shylee', 'Talula', 'Vannia', 'Yarelli', 'Zohar', 'Miangel', 'Orla', 'Sundra', 'Korinne', 'Taniesha', 'Zaliyah', 'Zionna', 'Amariyah', 'Loris', 'Cruzita', 'Landa', 'Eduvina', 'Ileanna', 'Ileene', 'Jesselle', 'Daviana', 'Eleny', 'Marijane', 'Okla', 'Violanda', 'Dorma', 'Leoma', 'Esperansa', 'Shanreka', 'Baudelia', 'Teasia', 'Aubrei', 'Jeree', 'Ortencia', 'Melida', 'Pernie', 'Sweetie', 'Arelly', 'Ariday', 'Bhavya', 'Aiyanah', 'Akshita', 'Ginette', 'Docia', 'Pegeen', 'Alaynah', 'Allanah', 'Daniah', 'Loriana', 'Kenly', 'Kenli', 'Kendahl', 'Kenady', 'Senora', 'Hetal', 'Aloha', 'Barri', 'Shaniquah', 'Feather', 'Rica', 'Adriann', 'Fleta', 'Shontel', 'Kynisha', 'Nahima', 'Myracle', 'Syniah', 'Jomarie', 'Leeandra', 'Maylie', 'Marijose', 'Jaley', 'Sydnei', 'Amariya', 'Alysandra', 'Damia', 'Laurieann', 'Lucecita', 'Miosotis', 'Shelvy', 'Bernina', 'Darice', 'Dorrie', 'Myrta', 'Yoko', 'Vara', 'Joanmarie', 'Kerryann', 'Carmesa', 'Kenzington', 'Oaklyn', 'Shelbia', 'Arhianna', 'Ardyn', 'Amarachi', 'Cydnee', 'Chloey', 'Brailee', 'Aily', 'Rosette', 'Geryl', 'Luba', 'Marguerita', 'Ayannah', 'Deziyah', 'Lurdes', 'Dawnelle', 'Reiko', 'Brynli', 'Tenlee', 'Kynadee', 'Emersen', 'Josilyn', 'Jazalyn', 'Maleyah', 'Cozette', 'Xoe', 'Syria', 'Charyl', 'Gita', 'Aniaya', 'Yulemni', 'Joleigh', 'Kenzy', 'Logann', 'Genesys', 'Cherita', 'Trenise', 'Stpehanie', 'Riann', 'Matilyn', 'Akisha', 'Coralee', 'Presli', 'Yariana', 'Edda', 'Lisabeth', 'Farm', 'Dennice', 'Deepa', 'Chiffon', 'Alyzea', 'Alexas', 'Emylee', 'Joellyn', 'Zo', 'Marybell', 'Sapna', 'Khristina', 'Kellyanne', 'Chrystie', 'Damary', 'Graziella', 'Tene', 'Shakisha', 'Shirelle', 'Gwynne', 'Insha', 'Lydiann', 'Cuba', 'Cortnie', 'Denelle', 'Huyen', 'Brieann', 'Cindia', 'Shalina', 'Linnette', 'Kiamesha', 'Anecia', 'Brinna', 'Kewanna', 'Malke', 'Yira', 'Rashidah', 'Karicia', 'Chrislyn', 'Idali', 'Zandria', 'Ruta', 'Toshi', 'Daena', 'Aneliz', 'Cherese', 'Brandalyn', 'Brieanne', 'Chistina', 'Denys', 'Nyisha', 'Lissie', 'Sherine', 'Marisal', 'Tuwana', 'Zyonna', 'Shady', 'Patrisha', 'Laniece', 'Jessamyn', 'Letticia', 'Shirlie', 'Miyo', 'Marilouise', 'Yukiye', 'Ltanya', 'Geralynn', 'Anastazia', 'Mitzie', 'Lluliana', 'Rozanna', 'Magalie', 'Salima', 'Bevin', 'Gaudy', 'Ieasha', 'Makia', 'Sacheen', 'Sherene', 'Mataya', 'Hatsuye', 'Chiyeko', 'Devanny', 'Nasya', 'Odyssey', 'Tunisia', 'Caldonia', 'Marsi', 'Mindee', 'Tamy', 'Sherill', 'Tsitsiki', 'Arva', 'Gayleen', 'Kimmy', 'Lenette', 'Roxan', 'Leanora', 'Charlena', 'Claudina', 'Danise', 'Denell', 'Eydie', 'Irish', 'Hydeia', 'Nichele', 'Ronica', 'Temre', 'Cindra', 'Vincenta', 'Zyra', 'Larita', 'Jodine', 'Ewelina', 'Madylin', 'Kinzleigh', 'Malone', 'Layken', 'Verity', 'Tinleigh', 'Sophi', 'Skyleigh', 'Stanislawa', 'Rylinn', 'Natalynn', 'Marlei', 'Rhylie', 'Payslee', 'Paxtyn', 'Brittyn', 'Alaynna', 'Avory', 'Aubriee', 'Jacqui', 'Aseel', 'Jannell', 'Simra', 'Raneem', 'Kellene', 'Shellee', 'Tish', 'Lashauna', 'Ashira', 'Sharrie', 'Donnette', 'Milarain', 'Toshia', 'Shariyah', 'Dariah', 'Gustava', 'Leotha', 'Sherelle', 'Lindi', 'Luanna', 'Shanan', 'Arelys', 'Nyema', 'Errin', 'Fredrica', 'Dhriti', 'Yashvi', 'Gaile', 'Ermalinda', 'Gregorita', 'Klynn', 'Kaedence', 'Zaila', 'Yaritzi', 'Taylyn', 'Tailyn', 'Milka', 'Maesyn', 'Macyn', 'Riyah', 'Alleigh', 'Aracelli', 'Hadlie', 'Iza', 'Riddhi', 'Kathleene', 'Darely', 'Eleyna', 'Analiya', 'Fanchon', 'Allyce', 'Jasma', 'Porschia', 'Deberah', 'Zoi', 'Sherlyne', 'Favour', 'Shakari', 'Mckenzy', 'Makinzie', 'Maahi', 'Jacqualine', 'Nancyann', 'Ronne', 'Charmane', 'Martie', 'Leane', 'Kama', 'Corrinne', 'Vangie', 'Jonni', 'Michon', 'Sharise', 'Shawnie', 'Joane', 'Rosary', 'Noretta', 'Zaylynn', 'Paislie', 'Infinity', 'Amaryllis', 'Altair', 'Cookie', 'Danyella', 'Collyns', 'Chrislynn', 'Bryley', 'Brelynn', 'Finleigh', 'Evianna', 'Flavia', 'Wilhemina', 'Jaeliana', 'Taija', 'Naiomi', 'Jennika', 'Jenika', 'Jaicee', 'Laurice', 'Ashaunti', 'Alyxandria', 'Delfinia', 'Tyiesha', 'Petrita', 'Fedelina', 'Eufelia', 'Marshae', 'Marquesha', 'Feloniz', 'Tyliyah', 'Nadene', 'Natascha', 'Shawnette', 'Jamese', 'Tashay', 'Mckenzee', 'Mckinsey', 'Langley', 'Kensleigh', 'Karolyna', 'Coralyn', 'Grethel', 'Baylei', 'Ariany', 'Mekenzie', 'Whitlee', 'Sayde', 'Willena', 'Tzipporah', 'Afsana', 'Kearra', 'Marialy', 'Quiara', 'Jing', 'Dorathea', 'Rachelann', 'Melissaann', 'Jeanett', 'Jensine', 'Jessicaann', 'Ellesse', 'Kaula', 'Calley', 'Malkie', 'Shenelle', 'Sheela', 'Steffi', 'Shadia', 'Marielis', 'Saima', 'Tiarah', 'Reginia', 'Shaquala', 'Shadiamond', 'Kallista', 'Allee', 'Allexis', 'Nakeya', 'Reshma', 'Sosha', 'Kendrea', 'Imalay', 'Kyong', 'Sharmin', 'Sorah', 'Alayshia', 'Katja', 'Chavie', 'Farzana', 'Lanasia', 'Khayla', 'Jamella', 'Diva', 'Ericca', 'Brettany', 'Imunique', 'Tiasia', 'Tajae', 'Sidra', 'Chelbi', 'Kourtni', 'Lamisha', 'Krystyn', 'Maly', 'Mirtha', 'Nary', 'Nuria', 'Falicia', 'Zilpha', 'Keyasia', 'Ranisha', 'Garnetta', 'Alexxus', 'Hae', 'Herma', 'Tasheena', 'Philicia', 'Fotini', 'Avanell', 'Czarina', 'Kindle', 'Antoinet', 'Constanc', 'Cassondr', 'Destanee', 'Christinia', 'Shalisa', 'Stepahnie', 'Sopheap', 'Somaly', 'Shalane', 'Saran', 'Alaycia', 'Carolynne', 'Nikolette', 'Saphire', 'Dominigue', 'Channa', 'Leva', 'Starquasia', 'Shyan', 'Sabah', 'Shakiera', 'Nagely', 'Hajar', 'Keniya', 'Anhthu', 'Ashle', 'Taira', 'Meline', 'Rebeckah', 'Daritza', 'Kaysha', 'Kathrin', 'Edit', 'Jennae', 'Kaja', 'Molli', 'Hildreth', 'Elyssia', 'Keandrea', 'Courtlyn', 'Cova', 'Kyndle', 'Kadisha', 'Mitchelle', 'Chabeli', 'Ashlen', 'Feiga', 'Shakena', 'Lakeia', 'Jehan', 'Karianne', 'Renisha', 'Crystalyn', 'Blia', 'Amanada', 'Neiba', 'Oyuki', 'Lianet', 'Javaria', 'Praise', 'Sagal', 'Avaleigh', 'Amoni', 'Fadumo', 'Debhora', 'Sharol', 'Sahalie', 'Aleana', 'Dezire', 'Catalia', 'Barbarann', 'Raelin', 'Reniyah', 'Jeniyah', 'Jaziya', 'Wilhemenia', 'Wavie', 'Modestine', 'Tariah', 'Cathern', 'Asenath', 'Nakya', 'Reeva', 'Tkai', 'Orva', 'Theora', 'Brookie', 'Breyonna', 'Ellagrace', 'Kaliya', 'Jemimah', 'Ahna', 'Zetta', 'Tanyia', 'Dicie', 'Malasia', 'Janvi', 'Talaysia', 'Kaybree', 'Teia', 'Robertha', 'Tilda', 'Marykatherine', 'Gusta', 'Gola', 'Malta', 'Nija', 'Kaija', 'Tamaria', 'Chyann', 'Davianna', 'Gae', 'Ruther', 'Kennadee', 'Arvella', 'Ashonti', 'Euphemia', 'Teyanna', 'Jahnya', 'Jamariya', 'Ceanna', 'Francenia', 'Charletta', 'Catheryn', 'Theodosia', 'Magdaline', 'Samariah', 'Jamara', 'Nehemie', 'Mikenzie', 'Marielys', 'Keilany', 'Bernardita', 'Marketa', 'Takya', 'Frona', 'Draxie', 'Genell', 'Celesta', 'Deloria', 'Sister', 'Icy', 'Mardi', 'Florance', 'Azari', 'Ahmiyah', 'Chaniya', 'Rheda', 'Kateland', 'Rielle', 'Kjersten', 'Olivette', 'Tita', 'Tharon', 'Briasia', 'Pakou', 'Raniah', 'Janaria', 'Jaliya', 'Alexiana', 'Alayja', 'Ailea', 'Camiya', 'Versa', 'Vertell', 'Loyola', 'Mckelle', 'Ebonique', 'Jaynie', 'Shamiah', 'Keela', 'Laterrica', 'Fidelia', 'Annia', 'Rosslyn', 'Robynn', 'Darlynn', 'Shakiara', 'Shakeira', 'Olinda', 'Kionna', 'Annslee', 'Rudine', 'Teonna', 'Rudene', 'Latrece', 'Wynette', 'Damiya', 'Zonnie', 'Jenne', 'Deeanne', 'Doree', 'Jennilyn', 'Lari', 'Lourie', 'Tedi', 'Deaira', 'Deairra', 'Fatuma', 'Gearldean', 'Genise', 'Karlyn', 'Arleta', 'Alla', 'Donie', 'Lady', 'Rheba', 'Nuha', 'Olita', 'Elzina', 'Lutricia', 'Tauna', 'Teasha', 'Elberta', 'Jeralyn', 'Shaketa', 'Elonda', 'Lafondra', 'Shelle', 'Lamiya', 'Lejla', 'Labria', 'Wessie', 'Cleola', 'Suad', 'Andretta', 'Piccola', 'Jadalee', 'Louanna', 'Donabelle', 'Shauntel', 'Vannie', 'Naomia', 'Ludell', 'Ikram', 'Ariyonna', 'Anaelle', 'Pamila', 'Scheryl', 'Kandee', 'Donella', 'Vicie', 'Tajah', 'Jodeen', 'Debborah', 'Varvara', 'Jalisha', 'Paw', 'Tranette', 'Ruwayda', 'Jeanice', 'Lowana', 'Curlie', 'Viveca', 'Tommi', 'Lynnel', 'Shawneen', 'Tora', 'Ikhlas', 'Delene', 'Jillyn', 'Abria', 'Blondine', 'Katharyn', 'Gini', 'Lynnell', 'Laurey', 'Ikran', 'Madell', 'Dura', 'Trenia', 'Arsie', 'Runell', 'Lawan', 'Georgeanna', 'Nashay', 'Lasha', 'Michi', 'Arloa', 'Kazuye', 'Arnette', 'Morghan', 'Allure', 'Kiyo', 'Fusaye', 'Sebrena', 'Kikuye', 'Mykia', 'Soon', 'Kyung', 'Maysa', 'Manessa', 'Ople', 'Amyre', 'Katera', 'Danaya', 'Dorothey', 'Shahidah', 'Soliana', 'Concettina', 'Delphie', 'Aqueelah', 'Cassadee', 'Larayne', 'Burnette', 'Diona', 'Stasha', 'Sheria', 'Luciel', 'Anise', 'Cumi', 'Marillyn', 'Domenique', 'Sumiye', 'Masaye', 'Imojean', 'Louetta', 'Taimi', 'Berdie', 'Jyl', 'Cyrilla', 'Kearstin', 'Tosca', 'Billee', 'Milda', 'Rema', 'Tyne', 'Altamease', 'Aleaha', 'Malaina', 'Jersie', 'Nadyne', 'Suhailah', 'Reghan', 'Burma', 'Kamyra', 'Geraldean', 'Ivalee', 'Waunita', 'Aritza', 'Madalynne', 'Talaya', 'Azura', 'Aldonia', 'Robinette', 'Ameenah', 'Abeer', 'Yamilette', 'Tanae', 'Mertha', 'Jamirah', 'Chun', 'Avayah', 'Janayah', 'Bena', 'Mahiyah', 'Karn', 'Kristien', 'Mikesha', 'Eriel', 'Kemoni', 'Aziya', 'Raigan', 'Rissie', 'Tenna', 'Tambria', 'Birdell', 'Almena', 'Jonisha', 'Marcey', 'Rosebud', 'Lakevia', 'Shateria', 'Nelia', 'Rilda', 'Doshie', 'Onzell', 'Safiyyah', 'Lorilee', 'Shiane', 'Gauri', 'Ashiya', 'Yaileen', 'Vendetta', 'Margaretmary', 'Telisa', 'Imogean', 'Sheryn', 'Nance', 'Mariette', 'Keerthana', 'Rosellen', 'Michelene', 'Kamrie', 'Mayci', 'Jerzi', 'Vermelle', 'Tondra', 'Dorethea', 'Wannetta', 'Tilly', 'Brightyn', 'Patt', 'Lynae', 'Willo', 'Cloma', 'Yailyn', 'Takeria', 'Janyiah', 'Rasheema', 'Nafeesa', 'Rosene', 'Kellianne', 'Taccara', 'Quanda', 'Patsie', 'Chaquita', 'Shakelia', 'Guerline', 'Tashika', 'Taneesha', 'Fatme', 'Marliss', 'Hye', 'Marjo', 'Meggie', 'Maye', 'Walline', 'Dodi', 'Kristyna', 'Aliyyah', 'Latravia', 'Diania', 'Elta', 'Oralee', 'Nikkita', 'Rasha', 'Sharena', 'Tecora', 'Pluma', 'Ovell', 'Keeya', 'Dayja', 'Sherrian', 'Jinnie', 'Ekta', 'Javonda', 'Shantrice', 'Dava', 'Kimbley', 'Lafonda', 'Lasonja', 'Hiilani', 'Danay', 'Avree', 'Kelliann', 'Keasha', 'Kimmarie', 'Jannely', 'Manasi', 'Moncerat', 'Miyu', 'Jullianna', 'Joelene', 'Ynez', 'Yazmeen', 'Yasamin', 'Syann', 'Surena', 'Tresia', 'Trecia', 'Sonjia', 'Hokulani', 'Amarilys', 'Bethzaida', 'Noraida', 'Dietra', 'Nealie', 'Charice', 'Alicea', 'Jozie', 'Delzora', 'Jordis', 'Jolett', 'Kahlen', 'Kallee', 'Natilee', 'Pecolia', 'Iyari', 'Shandrell', 'Quintella', 'Monchel', 'Tysha', 'Vanetta', 'Shawneequa', 'Odesser', 'Lareina', 'Jannifer', 'Kinya', 'Lateesha', 'Dvora', 'Katrin', 'Denene', 'Diondra', 'Ciclali', 'Sula', 'Talena', 'Afrika', 'Cheron', 'Emireth', 'Cadee', 'Jlyn', 'Jermya', 'Alyia', 'Sitlali', 'Sissy', 'Felita', 'Kerith', 'Wendolyn', 'Chaundra', 'Angle', 'Gladies', 'Meygan', 'Sereniti', 'Saryn', 'Vielka', 'Tirzah', 'Lynnmarie', 'Lisanne', 'Yliana', 'Yamilett', 'Keyoka', 'Laquanta', 'Teneshia', 'Trenna', 'Veronda', 'Fronie', 'Carlette', 'Lanetta', 'Raynelle', 'Tianne', 'Siria', 'Mayda', 'Lorien', 'Celica', 'Tabbitha', 'Kayanna', 'Julitza', 'Kylia', 'Heavenlee', 'Nikka', 'Rachana', 'Mekenna', 'Maritere', 'Ai', 'Angelisa', 'Anysa', 'Basia', 'Ilka', 'Geanine', 'Kedra', 'Caila', 'Deysy', 'Emilyann', 'Samera', 'Mackinzie', 'Lynzie', 'Akela', 'Navpreet', 'Reylene', 'Reyanna', 'Kathlynn', 'Kiaira', 'Guiselle', 'Brinn', 'Jerelyn', 'Lorel', 'Alandra', 'Ardyth', 'Kloee', 'Mellody', 'Carlisa', 'Martinique', 'Damali', 'Cassandre', 'Ivanelle', 'Janaan', 'Shontay', 'Tamieka', 'Tashema', 'Irmalinda', 'Tayna', 'Berdena', 'Janika', 'Shauntay', 'Nikea', 'Ekaterini', 'Glendaly', 'Vernee', 'Kang', 'Candise', 'Jamica', 'Andera', 'Katheleen', 'Annagrace', 'Bradleigh', 'Kissy', 'Lachandra', 'Tamikia', 'Shevon', 'Wardean', 'Betina', 'Marcee', 'Evia', 'Carry', 'Marica', 'Tiwana', 'Stacye', 'Theressa', 'Torsha', 'Allayna', 'Betania', 'Berania', 'Claryssa', 'Clarise', 'Cassidi', 'Mehana', 'Janella', 'Mackenzy', 'Kaeleigh', 'Sanoe', 'Neysa', 'Shawntee', 'Shannah', 'Tihani', 'Willye', 'Zalma', 'Serrina', 'Shealyn', 'Hiiaka', 'Jeselle', 'Mitsy', 'Kela', 'Aquila', 'Marikay', 'Christella', 'Tameria', 'Ebelina', 'Maricar', 'Shalimar', 'Yanin', 'Xuan', 'Tifany', 'Thy', 'Quynh', 'Shronda', 'Kysha', 'Lular', 'Danee', 'Christyna', 'Antonieta', 'Chara', 'Bich', 'Tishana', 'Sophy', 'Shoshanna', 'Adrea', 'Lavaun', 'Keryn', 'Okema', 'Njeri', 'Ashaki', 'Alegra', 'Anapatricia', 'Terena', 'Tuere', 'Ensley', 'Geraline', 'Corrinna', 'Carlye', 'Dawnielle', 'Fancy', 'Akiba', 'Korrie', 'Lavita', 'Chisa', 'Lakishia', 'Mandisa', 'Lalita', 'Sakeena', 'Noami', 'Olivea', 'Lucilla', 'Marialuiza', 'Radonna', 'Magaline', 'Minda', 'Annah', 'Mitsuyo', 'Kameko', 'Miyako', 'Satsuki', 'Hatsuyo', 'Aimie', 'Jalexis', 'Haruyo', 'Tokiko', 'Matsuyo', 'Myiah', 'Natalye', 'Priseis', 'Yeraldi', 'Natsue', 'Nobue', 'Zyria', 'Tierany', 'Samyia', 'Rhema', 'Chiyo', 'Lailoni', 'Momoka', 'Miku', 'Havanna', 'Izela', 'Kendy', 'Rashanda', 'Aleysha', 'Sherlita', 'Tamana', 'Kikuyo', 'Tapanga', 'Shauntell', 'Adithi', 'Chiamaka', 'Devika', 'Angy', 'Arwyn', 'Aparna', 'Anneka', 'Betzayra', 'Analuiza', 'Blondie', 'October', 'Yarexi', 'Yarethzi', 'Annaclaire', 'Rosabel', 'Jerlene', 'Clelia', 'Jatara', 'Anzley', 'Zamaya', 'Venera', 'Kalleigh', 'Jaylynne', 'Kaylor', 'Milli', 'Nelsy', 'Laycee', 'Arayah', 'Betzabe', 'Bethzi', 'Haidy', 'Chayla', 'Elizah', 'Evoleth', 'Edyn', 'Cyniah', 'December', 'Amerika', 'Analea', 'Ayshia', 'Alauna', 'Shamica', 'Peaches', 'Shenee', 'Letecia', 'Arminda', 'Yolander', 'Amariona', 'Kaithlyn', 'Jasiya', 'Niharika', 'Sareena', 'Maryana', 'Melanye', 'Solei', 'Suhey', 'Soyla', 'Koral', 'Lilee', 'Mercede', 'Pennye', 'Yumeka', 'Mazel', 'Vani', 'Pattiann', 'Shirell', 'Carmencita', 'Delayla', 'Hailyn', 'Brena', 'Daana', 'Lenise', 'Ryhanna', 'Lorely', 'Tiannah', 'Zabdi', 'Kammy', 'Josslynn', 'Keilee', 'Kamrynn', 'Itza', 'Jaidy', 'Cherly', 'Ladeana', 'Memory', 'Maresa', 'Shauntae', 'Risha', 'Ilisa', 'Debraann', 'Gavriela', 'Jenai', 'Suzzette', 'Mailani', 'Leiloni', 'Manasa', 'Malin', 'Faythe', 'Haylei', 'Haili', 'Gwenivere', 'Jamilette', 'Naydeline', 'Sakshi', 'Nayda', 'Nuala', 'Chelsae', 'Berenize', 'Bahar', 'Arpi', 'Tearra', 'Metta', 'Lethia', 'Akanksha', 'Danine', 'Alayne', 'Jeanann', 'Loyda', 'Yamna', 'Marsela', 'Jolinda', 'Leina', 'Mariane', 'Kaydince', 'Etsuko', 'Tinika', 'Lashona', 'Chidinma', 'Jazell', 'Derenda', 'Cylinda', 'Amaiah', 'Alyzza', 'Abbygayle', 'Tashae', 'Tesa', 'Sarra', 'Tanasha', 'Latoy', 'Dawnell', 'Corinn', 'Charmain', 'Odetta', 'Kimiya', 'Kiaya', 'Mairin', 'Maelani', 'Halena', 'Dorianne', 'Ilia', 'Cheyenna', 'Noora', 'Nareh', 'Namrata', 'Sholanda', 'Sita', 'Dunia', 'Betzayda', 'Analissa', 'Amulya', 'Annaka', 'Anneth', 'Anaalicia', 'Noemie', 'Leni', 'Robyne', 'Skyleen', 'Tiphanie', 'Belmira', 'Francelina', 'Kreindy', 'Kiri', 'Kristena', 'Lawren', 'Christyn', 'Deicy', 'Hollyann', 'Jamela', 'Eriko', 'Sotheary', 'Lekeshia', 'Onica', 'Micole', 'Marlisa', 'Aqsa', 'Bayla', 'Abigal', 'Charny', 'Shaquira', 'Rabab', 'Yasemin', 'Keishla', 'Donasia', 'Ellamarie', 'Darianny', 'Dahiana', 'Areeba', 'Shaquasha', 'Oneisha', 'Daicy', 'Karem', 'Kymberlee', 'Kayleena', 'Katryna', 'Jessicamae', 'Gessica', 'Jameela', 'Janele', 'Naylani', 'Anagabriela', 'Andraya', 'Andreanna', 'Artavia', 'Alexanderia', 'Laporche', 'Laporsche', 'Folasade', 'Kirandeep', 'Davia', 'Davona', 'Darbi', 'Baylea', 'Sylwia', 'Glendy', 'Ivet', 'Fritzi', 'Lusero', 'Marlayna', 'Marlissa', 'Leanny', 'Duaa', 'Ruchama', 'Orli', 'Nabeeha', 'Maurissa', 'Shevawn', 'Shauni', 'Shellby', 'Sindi', 'Taralyn', 'Tanzania', 'Sinthia', 'Ondrea', 'Nhu', 'Narine', 'Naly', 'Yanett', 'Temmy', 'Manar', 'Maimuna', 'Arielys', 'Dalya', 'Allyse', 'Mariateresa', 'Mariade', 'Lashea', 'Kimberlyann', 'Cyntia', 'Cystal', 'Elisse', 'Tonimarie', 'Nashalie', 'Shatasia', 'Teigan', 'Muntaha', 'Zlata', 'Zehra', 'Shaterra', 'Leeya', 'Keysi', 'Christabel', 'Alfrieda', 'Mehgan', 'Hyacinth', 'Shley', 'Caterin', 'Darnesha', 'Amaranta', 'Jazzmen', 'Kelia', 'Kassy', 'Grasiela', 'Sheindy', 'Yenty', 'Tahani', 'Umme', 'Mayla', 'Maryon', 'Kiyanna', 'Dezeray', 'Macaela', 'Nalley', 'Mikeisha', 'Sylvana', 'Smantha', 'Virdiana', 'Afiya', 'Chanise', 'Glorimar', 'Hui', 'Hendel', 'Junia', 'Gioia', 'Elene', 'Dorothie', 'Elynor', 'Mercades', 'Arfa', 'Abiha', 'Aayat', 'Amarianna', 'Raynisha', 'Pahola', 'Sarin', 'Marixa', 'Shavonna', 'Tannya', 'Tijera', 'Girtha', 'Tameko', 'Caresse', 'Bernyce', 'Allisha', 'Branda', 'Jahmya', 'Haleema', 'Hodaya', 'Samina', 'Sheva', 'Theadora', 'Skylyn', 'Razan', 'Somalia', 'Thalya', 'Quadasia', 'Yanil', 'Arabia', 'Edina', 'Briyanna', 'Verdia', 'Sehar', 'Naama', 'Timberly', 'Reann', 'Narissa', 'Maggy', 'Marriah', 'Joua', 'Kellsie', 'Kelcy', 'Evonna', 'Jacqueleen', 'Xee', 'Zaynah', 'Janique', 'Jailin', 'Aniqa', 'Melana', 'Mariame', 'Aundria', 'Anacaren', 'Anahid', 'Jassmine', 'Keoshia', 'Keyera', 'Delmi', 'Briselda', 'Carlisha', 'Brittnei', 'Clarrisa', 'Dezerae', 'Banessa', 'Ariele', 'Cherrell', 'Daissy', 'Cecila', 'Jady', 'Kristelle', 'Kristinamarie', 'Korinna', 'Kortnee', 'Jasimine', 'Jahnay', 'Farhana', 'Shaliyah', 'Nemesis', 'Shakerria', 'Phoua', 'Carylon', 'Ironesha', 'Lariza', 'Anesa', 'Elantra', 'Deandria', 'Denecia', 'Chelsia', 'Teighlor', 'Suzannah', 'Zelene', 'Zeena', 'Catriona', 'Tamarra', 'Tannaz', 'Titiana', 'Briany', 'Lyana', 'Maytal', 'Antanasia', 'Kierston', 'Dashia', 'Ismenia', 'Annessa', 'Carolena', 'Miasia', 'Mikhaila', 'Lamiracle', 'Kassey', 'Markeshia', 'Hilarie', 'Necha', 'Ziara', 'Jahniyah', 'Safiyah', 'Tanaisha', 'Shamyra', 'Laportia', 'Shavy', 'Viktoriya', 'Khrystyne', 'Kristyne', 'Juanisha', 'Jerrika', 'Channelle', 'Jacquiline', 'Rakia', 'Tamarah', 'Sarha', 'Mishelle', 'Nastasha', 'Acadia', 'Brittiney', 'Mickaela', 'Natavia', 'Seryna', 'Ardene', 'Special', 'Simranjit', 'Marivi', 'Natassja', 'Neira', 'Nikkie', 'Asiana', 'Dazhane', 'Channell', 'Adryana', 'Mariluz', 'Dajia', 'Breigh', 'Zelpha', 'Lataya', 'Glenny', 'Sharene', 'Shaguana', 'Henrine', 'Camesha', 'Birdia', 'Dynisha', 'Sherina', 'Ayde', 'Danille', 'Charday', 'Almadelia', 'Larena', 'Charlestine', 'Suellyn', 'Marry', 'Constantina', 'Tandi', 'Lacretia', 'Noralba', 'Latresha', 'Latacha', 'Talynn', 'Rox', 'Chasey', 'Nyia', 'Alyissa', 'Karilyn', 'Shevonne', 'Genny', 'Tamicka', 'Doneisha', 'Cyrena', 'Daisia', 'Ravina', 'Berdia', 'Aneesha', 'Vashti', 'Latrica', 'Kennetha', 'Aarti', 'Raiza', 'Elspeth', 'Kyleen', 'Ronika', 'Lyndsy', 'Jone', 'Chanta', 'Serita', 'Margree', 'Ruthel', 'Ruthella', 'Breunna', 'Cyann', 'Atlanta', 'Danniela', 'Junita', 'Floella', 'Brittane', 'Avanelle', 'Priscill', 'Luvina', 'Jeneva', 'Teretha', 'Clarita', 'Ilce', 'Jacqualyn', 'Justene', 'Daysia', 'Taylore', 'Sadi', 'Verenis', 'Shyenne', 'Toriana', 'Alvira', 'Kalah', 'Rajanee', 'Reonna', 'Mariadelaluz', 'Mychaela', 'Charnele', 'Aeisha', 'Shaquaya', 'Shaakira', 'Tayana', 'Cozetta', 'Kensey', 'Jazsmin', 'Kaitlyne', 'Hollye', 'Lavren', 'Sarit', 'Shanieka', 'Margorie', 'Virgene', 'Dannia', 'Clorissa', 'Breahna', 'Rayla', 'Dellanira', 'Megen', 'Matalie', 'Taraneh', 'Teila', 'Etter', 'Cheetara', 'Shetara', 'Jamielee', 'Kariann', 'Karess', 'Bea', 'Leyda', 'Misa', 'Mareena', 'Maisee', 'Yvonna', 'Yocelyne', 'Yilda', 'Sabrinna', 'Sirenia', 'Tyriel', 'Darrielle', 'Siedah', 'Yuko', 'Stevee', 'Chrystle', 'Shaterrica', 'Janyll', 'Evelisse', 'Belkis', 'Renesmae', 'Sahily', 'Zurie', 'Edelia', 'Sequoya', 'Waldine', 'Marinell', 'Moya', 'Lavenia', 'Liboria', 'Meliah', 'Meliyah', 'Mio', 'Xitllali', 'Nare', 'Oliviah', 'Mayrani', 'Sravya', 'Valeska', 'Riona', 'Lashaundra', 'Phebe', 'Yeira', 'Zarai', 'Ayanah', 'Kriti', 'Kaileah', 'Donata', 'Jenavee', 'Daphnee', 'Gurneet', 'Emmalie', 'Rowrenia', 'Haisley', 'Harbor', 'Arilyn', 'Aubrii', 'Avielle', 'Avyn', 'Bethenny', 'Arienne', 'Anyeli', 'Brilyn', 'Cataleyah', 'Chisom', 'Dalis', 'Malaiya', 'Meela', 'Karsynn', 'Kaselyn', 'Kashlyn', 'Amorette', 'Lenita', 'Adabelle', 'Allisyn', 'Alyzah', 'Aaralynn', 'Avyanna', 'Aylinn', 'Bexley', 'Blakeleigh', 'Caeli', 'Chizaram', 'Avriana', 'Clarity', 'Juanelle', 'Jerelene', 'Eluteria', 'Lamerle', 'Aletheia', 'Abrie', 'Adelie', 'Elleigh', 'Emmelyn', 'Emsley', 'Everlynn', 'Galileah', 'Derrica', 'Keondria', 'Keneshia', 'Amberley', 'Valkyrie', 'Yazleemar', 'Maybree', 'Shloka', 'Neah', 'Oluwatomisin', 'Saydi', 'Jessalynn', 'Katalaya', 'Katniss', 'Kendalynn', 'Davionna', 'Mercie', 'Danett', 'Deetya', 'Dilynn', 'Dunya', 'Camyla', 'Elliotte', 'Ivee', 'Jadie', 'Kyleah', 'Laelani', 'Mileah', 'Nalanie', 'Nixie', 'Oviya', 'Lakecia', 'Sharnae', 'Abbagail', 'Derica', 'Truly', 'Tvisha', 'Vedika', 'Xiclaly', 'Syra', 'Idamae', 'Dashanti', 'Neita', 'Siona', 'Jourdyn', 'Analyn', 'Shamiracle', 'Daylene', 'Kadeesha', 'Malgorzata', 'Dashay', 'Else', 'Pixie', 'Myleah', 'Myleen', 'Nadiah', 'Sadhana', 'Samai', 'Seraphine', 'Sereen', 'Sharanya', 'Simar', 'Mahlia', 'Inika', 'Jennavieve', 'Genevy', 'Harshita', 'Hennessey', 'Zari', 'Jamiracle', 'Loveta', 'Coleta', 'Adabella', 'Alesana', 'Brinleigh', 'Azlyn', 'Braelee', 'Shaquila', 'Shanyia', 'Jamilia', 'Corlis', 'Dulcie', 'Desha', 'Timya', 'Rakiya', 'Tyliah', 'Taura', 'Terasha', 'Gaynel', 'Roylene', 'Janecia', 'Alonda', 'Tyneisha', 'Fleurette', 'Mayleigh', 'Meklit', 'Sarenity', 'Gulianna', 'Itzayanna', 'Ivyana', 'Jazmynn', 'Esmie', 'Favor', 'Kimbella', 'Shanavia', 'Yaritzel', 'Daun', 'Tykerria', 'Antoria', 'Shykemmia', 'Remona', 'Lucrezia', 'Cicily', 'Aradhya', 'Esmae', 'Evah', 'Jhene', 'Katalia', 'Cyrine', 'Delayza', 'Eleonor', 'Arohi', 'Aseneth', 'Avarose', 'Caia', 'Hulene', 'Valera', 'Nasaria', 'Makesha', 'Zera', 'Aahna', 'Aariyah', 'Aashvi', 'Adalene', 'Annaliyah', 'Aira', 'Alaska', 'Amila', 'Amour', 'Kaylinn', 'Isidora', 'Marija', 'Suha', 'Marigold', 'Mayzie', 'Liesel', 'Darielle', 'Sapphira', 'Scotland', 'Serah', 'Srinika', 'Novah', 'Primrose', 'Latresa', 'Theia', 'Alleen', 'Agness', 'Estanislada', 'Ellouise', 'Emilija', 'Glynnis', 'Paulene', 'Wilna', 'Maedell', 'Lometa', 'Cressie', 'Allyne', 'Calleen', 'Joaquina', 'Lashelle', 'Modene', 'Jonie', 'Minta', 'Milady', 'Jearlene', 'Rithika', 'Simrat', 'Vonzella', 'Venna', 'Pabla', 'Benilde', 'Eniya', 'Shakendra', 'Ailen', 'Aina', 'Marionna', 'Millette', 'Emiyah', 'Kayloni', 'Keerat', 'Keeva', 'Lailany', 'Mishka', 'Naevia', 'Nathania', 'Nyari', 'Jayah', 'Kaavya', 'Frankee', 'Anahita', 'Anella', 'Elizabella', 'Damara', 'Juaquina', 'Gracia', 'Rozalyn', 'Ruhani', 'Novalie', 'Mialani', 'Minka', 'Nessa', 'Sissi', 'Sitara', 'Jaynee', 'Jeyla', 'Gizzelle', 'Maila', 'Maizy', 'Lamaya', 'Katalea', 'Khamila', 'Shekita', 'Chinita', 'Anshika', 'Aerabella', 'Azelia', 'Cici', 'Daleyssa', 'Divinity', 'Fermina', 'Murline', 'Mattye', 'Devra', 'Jakya', 'Santresa', 'Larene', 'Deola', 'Liliann', 'Lexxi', 'Kamori', 'Myonna', 'Yitzel', 'Lindalee', 'Tira', 'Mairyn', 'Riyana', 'Shaleen', 'Rhyleigh', 'Fleeta', 'Gabrielly', 'Deajah', 'Yarielis', 'Arelie', 'Amore', 'Sacoria', 'Hedda', 'Wanza', 'Janyth', 'Yaslin', 'Brianah', 'Anyelin', 'Shayleigh', 'Lace', 'Kurstin', 'Zakhia', 'Charvi', 'Raylie', 'Nyellie', 'Natalyn', 'Libra', 'Khianna', 'Jolena', 'Genevive', 'Jadine', 'Deniya', 'Madysin', 'Porchia', 'Layleen', 'Kemiya', 'Donesha', 'Jewelene', 'Sakari', 'Narely', 'Maylyn', 'Halina', 'Nelli', 'Myangel', 'British', 'Adore', 'Alainah', 'Shadonna', 'Aminta', 'Marolyn', 'Jalea', 'Breelynn', 'Carah', 'Sagrario', 'Akyra', 'Kailei', 'Kenza', 'Renette', 'Joanann', 'Solimar', 'Semira', 'Harneet', 'Jahayra', 'Evanny', 'Gyzelle', 'Nathalee', 'Dalphine', 'Mane', 'Merelyn', 'Kayliana', 'Aubryn', 'Brooklyne', 'Kimari', 'Dandra', 'Cilia', 'Laren', 'Denetra', 'Kandise', 'Makynli', 'Janan', 'Rosalea', 'Ludean', 'Syndey', 'Shaney', 'Vannary', 'Reynalda', 'Rainee', 'Trishia', 'Kirbie', 'Kristyl', 'Lynzi', 'Shardai', 'Yaricza', 'Tarina', 'Lynley', 'Maniah', 'Arcilia', 'Keaundra', 'Karrigan', 'Madeliene', 'Lessley', 'Laurynn', 'Ragen', 'Essance', 'Celsey', 'Caitlen', 'Dulse', 'Sulamita', 'Evlyn', 'Dorace', 'Marciana', 'Tenecia', 'Natarsha', 'Analiza', 'Ladene', 'Tatumn', 'Maricsa', 'Lysa', 'Leydi', 'Limayri', 'Rebbeca', 'Amreen', 'Saina', 'Remedy', 'Rael', 'Nami', 'Nalini', 'Naiyah', 'Moxie', 'Olina', 'Whitni', 'Dayannara', 'Diara', 'Arma', 'Giorgia', 'Evee', 'Bricia', 'Brizeyda', 'Chihiro', 'Ayram', 'Ayushi', 'Isolde', 'Husna', 'Khrystal', 'Kriston', 'Raylena', 'Porschea', 'Samanthia', 'Mylinda', 'Ginelle', 'Coreena', 'Aryel', 'Mallary', 'Maciel', 'Kursten', 'Leandrea', 'Mackensie', 'Camri', 'Itzamara', 'Aryiah', 'Alayssa', 'Andreah', 'Anberlin', 'Amrie', 'Breah', 'Ryane', 'Tonna', 'Valisa', 'Adryanna', 'Ajia', 'Robynne', 'Brystal', 'Brylynn', 'Kaleigha', 'Danyka', 'Dannica', 'Caylen', 'Jonier', 'Ruthy', 'Mada', 'Vaida', 'Yeila', 'Zoelle', 'Elzora', 'Samreen', 'Seylah', 'Sayla', 'Allina', 'Stellarose', 'Starlett', 'Simrit', 'Shina', 'Bernestine', 'Tranisha', 'Tiffanyann', 'Adamarys', 'Tylyn', 'Shahrzad', 'Addisson', 'Aeriana', 'Alaiya', 'Anni', 'Ariely', 'Anvika', 'Aneya', 'Bani', 'Ayame', 'Ayaka', 'Aviella', 'Alabama', 'Adalyne', 'Teresea', 'Ishana', 'Hargun', 'Jasnoor', 'Deby', 'Dannelle', 'Swetha', 'Catherina', 'Bridgitt', 'Birgit', 'Calisi', 'Defne', 'Delsa', 'Demiyah', 'Cataleah', 'Icel', 'Ixel', 'Jazman', 'Jessicamarie', 'Desaree', 'Chika', 'Estephani', 'Dilcia', 'Dartha', 'Lesieli', 'Breyona', 'Waynette', 'Verma', 'Calletana', 'Cherisa', 'Casara', 'Jil', 'Shella', 'Renell', 'Venise', 'Loura', 'Kaylia', 'Leileen', 'Jessel', 'Janesa', 'Kaelly', 'Julina', 'Joselinne', 'Juna', 'Hazelle', 'Mauricia', 'Octaviana', 'Rumalda', 'Kataleyah', 'Kimela', 'Mosella', 'Delone', 'Shemekia', 'Balinda', 'Hazell', 'Deboraha', 'Gizell', 'Camilia', 'Avalina', 'Audreyana', 'Baran', 'Genesee', 'Elyzabeth', 'Eliya', 'Kathyleen', 'Deeksha', 'Scherry', 'Angelyne', 'Amiliana', 'Amaira', 'Jeani', 'Alysen', 'Alania', 'Adiana', 'Chinyere', 'Lamesha', 'Keiley', 'Lanea', 'Rosely', 'Surabhi', 'Dyanne', 'Mallika', 'Tabbatha', 'Shilpa', 'Morgyn', 'Narali', 'Jenevie', 'Lovette', 'Nayleah', 'Navi', 'Meili', 'Nazly', 'Nethra', 'Earlee', 'Layloni', 'Kiannah', 'Lilyanah', 'Liannah', 'Jaylenn', 'Jiayi', 'Kattleya', 'Kanna', 'Jimin', 'Kaleesi', 'Kailia', 'Itzy', 'Itzela', 'Jasminemarie', 'Malynda', 'Jeweline', 'Eloiza', 'Carolin', 'Helma', 'Arlyle', 'Giannina', 'Constancia', 'Elyce', 'Montoya', 'Marline', 'Krystale', 'Maghan', 'Laquitta', 'Elishia', 'Aliciana', 'Maralee', 'Brunetta', 'Cybil', 'Dannell', 'Cherene', 'Agueda', 'Guillerma', 'Haillie', 'Bobbe', 'Gesselle', 'Esthefany', 'Sian', 'Ouita', 'Sasheen', 'Abigaile', 'Demarie', 'Edwena', 'Aamiyah', 'Breaunna', 'Bryssa', 'Catlyn', 'Xaviera', 'Sierria', 'Skyelar', 'Aujanae', 'Rika', 'Roshelle', 'Roxsana', 'Zonia', 'Tifanie', 'Thavy', 'Teala', 'Tanea', 'Loukisha', 'Melita', 'Keiona', 'Maryfer', 'Delcenia', 'Akila', 'Gwenevere', 'Obdulia', 'Texana', 'Licette', 'Larina', 'Lany', 'Yailine', 'Yomara', 'Zavia', 'Sydne', 'Mariadelourdes', 'Margeaux', 'Daneille', 'Doni', 'Donalee', 'Darilyn', 'Jennfier', 'Jeanny', 'Haliegh', 'Dymon', 'Callee', 'Cydni', 'Daesha', 'Tamila', 'Tresha', 'Mckennah', 'Shouana', 'Xcaret', 'Yeneisy', 'Yumalai', 'Ziana', 'Hanny', 'Shanisha', 'Nissi', 'Mirabel', 'Miarose', 'Valerya', 'Rosalin', 'Saliha', 'Samayah', 'Smriti', 'Jozette', 'Gari', 'Jeanell', 'Dyann', 'Vonna', 'Velina', 'Salli', 'Nonie', 'Olena', 'Camela', 'Eufracia', 'Ethelyne', 'Yuhan', 'Silveria', 'Silvestra', 'Thressa', 'Tiahna', 'Vasti', 'Calee', 'Florentine', 'Sherre', 'Almira', 'Zitlalli', 'Vianne', 'Yaribeth', 'Yarelie', 'Robbye', 'Jasminne', 'Sophiah', 'Saryah', 'Hermalinda', 'Sinclaire', 'Korissa', 'Lanee', 'Keeana', 'Parlee', 'Luceal', 'Jetta', 'Mairani', 'Tameisha', 'Haruna', 'Chasiti', 'Leighanne', 'Anaisabel', 'Aanchal', 'Alesa', 'Annisa', 'Brigitta', 'Elideth', 'Chua', 'Cherrish', 'Aleece', 'Maizee', 'Navie', 'Philomene', 'Jilian', 'Jesi', 'Kortnie', 'Beija', 'Delissa', 'Shiree', 'Silbia', 'Tamura', 'Aerianna', 'Abegail', 'Braniya', 'Calyn', 'Carlynn', 'Anjana', 'Angelik', 'Alyzabeth', 'Amorie', 'Joannamarie', 'Kerissa', 'Kennesha', 'Laruen', 'Korrina', 'Felisitas', 'Gilma', 'Essica', 'Gerarda', 'Petronila', 'Dorotea', 'Maguadalupe', 'Najla', 'Loana', 'Illyana', 'Amunique', 'Antwanette', 'Krystan', 'Shaniquia', 'Shanequia', 'Rainy', 'Raynesha', 'Shayleen', 'Stephanee', 'Sharaya', 'Nikkole', 'Cecille', 'Christyne', 'Auriel', 'Franki', 'Zelina', 'Deshanae', 'Deshawna', 'Tyneshia', 'Tyrisha', 'Deangela', 'Dynasia', 'Maigan', 'Jericka', 'Jackalyn', 'Kayln', 'Ceslie', 'Bethaney', 'Samanvi', 'Saidee', 'Rosibel', 'Spirit', 'Srishti', 'Varnika', 'Vanshika', 'Rosha', 'Rheya', 'Yoyo', 'Veyda', 'Weslyn', 'Palak', 'Sieanna', 'Riannah', 'Lovetta', 'Lota', 'Florice', 'Hortence', 'Zuley', 'Zoejane', 'Zemira', 'Mineola', 'Senona', 'Concepsion', 'Conrada', 'Dardanella', 'Rhina', 'Rubicela', 'Raissa', 'Porchea', 'Latiana', 'Landy', 'Monee', 'Maritssa', 'Marjani', 'Meosha', 'Cecilie', 'Britanie', 'Brandilyn', 'Khrystina', 'Atenas', 'Kristeena', 'Kristell', 'Kristianne', 'Angelicia', 'Alexandera', 'Jaimy', 'Jeneffer', 'Hayde', 'Vickye', 'Suzzanne', 'Susi', 'Sherrilyn', 'Sanda', 'Janeal', 'Stephnie', 'Luwana', 'Shenae', 'Yaris', 'Marzell', 'Lashane', 'Liandra', 'Keionna', 'Korri', 'Marlet', 'Marytza', 'Lorraina', 'Deepika', 'Devi', 'Fion', 'Darrah', 'Dalisha', 'Karessa', 'Karrisa', 'Kasara', 'Ismar', 'Jacquilyn', 'Janica', 'Jeannett', 'Samanatha', 'Samra', 'Sayda', 'Breklyn', 'Ashika', 'Bita', 'Allysha', 'Areil', 'Arlenne', 'Artelia', 'Janicia', 'Corinthia', 'Angellica', 'Maygen', 'Maygan', 'Odelle', 'Wenonah', 'Perfecta', 'Anjelika', 'Solmaira', 'Fredonia', 'Burgandy', 'Chelcee', 'Kellsey', 'Lyann', 'Jazmon', 'Ardie', 'Latunya', 'Benetta', 'Delphina', 'Ortensia', 'Obelia', 'Lurene', 'Refujia', 'Noriko', 'Ladelle', 'Lella', 'Shanie', 'Shawndra', 'Zell', 'Zela', 'Wenda', 'Troylene', 'Merrilyn', 'Kapri', 'Timesha', 'Gwendlyn', 'Jenean', 'Lamona', 'Ladana', 'Cina', 'Cybele', 'Eugina', 'Anjeanette', 'Vana', 'Jeneal', 'Cherlene', 'Railee', 'Palin', 'Yuliet', 'Rechelle', 'Sherisse', 'Pollyanna', 'Tiphani', 'Tiffanee', 'Vanisha', 'Yurico', 'Junko', 'Shannell', 'Shalise', 'Kimberlina', 'Kerra', 'Shantee', 'Emmelia', 'Micala', 'Lexxus', 'Candiss', 'Chauntel', 'Alese', 'Margit', 'Any', 'Ambur', 'Chrysta', 'Janese', 'Jinny', 'Zaydee', 'Makisha', 'Carola', 'Marjan', 'Samanth', 'Shaquinta', 'Polette', 'Riane', 'Nitasha', 'Kasarah', 'Jillianne', 'Keidra', 'Karrah', 'Kaytie', 'Sondi', 'Swayzie', 'Laporcha', 'Bridgit', 'Chanika', 'Antoniette', 'Jessicia', 'Francies', 'Kaizley', 'Negin', 'Mistica', 'Lorenia', 'Kalise', 'Kynslie', 'Dene', 'Jizel', 'Jinger', 'Jayli', 'Jariya', 'Joelynn', 'Haylin', 'Isabellah', 'Ciria', 'Dealva', 'Barbarita', 'Prudencia', 'Wanna', 'Marieli', 'Madisynn', 'Madalyne', 'Artisha', 'Everlyn', 'Cyerra', 'Liezl', 'Kabao', 'Karmina', 'Kashmir', 'Nani', 'Mithra', 'Mishika', 'Milynn', 'Mehr', 'Marybella', 'Maisey', 'Maddy', 'Lyah', 'Marnee', 'Machele', 'Ladona', 'Lorilei', 'Liara', 'Alahni', 'Analaya', 'Amalya', 'Alyannah', 'Aayla', 'Aarini', 'Arliz', 'Cyra', 'Asenet', 'Avy', 'Avaree', 'Ciela', 'Evangelyn', 'Kaidynce', 'Isella', 'Ilaria', 'Kattaleya', 'Laveah', 'Lareen', 'Lanah', 'Deema', 'Hannaley', 'Fiora', 'Eviana', 'Ellieana', 'Elisabetta', 'Dejanira', 'Manaia', 'Malibu', 'Charlsey', 'Kaytee', 'Kinberly', 'Cinderella', 'Miana', 'Kimm', 'Koni', 'Eraina', 'Dory', 'Deette', 'Nysa', 'Nyima', 'Nikitha', 'Anasophia', 'Alissandra', 'Alisi', 'Corynn', 'Aubreyana', 'Anjani', 'Oliana', 'Nura', 'Nihira', 'Loveda', 'Gayathri', 'Kleigh', 'Ladaisha', 'Ilette', 'Jillene', 'Jalina', 'Izellah', 'Tiaira', 'Mickala', 'Macarena', 'Rubina', 'Shadow', 'Emillie', 'Morine', 'Novell', 'Oletta', 'Pura', 'Winna', 'Synia', 'Shyloh', 'Kaizlee', 'Raley', 'Merly', 'Na', 'Yenia', 'Shayanne', 'Raeana', 'Tiauna', 'Tanairy', 'Georganna', 'Mahsa', 'Maiquel', 'Korena', 'Yamel', 'Shamonica', 'Romesha', 'Terrisha', 'Hannan', 'Hillarie', 'Feliza', 'Courtny', 'Lyndsee', 'Katelan', 'Lakedra', 'Elisabel', 'Cynthya', 'Dannah', 'Darienne', 'Dejanique', 'Madalin', 'Makynzi', 'Gwendolynn', 'Alaine', 'Bridney', 'Kimorah', 'Klee', 'Kynedi', 'Loreley', 'Parthenia', 'Aubryana', 'Aryannah', 'Edeline', 'Elen', 'Raguel', 'Marizela', 'Michella', 'Haasini', 'Tristine', 'Elis', 'Pattye', 'Tanishia', 'Jenel', 'Jurea', 'Laini', 'Britania', 'Christabelle', 'Dafney', 'Laterica', 'Angelmarie', 'Asuzena', 'Aleea', 'Teneka', 'Yicel', 'Malisha', 'Prairie', 'Makelle', 'Shaelee', 'Dafina', 'Hisaye', 'Adayah', 'Alexsia', 'Allysen', 'Takako', 'Thamara', 'Trinitie', 'Shaneen', 'Sueellen', 'Telma', 'Meyah', 'Rorie', 'Preslea', 'Elbia', 'Ginna', 'Marja', 'Marites', 'Neisha', 'Shir', 'Shastelyn', 'Saraih', 'Unity', 'Makinna', 'Franchelle', 'Azadeh', 'Charito', 'Joli', 'Amyrah', 'Sharlee', 'Jasey', 'Kortlynn', 'Kiari', 'Kyria', 'Eleina', 'Elany', 'Daleah', 'Sumi', 'Kileigh', 'Lorianna', 'Macady', 'Naviah', 'Mattilyn', 'Raylyn', 'Bridgitte', 'Hasina', 'Johnelle', 'Gwendlyon', 'Itxel', 'Iyanah', 'Jeidy', 'Jaidynn', 'Jaslynne', 'Zoii', 'Tensley', 'Yolando', 'Keyarah', 'Keyri', 'Katherinne', 'Thersa', 'Sinahi', 'Secret', 'Vivika', 'Yobana', 'Hailley', 'Haliey', 'Isys', 'Deyla', 'Kassidee', 'Jalie', 'Florestela', 'Cyla', 'Samyuktha', 'Libni', 'Laritza', 'Breannah', 'Breya', 'Keelin', 'Jarelly', 'Jenyfer', 'Julyanna', 'Kaetlyn', 'Mixtli', 'Mykaila', 'Nasia', 'Judieth', 'Misako', 'Bre', 'Shaley', 'Gelila', 'Aariana', 'Laquetta', 'Shizu', 'Annay', 'Annai', 'Breeze', 'Mahum', 'Harsimran', 'Helaina', 'Alexza', 'Tangelia', 'Shellye', 'Blondena', 'Keva', 'Suzzane', 'Vallorie', 'Absidy', 'Alis', 'Alexxia', 'Allura', 'Ariba', 'Annete', 'Anett', 'Deyanara', 'Ellise', 'Majorie', 'Hibah', 'Chaselyn', 'Hennesy', 'Gayatri', 'Kathelyn', 'Caylah', 'Athyna', 'Arpita', 'Ciclaly', 'Emmamarie', 'Virjinia', 'Tyna', 'Cyd', 'Glennda', 'Littie', 'Orlean', 'Derinda', 'Hether', 'Clata', 'Pleshette', 'Maricelda', 'Charmin', 'Matsuye', 'Tamitha', 'Armanda', 'Sayaka', 'Lacresia', 'Demonica', 'Skie', 'Trynity', 'Sereena', 'Shefali', 'Rewa', 'Reshonda', 'Yalanda', 'Anissia', 'Layni', 'Paolina', 'Manaal', 'Mariali', 'Merina', 'Milenia', 'Millenia', 'Moncerrath', 'Monzerrath', 'Kaydie', 'Adianna', 'Toluwalase', 'Trysta', 'Ainsleigh', 'Alianah', 'Meuy', 'Meloney', 'Talea', 'Sheetal', 'Shalana', 'Venesa', 'Teana', 'Kiki', 'Imee', 'Aubryanna', 'Allyanna', 'Ambrie', 'Amory', 'Aniyha', 'Caelynn', 'Reita', 'Rylann', 'Aijah', 'Aaliyha', 'Alezandra', 'Yeraldine', 'Forestine', 'Sameeha', 'Caeley', 'Britzy', 'Blessin', 'Armilda', 'Birda', 'Lorrine', 'Krisalyn', 'Linell', 'Maryl', 'Karole', 'Maryela', 'Mckinzy', 'Madailein', 'Kendi', 'Kayda', 'Jenasis', 'Madelis', 'Jamyiah', 'Gabryela', 'Catie', 'Genessa', 'Jamelia', 'Jenene', 'Nicholl', 'Saralyn', 'Taylah', 'Xandra', 'Jezlyn', 'Zakayla', 'Jaira', 'Veena', 'Shaden', 'Sahiti', 'Sahian', 'Shelsey', 'Sreya', 'Zianna', 'Angeleah', 'Camily', 'Lesvia', 'Sonda', 'Franceska', 'Cytlaly', 'Ylonda', 'Issis', 'Moon', 'Joei', 'Mariposa', 'Ramandeep', 'Preeti', 'Niobe', 'Sherran', 'Nichola', 'Letrice', 'Waneda', 'Meka', 'Takeshia', 'Leaann', 'Girlie', 'Olar', 'Pearlena', 'Carlean', 'Dhanya', 'Chastelin', 'Aryanah', 'Brihana', 'Bijou', 'Haifa', 'Genesiss', 'Genavie', 'Enna', 'Jazzel', 'Japleen', 'Iana', 'Rahel', 'Rylyn', 'Pragya', 'Yosselyn', 'Yarelin', 'Ellasyn', 'Charlaine', 'Zayli', 'Taide', 'Jodean', 'Emilynn', 'Channon', 'Carinne', 'Anaira', 'Amisadai', 'Caraline', 'Danella', 'Debanhy', 'Devanee', 'Koneta', 'Jenie', 'Hollee', 'Marelie', 'Mahathi', 'Madilynne', 'Lylia', 'Loreli', 'Lolah', 'Lexine', 'Maylynn', 'Clarinda', 'Marlynn', 'Netra', 'Makaylin', 'Naira', 'Naleah', 'Mishel', 'Myli', 'Charlotta', 'Arlisa', 'Kaylynne', 'Kamillah', 'Ksenia', 'Briseidy', 'Aysel', 'Anaily', 'Eulean', 'Adilee', 'Abri', 'Aidynn', 'Alisyn', 'Alicen', 'Marveline', 'Lupie', 'Mariabelen', 'Makenah', 'Kyliegh', 'Foye', 'Yajahira', 'Trenda', 'Tya', 'Nattaly', 'Netanya', 'Supriya', 'Teja', 'Srija', 'Sherra', 'Janissa', 'Mysha', 'Essfa', 'Alexandrya', 'Abi', 'Takhia', 'Jaeli', 'Jaelynne', 'Dianey', 'Denisa', 'Aleli', 'Akina', 'Aayushi', 'Adanna', 'Aunika', 'Ithzel', 'Caricia', 'Kallyn', 'Karmin', 'Kindall', 'Gredmarie', 'Peace', 'Jennalee', 'Yaindhi', 'Arcola', 'Trannie', 'Lyza', 'Mackynzie', 'Peggye', 'Zenab', 'Megyn', 'Navina', 'Naileah', 'Maddelyn', 'Luxe', 'Arkie', 'Belvia', 'Edilia', 'Monda', 'Ridhi', 'Peyten', 'Sorayah', 'Syrena', 'Amberle', 'Johnita', 'Jerrye', 'Alfa', 'Jonita', 'Lakie', 'Jenalee', 'Minami', 'Morena', 'Elsbeth', 'Sylia', 'Eunique', 'Ellisa', 'Lanai', 'Jesselyn', 'Jolissa', 'Julizza', 'Laquitha', 'Jobina', 'Wyvonne', 'Shalese', 'Deshannon', 'Almendra', 'Alisandra', 'Geraldene', 'Abygale', 'Katelyne', 'Kennede', 'Karisia', 'Lindzy', 'Keyhla', 'Emilea', 'Dacey', 'Jalah', 'Adrienna', 'Aisa', 'Alaisha', 'Brithney', 'Calynn', 'Cassity', 'Brendy', 'Reagen', 'Myrah', 'Montserrath', 'Pheobe', 'Nyeli', 'Jocell', 'Serenidy', 'Issabela', 'Hanalei', 'Laelah', 'Emmylou', 'Geraldy', 'Ovetta', 'Analena', 'Allyna', 'Aliyanah', 'Magdalyn', 'Suann', 'Ronee', 'Amey', 'Chirstina', 'Trude', 'Jearldine', 'Maeleigh', 'Lizzy', 'Liviana', 'Eithel', 'Meryem', 'Yaneisy', 'Shatika', 'Zeniyah', 'Xaylee', 'Pennelope', 'Xochilth', 'Jullie', 'Saki', 'Shaiann', 'Haille', 'Dannya', 'Kerie', 'Chianti', 'Leza', 'Koreen', 'Letricia', 'Lamanda', 'Kinza', 'Marisella', 'Joelyn', 'Cinde', 'Chyrl', 'Cece', 'Boni', 'Felecity', 'Faithe', 'Delayna', 'Diamon', 'Daley', 'Darah', 'France', 'Kolina', 'Kieu', 'Grizel', 'Shaleigh', 'Shaylea', 'Anitza', 'Carrolyn', 'Olimpia', 'Jeannene', 'Victoriana', 'Azara', 'Avelynn', 'Aveah', 'Ariam', 'Devanie', 'Daleisa', 'Karelly', 'Karalynn', 'Keyleen', 'Kendallyn', 'Graceyn', 'Falynn', 'Evoleht', 'Everlie', 'Emri', 'Hartlee', 'Eleena', 'Jailee', 'Insiya', 'Analysia', 'Chalee', 'Amzie', 'Amilya', 'Celisa', 'Airabella', 'Laketha', 'Kyoko', 'Saria', 'Neli', 'Melonee', 'Neidy', 'Nyanza', 'Aizlynn', 'Arthurine', 'Mikhaela', 'Adalae', 'Parveen', 'Lotoya', 'Evanjelina', 'Deborra', 'Lunna', 'Makylah', 'Mckinleigh', 'Mayalen', 'Ladasia', 'Javia', 'Evian', 'Jaelee', 'Oluwatamilore', 'Payzlee', 'Reiley', 'Samarra', 'Chyler', 'Areona', 'Vanesha', 'Tomisha', 'Betzaira', 'Dalana', 'Destenie', 'Brennah', 'Cassidie', 'Deziray', 'Dimond', 'Braeleigh', 'Aylee', 'Anastyn', 'Amillia', 'Jailyne', 'Jissell', 'Jailenne', 'Inioluwa', 'Jensyn', 'Allia', 'Evolett', 'Emmalynne', 'Emberlee', 'Emaline', 'Ellayna', 'Kollins', 'Keyly', 'Livi', 'Judeen', 'Eleah', 'Vonceil', 'Kaaliyah', 'Girtie', 'Gianelle', 'Iniya', 'Harlynn', 'Greidy', 'Shayli', 'Belina', 'Auri', 'Avangeline', 'Alizey', 'Arlynn', 'Anelise', 'Aneli', 'Delmira', 'Vanassa', 'Ceana', 'Ambre', 'Florita', 'Balbina', 'Clova', 'Danice', 'Aydee', 'Carlena', 'Benicia', 'Soumya', 'Lissandra', 'Ling', 'Liahna', 'Leonna', 'Leilana', 'Reeya', 'Krisinda', 'Maleiah', 'Maiyah', 'Mailin', 'Lucciana', 'Naydeen', 'Nailani', 'Miette', 'Yeva', 'Suley', 'Shravya', 'Kyia', 'Shree', 'Cerise', 'Katriana', 'Jaskiran', 'Mone', 'Latijera', 'Rosicela', 'Sidnee', 'Rosisela', 'Troi', 'Victorya', 'Creasie', 'Latorsha', 'Erienne', 'Jovonna', 'Jessia', 'Jeny', 'Dejia', 'Destynie', 'Barbi', 'Marlinda', 'Shakeitha', 'Mistelle', 'Ziona', 'Zarahi', 'Xiadani', 'Zyrah', 'Zoriah', 'Pamla', 'Cinamon', 'Bernardette', 'Makensie', 'Lexani', 'Miyana', 'Costella', 'Cliffie', 'Lashune', 'Windie', 'Rhondalyn', 'Avonelle', 'Marcine', 'Berneda', 'Rosabelle', 'Huldah', 'Emagene', 'Clarabell', 'Marceil', 'Ula', 'Renika', 'Shaterica', 'Labrittany', 'Zelia', 'Aidy', 'Abeeha', 'Maebelle', 'Farzona', 'Bryelle', 'Aphrodite', 'Diyora', 'Zilphia', 'Ercell', 'Starlynn', 'Renad', 'Reham', 'Marwah', 'Raaina', 'Mehreen', 'Chermaine', 'Ameliah', 'Hajra', 'Anamika', 'Caoimhe', 'Tasheka', 'Cladie', 'Claretta', 'Ratzy', 'Parizoda', 'Tzurty', 'Simrah', 'Miamor', 'Mala', 'Yittel', 'Ranata', 'Clellie', 'Dewana', 'Kenyada', 'Sennie', 'Estie', 'Oprah', 'Chessie', 'Rumaisa', 'Rosmery', 'Shenell', 'Cosima', 'Ellyanna', 'Hebe', 'Aamira', 'Beily', 'Areesha', 'Amilah', 'Mahdiya', 'Ramata', 'Naava', 'Cannie', 'Dorraine', 'Verlee', 'Anija', 'Garnita', 'Lorenda', 'Mikia', 'Marvella', 'Sharma', 'Pamula', 'Anmarie', 'Valicia', 'Collene', 'Ronetta', 'Floris', 'Andora', 'Berdina', 'Ivadell', 'Lorain', 'Kevinisha', 'Corielle', 'Rinda', 'Jodelle', 'Arta', 'Kalima', 'Kalifa', 'Liat', 'Dashawna', 'Jahnae', 'Eylin', 'Tahmina', 'Sherin', 'Niambi', 'Tonjua', 'Hanifah', 'Maham', 'Sokhna', 'Carliss', 'Nimra', 'Quianna', 'Shadai', 'Renella', 'Eliska', 'Alima', 'Agata', 'Adenike', 'Charizma', 'Shirlean', 'Joycelin', 'Cyanne', 'Ambika', 'Albana', 'Noshin', 'Merve', 'Sanjida', 'Khiabet', 'Maudrey', 'Manuella', 'Linder', 'Bisma', 'Shataya', 'Shandel', 'Samanthamarie', 'Liron', 'Liann', 'Merdis', 'Daquana', 'Chanee', 'Ezora', 'Janiqua', 'Jamielyn', 'Kyesha', 'Eulalie', 'Montressa', 'Alzina', 'Monez', 'Casmira', 'Eileene', 'Ethelmae', 'Veneta', 'Madiha', 'Akeema', 'Daneisha', 'Cecely', 'Gwendola', 'Javonna', 'Teshia', 'Yaniris', 'Valbona', 'Corita', 'Deshanna', 'Kameka', 'Armina', 'Georgian', 'Shakeera', 'Saudia', 'Stacyann', 'Shenique', 'Ura', 'Felicie', 'Ezola', 'Janeece', 'Chavely', 'Ashling', 'Nakea', 'Shiana', 'Shadasia', 'Petronella', 'Virgin', 'Gunhild', 'Brianni', 'Grainne', 'Aneisha', 'Chaniece', 'Zalika', 'Tynasia', 'Tashauna', 'Shazia', 'Shatiqua', 'Sharissa', 'Shanyce', 'Shandell', 'Shakeyla', 'Vergia', 'Geraldyne', 'Dorita', 'Nathasha', 'Samanthajo', 'Amela', 'Afnan', 'Halimah', 'Dayatra', 'Shontrell', 'Tziry', 'Shanyah', 'Shawntell', 'Schwanda', 'Magalene', 'Si', 'Ramisa', 'Ioanna', 'Imane', 'Hadar', 'Ettel', 'Coumba', 'Chumy', 'Shiran', 'Lianny', 'Kimara', 'Nicha', 'Chestine', 'Fatmata', 'Chedva', 'Shaima', 'Shailyn', 'Zarin', 'Zahrah', 'Wania', 'Tsering', 'Syrai', 'Suriyah', 'No', 'Niylah', 'Meerab', 'Emanuela', 'Draizy', 'Giabella', 'Jeily', 'Sofya', 'Shantrelle', 'Analisse', 'Ramatoulaye', 'Raima', 'Sumaiyah', 'Stori', 'Tremeka', 'Beila', 'Clodagh', 'Lyniah', 'Giavana', 'Tikisha', 'Kesia', 'Shawan', 'Mazelle', 'Lear', 'Rosilyn', 'Jnaya', 'Jahnia', 'Shi', 'Henya', 'Jhoselyn', 'Doha', 'Dilara', 'Adelisa', 'Dedria', 'Troylynn', 'Basha', 'Fatimata', 'Ama', 'Ashantee', 'Chania', 'Donzella', 'Ya', 'Fahmida', 'Iysis', 'Neviah', 'Anastasiya', 'Brandel', 'Afra', 'Lendora', 'Zisel', 'Dwanda', 'Ciarah', 'Brighid', 'Rafia', 'Keamber', 'Virdie', 'Girtrude', 'Nakaya', 'Donis', 'Anslei', 'Alyene', 'Audell', 'Nahriah', 'Zakeria', 'Zoria', 'Nikeria', 'Kynley', 'Karaline', 'Jacquita', 'Shonteria', 'Carlyon', 'Tykira', 'Nykerria', 'Lema', 'Destyne', 'Kansas', 'Aryonna', 'Iyannah', 'Jamayah', 'Serenitee', 'Jood', 'Willean', 'Makyah', 'Kameria', 'Shelagh', 'Zarriah', 'Avionna', 'Arilynn', 'Vira', 'Lelar', 'Miyonna', 'Jaionna', 'Nakiah', 'Rubby', 'Henrene', 'Perlie', 'Tanyah', 'Luretha', 'Fannye', 'Arquilla', 'Albirta', 'Annakate', 'Akeria', 'Teola', 'Darthy', 'Amberleigh', 'Floriene', 'Alleyne', 'Karra', 'Shaneika', 'Nekita', 'Niketa', 'Kiaraliz', 'Anacarolina', 'Sharonica', 'Renota', 'Shambrica', 'Mylea', 'Jalicia', 'Shantavious', 'Antania', 'Derika', 'Rashunda', 'Shandrea', 'Teneisha', 'Wachovia', 'Jalecia', 'Leimomi', 'Lasondra', 'Tela', 'Caleah', 'Iwalani', 'Jamyri', 'Azyria', 'Napua', 'Lahela', 'Lehuanani', 'Lameka', 'Davelyn', 'Filippa', 'Tywanna', 'Toini', 'Pota', 'Berthe', 'Aliesha', 'Iolanda', 'Seaira', 'Kealohilani', 'Leialoha', 'Chastidy', 'Taimane', 'Taylorann', 'Briunna', 'Tyrielle', 'Alohilani', 'Jakala', 'Lakendria', 'Tiffinie', 'Laprecious', 'Kaylaann', 'Marigny', 'Roise', 'Kaidance', 'Niyla', 'Mahari', 'Zya', 'Ruthia', 'Timara', 'Caniya', 'Keirah', 'Arieonna', 'Alydia', 'Alivea', 'Ahmani', 'Elynn', 'Earnstine', 'Ramiya', 'Morrigan', 'Masiyah', 'Harmoney', 'Pearley', 'Jearlean', 'Korrine', 'Chyanna', 'Catena', 'Pacita', 'Kalle', 'Alzira', 'Tashayla', 'Tsugie', 'Yachiyo', 'Shellia', 'Sueno', 'Kazuyo', 'Kikumi', 'Shizuka', 'Chiyono', 'Shigeno', 'Tatsue', 'Fumiyo', 'Misayo', 'Momoyo', 'Hanayo', 'Misae', 'Dalaney', 'Dewanda', 'Itsuko', 'Nyamal', 'Claris', 'Virlee', 'Lulabelle', 'Valada', 'Neleigh', 'Rafelita', 'Placida', 'Dulcinea', 'Pita', 'Heer', 'Beren', 'Ramoncita', 'Orlinda', 'Florette', 'Deluvina', 'Lugarda', 'Crucita', 'Rafaelita', 'Pablita', 'Lamaria', 'Terriana', 'Terrianna', 'Dariyah', 'Carmie', 'Clotine', 'Antha', 'Takyla', 'Peachie', 'Akirah', 'Captola', 'Sadeel', 'Dosha', 'Miquela', 'Anilah', 'Erielle', 'Janiylah', 'Aubriel', 'Priti', 'Purvi', 'Shakeemah', 'Anjail', 'Shaheerah', 'Amneris', 'Melverine', 'Twilla', 'Kruti', 'Jalee', 'Shareefah', 'Muslimah', 'Tauheedah', 'Anabela', 'Yakima', 'Lyllian', 'Tanajah', 'Sakiyah', 'Eun', 'Yashika', 'Ji', 'Demiana', 'Mariaeduard', 'Snigdha', 'Dala', 'Kum', 'Myung', 'Hadiyah', 'Gopi', 'Cresta', 'In', 'Davita', 'Talayeh', 'Tracyann', 'Petula', 'Nerida', 'Jeaneen', 'Ilissa', 'Letta', 'Kishia', 'Gesenia', 'Bethsaida', 'Tanija', 'Ivelise', 'Marines', 'Angenette', 'Alanda', 'Lauraann', 'Darnetta', 'Alisande', 'Jeniya', 'Patria', 'Tieysha', 'Tasheen', 'Ife', 'Loredana', 'Amyjo', 'Chane', 'Nilka', 'Sharema', 'Grazia', 'Renna', 'Tahesha', 'Tarita', 'Jannis', 'Geriann', 'Areatha', 'Rosangel', 'Kemba', 'Noni', 'Margaretann', 'Kimberleigh', 'Latisa', 'Kiriaki', 'Bobbyjo', 'Walida', 'Lynanne', 'Niyanna', 'Daziah', 'Kharma', 'Pier', 'Marymargaret', 'Lorrain', 'Ketty', 'Helane', 'Tarnisha', 'Sherrice', 'Swati', 'Donnajean', 'Tunya', 'Annmargaret', 'Raffaella', 'Pina', 'Deneene', 'Lorriane', 'Shenise', 'Ziyonna', 'Evagelia', 'Chantae', 'Tasheema', 'Meaghen', 'Shanikqua', 'Lynnox', 'Taiesha', 'Sharima', 'Shantai', 'Shaena', 'Jamine', 'Rasheena', 'Tashi', 'Magdala', 'Edia', 'Lasheka', 'Tiasha', 'Quanita', 'Jomayra', 'Nairoby', 'Danamarie', 'Roena', 'Zasha', 'Shatema', 'Orissa', 'Elvire', 'Louisiana', 'Hoda', 'Kashana', 'Jaquanna', 'Jacqulin', 'Annamari', 'Marquia', 'Elmire', 'Viney', 'Sonna', 'Yokasta', 'Esma', 'Rella', 'Deloras', 'Janill', 'Samanthan', 'Ketsia', 'Chaunte', 'Aderonke', 'Sheindel', 'Shameen', 'Karema', 'Amalin', 'Glendaliz', 'Finesse', 'Talibah', 'Lakima', 'Geeta', 'Suehay', 'Dorice', 'Aesha', 'Lateasha', 'Kimitra', 'Omolola', 'Bobbette', 'Deliah', 'Carianne', 'Chanah', 'Laquandra', 'Laquanna', 'Yanick', 'Nathifa', 'Nakima', 'Gayl', 'Shamaine', 'Saquana', 'Nixzaliz', 'Chaye', 'Maleka', 'Latima', 'Yamira', 'Tashanna', 'Kathiria', 'Jameika', 'Jamesetta', 'Moniqua', 'Yamaris', 'Tasheba', 'Virgina', 'Aviance', 'Calogera', 'Candita', 'Kinga', 'Alissia', 'Onnolee', 'Johnda', 'Sebastiana', 'Michelena', 'Tecla', 'Mirriam', 'Sydel', 'Glema', 'Tatiyanna', 'Patrycia', 'Fortuna', 'Ebba', 'Carmelia', 'Liddie', 'Genella', 'Detta', 'Malvery', 'Evelene', 'Loretto', 'Nunziata', 'Jenan', 'Keshawna', 'Kinisha', 'Tikia', 'Sueanne', 'Cira', 'Charda', 'Midge', 'Annina', 'Delcina', 'Barbette', 'Danah', 'Isolina', 'Tanita', 'Gracemarie', 'Halleigh', 'Julita', 'Kaprice', 'Dorothyann', 'Binnie', 'Bettyjean', 'Frayda', 'Tashiana', 'Breshey', 'Charnise', 'Tashena', 'Meribeth', 'Sandralee', 'Heena', 'Walda', 'Latika', 'Rashaunda', 'Linde', 'Rosaleen', 'Illona', 'Clydette', 'Benay', 'Damonica', 'Anajah', 'Louelle', 'Lunette', 'Faduma', 'Nadeige', 'Meylin', 'Elverna', 'Etrulia', 'Ellaree', 'Rushie', 'Jayona', 'Mauri', 'Radiah', 'Runette', 'Terrah', 'Joia', 'Ezma', 'Glenys', 'Ramla', 'Shatasha', 'Berma', 'Chanteria', 'Chantrell', 'Elvi', 'Sharnell', 'Rether', 'Keshana', 'Ranesha', 'Earther', 'Zahirah', 'Anye', 'Khori', 'Saniyyah', 'Teniola', 'Anniemae', 'Oluwadamilola', 'Aldene', 'Amellia', 'Junice', 'Carolene', 'Ireoluwa', 'Nasra', 'Vernease', 'Delrose', 'Marysue', 'Mirlande', 'Lashannon', 'Taijah', 'Markiesha', 'Syanne', 'Jahiya', 'Vyonne', 'Reniya', 'Ryana', 'Idonia', 'Loette', 'Etheleen', 'Ariyon', 'Jeneane', 'Jamea', 'Airyana', 'Natesha', 'Bonnell', 'Savilla', 'Daneshia', 'Deneshia', 'Alexzandrea', 'Martharee', 'Elfreda', 'Danyla', 'Retaj', 'Childnotnamed', 'Kariana', 'Ladeja', 'Johnesha', 'Nariya', 'Zamariah', 'Shanyla', 'Zykiria', 'Micaella', 'Angeliyah', 'Camara', 'Kenniyah', 'Keyani', 'Renie', 'Aldena', 'Paytyn', 'Perma', 'Annamary', 'Roniyah', 'Zeniya', 'Capitola', 'Jaiana', 'Lakiya', 'Reida', 'Ahniya', 'Elanor', 'Dorothee', 'Joud', 'Ludmilla', 'Traniyah', 'Kjerstin', 'Jeylin', 'Teona', 'Marypat', 'Jacquelynne', 'Harmonii', 'Kenyah', 'Anora', 'Deniyah', 'Tyleah', 'Samora', 'Almeter', 'Floride', 'Lether', 'Aviah', 'Livie', 'Federica', 'Khalani', 'Dericka', 'Ronisue', 'Raziah', 'Emaya', 'Christyana', 'Rasheka', 'Jahira', 'Jalana', 'Lateria', 'Baneen', 'Davisha', 'Joyanna', 'Janelys', 'Raneisha', 'Israa', 'Shauntavia', 'Shericka', 'Deloma', 'Maryetta', 'Jeannetta', 'Tymber', 'Charmon', 'Lanise', 'Charlisa', 'Bloneva', 'Andrena', 'Katena', 'Latorria', 'Letoya', 'Quovadis', 'Lakeisa', 'Sihaam', 'Charo', 'Annaclara', 'Margretta', 'Nataki', 'Tyjae', 'Bahja', 'Shequila', 'Quadira', 'Toinette', 'Sumeya', 'Takita', 'Sherlonda', 'Daejah', 'Zyanna', 'Antonique', 'Linnae', 'Georgean', 'Charlane', 'Jakerria', 'Nimo', 'Saprina', 'Detrice', 'Nicolly', 'Nayara', 'Seandra', 'Demetrica', 'Kayton', 'Jalayna', 'Emanuelly', 'Dondra', 'Michaeleen', 'Aquinnah', 'Lakrisha', 'Latoia', 'Bernessia', 'Jaydaliz', 'Deona', 'Donyelle', 'Kearsten', 'Tashira', 'Kaisa', 'Korrin', 'Onelia', 'Shawntia', 'Faylene', 'Nafeesah', 'Synetta', 'Robertine', 'Krystn', 'Nyjae', 'Nijae', 'Cieara', 'Ellerie', 'Thomasenia', 'Tiki', 'Lougenia', 'Joeann', 'Marlyss', 'Saralee', 'Dayona', 'Alainna', 'Gennell', 'Berline', 'Latoiya', 'Eyvonne', 'Cherline', 'Tequesta', 'Loann', 'Kerstyn', 'Najmo', 'Shanitra', 'Marnice', 'Tamyah', 'Ave', 'Cierrah', 'Deborahann', 'Davette', 'Kennidy', 'Breelle', 'Lundon', 'Imoni', 'Shamyah', 'Lindia', 'Caylyn', 'Ghadeer', 'Amirrah', 'Arlayne', 'Norrine', 'Vondell', 'Ruqaya', 'Azariya', 'Narice', 'Glenadine', 'Lallie', 'Conola', 'Airlie', 'Lorelie', 'Levis', 'Sanyia', 'Mckaela', 'Arlina', 'Dellar', 'Zorianna', 'Zanyiah', 'Maleya', 'Niyana', 'Amonie', 'Aryia', 'Autie', 'Keileigh', 'Kyndel', 'Saliyah', 'Naziah', 'Bernette', 'Vona', 'Venie', 'Tyashia', 'Khaliya', 'Mckensie', 'Kerigan', 'Kaniah', 'Eria', 'Maziyah', 'Kiasia', 'Anice', 'Dera', 'Georgena', 'Ezelle', 'Eavan', 'Marlyne', 'Lovella', 'Westonia', 'Keniah', 'Janiaya', 'Mertice', 'Marget', 'Zyeria', 'Marquerite', 'Minha', 'Redonna', 'Deetta', 'Aiyla', 'Majel', 'Elnor', 'Deronda', 'Viona', 'Rosaleigh', 'Virgiline', 'Reeda', 'Minnah', 'Keerthi', 'Kaleyah', 'Myanna', 'Remas', 'Noralee', 'Idabelle', 'Albena', 'Ellory', 'Areej', 'Zariel', 'Laverle', 'Hjordis', 'Hilja', 'Ragna', 'Cordella', 'Irean', 'Ottilia', 'Gerane', 'Locklyn', 'Equilla', 'Dellie', 'Aarvi', 'Mardella', 'Leighanna', 'Theone', 'Ordella', 'Lidwina', 'Alyda', 'Arlyss', 'Evangelita', 'Hee', 'Cherell', 'Charelle', 'Shealynn', 'Anesha', 'Jasman', 'Stephie', 'Ok', 'Tacarra', 'Sharnita', 'Jessic', 'Dulcey', 'Natina', 'Sharvae', 'Nachelle', 'Jillane', 'Tarri', 'Ajena', 'Allexus', 'Labrenda', 'Pammy', 'Shemeika', 'Ysela', 'Meghin', 'Marketta', 'Porshe', 'Kayti', 'Taylour', 'Shavonte', 'Aivah', 'Khloi', 'Jerzie', 'Nikesha', 'Cherron', 'Coralynn', 'Alvita', 'Carlita', 'Albany', 'Deshawnda', 'Lacole', 'Lameeka', 'Mashawn', 'Kimyata', 'Keenya', 'Baya', 'Kiva', 'Samona', 'Meggin', 'Chanita', 'Danissa', 'Lileigh', 'Addeline', 'Shemeeka', 'Aprille', 'Donice', 'Tannisha', 'Angelette', 'Lakeita', 'Marcelyn', 'Lesta', 'Claudene', 'Marney', 'Tonyia', 'Nellora', 'Kimyetta', 'Ameliana', 'Electa', 'Sherl', 'Jeniece', 'Jawana', 'Errica', 'Braya', 'Titania', 'Guydra', 'Valeta', 'Danetta', 'Sharia', 'Hawraa', 'Danaja', 'Makalynn', 'Tayonna', 'Kyrene', 'Arieona', 'Dallie', 'Ruie', 'Ophia', 'Odella', 'Vessie', 'Offie', 'Evadean', 'Ample', 'Aleecia', 'Shakyla', 'Makynna', 'Lakyra', 'Korryn', 'Araina', 'Semiyah', 'Ndea', 'Areonna', 'Jasia', 'Xavia', 'Merikay', 'Keshara', 'Jennetta', 'Vergene', 'Wilodean', 'Wyona', 'Avonell', 'Datha', 'Ellar', 'Morene', 'Laverda', 'Loetta', 'Emmogene', 'Arbadella', 'Camaria', 'Rochella', 'Indiya', 'Shayma', 'Orneta', 'Clotene', 'Genoa', 'Lanyah', 'Oneda', 'Glendola', 'Rosala', 'Zelphia', 'Suda', 'Jerrilynn', 'Orlena', 'Lorella', 'Bernadean', 'Novice', 'Pheba', 'Rukaya', 'Gathel', 'Meron', 'Asianae', 'Arriel', 'Whisper', 'Talesha', 'Morgann', 'Madissen', 'Dajanay', 'Karil', 'Sherrita', 'Chery', 'Lezlee', 'Daytona', 'Raegen', 'Dalal', 'Majerle', 'Lama', 'Daijanae', 'Celicia', 'Cheril', 'Cornesha', 'Aniza', 'Clytie', 'Persis', 'Aino', 'Lawandra', 'Deshonda', 'Catrena', 'Temekia', 'Camella', 'Arnetra', 'Latoyna', 'Tekisha', 'Nalee', 'Jennife', 'Daphanie', 'Shewanda', 'Cheronda', 'Latayna', 'Almerinda', 'Danene', 'Jadwiga', 'Ellora', 'Tanga', 'Tamekka', 'Lashond', 'Shinika', 'Khyleigh', 'Baelyn', 'Clarene', 'Monyette', 'Lakisa', 'Audreyanna', 'Malayjah', 'Keia', 'Lajessica', 'Marquite', 'Odessia', 'Marketia', 'Malayshia', 'Laconya', 'Brayla', 'Germani', 'Luberdie', 'Angla', 'Cona', 'Katrinia', 'Shaletha', 'Eutha', 'Elmyra', 'Cleva', 'Elnore', 'Vila', 'Evone', 'Margert', 'Pairlee', 'Bernelle', 'Diannie', 'Alinda', 'Emerine', 'Rogena', 'Genette', 'Jearline', 'Estalee', 'Bertina', 'Cassand', 'Kisa', 'Veronic', 'Idalina', 'Walsie', 'Gwendol', 'Orvilla', 'Latonga', 'Elizabe', 'Bernece', 'Charlen', 'Dola', 'Alaija', 'Martia', 'Shanica', 'Shariya', 'Yuliya', 'Atleigh', 'Flannery', 'Loeta', 'Zakiah', 'Alayia', 'Glee', 'Embree', 'Kasidy', 'Zacaria', 'Derriona', 'Jakyria', 'Kiauna', 'Garnelle', 'Tyriana', 'Juliya', 'Maddisen', 'Auna', 'Jameisha', 'Lurleen', 'Kourtlyn', 'Chelan', 'Verlinda', 'Sherria', 'Alzada', 'Ketara', 'Anaka', 'Breion', 'Shadestiny', 'Shanterica', 'Tenia', 'Keiosha', 'Jamyriah', 'Jamyrie', 'Jalacia', 'Ronita', 'Maryln', 'Earsie', 'Kyri', 'Markiyah', 'Malajah', 'Alandria', 'Shaquitta', 'Raymona', 'Paeton', 'Yaritzy', 'Jonesha', 'Anda', 'Khadjah', 'Amyree', 'Vernestine', 'Lavetta', 'Jniya', 'Shakiyah', 'Aasia', 'Roniya', 'Keleigh', 'Makalyn', 'Kadasia', 'Johneisha', 'Jakaya', 'Kinzey', 'Wendelyn', 'Darielys', 'Wyteria', 'Yarieliz', 'Taysia', 'Carmya', 'Erionna', 'Shameria', 'Kearia', 'Graycie', 'Jurnie', 'Calypso', 'Finlee', 'Fynlee', 'Sophee', 'Lorali', 'Shacoria', 'Kadeejah', 'Lakira', 'Kelsay', 'Angelys', 'Moeshia', 'Keundra', 'Mayara', 'Josi', 'Annaluiza', 'Jacquese', 'Jillaine', 'Annajulia', 'Nayeliz', 'Maire', 'Jamonica', 'Jadalys', 'Missie', 'Machell', 'Liisa', 'Jalaine', 'Odester', 'Veria', 'Virda', 'Arleene', 'Cigi', 'Eloda', 'Kacelyn', 'Cidalia', 'Vadie', 'Wydell', 'Donnita', 'Lousie', 'Oreatha', 'Berdine', 'Cielita', 'Lilas', 'Verneda', 'Armelia', 'Glender', 'Elizbeth', 'Vanella', 'Florean', 'Vyolet', 'Albertia', 'Albirda', 'Sylva', 'Lakresha', 'Matha', 'Nerine', 'Dezzie', 'Lodell', 'Rosielee', 'Julane', 'Lodena', 'Brookley', 'Kynadi', 'Krymson', 'Etoile', 'Meighan', 'Izella', 'Jakaria', 'Jaleria', 'Clister', 'Alberdia', 'Zykeriah', 'Mileigh', 'Isola', 'Mamye', 'Eller', 'Kamoria', 'Lakelynn', 'Aslean', 'Bular', 'Emmaclaire', 'Dasie', 'Denotra', 'Everlene', 'Lynleigh', 'Iantha', 'Quinetta', 'Lillion', 'Sophronia', 'Japonica', 'Beauty', 'Pearlina', 'Evella', 'Jatana', 'Kechia', 'Conswella', 'Malissia', 'Alexina', 'Demeka', 'Muguette', 'Vaudine', 'Aprill', 'Villa', 'Florece', 'Tonjia', 'Bethania', 'Makinlee', 'Latondra', 'Audery', 'Ericia', 'Miyoshi', 'Betti', 'Harlym', 'Novelle', 'Liller', 'Pinkey', 'Narcille', 'Lasheika', 'Leonise', 'Lydie', 'Olla', 'Rejeanne', 'Athelene', 'Eloyse', 'Edolia', 'Clotile', 'Ethelrine', 'Devonda', 'Nakeshia', 'Tomesha', 'Orena', 'Karlyne', 'Enolia', 'Faynell', 'Margia', 'Marvelene', 'Justilia', 'Iceola', 'Shantina', 'Shinita', 'Loula', 'Ireta', 'Vanessia', 'Ramonia', 'Monita', 'Shalva', 'Ong', 'Remonia', 'Sheral', 'Angelean', 'Phyllistine', 'Brenetta', 'Madgeline', 'Zyairah', 'Anjolaoluwa', 'Clotiel', 'Eldine', 'Tylia', 'Ifeoluwa', 'Florestine', 'Althia', 'Ravonda', 'Tsion', 'Zyaira', 'Wylodene', 'Janesha', 'Vonciel', 'Ruthey', 'Khiana', 'Kadesia', 'Murdis', 'Zhana', 'Jillayne', 'Quatisha', 'Jaquasia', 'Michaila', 'Mashayla', 'Travia', 'Tyrika', 'Aldah', 'Makaiya', 'Maridee', 'Kyndell', 'Nykira', 'Mazell', 'Luecile', 'Quatasia', 'Khala', 'Sible', 'Jakera', 'Ovella', 'Lealer', 'Juleen', 'Rinette', 'Laykin', 'Ozite', 'Shaquanta', 'Quanetta', 'Shannyn', 'Lacrystal', 'Everline', 'Editha', 'Toneka', 'Reinette', 'Maclovia', 'Ledia', 'Shakeeka', 'Shakeeta', 'Taquanna', 'Miyisha', 'Patrecia', 'Wylodean', 'Solita', 'Dalisa', 'Jatoya', 'Texanna', 'Yvetta', 'Lectoria', 'Cyntrell', 'Monyae', 'Ibtisam', 'Miski', 'Renesha', 'Maelle', 'Azhar', 'Zamzam', 'Jamera', 'Tyranika', 'Ladan', 'Ruweyda', 'Jabrea', 'Sherrica', 'Clyda', 'Treniece', 'Fonnie', 'Bedie', 'Kewanda', 'Mozel', 'Tramika', 'Quessie', 'Tyshay', 'Ladasha', 'Kaarin', 'Mazzie', 'Genora', 'Monie', 'Muntas', 'Hayat', 'Jovanda', 'Appolonia', 'Cuma', 'Briante', 'Reneisha', 'Zenovia', 'Allysia', 'Aliene', 'Raini', 'Tyja', 'Iriel', 'Deshante', 'Shatira', 'Demri', 'Ajaysia', 'Ireon', 'Idil', 'Nawaal', 'Riham', 'Nyeisha', 'Jonique', 'Keneisha', 'Ravan', 'Khadra', 'Dawanna', 'Gavriella', 'Myrene', 'Jasamine', 'Brione', 'Earlisha', 'Dazia', 'Jalesia', 'Cabrina', 'Marieme', 'Gloristine', 'Cattie', 'Damilola', 'Evora', 'Almarie', 'Vauda', 'Tanzie', 'Truby', 'Tayona', 'Francelia', 'Brona', 'Jannice', 'Weltha', 'Phylliss', 'Vieva', 'Danera', 'Saratha', 'Colinda', 'Suzonne', 'Shelene', 'Shelda', 'Annye', 'Kaola', 'Modine', 'Velvie', 'Vetra', 'Tyrhonda', 'Malissie', 'Shemica', 'Rockell', 'Adgie', 'Lachanda', 'Kwanza', 'Keyanta', 'Hazeleen', 'Yarnell', 'Mettie', 'Kissie', 'Jawanna', 'Ilham', 'Enchantra', 'Lucielle', 'Salmo', 'Sabrin', 'Nicy', 'Rubell', 'Willet', 'Ronata', 'Semiko', 'Idman', 'Meoshia', 'Maie', 'Eulala', 'Tiyonna', 'Sabarin', 'Merlie', 'Oneka', 'Khiya', 'Geralene', 'Hubbie', 'Patches', 'Robenia', 'Carita', 'Veleka', 'Tamla', 'Zondra', 'Cheramie', 'Nimco', 'Chauntelle', 'Calonia', 'Mulki', 'Clydia', 'Glida', 'Fartun', 'Fardowsa', 'Iyona', 'Dwanna', 'Angila', 'Carletha', 'Blakley', 'Valecia', 'Songa', 'Shya', 'Kamber', 'Siah', 'Sloka', 'Sophiagrace', 'Sophiamarie', 'Setayesh', 'Roselie', 'Samhitha', 'Savreen', 'Zanayah', 'Yilia', 'Zareena', 'Yeilin', 'Ulyana', 'Tylie', 'Vaani', 'Vasilisa', 'Videl', 'Xylia', 'Rubylee', 'Jessye', 'Itasca', 'Bonifacia', 'Bennye', 'Estellene', 'Daycee', 'Vung', 'Babe', 'Lucyle', 'Laurencia', 'Frankye', 'Clariece', 'Alsace', 'Ernesteen', 'Zuma', 'Loleta', 'Matiana', 'Thyra', 'Thekla', 'Miladie', 'Moselle', 'Waldene', 'Thula', 'Ethelda', 'Elbira', 'Eddye', 'Lafaye', 'Beryle', 'Beanna', 'Basilisa', 'Bernardina', 'Vontressa', 'Elner', 'Gladine', 'Saketha', 'Nellene', 'Margurette', 'Levada', 'Alcie', 'Beuna', 'Miaa', 'Miia', 'Miral', 'Lunabella', 'Manvi', 'Nahlia', 'Quetzal', 'Preet', 'Navreet', 'Prajna', 'Analayah', 'Aalaya', 'Aaleah', 'Aaria', 'Aby', 'Adeena', 'Adelaine', 'Adhara', 'Alekhya', 'Avaline', 'Avina', 'Azaliah', 'Azayla', 'Anwita', 'Arna', 'Asmi', 'Cutina', 'Jaydalynn', 'Jerusalem', 'Hiyab', 'Icey', 'Jaanvi', 'Khalessi', 'Khiara', 'Leelah', 'Ketzaly', 'Kaliyanei', 'Karolynn', 'Kaylonnie', 'Harveen', 'Danilynn', 'Decklyn', 'Deleyza', 'Charm', 'Calina', 'Cathaleya', 'Dailynn', 'Corra', 'Cyrene', 'Eveleen', 'Fia', 'Galina', 'Gohar', 'Gursirat', 'Harleyquinn', 'Evalin', 'Eevee', 'Eira', 'Elara', 'Ellaina', 'Ellarose', 'Erabella', 'Teofila', 'Calamity', 'Sherion', 'Niang', 'Oreta', 'Leita', 'Maedelle', 'Othello', 'Meshell', 'Alfreida', 'Detria', 'Cloda', 'Ermine', 'Gertrudes', 'Zudora', 'Benigna', 'Dolorez', 'Narcissa', 'Eduviges', 'Dionisia', 'Crisanta', 'Adreena', 'Aivy', 'Sharanda', 'Amma', 'Danitra', 'Lashuna', 'Yasheka', 'Sheronica', 'Ameliya', 'Cayetana', 'Benancia', 'Tiya', 'Umaiza', 'Vicktoria', 'Vidushi', 'Yaretzie', 'Siennah', 'Sofiah', 'Stuti', 'Taitum', 'Yuli', 'Zarya', 'Zeriah', 'Sadiee', 'Rubee', 'Ryenn', 'Sayana', 'Ezabella', 'Galya', 'Hayzel', 'Evalette', 'Eleanna', 'Elize', 'Elleana', 'Hiya', 'Jezabelle', 'Jazzy', 'Jeraldin', 'Jocabed', 'Kaloni', 'Jazmeen', 'Jasmarie', 'Ilani', 'Ilany', 'Ariannie', 'Angelinne', 'Delaynie', 'Calise', 'Bethlehem', 'Cateleya', 'Paitynn', 'Peytin', 'Rainie', 'Rhylin', 'Rosaly', 'Nomi', 'Mirai', 'Moksha', 'Mylin', 'Nazeli', 'Nilani', 'Marcelene', 'Victorina', 'Laiah', 'Leeyah', 'Miaisabella', 'Ravleen', 'Lazara', 'Zuleidy', 'Shraddha', 'Simarpreet', 'Rinoa', 'Ridhima', 'Ryla', 'Ryleeann', 'Ryli', 'Sahori', 'Smrithi', 'Yeslin', 'Yanessa', 'Zeltzin', 'Sonakshi', 'Sophea', 'Carlissa', 'Bryttani', 'Albesa', 'Bonnye', 'Daksha', 'Terria', 'Davinah', 'Enalina', 'Evolette', 'Dhwani', 'Eleora', 'Leea', 'Lexii', 'Meilyn', 'Nevah', 'Noga', 'Prabhleen', 'Quinley', 'Mursal', 'Naiara', 'Navah', 'Izumi', 'Janelli', 'Jniyah', 'Klaryssa', 'Kritika', 'Laksmi', 'Lalani', 'Joselle', 'Kashish', 'Kenyana', 'Laquishia', 'Deshonna', 'Sentoria', 'Ernestene', 'Maxima', 'Senovia', 'Nestora', 'Valta', 'Casady', 'Daphene', 'Chonita', 'Omelia', 'Odena', 'Melchora', 'Quetzally', 'Thera', 'Gabina', 'Donaciana', 'Riddhima', 'Lakessa', 'Lakeeta', 'Katasha', 'Chaitra', 'Chizara', 'Aveyah', 'Elah', 'Eliannah', 'Ellanore', 'Emmalia', 'Dalexa', 'Delara', 'Donatella', 'Aubreanna', 'Aberdeen', 'Aerilyn', 'Aleksia', 'Annarose', 'Anthea', 'Aoi', 'Amberrose', 'Anaeli', 'Lilou', 'Lumen', 'Manasvi', 'Lillybeth', 'Keylani', 'Lenya', 'Lidya', 'Mulan', 'Nirvi', 'Ondine', 'Meenakshi', 'Mathea', 'Melyna', 'Io', 'Izelle', 'Jailia', 'Eztli', 'Gali', 'Hade', 'Hafsah', 'Hannahgrace', 'Kayleah', 'Kayleeann', 'Kemily', 'Jeylah', 'Jiaqi', 'Sherrika', 'Daffney', 'Solstice', 'Soriah', 'Sumayya', 'Saory', 'Shaily', 'Shanzay', 'Sharvi', 'Xylina', 'Yeimy', 'Yizel', 'Zaidee', 'Ziah', 'Jesucita', 'Madalena', 'Vontresa', 'Tangee', 'Shekina', 'Sista', 'Norvis', 'Winnell', 'Yoshida', 'Nikiya', 'Vidala', 'Shandria', 'Rozelle', 'Maragret', 'Sixta', 'Theta', 'Wylma', 'Jobita', 'Gaudalupe', 'Lurlean', 'Oveta', 'Heriberta', 'Bacilia', 'Senorina', 'Denika', 'Akeisha', 'Tamecia', 'Jera', 'Crestina', 'Shwanda', 'Kelbie', 'Sanayah', 'Zaliah', 'Nadezhda', 'Maaliyah', 'Mahaley', 'Raziyah', 'Saraiya', 'Cyriah', 'Chaniyah', 'Emmarae', 'Eleen', 'Ashland', 'Briniyah', 'Ainhoa', 'Aviyah', 'Atarah', 'Lutrelle', 'Clevie', 'Blossie', 'Cola', 'Para', 'Verdelle', 'Beddie', 'Lilliemae', 'Jurell', 'Bertice', 'Fozie', 'Oppie', 'Rozia', 'Rozie', 'Epsie', 'Karman', 'Estoria', 'Dynesha', 'Sarae', 'Xolani', 'Talyah', 'Zanaria', 'Zamiah', 'Starkeisha', 'Alys', 'Izaria', 'Cayenne', 'Damiah', 'Alwilda', 'Leoda', 'Yariah', 'Tuleen', 'Rhelda', 'Carlesha', 'Alfretta', 'Orma', 'Ornella', 'Nazyia', 'Samorah', 'Keyonni', 'Jeriyah', 'Jazariyah', 'Demaria', 'Mikeyla', 'Malania', 'Miyanna', 'Neriyah', 'Naelle', 'Lazariah', 'Rea', 'Annaya', 'Aleanna', 'Baylin', 'Aela', 'Emmilyn', 'Anila', 'Rodnesha', 'Janeliz', 'Kseniya', 'Nyana', 'Zemirah', 'Somya', 'Yanna', 'Terryn', 'Naika', 'Laiyla', 'Lyrica', 'Loralie', 'Lilya', 'Wonnie', 'Runelle', 'Tynleigh', 'Loralye', 'Arynn', 'Melvis', 'Akiyah', 'Matline', 'Ellean', 'Wylean', 'Marfa', 'Elliemae', 'Nancey', 'Waltina', 'Ommie', 'Lonia', 'Reaver', 'Virdell', 'Rosabell', 'Sarahgrace', 'Faustine', 'Euretha', 'Sussie', 'Rebie', 'Oveline', 'Reathel', 'Algia', 'Mylissa', 'Rethel', 'Nakyla', 'Necia', 'Deanie', 'Beckey', 'Yasmen', 'Yukari', 'Zamyra', 'Roselinda', 'Takeko', 'Vicke', 'Mckala', 'Hanae', 'Elley', 'Ellyssa', 'Geanna', 'Geetika', 'Elenoa', 'Elane', 'Deeya', 'Deviny', 'Genecis', 'Jasminerose', 'Ireri', 'Hailei', 'Hannya', 'Harshini', 'Holiday', 'Arista', 'Dannae', 'Melayna', 'Meleni', 'Mystique', 'Nathalya', 'Natsumi', 'Sharlize', 'Shine', 'Sindhu', 'Starlyn', 'Sarika', 'Sarine', 'Seleen', 'Khalea', 'Kirti', 'Jocilyn', 'Maille', 'Mariaceleste', 'Leelee', 'Leidi', 'Libertad', 'Lizvet', 'Kierstan', 'Adja', 'Debbye', 'Dorenda', 'Kiyono', 'Katsuko', 'Katsue', 'Misue', 'Umeno', 'Rayvin', 'Sachie', 'Kinue', 'Danajah', 'Denay', 'Tsuneko', 'Tamae', 'Saeko', 'Tsutako', 'Sumako', 'Momoe', 'Tomoko', 'Asae', 'Nautika', 'Kourtnee', 'Keauna', 'Maydeen', 'Chianne', 'Macala', 'Briaunna', 'Ceirra', 'Kimberlea', 'Normalinda', 'Milinda', 'Jonetta', 'Seleta', 'Chryl', 'Aaminah', 'Mersades', 'Mickenzie', 'Tahlor', 'Kimetha', 'Hopie', 'Guadulupe', 'Blakelynn', 'Orfelinda', 'Aubre', 'Ajayla', 'Makenlee', 'Journii', 'Janayla', 'Talulah', 'Siddhi', 'Shaira', 'Yuridiana', 'Yulitza', 'Tulsi', 'Yatana', 'Jaleya', 'Ayrianna', 'Damaya', 'Myana', 'Lanyiah', 'Kadince', 'Aunna', 'Avrielle', 'Khyli', 'Kariyah', 'Bralynn', 'Derrianna', 'Maryella', 'Charlynn', 'Ilma', 'Tresea', 'Mersadies', 'Macenzie', 'Terriona', 'Telia', 'Tamryn', 'Tahari', 'Solyana', 'Lyrical', 'Akie', 'Teruyo', 'Shizuyo', 'Tsuruyo', 'Daviona', 'Marshelia', 'Connye', 'Marka', 'Adelmira', 'Dorelia', 'Nirel', 'Oceanna', 'Neeka', 'Sherolyn', 'Sheralyn', 'Sharlet', 'Milenka', 'Astha', 'Angeleena', 'Anysia', 'Apoorva', 'Bryanah', 'Carolyna', 'Cecy', 'Anadalay', 'Akaylah', 'Aika', 'Aasha', 'Ahniah', 'Adelayda', 'Kyaira', 'Manmeet', 'Linsy', 'Malini', 'Mairany', 'Haeley', 'Evelen', 'Jezel', 'Jinelle', 'Joleena', 'Hikari', 'Inari', 'Itcel', 'Lokelani', 'Keikilani', 'Sherilynn', 'Jamieann', 'Lajuanna', 'Roselind', 'Rhetta', 'Alysah', 'Ameyalli', 'Abigayl', 'Aizza', 'Alaiza', 'Aslyn', 'Anjalee', 'Annaliza', 'Antara', 'Areen', 'Carra', 'Katieann', 'Kimla', 'Xan', 'Mikiala', 'Chrissa', 'Belanna', 'Ankitha', 'Celestial', 'Chiana', 'Akhila', 'Alique', 'Alyssamae', 'Betheny', 'Stepheny', 'Brittanyann', 'Adonna', 'Barbarella', 'Shalamar', 'Flecia', 'Dlisa', 'Anabelia', 'Velen', 'Xotchil', 'Yairis', 'Lytzy', 'Faizah', 'Eilleen', 'Elona', 'Esteffany', 'Jesyka', 'Jhovana', 'Jisell', 'Joclyn', 'Teel', 'Sundee', 'Mechell', 'Lisia', 'Nandita', 'Natalina', 'Nattalie', 'Rosaelena', 'Siclali', 'Skyllar', 'Taeya', 'Sadey', 'Sadira', 'Sanae', 'Serenah', 'Shamila', 'Brizza', 'Chalisa', 'Shakeela', 'Gordean', 'Akane', 'Akansha', 'Angeni', 'Annalina', 'Anushree', 'Allexa', 'Katelynd', 'Raenette', 'Airiel', 'Matina', 'Teira', 'Deatra', 'Darolyn', 'Hilliary', 'Roanna', 'Prissy', 'Monya', 'Armelinda', 'Ginnie', 'Darenda', 'Leslea', 'Marcedes', 'Jeweliana', 'Jewelissa', 'Josselyne', 'Lavanya', 'Koryn', 'Khushpreet', 'Kierah', 'Cyana', 'Deeana', 'Bibianna', 'Bryannah', 'Heidie', 'Desteni', 'Elleanna', 'Sierah', 'Sumedha', 'Shantall', 'Yarissa', 'Yerania', 'Tifanny', 'Mehek', 'Mirely', 'Mitra', 'Mar', 'Rohini', 'Prerana', 'Naizeth', 'Naydeli', 'Melveen', 'Moani', 'Endora', 'Jackquline', 'Stefanny', 'Tamanna', 'Sofija', 'Zitlalic', 'Ymani', 'Jumana', 'Kailene', 'Josephyne', 'Leiya', 'Letzy', 'Litsy', 'Lizbett', 'Lizveth', 'Jaiya', 'Dreanna', 'Celestia', 'Electra', 'Sevanna', 'Sidnie', 'Semone', 'Sharra', 'Sharlette', 'Selinda', 'Saumya', 'Meilan', 'Melea', 'Maleeha', 'Mitali', 'Rheana', 'Ruchi', 'Oasis', 'Preethi', 'Aungelique', 'Kristl', 'Tashala', 'Darcell', 'Rolinda', 'Toye', 'Shirlyn', 'Yvonda', 'Tymia', 'Oteka', 'Ladora', 'Deashia', 'Janautica', 'Sonnet', 'Sucely', 'Suriah', 'Tallula', 'Sanna', 'Seniyah', 'Seri', 'Yexalen', 'Yumiko', 'Zayana', 'Zohal', 'Valerye', 'Yarisbeth', 'Vivyana', 'Xela', 'Brithanny', 'Jasira', 'Jenessy', 'Jezebelle', 'Leahna', 'Leilanee', 'Leily', 'Kohana', 'Dorsa', 'Elanna', 'Caralyn', 'Erilyn', 'Halyn', 'Helayna', 'Lionor', 'Maela', 'Masha', 'Myley', 'Malaak', 'Malai', 'Mariapaula', 'Nathalye', 'Remie', 'Parnika', 'Neveen', 'Cherith', 'Orvella', 'Aurion', 'Shonterria', 'Natoria', 'Shaterria', 'Clo', 'Donnia', 'Cana', 'Niaya', 'Brelyn', 'Aalliyah', 'Shaaron', 'Doylene', 'Lowanda', 'Henryetta', 'Obera', 'Marykathryn', 'Dema', 'Arcadia', 'Lodema', 'Aloni', 'Analya', 'Aashritha', 'Ayani', 'Audreena', 'Audrena', 'Ariahna', 'Antonela', 'Atzi', 'Amunet', 'Jaala', 'Keambria', 'Kanaya', 'Emya', 'Deijah', 'Dayjah', 'Tiye', 'Nyja', 'Markesia', 'Valla', 'Cesaria', 'Eusevia', 'Elpidia', 'Jaquisha', 'Romanita', 'Shauntia', 'Chasmine', 'Deneisha', 'Quatesha', 'Nicosha', 'Shandricka', 'Shambria', 'Shakerra', 'Santrice', 'Quinesha', 'Shantika', 'Roderica', 'Whitnie', 'Piedad', 'Koleta', 'Brazil', 'Aamina', 'Adaleen', 'Adyline', 'Bricola', 'Analeigha', 'Anara', 'Ladawna', 'Ruperta', 'Deaundra', 'Jaleisa', 'Keria', 'Sharaine', 'Shanekqua', 'Shanekia', 'Kenyanna', 'Jacoria', 'Airianna', 'Amana', 'Amariz', 'Ammi', 'Miaya', 'Aaniya', 'Anaisha', 'Bellina', 'Annasofia', 'Archita', 'Arianie', 'Shaquandra', 'Shakeyra', 'Tiandra', 'Soveida', 'Gonzala', 'Gaylia', 'Freddye', 'Roxi', 'Neya', 'Nitika', 'Noriah', 'Raha', 'Briah', 'Syrah', 'Talise', 'Tarynn', 'Tianah', 'Solay', 'Saraiah', 'Sherlynn', 'Leylany', 'Lilu', 'Maelie', 'Lexxie', 'Monzeratt', 'Nari', 'Naveyah', 'Mianna', 'Maylea', 'Mery', 'Marene', 'Zeba', 'Xymena', 'Yaremi', 'Yari', 'Yulie', 'Lile', 'Dafnee', 'Indra', 'Itzelle', 'Evangaline', 'Evelett', 'Evely', 'Ghazal', 'Arnisha', 'Kassia', 'Kayah', 'Kalliyan', 'Diannia', 'Damyah', 'Torianna', 'Talasia', 'Zakira', 'Zyah', 'Masiya', 'Rhyanna', 'Kemaya', 'Jadasia', 'Kanijah', 'Henleigh', 'Ciella', 'Dayanne', 'Ivannia', 'Heydy', 'Fergie', 'Fianna', 'Goretti', 'Gwynneth', 'Gyanna', 'Haidi', 'Christabella', 'Angelinah', 'Anina', 'Annya', 'Alejah', 'Bradie', 'Breanah', 'Arihana', 'Aryona', 'Ashwika', 'Aylet', 'Ayleth', 'Meleena', 'Micel', 'Misel', 'Naiema', 'Meiling', 'Malaia', 'Rehanna', 'Raengel', 'Padma', 'Majestic', 'Katelen', 'Jenaveve', 'Jennessy', 'Jewelisa', 'Joelie', 'Lyliana', 'Mahati', 'Sherral', 'Kamariah', 'Larsen', 'Khaniya', 'Jakiah', 'Darionna', 'Bristal', 'Ahlana', 'Aireanna', 'Alaila', 'Jarethzy', 'Orfalinda', 'Nataliah', 'Nayra', 'Nishika', 'Meeya', 'Sanaia', 'Sensi', 'Percilla', 'Pranathi', 'Kathrynn', 'Katriel', 'Jordanna', 'Jessilyn', 'Jilliana', 'Madeira', 'Laia', 'Leala', 'Courtlynn', 'Ahriana', 'Aliena', 'Adalay', 'Nakyia', 'Niema', 'Leeasia', 'Evenny', 'Dorismar', 'Dyanara', 'Elonna', 'Estreya', 'Ashmita', 'Anureet', 'Angeliah', 'Annaliz', 'Dallanara', 'Danaly', 'Carely', 'Sevilla', 'Aleigh', 'Allianna', 'Alamar', 'Jaiah', 'Shellsea', 'Sheylin', 'Sonoma', 'Hayla', 'Yoali', 'Yzabel', 'Zeenat', 'Zienna', 'Shirlynn', 'Shilynn', 'Raphaella', 'Makyia', 'Inola', 'Omaria', 'Michiah', 'Anareli', 'Anacamila', 'Anahis', 'Anapaola', 'Clowie', 'Brizia', 'Alexssa', 'Ailanie', 'Aileene', 'Francille', 'Jatoria', 'Jaquitta', 'Sybol', 'Landra', 'Danyela', 'Cubia', 'Arabela', 'Adelfina', 'Quaniya', 'Paulyne', 'Vanteen', 'Treba', 'Kaylena', 'Kaelynne', 'Kalanie', 'Lezli', 'Lithzy', 'Lanessa', 'Laylene', 'Leilaney', 'Emmajean', 'Francella', 'Eiliyah', 'Jadey', 'Jamilett', 'Ingris', 'Tayanna', 'Skarlette', 'Sady', 'Senia', 'Yakeline', 'Yenna', 'Yesmin', 'Meily', 'Mikeila', 'Miu', 'Rakel', 'Niveah', 'Nyemah', 'Gorgeous', 'Zaraya', 'Lavaeh', 'Meila', 'Labella', 'Lilyona', 'Zykierra', 'Orfa', 'Seriyah', 'Shivali', 'Sibylla', 'Sua', 'Ulani', 'Vianet', 'Yanell', 'Yolette', 'Yudany', 'Suheidy', 'Sukhpreet', 'Syanna', 'Tatevik', 'Tayde', 'Sameria', 'Mikiya', 'Claramae', 'Audine', 'Francile', 'Tynia', 'Goddess', 'Samoria', 'Llana', 'Oveda', 'Amelya', 'Auda', 'Disaya', 'Zanyah', 'Samiyyah', 'Jaianna', 'Ruqayyah', 'Nakira', 'Shamirah', 'Ta', 'Giani', 'Brya', 'Cyani', 'Ashiyah', 'Kahli', 'Beauton', 'Kashay', 'Sadiyah', 'Mikaya', 'Nasira', 'Nasirah', 'Ariauna', 'Yasirah', 'Skyelynn', 'Naailah', 'Nyelle', 'Adessa', 'Ayriana', 'Mirielle', 'Munirah', 'Layani', 'Haniyah', 'Ovida', 'Haniyyah', 'Layonna', 'Jazmarie', 'Wicahpi', 'Cante', 'Zamyah', 'Tanyiah', 'Shalita', 'Salley', 'Jnya', 'Santasia', 'Shaneque', 'Quantina', 'Temeika', 'Narvis', 'Pearlee', 'Nykesha', 'Orrie', 'Mozter', 'Earthalee', 'Rozena', 'Anniebell', 'Hannie', 'Pretto', 'Caro', 'Everlina', 'Arnetha', 'Glenora', 'Asalee', 'Parniece', 'Rubena', 'Wilhemena', 'Perline', 'Elloree', 'Clorine', 'Richardean', 'Rovena', 'Arthuree', 'Mikea', 'Charnice', 'Tylashia', 'Rebacca', 'Caretha', 'Dynasti', 'Marvie', 'Hermenia', 'Tekela', 'Trenace', 'Valetta', 'Topaz', 'Debara', 'Jaquasha', 'Markeria', 'Alkeria', 'Salwa', 'Tatayana', 'Dianelys', 'Beyounce', 'Drena', 'Julysa', 'Shuntel', 'Antasia', 'Alyze', 'Marytheresa', 'Raechelle', 'Trevia', 'Tomara', 'Jermeka', 'Curtisha', 'Kebrina', 'Kayte', 'Shakeila', 'Ronnesha', 'Shavontae', 'Taquila', 'Shaquia', 'Lynnann', 'Markevia', 'Terrilynn', 'Carime', 'Quaneshia', 'Shaylen', 'Corneisha', 'Rodneshia', 'Nateria', 'Marycatherine', 'Ashlyne', 'Reyne', 'Natia', 'Taquisha', 'Mikeshia', 'Khadeja', 'Lismary', 'Prisca', 'Antwonette', 'Anesia', 'Clotilda', 'Willavene', 'Lovey', 'Aleda', 'Karita', 'Rakiyah', 'Nyasiah', 'Timaya', 'Gabryelle', 'Caniyah', 'Ethelreda', 'Aryelle', 'Trianna', 'Yesli', 'Yareliz', 'Tanyla', 'Keyshia', 'Makinsey', 'Daily', 'Caylynn', 'Kalyse', 'Sarabelle', 'Araminta', 'Magdelene', 'Kristalyn', 'Lianni', 'Layana', 'Haedyn', 'Teyona', 'Taziyah', 'Ranijah', 'Darneisha', 'Jahzaria', 'Palmyra', 'Altheda', 'Armanii', 'Blodwyn', 'Colletta', 'Yelenis', 'Yazlyn', 'Leira', 'Anaysia', 'Anayiah', 'Valia', 'Bambina', 'Burnetta', 'Clarabel', 'Philomenia', 'Lorma', 'Janeka', 'Danaisha', 'Cayci', 'Jermia', 'Idalys', 'Sarajane', 'Shakenya', 'Kashanti', 'Lanika', 'Ceira', 'Deshanti', 'Adianez', 'Alannis', 'Lubov', 'Aylana', 'Nephtalie', 'Harlean', 'Shelvey', 'Yalissa', 'Asianna', 'Jahnyah', 'Jahliyah', 'Ellissa', 'Gabrianna', 'Katonya', 'Elsia', 'Ketina', 'Kateena', 'Claudean', 'Chenita', 'Belkys', 'Kerryn', 'Teria', 'Charron', 'Charnissa', 'Alura', 'Bashirah', 'Gerldine', 'Katilynn', 'Trellany', 'Lacheryl', 'Twalla', 'Sharnise', 'Yoland', 'Shanai', 'Ikia', 'Aquilla', 'Shalandra', 'Nekesha', 'Sonni', 'Kutana', 'Sharnay', 'Timitra', 'Shareena', 'Tyeesha', 'Natara', 'Amatullah', 'Nydirah', 'Shahadah', 'Inetha', 'Clatie', 'Ladye', 'Makalia', 'Sabriyah', 'Graple', 'Lorell', 'Vercie', 'Rayona', 'Dayshia', 'Nakirah', 'Mcneva', 'Bunia', 'Brooxie', 'Delcia', 'Naje', 'Eilish', 'Lashara', 'Crystall', 'Shearon', 'Kafi', 'Kea', 'Shantrel', 'Jeanni', 'Andreia', 'Myrlande', 'Jennifier', 'Damika', 'Carloyn', 'Lashera', 'Kamika', 'Chrisann', 'Lashavia', 'Ivis', 'Quinisha', 'Yanelys', 'Taralee', 'Ibis', 'Jazma', 'Shakevia', 'Deneane', 'Kimala', 'Casee', 'Audreana', 'Shahida', 'Latangela', 'Lashira', 'Lashawndra', 'Sherrina', 'Shawntrell', 'Latronda', 'Meghaan', 'Ayasha', 'Raushanah', 'Serrita', 'Tennile', 'Keyonda', 'Idalmis', 'Telicia', 'Takeia', 'Aristea', 'Letesha', 'Badia', 'Nykea', 'Bilan', 'Ieva', 'Kimmi', 'Geniel', 'Tamberly', 'Tammee', 'Sherma', 'Emira', 'Agena', 'Carrin', 'Ladean', 'Caera', 'Shatha', 'Utahna', 'Lujean', 'Joylyn', 'Kathren', 'Kristiane', 'Lenee', 'Angi', 'Vichelle', 'Rochele', 'Shonnie', 'Anastasija', 'Clea', 'Myrlene', 'Dniyah', 'Tashanti', 'Sireen', 'Vincie', 'Wreatha', 'Josphine', 'Casimera', 'Hildagarde', 'Margeret', 'Grettell', 'Greenley', 'Gloriana', 'Eyleen', 'Evaleigh', 'Davanee', 'Corley', 'Liliah', 'Leanah', 'Kynzlie', 'Kynzleigh', 'Kolette', 'Lively', 'Makenlie', 'Lochlyn', 'Kinslie', 'Jleigh', 'Jeslynn', 'Jenisis', 'Jenisha', 'Kensli', 'Addalie', 'Demia', 'Cele', 'Aderinsola', 'Auriella', 'Blyss', 'Cashlynn', 'Callyn', 'Allyzon', 'Aleiya', 'Alazne', 'Alayzia', 'Ailah', 'Annora', 'Analynn', 'Leonilda', 'Minnette', 'Onolee', 'Michaelina', 'Rosemond', 'Milica', 'Ednamae', 'Floribel', 'Nur', 'Ndia', 'Thecla', 'Immaculate', 'Mayfred', 'Selda', 'Vincenzia', 'Vitina', 'Tammatha', 'Joley', 'Kelene', 'Kriste', 'Liese', 'Mariaemilia', 'Lasaundra', 'Letica', 'Karene', 'Devera', 'Denyce', 'Dawnn', 'Maryum', 'Giovannina', 'Roze', 'Reygan', 'Quinlyn', 'Stassi', 'Meelah', 'Novaleigh', 'Navey', 'Mirakle', 'Naiovy', 'Munachiso', 'Montzerrat', 'Misk', 'Mireyah', 'Temiloluwa', 'Zaiya', 'Varshini', 'Tiwatope', 'Tinlee', 'Geneve', 'Kotryna', 'Janila', 'Janeah', 'Mollye', 'Dody', 'Doreena', 'Chelle', 'Javaeh', 'Dim', 'Jamylah', 'Kamyia', 'Ramie', 'Kandie', 'Kitt', 'Gaylyn', 'Marji', 'Laurena', 'Lorre', 'Ronelle', 'Kresta', 'Jonylah', 'Kornelia', 'Mindie', 'Kendis', 'Dorri', 'Seaneen', 'Lorilyn', 'Lolly', 'Pati', 'Shalayne', 'Dorise', 'Joani', 'Yailene', 'Batool', 'Cyntha', 'Coni', 'Kae', 'Cynia', 'Rhonna', 'Lynnetta', 'Terrisa', 'Nishi', 'Delise', 'Ladena', 'Bronwen', 'Tere', 'Tippi', 'Peggi', 'Portland', 'Sherrin', 'Tacy', 'Terie', 'Dore', 'Daphane', 'Juliene', 'Kamile', 'Janeil', 'Megin', 'Shenandoah', 'Rashada', 'Disa', 'Elita', 'Kelee', 'Genee', 'Taneya', 'Storie', 'Sheza', 'Rielyn', 'Venicia', 'Zamyria', 'Yisell', 'Appollonia', 'Meryle', 'Frann', 'Lucyann', 'Clarivel', 'Marguarite', 'Nelsa', 'Reanetta', 'Roshaunda', 'Channie', 'Bathsheba', 'Jannessa', 'Jakaylah', 'Jesalyn', 'Ellyson', 'Hally', 'Haelyn', 'Gabbie', 'Emmerie', 'Makailyn', 'Maddi', 'Lirio', 'Lexee', 'Matalyn', 'Kenzee', 'Kenlei', 'Kaydi', 'Kynlei', 'Krissa', 'Adalin', 'Alayiah', 'Ellice', 'Caydee', 'Annalysa', 'Anisty', 'Abeni', 'Aliha', 'Aerith', 'Adrie', 'Peggyann', 'Pietrina', 'Amberlie', 'Dabria', 'Cylee', 'Amyriah', 'Ambry', 'Berkleigh', 'Azula', 'Zaryiah', 'Zanyia', 'Gerardine', 'Joycelynn', 'Jeslin', 'Kenzli', 'Keisi', 'Kayelynn', 'Jaselyn', 'Mckinnley', 'Maryse', 'Peightyn', 'Latausha', 'Lety', 'Tekia', 'Arasely', 'Arlynne', 'Noell', 'Patrcia', 'Morning', 'Meika', 'Tanda', 'Terasa', 'Tika', 'Roshon', 'Marlaine', 'Stephaie', 'Franne', 'Ewa', 'Tomeca', 'Chequita', 'Dierdra', 'Doriann', 'Tammika', 'Jeananne', 'Cythia', 'Laconda', 'Catiria', 'Migna', 'Latiesha', 'Sharin', 'Tekesha', 'Elga', 'Barbarajean', 'Ilena', 'Evett', 'Timiko', 'Kachina', 'Desere', 'Galadriel', 'Lynea', 'Laurajean', 'Rukiya', 'Sakara', 'Snezana', 'Tashonda', 'Orquidea', 'Myshia', 'Latrease', 'Monquie', 'Robina', 'Vesna', 'Faline', 'Glori', 'Jennel', 'Keyatta', 'Dimitria', 'Uzma', 'Lalia', 'Krystiana', 'Kaedynce', 'Juany', 'Kesley', 'Kennedee', 'Keeleigh', 'Paiten', 'Neelah', 'Naylee', 'Sairy', 'Rocsi', 'Mckenzey', 'Modesty', 'Abbiegail', 'Jasalyn', 'Genises', 'Emmory', 'Elisea', 'Dlaney', 'Haelee', 'Jadence', 'Audryana', 'Carizma', 'Josanne', 'Nashira', 'Meesha', 'Taneil', 'Sobeida', 'Zakyra', 'Syndee', 'Zipora', 'Amita', 'Bridie', 'Hilde', 'Aspasia', 'Yalexi', 'Tenleigh', 'Anjannette', 'Zniyah', 'Zayley', 'Kyerra', 'Lynnsey', 'Dashae', 'Jasha', 'Anjenette', 'Lelania', 'Mija', 'Lorrene', 'Shanyn', 'Shindana', 'Shamra', 'Dove', 'Drina', 'Caralee', 'Charmian', 'Katrine', 'Lagina', 'Jahna', 'Nesita', 'Teriana', 'Dajae', 'Kyiah', 'Keslyn', 'Kayelee', 'Kamberlyn', 'Raygen', 'Orchid', 'Maleigh', 'Mairim', 'Amily', 'Ameli', 'Alie', 'Adelai', 'Eniola', 'Enaya', 'Brealynn', 'Blakleigh', 'Ayelene', 'Camrie', 'Dianely', 'Delayne', 'Cortlyn', 'Jaylei', 'Jaycelynn', 'Jaleigha', 'Iviana', 'Kaedance', 'Jewelz', 'Jillianna', 'Faithlyn', 'Isabeau', 'Irany', 'Galiana', 'Makynzee', 'Maebry', 'Merit', 'Mckinzee', 'Kinzee', 'Kendrah', 'Laityn', 'Amberlin', 'Ahliyah', 'Raphaela', 'Ameri', 'Brecklynn', 'Cristabel', 'Annalucia', 'Avri', 'Averly', 'Shalia', 'Sheilla', 'Dejana', 'Tonnette', 'Tracia', 'Trese', 'Lalanya', 'Kristiann', 'Zunaira', 'Zinachidi', 'Xayla', 'Zaybree', 'Zanae', 'Xoey', 'Sirenity', 'Renesme', 'Raeley', 'Preslyn', 'Nyx', 'Nyelli', 'Rozalynn', 'Safaa', 'Abaigeal', 'Perle', 'Ersilia', 'Ethlyn', 'Dashanae', 'Dajana', 'Tahja', 'Shavona', 'Vernisha', 'Sunya', 'Zenorah', 'Dorota', 'Ramsha', 'Nirali', 'Najia', 'Maryclaire', 'Ismay', 'Alfonsina', 'Letizia', 'Lotta', 'Honore', 'Jamille', 'Kashe', 'Bonnielee', 'Lorelle', 'Gloriajean', 'Trenae', 'Tonesha', 'Maxene', 'Aliz', 'Annelyse', 'Avagrace', 'Adanelly', 'Dariella', 'Colbi', 'Tema', 'Marlea', 'Elleen', 'Veroncia', 'Shelina', 'Sundae', 'Jericca', 'Liduvina', 'Jenney', 'Pascha', 'Roshell', 'Marlies', 'Marny', 'Judithann', 'Nancylee', 'Freyda', 'Joyceann', 'Caroleann', 'Desirie', 'Christol', 'Shulamith', 'Marlise', 'Rocquel', 'Tamsen', 'Sukari', 'Tinna', 'Magdelena', 'Ruba', 'Patra', 'Erryn', 'Buffi', 'Chantil', 'Kerensa', 'Annastacia', 'Zailee', 'Lamika', 'Kashlynn', 'Jaedynn', 'Kaly', 'Paisyn', 'Seraiah', 'Mckenzye', 'Nhyla', 'Chandrika', 'Dawana', 'Elesha', 'Caryle', 'Karrin', 'Valency', 'Kianga', 'Shawndee', 'Tamasha', 'Rhodora', 'Shivangi', 'Vermont', 'Diasia', 'Aniyyah', 'Azhane', 'Katleyn', 'Tynetta', 'Negan', 'Marilyne', 'Leronia', 'Charmie', 'Lateefa', 'Hassanah', 'Louvinia', 'Shirly', 'Sanjna', 'Andelyn', 'Jaima', 'Aftyn', 'Atira', 'Weslie', 'Tayzlee', 'Rossi', 'Nayvie', 'Livvy', 'Brinklee', 'Drinda', 'Nazirah', 'Krithika', 'Taisley', 'Starlee', 'Bijal', 'Hiral', 'Gwynn', 'Orlene', 'Maurene', 'Sweta', 'Naasia', 'Luvinia', 'Sayoko', 'Geannie', 'Rupal', 'Zerlina', 'Nobu', 'Taeko', 'Miye', 'Carnation', 'Joplin', 'Yayeko', 'Sakaye', 'Ernell', 'Tazuko', 'Bayyinah', 'Konstantina', 'Danuta', 'Cariann', 'Charnette', 'Michiye', 'Tejal', 'Shaheedah', 'Zakkiyya', 'Latoyah', 'Audre', 'Tayeko', 'Qadriyyah', 'Nikema', 'Wadeeah', 'Quanika', 'Fareeda', 'Ivelis', 'Karigan', 'Yayoi', 'Tauni', 'Shailee', 'Ronnah', 'Roseana', 'Rosalita', 'Orlidia', 'Mckall', 'Seattle', 'Lauree', 'Georgi', 'Jacolyn', 'Meichele', 'Starlet', 'Shandee', 'Miquelle', 'Cathe', 'Nondas', 'Roben', 'Manette', 'Monzelle', 'Genieve', 'Rumaysa', 'Dariya', 'Brynnleigh', 'Vicci', 'Sharli', 'Chandi', 'Guadelupe', 'Jamilyn', 'Willadene', 'Centhia', 'Cheryal', 'Normalee', 'Wilmajean', 'Roanne', 'Dyane', 'Jinx', 'Jorene', 'Ceceilia', 'Arikka', 'Latanza', 'Lacinda', 'Rus', 'Sangeeta', 'Demita', 'Jerene', 'Marcellina', 'Zani', 'Izzabelle', 'Graycee', 'Sajada', 'Quinlee', 'Brooklee', 'Shulamis', 'Bunnie', 'Michaelyn', 'Dhruvi', 'Sreeja', 'Tzipa', 'Doreene', 'Bedelia', 'Eutimia', 'Tomacita', 'Jerra', 'Rosela', 'Ignacita', 'Conferina', 'Andreita', 'Lugardita', 'Estefanita', 'Suetta', 'Debbe', 'Amadita', 'Mardel', 'Mliss', 'Korla', 'Felipita', 'Erminda', 'Chrys', 'Karthika', 'Guilianna', 'Chasya', 'Bryndee', 'Taeler', 'Sinforosa', 'Brinnley', 'Aviya', 'Jayma', 'Zimal', 'Vivia', 'Arielis', 'Arshiya', 'Adiba', 'Afreen', 'Ajooni', 'Alianny', 'Fariza', 'Breina', 'Sila', 'Aaima', 'Amesha', 'Antigone', 'Kayse', 'Aurelie', 'Marianny', 'Naba', 'Salimata', 'Retal', 'Pema', 'Pesha', 'Reemas', 'Emunah', 'Farzeen', 'Safina', 'Sema', 'Seynabou', 'Roza', 'Romaisa', 'Yehudit', 'Tzivi', 'Tzivy', 'Zahro', 'Jeylen', 'Klea', 'Namirah', 'Lamiah', 'Mahjabeen', 'Daielle', 'Ogechi', 'Laresha', 'Laqueta', 'Anayla', 'Bashy', 'Naeema', 'Sarrinah', 'Sevinch', 'Frimmy', 'Hibba', 'Fajr', 'Rayhona', 'Rokia', 'Wafa', 'Britne', 'Crystalann', 'Reah', 'Maggi', 'Lenae', 'Kambra', 'Tabita', 'Tamlyn', 'Thuytien', 'Titianna', 'Trenisha', 'Yuan', 'Yarithza', 'Yarixa', 'Satin', 'Elizeth', 'Gabiela', 'Jackline', 'Janisa', 'Graviela', 'Gudalupe', 'Hena', 'Bryanda', 'Avilene', 'Ayerim', 'Breiana', 'Nicoleanne', 'Merisa', 'Relina', 'Rebecah', 'Rachyl', 'Kasaundra', 'Katryn', 'Jeaneth', 'Jenah', 'Jocely', 'Jorgina', 'Lindsee', 'Lizvette', 'Oleen', 'Waveline', 'Laurabelle', 'Charma', 'Gleneva', 'Yesika', 'Felina', 'Nguyet', 'Krissie', 'Silvina', 'Stephanny', 'Teera', 'Kristol', 'Karisha', 'Lorisa', 'Iracema', 'Temesha', 'Tamber', 'Shelisa', 'Roshana', 'Rheannon', 'Amala', 'Anabelen', 'Daizhane', 'Darbie', 'Dezaree', 'Dezhane', 'Carrina', 'Chessa', 'Christinejoy', 'Aliea', 'Adalhi', 'Alexandrina', 'Abrina', 'Madaleine', 'Maressa', 'Marki', 'Koryna', 'Lilibet', 'Mystic', 'Neyra', 'Ivonna', 'Jenalyn', 'Truc', 'Berneta', 'Quinci', 'Rachelanne', 'Raylina', 'Nykole', 'Stephaney', 'Seleni', 'Marvene', 'Melizza', 'Aimme', 'Anaissa', 'Anhelica', 'Celyna', 'Azalie', 'Bereniz', 'Meliss', 'Leanza', 'Lenina', 'Karrina', 'Kalynne', 'Kanwal', 'Kazzandra', 'Mandalyn', 'Limairy', 'Lizzete', 'Lyly', 'Coua', 'Icsel', 'Izamary', 'Lakindra', 'Rosezella', 'Wilhelmine', 'Clela', 'Marvelle', 'Jenafer', 'Katye', 'Eliabeth', 'Angelicamaria', 'Adrieanna', 'Caludia', 'Caycee', 'Chenay', 'Cherika', 'Arpine', 'Kimberlyanne', 'Jully', 'Jyoti', 'Mariha', 'Meganelizabeth', 'Melysa', 'Lashanay', 'Jericha', 'Eliset', 'Esmirna', 'Clarie', 'Conny', 'Derrisha', 'Frania', 'Jeena', 'Gresia', 'Hlee', 'Emanie', 'Liany', 'Aisatou', 'Ashya', 'Nefertari', 'Nyanna', 'Mariem', 'Michellee', 'Amenda', 'Markella', 'Kiyara', 'Issamar', 'Cecilee', 'Rehana', 'Nube', 'Simy', 'Laneshia', 'Vasthi', 'Treanna', 'Tria', 'Tuongvi', 'Brany', 'Niza', 'Shandale', 'Shanley', 'Shastina', 'Sheyna', 'Ronniesha', 'Rubit', 'Ruvi', 'Siobhain', 'Shauntal', 'Linzie', 'Linzi', 'Fatimatou', 'Efrat', 'Jasmely', 'Kadidia', 'Kamily', 'Meirav', 'Areebah', 'Fatim', 'Nuzhat', 'Saribel', 'Zorah', 'Ting', 'Laporscha', 'Mieshia', 'Vanecia', 'Brittne', 'Denetria', 'Deamber', 'Cymone', 'Arieal', 'Araly', 'Shamieka', 'Deshay', 'Britainy', 'Matraca', 'Krystyne', 'Kristela', 'Kindell', 'Ceyda', 'Jahnasia', 'Halimatou', 'Graciana', 'Haja', 'Safiatou', 'Su', 'Zaineb', 'Yianna', 'Shilat', 'Zanai', 'Zeinabou', 'Jalysa', 'Garcia', 'Jinna', 'Brytni', 'Crystalmarie', 'Kyrstie', 'Labrea', 'Laurita', 'Kathleena', 'Salimatou', 'Martisha', 'Damisha', 'Londin', 'Toree', 'Yadria', 'Yaminah', 'Nili', 'Pella', 'Menna', 'Minah', 'Porshay', 'Rahwa', 'Parissa', 'Nury', 'Sheeva', 'Sendi', 'Aroush', 'Jerlyn', 'Momina', 'Nylia', 'Mahreen', 'Mattingly', 'Emanuella', 'Ceylin', 'Biana', 'Ishrat', 'Genendy', 'Hindel', 'Chavi', 'Freidy', 'Rouguiatou', 'Osnas', 'Yagmur', 'Yitel', 'Hudy', 'Jamielynn', 'Valyncia', 'Cheyla', 'Assa', 'Tasmia', 'Yaslene', 'Zaima', 'Jenisse', 'Juliannah', 'Reveca', 'Amra', 'Anaria', 'Arlenis', 'Anastassia', 'Anique', 'Arilene', 'Adileni', 'Chelcy', 'Chelesa', 'Columba', 'Corri', 'Briane', 'Carine', 'Deziah', 'Jojo', 'Jaidalyn', 'Cecelie', 'Meagon', 'Raysha', 'Mylinh', 'Madelena', 'Saniyya', 'Shama', 'Shifa', 'Nyala', 'Lafaun', 'Ronnetta', 'Rondia', 'Christe', 'Tynnetta', 'Ethyle', 'Bobi', 'Rayetta', 'Wilmina', 'Tangala', 'Chloris', 'Marvyl', 'Larinda', 'Narcedalia', 'Tiaa', 'Terressa', 'Missi', 'Ardythe', 'Briget', 'Julya', 'Emilyanne', 'Ayano', 'Eliane', 'Tatem', 'Roselani', 'Zareen', 'Yaxeni', 'Marleena', 'Nicolemarie', 'Patzy', 'Morgana', 'Mirca', 'Mystica', 'Rosaicela', 'Rosaysela', 'Serrena', 'Shiori', 'Yannely', 'Threasa', 'Zohra', 'Lanitra', 'Laquinthia', 'Deshundra', 'Mirasol', 'Lladira', 'Tejuana', 'Michaelann', 'Normajean', 'Leasha', 'Kajuana', 'Xianna', 'Yaquelyn', 'Marcea', 'Mohini', 'Jaysha', 'Saysha', 'Makamae', 'Lynnett', 'Mistee', 'Kaysee', 'Lizel', 'Kiora', 'Kla', 'Lanay', 'Kainani', 'Pomaikai', 'Piilani', 'Aulii', 'Khristi', 'Delfa', 'Toka', 'Satonya', 'Jammi', 'Iolani', 'Hinaea', 'Ilihia', 'Kulia', 'Darcus', 'Raejean', 'Brisamar', 'Francessca', 'Dhamar', 'Lehiwa', 'Ajane', 'Alexsys', 'Jema', 'Imara', 'Itzanami', 'Ivori', 'Tabby', 'Charnell', 'Vanessamarie', 'Vibiana', 'Kameisha', 'Edica', 'Shanetra', 'Shametria', 'Quinette', 'Abreanna', 'Corazon', 'Correna', 'Lilac', 'Najwa', 'Moranda', 'Monik', 'Deise', 'Edid', 'Karinne', 'Ilsa', 'Irazema', 'Pegge', 'Chenique', 'Temisha', 'Cristella', 'Christle', 'Falan', 'Mekesha', 'Jonquil', 'Latarya', 'Maretta', 'Sonceria', 'Latamara', 'Ladina', 'Rozann', 'Suz', 'Aleja', 'Wray', 'Indica', 'Harkiran', 'Gemini', 'Erikah', 'Fey', 'Gudelia', 'Komalpreet', 'Anah', 'Angelicamarie', 'Cammi', 'Dejane', 'Dejanay', 'Cilicia', 'Merla', 'Janann', 'Maurita', 'Aireana', 'Shuronda', 'Shunte', 'Lacrisha', 'Kwana', 'Krisi', 'Kaysi', 'Latressa', 'Tyronza', 'Debralee', 'Crissie', 'Crissa', 'Jameca', 'Alicha', 'Ketra', 'Chrisie', 'Delecia', 'Rokisha', 'Natoshia', 'Shajuana', 'Jenipher', 'Jenefer', 'Anjanae', 'Azita', 'Clairissa', 'Brezhane', 'Keera', 'Siarah', 'Smita', 'Savonna', 'Raquelin', 'Lorren', 'Omunique', 'Molina', 'Nixaliz', 'Melitza', 'Shylo', 'Teniqua', 'Charmine', 'Deonne', 'Kima', 'Galit', 'Ikesha', 'Jamala', 'Cherl', 'Ageliki', 'Ydania', 'Kortlyn', 'Lisvet', 'Khya', 'Kearstyn', 'Seline', 'Stormey', 'Rehma', 'Mckynna', 'Brynnan', 'Abiola', 'Ambriel', 'Akaysha', 'Hailea', 'Fryda', 'Fedra', 'Dacie', 'Deissy', 'Deyna', 'Mayling', 'Tessy', 'Yaa', 'Shameca', 'Shivon', 'Taesha', 'Dinamarie', 'Ifeoma', 'Ashlye', 'Patriciajo', 'Danute', 'Amalyn', 'Nakeia', 'Takima', 'Shavonn', 'Katira', 'Lakema', 'Jahaida', 'Marshelle', 'Angeliki', 'Carrianne', 'Carrieanne', 'Tarika', 'Sherece', 'Kalimah', 'Kinda', 'Sadiga', 'Paraskevi', 'Ayianna', 'Alezay', 'Cadynce', 'Haely', 'Heavenleigh', 'Dajanique', 'Lasharn', 'Drita', 'Genene', 'Gittle', 'Carriann', 'Emerita', 'Jenniferann', 'Kammie', 'Bryony', 'Rupinder', 'Tenise', 'Yazmyn', 'Maricris', 'Rhianon', 'Nicolet', 'Mui', 'Nacy', 'Naoko', 'Gaila', 'Charene', 'Bas', 'Geni', 'Lorez', 'Taneeka', 'Tanikqua', 'Tulani', 'Sotiria', 'Sheeba', 'Katiuscia', 'Eleftheria', 'Ghislaine', 'Jamiylah', 'Omotayo', 'Yuleidy', 'Tylene', 'Zanetta', 'Yizza', 'Ngan', 'Natassha', 'Sophear', 'Starkisha', 'Stehanie', 'Jasie', 'Aprile', 'Billiejean', 'Wilnelia', 'Yaasmiyn', 'Ednita', 'Engracia', 'Grisell', 'Christinamarie', 'Eftihia', 'Jenniefer', 'Chantee', 'Afua', 'Shamea', 'Shamina', 'Vickiana', 'Sharoya', 'Shateema', 'Aubrea', 'Alexcis', 'Wallis', 'Jalyne', 'Harlea', 'Carisia', 'Cheynne', 'Daylee', 'Kyera', 'Latayvia', 'Raashida', 'Saajida', 'Nakema', 'Annalyssa', 'Chivonne', 'Lyndie', 'Sabrian', 'Rahcel', 'Hoai', 'Krisann', 'Jilliane', 'Saide', 'Matti', 'Raigen', 'Tenea', 'Staphanie', 'Zitlally', 'Yudelca', 'Raysa', 'Monea', 'Shanigua', 'Shirah', 'Chemise', 'Jajaira', 'Tunisha', 'Yelissa', 'Yudelka', 'Taria', 'Taralynn', 'Condol', 'Nikima', 'Syrianna', 'Anndrea', 'Charae', 'Ebelia', 'Comfort', 'Denishia', 'Lanyia', 'Lahna', 'Iraima', 'Josaline', 'Onyinyechi', 'Mykalah', 'Shamyia', 'Sarely', 'Makaylie', 'Madasyn', 'Carron', 'Shawnetta', 'Dorca', 'Subrena', 'Romanda', 'Sallyanne', 'Ahniyah', 'Annalissa', 'Anikah', 'Anet', 'Emelee', 'Branae', 'Rosemaria', 'Kimerly', 'Lorra', 'Breda', 'Graceanne', 'Kathyann', 'Letetia', 'Allaina', 'Anaceli', 'Brendalee', 'Aidaly', 'Arlana', 'Trinetta', 'Tennesha', 'Talonda', 'Sherrilynn', 'Maloree', 'Laiya', 'Kynlea', 'Ludwika', 'Raeli', 'Yadirah', 'Yveth', 'Sabrie', 'Dannielynn', 'Breely', 'Jozlin', 'Jewelyssa', 'Keylie', 'Jazzalyn', 'Ijeoma', 'Jaydie', 'Irianna', 'Ronya', 'Lynee', 'Myrian', 'Cristalle', 'Delinah', 'Arnetia', 'Guisela', 'Orna', 'Samehesha', 'Scherrie', 'Marylynne', 'Judianne', 'Tomasina', 'Sanora', 'Cheray', 'Gordana', 'Torina', 'Yolandra', 'Tyese', 'Sharine', 'Marea', 'Areti', 'Sharmila', 'Charrise', 'Cyndia', 'Cinzia', 'Gecenia', 'Tarshia', 'Luwanda', 'Negar', 'Sharah', 'Sherah', 'Sokha', 'Marium', 'Taslin', 'Taleyah', 'Parys', 'Odeth', 'Mirabai', 'Myree', 'Tyhesha', 'Soyini', 'Liria', 'Jenille', 'Marivic', 'Mey', 'Adrena', 'Cristyn', 'Jodette', 'Ilea', 'Jennett', 'Latoi', 'Charrisse', 'Correne', 'Reannon', 'Shanah', 'Shavaun', 'Shelena', 'Macrina', 'Lashonna', 'Tecia', 'Zobeida', 'Casilda', 'Ketsy', 'Lizza', 'Lucesita', 'Anelis', 'Amori', 'Atlantis', 'Aslynn', 'Kimbery', 'Yolunda', 'Pasqua', 'Magalis', 'Yanellie', 'Tryniti', 'Tniya', 'Ziza', 'Nadina', 'Lloana', 'Shoshannah', 'Tamarie', 'Ronique', 'Keatyn', 'Matison', 'Micalah', 'Nataya', 'Mama', 'Bailea', 'Sidrah', 'Jazzman', 'Deanndra', 'Shawniece', 'Polett', 'Rathana', 'Timisha', 'Tristina', 'Vanezza', 'Shiri', 'Stephanieann', 'Genessy', 'Hema', 'Huma', 'Alessandria', 'Yarisa', 'Oyindamola', 'Tianni', 'Monasia', 'Kely', 'Khady', 'Pegah', 'Casarah', 'Cassara', 'Chalise', 'Arti', 'Natanya', 'Masuma', 'Shellyann', 'Taje', 'Saher', 'Kelsye', 'Odaly', 'Talicia', 'Mollee', 'Tashea', 'Shima', 'Janaia', 'Jenia', 'Jharline', 'Chabely', 'Chalon', 'Charnesha', 'Christna', 'Melika', 'Melis', 'Lesleyann', 'Maleeka', 'Krystalyn', 'Krystalynn', 'Marnisha', 'Mariele', 'Michelleann', 'Melessa', 'Diasy', 'Dioselina', 'Jenita', 'Jaynae', 'Jeanae', 'Hripsime', 'Janete', 'Lanique', 'Ashlon', 'Aroosa', 'Enisa', 'Danaysha', 'Briani', 'Arjeta', 'Sapir', 'Naysha', 'Kharisma', 'Laterra', 'Yannet', 'Aruna', 'Anaja', 'Fahima', 'Dasmine', 'Amberlea', 'Latiera', 'Kimanh', 'Mayuri', 'Meshelle', 'Morgane', 'Nahal', 'Mariacristina', 'Marlisha', 'Elaura', 'Kacia', 'Neesha', 'Tila', 'Waynisha', 'Witney', 'Niloofar', 'Solina', 'Soo', 'Stphanie', 'Shanesha', 'Sharrell', 'Nene', 'Bleona', 'Hudes', 'Isatu', 'Aylssa', 'Camerina', 'Arrielle', 'Allycia', 'Anacecilia', 'Anairis', 'Courney', 'Dashanique', 'Cedrina', 'Celida', 'Taaliyah', 'Clarrissa', 'Egla', 'Duyen', 'Kendle', 'Janil', 'Adeola', 'Jazmene', 'Leesha', 'Lyzeth', 'Madeley', 'Khrystyna', 'Charisa', 'Crystelle', 'Carinna', 'Channy', 'Flory', 'Glenisha', 'Sheida', 'Naara', 'Nassim', 'Ngozi', 'Nidya', 'Marche', 'Mariaesther', 'Shaleena', 'Kioni', 'Nayab', 'Nzinga', 'Fizza', 'Diavion', 'Zanib', 'Tionni', 'Temitope', 'Nasreen', 'Melaysia', 'Maame', 'Sameen', 'Azka', 'Basma', 'Virjean', 'Jarmila', 'Louren', 'Mckenize', 'Malyn', 'Mercadies', 'Vika', 'Suong', 'Mariadel', 'Mariatheresa', 'Marison', 'Meleane', 'Shabana', 'Salote', 'Raquell', 'Rekha', 'Sibel', 'Shavaughn', 'Shaquoia', 'Shatera', 'Fatina', 'Jestina', 'Latasia', 'Geraldin', 'Shirleymae', 'Lubna', 'Maxiel', 'Naquasha', 'Dalissa', 'Chaniqua', 'Chanele', 'Jahlisa', 'Faatimah', 'Abagayle', 'Adwoa', 'Angeliqu', 'Gelisa', 'Bradi', 'Shantice', 'Sharece', 'Nyiesha', 'Yanill', 'Yocasta', 'Stepheni', 'Suleika', 'Takeema', 'Kerrilyn', 'Jamiyla', 'Josephin', 'Margarit', 'Ilaisaane', 'Jamilee', 'Corvette', 'Janitza', 'Lexey', 'Jazzmyne', 'Kirstan', 'Kattia', 'Yatzary', 'Pricsilla', 'Gisette', 'Panayiota', 'Pinar', 'Rasheida', 'Tiffay', 'Venisha', 'Jennier', 'Margulia', 'Katima', 'Anjoli', 'Evelise', 'Chetara', 'Jaquelynn', 'Pessie', 'Quintessa', 'Orit', 'Nelissa', 'Shekia', 'Sherrise', 'Abbye', 'Imagine', 'Britlyn', 'Baley', 'Tanequa', 'Tanique', 'Nocole', 'Sokhom', 'Krystelle', 'Marqui', 'Mariaangelica', 'Raiven', 'Nini', 'Lesliee', 'Crystalee', 'Amadi', 'Suzett', 'Thelda', 'Wladyslawa', 'Shaqueen', 'Shayra', 'Domingue', 'Garine', 'Johnanna', 'Karia', 'Jany', 'Ardele', 'Bilma', 'Lindita', 'Lisbel', 'Lyasia', 'Kianie', 'Saidah', 'Niasha', 'Chantele', 'Brette', 'Cydnie', 'Chealsea', 'Jaritsa', 'Hanaa', 'Jordain', 'Kerria', 'Shannara', 'Shaquna', 'Sultana', 'Tajana', 'Taquasha', 'Queenasia', 'Wandalee', 'Mikalyn', 'Jossette', 'Jazsmine', 'Keairra', 'Arleny', 'Selest', 'Sabryn', 'Jilliann', 'Janin', 'Kayliegh', 'Alyss', 'Asuka', 'Chenin', 'Eiliana', 'Fahm', 'Cyndle', 'Daniesha', 'Saranda', 'Shany', 'Veridiana', 'Yanai', 'Melanieann', 'Mishell', 'Mariadelosangel', 'Rupa', 'Orabelle', 'Taquasia', 'Tyquasia', 'Cecillia', 'Jeanet', 'Lucely', 'Kar', 'Niaja', 'Naquana', 'Joanny', 'Anjelique', 'Aquasia', 'Ardita', 'Jatasia', 'Donika', 'Fantasha', 'Dominiqua', 'Elecia', 'Deyra', 'Erial', 'Bayle', 'Ninoska', 'Jonee', 'Jullisa', 'Lavasia', 'Laniqua', ]; export const maxStringLength = 15; ================================================ FILE: drizzle-seed/src/datasets/jobsTitles.ts ================================================ /** * The original source for the job titles data was taken from https://www.kaggle.com/datasets/ravindrasinghrana/job-description-dataset */ export default [ 'Digital marketing specialist', 'Web developer', 'Operations manager', 'Network engineer', 'Event manager', 'Software tester', 'Teacher', 'Ux/ui designer', 'Wedding planner', 'Qa analyst', 'Litigation attorney', 'Mechanical engineer', 'Network administrator', 'Account manager', 'Brand manager', 'Social worker', 'Social media coordinator', 'Email marketing specialist', 'Hr generalist', 'Legal assistant', 'Nurse practitioner', 'Account director', 'Software engineer', 'Purchasing agent', 'Sales consultant', 'Civil engineer', 'Network security specialist', 'Ui developer', 'Financial planner', 'Event planner', 'Psychologist', 'Electrical designer', 'Data analyst', 'Technical writer', 'Tax consultant', 'Account executive', 'Systems administrator', 'Database administrator', 'Research analyst', 'Data entry clerk', 'Registered nurse', 'Investment analyst', 'Speech therapist', 'Sales manager', 'Landscape architect', 'Key account manager', 'Ux researcher', 'Investment banker', 'It support specialist', 'Art director', 'Software developer', 'Project manager', 'Customer service manager', 'Procurement manager', 'Substance abuse counselor', 'Supply chain analyst', 'Data engineer', 'Accountant', 'Sales representative', 'Environmental consultant', 'Electrical engineer', 'Systems engineer', 'Art teacher', 'Human resources manager', 'Inventory analyst', 'Legal counsel', 'Database developer', 'Procurement specialist', 'Systems analyst', 'Copywriter', 'Content writer', 'Hr coordinator', 'Business development manager', 'Java developer', 'Supply chain manager', 'Event coordinator', 'Family nurse practitioner', 'Front-end engineer', 'Customer success manager', 'Procurement coordinator', 'Urban planner', 'Architectural designer', 'Financial analyst', 'Environmental engineer', 'Back-end developer', 'Structural engineer', 'Market research analyst', 'Customer service representative', 'Customer support specialist', 'Business analyst', 'Social media manager', 'Family lawyer', 'Chemical analyst', 'Network technician', 'Interior designer', 'Software architect', 'Nurse manager', 'Veterinarian', 'Process engineer', 'It manager', 'Quality assurance analyst', 'Pharmaceutical sales representative', 'Office manager', 'Architect', 'Physician assistant', 'Marketing director', 'Front-end developer', 'Research scientist', 'Executive assistant', 'Hr manager', 'Marketing manager', 'Public relations specialist', 'Financial controller', 'Investment advisor', 'Aerospace engineer', 'Marketing analyst', 'Paralegal', 'Landscape designer', 'Web designer', 'Occupational therapist', 'Legal advisor', 'Marketing coordinator', 'Dental hygienist', 'Sem specialist', 'Seo specialist', 'Pediatrician', 'Qa engineer', 'Data scientist', 'Financial advisor', 'Personal assistant', 'Seo analyst', 'Network analyst', 'Mechanical designer', 'Marketing specialist', 'Graphic designer', 'Finance manager', 'Physical therapist', 'Product designer', 'Administrative assistant', 'Brand ambassador', 'Project coordinator', 'Product manager', 'It administrator', 'Sales associate', 'Chemical engineer', 'Legal secretary', 'Market analyst', ]; export const maxStringLength = 35; ================================================ FILE: drizzle-seed/src/datasets/lastNames.ts ================================================ /** * The original source for last names data was taken from https://www.kaggle.com/datasets/fivethirtyeight/fivethirtyeight-most-common-name-dataset?resource=download&select=surnames.csv */ export default [ 'Smith', 'Johnson', 'Williams', 'Brown', 'Jones', 'Miller', 'Davis', 'Garcia', 'Rodriguez', 'Wilson', 'Martinez', 'Anderson', 'Taylor', 'Thomas', 'Hernandez', 'Moore', 'Martin', 'Jackson', 'Thompson', 'White', 'Lopez', 'Lee', 'Gonzalez', 'Harris', 'Clark', 'Lewis', 'Robinson', 'Walker', 'Perez', 'Hall', 'Young', 'Allen', 'Sanchez', 'Wright', 'King', 'Scott', 'Green', 'Baker', 'Adams', 'Nelson', 'Hill', 'Ramirez', 'Campbell', 'Mitchell', 'Roberts', 'Carter', 'Phillips', 'Evans', 'Turner', 'Torres', 'Parker', 'Collins', 'Edwards', 'Stewart', 'Flores', 'Morris', 'Nguyen', 'Murphy', 'Rivera', 'Cook', 'Rogers', 'Morgan', 'Peterson', 'Cooper', 'Reed', 'Bailey', 'Bell', 'Gomez', 'Kelly', 'Howard', 'Ward', 'Cox', 'Diaz', 'Richardson', 'Wood', 'Watson', 'Brooks', 'Bennett', 'Gray', 'James', 'Reyes', 'Cruz', 'Hughes', 'Price', 'Myers', 'Long', 'Foster', 'Sanders', 'Ross', 'Morales', 'Powell', 'Sullivan', 'Russell', 'Ortiz', 'Jenkins', 'Gutierrez', 'Perry', 'Butler', 'Barnes', 'Fisher', 'Henderson', 'Coleman', 'Simmons', 'Patterson', 'Jordan', 'Reynolds', 'Hamilton', 'Graham', 'Kim', 'Gonzales', 'Alexander', 'Ramos', 'Wallace', 'Griffin', 'West', 'Cole', 'Hayes', 'Chavez', 'Gibson', 'Bryant', 'Ellis', 'Stevens', 'Murray', 'Ford', 'Marshall', 'Owens', 'Mcdonald', 'Harrison', 'Ruiz', 'Kennedy', 'Wells', 'Alvarez', 'Woods', 'Mendoza', 'Castillo', 'Olson', 'Webb', 'Washington', 'Tucker', 'Freeman', 'Burns', 'Henry', 'Vasquez', 'Snyder', 'Simpson', 'Crawford', 'Jimenez', 'Porter', 'Mason', 'Shaw', 'Gordon', 'Wagner', 'Hunter', 'Romero', 'Hicks', 'Dixon', 'Hunt', 'Palmer', 'Robertson', 'Black', 'Holmes', 'Stone', 'Meyer', 'Boyd', 'Mills', 'Warren', 'Fox', 'Rose', 'Rice', 'Moreno', 'Schmidt', 'Patel', 'Ferguson', 'Nichols', 'Herrera', 'Medina', 'Ryan', 'Fernandez', 'Weaver', 'Daniels', 'Stephens', 'Gardner', 'Payne', 'Kelley', 'Dunn', 'Pierce', 'Arnold', 'Tran', 'Spencer', 'Peters', 'Hawkins', 'Grant', 'Hansen', 'Castro', 'Hoffman', 'Hart', 'Elliott', 'Cunningham', 'Knight', 'Bradley', 'Carroll', 'Hudson', 'Duncan', 'Armstrong', 'Berry', 'Andrews', 'Johnston', 'Ray', 'Lane', 'Riley', 'Carpenter', 'Perkins', 'Aguilar', 'Silva', 'Richards', 'Willis', 'Matthews', 'Chapman', 'Lawrence', 'Garza', 'Vargas', 'Watkins', 'Wheeler', 'Larson', 'Carlson', 'Harper', 'George', 'Greene', 'Burke', 'Guzman', 'Morrison', 'Munoz', 'Jacobs', 'Obrien', 'Lawson', 'Franklin', 'Lynch', 'Bishop', 'Carr', 'Salazar', 'Austin', 'Mendez', 'Gilbert', 'Jensen', 'Williamson', 'Montgomery', 'Harvey', 'Oliver', 'Howell', 'Dean', 'Hanson', 'Weber', 'Garrett', 'Sims', 'Burton', 'Fuller', 'Soto', 'Mccoy', 'Welch', 'Chen', 'Schultz', 'Walters', 'Reid', 'Fields', 'Walsh', 'Little', 'Fowler', 'Bowman', 'Davidson', 'May', 'Day', 'Schneider', 'Newman', 'Brewer', 'Lucas', 'Holland', 'Wong', 'Banks', 'Santos', 'Curtis', 'Pearson', 'Delgado', 'Valdez', 'Pena', 'Rios', 'Douglas', 'Sandoval', 'Barrett', 'Hopkins', 'Keller', 'Guerrero', 'Stanley', 'Bates', 'Alvarado', 'Beck', 'Ortega', 'Wade', 'Estrada', 'Contreras', 'Barnett', 'Caldwell', 'Santiago', 'Lambert', 'Powers', 'Chambers', 'Nunez', 'Craig', 'Leonard', 'Lowe', 'Rhodes', 'Byrd', 'Gregory', 'Shelton', 'Frazier', 'Becker', 'Maldonado', 'Fleming', 'Vega', 'Sutton', 'Cohen', 'Jennings', 'Parks', 'Mcdaniel', 'Watts', 'Barker', 'Norris', 'Vaughn', 'Vazquez', 'Holt', 'Schwartz', 'Steele', 'Benson', 'Neal', 'Dominguez', 'Horton', 'Terry', 'Wolfe', 'Hale', 'Lyons', 'Graves', 'Haynes', 'Miles', 'Park', 'Warner', 'Padilla', 'Bush', 'Thornton', 'Mccarthy', 'Mann', 'Zimmerman', 'Erickson', 'Fletcher', 'Mckinney', 'Page', 'Dawson', 'Joseph', 'Marquez', 'Reeves', 'Klein', 'Espinoza', 'Baldwin', 'Moran', 'Love', 'Robbins', 'Higgins', 'Ball', 'Cortez', 'Le', 'Griffith', 'Bowen', 'Sharp', 'Cummings', 'Ramsey', 'Hardy', 'Swanson', 'Barber', 'Acosta', 'Luna', 'Chandler', 'Blair', 'Daniel', 'Cross', 'Simon', 'Dennis', 'Oconnor', 'Quinn', 'Gross', 'Navarro', 'Moss', 'Fitzgerald', 'Doyle', 'Mclaughlin', 'Rojas', 'Rodgers', 'Stevenson', 'Singh', 'Yang', 'Figueroa', 'Harmon', 'Newton', 'Paul', 'Manning', 'Garner', 'Mcgee', 'Reese', 'Francis', 'Burgess', 'Adkins', 'Goodman', 'Curry', 'Brady', 'Christensen', 'Potter', 'Walton', 'Goodwin', 'Mullins', 'Molina', 'Webster', 'Fischer', 'Campos', 'Avila', 'Sherman', 'Todd', 'Chang', 'Blake', 'Malone', 'Wolf', 'Hodges', 'Juarez', 'Gill', 'Farmer', 'Hines', 'Gallagher', 'Duran', 'Hubbard', 'Cannon', 'Miranda', 'Wang', 'Saunders', 'Tate', 'Mack', 'Hammond', 'Carrillo', 'Townsend', 'Wise', 'Ingram', 'Barton', 'Mejia', 'Ayala', 'Schroeder', 'Hampton', 'Rowe', 'Parsons', 'Frank', 'Waters', 'Strickland', 'Osborne', 'Maxwell', 'Chan', 'Deleon', 'Norman', 'Harrington', 'Casey', 'Patton', 'Logan', 'Bowers', 'Mueller', 'Glover', 'Floyd', 'Hartman', 'Buchanan', 'Cobb', 'French', 'Kramer', 'Mccormick', 'Clarke', 'Tyler', 'Gibbs', 'Moody', 'Conner', 'Sparks', 'Mcguire', 'Leon', 'Bauer', 'Norton', 'Pope', 'Flynn', 'Hogan', 'Robles', 'Salinas', 'Yates', 'Lindsey', 'Lloyd', 'Marsh', 'Mcbride', 'Owen', 'Solis', 'Pham', 'Lang', 'Pratt', 'Lara', 'Brock', 'Ballard', 'Trujillo', 'Shaffer', 'Drake', 'Roman', 'Aguirre', 'Morton', 'Stokes', 'Lamb', 'Pacheco', 'Patrick', 'Cochran', 'Shepherd', 'Cain', 'Burnett', 'Hess', 'Li', 'Cervantes', 'Olsen', 'Briggs', 'Ochoa', 'Cabrera', 'Velasquez', 'Montoya', 'Roth', 'Meyers', 'Cardenas', 'Fuentes', 'Weiss', 'Hoover', 'Wilkins', 'Nicholson', 'Underwood', 'Short', 'Carson', 'Morrow', 'Colon', 'Holloway', 'Summers', 'Bryan', 'Petersen', 'Mckenzie', 'Serrano', 'Wilcox', 'Carey', 'Clayton', 'Poole', 'Calderon', 'Gallegos', 'Greer', 'Rivas', 'Guerra', 'Decker', 'Collier', 'Wall', 'Whitaker', 'Bass', 'Flowers', 'Davenport', 'Conley', 'Houston', 'Huff', 'Copeland', 'Hood', 'Monroe', 'Massey', 'Roberson', 'Combs', 'Franco', 'Larsen', 'Pittman', 'Randall', 'Skinner', 'Wilkinson', 'Kirby', 'Cameron', 'Bridges', 'Anthony', 'Richard', 'Kirk', 'Bruce', 'Singleton', 'Mathis', 'Bradford', 'Boone', 'Abbott', 'Charles', 'Allison', 'Sweeney', 'Atkinson', 'Horn', 'Jefferson', 'Rosales', 'York', 'Christian', 'Phelps', 'Farrell', 'Castaneda', 'Nash', 'Dickerson', 'Bond', 'Wyatt', 'Foley', 'Chase', 'Gates', 'Vincent', 'Mathews', 'Hodge', 'Garrison', 'Trevino', 'Villarreal', 'Heath', 'Dalton', 'Valencia', 'Callahan', 'Hensley', 'Atkins', 'Huffman', 'Roy', 'Boyer', 'Shields', 'Lin', 'Hancock', 'Grimes', 'Glenn', 'Cline', 'Delacruz', 'Camacho', 'Dillon', 'Parrish', 'Oneill', 'Melton', 'Booth', 'Kane', 'Berg', 'Harrell', 'Pitts', 'Savage', 'Wiggins', 'Brennan', 'Salas', 'Marks', 'Russo', 'Sawyer', 'Baxter', 'Golden', 'Hutchinson', 'Liu', 'Walter', 'Mcdowell', 'Wiley', 'Rich', 'Humphrey', 'Johns', 'Koch', 'Suarez', 'Hobbs', 'Beard', 'Gilmore', 'Ibarra', 'Keith', 'Macias', 'Khan', 'Andrade', 'Ware', 'Stephenson', 'Henson', 'Wilkerson', 'Dyer', 'Mcclure', 'Blackwell', 'Mercado', 'Tanner', 'Eaton', 'Clay', 'Barron', 'Beasley', 'Oneal', 'Preston', 'Small', 'Wu', 'Zamora', 'Macdonald', 'Vance', 'Snow', 'Mcclain', 'Stafford', 'Orozco', 'Barry', 'English', 'Shannon', 'Kline', 'Jacobson', 'Woodard', 'Huang', 'Kemp', 'Mosley', 'Prince', 'Merritt', 'Hurst', 'Villanueva', 'Roach', 'Nolan', 'Lam', 'Yoder', 'Mccullough', 'Lester', 'Santana', 'Valenzuela', 'Winters', 'Barrera', 'Leach', 'Orr', 'Berger', 'Mckee', 'Strong', 'Conway', 'Stein', 'Whitehead', 'Bullock', 'Escobar', 'Knox', 'Meadows', 'Solomon', 'Velez', 'Odonnell', 'Kerr', 'Stout', 'Blankenship', 'Browning', 'Kent', 'Lozano', 'Bartlett', 'Pruitt', 'Buck', 'Barr', 'Gaines', 'Durham', 'Gentry', 'Mcintyre', 'Sloan', 'Rocha', 'Melendez', 'Herman', 'Sexton', 'Moon', 'Hendricks', 'Rangel', 'Stark', 'Lowery', 'Hardin', 'Hull', 'Sellers', 'Ellison', 'Calhoun', 'Gillespie', 'Mora', 'Knapp', 'Mccall', 'Morse', 'Dorsey', 'Weeks', 'Nielsen', 'Livingston', 'Leblanc', 'Mclean', 'Bradshaw', 'Glass', 'Middleton', 'Buckley', 'Schaefer', 'Frost', 'Howe', 'House', 'Mcintosh', 'Ho', 'Pennington', 'Reilly', 'Hebert', 'Mcfarland', 'Hickman', 'Noble', 'Spears', 'Conrad', 'Arias', 'Galvan', 'Velazquez', 'Huynh', 'Frederick', 'Randolph', 'Cantu', 'Fitzpatrick', 'Mahoney', 'Peck', 'Villa', 'Michael', 'Donovan', 'Mcconnell', 'Walls', 'Boyle', 'Mayer', 'Zuniga', 'Giles', 'Pineda', 'Pace', 'Hurley', 'Mays', 'Mcmillan', 'Crosby', 'Ayers', 'Case', 'Bentley', 'Shepard', 'Everett', 'Pugh', 'David', 'Mcmahon', 'Dunlap', 'Bender', 'Hahn', 'Harding', 'Acevedo', 'Raymond', 'Blackburn', 'Duffy', 'Landry', 'Dougherty', 'Bautista', 'Shah', 'Potts', 'Arroyo', 'Valentine', 'Meza', 'Gould', 'Vaughan', 'Fry', 'Rush', 'Avery', 'Herring', 'Dodson', 'Clements', 'Sampson', 'Tapia', 'Bean', 'Lynn', 'Crane', 'Farley', 'Cisneros', 'Benton', 'Ashley', 'Mckay', 'Finley', 'Best', 'Blevins', 'Friedman', 'Moses', 'Sosa', 'Blanchard', 'Huber', 'Frye', 'Krueger', 'Bernard', 'Rosario', 'Rubio', 'Mullen', 'Benjamin', 'Haley', 'Chung', 'Moyer', 'Choi', 'Horne', 'Yu', 'Woodward', 'Ali', 'Nixon', 'Hayden', 'Rivers', 'Estes', 'Mccarty', 'Richmond', 'Stuart', 'Maynard', 'Brandt', 'Oconnell', 'Hanna', 'Sanford', 'Sheppard', 'Church', 'Burch', 'Levy', 'Rasmussen', 'Coffey', 'Ponce', 'Faulkner', 'Donaldson', 'Schmitt', 'Novak', 'Costa', 'Montes', 'Booker', 'Cordova', 'Waller', 'Arellano', 'Maddox', 'Mata', 'Bonilla', 'Stanton', 'Compton', 'Kaufman', 'Dudley', 'Mcpherson', 'Beltran', 'Dickson', 'Mccann', 'Villegas', 'Proctor', 'Hester', 'Cantrell', 'Daugherty', 'Cherry', 'Bray', 'Davila', 'Rowland', 'Levine', 'Madden', 'Spence', 'Good', 'Irwin', 'Werner', 'Krause', 'Petty', 'Whitney', 'Baird', 'Hooper', 'Pollard', 'Zavala', 'Jarvis', 'Holden', 'Haas', 'Hendrix', 'Mcgrath', 'Bird', 'Lucero', 'Terrell', 'Riggs', 'Joyce', 'Mercer', 'Rollins', 'Galloway', 'Duke', 'Odom', 'Andersen', 'Downs', 'Hatfield', 'Benitez', 'Archer', 'Huerta', 'Travis', 'Mcneil', 'Hinton', 'Zhang', 'Hays', 'Mayo', 'Fritz', 'Branch', 'Mooney', 'Ewing', 'Ritter', 'Esparza', 'Frey', 'Braun', 'Gay', 'Riddle', 'Haney', 'Kaiser', 'Holder', 'Chaney', 'Mcknight', 'Gamble', 'Vang', 'Cooley', 'Carney', 'Cowan', 'Forbes', 'Ferrell', 'Davies', 'Barajas', 'Shea', 'Osborn', 'Bright', 'Cuevas', 'Bolton', 'Murillo', 'Lutz', 'Duarte', 'Kidd', 'Key', 'Cooke', 'Goff', 'Dejesus', 'Marin', 'Dotson', 'Bonner', 'Cotton', 'Merrill', 'Lindsay', 'Lancaster', 'Mcgowan', 'Felix', 'Salgado', 'Slater', 'Carver', 'Guthrie', 'Holman', 'Fulton', 'Snider', 'Sears', 'Witt', 'Newell', 'Byers', 'Lehman', 'Gorman', 'Costello', 'Donahue', 'Delaney', 'Albert', 'Workman', 'Rosas', 'Springer', 'Kinney', 'Justice', 'Odell', 'Lake', 'Donnelly', 'Law', 'Dailey', 'Guevara', 'Shoemaker', 'Barlow', 'Marino', 'Winter', 'Craft', 'Katz', 'Pickett', 'Espinosa', 'Maloney', 'Daly', 'Goldstein', 'Crowley', 'Vogel', 'Kuhn', 'Pearce', 'Hartley', 'Cleveland', 'Palacios', 'Mcfadden', 'Britt', 'Wooten', 'Cortes', 'Dillard', 'Childers', 'Alford', 'Dodd', 'Emerson', 'Wilder', 'Lange', 'Goldberg', 'Quintero', 'Beach', 'Enriquez', 'Quintana', 'Helms', 'Mackey', 'Finch', 'Cramer', 'Minor', 'Flanagan', 'Franks', 'Corona', 'Kendall', 'Mccabe', 'Hendrickson', 'Moser', 'Mcdermott', 'Camp', 'Mcleod', 'Bernal', 'Kaplan', 'Medrano', 'Lugo', 'Tracy', 'Bacon', 'Crowe', 'Richter', 'Welsh', 'Holley', 'Ratliff', 'Mayfield', 'Talley', 'Haines', 'Dale', 'Gibbons', 'Hickey', 'Byrne', 'Kirkland', 'Farris', 'Correa', 'Tillman', 'Sweet', 'Kessler', 'England', 'Hewitt', 'Blanco', 'Connolly', 'Pate', 'Elder', 'Bruno', 'Holcomb', 'Hyde', 'Mcallister', 'Cash', 'Christopher', 'Whitfield', 'Meeks', 'Hatcher', 'Fink', 'Sutherland', 'Noel', 'Ritchie', 'Rosa', 'Leal', 'Joyner', 'Starr', 'Morin', 'Delarosa', 'Connor', 'Hilton', 'Alston', 'Gilliam', 'Wynn', 'Wills', 'Jaramillo', 'Oneil', 'Nieves', 'Britton', 'Rankin', 'Belcher', 'Guy', 'Chamberlain', 'Tyson', 'Puckett', 'Downing', 'Sharpe', 'Boggs', 'Truong', 'Pierson', 'Godfrey', 'Mobley', 'John', 'Kern', 'Dye', 'Hollis', 'Bravo', 'Magana', 'Rutherford', 'Ng', 'Tuttle', 'Lim', 'Romano', 'Trejo', 'Arthur', 'Knowles', 'Lyon', 'Shirley', 'Quinones', 'Childs', 'Dolan', 'Head', 'Reyna', 'Saenz', 'Hastings', 'Kenney', 'Cano', 'Foreman', 'Denton', 'Villalobos', 'Pryor', 'Sargent', 'Doherty', 'Hopper', 'Phan', 'Womack', 'Lockhart', 'Ventura', 'Dwyer', 'Muller', 'Galindo', 'Grace', 'Sorensen', 'Courtney', 'Parra', 'Rodrigues', 'Nicholas', 'Ahmed', 'Mcginnis', 'Langley', 'Madison', 'Locke', 'Jamison', 'Nava', 'Gustafson', 'Sykes', 'Dempsey', 'Hamm', 'Rodriquez', 'Mcgill', 'Xiong', 'Esquivel', 'Simms', 'Kendrick', 'Boyce', 'Vigil', 'Downey', 'Mckenna', 'Sierra', 'Webber', 'Kirkpatrick', 'Dickinson', 'Couch', 'Burks', 'Sheehan', 'Slaughter', 'Pike', 'Whitley', 'Magee', 'Cheng', 'Sinclair', 'Cassidy', 'Rutledge', 'Burris', 'Bowling', 'Crabtree', 'Mcnamara', 'Avalos', 'Vu', 'Herron', 'Broussard', 'Abraham', 'Garland', 'Corbett', 'Corbin', 'Stinson', 'Chin', 'Burt', 'Hutchins', 'Woodruff', 'Lau', 'Brandon', 'Singer', 'Hatch', 'Rossi', 'Shafer', 'Ott', 'Goss', 'Gregg', 'Dewitt', 'Tang', 'Polk', 'Worley', 'Covington', 'Saldana', 'Heller', 'Emery', 'Swartz', 'Cho', 'Mccray', 'Elmore', 'Rosenberg', 'Simons', 'Clemons', 'Beatty', 'Harden', 'Herbert', 'Bland', 'Rucker', 'Manley', 'Ziegler', 'Grady', 'Lott', 'Rouse', 'Gleason', 'Mcclellan', 'Abrams', 'Vo', 'Albright', 'Meier', 'Dunbar', 'Ackerman', 'Padgett', 'Mayes', 'Tipton', 'Coffman', 'Peralta', 'Shapiro', 'Roe', 'Weston', 'Plummer', 'Helton', 'Stern', 'Fraser', 'Stover', 'Fish', 'Schumacher', 'Baca', 'Curran', 'Vinson', 'Vera', 'Clifton', 'Ervin', 'Eldridge', 'Lowry', 'Childress', 'Becerra', 'Gore', 'Seymour', 'Chu', 'Field', 'Akers', 'Carrasco', 'Bingham', 'Sterling', 'Greenwood', 'Leslie', 'Groves', 'Manuel', 'Swain', 'Edmonds', 'Muniz', 'Thomson', 'Crouch', 'Walden', 'Smart', 'Tomlinson', 'Alfaro', 'Quick', 'Goldman', 'Mcelroy', 'Yarbrough', 'Funk', 'Hong', 'Portillo', 'Lund', 'Ngo', 'Elkins', 'Stroud', 'Meredith', 'Battle', 'Mccauley', 'Zapata', 'Bloom', 'Gee', 'Givens', 'Cardona', 'Schafer', 'Robison', 'Gunter', 'Griggs', 'Tovar', 'Teague', 'Swift', 'Bowden', 'Schulz', 'Blanton', 'Buckner', 'Whalen', 'Pritchard', 'Pierre', 'Kang', 'Metcalf', 'Butts', 'Kurtz', 'Sanderson', 'Tompkins', 'Inman', 'Crowder', 'Dickey', 'Hutchison', 'Conklin', 'Hoskins', 'Holbrook', 'Horner', 'Neely', 'Tatum', 'Hollingsworth', 'Draper', 'Clement', 'Lord', 'Reece', 'Feldman', 'Kay', 'Hagen', 'Crews', 'Bowles', 'Post', 'Jewell', 'Daley', 'Cordero', 'Mckinley', 'Velasco', 'Masters', 'Driscoll', 'Burrell', 'Valle', 'Crow', 'Devine', 'Larkin', 'Chappell', 'Pollock', 'Ly', 'Kimball', 'Schmitz', 'Lu', 'Rubin', 'Self', 'Barrios', 'Pereira', 'Phipps', 'Mcmanus', 'Nance', 'Steiner', 'Poe', 'Crockett', 'Jeffries', 'Amos', 'Nix', 'Newsome', 'Dooley', 'Payton', 'Rosen', 'Swenson', 'Connelly', 'Tolbert', 'Segura', 'Esposito', 'Coker', 'Biggs', 'Hinkle', 'Thurman', 'Drew', 'Ivey', 'Bullard', 'Baez', 'Neff', 'Maher', 'Stratton', 'Egan', 'Dubois', 'Gallardo', 'Blue', 'Rainey', 'Yeager', 'Saucedo', 'Ferreira', 'Sprague', 'Lacy', 'Hurtado', 'Heard', 'Connell', 'Stahl', 'Aldridge', 'Amaya', 'Forrest', 'Erwin', 'Gunn', 'Swan', 'Butcher', 'Rosado', 'Godwin', 'Hand', 'Gabriel', 'Otto', 'Whaley', 'Ludwig', 'Clifford', 'Grove', 'Beaver', 'Silver', 'Dang', 'Hammer', 'Dick', 'Boswell', 'Mead', 'Colvin', 'Oleary', 'Milligan', 'Goins', 'Ames', 'Dodge', 'Kaur', 'Escobedo', 'Arredondo', 'Geiger', 'Winkler', 'Dunham', 'Temple', 'Babcock', 'Billings', 'Grimm', 'Lilly', 'Wesley', 'Mcghee', 'Siegel', 'Painter', 'Bower', 'Purcell', 'Block', 'Aguilera', 'Norwood', 'Sheridan', 'Cartwright', 'Coates', 'Davison', 'Regan', 'Ramey', 'Koenig', 'Kraft', 'Bunch', 'Engel', 'Tan', 'Winn', 'Steward', 'Link', 'Vickers', 'Bragg', 'Piper', 'Huggins', 'Michel', 'Healy', 'Jacob', 'Mcdonough', 'Wolff', 'Colbert', 'Zepeda', 'Hoang', 'Dugan', 'Meade', 'Kilgore', 'Guillen', 'Do', 'Hinojosa', 'Goode', 'Arrington', 'Gary', 'Snell', 'Willard', 'Renteria', 'Chacon', 'Gallo', 'Hankins', 'Montano', 'Browne', 'Peacock', 'Ohara', 'Cornell', 'Sherwood', 'Castellanos', 'Thorpe', 'Stiles', 'Sadler', 'Latham', 'Redmond', 'Greenberg', 'Cote', 'Waddell', 'Dukes', 'Diamond', 'Bui', 'Madrid', 'Alonso', 'Sheets', 'Irvin', 'Hurt', 'Ferris', 'Sewell', 'Carlton', 'Aragon', 'Blackmon', 'Hadley', 'Hoyt', 'Mcgraw', 'Pagan', 'Land', 'Tidwell', 'Lovell', 'Miner', 'Doss', 'Dahl', 'Delatorre', 'Stanford', 'Kauffman', 'Vela', 'Gagnon', 'Winston', 'Gomes', 'Thacker', 'Coronado', 'Ash', 'Jarrett', 'Hager', 'Samuels', 'Metzger', 'Raines', 'Spivey', 'Maurer', 'Han', 'Voss', 'Henley', 'Caballero', 'Caruso', 'Coulter', 'North', 'Finn', 'Cahill', 'Lanier', 'Souza', 'Mcwilliams', 'Deal', 'Urban', 'Schaffer', 'Houser', 'Cummins', 'Romo', 'Crocker', 'Bassett', 'Kruse', 'Bolden', 'Ybarra', 'Metz', 'Root', 'Mcmullen', 'Hagan', 'Crump', 'Guidry', 'Brantley', 'Kearney', 'Beal', 'Toth', 'Jorgensen', 'Timmons', 'Milton', 'Tripp', 'Hurd', 'Sapp', 'Whitman', 'Messer', 'Burgos', 'Major', 'Westbrook', 'Castle', 'Serna', 'Carlisle', 'Varela', 'Cullen', 'Wilhelm', 'Bergeron', 'Burger', 'Posey', 'Barnhart', 'Hackett', 'Madrigal', 'Eubanks', 'Sizemore', 'Hilliard', 'Hargrove', 'Boucher', 'Thomason', 'Melvin', 'Roper', 'Barnard', 'Fonseca', 'Pedersen', 'Quiroz', 'Washburn', 'Holliday', 'Yee', 'Rudolph', 'Bermudez', 'Coyle', 'Gil', 'Pina', 'Goodrich', 'Elias', 'Lockwood', 'Cabral', 'Carranza', 'Duvall', 'Cornelius', 'Mccollum', 'Street', 'Mcneal', 'Connors', 'Angel', 'Paulson', 'Hinson', 'Keenan', 'Sheldon', 'Farr', 'Eddy', 'Samuel', 'Ring', 'Ledbetter', 'Betts', 'Fontenot', 'Gifford', 'Hannah', 'Hanley', 'Person', 'Fountain', 'Levin', 'Stubbs', 'Hightower', 'Murdock', 'Koehler', 'Ma', 'Engle', 'Smiley', 'Carmichael', 'Sheffield', 'Langston', 'Mccracken', 'Yost', 'Trotter', 'Story', 'Starks', 'Lujan', 'Blount', 'Cody', 'Rushing', 'Benoit', 'Herndon', 'Jacobsen', 'Nieto', 'Wiseman', 'Layton', 'Epps', 'Shipley', 'Leyva', 'Reeder', 'Brand', 'Roland', 'Fitch', 'Rico', 'Napier', 'Cronin', 'Mcqueen', 'Paredes', 'Trent', 'Christiansen', 'Spangler', 'Pettit', 'Langford', 'Benavides', 'Penn', 'Paige', 'Weir', 'Dietz', 'Prater', 'Brewster', 'Louis', 'Diehl', 'Pack', 'Spaulding', 'Ernst', 'Aviles', 'Nowak', 'Olvera', 'Rock', 'Mansfield', 'Aquino', 'Ogden', 'Stacy', 'Rizzo', 'Sylvester', 'Gillis', 'Sands', 'Machado', 'Lovett', 'Duong', 'Hyatt', 'Landis', 'Platt', 'Bustamante', 'Hedrick', 'Pritchett', 'Gaston', 'Dobson', 'Caudill', 'Tackett', 'Bateman', 'Landers', 'Carmona', 'Gipson', 'Uribe', 'Mcneill', 'Ledford', 'Mims', 'Abel', 'Gold', 'Smallwood', 'Thorne', 'Mchugh', 'Dickens', 'Leung', 'Tobin', 'Kowalski', 'Medeiros', 'Cope', 'Quezada', 'Kraus', 'Overton', 'Montalvo', 'Staley', 'Woody', 'Hathaway', 'Osorio', 'Laird', 'Dobbs', 'Capps', 'Putnam', 'Lay', 'Francisco', 'Bernstein', 'Adair', 'Hutton', 'Burkett', 'Rhoades', 'Yanez', 'Richey', 'Bledsoe', 'Mccain', 'Beyer', 'Cates', 'Roche', 'Spicer', 'Queen', 'Doty', 'Darling', 'Darby', 'Sumner', 'Kincaid', 'Hay', 'Grossman', 'Lacey', 'Wilkes', 'Humphries', 'Paz', 'Darnell', 'Keys', 'Kyle', 'Lackey', 'Vogt', 'Locklear', 'Kiser', 'Presley', 'Bryson', 'Bergman', 'Peoples', 'Fair', 'Mcclendon', 'Corley', 'Prado', 'Christie', 'Delong', 'Skaggs', 'Dill', 'Shearer', 'Judd', 'Stapleton', 'Flaherty', 'Casillas', 'Pinto', 'Youngblood', 'Haywood', 'Toney', 'Ricks', 'Granados', 'Crum', 'Triplett', 'Soriano', 'Waite', 'Hoff', 'Anaya', 'Crenshaw', 'Jung', 'Canales', 'Cagle', 'Denny', 'Marcus', 'Berman', 'Munson', 'Ocampo', 'Bauman', 'Corcoran', 'Keen', 'Zimmer', 'Friend', 'Ornelas', 'Varner', 'Pelletier', 'Vernon', 'Blum', 'Albrecht', 'Culver', 'Schuster', 'Cuellar', 'Mccord', 'Shultz', 'Mcrae', 'Moreland', 'Calvert', 'William', 'Whittington', 'Eckert', 'Keene', 'Mohr', 'Hanks', 'Kimble', 'Cavanaugh', 'Crowell', 'Russ', 'Feliciano', 'Crain', 'Busch', 'Mccormack', 'Drummond', 'Omalley', 'Aldrich', 'Luke', 'Greco', 'Mott', 'Oakes', 'Mallory', 'Mclain', 'Burrows', 'Otero', 'Allred', 'Eason', 'Finney', 'Weller', 'Waldron', 'Champion', 'Jeffers', 'Coon', 'Rosenthal', 'Huddleston', 'Solano', 'Hirsch', 'Akins', 'Olivares', 'Song', 'Sneed', 'Benedict', 'Bain', 'Okeefe', 'Hidalgo', 'Matos', 'Stallings', 'Paris', 'Gamez', 'Kenny', 'Quigley', 'Marrero', 'Fagan', 'Dutton', 'Pappas', 'Atwood', 'Mcgovern', 'Bagley', 'Read', 'Lunsford', 'Moseley', 'Oakley', 'Ashby', 'Granger', 'Shaver', 'Hope', 'Coe', 'Burroughs', 'Helm', 'Neumann', 'Ambrose', 'Michaels', 'Prescott', 'Light', 'Dumas', 'Flood', 'Stringer', 'Currie', 'Comer', 'Fong', 'Whitlock', 'Lemus', 'Hawley', 'Ulrich', 'Staples', 'Boykin', 'Knutson', 'Grover', 'Hobson', 'Cormier', 'Doran', 'Thayer', 'Woodson', 'Whitt', 'Hooker', 'Kohler', 'Vandyke', 'Addison', 'Schrader', 'Haskins', 'Whittaker', 'Madsen', 'Gauthier', 'Burnette', 'Keating', 'Purvis', 'Aleman', 'Huston', 'Pimentel', 'Hamlin', 'Gerber', 'Hooks', 'Schwab', 'Honeycutt', 'Schulte', 'Alonzo', 'Isaac', 'Conroy', 'Adler', 'Eastman', 'Cottrell', 'Orourke', 'Hawk', 'Goldsmith', 'Rader', 'Crandall', 'Reynoso', 'Shook', 'Abernathy', 'Baer', 'Olivas', 'Grayson', 'Bartley', 'Henning', 'Parr', 'Duff', 'Brunson', 'Baum', 'Ennis', 'Laughlin', 'Foote', 'Valadez', 'Adamson', 'Begay', 'Stovall', 'Lincoln', 'Cheung', 'Malloy', 'Rider', 'Giordano', 'Jansen', 'Lopes', 'Arnett', 'Pendleton', 'Gage', 'Barragan', 'Keyes', 'Navarrete', 'Amador', 'Hoffmann', 'Schilling', 'Hawthorne', 'Perdue', 'Schreiber', 'Arevalo', 'Naylor', 'Deluca', 'Marcum', 'Altman', 'Mark', 'Chadwick', 'Doan', 'Easley', 'Ladd', 'Woodall', 'Betancourt', 'Shin', 'Maguire', 'Bellamy', 'Quintanilla', 'Ham', 'Sorenson', 'Mattson', 'Brenner', 'Means', 'Faust', 'Calloway', 'Ojeda', 'Mcnally', 'Dietrich', 'Ransom', 'Hare', 'Felton', 'Whiting', 'Burkhart', 'Clinton', 'Schwarz', 'Cleary', 'Wetzel', 'Reagan', 'Stjohn', 'Chow', 'Hauser', 'Dupree', 'Brannon', 'Lyles', 'Prather', 'Willoughby', 'Sepulveda', 'Nugent', 'Pickens', 'Mosher', 'Joiner', 'Stoner', 'Dowling', 'Trimble', 'Valdes', 'Cheek', 'Scruggs', 'Coy', 'Tilley', 'Barney', 'Saylor', 'Nagy', 'Horvath', 'Lai', 'Corey', 'Ruth', 'Sauer', 'Baron', 'Thao', 'Rowell', 'Grubbs', 'Schaeffer', 'Hillman', 'Sams', 'Hogue', 'Hutson', 'Busby', 'Nickerson', 'Bruner', 'Parham', 'Rendon', 'Anders', 'Lombardo', 'Iverson', 'Kinsey', 'Earl', 'Borden', 'Titus', 'Jean', 'Tellez', 'Beavers', 'Cornett', 'Sotelo', 'Kellogg', 'Silverman', 'Burnham', 'Mcnair', 'Jernigan', 'Escamilla', 'Barrow', 'Coats', 'London', 'Redding', 'Ruffin', 'Yi', 'Boudreaux', 'Goodson', 'Dowell', 'Fenton', 'Mock', 'Dozier', 'Bynum', 'Gale', 'Jolly', 'Beckman', 'Goddard', 'Craven', 'Whitmore', 'Leary', 'Mccloud', 'Gamboa', 'Kerns', 'Brunner', 'Negron', 'Hough', 'Cutler', 'Ledesma', 'Pyle', 'Monahan', 'Tabor', 'Burk', 'Leone', 'Stauffer', 'Hayward', 'Driver', 'Ruff', 'Talbot', 'Seals', 'Boston', 'Carbajal', 'Fay', 'Purdy', 'Mcgregor', 'Sun', 'Orellana', 'Gentile', 'Mahan', 'Brower', 'Patino', 'Thurston', 'Shipman', 'Torrez', 'Aaron', 'Weiner', 'Call', 'Wilburn', 'Oliva', 'Hairston', 'Coley', 'Hummel', 'Arreola', 'Watt', 'Sharma', 'Lentz', 'Arce', 'Power', 'Longoria', 'Wagoner', 'Burr', 'Hsu', 'Tinsley', 'Beebe', 'Wray', 'Nunn', 'Prieto', 'German', 'Rowley', 'Grubb', 'Brito', 'Royal', 'Valentin', 'Bartholomew', 'Schuler', 'Aranda', 'Flint', 'Hearn', 'Venegas', 'Unger', 'Mattingly', 'Boles', 'Casas', 'Barger', 'Julian', 'Dow', 'Dobbins', 'Vann', 'Chester', 'Strange', 'Lemon', 'Kahn', 'Mckinnon', 'Gannon', 'Waggoner', 'Conn', 'Meek', 'Cavazos', 'Skelton', 'Lo', 'Kumar', 'Toledo', 'Lorenz', 'Vallejo', 'Starkey', 'Kitchen', 'Reaves', 'Demarco', 'Farrar', 'Stearns', 'Michaud', 'Higginbotham', 'Fernandes', 'Isaacs', 'Marion', 'Guillory', 'Priest', 'Meehan', 'Oliveira', 'Palma', 'Oswald', 'Loomis', 'Galvez', 'Lind', 'Mena', 'Stclair', 'Hinds', 'Reardon', 'Alley', 'Barth', 'Crook', 'Bliss', 'Nagel', 'Banuelos', 'Parish', 'Harman', 'Douglass', 'Kearns', 'Newcomb', 'Mulligan', 'Coughlin', 'Way', 'Fournier', 'Lawler', 'Kaminski', 'Barbour', 'Sousa', 'Stump', 'Alaniz', 'Ireland', 'Rudd', 'Carnes', 'Lundy', 'Godinez', 'Pulido', 'Dennison', 'Burdick', 'Baumann', 'Dove', 'Stoddard', 'Liang', 'Dent', 'Roark', 'Mcmahan', 'Bowser', 'Parnell', 'Mayberry', 'Wakefield', 'Arndt', 'Ogle', 'Worthington', 'Durbin', 'Escalante', 'Pederson', 'Weldon', 'Vick', 'Knott', 'Ryder', 'Zarate', 'Irving', 'Clemens', 'Shelley', 'Salter', 'Jack', 'Cloud', 'Dasilva', 'Muhammad', 'Squires', 'Rapp', 'Dawkins', 'Polanco', 'Chatman', 'Maier', 'Yazzie', 'Gruber', 'Staton', 'Blackman', 'Mcdonnell', 'Dykes', 'Laws', 'Whitten', 'Pfeiffer', 'Vidal', 'Early', 'Kelsey', 'Baughman', 'Dias', 'Starnes', 'Crespo', 'Lombardi', 'Kilpatrick', 'Deaton', 'Satterfield', 'Wiles', 'Weinstein', 'Rowan', 'Delossantos', 'Hamby', 'Estep', 'Daigle', 'Elam', 'Creech', 'Heck', 'Chavis', 'Echols', 'Foss', 'Trahan', 'Strauss', 'Vanhorn', 'Winslow', 'Rea', 'Heaton', 'Fairchild', 'Minton', 'Hitchcock', 'Linton', 'Handy', 'Crouse', 'Coles', 'Upton', 'Foy', 'Herrington', 'Mcclelland', 'Hwang', 'Rector', 'Luther', 'Kruger', 'Salcedo', 'Chance', 'Gunderson', 'Tharp', 'Griffiths', 'Graf', 'Branham', 'Humphreys', 'Renner', 'Lima', 'Rooney', 'Moya', 'Almeida', 'Gavin', 'Coburn', 'Ouellette', 'Goetz', 'Seay', 'Parrott', 'Harms', 'Robb', 'Storey', 'Barbosa', 'Barraza', 'Loyd', 'Merchant', 'Donohue', 'Carrier', 'Diggs', 'Chastain', 'Sherrill', 'Whipple', 'Braswell', 'Weathers', 'Linder', 'Chapa', 'Bock', 'Oh', 'Lovelace', 'Saavedra', 'Ferrara', 'Callaway', 'Salmon', 'Templeton', 'Christy', 'Harp', 'Dowd', 'Forrester', 'Lawton', 'Epstein', 'Gant', 'Tierney', 'Seaman', 'Corral', 'Dowdy', 'Zaragoza', 'Morrissey', 'Eller', 'Chau', 'Breen', 'High', 'Newberry', 'Beam', 'Yancey', 'Jarrell', 'Cerda', 'Ellsworth', 'Lofton', 'Thibodeaux', 'Pool', 'Rinehart', 'Arteaga', 'Marlow', 'Hacker', 'Will', 'Mackenzie', 'Hook', 'Gilliland', 'Emmons', 'Pickering', 'Medley', 'Willey', 'Andrew', 'Shell', 'Randle', 'Brinkley', 'Pruett', 'Tobias', 'Edmondson', 'Grier', 'Saldivar', 'Batista', 'Askew', 'Moeller', 'Chavarria', 'Augustine', 'Troyer', 'Layne', 'Mcnulty', 'Shank', 'Desai', 'Herrmann', 'Hemphill', 'Bearden', 'Spear', 'Keener', 'Holguin', 'Culp', 'Braden', 'Briscoe', 'Bales', 'Garvin', 'Stockton', 'Abreu', 'Suggs', 'Mccartney', 'Ferrer', 'Rhoads', 'Ha', 'Nevarez', 'Singletary', 'Chong', 'Alcala', 'Cheney', 'Westfall', 'Damico', 'Snodgrass', 'Devries', 'Looney', 'Hein', 'Lyle', 'Lockett', 'Jacques', 'Barkley', 'Wahl', 'Aponte', 'Myrick', 'Bolin', 'Holm', 'Slack', 'Scherer', 'Martino', 'Bachman', 'Ely', 'Nesbitt', 'Marroquin', 'Bouchard', 'Mast', 'Jameson', 'Hills', 'Mireles', 'Bueno', 'Pease', 'Vitale', 'Alarcon', 'Linares', 'Schell', 'Lipscomb', 'Arriaga', 'Bourgeois', 'Markham', 'Bonds', 'Wisniewski', 'Ivy', 'Oldham', 'Wendt', 'Fallon', 'Joy', 'Stamper', 'Babb', 'Steinberg', 'Asher', 'Fuchs', 'Blank', 'Willett', 'Heredia', 'Croft', 'Lytle', 'Lance', 'Lassiter', 'Barrientos', 'Condon', 'Barfield', 'Darden', 'Araujo', 'Noonan', 'Guinn', 'Burleson', 'Belanger', 'Main', 'Traylor', 'Messina', 'Zeigler', 'Danielson', 'Millard', 'Kenyon', 'Radford', 'Graff', 'Beaty', 'Baggett', 'Salisbury', 'Crisp', 'Trout', 'Lorenzo', 'Parson', 'Gann', 'Garber', 'Adcock', 'Covarrubias', 'Scales', 'Acuna', 'Thrasher', 'Card', 'Van', 'Mabry', 'Mohamed', 'Montanez', 'Stock', 'Redd', 'Willingham', 'Redman', 'Zambrano', 'Gaffney', 'Herr', 'Schubert', 'Devlin', 'Pringle', 'Houck', 'Casper', 'Rees', 'Wing', 'Ebert', 'Jeter', 'Cornejo', 'Gillette', 'Shockley', 'Amato', 'Girard', 'Leggett', 'Cheatham', 'Bustos', 'Epperson', 'Dubose', 'Seitz', 'Frias', 'East', 'Schofield', 'Steen', 'Orlando', 'Myles', 'Caron', 'Grey', 'Denney', 'Ontiveros', 'Burden', 'Jaeger', 'Reich', 'Witherspoon', 'Najera', 'Frantz', 'Hammonds', 'Xu', 'Leavitt', 'Gilchrist', 'Adam', 'Barone', 'Forman', 'Ceja', 'Ragsdale', 'Sisk', 'Tubbs', 'Elizondo', 'Pressley', 'Bollinger', 'Linn', 'Huntley', 'Dewey', 'Geary', 'Carlos', 'Ragland', 'Mixon', 'Mcarthur', 'Baugh', 'Tam', 'Nobles', 'Clevenger', 'Lusk', 'Foust', 'Cooney', 'Tamayo', 'Robert', 'Longo', 'Overstreet', 'Oglesby', 'Mace', 'Churchill', 'Matson', 'Hamrick', 'Rockwell', 'Trammell', 'Wheatley', 'Carrington', 'Ferraro', 'Ralston', 'Clancy', 'Mondragon', 'Carl', 'Hu', 'Hopson', 'Breaux', 'Mccurdy', 'Mares', 'Mai', 'Chisholm', 'Matlock', 'Aiken', 'Cary', 'Lemons', 'Anguiano', 'Herrick', 'Crawley', 'Montero', 'Hassan', 'Archuleta', 'Farias', 'Cotter', 'Parris', 'Felder', 'Luu', 'Pence', 'Gilman', 'Killian', 'Naranjo', 'Duggan', 'Scarborough', 'Swann', 'Easter', 'Ricketts', 'France', 'Bello', 'Nadeau', 'Still', 'Rincon', 'Cornwell', 'Slade', 'Fierro', 'Mize', 'Christianson', 'Greenfield', 'Mcafee', 'Landrum', 'Adame', 'Dinh', 'Lankford', 'Lewandowski', 'Rust', 'Bundy', 'Waterman', 'Milner', 'Mccrary', 'Hite', 'Curley', 'Donald', 'Duckworth', 'Cecil', 'Carrera', 'Speer', 'Birch', 'Denson', 'Beckwith', 'Stack', 'Durant', 'Lantz', 'Dorman', 'Christman', 'Spann', 'Masterson', 'Hostetler', 'Kolb', 'Brink', 'Scanlon', 'Nye', 'Wylie', 'Beverly', 'Woo', 'Spurlock', 'Sommer', 'Shelby', 'Reinhardt', 'Robledo', 'Bertrand', 'Ashton', 'Cyr', 'Edgar', 'Doe', 'Harkins', 'Brubaker', 'Stoll', 'Dangelo', 'Zhou', 'Moulton', 'Hannon', 'Falk', 'Rains', 'Broughton', 'Applegate', 'Hudgins', 'Slone', 'Yoon', 'Farnsworth', 'Perales', 'Reedy', 'Milam', 'Franz', 'Ponder', 'Ricci', 'Fontaine', 'Irizarry', 'Puente', 'New', 'Selby', 'Cazares', 'Doughty', 'Moffett', 'Balderas', 'Fine', 'Smalley', 'Carlin', 'Trinh', 'Dyson', 'Galvin', 'Valdivia', 'Benner', 'Low', 'Turpin', 'Lyman', 'Billingsley', 'Mcadams', 'Cardwell', 'Fraley', 'Patten', 'Holton', 'Shanks', 'Mcalister', 'Canfield', 'Sample', 'Harley', 'Cason', 'Tomlin', 'Ahmad', 'Coyne', 'Forte', 'Riggins', 'Littlejohn', 'Forsythe', 'Brinson', 'Halverson', 'Bach', 'Stuckey', 'Falcon', 'Wenzel', 'Talbert', 'Champagne', 'Mchenry', 'Vest', 'Shackelford', 'Ordonez', 'Collazo', 'Boland', 'Sisson', 'Bigelow', 'Wharton', 'Hyman', 'Brumfield', 'Oates', 'Mesa', 'Morrell', 'Beckett', 'Reis', 'Alves', 'Chiu', 'Larue', 'Streeter', 'Grogan', 'Blakely', 'Brothers', 'Hatton', 'Kimbrough', 'Lauer', 'Wallis', 'Jett', 'Pepper', 'Hildebrand', 'Rawls', 'Mello', 'Neville', 'Bull', 'Steffen', 'Braxton', 'Cowart', 'Simpkins', 'Mcneely', 'Blalock', 'Spain', 'Shipp', 'Lindquist', 'Oreilly', 'Butterfield', 'Perrin', 'Qualls', 'Edge', 'Havens', 'Luong', 'Switzer', 'Troutman', 'Fortner', 'Tolliver', 'Monk', 'Poindexter', 'Rupp', 'Ferry', 'Negrete', 'Muse', 'Gresham', 'Beauchamp', 'Schmid', 'Barclay', 'Chun', 'Brice', 'Faulk', 'Watters', 'Briones', 'Guajardo', 'Harwood', 'Grissom', 'Harlow', 'Whelan', 'Burdette', 'Palumbo', 'Paulsen', 'Corrigan', 'Garvey', 'Levesque', 'Dockery', 'Delgadillo', 'Gooch', 'Cao', 'Mullin', 'Ridley', 'Stanfield', 'Noriega', 'Dial', 'Ceballos', 'Nunes', 'Newby', 'Baumgartner', 'Hussain', 'Wyman', 'Causey', 'Gossett', 'Ness', 'Waugh', 'Choate', 'Carman', 'Daily', 'Kong', 'Devore', 'Irby', 'Breeden', 'Whatley', 'Ellington', 'Lamar', 'Fultz', 'Bair', 'Zielinski', 'Colby', 'Houghton', 'Grigsby', 'Fortune', 'Paxton', 'Mcmillian', 'Hammons', 'Bronson', 'Keck', 'Wellman', 'Ayres', 'Whiteside', 'Menard', 'Roush', 'Warden', 'Espino', 'Strand', 'Haggerty', 'Banda', 'Krebs', 'Fabian', 'Bowie', 'Branson', 'Lenz', 'Benavidez', 'Keeler', 'Newsom', 'Ezell', 'Jeffrey', 'Pulliam', 'Clary', 'Byrnes', 'Kopp', 'Beers', 'Smalls', 'Sommers', 'Gardiner', 'Fennell', 'Mancini', 'Osullivan', 'Sebastian', 'Bruns', 'Giron', 'Parent', 'Boyles', 'Keefe', 'Muir', 'Wheat', 'Vergara', 'Shuler', 'Pemberton', 'South', 'Brownlee', 'Brockman', 'Royer', 'Fanning', 'Herzog', 'Morley', 'Bethea', 'Tong', 'Needham', 'Roque', 'Mojica', 'Bunn', 'Francois', 'Noe', 'Kuntz', 'Snowden', 'Withers', 'Harlan', 'Seibert', 'Limon', 'Kiefer', 'Bone', 'Sell', 'Allan', 'Skidmore', 'Wren', 'Dunaway', 'Finnegan', 'Moe', 'Wolford', 'Seeley', 'Kroll', 'Lively', 'Janssen', 'Montague', 'Rahman', 'Boehm', 'Nettles', 'Dees', 'Krieger', 'Peek', 'Hershberger', 'Sage', 'Custer', 'Zheng', 'Otoole', 'Jaimes', 'Elrod', 'Somers', 'Lira', 'Nagle', 'Grooms', 'Soria', 'Drury', 'Keane', 'Bostic', 'Hartmann', 'Pauley', 'Murrell', 'Manzo', 'Morey', 'Agee', 'Hamel', 'Tavares', 'Dunning', 'Mccloskey', 'Plunkett', 'Maples', 'March', 'Armenta', 'Waldrop', 'Espinal', 'Fajardo', 'Christenson', 'Robins', 'Bagwell', 'Massie', 'Leahy', 'Urbina', 'Medlin', 'Zhu', 'Pantoja', 'Barbee', 'Clawson', 'Reiter', 'Ko', 'Crider', 'Maxey', 'Worrell', 'Brackett', 'Mclemore', 'Younger', 'Her', 'Hardesty', 'Danner', 'Ragan', 'Almanza', 'Nielson', 'Graber', 'Mcintire', 'Tirado', 'Griswold', 'Seifert', 'Valles', 'Laney', 'Gupta', 'Malik', 'Libby', 'Marvin', 'Koontz', 'Marr', 'Kozlowski', 'Lemke', 'Brant', 'Phelan', 'Kemper', 'Gooden', 'Beaulieu', 'Cardoza', 'Healey', 'Zhao', 'Hardwick', 'Kitchens', 'Box', 'Stepp', 'Comstock', 'Poston', 'Sager', 'Conti', 'Borges', 'Farrow', 'Acker', 'Glaser', 'Antonio', 'Lennon', 'Gaither', 'Freitas', 'Alicea', 'Mcmillen', 'Chapin', 'Ratcliff', 'Lerma', 'Severson', 'Wilde', 'Mortensen', 'Winchester', 'Flannery', 'Villasenor', 'Centeno', 'Burkholder', 'Horan', 'Meador', 'Ingle', 'Roldan', 'Estrella', 'Pullen', 'Newkirk', 'Gaytan', 'Lindberg', 'Windham', 'Gatlin', 'Stoltzfus', 'Behrens', 'Cintron', 'Broderick', 'Solorzano', 'Jaime', 'Venable', 'Culbertson', 'Garay', 'Caputo', 'Grantham', 'Hanlon', 'Parry', 'Crist', 'Cosby', 'Shore', 'Everhart', 'Dorn', 'Turley', 'Eng', 'Valerio', 'Rand', 'Hiatt', 'Mota', 'Judge', 'Kinder', 'Colwell', 'Ashworth', 'Tejeda', 'Sikes', 'Oshea', 'Westmoreland', 'Faber', 'Culpepper', 'Logsdon', 'Fugate', 'Apodaca', 'Lindley', 'Samson', 'Liles', 'Mcclanahan', 'Burge', 'Vail', 'Etheridge', 'Boudreau', 'Andres', 'Noll', 'Higgs', 'Snead', 'Layman', 'Turk', 'Nolen', 'Wayne', 'Betz', 'Victor', 'Lafferty', 'Carbone', 'Skipper', 'Zeller', 'Kasper', 'Desantis', 'Fogle', 'Gandy', 'Mendenhall', 'Seward', 'Schweitzer', 'Gulley', 'Stine', 'Sowers', 'Duenas', 'Monson', 'Brinkman', 'Hubert', 'Motley', 'Pfeifer', 'Weinberg', 'Eggleston', 'Isom', 'Quinlan', 'Gilley', 'Jasso', 'Loya', 'Mull', 'Reichert', 'Wirth', 'Reddy', 'Hodgson', 'Stowe', 'Mccallum', 'Ahrens', 'Huey', 'Mattox', 'Dupont', 'Aguayo', 'Pak', 'Tice', 'Alba', 'Colburn', 'Currier', 'Gaskins', 'Harder', 'Cohn', 'Yoo', 'Garnett', 'Harter', 'Wenger', 'Charlton', 'Littleton', 'Minter', 'Henriquez', 'Cone', 'Vines', 'Kimmel', 'Crooks', 'Caraballo', 'Searcy', 'Peyton', 'Renfro', 'Groff', 'Thorn', 'Moua', 'Jay', 'Leigh', 'Sanborn', 'Wicker', 'Martens', 'Broome', 'Abney', 'Fisk', 'Argueta', 'Upchurch', 'Alderman', 'Tisdale', 'Castellano', 'Legg', 'Wilbur', 'Bills', 'Dix', 'Mauldin', 'Isbell', 'Mears', 'Latimer', 'Ashcraft', 'Earley', 'Tejada', 'Partridge', 'Anglin', 'Caswell', 'Easton', 'Kirchner', 'Mehta', 'Lanham', 'Blaylock', 'Binder', 'Catalano', 'Handley', 'Storm', 'Albertson', 'Free', 'Tuck', 'Keegan', 'Moriarty', 'Dexter', 'Mancuso', 'Allard', 'Pino', 'Chamberlin', 'Moffitt', 'Haag', 'Schott', 'Agnew', 'Malcolm', 'Hallman', 'Heckman', 'Karr', 'Soares', 'Alfonso', 'Tom', 'Wadsworth', 'Schindler', 'Garibay', 'Kuykendall', 'Penny', 'Littlefield', 'Mcnabb', 'Sam', 'Lea', 'Berrios', 'Murry', 'Regalado', 'Dehart', 'Mohammed', 'Counts', 'Solorio', 'Preciado', 'Armendariz', 'Martell', 'Barksdale', 'Frick', 'Haller', 'Broyles', 'Doll', 'Cable', 'Delvalle', 'Weems', 'Kelleher', 'Gagne', 'Albers', 'Kunz', 'Hoy', 'Hawes', 'Guenther', 'Johansen', 'Chaffin', 'Whitworth', 'Wynne', 'Mcmurray', 'Luce', 'Fiore', 'Straub', 'Majors', 'Mcduffie', 'Bohannon', 'Rawlings', 'Freed', 'Sutter', 'Lindstrom', 'Buss', 'Loera', 'Hoyle', 'Witte', 'Tyree', 'Luttrell', 'Andrus', 'Steed', 'Thiel', 'Cranford', 'Fulmer', 'Gable', 'Porras', 'Weis', 'Maas', 'Packard', 'Noyes', 'Kwon', 'Knoll', 'Marx', 'Feeney', 'Israel', 'Bohn', 'Cockrell', 'Glick', 'Cosgrove', 'Keefer', 'Mundy', 'Batchelor', 'Loveless', 'Horowitz', 'Haskell', 'Kunkel', 'Colson', 'Hedges', 'Staggs', 'Swisher', 'Lomeli', 'Padron', 'Cota', 'Homan', 'Musser', 'Curtin', 'Salerno', 'Segovia', 'Keeton', 'Brandenburg', 'Starling', 'Tsai', 'Mahon', 'Klinger', 'Paquette', 'Haddad', 'Mccune', 'Mathew', 'Shull', 'Higdon', 'Guest', 'Shay', 'Swafford', 'Angulo', 'Hackney', 'Evers', 'Sibley', 'Woodworth', 'Ostrander', 'Mangum', 'Smyth', 'Quarles', 'Mccarter', 'Close', 'Truitt', 'Stpierre', 'Mackay', 'Bayer', 'Timm', 'Thatcher', 'Bess', 'Trinidad', 'Jacoby', 'Proffitt', 'Concepcion', 'Parkinson', 'Carreon', 'Ramon', 'Monroy', 'Leger', 'Jauregui', 'Glynn', 'Taggart', 'Neil', 'Reddick', 'Wiese', 'Dover', 'Wicks', 'Hennessy', 'Bittner', 'Mcclung', 'Mcwhorter', 'Derrick', 'Strom', 'Beckham', 'Kee', 'Coombs', 'Schrock', 'Holtz', 'Maki', 'Willson', 'Hulsey', 'Whitson', 'Haugen', 'Lumpkin', 'Scholl', 'Gall', 'Carvalho', 'Kovach', 'Vieira', 'Millan', 'Irvine', 'Held', 'Jolley', 'Jasper', 'Cadena', 'Runyon', 'Lomax', 'Fahey', 'Hoppe', 'Bivens', 'Ruggiero', 'Hussey', 'Ainsworth', 'Hardman', 'Ulloa', 'Dugger', 'Fitzsimmons', 'Scroggins', 'Sowell', 'Toler', 'Barba', 'Biddle', 'Rafferty', 'Trapp', 'Byler', 'Brill', 'Delagarza', 'Thigpen', 'Hiller', 'Martins', 'Jankowski', 'Findley', 'Hollins', 'Stull', 'Pollack', 'Poirier', 'Reno', 'Bratton', 'Jeffery', 'Menendez', 'Mcnutt', 'Kohl', 'Forster', 'Clough', 'Deloach', 'Bader', 'Hanes', 'Sturm', 'Tafoya', 'Beall', 'Coble', 'Demers', 'Kohn', 'Santamaria', 'Vaught', 'Correia', 'Mcgrew', 'Sarmiento', 'Roby', 'Reinhart', 'Rosenbaum', 'Bernier', 'Schiller', 'Furman', 'Grabowski', 'Perryman', 'Kidwell', 'Sabo', 'Saxton', 'Noland', 'Seaton', 'Packer', 'Seal', 'Ruby', 'Smoot', 'Lavoie', 'Putman', 'Fairbanks', 'Neill', 'Florence', 'Beattie', 'Tarver', 'Stephen', 'Bolen', 'Mccombs', 'Freedman', 'Barnhill', 'Gaddis', 'Goad', 'Worden', 'Canada', 'Vickery', 'Calvin', 'Mcclintock', 'Slocum', 'Clausen', 'Mccutcheon', 'Ripley', 'Razo', 'Southard', 'Bourne', 'Aiello', 'Knudsen', 'Angeles', 'Keeney', 'Stacey', 'Neeley', 'Holly', 'Gallant', 'Eads', 'Lafleur', 'Fredrickson', 'Popp', 'Bobo', 'Pardo', 'Artis', 'Lawless', 'Shen', 'Headley', 'Pedraza', 'Pickard', 'Salvador', 'Hofmann', 'Davey', 'Szymanski', 'Dallas', 'Erb', 'Perea', 'Alcantar', 'Ashford', 'Harry', 'Crutchfield', 'Goebel', 'Ridgeway', 'Mcvey', 'Cordell', 'Kovacs', 'Florez', 'Calkins', 'Redden', 'Ricker', 'Salcido', 'Farrington', 'Reimer', 'Mullis', 'Mayhew', 'Register', 'Kaye', 'Blocker', 'Buford', 'Munguia', 'Cady', 'Burley', 'Sander', 'Robinette', 'Stubblefield', 'Shuman', 'Santillan', 'Loy', 'Deutsch', 'Sales', 'Langdon', 'Mazur', 'Clapp', 'Teal', 'Buffington', 'Elliot', 'Halstead', 'Sturgeon', 'Colley', 'Koehn', 'Bergstrom', 'Dunne', 'Pond', 'Gantt', 'Cousins', 'Viera', 'Wilks', 'Haase', 'Sweat', 'Simonson', 'Breedlove', 'Munn', 'Pitt', 'Faircloth', 'Peter', 'Wheaton', 'Howland', 'Merriman', 'Fusco', 'Burney', 'Bedford', 'Baltazar', 'Persaud', 'Gerard', 'Bourque', 'Chao', 'Slagle', 'Kirsch', 'Volk', 'Heim', 'Glasgow', 'Borders', 'Rauch', 'Goforth', 'Batson', 'Basham', 'Mount', 'Peace', 'Lazo', 'Samples', 'Amaro', 'Slattery', 'Ibrahim', 'Weatherford', 'Taft', 'Santoro', 'Aparicio', 'Jiang', 'Ritchey', 'Goble', 'Spring', 'Strain', 'Scully', 'Villareal', 'Toro', 'Duval', 'Jonas', 'Neuman', 'Wozniak', 'Varney', 'Dell', 'Conover', 'Landon', 'Sigler', 'Galbraith', 'Boss', 'Cepeda', 'Back', 'Mateo', 'Peebles', 'Arsenault', 'Cathey', 'Calabrese', 'Dodds', 'Gilbertson', 'Hoke', 'Greenlee', 'Sauceda', 'Vue', 'Lehmann', 'Zink', 'Lapointe', 'Laster', 'Moy', 'Ammons', 'Llamas', 'Foltz', 'Fleck', 'Chew', 'Amaral', 'Geer', 'Su', 'Carden', 'Nunley', 'Creel', 'Clarkson', 'Provost', 'Covey', 'Paine', 'Wofford', 'Frame', 'Dube', 'Grice', 'Tully', 'Molnar', 'Luciano', 'Bartels', 'Winstead', 'Canady', 'Moreau', 'Burnside', 'Bratcher', 'Infante', 'Peterman', 'Swope', 'Freeland', 'Vetter', 'Lanning', 'Marquis', 'Schulze', 'Thai', 'Coppola', 'Rayburn', 'Conte', 'Martz', 'Showalter', 'Quinonez', 'Bandy', 'Rao', 'Bunting', 'Belt', 'Cruse', 'Hamblin', 'Himes', 'Raney', 'Merrell', 'See', 'Gough', 'Maciel', 'Wimberly', 'Craddock', 'Marquardt', 'Wentz', 'Meeker', 'Sandberg', 'Mosier', 'Wasson', 'Hundley', 'Joe', 'Shumaker', 'Fortin', 'Embry', 'Olivarez', 'Akin', 'Seidel', 'Coons', 'Corrales', 'Earle', 'Matheny', 'Kish', 'Outlaw', 'Lieberman', 'Spalding', 'Barnette', 'Martel', 'Hargis', 'Kelso', 'Merrick', 'Fullerton', 'Fries', 'Doucette', 'Clouse', 'Prewitt', 'Hawks', 'Keaton', 'Worthy', 'Zook', 'Montez', 'Poore', 'Autry', 'Lemay', 'Shifflett', 'Forsyth', 'Briseno', 'Piazza', 'Welker', 'Tennant', 'Heinz', 'Haggard', 'Leighton', 'Brittain', 'Begley', 'Flanders', 'Hermann', 'Botello', 'Mathias', 'Hofer', 'Hutto', 'Godoy', 'Cave', 'Pagano', 'Asbury', 'Bowens', 'Withrow', 'Olivo', 'Harbin', 'Andre', 'Sandlin', 'Wertz', 'Desimone', 'Greiner', 'Heinrich', 'Whitcomb', 'Dayton', 'Petrie', 'Hair', 'Ketchum', 'Shanahan', 'Bianco', 'Heil', 'Cochrane', 'Wegner', 'Dagostino', 'Couture', 'Ling', 'Wingate', 'Arenas', 'Keel', 'Casteel', 'Boothe', 'Derosa', 'Horst', 'Rau', 'Palermo', 'Mccorkle', 'Altamirano', 'Nall', 'Shumate', 'Lightfoot', 'Creamer', 'Romeo', 'Coffin', 'Hutchings', 'Jerome', 'Hutcheson', 'Damron', 'Sorrell', 'Nickel', 'Sells', 'Pinkerton', 'Dao', 'Dion', 'Mcfarlane', 'Ridenour', 'Atwell', 'Sturgill', 'Schoen', 'Partin', 'Nemeth', 'Almonte', 'Pan', 'Rickard', 'Wentworth', 'Sammons', 'Sayre', 'Southerland', 'Parisi', 'Ahn', 'Carrion', 'Testa', 'Shorter', 'Covert', 'Gorham', 'Alcantara', 'Belton', 'Bannister', 'Sharkey', 'Mccreary', 'Pannell', 'Scarbrough', 'Keeling', 'Gainey', 'Mill', 'Camarena', 'Herbst', 'Roller', 'Wild', 'Dellinger', 'Lovejoy', 'Manson', 'Dupuis', 'Clem', 'Resendez', 'Burkhardt', 'Williford', 'Mclendon', 'Mazza', 'Mccaffrey', 'Lum', 'Settle', 'Hefner', 'Dupre', 'Louie', 'Gunther', 'Weimer', 'Turnbull', 'Bradbury', 'Maness', 'Urena', 'Lor', 'Sides', 'Wick', 'Monaco', 'Gillen', 'Ives', 'Battaglia', 'Ulmer', 'Schreiner', 'Caceres', 'Sprouse', 'Scoggins', 'Ahern', 'Tracey', 'Terrazas', 'Bracken', 'Gurley', 'Soliz', 'Alcaraz', 'Martines', 'Weidner', 'Criswell', 'Wilbanks', 'Hennessey', 'Mendes', 'Peak', 'Ruelas', 'Caudle', 'Fuqua', 'Jewett', 'Chism', 'Volpe', 'Nino', 'Logue', 'Mcculloch', 'Furr', 'Kersey', 'Shinn', 'Yan', 'Rausch', 'Stinnett', 'Mowery', 'Rivero', 'Weed', 'Bertram', 'Durand', 'Gatewood', 'Tilton', 'Mahaffey', 'Niles', 'Mccue', 'Vargo', 'Holcombe', 'Ralph', 'Castleberry', 'Snipes', 'Wilt', 'Vanmeter', 'Nutter', 'Mendiola', 'Burchett', 'Enos', 'Jobe', 'Kirkwood', 'Pedroza', 'Iglesias', 'Leong', 'Cromer', 'Trice', 'Magnuson', 'Eagle', 'Montenegro', 'Troy', 'Cato', 'Edmond', 'Hendrick', 'Lebron', 'Lathrop', 'Budd', 'Appel', 'Knowlton', 'Bianchi', 'Camarillo', 'Ginn', 'Pulley', 'True', 'Gaddy', 'Domingo', 'Kingsley', 'Loftus', 'Denham', 'Sifuentes', 'Siler', 'Hardison', 'Kwan', 'Pendergrass', 'Frasier', 'Hutchens', 'Fort', 'Montiel', 'Fincher', 'Eggers', 'Moen', 'Griffis', 'Hauck', 'Lister', 'Lundberg', 'Tanaka', 'Cornish', 'Whitlow', 'Chou', 'Griego', 'Robson', 'Prosser', 'Ballinger', 'Fogarty', 'Allman', 'Atchison', 'Conaway', 'Riddick', 'Rupert', 'Krug', 'Pinkston', 'Coggins', 'Narvaez', 'Earnest', 'Fain', 'Rash', 'Olmstead', 'Sherrod', 'Beeler', 'Spearman', 'Poland', 'Rousseau', 'Hyland', 'Rhea', 'Son', 'Redmon', 'Wilke', 'Valenti', 'Paulino', 'Geyer', 'Blackwood', 'Leclair', 'Olguin', 'Maestas', 'Buckingham', 'Blythe', 'Samuelson', 'Bounds', 'Nakamura', 'Batts', 'Galarza', 'Sisco', 'Mcvay', 'Hynes', 'Mertz', 'Tremblay', 'Orosco', 'Prentice', 'Wilhite', 'Seiler', 'Archibald', 'Wooldridge', 'Winfield', 'Oden', 'Zelaya', 'Chestnut', 'Guardado', 'Mccallister', 'Canty', 'Grasso', 'Collett', 'Hylton', 'Easterling', 'Deangelis', 'Treadway', 'Ferrari', 'Ethridge', 'Milburn', 'Mercier', 'Bickford', 'Thibodeau', 'Bolanos', 'Fellows', 'Hales', 'Greathouse', 'Buchholz', 'Strunk', 'Faison', 'Purnell', 'Clegg', 'Steinmetz', 'Wojcik', 'Alcorn', 'Ballesteros', 'Basile', 'Paez', 'Armour', 'Devito', 'Tello', 'Flick', 'Yount', 'Estevez', 'Hitt', 'Houle', 'Cha', 'Travers', 'Cass', 'Loper', 'Getz', 'Cade', 'Gonsalves', 'Lear', 'Cromwell', 'Stephan', 'Ocasio', 'Deluna', 'Tolentino', 'Picard', 'Eaves', 'Toscano', 'Ault', 'Osburn', 'Ruvalcaba', 'Szabo', 'Kozak', 'Bear', 'Eck', 'Deyoung', 'Morehead', 'Herrin', 'Tillery', 'Royster', 'Kehoe', 'Swank', 'Yamamoto', 'Schoonover', 'Clanton', 'Stutzman', 'Swearingen', 'Martinson', 'Harrelson', 'Leo', 'Keyser', 'Guyton', 'Lucio', 'Veal', 'Vanwinkle', 'Angelo', 'Zamudio', 'Haddock', 'Quach', 'Thomsen', 'Curiel', 'Badger', 'Teel', 'Hibbard', 'Dvorak', 'Ballew', 'Falls', 'Bostick', 'Monaghan', 'Segal', 'Denning', 'Bahr', 'Serrato', 'Toomey', 'Lacroix', 'Antoine', 'Resendiz', 'Sperry', 'Rosser', 'Bogan', 'Gaspar', 'Amin', 'Schramm', 'Lemaster', 'Echevarria', 'Lilley', 'Poling', 'Villagomez', 'Conde', 'Delrio', 'Lerner', 'Leroy', 'Otis', 'Durkin', 'Lavender', 'Schenk', 'Ong', 'Guess', 'Alanis', 'Jacobo', 'Ramsay', 'Henke', 'Sledge', 'Whited', 'Frazer', 'Fortier', 'Macleod', 'Pascual', 'Casanova', 'Olds', 'Jenson', 'Tijerina', 'Flora', 'Casto', 'Rinaldi', 'Blunt', 'Fontana', 'Minnick', 'Larios', 'Raynor', 'Fung', 'Marek', 'Valladares', 'Clemmons', 'Gracia', 'Rohrer', 'Fryer', 'Folsom', 'Gearhart', 'Sumpter', 'Kraemer', 'Aceves', 'Pettigrew', 'Mclaurin', 'Southern', 'Barrows', 'Landeros', 'Janes', 'Deguzman', 'Mcfall', 'Fredericks', 'Ashe', 'Mauro', 'Merino', 'Windsor', 'Taber', 'Armijo', 'Bricker', 'Pitman', 'Morrill', 'Sanches', 'Deboer', 'Conlon', 'Reuter', 'Stegall', 'Clemente', 'Romine', 'Dykstra', 'Ehlers', 'Tallman', 'Lovato', 'Brent', 'Pearl', 'Pyles', 'Cloutier', 'Mccurry', 'Mckeever', 'Graziano', 'Heflin', 'Garman', 'Isaacson', 'Mcreynolds', 'Meister', 'Stroup', 'Everson', 'Halsey', 'Mcewen', 'Sparkman', 'Yager', 'Bucher', 'Berryman', 'Derr', 'Jester', 'Mickelson', 'Sayers', 'Whiteman', 'Riordan', 'Mcinnis', 'Jose', 'Goolsby', 'Stidham', 'Donley', 'Johnsen', 'Stallworth', 'Franke', 'Silvers', 'Reitz', 'Nathan', 'Brogan', 'Cardoso', 'Linville', 'Baptiste', 'Gorski', 'Rey', 'Hazen', 'Damon', 'Shores', 'Boling', 'Jablonski', 'Lemieux', 'Hecht', 'Dong', 'Langlois', 'Burrow', 'Hernandes', 'Mcdevitt', 'Pichardo', 'Lew', 'Stillwell', 'Savoy', 'Teixeira', 'Matheson', 'Hildreth', 'Warfield', 'Hogg', 'Tiller', 'Unruh', 'Rudy', 'Bristol', 'Matias', 'Buxton', 'Ambriz', 'Chiang', 'Pomeroy', 'Pogue', 'Hammock', 'Bethel', 'Miguel', 'Cassell', 'Towns', 'Bunker', 'Mcmichael', 'Kress', 'Newland', 'Whitehurst', 'Fazio', 'Batten', 'Calvillo', 'Wallen', 'Lung', 'Turney', 'Sparrow', 'Steadman', 'Battles', 'Berlin', 'Lindgren', 'Mckeon', 'Luckett', 'Spradlin', 'Sherry', 'Timmerman', 'Utley', 'Beale', 'Driggers', 'Hintz', 'Pellegrino', 'Hazel', 'Grim', 'Desmond', 'Spellman', 'Boren', 'Staten', 'Schlegel', 'Maya', 'Johnstone', 'Harwell', 'Pinson', 'Barreto', 'Spooner', 'Candelaria', 'Hammett', 'Sessions', 'Mckeown', 'Mccool', 'Gilson', 'Knudson', 'Irish', 'Spruill', 'Kling', 'Gerlach', 'Carnahan', 'Markley', 'Laporte', 'Flanigan', 'Spires', 'Cushman', 'Plante', 'Schlosser', 'Sachs', 'Jamieson', 'Hornsby', 'Armstead', 'Kremer', 'Madera', 'Thornburg', 'Briley', 'Garris', 'Jorgenson', 'Moorman', 'Vuong', 'Ard', 'Irons', 'Fiedler', 'Jackman', 'Kuehn', 'Jenks', 'Bristow', 'Mosby', 'Aldana', 'Maclean', 'Freund', 'Creighton', 'Smothers', 'Melson', 'Lundgren', 'Donato', 'Usher', 'Thornhill', 'Lowman', 'Mariano', 'Button', 'Mcbee', 'Cupp', 'Wickham', 'Destefano', 'Nutt', 'Rambo', 'Voigt', 'Talbott', 'Saxon', 'Cedillo', 'Mattison', 'Speed', 'Reiss', 'Nan', 'Westphal', 'Whittle', 'Bernhardt', 'Boatwright', 'Bussey', 'Rojo', 'Eden', 'Crites', 'Place', 'He', 'Chaves', 'Larose', 'Thames', 'Hoch', 'Knotts', 'Simone', 'Binkley', 'Koester', 'Pettis', 'Moye', 'Napolitano', 'Heffner', 'Sasser', 'Jessup', 'Aguiar', 'Ogrady', 'Pippin', 'Worth', 'Shively', 'Whitmire', 'Rutter', 'Cedeno', 'Welborn', 'Mcdougal', 'Angell', 'Sacco', 'Hailey', 'Neel', 'Paniagua', 'Pointer', 'Rohde', 'Holloman', 'Strother', 'Guffey', 'Fenner', 'Huntington', 'Shane', 'Yuen', 'Gosnell', 'Martini', 'Loving', 'Molloy', 'Olmos', 'Christ', 'Oaks', 'Ostrowski', 'Badillo', 'To', 'Laplante', 'Martindale', 'Richie', 'Pleasant', 'Palomino', 'Rodarte', 'Stamps', 'Peeples', 'Ries', 'Brownell', 'Walz', 'Arana', 'Tenney', 'Roddy', 'Lindner', 'Bolt', 'Rigsby', 'Matteson', 'Fielder', 'Randazzo', 'Deanda', 'Drayton', 'Ridge', 'Tarr', 'Shade', 'Upshaw', 'Woodcock', 'Miley', 'Hargrave', 'Langer', 'Yun', 'Wilkie', 'Choe', 'Ching', 'Dugas', 'Saul', 'Corder', 'Bobbitt', 'Spurgeon', 'Gladden', 'Woodbury', 'Tibbs', 'Mcgarry', 'Mcdaniels', 'Weigel', 'Bickel', 'Michels', 'Hughey', 'Apple', 'Bosley', 'Nesmith', 'Farber', 'Ackley', 'Goodin', 'Almond', 'Garrity', 'Bettencourt', 'Koss', 'Falcone', 'Lavigne', 'Rainwater', 'Nation', 'Blodgett', 'Dabney', 'Mabe', 'Trowbridge', 'Lundquist', 'Rosenberger', 'Dombrowski', 'Ferro', 'Evangelista', 'Bowlin', 'Mckelvey', 'Roderick', 'Michalski', 'Berkowitz', 'Sato', 'Mayorga', 'Corwin', 'Mckenney', 'Salyer', 'Walling', 'Abell', 'Palacio', 'Lash', 'Collado', 'Gass', 'Luis', 'Cooksey', 'Moll', 'Miramontes', 'Luster', 'Shrader', 'Toliver', 'Hard', 'Tu', 'Sena', 'Mckoy', 'Wainwright', 'Barela', 'Keiser', 'Hoag', 'Backus', 'Huskey', 'Brannan', 'Brumley', 'Palm', 'Boynton', 'Krauss', 'Steel', 'Jurado', 'Mulder', 'Paterson', 'Woolsey', 'Smithson', 'Joslin', 'Richman', 'Partida', 'Grisham', 'Wooden', 'Gooding', 'Fang', 'Mcdade', 'Spriggs', 'Fishman', 'Gabel', 'Rutkowski', 'Pride', 'Beals', 'Gaskin', 'Friday', 'Underhill', 'Rodas', 'Melo', 'Sipes', 'Zimmermann', 'Mosqueda', 'Haight', 'Beeson', 'Judy', 'Bankston', 'Pieper', 'Siebert', 'Horning', 'Butt', 'Bice', 'Sills', 'Philips', 'Eisenberg', 'Schumann', 'Conger', 'Bare', 'Hume', 'Nolasco', 'Trainor', 'Weatherly', 'Huebner', 'Bosch', 'Gayle', 'Kuhns', 'Byron', 'Glaze', 'Poulin', 'Enright', 'Large', 'Comeaux', 'Rountree', 'Tavarez', 'Beardsley', 'Rubino', 'Fee', 'Grider', 'Bechtel', 'Gaona', 'Wallin', 'Mashburn', 'Dalrymple', 'Gingerich', 'Vaccaro', 'Hass', 'Manzano', 'Tyner', 'Loza', 'Lowell', 'Kaufmann', 'Bischoff', 'Doolittle', 'Shivers', 'Valente', 'Bozeman', 'Howes', 'Felts', 'Feller', 'Justus', 'Schnell', 'Boettcher', 'Ivory', 'Thorson', 'Corn', 'Snook', 'Heilman', 'Baxley', 'Hasty', 'Wasserman', 'Barringer', 'Frankel', 'Peltier', 'Guarino', 'Avina', 'Sturdivant', 'Lien', 'Montemayor', 'Giddens', 'Valverde', 'Burchfield', 'Pang', 'Holbert', 'Rooks', 'Erdman', 'Mcmaster', 'Iniguez', 'Hartwell', 'Menchaca', 'Bordelon', 'Farkas', 'Chrisman', 'Metzler', 'Fredrick', 'Porterfield', 'Slayton', 'Quesada', 'Hembree', 'Peel', 'Woodley', 'Mather', 'Waltz', 'Totten', 'Forney', 'Woolley', 'Trombley', 'Yarborough', 'Javier', 'Durr', 'Macklin', 'Macon', 'Novotny', 'Amundson', 'Kidder', 'Flagg', 'Oxendine', 'Arguello', 'Marler', 'Penrod', 'Mallett', 'Council', 'Kinard', 'Bremer', 'Towne', 'Harless', 'Merkel', 'Giese', 'Fife', 'Byars', 'Grande', 'Kuo', 'Levi', 'Darr', 'Sanabria', 'Pounds', 'Roeder', 'Keim', 'Brush', 'Dreyer', 'Taveras', 'Furlong', 'Dorris', 'Prior', 'Musgrove', 'Weiler', 'Munro', 'Leake', 'Vollmer', 'Musick', 'Hetrick', 'Perdomo', 'Kester', 'Lock', 'Pine', 'Baskin', 'Bonham', 'Heffernan', 'Mandel', 'Sarver', 'Hamer', 'Duckett', 'Lozada', 'Stocker', 'Fulcher', 'Damato', 'Camargo', 'Shephard', 'Loftis', 'Winfrey', 'Rueda', 'Ledezma', 'Gottlieb', 'Lamont', 'Mackie', 'Bowe', 'Stockwell', 'Groth', 'Chavira', 'Lohr', 'Loftin', 'Gilmer', 'Cushing', 'Brody', 'Nowlin', 'Holiday', 'Shirk', 'Archie', 'Howerton', 'Matthew', 'Copley', 'Marchese', 'Echeverria', 'Soper', 'Cantwell', 'Nelms', 'Tuggle', 'Dumont', 'Bard', 'Gower', 'Mathes', 'Yeung', 'Buell', 'Bastian', 'Burd', 'Broadway', 'Peng', 'Greenwell', 'Vanover', 'Correll', 'Tindall', 'Bill', 'Mulcahy', 'Dionne', 'Rathbun', 'Baeza', 'Booher', 'Fried', 'Mcginley', 'Lavin', 'Atherton', 'Donnell', 'Bays', 'Riedel', 'Grenier', 'Zachary', 'Harold', 'Styles', 'Wisdom', 'Raley', 'Tamez', 'Arena', 'Morelli', 'Hazelwood', 'Somerville', 'Lapp', 'Rood', 'Salem', 'Pape', 'Olivera', 'Albritton', 'Carvajal', 'Zayas', 'Myer', 'Pohl', 'Haynie', 'Mariscal', 'Wampler', 'Rife', 'Leeper', 'Newhouse', 'Rodney', 'Vandenberg', 'Spitzer', 'Kingston', 'Wessel', 'Hartzell', 'Durden', 'Marques', 'Born', 'Scribner', 'Rocco', 'Germain', 'Tinoco', 'Valdovinos', 'Musselman', 'Vicente', 'Parsley', 'Crittenden', 'Tibbetts', 'Hulse', 'Mccleary', 'Barboza', 'Velarde', 'Brodie', 'Beaudoin', 'Moreira', 'Maggard', 'Jara', 'Ferrante', 'Overby', 'Friesen', 'Viola', 'Nelsen', 'Hash', 'Doane', 'Deese', 'Messick', 'Bay', 'Anton', 'Ingersoll', 'Saucier', 'Kwiatkowski', 'Rawson', 'Brophy', 'Ladner', 'Lehr', 'Weil', 'Yocum', 'Brasher', 'Denison', 'Hutcherson', 'Stowers', 'Geller', 'Fortenberry', 'Stebbins', 'Conyers', 'Toole', 'Stoker', 'Roden', 'Chitwood', 'Beeman', 'Fannin', 'Strait', 'Marlowe', 'Greenwald', 'Hann', 'Stumpf', 'Samaniego', 'Colton', 'Bogart', 'Morel', 'Montelongo', 'Boylan', 'Guido', 'Wyrick', 'Horsley', 'Tenorio', 'Sallee', 'Morehouse', 'Whyte', 'Neilson', 'Watanabe', 'Magallanes', 'Mudd', 'Kieffer', 'Brigham', 'Dollar', 'Huss', 'Albanese', 'Spiegel', 'Hixson', 'Rounds', 'Orth', 'Blanchette', 'Vanderpool', 'Pfaff', 'Speck', 'Shreve', 'Sevilla', 'Neri', 'Rohr', 'Ruble', 'Vanpelt', 'Rickman', 'Caraway', 'Berndt', 'Mchale', 'Ingalls', 'Roybal', 'Money', 'Mcdougall', 'Melancon', 'Wellington', 'Ingraham', 'Ritz', 'Lashley', 'Marchand', 'Schatz', 'Heiser', 'Eby', 'Wimmer', 'Orton', 'Atchley', 'Mumford', 'Bahena', 'Gammon', 'Buehler', 'Fike', 'Plank', 'Carrigan', 'Kempf', 'Cundiff', 'So', 'Sauls', 'Mohler', 'Grillo', 'Prichard', 'Pastor', 'Prasad', 'Babin', 'Bontrager', 'Weddle', 'Alberts', 'Theis', 'Lemoine', 'Hartnett', 'Kingsbury', 'Baran', 'Birmingham', 'Gault', 'Thorp', 'Wyant', 'Obryan', 'Santacruz', 'Camara', 'Whitehouse', 'Evenson', 'Halvorson', 'Palmieri', 'Hannan', 'Dew', 'Au', 'Nolte', 'Click', 'Wooley', 'Hung', 'Eberhardt', 'Rawlins', 'Sadowski', 'Sarabia', 'Soule', 'Millar', 'Engstrom', 'Cowles', 'Runyan', 'Mitchel', 'Torrence', 'Silverstein', 'Hewett', 'Pilgrim', 'Yeh', 'Rosenfeld', 'Mulholland', 'Hatley', 'Fawcett', 'Delrosario', 'Chinn', 'Bayless', 'Dee', 'Deane', 'Arriola', 'Duda', 'Koster', 'Rath', 'Karl', 'Weiland', 'Lemmon', 'Blaine', 'Scofield', 'Marston', 'Gist', 'Pinckney', 'Moritz', 'Mclellan', 'Fulkerson', 'Gaynor', 'Pitre', 'Warrick', 'Cobbs', 'Meacham', 'Guerin', 'Tedesco', 'Passmore', 'Northcutt', 'Ison', 'Cowell', 'Ream', 'Walther', 'Meraz', 'Tribble', 'Bumgarner', 'Gabbard', 'Dawes', 'Moncada', 'Chilton', 'Deweese', 'Rigby', 'Marte', 'Baylor', 'Valentino', 'Shine', 'August', 'Billups', 'Jarman', 'Jacks', 'Coffee', 'Friedrich', 'Marley', 'Hasan', 'Pennell', 'Abercrombie', 'Bazan', 'Strickler', 'Bruton', 'Lamm', 'Pender', 'Wingfield', 'Hoffer', 'Zahn', 'Chaplin', 'Reinke', 'Larosa', 'Maupin', 'Bunnell', 'Hassell', 'Guo', 'Galan', 'Paschal', 'Browder', 'Krantz', 'Milne', 'Pelayo', 'Emanuel', 'Mccluskey', 'Edens', 'Radtke', 'Alger', 'Duhon', 'Probst', 'Witmer', 'Hoagland', 'Saechao', 'Pitcher', 'Villalpando', 'Carswell', 'Roundtree', 'Kuhlman', 'Tait', 'Shaughnessy', 'Wei', 'Cravens', 'Sipe', 'Islas', 'Hollenbeck', 'Lockard', 'Perrone', 'Tapp', 'Santoyo', 'Jaffe', 'Klotz', 'Gilpin', 'Ehrlich', 'Klug', 'Stowell', 'Ibanez', 'Lazar', 'Osman', 'Larkins', 'Donofrio', 'Ericson', 'Schenck', 'Mouton', 'Medlock', 'Hubbell', 'Bixler', 'Nowicki', 'Muro', 'Homer', 'Grijalva', 'Ashmore', 'Harbison', 'Duffey', 'Osgood', 'Hardee', 'Jain', 'Wilber', 'Bolling', 'Lett', 'Phillip', 'Dipietro', 'Lefebvre', 'Batiste', 'Mcswain', 'Distefano', 'Hack', 'Strobel', 'Kipp', 'Doerr', 'Radcliffe', 'Cartagena', 'Paradis', 'Stilwell', 'Mccrea', 'Searles', 'Frausto', 'Hendershot', 'Gosselin', 'Islam', 'Freese', 'Stockman', 'Burwell', 'Vandiver', 'Engler', 'Geisler', 'Barham', 'Wiegand', 'Goncalves', 'Theriot', 'Doucet', 'Bridge', 'Catron', 'Blanks', 'Rahn', 'Schaub', 'Hershey', 'Strader', 'Buckman', 'Hartwig', 'Campo', 'Tsang', 'Luck', 'Bernardo', 'Marker', 'Pinkney', 'Benefield', 'Mcginty', 'Bode', 'Linden', 'Manriquez', 'Jaquez', 'Bedard', 'Flack', 'Hesse', 'Costanzo', 'Boardman', 'Carper', 'Word', 'Miracle', 'Edmunds', 'Bott', 'Flemming', 'Manns', 'Kesler', 'Piatt', 'Tankersley', 'Eberle', 'Roney', 'Belk', 'Vansickle', 'Varga', 'Hillard', 'Neubauer', 'Quirk', 'Chevalier', 'Mintz', 'Kocher', 'Casarez', 'Tinker', 'Elmer', 'Decarlo', 'Cordes', 'Berube', 'Kimbrell', 'Schick', 'Papa', 'Alderson', 'Callaghan', 'Renaud', 'Pardue', 'Krohn', 'Bloomfield', 'Coward', 'Ligon', 'Trask', 'Wingo', 'Book', 'Crutcher', 'Canter', 'Teran', 'Denman', 'Stackhouse', 'Chambliss', 'Gourley', 'Earls', 'Frizzell', 'Bergen', 'Abdullah', 'Sprinkle', 'Fancher', 'Urias', 'Lavelle', 'Baumgardner', 'Kahler', 'Baldridge', 'Alejandro', 'Plascencia', 'Hix', 'Rule', 'Mix', 'Petro', 'Hadden', 'Fore', 'Humes', 'Barnum', 'Laing', 'Maggio', 'Sylvia', 'Malinowski', 'Fell', 'Durst', 'Plant', 'Vaca', 'Abarca', 'Shirey', 'Parton', 'Ta', 'Ramires', 'Ochs', 'Gaitan', 'Ledoux', 'Darrow', 'Messenger', 'Chalmers', 'Schaller', 'Derby', 'Coakley', 'Saleh', 'Kirkman', 'Orta', 'Crabb', 'Spinks', 'Dinkins', 'Harrigan', 'Koller', 'Dorr', 'Carty', 'Sturgis', 'Shriver', 'Macedo', 'Feng', 'Bentz', 'Bedell', 'Osuna', 'Dibble', 'Dejong', 'Fender', 'Parada', 'Vanburen', 'Chaffee', 'Stott', 'Sigmon', 'Nicolas', 'Salyers', 'Magdaleno', 'Deering', 'Puentes', 'Funderburk', 'Jang', 'Christopherson', 'Sellars', 'Marcotte', 'Oster', 'Liao', 'Tudor', 'Specht', 'Chowdhury', 'Landa', 'Monge', 'Brake', 'Behnke', 'Llewellyn', 'Labelle', 'Mangan', 'Godsey', 'Truax', 'Lombard', 'Thurmond', 'Emerick', 'Blume', 'Mcginn', 'Beer', 'Marrs', 'Zinn', 'Rieger', 'Dilley', 'Thibault', 'Witkowski', 'Chi', 'Fielding', 'Tyrrell', 'Peeler', 'Northrup', 'Augustin', 'Toy', 'Geist', 'Schuman', 'Fairley', 'Duque', 'Villatoro', 'Dudek', 'Sonnier', 'Fritts', 'Worsham', 'Herold', 'Mcgehee', 'Caskey', 'Boatright', 'Lazaro', 'Deck', 'Palomo', 'Cory', 'Olivier', 'Baines', 'Fan', 'Futrell', 'Halpin', 'Garrido', 'Koonce', 'Fogg', 'Meneses', 'Mulkey', 'Restrepo', 'Ducharme', 'Slate', 'Toussaint', 'Sorrells', 'Fitts', 'Dickman', 'Alfred', 'Grimsley', 'Settles', 'Etienne', 'Eggert', 'Hague', 'Caldera', 'Hillis', 'Hollander', 'Haire', 'Theriault', 'Madigan', 'Kiernan', 'Parkhurst', 'Lippert', 'Jaynes', 'Moniz', 'Bost', 'Bettis', 'Sandy', 'Kuhl', 'Wilk', 'Borrego', 'Koon', 'Penney', 'Pizarro', 'Stitt', 'Koski', 'Galicia', 'Quiles', 'Real', 'Massa', 'Crone', 'Teeter', 'Voorhees', 'Hilbert', 'Nabors', 'Shupe', 'Blood', 'Mcauliffe', 'Waits', 'Blakley', 'Stoltz', 'Maes', 'Munroe', 'Rhoden', 'Abeyta', 'Milliken', 'Harkness', 'Almaraz', 'Remington', 'Raya', 'Frierson', 'Olszewski', 'Quillen', 'Westcott', 'Fu', 'Tolley', 'Olive', 'Mcclary', 'Corbitt', 'Lui', 'Lachance', 'Meagher', 'Cowley', 'Hudak', 'Cress', 'Mccrory', 'Talavera', 'Mclaren', 'Laurent', 'Bias', 'Whetstone', 'Hollister', 'Quevedo', 'Byerly', 'Berryhill', 'Folk', 'Conners', 'Kellum', 'Haro', 'Mallard', 'Mccants', 'Risner', 'Barros', 'Downes', 'Mayers', 'Loeffler', 'Mink', 'Hotchkiss', 'Bartz', 'Alt', 'Hindman', 'Bayne', 'Bagby', 'Colin', 'Treadwell', 'Hemingway', 'Bane', 'Heintz', 'Fite', 'Mccomb', 'Carmody', 'Kistler', 'Olinger', 'Vestal', 'Byrum', 'Seale', 'Turnage', 'Raber', 'Prendergast', 'Koons', 'Nickell', 'Benz', 'Mcculley', 'Lightner', 'Hamill', 'Castellon', 'Chesser', 'Moats', 'Buie', 'Svoboda', 'Wold', 'Macmillan', 'Boring', 'Terrill', 'Loveland', 'Gaskill', 'Verdugo', 'Yip', 'Oviedo', 'Hight', 'Carmack', 'Scheer', 'Dreher', 'Appleby', 'Lally', 'Kibler', 'Marra', 'Mcnamee', 'Cooks', 'Kavanaugh', 'Carrico', 'Alden', 'Dillman', 'Zamarripa', 'Serra', 'Gilligan', 'Nester', 'Sokol', 'Latta', 'Hanrahan', 'Ballou', 'Hollinger', 'Lux', 'Caton', 'Hamann', 'Sackett', 'Leiva', 'Emory', 'Barden', 'Houk', 'Lees', 'Deltoro', 'Lowrey', 'Mcevoy', 'Hibbs', 'Crossley', 'Rego', 'Melchor', 'Tull', 'Bramlett', 'Hsieh', 'Warwick', 'Sayles', 'Mapes', 'Pabon', 'Dearing', 'Stamm', 'Joshi', 'Quan', 'Larry', 'Nordstrom', 'Heisler', 'Bigham', 'Walston', 'Solberg', 'Bodnar', 'Posada', 'Mancilla', 'Ovalle', 'Harr', 'Mccaskill', 'Bromley', 'Koerner', 'Macpherson', 'Trudeau', 'Blais', 'Kiley', 'Lawlor', 'Suter', 'Rothman', 'Oberg', 'Seely', 'Maxfield', 'Truman', 'Salvatore', 'Fouts', 'Goulet', 'Munger', 'Sikora', 'Comeau', 'Oliphant', 'Baber', 'Hensel', 'Edelman', 'Farina', 'Albano', 'Aycock', 'Sung', 'Deckard', 'Steinke', 'Silveira', 'Servin', 'Rex', 'Franzen', 'Hecker', 'Gragg', 'Mcgriff', 'Ellingson', 'Kerrigan', 'An', 'Bartel', 'Priddy', 'Hodson', 'Tse', 'Arbogast', 'Arceneaux', 'Leatherman', 'Federico', 'Pridgen', 'Yim', 'Kowalczyk', 'Deberry', 'Lejeune', 'Elston', 'Mielke', 'Shelly', 'Stambaugh', 'Eagan', 'Rivard', 'Silvia', 'Lawhorn', 'Denis', 'Hendry', 'Wieland', 'Levinson', 'Marlin', 'Gerdes', 'Pfister', 'Carder', 'Pipkin', 'Angle', 'Hang', 'Hagerty', 'Rhinehart', 'Gao', 'Petit', 'Mccraw', 'Markle', 'Lupo', 'Busse', 'Marble', 'Bivins', 'Storms', 'Yuan', 'Waldman', 'Suh', 'Wyckoff', 'Stillman', 'Piotrowski', 'Abrego', 'Gregoire', 'Bogle', 'Wortham', 'Phung', 'Brister', 'Karnes', 'Deming', 'Ley', 'Carrasquillo', 'Curtiss', 'Appleton', 'Salley', 'Borja', 'Begum', 'Phifer', 'Shoup', 'Cawley', 'Deason', 'Castanon', 'Loucks', 'Hagler', 'Mcclinton', 'Dulaney', 'Hargett', 'Mcardle', 'Burcham', 'Philpot', 'Laroche', 'Breland', 'Hatten', 'Karp', 'Brummett', 'Boatman', 'Natale', 'Pepe', 'Mortimer', 'Sink', 'Voyles', 'Reeve', 'Honaker', 'Loredo', 'Ridgway', 'Donner', 'Lessard', 'Dever', 'Salomon', 'Hickson', 'Nicholls', 'Bushey', 'Osteen', 'Reavis', 'Rodman', 'Barahona', 'Knecht', 'Hinman', 'Faria', 'Dana', 'Bancroft', 'Hatchett', 'Hageman', 'Klaus', 'Castor', 'Lampkin', 'Dalessandro', 'Riffle', 'Korn', 'Savoie', 'Sandifer', 'Mciver', 'Magill', 'Delafuente', 'Widener', 'Vermillion', 'Dandrea', 'Mader', 'Woodman', 'Milan', 'Hollowell', 'Schaaf', 'Kao', 'Nail', 'Beaman', 'Hawkes', 'Mclane', 'Marchant', 'Scanlan', 'Syed', 'Peabody', 'Uhl', 'Schauer', 'Azevedo', 'Wolcott', 'Mick', 'Melgar', 'Pilcher', 'Burgin', 'Weiser', 'Daughtry', 'Theisen', 'Babbitt', 'Petry', 'Cotten', 'Fick', 'Eubank', 'Tolson', 'Judkins', 'Cronk', 'Wendel', 'Monteiro', 'Kissinger', 'Banta', 'Senn', 'Fix', 'Brehm', 'Rittenhouse', 'Banner', 'Elwell', 'Herd', 'Araiza', 'Hui', 'Nowell', 'Brett', 'Hua', 'Breeding', 'Pawlowski', 'Thompkins', 'Bocanegra', 'Bosworth', 'Dutcher', 'Cotto', 'Beecher', 'Callender', 'Hamlett', 'Benfield', 'Claudio', 'Reel', 'Brookshire', 'Helmick', 'Ryals', 'Winder', 'Thom', 'Robin', 'Overman', 'Furtado', 'Dacosta', 'Paddock', 'Dancy', 'Carpio', 'Manzanares', 'Zito', 'Favela', 'Beckley', 'Adrian', 'Flory', 'Nestor', 'Spell', 'Speight', 'Strawn', 'Beckner', 'Gause', 'Berglund', 'Ruppert', 'Mincey', 'Spinelli', 'Suzuki', 'Mizell', 'Kirksey', 'Bolduc', 'Kilmer', 'Wesson', 'Brinker', 'Urrutia', 'Markey', 'Brenneman', 'Haupt', 'Sievers', 'Puga', 'Halloran', 'Birdsong', 'Stancil', 'Wiener', 'Calvo', 'Macy', 'Cairns', 'Kahl', 'Vice', 'Ordaz', 'Grow', 'Lafrance', 'Dryden', 'Studer', 'Matney', 'Edward', 'Rackley', 'Gurrola', 'Demoss', 'Woolard', 'Oquinn', 'Hambrick', 'Christmas', 'Robey', 'Crayton', 'Haber', 'Arango', 'Newcomer', 'Groom', 'Corson', 'Harness', 'Rossman', 'Slaton', 'Schutz', 'Conant', 'Tedder', 'Sabin', 'Lowder', 'Womble', 'Jin', 'Monday', 'Garmon', 'Aronson', 'Skeen', 'Headrick', 'Lefevre', 'Whittemore', 'Pelton', 'Barner', 'Hildebrandt', 'Rick', 'Helmer', 'Grose', 'Zak', 'Schroder', 'Mahler', 'Keeley', 'Flinn', 'Jordon', 'Ozuna', 'Sand', 'Henkel', 'Turcotte', 'Vining', 'Bellinger', 'Neese', 'Hagerman', 'Mcmillin', 'Gaylord', 'Harney', 'Milano', 'Carothers', 'Depew', 'Bucci', 'Pirtle', 'Hafner', 'Dimas', 'Howlett', 'Reber', 'Abram', 'Davalos', 'Zajac', 'Pedro', 'Goodall', 'Kaylor', 'Wrenn', 'Gartner', 'Kell', 'Curl', 'Leathers', 'Spiller', 'Beason', 'Shattuck', 'Brewington', 'Pinon', 'Nazario', 'Wash', 'Ruggles', 'Matz', 'Capers', 'Dorsett', 'Wilmoth', 'Bracey', 'Lenhart', 'Devoe', 'Choy', 'Oswalt', 'Capone', 'Wayman', 'Parikh', 'Eastwood', 'Cofield', 'Rickert', 'Mccandless', 'Greenway', 'Majewski', 'Rigdon', 'Armbruster', 'Royce', 'Sterner', 'Swaim', 'Flournoy', 'Amezcua', 'Delano', 'Westerman', 'Grau', 'Claxton', 'Veliz', 'Haun', 'Roscoe', 'Mccafferty', 'Ringer', 'Volz', 'Blessing', 'Mcphail', 'Thelen', 'Gagliardi', 'Scholz', 'Genovese', 'Boyette', 'Squire', 'Naughton', 'Levitt', 'Erskine', 'Leffler', 'Manchester', 'Hallett', 'Whitmer', 'Gillett', 'Groce', 'Roos', 'Bejarano', 'Moskowitz', 'Constantine', 'Fidler', 'Roll', 'Schutte', 'Ohare', 'Warnock', 'Wester', 'Macgregor', 'Golding', 'Abner', 'Burgett', 'Bushnell', 'Brazil', 'Ascencio', 'Hock', 'Legrand', 'Eversole', 'Rome', 'Radcliff', 'Fuhrman', 'Schmit', 'Tew', 'Caro', 'Cowen', 'Marriott', 'Kephart', 'Hartung', 'Keil', 'Benally', 'Hazlett', 'Avant', 'Desrosiers', 'Kwong', 'Guyer', 'Penner', 'Avelar', 'Cashman', 'Stith', 'Orona', 'Rager', 'Johanson', 'Lanza', 'Min', 'Cool', 'Heine', 'Nissen', 'Buenrostro', 'Mcmullin', 'Oropeza', 'Hom', 'Degroot', 'Wescott', 'Hulbert', 'Shrum', 'Muncy', 'Littrell', 'Forest', 'Dyke', 'Garces', 'Cimino', 'Gebhardt', 'Hickerson', 'Satterwhite', 'Radke', 'Luckey', 'Coronel', 'Pugliese', 'Frazee', 'Siddiqui', 'Flatt', 'Abbey', 'Gerald', 'Bodine', 'Lora', 'Youngs', 'Catlett', 'Alexis', 'Luo', 'Youmans', 'Sherlock', 'Kinser', 'Wales', 'Dinsmore', 'Abramson', 'Stricker', 'Rumsey', 'Showers', 'Mickens', 'Tallent', 'Setzer', 'Etter', 'Allgood', 'Pagel', 'Jefferies', 'Bissell', 'Colombo', 'Musgrave', 'Kuehl', 'Raab', 'Kavanagh', 'Beane', 'Witcher', 'Pattison', 'Paulus', 'Gong', 'Mcgough', 'Burkhalter', 'Vanbuskirk', 'Kite', 'Sass', 'Lalonde', 'Gormley', 'Baier', 'Brauer', 'Stricklin', 'Napoli', 'Brotherton', 'Stansbury', 'Loggins', 'Sorrentino', 'Poff', 'Nieman', 'Roebuck', 'Reiner', 'Hovey', 'Walley', 'Leech', 'Gambino', 'Hammack', 'Burson', 'Tatro', 'Perrine', 'Carley', 'Stadler', 'Nason', 'Peckham', 'Gervais', 'Ables', 'Turman', 'Dore', 'Peavy', 'Addington', 'Tobar', 'Gilstrap', 'Brumbaugh', 'Gerhardt', 'Slusher', 'Nevins', 'Garofalo', 'Amick', 'Barrick', 'Race', 'Daggett', 'Manion', 'Noah', 'Kranz', 'Runge', 'Wysocki', 'Gillum', 'Verduzco', 'Alvey', 'Pettus', 'Sim', 'Cage', 'Mckean', 'Harrod', 'Weatherspoon', 'Takahashi', 'Wingard', 'Endres', 'Skiles', 'Wald', 'Finger', 'Reams', 'Ussery', 'Fricke', 'Jaworski', 'Cusick', 'Stanek', 'Shaner', 'Massaro', 'Ribeiro', 'Eades', 'Rue', 'Scharf', 'Standridge', 'Wojciechowski', 'Victoria', 'Galbreath', 'Lander', 'Martinelli', 'Raper', 'Karas', 'Tomas', 'La', 'Kizer', 'Gastelum', 'Delp', 'Sansone', 'Therrien', 'Brookins', 'Shi', 'Hammel', 'Polley', 'Riddell', 'Claiborne', 'Lampe', 'Benham', 'Braddock', 'Elwood', 'Mcminn', 'Amerson', 'Leija', 'Gambrell', 'Nuno', 'Mallon', 'Gard', 'Burford', 'Halley', 'Maley', 'Eicher', 'Caban', 'Rubenstein', 'Tighe', 'Harbaugh', 'Bergmann', 'Runnels', 'Carrizales', 'Gustin', 'Wight', 'Dominick', 'Cannady', 'Brace', 'Beauregard', 'Weitzel', 'Orcutt', 'Abrahamson', 'Jorge', 'Mccown', 'Harriman', 'Nicol', 'Gott', 'Andino', 'Tsosie', 'Shumway', 'Aucoin', 'Bowes', 'Hixon', 'Broom', 'Cate', 'Desantiago', 'Haug', 'Pinedo', 'Mowry', 'Moyers', 'Deangelo', 'Mcshane', 'Boley', 'Tiffany', 'Steger', 'Woodford', 'Whitford', 'Collette', 'Muth', 'Mansour', 'Schuh', 'Fortney', 'Khoury', 'Livengood', 'Haworth', 'Rusk', 'Mathieu', 'Peppers', 'Gehring', 'Faris', 'Diep', 'Rae', 'Hupp', 'Escalera', 'Gwin', 'Engelhardt', 'Bannon', 'Menjivar', 'Eberhart', 'Kershaw', 'Cottle', 'Palomares', 'Carrell', 'Galaviz', 'Willie', 'Troxell', 'Visser', 'Xie', 'Juan', 'Spector', 'Izzo', 'Woodring', 'Gilbreath', 'Bey', 'Giraldo', 'Neary', 'Ready', 'Toland', 'Benge', 'Thrower', 'Bemis', 'Hostetter', 'Dull', 'Poulos', 'Vanegas', 'Abad', 'Harker', 'Mei', 'Nigro', 'Messner', 'Peres', 'Hardaway', 'Crumpton', 'Dingman', 'Hipp', 'Lemley', 'Maloy', 'Ye', 'Neighbors', 'Proulx', 'Jamerson', 'Finkelstein', 'Payan', 'Holler', 'Simonds', 'Toms', 'Schulman', 'Aguero', 'Hinrichs', 'Steffens', 'Clapper', 'Delao', 'Knighton', 'Jahn', 'Mach', 'Heal', 'Detwiler', 'Corso', 'Toner', 'Rook', 'Brockway', 'Coulson', 'Delia', 'Giddings', 'Hermosillo', 'Ballenger', 'Persinger', 'Delk', 'Pedigo', 'Burg', 'Voelker', 'Ecker', 'Kile', 'Propst', 'Rascon', 'Stultz', 'Swindle', 'Swindell', 'Deaver', 'Welty', 'Sussman', 'Southworth', 'Child', 'Coston', 'Lei', 'Spillman', 'Hochstetler', 'Veach', 'Melcher', 'Chipman', 'Lebeau', 'Summerville', 'Peden', 'Lizarraga', 'Kingery', 'Leos', 'Fogel', 'Eckman', 'Burbank', 'Castano', 'Chartier', 'Medellin', 'Torrey', 'Peake', 'Swinney', 'Aziz', 'Reinert', 'Borg', 'Pires', 'Brooke', 'Forester', 'Greaves', 'Delapaz', 'Hunnicutt', 'Bierman', 'Stringfellow', 'Lavallee', 'Farnham', 'Gadson', 'Gainer', 'Kulp', 'Liston', 'Brooker', 'Loudermilk', 'Reza', 'Henshaw', 'Hinz', 'Brammer', 'Frisch', 'Toombs', 'Esquibel', 'Feinberg', 'Plaza', 'Bly', 'Encarnacion', 'Cockerham', 'Shealy', 'Haile', 'Nave', 'Chenoweth', 'Goto', 'Ernest', 'Staub', 'Marty', 'Huizar', 'Lammers', 'Mcavoy', 'Dishman', 'Giroux', 'Dowdell', 'Via', 'Fenn', 'Kain', 'Breckenridge', 'Egbert', 'Steelman', 'Gasper', 'Riojas', 'Parmer', 'Creed', 'Gillispie', 'Edgerton', 'Yen', 'Calder', 'Holmberg', 'Kreider', 'Landau', 'Eley', 'Lewallen', 'Quimby', 'Holladay', 'Du', 'Leland', 'Hyder', 'Omeara', 'Acton', 'Gaspard', 'Kennard', 'Renfroe', 'Hayman', 'Gladney', 'Glidden', 'Wilmot', 'Pearsall', 'Cahoon', 'Hallock', 'Grigg', 'Boggess', 'Lewin', 'Doering', 'Thach', 'Mcatee', 'Paulk', 'Rusch', 'Harrold', 'Suttles', 'Chiles', 'Sawyers', 'Roger', 'Kwok', 'Luevano', 'Coelho', 'Waldo', 'Ewell', 'Lagunas', 'Rude', 'Barrington', 'Mccomas', 'Whiteley', 'Jeanbaptiste', 'Darcy', 'Lussier', 'Kerley', 'Fordham', 'Moorehead', 'Welton', 'Nicely', 'Constantino', 'Townes', 'Giglio', 'Damian', 'Mckibben', 'Resnick', 'Endicott', 'Lindeman', 'Killion', 'Gwinn', 'Beaumont', 'Nord', 'Miceli', 'Fast', 'Bidwell', 'Sites', 'Drum', 'Maze', 'Abshire', 'Berner', 'Rhyne', 'Juliano', 'Wortman', 'Beggs', 'Winchell', 'Summerlin', 'Thrash', 'Biggers', 'Buckles', 'Barnwell', 'Thomasson', 'Wan', 'Arneson', 'Rodrigue', 'Wroblewski', 'Quiroga', 'Fulk', 'Dillingham', 'Rone', 'Mapp', 'Sattler', 'Letourneau', 'Gaudet', 'Mccaslin', 'Gurule', 'Huck', 'Hudspeth', 'Welter', 'Wittman', 'Hileman', 'Ewald', 'Yao', 'Kindred', 'Kato', 'Nickels', 'Tyndall', 'Sanmiguel', 'Mayle', 'Alfano', 'Eichelberger', 'Bee', 'Sheehy', 'Rogan', 'Philip', 'Dilworth', 'Midkiff', 'Hudgens', 'Killingsworth', 'Russel', 'Criss', 'Liddell', 'Eberly', 'Khalil', 'Lattimore', 'Koval', 'Maxson', 'Schram', 'Goodell', 'Catlin', 'Cofer', 'Alva', 'Sandler', 'Kunkle', 'Perron', 'Bushman', 'Edmonson', 'Roa', 'Nesbit', 'Ahearn', 'Garver', 'Bible', 'Barley', 'Struble', 'Oxford', 'Wulf', 'Marron', 'Haught', 'Bonnell', 'Pigg', 'Friel', 'Almaguer', 'Bowler', 'Mitchem', 'Fussell', 'Lemos', 'Savino', 'Boisvert', 'Torgerson', 'Annis', 'Dicks', 'Ruhl', 'Pepin', 'Wildman', 'Gendron', 'Melanson', 'Sherer', 'Duty', 'Cassel', 'Croteau', 'Rolon', 'Staats', 'Pass', 'Larocca', 'Sauter', 'Sacks', 'Boutwell', 'Hunsaker', 'Omara', 'Mcbroom', 'Lohman', 'Treat', 'Dufour', 'Brashear', 'Yepez', 'Lao', 'Telles', 'Manis', 'Mars', 'Shilling', 'Tingle', 'Macaluso', 'Rigney', 'Clair', 'Matsumoto', 'Agosto', 'Halbert', 'Dabbs', 'Eckstein', 'Mercurio', 'Berkley', 'Wachter', 'Langan', 'Peach', 'Carreno', 'Lepore', 'Howie', 'Thaxton', 'Arrowood', 'Weinberger', 'Eldred', 'Hooten', 'Raymer', 'Feaster', 'Bosco', 'Cataldo', 'Fears', 'Eckhardt', 'Mullinax', 'Spratt', 'Laboy', 'Marsden', 'Carlile', 'Bustillos', 'Crim', 'Surratt', 'Kurth', 'Gaul', 'Machuca', 'Rolfe', 'Lower', 'Edmiston', 'Millsap', 'Dehaven', 'Racine', 'Coney', 'Rinker', 'Maddux', 'Burmeister', 'Fenwick', 'Stocks', 'Forde', 'Pettway', 'Balderrama', 'Westover', 'Bloch', 'Burress', 'Hunley', 'Futch', 'Chee', 'Alvarenga', 'Bostwick', 'Cleaver', 'Pelkey', 'Bryce', 'Pisano', 'Qureshi', 'Varghese', 'Cunha', 'Hellman', 'Grass', 'Luker', 'Hazelton', 'Cathcart', 'Yamada', 'Gallego', 'Menke', 'Yingling', 'Merriweather', 'Fleury', 'Salmeron', 'Metcalfe', 'Brook', 'Freitag', 'Malek', 'Obregon', 'Blain', 'Mellott', 'Alam', 'Bessette', 'Moncrief', 'Arvizu', 'Botts', 'Moorer', 'Landreth', 'Hulett', 'Marinelli', 'Falco', 'Silvestri', 'Gottschalk', 'Thiele', 'Kight', 'Warrington', 'Huckaby', 'Ledet', 'Charbonneau', 'Crozier', 'Mohan', 'Stroh', 'Bolinger', 'Delvecchio', 'Macfarlane', 'Cribbs', 'Mcloughlin', 'Maynor', 'Ming', 'Digiovanni', 'Truesdale', 'Pfeffer', 'Benn', 'Chaparro', 'Englert', 'Spano', 'Ogletree', 'Yancy', 'Swick', 'Hallmark', 'Mattern', 'Tryon', 'Plumb', 'Martineau', 'Man', 'Grube', 'Holst', 'Nez', 'Belden', 'Aikens', 'Litton', 'Moorhead', 'Dufresne', 'Bonney', 'Heyward', 'Halliday', 'Ito', 'Crossman', 'Gast', 'Levan', 'Wine', 'Desouza', 'Kornegay', 'Nam', 'Keough', 'Stotts', 'Dickenson', 'Ousley', 'Leduc', 'Revels', 'Dizon', 'Arreguin', 'Shockey', 'Alegria', 'Blades', 'Ignacio', 'Mellon', 'Ebersole', 'Sain', 'Weissman', 'Wargo', 'Claypool', 'Zorn', 'Julien', 'Hinshaw', 'Alberto', 'Garduno', 'Kellar', 'Rizo', 'Labonte', 'Humble', 'Downer', 'Lykins', 'Tower', 'Vanhouten', 'Chairez', 'Campa', 'Blizzard', 'Standley', 'Reiser', 'Whitener', 'Menefee', 'Nalley', 'Lasher', 'Strang', 'Smock', 'Moralez', 'Kiel', 'Moffatt', 'Behm', 'Hackworth', 'Dirks', 'Kratz', 'Guillot', 'Tittle', 'Stlouis', 'Seymore', 'Searle', 'Utter', 'Wilborn', 'Dortch', 'Duron', 'Cardinal', 'Spikes', 'Arambula', 'Cutter', 'Dibenedetto', 'Botelho', 'Bedwell', 'Kilby', 'Bottoms', 'Cassady', 'Rothwell', 'Bilodeau', 'Markowitz', 'Baucom', 'Valley', 'Esqueda', 'Depalma', 'Laskowski', 'Hopp', 'Casale', 'Perreault', 'Shuster', 'Wolter', 'Raby', 'Cyrus', 'Tseng', 'Georges', 'Das', 'Wilfong', 'Schlueter', 'Woolf', 'Stickney', 'Mcinerney', 'Curcio', 'Fowlkes', 'Boldt', 'Zander', 'Shropshire', 'Antonelli', 'Froehlich', 'Butterworth', 'Stedman', 'Broadnax', 'Kroeger', 'Kellner', 'Monreal', 'Armas', 'Mcguinness', 'Canterbury', 'Weisman', 'Hilburn', 'Carruthers', 'Pell', 'Peele', 'Devaney', 'Owings', 'Mar', 'Liggett', 'Breslin', 'Soucy', 'Aguila', 'Weidman', 'Mingo', 'Tarango', 'Winger', 'Poteet', 'Acree', 'Mcnew', 'Leatherwood', 'Aubrey', 'Waring', 'Soler', 'Roof', 'Sunderland', 'Blackford', 'Rabe', 'Hepler', 'Leonardo', 'Spina', 'Smythe', 'Alex', 'Barta', 'Bybee', 'Campagna', 'Pete', 'Batchelder', 'Gurney', 'Wyche', 'Schutt', 'Rashid', 'Almazan', 'Pahl', 'Perri', 'Viramontes', 'Cavender', 'Snapp', 'Newson', 'Sandhu', 'Fernando', 'Stockdale', 'Garfield', 'Ealy', 'Mcfarlin', 'Bieber', 'Callan', 'Arruda', 'Oquendo', 'Levasseur', 'Maple', 'Kowal', 'Kushner', 'Naquin', 'Shouse', 'Mcquade', 'Cai', 'Smedley', 'Gober', 'Saiz', 'Brunelle', 'Arbuckle', 'Landes', 'Mak', 'Korte', 'Oxley', 'Boger', 'Mickey', 'Lent', 'Cureton', 'Husted', 'Eidson', 'Boyett', 'Kitts', 'Shope', 'Hance', 'Jessen', 'Litchfield', 'Torre', 'Cargill', 'Herren', 'Straight', 'Merz', 'Weese', 'Sperling', 'Lapierre', 'Yung', 'Doggett', 'Cauley', 'Hardeman', 'Margolis', 'Watford', 'Seltzer', 'Fullmer', 'Timberlake', 'Butz', 'Duquette', 'Olin', 'Leverett', 'Hartford', 'Otte', 'Beaton', 'Grimaldi', 'Marotta', 'Carlsen', 'Cullum', 'Monte', 'Haygood', 'Middlebrooks', 'Lazarus', 'Shiver', 'Ivie', 'Niemi', 'Lacombe', 'Judson', 'Ginsberg', 'Firestone', 'Izquierdo', 'Deel', 'Jacinto', 'Towers', 'Fritsch', 'Albin', 'Kaminsky', 'Yin', 'Wrobel', 'Birdwell', 'Krieg', 'Danforth', 'Florio', 'Saito', 'Clift', 'Duck', 'Matt', 'Moxley', 'Barbieri', 'Klatt', 'Saltzman', 'Chesney', 'Bojorquez', 'Cosentino', 'Lodge', 'Converse', 'Decastro', 'Gerhart', 'Music', 'Danley', 'Santangelo', 'Bevins', 'Coen', 'Seibel', 'Lindemann', 'Dressler', 'Newport', 'Bedolla', 'Lillie', 'Rhone', 'Penaloza', 'Swart', 'Niemeyer', 'Pilkington', 'Matta', 'Hollifield', 'Gillman', 'Montana', 'Maroney', 'Stenger', 'Loos', 'Wert', 'Brogdon', 'Gandhi', 'Bent', 'Tabb', 'Sikorski', 'Hagedorn', 'Hannigan', 'Hoss', 'Conlin', 'Trott', 'Fall', 'Granado', 'Bartell', 'Rubalcava', 'Neves', 'Poynter', 'Alton', 'Paschall', 'Waltman', 'Parke', 'Kittle', 'Czarnecki', 'Bloodworth', 'Knorr', 'Timms', 'Derry', 'Messier', 'Saad', 'Cozart', 'Sutphin', 'Puryear', 'Gatto', 'Whitacre', 'Verdin', 'Bloomer', 'Brundage', 'Brian', 'Seger', 'Clare', 'Balch', 'Tharpe', 'Rayford', 'Halter', 'Barefoot', 'Gonsalez', 'Lomas', 'Monzon', 'Howarth', 'Mccready', 'Gudino', 'Serafin', 'Sanfilippo', 'Minnich', 'Eldredge', 'Malave', 'Greeley', 'Sisneros', 'Kangas', 'Peery', 'Lunn', 'Lukas', 'Bunce', 'Riccio', 'Thies', 'Stivers', 'Conard', 'Mullaney', 'Catalan', 'Omar', 'Theobald', 'Jeffcoat', 'Kucera', 'Borkowski', 'Coomer', 'Mathison', 'Croom', 'Rushton', 'Stites', 'Pendley', 'Till', 'Oconner', 'Forsberg', 'Wages', 'Fillmore', 'Barcenas', 'Gillard', 'Leak', 'Towle', 'Esser', 'Dunlop', 'Quackenbush', 'Archambault', 'Buller', 'Newlin', 'Urquhart', 'Shanley', 'Mote', 'Ippolito', 'Rozier', 'Reidy', 'Gregor', 'Swaney', 'Bradfield', 'Fudge', 'More', 'Tester', 'Higley', 'Dambrosio', 'Bullington', 'Highsmith', 'Silas', 'Felker', 'Sawicki', 'Beltz', 'Albarran', 'Aitken', 'Findlay', 'Looper', 'Tooley', 'Lasley', 'Moynihan', 'Ratcliffe', 'Grizzle', 'Souders', 'Nussbaum', 'Suber', 'Macdougall', 'Waddle', 'Brawner', 'Tucci', 'Cosme', 'Walk', 'Gordy', 'Tarrant', 'Rosenblum', 'Huth', 'Bridgeman', 'Hinkley', 'Gehrke', 'Boden', 'Suazo', 'Gambill', 'Widner', 'Chick', 'Mccollough', 'Hassler', 'Odum', 'Pawlak', 'Prevost', 'Slavin', 'Fetters', 'Beamon', 'Renshaw', 'Deng', 'Plourde', 'Holstein', 'Rye', 'Holliman', 'Melville', 'Messinger', 'Turcios', 'Garnica', 'Feeley', 'Mariani', 'Otten', 'Dorado', 'Mortenson', 'Meissner', 'Scarlett', 'Sweitzer', 'Glisson', 'Desjardins', 'Penland', 'Elledge', 'Crumley', 'Deen', 'Shih', 'Heuer', 'Gloria', 'Lail', 'Mcandrew', 'Mcnaughton', 'Cortese', 'Stgermain', 'Hammon', 'Leininger', 'Flickinger', 'Dement', 'Bumgardner', 'Tessier', 'Fulford', 'Cervantez', 'Wisner', 'Shulman', 'Sabol', 'Papp', 'Strasser', 'Sartin', 'Rothstein', 'Grote', 'Beaudry', 'Deville', 'Roop', 'Villar', 'Bussell', 'Bowyer', 'Yoshida', 'Hertz', 'Countryman', 'Hoey', 'Roseberry', 'Schock', 'Boozer', 'Mccowan', 'Kirschner', 'Lechner', 'Winkelman', 'Witham', 'Thurber', 'Depriest', 'Chenault', 'Moten', 'Tillotson', 'Guan', 'Ketcham', 'Jiles', 'Grosso', 'Nottingham', 'Kellam', 'Alejo', 'Thoma', 'Marchetti', 'Holifield', 'Fortson', 'Leasure', 'Mceachern', 'Oceguera', 'Carleton', 'Weekley', 'Kinsella', 'Harvell', 'Waldon', 'Kean', 'Chancellor', 'Blosser', 'Detweiler', 'Presnell', 'Beachy', 'Lingle', 'Plumley', 'Knopp', 'Gamache', 'Atwater', 'Caine', 'Woodland', 'Terwilliger', 'Moller', 'Cleland', 'Cottingham', 'Janke', 'Willman', 'Dann', 'Mangrum', 'Shuck', 'Paden', 'Adelman', 'Brim', 'Tullis', 'Hertel', 'Gallaher', 'Leopold', 'Donegan', 'Popovich', 'Gusman', 'Chatham', 'Schooley', 'Pinder', 'Heise', 'Maines', 'Nystrom', 'Jahnke', 'Poon', 'Murphree', 'Pelaez', 'Risley', 'Sohn', 'Shim', 'Armentrout', 'Kastner', 'Philpott', 'Mao', 'Pursley', 'Mangold', 'Mccourt', 'Hollar', 'Desmarais', 'Debord', 'Gullett', 'Gaeta', 'Bae', 'Houlihan', 'Gorton', 'Steinman', 'Santo', 'Snelling', 'Corpuz', 'Look', 'Scudder', 'Treece', 'Binns', 'Sokolowski', 'Harner', 'Gallup', 'Marti', 'Teasley', 'Markel', 'Casiano', 'Nicks', 'Recinos', 'Paradise', 'Colman', 'Orange', 'Mele', 'Medford', 'Templin', 'Zuber', 'Mackin', 'Brodsky', 'Householder', 'Wirtz', 'Hackman', 'Tippett', 'Polson', 'Colston', 'Cerna', 'Herald', 'Shults', 'Shubert', 'Mertens', 'Dave', 'Duffield', 'Vanness', 'Mayne', 'Driskell', 'Percy', 'Lauderdale', 'Cipriano', 'Theodore', 'Colella', 'Kiger', 'Brownfield', 'Stella', 'Wideman', 'Maye', 'Chisolm', 'Muldoon', 'Fitzwater', 'Harville', 'Dixson', 'Burkey', 'Hartsfield', 'Schade', 'Brawley', 'Pelfrey', 'Tennyson', 'Whitted', 'Silvas', 'Harbour', 'Krupa', 'Peraza', 'Erdmann', 'Halpern', 'Finnerty', 'Mackinnon', 'Humbert', 'Mccarley', 'Doster', 'Kugler', 'Livesay', 'Force', 'Haberman', 'Lamp', 'Hector', 'Charron', 'Woosley', 'Rein', 'Ashburn', 'Greenleaf', 'Niemann', 'Carillo', 'Skelly', 'Nunnally', 'Renfrow', 'Prickett', 'Angus', 'Bednar', 'Nightingale', 'Steinbach', 'Warnick', 'Jason', 'Hans', 'Lydon', 'Rutland', 'Alleman', 'Hawn', 'Malin', 'Beech', 'Auger', 'Desilva', 'Izaguirre', 'Isham', 'Mandujano', 'Glasser', 'Dimarco', 'Berumen', 'Nipper', 'Pegram', 'Sundberg', 'Labbe', 'Mcphee', 'Crafton', 'Agustin', 'Cantor', 'Beller', 'Bang', 'Lawyer', 'Croy', 'Kyles', 'Winans', 'Battista', 'Jost', 'Bakken', 'Dandridge', 'Mustafa', 'Ice', 'Eklund', 'Montesdeoca', 'Hermes', 'Grimaldo', 'Vannoy', 'Grainger', 'Lamas', 'Tarantino', 'Witter', 'Worthen', 'Basinger', 'Cowden', 'Hiles', 'Mcanally', 'Felipe', 'Gallimore', 'Kapp', 'Makowski', 'Copenhaver', 'Ramer', 'Gideon', 'Bowker', 'Wilkens', 'Seeger', 'Huntsman', 'Palladino', 'Jessee', 'Kittrell', 'Rolle', 'Ciccone', 'Kolar', 'Brannen', 'Bixby', 'Pohlman', 'Strachan', 'Lesher', 'Fleischer', 'Umana', 'Murphey', 'Mcentire', 'Rabon', 'Mcauley', 'Bunton', 'Soileau', 'Sheriff', 'Borowski', 'Mullens', 'Larrabee', 'Prouty', 'Malley', 'Sumrall', 'Reisinger', 'Surber', 'Kasten', 'Shoemake', 'Yowell', 'Bonin', 'Bevan', 'Bove', 'Boe', 'Hazard', 'Slay', 'Carraway', 'Kaczmarek', 'Armitage', 'Lowther', 'Sheaffer', 'Farah', 'Atencio', 'Ung', 'Kirkham', 'Cavanagh', 'Mccutchen', 'Shoop', 'Nickles', 'Borchardt', 'Durkee', 'Maus', 'Shedd', 'Petrillo', 'Brainard', 'Eddings', 'Fanelli', 'Seo', 'Heaney', 'Drennan', 'Mcgarvey', 'Saddler', 'Lucia', 'Higa', 'Gailey', 'Groh', 'Hinckley', 'Griner', 'Norfleet', 'Caplan', 'Rademacher', 'Souder', 'Autrey', 'Eskridge', 'Drumm', 'Fiske', 'Giffin', 'Townley', 'Derose', 'Burrus', 'Castrejon', 'Emmert', 'Cothran', 'Hartsell', 'Kilburn', 'Riggle', 'Trussell', 'Mulvey', 'Barto', 'Crank', 'Lovely', 'Woodhouse', 'Powe', 'Pablo', 'Zack', 'Murchison', 'Dicarlo', 'Kessel', 'Hagood', 'Rost', 'Edson', 'Blakeney', 'Fant', 'Brodeur', 'Jump', 'Spry', 'Laguna', 'Lotz', 'Bergquist', 'Collard', 'Mash', 'Rideout', 'Bilbrey', 'Selman', 'Fortunato', 'Holzer', 'Pifer', 'Mcabee', 'Talamantes', 'Tollefson', 'Pastore', 'Crew', 'Wilcher', 'Kutz', 'Stallard', 'Ressler', 'Fehr', 'Piercy', 'Lafond', 'Digiacomo', 'Schuck', 'Winkle', 'Graybill', 'Plata', 'Gribble', 'Odle', 'Fraga', 'Bressler', 'Moultrie', 'Tung', 'Charette', 'Marvel', 'Kerby', 'Mori', 'Hamman', 'Favors', 'Freeze', 'Delisle', 'Straw', 'Dingle', 'Elizalde', 'Cabello', 'Zalewski', 'Funkhouser', 'Abate', 'Nero', 'Holston', 'Josey', 'Schreck', 'Shroyer', 'Paquin', 'Bing', 'Chauvin', 'Maria', 'Melgoza', 'Arms', 'Caddell', 'Pitchford', 'Sternberg', 'Rana', 'Lovelady', 'Strouse', 'Macarthur', 'Lechuga', 'Wolfson', 'Mcglynn', 'Koo', 'Stoops', 'Tetreault', 'Lepage', 'Duren', 'Hartz', 'Kissel', 'Gish', 'Largent', 'Henninger', 'Janson', 'Carrick', 'Kenner', 'Haack', 'Diego', 'Wacker', 'Wardell', 'Ballentine', 'Smeltzer', 'Bibb', 'Winton', 'Bibbs', 'Reinhard', 'Nilsen', 'Edison', 'Kalinowski', 'June', 'Hewlett', 'Blaisdell', 'Zeman', 'Chon', 'Board', 'Nealy', 'Moretti', 'Wanner', 'Bonnett', 'Hardie', 'Mains', 'Cordeiro', 'Karim', 'Kautz', 'Craver', 'Colucci', 'Congdon', 'Mounts', 'Kurz', 'Eder', 'Merryman', 'Soles', 'Dulin', 'Lubin', 'Mcgowen', 'Hockenberry', 'Work', 'Mazzola', 'Crandell', 'Mcgrady', 'Caruthers', 'Govea', 'Meng', 'Fetter', 'Trusty', 'Weintraub', 'Hurlburt', 'Reiff', 'Nowakowski', 'Hoard', 'Densmore', 'Blumenthal', 'Neale', 'Schiff', 'Raleigh', 'Steiger', 'Marmolejo', 'Jessie', 'Palafox', 'Tutt', 'Keister', 'Core', 'Im', 'Wendell', 'Bennet', 'Canning', 'Krull', 'Patti', 'Zucker', 'Schlesinger', 'Wiser', 'Dunson', 'Olmedo', 'Hake', 'Champlin', 'Braley', 'Wheelock', 'Geier', 'Janis', 'Turek', 'Grindstaff', 'Schaffner', 'Deas', 'Sirois', 'Polito', 'Bergin', 'Schall', 'Vineyard', 'Pellegrini', 'Corrado', 'Oleson', 'List', 'Dameron', 'Parkin', 'Flake', 'Hollingshead', 'Chancey', 'Hufford', 'Morell', 'Kantor', 'Chasteen', 'Laborde', 'Sessoms', 'Hermanson', 'Burnell', 'Dewberry', 'Tolman', 'Glasscock', 'Durfee', 'Gilroy', 'Wilkey', 'Dungan', 'Saravia', 'Weigand', 'Bigler', 'Vancleave', 'Burlingame', 'Roseman', 'Stiffler', 'Gagliano', 'Kates', 'Awad', 'Knepp', 'Rondeau', 'Bertsch', 'Wolverton', 'Walcott', 'Poss', 'Frisby', 'Wexler', 'Reinhold', 'Krol', 'Stuck', 'Ricciardi', 'Ardoin', 'Michaelson', 'Lillard', 'Burciaga', 'Birchfield', 'Patch', 'Silvey', 'Simmonds', 'Siu', 'Press', 'Deans', 'Riegel', 'Ismail', 'Magallon', 'Diller', 'Hine', 'Michalak', 'Dones', 'Deitz', 'Gulledge', 'Stroman', 'Kobayashi', 'Hafer', 'Berk', 'Landin', 'Gilles', 'Obryant', 'Cheeks', 'Gress', 'Lutes', 'Raphael', 'Pizano', 'Bachmann', 'Cifuentes', 'Earp', 'Gilreath', 'Peluso', 'Hubbs', 'Alvis', 'Peer', 'Dutra', 'Stetson', 'Constant', 'Benford', 'Sorto', 'Cater', 'Rosier', 'Isenberg', 'Shanklin', 'Veloz', 'Ramage', 'Dunford', 'Ku', 'Hames', 'Eddins', 'Ruano', 'Frink', 'Flower', 'Beadle', 'Rochester', 'Fontes', 'Mefford', 'Barwick', 'Millen', 'Stelly', 'Cann', 'Rayner', 'Carruth', 'Wendling', 'Shutt', 'Hazzard', 'Maravilla', 'Gregorio', 'Pavlik', 'Hudnall', 'Aston', 'Mcglothlin', 'Weise', 'Devereaux', 'Belle', 'Borst', 'Burdett', 'Frisbie', 'Rummel', 'Rentz', 'Cobos', 'Kimura', 'Neu', 'Winner', 'Candelario', 'Callis', 'Basso', 'Mckim', 'Tai', 'Eskew', 'Lair', 'Pye', 'Knuth', 'Scarberry', 'Alter', 'Mcgann', 'Anson', 'Drews', 'Zuckerman', 'Petrone', 'Ludlow', 'Bechtold', 'Nair', 'Rennie', 'Rhine', 'Fleetwood', 'Sudduth', 'Leftwich', 'Hardiman', 'Northrop', 'Banker', 'Killen', 'Mastin', 'Mcmurry', 'Jasinski', 'Taliaferro', 'Mathers', 'Sheikh', 'Nuss', 'Jesse', 'Zabel', 'Crotty', 'Kamp', 'Fleenor', 'Halcomb', 'Eady', 'Vella', 'Demars', 'Ensley', 'Delosreyes', 'Zendejas', 'Leeds', 'Just', 'Oday', 'Dills', 'Zeng', 'Barriga', 'Millican', 'Cascio', 'Eakin', 'Argo', 'Borland', 'Cover', 'Diorio', 'Coria', 'Lease', 'Pinkham', 'Reichard', 'Guadalupe', 'Hansel', 'Bye', 'Westerfield', 'Gales', 'Mickle', 'Licata', 'Cram', 'Bracy', 'Motta', 'Imhoff', 'Siegfried', 'Merry', 'Swiger', 'Ton', 'Hersey', 'Marrone', 'Ginter', 'Miele', 'Breton', 'Scheffler', 'Pray', 'Stapp', 'Bogard', 'Towner', 'Mcelhaney', 'Bridgewater', 'Waldner', 'Quijano', 'Galante', 'Quesenberry', 'Rourke', 'Harshman', 'Traver', 'Alvares', 'Mcgaha', 'Nyberg', 'Pharr', 'Lerch', 'Sok', 'Rosson', 'Wiggs', 'Mcelveen', 'Dimaggio', 'Rettig', 'Ahumada', 'Hetzel', 'Welling', 'Chadwell', 'Swink', 'Mckinzie', 'Kwak', 'Chabot', 'Tomaszewski', 'Bonanno', 'Lesko', 'Teter', 'Stalnaker', 'Ober', 'Hovis', 'Hosey', 'Chaudhry', 'Fey', 'Vital', 'Earhart', 'Heins', 'Crowther', 'Hanner', 'Behr', 'Billington', 'Vogler', 'Hersh', 'Perlman', 'Given', 'Files', 'Partain', 'Coddington', 'Jardine', 'Grimmett', 'Springs', 'Macomber', 'Horgan', 'Arrieta', 'Charley', 'Josephson', 'Tupper', 'Provenzano', 'Celaya', 'Mcvicker', 'Sigala', 'Wimer', 'Ayon', 'Dossantos', 'Norvell', 'Lorenzen', 'Pasquale', 'Lambright', 'Goings', 'Defelice', 'Wen', 'Sigman', 'Gaylor', 'Rehm', 'Carino', 'Werth', 'Forehand', 'Hanke', 'Lasalle', 'Mitchum', 'Priester', 'Lefler', 'Celis', 'Lesser', 'Fitz', 'Wentzel', 'Lavery', 'Klassen', 'Shiflett', 'Hedden', 'Henn', 'Coursey', 'Drain', 'Delorenzo', 'Haws', 'Stansberry', 'Trump', 'Dantzler', 'Chaidez', 'Mcsweeney', 'Griffen', 'Trail', 'Gandara', 'Brunk', 'Kennon', 'Coss', 'Blackmore', 'Metts', 'Gluck', 'Blackshear', 'Cogan', 'Boney', 'Encinas', 'Adamski', 'Roberge', 'Schuette', 'Valero', 'Barroso', 'Antunez', 'Mohammad', 'Housley', 'Escoto', 'Ullrich', 'Helman', 'Trost', 'Lafave', 'Faith', 'Blaney', 'Kershner', 'Hoehn', 'Roemer', 'Isley', 'Lipinski', 'Claus', 'Caulfield', 'Paiz', 'Leyba', 'Robinett', 'Lambeth', 'Tarpley', 'Essex', 'Eilers', 'Epley', 'Murdoch', 'Sandstrom', 'Laux', 'Domingue', 'Grundy', 'Bellows', 'Spindler', 'Boos', 'Bhatt', 'Tye', 'Salamone', 'Cirillo', 'Troup', 'Jemison', 'Calzada', 'Dowden', 'Geraci', 'Dunphy', 'Sack', 'Sloane', 'Hathcock', 'Yap', 'Ronquillo', 'Willette', 'Partlow', 'Dear', 'Tunstall', 'Kiss', 'Huhn', 'Seabolt', 'Beene', 'Sather', 'Lockridge', 'Despain', 'Wines', 'Mcalpine', 'Wadley', 'Dey', 'Loring', 'Meadors', 'Buettner', 'Lavalley', 'Bugg', 'Creek', 'Millett', 'Pumphrey', 'Fregoso', 'Merkle', 'Sheffer', 'Glassman', 'Groover', 'Sweatt', 'Colunga', 'Boykins', 'Seng', 'Stutz', 'Brann', 'Blakey', 'Munos', 'Geddes', 'Avendano', 'Molitor', 'Diedrich', 'Langham', 'Kindle', 'Lacour', 'Buckler', 'Corum', 'Bakke', 'Godin', 'Kerner', 'Tobey', 'Kubiak', 'Hoyer', 'Hedge', 'Priebe', 'Callison', 'Lahr', 'Shears', 'Snavely', 'Blatt', 'Mcpeak', 'Tinney', 'Sullins', 'Bernhard', 'Gibb', 'Vaillancourt', 'Paugh', 'Funes', 'Romans', 'Maurice', 'Lough', 'Kerwin', 'Sanger', 'Vierra', 'Markus', 'Comfort', 'Krall', 'Spies', 'Malcom', 'Vizcarra', 'Beamer', 'Kellerman', 'Mcroberts', 'Waterhouse', 'Stromberg', 'Persons', 'Whitesell', 'Harty', 'Rosenblatt', 'Broadwater', 'Clardy', 'Shackleford', 'Jacquez', 'Brittingham', 'Lindahl', 'Feliz', 'Danna', 'Garwood', 'Heron', 'Southwick', 'Dehoyos', 'Cottrill', 'Mellor', 'Goldfarb', 'Grieco', 'Helgeson', 'Vandusen', 'Heinen', 'Batt', 'Ruch', 'Garretson', 'Pankey', 'Caudillo', 'Jakubowski', 'Plowman', 'Starcher', 'Wessels', 'Moose', 'Rosner', 'Louden', 'Walczak', 'Poulsen', 'Mcchesney', 'Karns', 'Casares', 'Cusack', 'Cespedes', 'Cornelison', 'Crossland', 'Hirst', 'Mier', 'Roberto', 'Canchola', 'Bosse', 'Shetler', 'Melendrez', 'Giannini', 'Six', 'Traynor', 'Knepper', 'Lonergan', 'Kessinger', 'Hollon', 'Weathersby', 'Stouffer', 'Gingrich', 'Breault', 'Pompa', 'Vanhoose', 'Burdine', 'Lark', 'Stiltner', 'Wunderlich', 'Yong', 'Merrifield', 'Willhite', 'Geiser', 'Lambrecht', 'Keffer', 'Carlo', 'Germany', 'Turgeon', 'Dame', 'Tristan', 'Bova', 'Doak', 'Mannino', 'Shotwell', 'Bash', 'Coots', 'Feist', 'Mahmood', 'Schlabach', 'Salzman', 'Kass', 'Bresnahan', 'Stonge', 'Tesch', 'Grajeda', 'Mccarron', 'Mcelwee', 'Spradling', 'Mckown', 'Colgan', 'Piedra', 'Collum', 'Stoffel', 'Won', 'Gulick', 'Devault', 'Enders', 'Yanes', 'Lansing', 'Ebner', 'Deegan', 'Boutin', 'Fetzer', 'Andresen', 'Trigg', 'Sale', 'Polite', 'Hummer', 'Wille', 'Bowerman', 'Routh', 'Iqbal', 'Lakey', 'Mcadoo', 'Laflamme', 'Boulware', 'Guadarrama', 'Campana', 'Strayer', 'Aho', 'Emmett', 'Wolters', 'Bos', 'Knighten', 'Averill', 'Bhakta', 'Schumaker', 'Stutts', 'Mejias', 'Byer', 'Mahone', 'Staab', 'Riehl', 'Briceno', 'Zabala', 'Lafountain', 'Clemmer', 'Mansell', 'Rossetti', 'Lafontaine', 'Mager', 'Adamo', 'Bogue', 'Northern', 'Disney', 'Masse', 'Senter', 'Yaeger', 'Dahlberg', 'Bisson', 'Leitner', 'Bolding', 'Ormsby', 'Berard', 'Brazell', 'Pickle', 'Hord', 'Mcguigan', 'Glennon', 'Aman', 'Dearman', 'Cauthen', 'Rembert', 'Delucia', 'Enciso', 'Slusser', 'Kratzer', 'Schoenfeld', 'Gillam', 'Rael', 'Rhode', 'Moton', 'Eide', 'Eliason', 'Helfrich', 'Bish', 'Goodnight', 'Campion', 'Blow', 'Gerken', 'Goldenberg', 'Mellinger', 'Nations', 'Maiden', 'Anzalone', 'Wagers', 'Arguelles', 'Christen', 'Guth', 'Stamey', 'Bozarth', 'Balogh', 'Grammer', 'Chafin', 'Prine', 'Freer', 'Alder', 'Latorre', 'Zaleski', 'Lindholm', 'Belisle', 'Zacharias', 'Swinson', 'Bazemore', 'Glazer', 'Acord', 'Said', 'Liggins', 'Lueck', 'Luedtke', 'Blackstone', 'Copper', 'Riker', 'Braud', 'Demello', 'Rode', 'Haven', 'Rhee', 'Galligan', 'Record', 'Nilson', 'Ansley', 'Pera', 'Gilliard', 'Copp', 'Haugh', 'Dunigan', 'Grinnell', 'Garr', 'Leonhardt', 'Elswick', 'Shahan', 'Mike', 'Boddie', 'Casella', 'Mauricio', 'Millet', 'Daye', 'Claussen', 'Pierrelouis', 'Fleischman', 'Embrey', 'Durso', 'Whisenant', 'Rankins', 'Lasky', 'Askins', 'Rupe', 'Rochelle', 'Burkes', 'Kreger', 'Mishler', 'Heald', 'Jager', 'Player', 'Linehan', 'Horwitz', 'Jacobi', 'Maine', 'Wiest', 'Ostrom', 'Sealy', 'Jimerson', 'Alverson', 'Senior', 'Hassett', 'Colter', 'Schleicher', 'Marini', 'Mcbrayer', 'Arzola', 'Sobel', 'Frederickson', 'Confer', 'Tadlock', 'Belmonte', 'Lebrun', 'Clyde', 'Alleyne', 'Lozoya', 'Teller', 'Husband', 'Brigman', 'Secrest', 'Krajewski', 'Neiman', 'Trull', 'Watterson', 'Vanhook', 'Sotomayor', 'Woodrum', 'Baskerville', 'Finke', 'Hohman', 'Arp', 'Hearne', 'Mauk', 'Danko', 'Laurie', 'Linderman', 'Hutt', 'Springfield', 'Chmielewski', 'Klimek', 'Phinney', 'Leboeuf', 'Mcglone', 'Holmquist', 'Cogswell', 'Nichol', 'Klink', 'Dunston', 'Krawczyk', 'Dart', 'Woodside', 'Smitherman', 'Gasca', 'Sala', 'Foxworth', 'Kammerer', 'Auer', 'Pegues', 'Bukowski', 'Koger', 'Spitz', 'Blomquist', 'Creasy', 'Bomar', 'Holub', 'Loney', 'Garry', 'Habib', 'Chea', 'Dupuy', 'Seaver', 'Sowards', 'Julius', 'Fulks', 'Braithwaite', 'Bretz', 'Mccammon', 'Sedillo', 'Chiasson', 'Oney', 'Horstman', 'Waites', 'Mccusker', 'Fenske', 'Conwell', 'Brokaw', 'Cloyd', 'Biles', 'Aguinaga', 'Astorga', 'Demaio', 'Liberty', 'Kayser', 'Ney', 'Barthel', 'Lennox', 'Trautman', 'Purser', 'Pitzer', 'Mattos', 'Liss', 'Clack', 'Sias', 'Bobb', 'Stoller', 'Robillard', 'Almodovar', 'Cribb', 'Ebel', 'Oyler', 'Dail', 'Ericksen', 'Geis', 'Everitt', 'Cropper', 'Meisner', 'Skeens', 'Frith', 'Privett', 'Braddy', 'Bolick', 'Severance', 'Jeffreys', 'Bethune', 'Delcid', 'Buzzard', 'Broadbent', 'Bono', 'Addis', 'Johannes', 'Tims', 'Castorena', 'Simonsen', 'Glidewell', 'Mui', 'Ogilvie', 'Soukup', 'Sunday', 'Redwine', 'Borton', 'Schuyler', 'Rudisill', 'Beckford', 'Pascua', 'Garton', 'Gilkey', 'Applewhite', 'Halterman', 'Alsup', 'Delreal', 'Hubble', 'Quijada', 'Kropp', 'Dunkle', 'Lemire', 'Lamontagne', 'Dunkin', 'Paulin', 'Attaway', 'Baugher', 'Hornbeck', 'Niehaus', 'Nice', 'Trimmer', 'Canaday', 'Maney', 'Trexler', 'Schmucker', 'Edinger', 'Massengill', 'Rowlett', 'Caviness', 'Kam', 'Chesnut', 'Giardina', 'Spaeth', 'Gebhart', 'Morano', 'Salguero', 'Buckland', 'Reina', 'Jumper', 'Navas', 'Thrift', 'Spradley', 'Bitner', 'Ayer', 'Harber', 'Landaverde', 'Mcmillion', 'Naugle', 'Dole', 'Seagraves', 'Smithers', 'Frechette', 'Weeden', 'Caston', 'Cavallaro', 'Laureano', 'Mandell', 'Lowrance', 'Baty', 'Ronan', 'Gigliotti', 'Rossiter', 'Mines', 'Alatorre', 'Markowski', 'Berge', 'Hatter', 'Weakley', 'Borrero', 'Glazier', 'Lavergne', 'Sines', 'Ingham', 'Meltzer', 'Rabinowitz', 'Siciliano', 'Canas', 'Perna', 'Struck', 'Dare', 'Nay', 'Severino', 'Mathewson', 'Bouldin', 'Topete', 'Brunette', 'Sin', 'Hendren', 'Brickey', 'Ferrier', 'Alessi', 'Scheel', 'Storer', 'Matherne', 'Mecham', 'Spiker', 'Hibbert', 'Klingensmith', 'Lefever', 'Banning', 'Bankhead', 'Roan', 'Brack', 'Pascoe', 'Davie', 'Scheid', 'Jim', 'Tweedy', 'Strahan', 'Revis', 'Fermin', 'Obrian', 'Motes', 'Lobo', 'Palmisano', 'Faught', 'Byington', 'Garren', 'Hungerford', 'Vanzandt', 'Gust', 'Heater', 'Klingler', 'Delay', 'Wear', 'Hendley', 'Threatt', 'Gaughan', 'Kunze', 'Hessler', 'Lindell', 'Monteleone', 'Palazzolo', 'Shear', 'Phares', 'Cavalier', 'Benning', 'Urbanski', 'Darrah', 'Wager', 'Mohn', 'Vereen', 'Beiler', 'Hedlund', 'Quade', 'Wieczorek', 'Cicero', 'Hoekstra', 'Scalf', 'Ducote', 'Havard', 'Espiritu', 'Beacham', 'Bolger', 'Schuller', 'Sill', 'Dice', 'Lemmons', 'Orlowski', 'Lundeen', 'Steck', 'Stanfill', 'Rakes', 'Laine', 'Haviland', 'Durrett', 'Naumann', 'Donahoe', 'Reif', 'Franck', 'Amoroso', 'Belknap', 'Tolle', 'Perrotta', 'Heyer', 'Dougan', 'Frakes', 'Leath', 'Poteat', 'Violette', 'Marine', 'Zellner', 'Granillo', 'Fontanez', 'Didonato', 'Bradberry', 'Morman', 'Mentzer', 'Lamoureux', 'Sabatino', 'Catania', 'Wenner', 'Pastrana', 'Shenk', 'Losey', 'Hepburn', 'Antonucci', 'Egger', 'Higbee', 'Adames', 'Reep', 'Cavallo', 'Bridwell', 'Villalba', 'Poor', 'Peet', 'Everette', 'Arney', 'Towery', 'Sharon', 'Trainer', 'Marrow', 'Cumming', 'Rimmer', 'Stanger', 'Pinter', 'Felt', 'Parrett', 'Garrard', 'Benedetto', 'Lingenfelter', 'Resch', 'Billy', 'Mikesell', 'Osterman', 'Trueblood', 'Redfern', 'Calderone', 'Placencia', 'Wamsley', 'Warr', 'Varnado', 'Harshbarger', 'Topping', 'Feltner', 'Decosta', 'Tart', 'Blumberg', 'Shaikh', 'Culley', 'Bork', 'Thibeault', 'Stolz', 'Ramsdell', 'Tedford', 'Noto', 'Poulson', 'Daves', 'Altieri', 'Mendosa', 'Kisner', 'Grafton', 'Remy', 'Hartline', 'Cripe', 'Sher', 'Mulvaney', 'Ansari', 'Hartfield', 'Whitton', 'Wathen', 'Eisele', 'Hinojos', 'Backer', 'Speaks', 'Schuetz', 'Novoa', 'Marcos', 'Mask', 'Oboyle', 'Kircher', 'Stang', 'Sibert', 'Scala', 'Zacarias', 'Hendon', 'Halvorsen', 'Montalbano', 'Zermeno', 'Vancamp', 'Grams', 'Hornberger', 'Binion', 'Dewald', 'Rives', 'Sankey', 'Kleinman', 'Falconer', 'Rumph', 'Matus', 'Swett', 'Spinner', 'Depasquale', 'Gamino', 'Olmsted', 'Absher', 'Culler', 'Fryman', 'Lampert', 'Carlyle', 'Terranova', 'Dunagan', 'Chouinard', 'Wesolowski', 'Hetherington', 'Scalise', 'Pendergast', 'Marcano', 'Joubert', 'Scheller', 'Whisenhunt', 'Lenoir', 'Mahar', 'Vanlandingham', 'Pecoraro', 'You', 'Natividad', 'Daum', 'Penick', 'Eddington', 'Deleo', 'Soltis', 'Santucci', 'Costanza', 'Hiner', 'Farlow', 'Hartsock', 'Duprey', 'Fann', 'Safford', 'Murtha', 'Fessler', 'Chien', 'Paynter', 'Devera', 'Hoelscher', 'Boltz', 'Deacon', 'Loo', 'Enoch', 'Dilorenzo', 'Saville', 'Mirza', 'Takacs', 'Drexler', 'Lakin', 'Geraghty', 'Widmer', 'Esteves', 'Llanes', 'Cerny', 'Quist', 'Hargraves', 'Toma', 'Tarter', 'Chapple', 'Alderete', 'Michelson', 'Clymer', 'Batey', 'Sealey', 'Loughlin', 'Preece', 'Zurita', 'Courville', 'Desousa', 'Shamblin', 'Tingley', 'Noles', 'Misner', 'Standifer', 'Dinardo', 'Dillow', 'Bullis', 'Carballo', 'Everly', 'Mulvihill', 'Tincher', 'Carle', 'Lundin', 'Birdsall', 'Bainbridge', 'Suttle', 'Wightman', 'Mower', 'Mountain', 'Bickham', 'Durante', 'Viveros', 'Swinford', 'Mcgruder', 'Tapley', 'Grable', 'Gwynn', 'Wiebe', 'Stagg', 'Dash', 'Heitman', 'Cluff', 'Huertas', 'Fortuna', 'Lines', 'Sly', 'Halford', 'Helsel', 'Bicknell', 'Blakeman', 'Colangelo', 'Olney', 'Quinton', 'Rothrock', 'Renz', 'Hone', 'Prejean', 'Oshiro', 'Serio', 'Latour', 'Newbold', 'Fitzhugh', 'Songer', 'Cardin', 'Geter', 'Barbera', 'Abbas', 'Caesar', 'Blakeslee', 'Camper', 'Mcclurg', 'Driskill', 'Cancel', 'Donelson', 'Borrelli', 'Donoghue', 'Shoaf', 'Tinajero', 'Arzate', 'Keesee', 'Pasley', 'Strode', 'Morello', 'Trantham', 'Ackerson', 'Jowers', 'Brockington', 'Barcia', 'Lipp', 'Dinger', 'Ridings', 'Canavan', 'Rank', 'Hagans', 'Lampley', 'Beckmann', 'Bjork', 'Raygoza', 'Schirmer', 'Longmire', 'Schiavone', 'Breuer', 'Lore', 'Stenson', 'Koziol', 'Channell', 'Cale', 'Trader', 'Culberson', 'Mundt', 'Sickles', 'Nemec', 'Holl', 'Stribling', 'Berens', 'Nauman', 'Lehner', 'Deem', 'Castelli', 'Billman', 'Orndorff', 'Gumm', 'Davy', 'Pelham', 'Spotts', 'Jurgens', 'Sword', 'Adorno', 'Gorrell', 'Boughton', 'Bobadilla', 'Mauer', 'Moline', 'Guay', 'Holsinger', 'Baranowski', 'Gutierres', 'Beveridge', 'Marable', 'Berkey', 'Lamothe', 'Spitler', 'Carbaugh', 'Hoopes', 'Wilken', 'Milford', 'Bingaman', 'Crippen', 'Shock', 'Yarnell', 'Oman', 'Wethington', 'Kost', 'Gaudette', 'Spielman', 'Foran', 'Starke', 'Eugene', 'Birnbaum', 'Navarrette', 'Hussein', 'Ranson', 'Hedgepeth', 'Doctor', 'Higuera', 'Brough', 'Cookson', 'Provencher', 'Mendonca', 'Gowen', 'Summer', 'Rutz', 'Reader', 'Doud', 'Raven', 'Toribio', 'Peachey', 'Gunning', 'Bittle', 'Vale', 'Harnish', 'Marano', 'Aker', 'Damore', 'Utz', 'Throckmorton', 'Bulger', 'Vanzant', 'Pasillas', 'Holmgren', 'Corpus', 'Longley', 'Wetmore', 'Carstens', 'Line', 'Percival', 'Ayotte', 'Batres', 'Pipes', 'Ludwick', 'Alpert', 'Pick', 'Carlock', 'Edmundson', 'Feinstein', 'Krouse', 'Dahlgren', 'Sasaki', 'Lieb', 'Londono', 'Oloughlin', 'Wardlaw', 'Lineberry', 'Castello', 'Milstead', 'Parmenter', 'Riffe', 'Pare', 'Sitton', 'Tarin', 'Delcastillo', 'Manor', 'Calabro', 'Elkin', 'Grill', 'Boaz', 'Coco', 'Chamblee', 'Celestine', 'Nick', 'Stork', 'Meekins', 'Moise', 'Devers', 'Jun', 'Kegley', 'Brick', 'Lobato', 'Biggerstaff', 'Kersten', 'Jayne', 'Nasser', 'Southall', 'Kempton', 'Eaddy', 'Paladino', 'Berardi', 'Pizzo', 'Pulver', 'Ohalloran', 'Fromm', 'Cranston', 'Rowden', 'Capobianco', 'Kahle', 'Thiessen', 'Malott', 'Houseman', 'Maul', 'Gallion', 'Tressler', 'Pauly', 'Pellerin', 'Sainz', 'Firth', 'Cryer', 'Jeanlouis', 'Mong', 'Trawick', 'Chronister', 'Hayashi', 'Posner', 'Cueva', 'Sherwin', 'Lacasse', 'Gorden', 'Bohl', 'Twigg', 'Coan', 'Hocker', 'Goodale', 'Urbano', 'Loeb', 'Perrault', 'Frawley', 'Carcamo', 'Richburg', 'Moffat', 'Hennings', 'Weyer', 'Myatt', 'Ullman', 'Tunnell', 'Hern', 'Lopresti', 'Sonnenberg', 'Knisley', 'Twomey', 'Jaggers', 'Tanksley', 'Rachal', 'Poppe', 'Vos', 'Kania', 'Speakman', 'Peirce', 'Pound', 'Romer', 'Patty', 'Millsaps', 'Kyser', 'Telford', 'Hegarty', 'Kellett', 'Michaelis', 'Halligan', 'Maughan', 'Herb', 'Rainer', 'Robichaud', 'Fiscus', 'Sickler', 'Blom', 'Lavine', 'Medel', 'Bolyard', 'Secor', 'Creekmore', 'Magruder', 'Haskin', 'Laliberte', 'Drago', 'Bernabe', 'Leader', 'Cavin', 'Lukens', 'Vassallo', 'Pletcher', 'Fuson', 'Hasson', 'Huckabee', 'Edington', 'Eichler', 'Hering', 'Vong', 'Mardis', 'Gu', 'Segarra', 'Bilyeu', 'Runion', 'Fragoso', 'Gama', 'Dunton', 'Frady', 'Lewellen', 'Crumpler', 'Jeske', 'Furlow', 'Delapena', 'Kale', 'Massengale', 'Hamlet', 'Galli', 'Esteban', 'Greeson', 'Shue', 'Pollak', 'Pinney', 'Ruffner', 'Maitland', 'Steven', 'Hockett', 'Fraire', 'Mulhern', 'Elbert', 'Hoggard', 'Labarge', 'Silcox', 'Saez', 'Sluder', 'Stamp', 'Darlington', 'Mccarroll', 'Pillow', 'Palazzo', 'Blaha', 'Demaria', 'Swanger', 'Winningham', 'Lippincott', 'Dake', 'Goldsberry', 'Seidl', 'Woolfolk', 'Murawski', 'Hobart', 'Kimber', 'Nilsson', 'Stough', 'Almendarez', 'Nevels', 'Fasano', 'Salmons', 'Denmark', 'Lathan', 'Mosely', 'Stengel', 'Mendieta', 'Felice', 'Drown', 'Vidrine', 'Callihan', 'Polston', 'Howze', 'Eakins', 'Leek', 'Featherstone', 'Lajoie', 'Athey', 'Asuncion', 'Ashbaugh', 'Orman', 'Morrissette', 'Peart', 'Hamner', 'Zell', 'Dry', 'Dieter', 'Terrones', 'Campuzano', 'Reveles', 'Bakker', 'Banister', 'Arceo', 'Dhillon', 'Normand', 'Shavers', 'Ginsburg', 'Go', 'Rubinstein', 'Arens', 'Clutter', 'Jaques', 'Traxler', 'Hackler', 'Cisco', 'Starrett', 'Ceron', 'Gillenwater', 'Ottinger', 'Caster', 'Blakemore', 'Thorsen', 'Molinar', 'Baur', 'Hower', 'Haldeman', 'Oliveri', 'Mcalpin', 'Standish', 'Bengtson', 'Strack', 'Cordoba', 'Blackstock', 'Barna', 'Schantz', 'Hawkinson', 'Breese', 'Saba', 'Camden', 'Gwaltney', 'Corliss', 'Smit', 'Cruise', 'Mcneese', 'Duggins', 'Laub', 'Burman', 'Kenworthy', 'Spohn', 'Santini', 'Nuttall', 'Willison', 'Stjean', 'Shabazz', 'Manes', 'Gerry', 'Mclamb', 'Koepke', 'Reeser', 'Ogburn', 'Wegener', 'Risinger', 'Carrero', 'Livermore', 'Brewton', 'Harsh', 'Utterback', 'Lecompte', 'Schnabel', 'Ting', 'Honea', 'Stryker', 'Foshee', 'Baptista', 'Gravely', 'Courson', 'Goyette', 'Leitch', 'Tasker', 'Laurence', 'Reneau', 'Voight', 'Tilson', 'Range', 'Hallam', 'Dufrene', 'Boice', 'Shrewsbury', 'Sturges', 'Lenard', 'Sistrunk', 'Weitz', 'Carnevale', 'Hepner', 'Wehner', 'Callen', 'Oshaughnessy', 'Wingert', 'Mouser', 'Palmore', 'Rugg', 'Elia', 'Alcazar', 'Avitia', 'Penton', 'Brisco', 'Ambrosio', 'Wardlow', 'Leaf', 'Rowles', 'Buggs', 'Dittmer', 'Schweizer', 'Puleo', 'Vaden', 'Haughton', 'Cardinale', 'Seguin', 'Ruddy', 'Minard', 'Stalker', 'Bennington', 'Hilt', 'Works', 'Broadus', 'Engels', 'Haddix', 'Buster', 'Recker', 'Bopp', 'Wilton', 'Costantino', 'Boots', 'Falkner', 'Tennison', 'Mcgary', 'Holz', 'Lofgren', 'Putney', 'Christner', 'Fruge', 'Vassar', 'Vankirk', 'Spoon', 'Pearlman', 'Guertin', 'Meece', 'Sartain', 'Petterson', 'Primm', 'Cardillo', 'Dryer', 'Hartshorn', 'Dane', 'Chaisson', 'Espitia', 'Creager', 'Disalvo', 'Janik', 'Parente', 'Paiva', 'Slaven', 'Tague', 'Kujawa', 'Gruver', 'Foor', 'Frampton', 'Prokop', 'Mettler', 'Collis', 'Lamkin', 'Shuey', 'Tepper', 'Colyer', 'Masi', 'Trumble', 'Guice', 'Hurwitz', 'Windle', 'Mccully', 'Cutting', 'Stotler', 'Grullon', 'Wagstaff', 'Morfin', 'Dehaan', 'Noon', 'Flesher', 'Ferri', 'Covell', 'Coll', 'Lucy', 'Albaugh', 'Testerman', 'Gordillo', 'Jepson', 'Brinkerhoff', 'Calle', 'Crowl', 'Mcelwain', 'Chumley', 'Brockett', 'Thoms', 'Revell', 'Garzon', 'Polak', 'Rothenberg', 'Socha', 'Vallejos', 'Felty', 'Peguero', 'Ping', 'Tso', 'Charleston', 'Fedor', 'Haider', 'Abe', 'Enlow', 'Fifer', 'Bumpus', 'Keele', 'Mcdavid', 'Panek', 'Scholten', 'Dyess', 'Heatherly', 'Donohoe', 'Hoban', 'Griffey', 'Corry', 'Mcclean', 'Plyler', 'Feathers', 'Adkison', 'Killeen', 'Hoeft', 'Myhre', 'Fiorentino', 'Mcbeth', 'Erazo', 'Madson', 'Fulbright', 'Wilds', 'Petrucci', 'Mcgaughey', 'Monteith', 'Murguia', 'Hausman', 'Zukowski', 'Shute', 'Brisson', 'Lain', 'Runkle', 'Hickok', 'Caffrey', 'Million', 'Elson', 'Peay', 'Haga', 'Ancheta', 'Cordle', 'Blas', 'Carmen', 'Pettiford', 'Dimartino', 'Spahr', 'Mozingo', 'Backman', 'Stgeorge', 'Konrad', 'Buhler', 'Mcelrath', 'Oliveros', 'Edelstein', 'Cadet', 'Gilmartin', 'Munday', 'Roane', 'Desalvo', 'Lepe', 'Symons', 'Shearin', 'Linkous', 'Cheshire', 'Klemm', 'Beagle', 'Pooler', 'Dewalt', 'Esch', 'Finnell', 'Sinnott', 'Kepler', 'Toups', 'Riccardi', 'Caylor', 'Tillis', 'Messmer', 'Rothschild', 'Boutte', 'Zumwalt', 'Bohrer', 'Elgin', 'Kinley', 'Schechter', 'Gowan', 'Pyne', 'Cousin', 'Hunsinger', 'Fishel', 'Edenfield', 'Nadler', 'Warman', 'Bruhn', 'Swint', 'Lizotte', 'Nardone', 'Troxel', 'Grindle', 'Labrie', 'Tao', 'Olea', 'Schermerhorn', 'Stier', 'Hettinger', 'Farthing', 'Roux', 'Max', 'Amburgey', 'Auerbach', 'Janzen', 'Ortez', 'Alejandre', 'Peiffer', 'Molinaro', 'Burleigh', 'Benites', 'Ringler', 'Hou', 'Haffner', 'Nace', 'Crosson', 'Karcher', 'Neufeld', 'Bayles', 'Riemer', 'Amezquita', 'Cadwell', 'Petrosky', 'Swallow', 'Minnis', 'Krupp', 'Nardi', 'Orsini', 'Diez', 'Updike', 'Gasser', 'Rogerson', 'Speicher', 'Dubay', 'Hollaway', 'Teets', 'Keown', 'Center', 'Blanding', 'Whisler', 'Spurlin', 'Collin', 'Greenawalt', 'Tomes', 'Leister', 'Chatfield', 'Helwig', 'Reimers', 'Andress', 'Norcross', 'Melnick', 'Yearwood', 'Defazio', 'Kubik', 'Bhatia', 'Uddin', 'Belmont', 'Haden', 'Bench', 'Chilson', 'Pegg', 'Cane', 'Goehring', 'Lino', 'Tyus', 'Furey', 'Castleman', 'Heywood', 'Leedy', 'Holleman', 'Villeda', 'Mcveigh', 'Carreiro', 'Hocking', 'Azar', 'Blough', 'Lieu', 'Marcial', 'Coblentz', 'Hossain', 'Weisberg', 'Gardea', 'Hoyos', 'Lipsey', 'Reger', 'Clouser', 'Bewley', 'Magness', 'Goines', 'Thome', 'Odea', 'Mannion', 'Dansby', 'Dipasquale', 'Constable', 'Truelove', 'Hubler', 'Ulibarri', 'Wymer', 'Cron', 'Hugo', 'Hilderbrand', 'Milazzo', 'Vasques', 'Sproul', 'Shuford', 'Chavers', 'Kral', 'Vecchio', 'Mehl', 'Rymer', 'Henriksen', 'Taulbee', 'Hagy', 'Ammerman', 'Kagan', 'Galdamez', 'Krick', 'Owsley', 'Mullikin', 'Beery', 'Eccles', 'Kleinschmidt', 'Kloss', 'Oldenburg', 'Ospina', 'Harbert', 'Andujar', 'Florian', 'Antone', 'Mcmillon', 'Ceniceros', 'Rippy', 'Adkisson', 'Stange', 'Balmer', 'Mazurek', 'Dahlke', 'Girouard', 'Nickelson', 'Perera', 'Tullos', 'Cioffi', 'Bogdan', 'Olivieri', 'Petree', 'Speights', 'Jantz', 'Collings', 'Zellers', 'Yarber', 'Lafollette', 'Rink', 'Currin', 'Chua', 'Hartle', 'Larocque', 'Cuthbertson', 'Ehrhardt', 'Mara', 'Rieck', 'Lumley', 'Anderton', 'Hennigan', 'Fabrizio', 'Hutter', 'Bruning', 'Korman', 'Haring', 'Monette', 'Woodyard', 'Goggins', 'Balzer', 'Philbrick', 'Bruder', 'Hansford', 'Averett', 'Teske', 'Mauck', 'Billiot', 'Collie', 'Caffey', 'Manos', 'Buchan', 'Birk', 'Abdallah', 'Featherston', 'Koh', 'Valera', 'Deyo', 'Buono', 'Aubin', 'Doody', 'Pigott', 'Peloquin', 'Maniscalco', 'Eisenhauer', 'Biller', 'Farwell', 'Hartzog', 'Brazier', 'Talton', 'Mcdougald', 'Midgett', 'Strout', 'Spiers', 'Eiland', 'Garth', 'Sequeira', 'Noyola', 'Petri', 'Goodyear', 'Dineen', 'Bernardi', 'Berns', 'Coolidge', 'Dorfman', 'Dittman', 'Zeno', 'Hauer', 'Finlay', 'Ziemba', 'Spillane', 'Kays', 'Ekstrom', 'Hile', 'Mckinstry', 'Lesley', 'Courtright', 'Kuhlmann', 'Verma', 'Cripps', 'Wigley', 'Nickens', 'Petrick', 'Delozier', 'Hardcastle', 'Yamaguchi', 'Romig', 'Venezia', 'Reading', 'Redford', 'Heng', 'Anselmo', 'Getty', 'Marten', 'Badgett', 'Eisner', 'Holtzman', 'Stell', 'Hiser', 'Dustin', 'Bordeaux', 'Debolt', 'Trevizo', 'Eckard', 'Follett', 'Lal', 'Dark', 'Buskirk', 'Roca', 'Todaro', 'Campanella', 'Lindsley', 'Wickman', 'Pritt', 'Cutlip', 'Pokorny', 'Friedlander', 'Saari', 'Casias', 'Macneil', 'Clyburn', 'Kravitz', 'Edgington', 'Portis', 'Culbreth', 'Cuff', 'Brouillette', 'Artz', 'Trudell', 'Pledger', 'Markovich', 'Pisani', 'Faller', 'Sergent', 'Hail', 'Stabile', 'Wait', 'Mcilwain', 'Eriksen', 'Nee', 'Boll', 'Catanzaro', 'Giuliano', 'Oldfield', 'Banas', 'Ickes', 'Vachon', 'Gleeson', 'Bailes', 'Biehl', 'Woodham', 'Troupe', 'Mcgoldrick', 'Cappello', 'Kirkendall', 'Baisden', 'Joshua', 'Nicoletti', 'Roesch', 'Deatherage', 'Matter', 'Sheth', 'Tynes', 'Shaheen', 'Wilbert', 'Toles', 'Sanner', 'Bury', 'Boman', 'Bose', 'Millner', 'Eisen', 'Couto', 'Ide', 'Howells', 'Jiminez', 'Crampton', 'Monti', 'Jelinek', 'Morford', 'Yeomans', 'Turnbow', 'Rolland', 'Scheetz', 'Arends', 'Repp', 'Hohn', 'Paton', 'Govan', 'Fabela', 'Mroz', 'Bourassa', 'Rizzi', 'Froelich', 'Molinari', 'Lunde', 'Navarre', 'Alexandre', 'Dearborn', 'Lakes', 'Foxx', 'Jerez', 'Lamanna', 'Talarico', 'Butera', 'Riner', 'Gros', 'Champ', 'Phoenix', 'Vandeventer', 'Samora', 'Behling', 'Karpinski', 'Hosier', 'Tufts', 'Hobby', 'Rohrbach', 'Youngman', 'Yeary', 'Paisley', 'Ben', 'Villalta', 'Hempel', 'Giblin', 'Lunt', 'Hagar', 'Lapoint', 'Singley', 'Shows', 'Kesterson', 'Bollman', 'Stansell', 'Yon', 'Gabaldon', 'Simental', 'Zastrow', 'Enloe', 'Sasso', 'Harkey', 'Sansom', 'Twyman', 'Haslam', 'Sowa', 'Hunsberger', 'Norberg', 'Hornback', 'Hanshaw', 'Axtell', 'Hoge', 'Gantz', 'Mccullum', 'Blazek', 'Scher', 'Carlucci', 'Jeong', 'Tillett', 'Woolridge', 'Carberry', 'Reck', 'Nevin', 'Armes', 'Sidhu', 'Wiesner', 'Auman', 'Teeters', 'Rigg', 'Moloney', 'Feld', 'Lucier', 'Cardone', 'Kilian', 'Conder', 'Horta', 'Murakami', 'Schaff', 'Dresser', 'Spray', 'Hott', 'Capuano', 'Englund', 'Rothe', 'Ferree', 'Nolt', 'Triana', 'Sanjuan', 'Oller', 'Brathwaite', 'Richert', 'Holdren', 'Goree', 'Branstetter', 'Schimmel', 'Jessop', 'Nellis', 'Sevier', 'Rabb', 'Mcmorris', 'Lindo', 'Littles', 'Polzin', 'Ranieri', 'Reale', 'Sturtevant', 'Arnone', 'Zamorano', 'Keever', 'Clow', 'Corr', 'Blaser', 'Sheetz', 'Llanos', 'Belew', 'Rusnak', 'Brandes', 'Eichhorn', 'Guida', 'Pucci', 'Streit', 'Renn', 'Partee', 'Rappaport', 'Rosso', 'Defeo', 'Greve', 'Schoch', 'Langevin', 'Manna', 'Towe', 'Scoville', 'Marco', 'Gove', 'Mckissick', 'Dangerfield', 'Mcwhirter', 'Port', 'Marrufo', 'Nicosia', 'Farren', 'Kinsley', 'Pearman', 'Porch', 'Mooneyham', 'Buff', 'Ruben', 'Blanc', 'Mellen', 'Heiman', 'Novack', 'Heston', 'Huie', 'Justin', 'Kincade', 'Laverty', 'Villavicencio', 'Burkart', 'Offutt', 'Halliburton', 'Polo', 'Barbara', 'Trammel', 'Rosati', 'Sakamoto', 'Salo', 'Heyman', 'Rooker', 'Sarno', 'Leroux', 'Virgen', 'Collison', 'Branum', 'Mcmasters', 'Divine', 'Mcnatt', 'Threadgill', 'Desir', 'Borchers', 'Walkup', 'Sy', 'Greenbaum', 'Vidales', 'Mercedes', 'Selph', 'Bardwell', 'Whorton', 'Demartino', 'Endsley', 'Verner', 'Hillier', 'Mancha', 'Ricard', 'Postell', 'Kummer', 'Welsch', 'Almanzar', 'Brunet', 'Deeds', 'Romanowski', 'Ocallaghan', 'Cueto', 'Terhune', 'Truesdell', 'Whisnant', 'Lingo', 'Aden', 'Labrecque', 'Braga', 'Iles', 'Garrick', 'Knickerbocker', 'Rasberry', 'Hervey', 'Schill', 'Kiely', 'Liddle', 'Blakeley', 'Marez', 'Schoonmaker', 'Swinton', 'Fryar', 'Exum', 'Gouge', 'Hoskinson', 'Lupton', 'Guild', 'Davisson', 'Chidester', 'Gravitt', 'Lenox', 'Pyatt', 'Moberg', 'Overholt', 'Whiddon', 'Foti', 'Lipps', 'Shankle', 'Xiao', 'Balentine', 'Cesar', 'Barreras', 'Schroer', 'Ram', 'Eames', 'Gutman', 'Pardee', 'Damiano', 'Houchin', 'Porto', 'Leclerc', 'Mahaney', 'Deardorff', 'Garey', 'Trotta', 'Lachapelle', 'Suiter', 'Ewert', 'Costner', 'Bever', 'Charpentier', 'Milewski', 'Coffelt', 'Schorr', 'Leis', 'Dasher', 'Cullins', 'Eveland', 'Hornung', 'Swingle', 'Eudy', 'Motter', 'Silk', 'Gadd', 'Sidwell', 'Sandusky', 'Auld', 'Mazariegos', 'Hirt', 'Zane', 'Rickett', 'Ritenour', 'Goin', 'Dipaolo', 'Wolfgang', 'Inouye', 'Branton', 'Rakestraw', 'Kimbro', 'Craighead', 'Sandefur', 'Foerster', 'Wipf', 'Wilkin', 'Shoffner', 'Overcash', 'Simonetti', 'Toomer', 'Albino', 'Eshelman', 'Rockwood', 'Pineiro', 'Reames', 'Cray', 'Wulff', 'Heider', 'Bath', 'Colletti', 'Fiala', 'Greenstein', 'Moles', 'Bashaw', 'Adamczyk', 'Finkel', 'Kistner', 'Manzi', 'Ferretti', 'Demarest', 'Ahlers', 'Lack', 'Wedel', 'Kinzer', 'Sechrist', 'Stickler', 'Easterday', 'Mallette', 'Loehr', 'Gessner', 'Croce', 'Stanko', 'Innes', 'Farfan', 'Heady', 'Chambless', 'Balbuena', 'Decicco', 'Winsor', 'Pereyra', 'Zoller', 'Ingles', 'Churchwell', 'Westlake', 'Villagran', 'Soderberg', 'Thill', 'Timmer', 'Mccaleb', 'Mckernan', 'Vandergriff', 'Yoho', 'Crispin', 'Dorton', 'Fults', 'Borne', 'Maxie', 'Bloomquist', 'Kung', 'Budde', 'Weinstock', 'Honey', 'Diener', 'Horak', 'Tsui', 'Zirkle', 'Plum', 'Heitz', 'Manrique', 'Balcom', 'Napper', 'Boese', 'Stefan', 'Kime', 'Gautreaux', 'Leverette', 'Lemaire', 'Danford', 'Hollman', 'Kuzma', 'Swinehart', 'Merriam', 'Novick', 'Stankiewicz', 'Parkes', 'Englehart', 'Polansky', 'Leclaire', 'Magner', 'Masson', 'Mass', 'Coogan', 'Jepsen', 'Pittenger', 'Bump', 'Hain', 'Burchell', 'Chesley', 'Cawthon', 'Dance', 'Piccolo', 'Lucey', 'Ordway', 'Recio', 'Ginther', 'Hauge', 'Lesperance', 'Suhr', 'Ding', 'Ogg', 'Skiba', 'Scannell', 'Gillies', 'Brame', 'Schipper', 'Brune', 'Stuber', 'Pesce', 'Stead', 'Bushong', 'Juneau', 'Mccalla', 'Feder', 'Plaisance', 'Tweed', 'Hashimoto', 'Mounce', 'Diana', 'Savala', 'Vanek', 'Lamson', 'Dubin', 'Killebrew', 'Kan', 'Nault', 'Mulford', 'Salamanca', 'Linker', 'Penrose', 'Kowalewski', 'Platz', 'Kogan', 'Martucci', 'Gutowski', 'Mattes', 'Haigh', 'Merida', 'Ashman', 'Batton', 'Biondo', 'Sweigart', 'Sorg', 'Barrier', 'Gatling', 'Geib', 'Henrich', 'Dabrowski', 'Vara', 'Weikel', 'Jarosz', 'Mummert', 'Uriarte', 'Fifield', 'Locker', 'Merlo', 'Lasater', 'Ripple', 'Hopwood', 'Sherrell', 'Ruark', 'Litz', 'Kinkade', 'Simkins', 'Grandy', 'Lemasters', 'Wehr', 'Jinks', 'Alas', 'Bale', 'Stimpson', 'Glickman', 'Hage', 'Seabrook', 'Stirling', 'Rozell', 'Woodburn', 'Braaten', 'Sugg', 'Linde', 'Castille', 'Grewal', 'Blackwelder', 'Hover', 'Spurling', 'Mckellar', 'Muench', 'Bovee', 'Amado', 'Yau', 'Harger', 'Lederer', 'Seda', 'Doney', 'Kimes', 'Western', 'Foret', 'Luera', 'Warnke', 'Bussard', 'Cartier', 'Andreasen', 'Lagasse', 'Topper', 'Nyman', 'Hallberg', 'Whisman', 'Cremeans', 'Dewar', 'Garrow', 'Odaniel', 'Stabler', 'Bourg', 'Appling', 'Dahlstrom', 'Fujimoto', 'Prudhomme', 'Gum', 'Nau', 'Hiers', 'Rockett', 'Sobczak', 'Traub', 'Bevis', 'Tilghman', 'Plasencia', 'Sison', 'Blau', 'Abbate', 'Sisler', 'Rudder', 'Trotman', 'Brust', 'Lederman', 'Frahm', 'Fredette', 'Parise', 'Urso', 'Amann', 'Kaul', 'Woolery', 'Thielen', 'Symonds', 'Marcy', 'Wiltshire', 'Sustaita', 'Botkin', 'Kernan', 'Doolin', 'Babineaux', 'Greenspan', 'Delacerda', 'Kinnard', 'Twitty', 'Augustus', 'Corriveau', 'Stults', 'Toman', 'Sklar', 'Leber', 'Considine', 'Ohearn', 'Deforest', 'Mcmann', 'Farquhar', 'Ferrel', 'Bickley', 'Manno', 'Vreeland', 'Berthiaume', 'Mcentee', 'Summerfield', 'Woodrow', 'Reynaga', 'Soltero', 'Tomko', 'Jarboe', 'Allmon', 'Duplessis', 'Sydnor', 'Diallo', 'Cogar', 'Mandeville', 'Shimizu', 'Aubuchon', 'Gabbert', 'Ashlock', 'Macri', 'Weng', 'Walser', 'Teng', 'Bailon', 'Steeves', 'Perillo', 'Quattlebaum', 'Knipp', 'Delavega', 'Kirtley', 'Bramble', 'Sublett', 'Borchert', 'Doria', 'Session', 'Merced', 'Lundstrom', 'Bluhm', 'Cortinas', 'Proper', 'Sieber', 'Mccay', 'Wilford', 'Asberry', 'Muldrow', 'Berning', 'Hemenway', 'Millman', 'Ewers', 'Timko', 'Reding', 'Sayer', 'Pickel', 'Cogburn', 'Chappel', 'Custodio', 'Reichel', 'Robeson', 'Waid', 'Wagler', 'Sappington', 'Bart', 'Zazueta', 'Najar', 'Marko', 'Nally', 'States', 'Bellard', 'Marciano', 'Killough', 'Cosper', 'Sangster', 'Heinze', 'Bortz', 'Matamoros', 'Nuckols', 'Townsley', 'Bak', 'Ralls', 'Ferrin', 'Villela', 'Siegrist', 'Arora', 'Collinsworth', 'Masten', 'Deer', 'Balog', 'Buchman', 'Scaggs', 'Holeman', 'Lefkowitz', 'Santora', 'Funke', 'Redfield', 'Douthit', 'Marciniak', 'Twitchell', 'Sheahan', 'Dai', 'Demuth', 'Ganz', 'Bruckner', 'Wier', 'Alamo', 'Aultman', 'Chubb', 'Branco', 'Courter', 'Vivian', 'Guin', 'Witten', 'Glen', 'Hyer', 'Crowson', 'Arendt', 'Cipolla', 'Prochaska', 'Schober', 'Harte', 'Arciniega', 'Beier', 'Middlebrook', 'Dennard', 'Vantassel', 'Weekes', 'Penley', 'Lozier', 'Lamberson', 'Broomfield', 'Nygaard', 'Pascale', 'Hyden', 'Mundell', 'Kamara', 'Ehlert', 'Mangus', 'Bornstein', 'Benedetti', 'Erikson', 'Quint', 'Westman', 'Basler', 'Smoak', 'Leavell', 'Kerber', 'Kopec', 'Emrick', 'Mattice', 'Render', 'Mccree', 'Feldmann', 'Cutright', 'Randell', 'Drucker', 'Gilmour', 'Marconi', 'Stripling', 'Mucha', 'Shipe', 'Chalk', 'Martone', 'Lema', 'Ricardo', 'Cobian', 'Laufer', 'Mistretta', 'Shortt', 'Menzel', 'Wickline', 'Oddo', 'Chai', 'Rabideau', 'Stogner', 'Mckie', 'Luongo', 'Trieu', 'Breshears', 'Sturdevant', 'Abernethy', 'Rohan', 'Bonnette', 'Steffes', 'Straka', 'Lawhon', 'Shawver', 'Guilford', 'Wiltz', 'Digregorio', 'Warburton', 'Fleshman', 'Kerstetter', 'Byram', 'Obannon', 'Dalessio', 'Gatti', 'Kalb', 'Boris', 'Graver', 'Parkins', 'Kollar', 'Crothers', 'Patin', 'Cutshall', 'Fern', 'Derosier', 'Goodrum', 'Kaelin', 'Baynes', 'Beesley', 'Macintyre', 'Butters', 'Kinsman', 'Huffer', 'Eslinger', 'Prunty', 'Boehmer', 'Nusbaum', 'Gouveia', 'Mire', 'Mccary', 'Mikell', 'Petrovich', 'Melillo', 'Kennelly', 'Howley', 'Merwin', 'Cotner', 'Kanter', 'Sahagun', 'Bodden', 'Mcconville', 'Leddy', 'Auten', 'Downie', 'Armistead', 'Goudy', 'Gerhard', 'Theiss', 'Lauria', 'Tuthill', 'Ammon', 'Ikeda', 'Schultheis', 'Zhong', 'Pearcy', 'Vass', 'Essary', 'Wendland', 'Zehr', 'Hartigan', 'Ugalde', 'Mossman', 'Hartwick', 'Joaquin', 'Andreas', 'Bartee', 'Gajewski', 'Gallaway', 'Comerford', 'Lieber', 'Wireman', 'Damm', 'Yousif', 'Kosinski', 'Kelm', 'Durrant', 'Derouen', 'Bonk', 'Rubalcaba', 'Opperman', 'Decamp', 'Fairfield', 'Pauls', 'Dicicco', 'Northup', 'Woerner', 'Stegman', 'Ritch', 'Bedoya', 'Jeanpierre', 'Rioux', 'Strohl', 'Herrell', 'Simonton', 'Carriere', 'Pridemore', 'Karam', 'Marple', 'Topp', 'Heiden', 'Leibowitz', 'Morabito', 'Junker', 'Calixto', 'Hardt', 'Silverio', 'Swords', 'Rickey', 'Roussel', 'Earles', 'Bastien', 'Defilippo', 'Bigley', 'Mosteller', 'Issa', 'Prout', 'Grossi', 'Bartos', 'Lipman', 'Colegrove', 'Stpeter', 'Vanfleet', 'Fordyce', 'Risher', 'Royston', 'Shoulders', 'Mendel', 'Statler', 'Dantonio', 'Inglis', 'Fogleman', 'Loveday', 'Straus', 'Luft', 'Dam', 'Chewning', 'Winkel', 'Bousquet', 'Eckhart', 'Dillinger', 'Locascio', 'Shellenberger', 'Duerr', 'Alcocer', 'Licht', 'Gingras', 'Grassi', 'Gately', 'Padula', 'Brien', 'Nimmo', 'Nell', 'Bondurant', 'Hughley', 'Schalk', 'Cabrales', 'Heinemann', 'Meunier', 'Maddock', 'Noone', 'Brackin', 'Dunnigan', 'Sargeant', 'Kinchen', 'Veras', 'Gile', 'Bacchus', 'Ang', 'Cowgill', 'Currey', 'Garlick', 'Manus', 'Ballance', 'Robitaille', 'Begin', 'Mijares', 'Keogh', 'Wicklund', 'Mccurley', 'Truett', 'Pullin', 'Alkire', 'Loughran', 'Mort', 'Tatman', 'Wanamaker', 'Haralson', 'Harrah', 'Stucker', 'Reda', 'Pascal', 'Holter', 'Solares', 'Bruck', 'Mah', 'Didomenico', 'Korth', 'Virgil', 'Nishimura', 'Vacca', 'Stenberg', 'Tomczak', 'Sayler', 'Chasse', 'Blazer', 'Sleeper', 'Doiron', 'Nunnery', 'Ortman', 'Maag', 'Cali', 'Ferrera', 'Hotaling', 'Festa', 'Murr', 'Sterrett', 'Cuthbert', 'Clayborn', 'Pendergraft', 'Yoakum', 'Baily', 'Overbey', 'Warne', 'Hokanson', 'Tafolla', 'Puglisi', 'Wooster', 'Nassar', 'Lesniak', 'Noack', 'Beres', 'Liberatore', 'Guyette', 'Duffin', 'Ishmael', 'Dolezal', 'Larimer', 'Musso', 'Borman', 'Deemer', 'Hobgood', 'Triggs', 'Mau', 'Wainscott', 'Seth', 'Hodnett', 'Mckeehan', 'Toon', 'Evens', 'Drost', 'Roehl', 'Trapani', 'Bains', 'Modica', 'Arcos', 'Knopf', 'Salvo', 'Garlock', 'Lounsbury', 'Hennen', 'Drescher', 'Morgenstern', 'Studebaker', 'Nordin', 'Madore', 'Joslyn', 'Brousseau', 'Addy', 'Audette', 'Santibanez', 'Sauers', 'Engelman', 'Mauney', 'Arechiga', 'Eckel', 'Jerry', 'Pernell', 'Sedlacek', 'Mcnary', 'Loewen', 'Eyler', 'Feather', 'Mckinnie', 'Bowersox', 'Laclair', 'Melby', 'Thoman', 'Hose', 'Carmon', 'Bartram', 'Berggren', 'Rogge', 'Seto', 'Court', 'Deskins', 'Barcus', 'Putt', 'Minick', 'Durgin', 'Hockman', 'Keltner', 'Legaspi', 'Wallach', 'Ranney', 'Borger', 'Wakeman', 'Schoolcraft', 'Souther', 'Villani', 'Sauder', 'Chupp', 'Slover', 'Faul', 'Degroat', 'Hakim', 'Brucker', 'Moylan', 'Castilleja', 'Whetzel', 'Eanes', 'Brouwer', 'Okelley', 'Crimmins', 'Bargas', 'Jo', 'Clover', 'Adan', 'Domingues', 'Yelton', 'Lobdell', 'Mattis', 'Escudero', 'Pentecost', 'Riser', 'Lorentz', 'Neace', 'Caplinger', 'Lipe', 'Satterlee', 'Labarbera', 'Cullison', 'Goggin', 'Coke', 'Keo', 'Buckmaster', 'Holtzclaw', 'Lustig', 'Ellinger', 'Lollar', 'Cork', 'Mccrae', 'Hilario', 'Yawn', 'Arnette', 'Yuhas', 'Wardle', 'Pixley', 'Leflore', 'Fluker', 'Krier', 'Wind', 'Ditto', 'Rorie', 'Ensminger', 'Hunsucker', 'Levenson', 'Millington', 'Gorsuch', 'Willems', 'Fredricks', 'Agarwal', 'Lariviere', 'Don', 'Chery', 'Pfeil', 'Wurtz', 'Remillard', 'Cozad', 'Hodgkins', 'Cohan', 'Nurse', 'Espana', 'Giguere', 'Hoskin', 'Pettaway', 'Keifer', 'Yandell', 'Frandsen', 'Nawrocki', 'Vila', 'Pouliot', 'Boulanger', 'Pruden', 'Strauch', 'Lua', 'Rohn', 'Greig', 'Lightsey', 'Etheredge', 'Hara', 'Ensign', 'Ruckman', 'Senecal', 'Sedgwick', 'Maciejewski', 'Morningstar', 'Creswell', 'Britten', 'Godley', 'Laubach', 'Schwenk', 'Hayhurst', 'Cammarata', 'Paxson', 'Mcmurtry', 'Marasco', 'Weatherby', 'Fales', 'Fondren', 'Deherrera', 'Gaydos', 'Defranco', 'Bjorklund', 'Silberman', 'Maxon', 'Rockey', 'Brass', 'Marcoux', 'Marquette', 'Marcello', 'Veit', 'Debose', 'Cloninger', 'Puccio', 'Greenman', 'Bross', 'Lile', 'Behan', 'Plumlee', 'Hampson', 'Steverson', 'Wininger', 'Mcmullan', 'Jude', 'Sharif', 'Rothermel', 'Becher', 'Keithley', 'Gargano', 'Morillo', 'Dumond', 'Johannsen', 'Baney', 'Lipton', 'Railey', 'Clowers', 'Rotondo', 'Simeone', 'Hatt', 'Schexnayder', 'Snoddy', 'Gelinas', 'Mendelson', 'Matherly', 'Klock', 'Clubb', 'Dunkley', 'Rosenzweig', 'Chuang', 'Gines', 'Galasso', 'Helland', 'Rohrbaugh', 'Avilez', 'Czajkowski', 'Olsson', 'Lumsden', 'Birt', 'Ortego', 'Acuff', 'Yetter', 'Tichenor', 'Mork', 'Skillman', 'Row', 'Lollis', 'Wolk', 'Demott', 'Lazenby', 'Bellew', 'Brickner', 'Ragusa', 'Stice', 'Herlihy', 'Guillermo', 'Estabrook', 'Montijo', 'Jenner', 'Rayfield', 'Donlon', 'Greenhalgh', 'Alberti', 'Rix', 'Holthaus', 'Mistry', 'Ruzicka', 'Sievert', 'Koopman', 'Kalish', 'Kehl', 'Ponte', 'Varnell', 'Guss', 'Kovac', 'Hosmer', 'Scrivner', 'Tomblin', 'Villafuerte', 'Branscum', 'Nitz', 'Reider', 'Gaunt', 'Richerson', 'Hemmer', 'Vinyard', 'Barrie', 'Manalo', 'Flynt', 'Cadle', 'Hau', 'Uy', 'Manfredi', 'Deeter', 'Resto', 'Carnell', 'Drane', 'Cusumano', 'Fein', 'Schneck', 'Stucky', 'Heid', 'Bruggeman', 'Schweiger', 'Vanetten', 'Munsey', 'Kiker', 'Whittier', 'Seeman', 'Zerbe', 'Hillyer', 'Burkhead', 'Gafford', 'Gephart', 'Braman', 'Plott', 'Henriques', 'Coppock', 'Mcandrews', 'Valtierra', 'Dileo', 'Stiner', 'Mikel', 'Owensby', 'Gupton', 'Scurlock', 'Gittens', 'Degnan', 'Guillaume', 'Helmuth', 'Nolin', 'Mair', 'Bergeson', 'Paik', 'Kinne', 'Goodloe', 'Nakagawa', 'Raposo', 'Defreitas', 'Korb', 'Hinkel', 'Magers', 'Althoff', 'Rafael', 'Akhtar', 'Cashion', 'Mcquillan', 'Patricio', 'Sweeny', 'Meaux', 'Tyre', 'Demeo', 'Trivedi', 'Goodfellow', 'Dunleavy', 'Middaugh', 'Barbato', 'Pasco', 'Harland', 'Shorts', 'Mowrey', 'Dempster', 'Knuckles', 'Luebke', 'Petrella', 'Retana', 'Licea', 'Rundle', 'Cape', 'Lou', 'Mcconkey', 'Leeman', 'Cabe', 'Timothy', 'Crochet', 'Fulgham', 'Glasco', 'Backes', 'Konopka', 'Mcquaid', 'Schley', 'Abrahams', 'Dahlin', 'Iversen', 'Chico', 'Huffaker', 'Modlin', 'Laduke', 'Marquart', 'Motz', 'Keech', 'Louviere', 'Como', 'Fye', 'Brightwell', 'Yamashita', 'Desrochers', 'Richer', 'Bourke', 'Broadhead', 'Pink', 'Okamoto', 'Chicas', 'Vanatta', 'Shick', 'Furst', 'Layfield', 'Mcewan', 'Baumgart', 'Kappel', 'Kucharski', 'Quam', 'Taub', 'Houghtaling', 'Sundquist', 'Monks', 'Wake', 'Quiros', 'Pursell', 'Johansson', 'Talkington', 'Bast', 'Stimson', 'Hakes', 'Loe', 'Caggiano', 'Schaper', 'Chandra', 'Tuma', 'Arledge', 'Romain', 'Hornick', 'Bridgman', 'Livingstone', 'Potvin', 'Sparling', 'Hause', 'Trosclair', 'Pless', 'Szeto', 'Clontz', 'Lauber', 'Detrick', 'Dominique', 'Mosser', 'Degraff', 'Liner', 'Fleet', 'Czerwinski', 'Kopf', 'Kovar', 'Sheedy', 'Zaremba', 'Mina', 'Sweeten', 'Ou', 'Musto', 'Hennig', 'Bangs', 'Pasternak', 'Berrier', 'Smidt', 'Brayton', 'Claytor', 'Ellerbe', 'Reiman', 'Larimore', 'Ratzlaff', 'Mudge', 'Ni', 'Spillers', 'Cuomo', 'Gerke', 'Polizzi', 'Harmer', 'Apperson', 'Regis', 'Ugarte', 'Paull', 'Lagrange', 'Dinwiddie', 'Becton', 'Gadsden', 'Conforti', 'Desoto', 'Orme', 'Filer', 'Viers', 'Lares', 'Stair', 'Hipps', 'Kaneshiro', 'Ladson', 'Altizer', 'Montejano', 'Scalzo', 'Sowder', 'Ebeling', 'Faucher', 'Dicken', 'Sartor', 'Mcnerney', 'Stage', 'Mika', 'Hice', 'Grinstead', 'Bartsch', 'Mccumber', 'Lenahan', 'Liska', 'Tietz', 'Gauna', 'Janda', 'Bellis', 'Shew', 'Kelton', 'Doby', 'Golson', 'Plaster', 'Gonsales', 'Krone', 'Lape', 'Lowrie', 'Polly', 'Gerardi', 'Lamoreaux', 'Bhatti', 'Kimsey', 'Buhl', 'Arvin', 'Gillian', 'Benbow', 'Roesler', 'Stlaurent', 'Canon', 'Swihart', 'Corea', 'Petitt', 'Spates', 'Nappi', 'Sebring', 'Smelser', 'Eckenrode', 'Palos', 'Disanto', 'Tabares', 'Okane', 'Easterly', 'Dendy', 'Whigham', 'Bednarz', 'Wedge', 'Edelen', 'Stiff', 'Borjas', 'Obando', 'Mcspadden', 'Breed', 'Dismuke', 'Jarmon', 'Serpa', 'Lucky', 'Cournoyer', 'Hedberg', 'Martine', 'Michell', 'Wittig', 'Clodfelter', 'Davids', 'Gattis', 'Kull', 'Mascorro', 'Schad', 'Rine', 'Bradburn', 'Marie', 'Czech', 'Sunderman', 'Wickersham', 'Toohey', 'Capozzi', 'Poplin', 'Markland', 'Brosnan', 'Fetterman', 'Heiss', 'Haglund', 'Jourdan', 'Turnipseed', 'Tiernan', 'Horrocks', 'Barnhardt', 'Sing', 'Belford', 'Baumgarten', 'Klee', 'Degeorge', 'Caulder', 'Gladstone', 'Dancer', 'Satchell', 'Vento', 'Larock', 'Kimberly', 'Hunn', 'Harvin', 'Krahn', 'Ogorman', 'Storch', 'Coomes', 'Bevilacqua', 'Crotts', 'Schillinger', 'Morelock', 'Hayworth', 'Avis', 'Cranmer', 'Getchell', 'Tena', 'Buzzell', 'Widman', 'Barter', 'Lafayette', 'Asencio', 'Embree', 'Krell', 'Siders', 'Fuselier', 'Whitby', 'Elsner', 'Pando', 'Surface', 'Rolf', 'Highland', 'Bufford', 'Scheidt', 'Defrancesco', 'Fellers', 'Carrol', 'Germano', 'Licon', 'Hilty', 'Ringo', 'Dowler', 'Glowacki', 'Slabaugh', 'Tomasello', 'Messing', 'Lavalle', 'Milo', 'Frerichs', 'Plotkin', 'Ziolkowski', 'Gentle', 'Knobloch', 'Larochelle', 'Duell', 'Hurdle', 'Speller', 'Ceasar', 'Vinci', 'Mosquera', 'Wyse', 'Towler', 'Ayoub', 'Gullickson', 'Spade', 'Forshee', 'Cliff', 'Gholson', 'Reichenbach', 'Lockman', 'Morones', 'Storie', 'Bissett', 'Janney', 'Durocher', 'Fentress', 'Troiano', 'Boes', 'Rouleau', 'Rall', 'Sultan', 'Braggs', 'Bethke', 'Schacht', 'Straley', 'Mcfalls', 'Fahy', 'Winegar', 'Gorecki', 'Rudnick', 'Wigginton', 'Dedrick', 'Sthilaire', 'Lovette', 'Hanneman', 'Loch', 'Moores', 'Polen', 'Anchondo', 'Rosato', 'Tindell', 'Hunsicker', 'Penna', 'Privette', 'Gayton', 'Sliger', 'Wink', 'Brummer', 'Crown', 'Sommerville', 'Mastrangelo', 'Latimore', 'Merlino', 'Thoreson', 'Kleiner', 'Able', 'Boose', 'Loyola', 'Jimenes', 'Lapham', 'Srinivasan', 'Hammers', 'Mo', 'Evert', 'Vanslyke', 'Caywood', 'Gremillion', 'Rauscher', 'Eckhoff', 'Dearth', 'Sinha', 'Becerril', 'Tuten', 'Greenwalt', 'Curlee', 'Burgan', 'Feagin', 'Gallman', 'Germann', 'Swensen', 'Vanallen', 'Bissonnette', 'Stoudt', 'Handler', 'Tanguay', 'Lovins', 'Smotherman', 'Cutts', 'Herod', 'Maclin', 'Arcuri', 'Hackbarth', 'Breazeale', 'Rainville', 'Crick', 'Macintosh', 'Bloss', 'Fridley', 'Stefanski', 'Beauvais', 'Koop', 'Andes', 'Blomberg', 'Vallee', 'Lanigan', 'Blouin', 'Rochon', 'Dorazio', 'Drouin', 'Lamonica', 'Wilbourn', 'Spraggins', 'Rieder', 'Shugart', 'Chacko', 'Rutan', 'Nutting', 'Lawley', 'Landy', 'January', 'Blowers', 'Handel', 'Doman', 'Swiney', 'Ettinger', 'Jellison', 'Veilleux', 'Wiens', 'Raimondi', 'Spink', 'Emond', 'Yale', 'Rachel', 'Alldredge', 'Lach', 'Morlan', 'Wayland', 'Colquitt', 'Gabrielson', 'Mccarver', 'Frances', 'Granville', 'Costigan', 'Preuss', 'Lentini', 'Vansant', 'Mosca', 'Connally', 'Frei', 'Laplant', 'Lago', 'Leiter', 'Trumbull', 'Shaeffer', 'Gongora', 'Coady', 'Fyffe', 'Mance', 'Worcester', 'Zehner', 'Bodie', 'Burnes', 'Pompey', 'Teitelbaum', 'Beaupre', 'Visconti', 'Mumma', 'Markiewicz', 'Piscitelli', 'Moak', 'Bourland', 'Pennock', 'Hannum', 'Robichaux', 'Folks', 'Coppage', 'Heffron', 'Mullet', 'Kimberlin', 'Breneman', 'Blandford', 'Matthias', 'Engebretson', 'Roessler', 'Allee', 'Parkman', 'Barge', 'Ren', 'Backstrom', 'Bullen', 'Lampman', 'Loesch', 'Echavarria', 'Haman', 'Cortina', 'Elms', 'Gordan', 'Pabst', 'Snelson', 'Vanarsdale', 'Pecora', 'Rabago', 'Enger', 'Senger', 'Dewees', 'Semple', 'Howey', 'Westlund', 'Daw', 'Hagemann', 'Mcpeek', 'Vanderhoof', 'Ohler', 'Bohm', 'Mazzone', 'Arnott', 'Bouton', 'Fackler', 'Giunta', 'Stagner', 'Tavera', 'Poorman', 'Buch', 'Mangano', 'Bonar', 'Gerson', 'Ranger', 'Mccullar', 'Wunder', 'Bade', 'Armand', 'Chalfant', 'Lichtenstein', 'Turco', 'Degraw', 'Few', 'Haigler', 'Lis', 'Bittinger', 'Morrone', 'Hodgdon', 'Wittenberg', 'Imes', 'Dreiling', 'Landwehr', 'Maly', 'Warlick', 'Terpstra', 'Bolte', 'Stiller', 'Stmartin', 'Pankratz', 'Albee', 'Victory', 'Lezama', 'Brecht', 'Monarrez', 'Thurlow', 'Laskey', 'Bothwell', 'Candler', 'Esh', 'Kalman', 'Samano', 'Yohe', 'Regnier', 'Leite', 'Ballantyne', 'Dan', 'Fikes', 'Cendejas', 'Mikula', 'Fairman', 'Dragon', 'Manzella', 'Renninger', 'Leaman', 'Godbey', 'Current', 'Mirabal', 'Boerner', 'Depaz', 'Birge', 'Westberry', 'Severin', 'Weddington', 'Longenecker', 'Mccreery', 'Lebel', 'Nader', 'Gan', 'Auguste', 'Colonna', 'Paramo', 'Minyard', 'Duley', 'Beil', 'Salters', 'Brindley', 'Simmers', 'Lumpkins', 'Crisman', 'Raulerson', 'Lanz', 'Deroche', 'Kemmerer', 'Bogner', 'Mahn', 'Willer', 'Gunnels', 'Warford', 'Reason', 'Scherr', 'Digirolamo', 'Hallowell', 'Wilcoxson', 'Gaillard', 'Deshields', 'Hively', 'Sakai', 'Creason', 'Jaber', 'Lapinski', 'Bolivar', 'Millwood', 'Shumpert', 'Fujii', 'Plemmons', 'Lamere', 'Cleghorn', 'Mccaw', 'Seavey', 'Zwick', 'Hosler', 'Lepley', 'Marden', 'Cornwall', 'Gauger', 'Hofmeister', 'Bugarin', 'Loose', 'Guardiola', 'Hertzog', 'Bigger', 'Heineman', 'Retzlaff', 'Rizzuto', 'Flannigan', 'Rathburn', 'Moulder', 'Town', 'Gautier', 'Hamid', 'Torrance', 'Walthall', 'Windom', 'Kleckner', 'Kirwan', 'Gasaway', 'Pinkard', 'Concannon', 'Mcquiston', 'Yow', 'Eshleman', 'Riggleman', 'Foulk', 'Bolles', 'Craine', 'Hinnant', 'Gholston', 'Lebo', 'Torkelson', 'Mancia', 'Canale', 'Celestin', 'Neubert', 'Schmaltz', 'Highfill', 'Fisch', 'Matte', 'Hoefer', 'Flippin', 'Mclin', 'Mikkelson', 'Gump', 'Kilroy', 'Ensor', 'Klosterman', 'Ruppel', 'Steffey', 'Sauve', 'Cessna', 'Apgar', 'Jacobus', 'Pettyjohn', 'Northington', 'Smithey', 'Moro', 'Dossett', 'Mccroskey', 'Yelverton', 'Mascarenas', 'Hebb', 'Quinteros', 'Giang', 'Pontius', 'Sipple', 'Atkin', 'Howington', 'Hiebert', 'Lingerfelt', 'Schueler', 'Sailer', 'Smits', 'Keeter', 'Macrae', 'Mease', 'Shortridge', 'Scates', 'Amstutz', 'Kuebler', 'Cambron', 'Eaker', 'Finlayson', 'Bookout', 'Mullett', 'Bank', 'Schlenker', 'Morlock', 'Haskett', 'Dade', 'Gallucci', 'Lahey', 'Ryerson', 'Crownover', 'Banfield', 'Mcclay', 'Diggins', 'Conerly', 'Primus', 'Syverson', 'Prindle', 'Blasingame', 'Deford', 'Garnes', 'Hoisington', 'Glasper', 'Lorusso', 'Hesson', 'Youssef', 'Threlkeld', 'Talmadge', 'Winfree', 'Heacock', 'Rawlinson', 'Burse', 'Diederich', 'Niemiec', 'Norby', 'Bauder', 'Scranton', 'Prentiss', 'Towles', 'Henton', 'Purifoy', 'Pinzon', 'Edler', 'Ragin', 'Albarado', 'Cuadra', 'Hoadley', 'Devita', 'Pavon', 'Alday', 'Goulding', 'Millis', 'Dalley', 'Kolodziej', 'Kropf', 'Kuiper', 'Crespin', 'Xavier', 'Sailor', 'Lagrone', 'Boehme', 'Tidd', 'Wilmore', 'Ziemer', 'Ropp', 'Kettler', 'Pilon', 'Miron', 'Salsbury', 'Job', 'Sensenig', 'Cayton', 'Nanney', 'Rasch', 'Silvestre', 'Ladue', 'Dampier', 'Ackermann', 'Friedel', 'Kleiman', 'Geronimo', 'Ezzell', 'Duclos', 'Moor', 'Neuhaus', 'Lan', 'Allender', 'Tedeschi', 'Langton', 'Dawley', 'Kearse', 'Godina', 'Guernsey', 'Kober', 'Bisbee', 'Lamphere', 'Kinman', 'Wesner', 'Malo', 'Stroupe', 'Millette', 'Yeoman', 'Baig', 'Kirchoff', 'Tsao', 'Cristobal', 'Mucci', 'Pair', 'Barefield', 'Dewolf', 'Fitzmaurice', 'Mcaleer', 'Natal', 'Bara', 'Macey', 'Mclennan', 'Fabre', 'Vieyra', 'Magno', 'Eyre', 'Chatterton', 'Gilland', 'Hurlbut', 'Umberger', 'Roloff', 'Brambila', 'Mazzeo', 'Letson', 'Norsworthy', 'Bier', 'Gioia', 'Kapoor', 'Marlatt', 'Flippo', 'Houde', 'Baughn', 'Blackledge', 'Fly', 'Dinkel', 'Rathbone', 'Bober', 'Boydston', 'Ferdinand', 'Coletti', 'Cuenca', 'Deters', 'Blagg', 'Timmins', 'Boyden', 'Meads', 'Narcisse', 'Saelee', 'Cosner', 'Strawser', 'Amico', 'Dowdle', 'Golub', 'Silverberg', 'Riles', 'Balk', 'Buhr', 'Feltman', 'Stickel', 'Zapien', 'Cargile', 'Kulik', 'Lazzaro', 'Oberle', 'Wickstrom', 'Maeda', 'Cockrum', 'Boulton', 'Sandford', 'Culbert', 'Dula', 'Ament', 'Chunn', 'Owenby', 'Wasilewski', 'Wichman', 'Oestreich', 'Klos', 'Orchard', 'Hogge', 'Presson', 'Cordon', 'Gans', 'Leonardi', 'Manjarrez', 'Olander', 'Drennen', 'Wirt', 'Tiger', 'Dolce', 'Hagstrom', 'Hirsh', 'Tally', 'Crumbley', 'Mcgreevy', 'Amidon', 'Olague', 'Lint', 'Poche', 'Lipford', 'Engen', 'Mcelfresh', 'Cuneo', 'Krumm', 'Haak', 'Arocho', 'Longworth', 'Seamon', 'Bronner', 'Swartzentruber', 'Chand', 'Wilhoit', 'Chapel', 'Hitchens', 'Brzezinski', 'Heidenreich', 'Ellenberger', 'Gamblin', 'Ormond', 'Burchard', 'Dibella', 'Nicoll', 'Simcox', 'Strohm', 'Dittmar', 'Wycoff', 'Grays', 'Spero', 'Vess', 'Picone', 'Greening', 'Maynes', 'Knauss', 'Wojtowicz', 'Chaput', 'Soliman', 'Ponton', 'Carlino', 'Kestner', 'Kelch', 'Dimauro', 'Iorio', 'Parenteau', 'Pesina', 'Clauson', 'Stigall', 'Keels', 'Waldrep', 'Wix', 'Draeger', 'Ertel', 'Starner', 'Charest', 'Simoneaux', 'Ivanov', 'Thor', 'Gravel', 'Trottier', 'Clendenin', 'Kromer', 'Benda', 'Touchet', 'Hornbuckle', 'Avent', 'Dombroski', 'Friedland', 'Radabaugh', 'Vesely', 'Wike', 'Lax', 'Messersmith', 'Deoliveira', 'Brey', 'Cogdill', 'Overturf', 'Sova', 'Pero', 'Beaird', 'Cevallos', 'Defalco', 'Taormina', 'Thornberry', 'Westervelt', 'Macaulay', 'Hajek', 'Brugger', 'Leff', 'Ketterer', 'Ono', 'Mullenix', 'Frison', 'Gullo', 'Calhoon', 'Summey', 'Hockaday', 'Dimatteo', 'Agan', 'Patenaude', 'Mary', 'Tanis', 'Obert', 'Elton', 'Randles', 'Migliore', 'Schmalz', 'Vanvalkenburg', 'Quinto', 'Palmquist', 'Hoops', 'Naples', 'Orear', 'Eberhard', 'Fitzgibbons', 'Adkinson', 'Gerace', 'Elie', 'Dressel', 'Silber', 'Otey', 'Hsiao', 'Kreutzer', 'Tutor', 'Roundy', 'Haddox', 'Bridgers', 'Leto', 'Daniell', 'Pollitt', 'Freda', 'Mraz', 'Engelbrecht', 'Ariza', 'Grand', 'Pavone', 'Everts', 'Benes', 'Reamer', 'Faucett', 'Eatmon', 'Raymundo', 'Zaman', 'Devitt', 'Master', 'Carron', 'Hoffner', 'Sciortino', 'Stringham', 'Bookman', 'Westberg', 'Spahn', 'Hise', 'Waterbury', 'Buckwalter', 'Hug', 'Overly', 'Dingus', 'Ince', 'Haar', 'Shain', 'Heaps', 'Oppenheimer', 'Miyamoto', 'Schreier', 'Martello', 'Atteberry', 'Folger', 'Macke', 'Pal', 'Lucchesi', 'Osterhout', 'Liriano', 'Legge', 'Barra', 'Crumb', 'Gwyn', 'Forst', 'Axelrod', 'Samayoa', 'Edgell', 'Purkey', 'Lannon', 'Branam', 'Yeo', 'Hatmaker', 'Borum', 'Villagrana', 'Lawing', 'Bark', 'Muirhead', 'Eckles', 'Weight', 'Surles', 'Cullinan', 'Lagos', 'Naber', 'Sloat', 'Foos', 'Vine', 'Milliner', 'Reliford', 'Dahlquist', 'Gibney', 'Moroney', 'Stecker', 'Bella', 'Brickhouse', 'Canela', 'Kula', 'Tartaglia', 'Siewert', 'Hitch', 'Brickman', 'Cheeseman', 'Carollo', 'Geissler', 'Jiron', 'Cossey', 'Sroka', 'Border', 'Brownlow', 'Ellenburg', 'Cella', 'Brinton', 'Scurry', 'Behrendt', 'Carstensen', 'Schendel', 'Bodner', 'Eddleman', 'Stec', 'Capasso', 'Leu', 'Kennett', 'Ruane', 'Critchfield', 'Carbonell', 'Mitcham', 'Troncoso', 'Mckeen', 'Cammack', 'Broach', 'Culbreath', 'Callejas', 'Wurst', 'Brookman', 'Guerrier', 'Seese', 'Kitzmiller', 'Graybeal', 'Yardley', 'Cheever', 'Virgin', 'Brimmer', 'Swoboda', 'Pandya', 'Canton', 'Magnus', 'Draughn', 'Dilts', 'Tauber', 'Vandegrift', 'Rene', 'Cousineau', 'Joo', 'Pimental', 'Carpentier', 'Eager', 'Cumberland', 'Eastridge', 'Moberly', 'Erhardt', 'Meldrum', 'Degennaro', 'Desanto', 'Manahan', 'Gowdy', 'Popham', 'Mee', 'Kinslow', 'Harned', 'Cartee', 'Raiford', 'Henrichs', 'Maffei', 'Seamans', 'Heckel', 'Toll', 'Milian', 'Mabrey', 'Dall', 'Lanford', 'Carew', 'Bascom', 'Christofferson', 'Hadfield', 'Ferber', 'Mestas', 'Leith', 'Abston', 'Cuddy', 'Svendsen', 'Cowling', 'Segars', 'Nalls', 'Hofstetter', 'Badgley', 'Mccaffery', 'Burner', 'Laymon', 'Pinion', 'Schooler', 'Brun', 'Aldaco', 'Savarese', 'Gravelle', 'Belvin', 'Brekke', 'Dekker', 'Ellefson', 'Lurie', 'Cassity', 'Epperly', 'Genova', 'Dehn', 'Fargo', 'Vanderford', 'Sine', 'Horrell', 'Napoleon', 'Kamm', 'Riel', 'Gerena', 'Check', 'Devane', 'Grissett', 'Brendel', 'Weyant', 'Basurto', 'Coppinger', 'Grosse', 'Saeed', 'Lunceford', 'Washam', 'Benard', 'Eastham', 'Holleran', 'Kiesel', 'Risch', 'Mccullen', 'Vizcaino', 'Fullen', 'Westbrooks', 'Babich', 'Mauch', 'Hensler', 'Bryner', 'Phillippi', 'Santistevan', 'Jalbert', 'Vanorden', 'Brantner', 'Mcgrail', 'Rustin', 'Lebaron', 'Genao', 'Quast', 'Hamburg', 'Mensah', 'Heckler', 'Popa', 'Mantooth', 'Hargreaves', 'Jury', 'Seiber', 'Calton', 'Lafreniere', 'Starbuck', 'Gow', 'Veazey', 'Kneeland', 'Woodberry', 'Vallone', 'Sutcliffe', 'Loh', 'Wiltse', 'Choudhury', 'Rollo', 'Bjerke', 'Huffstetler', 'Ogren', 'Legere', 'Wilmer', 'Conboy', 'Pressler', 'Hon', 'Monger', 'Devos', 'Houtz', 'Shurtleff', 'Sedlak', 'Carolan', 'Luc', 'Immel', 'Guizar', 'Kron', 'Lusby', 'Whitsett', 'Pryce', 'Mengel', 'Youngberg', 'Kluge', 'Thrush', 'Wilsey', 'Santee', 'Braham', 'Palmeri', 'Cousino', 'Willits', 'Gram', 'Dearmond', 'Fonville', 'Sabatini', 'Nehring', 'Henne', 'Prager', 'Mederos', 'Schuldt', 'Weisz', 'Mccart', 'Warriner', 'Bartelt', 'Dimond', 'Mccubbin', 'Say', 'Mickel', 'Bracamonte', 'Volkman', 'Brindle', 'Bitter', 'Dickie', 'Inge', 'Brinegar', 'Lerman', 'Bohan', 'Rondon', 'Dilbeck', 'Rumbaugh', 'Simard', 'Berke', 'Ealey', 'Knauer', 'Michalek', 'Smolinski', 'Wurster', 'Zullo', 'Nott', 'Claar', 'Mayor', 'Moir', 'Hubbert', 'Hankerson', 'Mok', 'Simko', 'Mumm', 'Sheely', 'Abramowitz', 'Pusateri', 'Boomer', 'Chappelle', 'Demery', 'Coniglio', 'Asay', 'Nova', 'Biel', 'Delancey', 'Tocco', 'Tant', 'Melin', 'Lacoste', 'Derrico', 'Stacks', 'Watley', 'Stoneking', 'Westrick', 'Pons', 'Malm', 'Parekh', 'Loop', 'Kitt', 'Crisostomo', 'Ecklund', 'Tollison', 'Dziedzic', 'Pillsbury', 'Baumer', 'Matsuda', 'Jeon', 'Foye', 'Peltz', 'Candela', 'Levey', 'Organ', 'Hathorn', 'Galeano', 'Nies', 'Cabezas', 'Barras', 'Pier', 'Truss', 'Leist', 'Lheureux', 'Nakano', 'Ladwig', 'Grunwald', 'Centers', 'Sherrard', 'Morais', 'Juhl', 'Ivers', 'Dunfee', 'Jolliff', 'Breeze', 'Tapper', 'Goodridge', 'Kelliher', 'Finck', 'Roose', 'Gauvin', 'Coil', 'Pounders', 'Lobb', 'Stalcup', 'Swanner', 'Boivin', 'Neer', 'Laxton', 'Pai', 'Postma', 'Janus', 'Didier', 'Engleman', 'League', 'Fray', 'Aguillon', 'Richins', 'Tolar', 'Criner', 'Rowlands', 'Verdi', 'Utt', 'Winders', 'Turbeville', 'Rada', 'Mcnichols', 'Boddy', 'Binford', 'Amey', 'Schultze', 'Sontag', 'Saleem', 'Przybylski', 'Vanderlinden', 'Vanfossen', 'Longacre', 'Heasley', 'Southwell', 'Decesare', 'Munch', 'Minix', 'Hymes', 'Klopp', 'Militello', 'Schuessler', 'Velazco', 'Jurek', 'Claycomb', 'Diemer', 'Roser', 'Huse', 'Perkinson', 'Musa', 'Leavy', 'Seidman', 'Vroman', 'Stalter', 'Grieve', 'Aron', 'Purdie', 'Dusek', 'Rago', 'Shepler', 'Leopard', 'Araya', 'Rutt', 'Voth', 'Hittle', 'Husain', 'Gratton', 'Seigler', 'Coppedge', 'Nicastro', 'Fitzgibbon', 'Sosebee', 'Tank', 'Troche', 'Delph', 'Ryland', 'Mazzella', 'Rai', 'Strecker', 'Epp', 'Clower', 'Porche', 'Gelman', 'Herrman', 'Balser', 'Tosh', 'Bonn', 'Cerrato', 'Varley', 'Dingess', 'Goodspeed', 'Boller', 'Heimann', 'Gottfried', 'Super', 'Falzone', 'Bizzell', 'Litwin', 'Ji', 'Rogowski', 'Tindle', 'Hoye', 'Balfour', 'Focht', 'Manz', 'Stender', 'Sutterfield', 'Bayes', 'Mullings', 'Dockter', 'Figueiredo', 'Kepner', 'Posadas', 'Nettleton', 'Ruder', 'Younce', 'Flanary', 'Scotti', 'Bayliss', 'Tandy', 'Henrickson', 'Volker', 'Letts', 'Joines', 'Fewell', 'Wherry', 'Stelzer', 'Stever', 'Viator', 'Catt', 'Jeffords', 'Guerriero', 'Milby', 'Jozwiak', 'Slawson', 'Portwood', 'Billie', 'Borunda', 'Chinchilla', 'Papadopoulos', 'Lohse', 'Mantz', 'Gabriele', 'Hosford', 'Kohut', 'Tardiff', 'Puma', 'Bodin', 'Hodgins', 'Boon', 'Golightly', 'Bogert', 'Abdi', 'Wigfall', 'Fleischmann', 'Nease', 'Rayborn', 'Zigler', 'Reimann', 'Malagon', 'Puls', 'Grogg', 'Drinkwater', 'Dacus', 'Mcfee', 'Domino', 'Harjo', 'Pascarella', 'Spengler', 'Copple', 'Rollings', 'Brew', 'Brabham', 'Nordquist', 'Emig', 'Riggio', 'Sanson', 'Gerardo', 'Pereda', 'Renken', 'Stickley', 'Milliron', 'Rolling', 'Hollie', 'Biondi', 'Fluharty', 'Magyar', 'Balsamo', 'Imler', 'Hanlin', 'Dycus', 'Kirkley', 'Wimberley', 'Finan', 'Kulkarni', 'Morreale', 'Briner', 'Pelzer', 'Bouie', 'Fenstermaker', 'Gimenez', 'Labella', 'Scherrer', 'Holzman', 'Winer', 'Wrigley', 'Leighty', 'Liptak', 'Chamness', 'Franko', 'Arwood', 'Tiner', 'Schoenberger', 'Gear', 'Hereford', 'Slezak', 'Longfellow', 'Cull', 'Brashears', 'Clear', 'Zielke', 'Arden', 'Bonneau', 'Muck', 'Tarvin', 'Beran', 'Coulombe', 'Toothman', 'Ghosh', 'Mcguirk', 'Pinero', 'Ruan', 'Gartman', 'Peed', 'Cassano', 'Forcier', 'Haque', 'Veatch', 'Fodor', 'Wetherington', 'Barrette', 'Bottorff', 'Holmstrom', 'Honda', 'Kopecky', 'Loaiza', 'Castelan', 'Haydon', 'Lamotte', 'Mutchler', 'Mahmoud', 'Gleaton', 'Rebollar', 'Moctezuma', 'Tannehill', 'Bernardino', 'Walrath', 'Adcox', 'Heidt', 'Rakowski', 'Soza', 'Limas', 'Wysong', 'Mannix', 'Pattillo', 'Corner', 'Kuang', 'Loflin', 'Ledger', 'Ivery', 'Likens', 'Mctaggart', 'Hartin', 'Prange', 'Stenzel', 'Shadle', 'Karn', 'Duplantis', 'Garibaldi', 'Batty', 'Goulart', 'Ranck', 'Beekman', 'Nicolosi', 'Arizmendi', 'Donoho', 'Drewry', 'Lenihan', 'Spatz', 'Wible', 'Dimmick', 'Stelter', 'Seyler', 'Stringfield', 'Bonaparte', 'Dematteo', 'Petrey', 'Bellino', 'Cavaliere', 'Thaler', 'Heiner', 'Lillis', 'Hammes', 'Rainbolt', 'Hillyard', 'Farnum', 'Overmyer', 'Replogle', 'Sclafani', 'Audet', 'Santa', 'Hollen', 'Lineberger', 'Bonnet', 'Caples', 'Dahlen', 'Ruggieri', 'Keppler', 'Ryman', 'Copas', 'Lyda', 'Pusey', 'Bostrom', 'Patnode', 'Richeson', 'Hamil', 'Wyss', 'Mcadam', 'Dennett', 'Lever', 'Drinkard', 'Ohl', 'Restivo', 'Vyas', 'Moyle', 'Blauvelt', 'Gregson', 'Scull', 'Verret', 'Stines', 'Forsman', 'Gehman', 'Watrous', 'Gunnell', 'Choice', 'Castaldo', 'Pietrzak', 'Goodsell', 'Klima', 'Stratman', 'Foutz', 'Massingill', 'Huneycutt', 'Zellmer', 'Tefft', 'Hamblen', 'Baggs', 'Mcgarity', 'Alfieri', 'Stetler', 'Hershman', 'Fuerst', 'Granda', 'Villafane', 'Stocking', 'Laguerre', 'Salvato', 'Mcniel', 'Trim', 'Goldston', 'Tannenbaum', 'Laforge', 'Hawker', 'Innis', 'Rasheed', 'Marbury', 'Jules', 'Harpster', 'Hruska', 'Mancillas', 'Ruck', 'Schloss', 'Shy', 'Leming', 'Eich', 'Allain', 'Premo', 'Goodner', 'Karlin', 'Natoli', 'Sinn', 'Althouse', 'Bodiford', 'Krishnan', 'Snedeker', 'Weigle', 'Blohm', 'Renwick', 'Menzies', 'Stonebraker', 'Brunetti', 'Crompton', 'Hucks', 'Maharaj', 'Bangert', 'Hepp', 'Kammer', 'Sutliff', 'Doyon', 'Hutsell', 'Cumbie', 'Dibiase', 'Linke', 'Sapienza', 'Sprayberry', 'Sundstrom', 'Vanbeek', 'Ewart', 'Erlandson', 'Knutsen', 'Nicolai', 'Oros', 'Almquist', 'Tedrow', 'Diebold', 'Bellman', 'Sherrer', 'Ehret', 'Ota', 'Seman', 'Folse', 'Amy', 'Mcateer', 'Steinhauer', 'Vannatta', 'Holle', 'Carreras', 'Anger', 'Clinkscales', 'Castiglione', 'Zakrzewski', 'Principe', 'Artman', 'Waiters', 'Tarbox', 'Sippel', 'Belz', 'Joachim', 'Pipkins', 'Peterkin', 'Abalos', 'Flock', 'Brochu', 'Tobler', 'Mckinnis', 'Gatson', 'Cronan', 'Manthey', 'Oberholtzer', 'Schiltz', 'Skowronski', 'Matute', 'Castonguay', 'Bechard', 'Drees', 'Carte', 'Baysinger', 'Kees', 'Steve', 'Ratchford', 'Clopton', 'Heimbach', 'Selig', 'Peavey', 'Sidney', 'Hilliker', 'Oehler', 'Essig', 'Ownby', 'Huling', 'Aylward', 'Matzke', 'Mikkelsen', 'Vandam', 'Rodden', 'Plunk', 'Mcdonell', 'Buechler', 'Dahm', 'Tarlton', 'Funches', 'Alvidrez', 'Padua', 'Pingel', 'Cid', 'Mcburney', 'Brunton', 'Dwight', 'Bucio', 'Schiffer', 'Dyal', 'Cyphers', 'Gildea', 'Wengerd', 'Lappin', 'Longwell', 'Basil', 'Acklin', 'Cancino', 'Kalina', 'Tynan', 'Raasch', 'Fleener', 'Dunmire', 'Gent', 'Cruickshank', 'Baltimore', 'Shum', 'Vanpatten', 'Costilla', 'Grimshaw', 'Loar', 'Royse', 'Amon', 'Amendola', 'Mcgonagle', 'Alm', 'Hausmann', 'Heitzman', 'Mailloux', 'Brault', 'Capra', 'Levis', 'Barillas', 'Quandt', 'Fedele', 'Chittenden', 'Cheesman', 'Wildes', 'Bolan', 'Metoyer', 'Ciccarelli', 'Melara', 'Gano', 'Janowski', 'Magoon', 'Kuster', 'Ofarrell', 'Joplin', 'Cannella', 'Middendorf', 'Putz', 'Saephan', 'Sieg', 'Lainez', 'Roten', 'Buras', 'Nock', 'Manke', 'Hymel', 'Devaughn', 'Braverman', 'Fleisher', 'Persson', 'Sandidge', 'Corsi', 'Torok', 'Steinhoff', 'Corby', 'Shorey', 'Wooton', 'Estell', 'Bolander', 'Vivar', 'Cuesta', 'Renick', 'Isler', 'Caprio', 'Crissman', 'Wann', 'Matchett', 'Calahan', 'Escareno', 'Liguori', 'Helt', 'Boner', 'Luper', 'Hoppes', 'Ingold', 'Gilleland', 'Saathoff', 'Szczepanski', 'Yockey', 'Veith', 'Wasser', 'Denniston', 'Fretwell', 'Goetsch', 'Havel', 'Banach', 'Schaal', 'Nisbet', 'Depaul', 'Escalona', 'Gammons', 'Schmelzer', 'Wehrle', 'Guglielmo', 'Oberlander', 'Wolski', 'Dimick', 'Rebello', 'Braunstein', 'Vanderveen', 'Saini', 'Meiners', 'Metheny', 'Schommer', 'Kissell', 'Burgoyne', 'Walmsley', 'Parmley', 'Arthurs', 'Worsley', 'Hulme', 'Campisi', 'Parvin', 'Ogawa', 'Coder', 'Gardener', 'Taplin', 'Nuzzo', 'Linthicum', 'Rosenstein', 'Simoneau', 'Preble', 'Chae', 'Nealon', 'Stonecipher', 'Medders', 'Bencomo', 'Durazo', 'Scotto', 'Klem', 'Corman', 'Byard', 'Evan', 'Dengler', 'Kohls', 'Seidler', 'Clute', 'Nebel', 'Hohl', 'Younker', 'Parkerson', 'Pullins', 'Sweeting', 'Wiersma', 'Callanan', 'Lisk', 'Fassett', 'Alloway', 'Lafever', 'Ollis', 'Gracey', 'Tune', 'Ester', 'Weingarten', 'Swigart', 'Frew', 'Conkle', 'Mendelsohn', 'Belliveau', 'Bacher', 'Coto', 'Ro', 'Lipson', 'Standard', 'Hoerner', 'Moldenhauer', 'Trivette', 'Colligan', 'Cacho', 'Emrich', 'Condit', 'Styer', 'Paramore', 'Cheramie', 'Sprenger', 'Kreps', 'Curd', 'Josephs', 'Bruch', 'Villano', 'Banh', 'Kennison', 'Hilson', 'Gathers', 'Weinman', 'Brickley', 'Jetton', 'Munford', 'Charboneau', 'Dittrich', 'Boysen', 'Newbury', 'Hayner', 'Pfau', 'Wegman', 'Eure', 'Heinrichs', 'Kresge', 'Klepper', 'Yohn', 'Bergan', 'Spells', 'Reisman', 'Schiffman', 'Napoles', 'Banegas', 'Landman', 'Hallenbeck', 'Sever', 'Hole', 'Bown', 'Barnaby', 'Junior', 'Deloatch', 'Secrist', 'Steigerwald', 'Kallas', 'Littell', 'Clinger', 'Rehman', 'Cothern', 'Class', 'Sabino', 'Mckain', 'Werts', 'Asmus', 'Fierros', 'Heffelfinger', 'Henthorn', 'Weirich', 'Ashbrook', 'Alber', 'Calles', 'Bragdon', 'Gerow', 'Hanger', 'Machen', 'Patt', 'Harada', 'Parmelee', 'Decaro', 'Sons', 'Tindal', 'Lubbers', 'Ferland', 'Bruni', 'Boyes', 'Danis', 'Tigner', 'Anzaldua', 'Gaxiola', 'Iacono', 'Lizama', 'Forbis', 'Mcguffin', 'Greenhill', 'Baity', 'Welcome', 'Lauzon', 'Nicodemus', 'Rabin', 'Teegarden', 'Yunker', 'Salim', 'Dews', 'Schueller', 'Stogsdill', 'Minch', 'Ellett', 'Villafana', 'Shan', 'Boler', 'Kast', 'Shrout', 'Taff', 'Willcox', 'Kahan', 'Gerth', 'Sabella', 'Procopio', 'Vedder', 'Heeter', 'Banes', 'Alaimo', 'Raza', 'Starkweather', 'Mutter', 'Manners', 'Bohanan', 'Virden', 'Booze', 'Wimbush', 'Eickhoff', 'Hankinson', 'Swilley', 'Killinger', 'Labar', 'Tallant', 'Rosin', 'Hillhouse', 'Labarre', 'Ryans', 'Heintzelman', 'Cottone', 'Bickerstaff', 'Westley', 'Rotter', 'Hey', 'Dinapoli', 'Lohmann', 'Reetz', 'Vences', 'Mckiernan', 'Thornsberry', 'Hofman', 'Murrieta', 'Vanwormer', 'Sen', 'Pinheiro', 'Jaco', 'Maner', 'Crosley', 'Rogalski', 'Hollandsworth', 'Hinze', 'Seawright', 'Brosius', 'Keehn', 'Sweetman', 'Vicknair', 'Casler', 'Hagopian', 'Westhoff', 'Lipari', 'Poll', 'Lintz', 'Rosinski', 'Henrie', 'Crystal', 'Wroten', 'Perla', 'Zawacki', 'Mckillip', 'Dorantes', 'Wallick', 'Hoots', 'Witty', 'Granata', 'Janicki', 'Petroff', 'Emert', 'Raskin', 'Picou', 'Caple', 'Mcelyea', 'Blackmer', 'Busbee', 'Pettengill', 'Newberg', 'Nickle', 'Hedman', 'Flavin', 'Forgione', 'Wachtel', 'Meader', 'Nale', 'Westby', 'Pulaski', 'Schupp', 'Troutt', 'Fishburn', 'Laprade', 'Dealba', 'Waymire', 'Stiefel', 'Carner', 'Fallin', 'Belin', 'Anand', 'Lesh', 'Okada', 'Whipkey', 'Mang', 'Harvill', 'Caver', 'Moskal', 'Schaible', 'Vandeusen', 'Boyko', 'Matteo', 'Crisler', 'Capehart', 'Heide', 'Holdsworth', 'Mcdonagh', 'Burlison', 'Beshears', 'Gills', 'Cowger', 'Gendreau', 'Goering', 'Hewes', 'Whelchel', 'Kier', 'Tramel', 'Mcsherry', 'Morita', 'Cissell', 'Knaus', 'Vangilder', 'Karsten', 'Linscott', 'Ratner', 'Catoe', 'Scriven', 'Gerstner', 'Brobst', 'Normandin', 'Piasecki', 'Tamura', 'Balboa', 'Nathanson', 'Huizenga', 'Renard', 'Deshazo', 'Ethier', 'Fabiano', 'Quisenberry', 'Mcbryde', 'Palencia', 'Scaglione', 'Friese', 'Laughter', 'Houchins', 'Loman', 'Garden', 'Cromartie', 'Borgman', 'Hoffpauir', 'Choquette', 'Jarrard', 'Fernald', 'Barranco', 'Levering', 'Ansell', 'Perl', 'Caudell', 'Ewen', 'Ohanlon', 'Swofford', 'Reasoner', 'Grout', 'Rising', 'Buttram', 'Vandenheuvel', 'Imel', 'Rearick', 'Harn', 'Sorrels', 'Biggins', 'Renda', 'Norden', 'Matula', 'Walch', 'Broad', 'Stokley', 'Gully', 'Barrientes', 'Chilcote', 'Freel', 'Lage', 'Farner', 'Rubel', 'Demko', 'Shao', 'Cupples', 'Holderman', 'Dunnam', 'Hughs', 'Foskey', 'Darst', 'Greenblatt', 'Shiner', 'Brasfield', 'Simeon', 'Maser', 'Lacayo', 'Priestley', 'Pleasants', 'Howse', 'Iyer', 'Perreira', 'Baillargeon', 'Revilla', 'Yarger', 'Gries', 'Sheeley', 'Prim', 'Picazo', 'Heinlein', 'Merola', 'Malhotra', 'Wein', 'Mchone', 'Valliere', 'Minner', 'Blumer', 'Hasse', 'Kuester', 'Landi', 'Suits', 'Primeaux', 'Jarnagin', 'Galle', 'Greenlaw', 'Qiu', 'Lamarche', 'Acheson', 'Gothard', 'Mendivil', 'Bombard', 'Mcquillen', 'Munden', 'Herzberg', 'Ros', 'Umstead', 'Levins', 'Pellegrin', 'Castagna', 'Alvord', 'Huckins', 'Wagnon', 'Plemons', 'Dolin', 'Garica', 'Lyttle', 'Bazile', 'Astudillo', 'Gover', 'Galati', 'Seager', 'Girardi', 'Freels', 'Bramblett', 'Brancato', 'Reppert', 'Saetern', 'Puig', 'Prettyman', 'Chagnon', 'Heavner', 'Schlichting', 'Saladino', 'Stall', 'Loiselle', 'Sedano', 'Panos', 'Heilig', 'Ridgley', 'Basilio', 'Rapoza', 'Furrow', 'Oliveras', 'Cordray', 'Strausbaugh', 'Culhane', 'Iraheta', 'Lamantia', 'Shires', 'Wilding', 'Obanion', 'Easterwood', 'Hearns', 'Manske', 'Spiess', 'Eckley', 'Wootton', 'Enochs', 'Cheatwood', 'Woodfin', 'Akridge', 'Mattocks', 'Mcdougle', 'Legette', 'Neher', 'Rhoton', 'Vartanian', 'Dunkel', 'Wehmeyer', 'Foutch', 'Dille', 'Halle', 'Lowden', 'Olesen', 'Chace', 'Hasbrouck', 'Lesage', 'Pappalardo', 'Shinkle', 'Ishii', 'Peralez', 'Gabler', 'Fichter', 'Mcnicholas', 'Moshier', 'Barbeau', 'Bossert', 'Trivett', 'Bamford', 'Lauterbach', 'Gossman', 'Epling', 'Welk', 'Daub', 'Squier', 'Dicus', 'Siller', 'Romaine', 'Meriwether', 'Bordner', 'Baden', 'Hagins', 'Sica', 'Mullane', 'Jurgensen', 'Tien', 'Gertz', 'Touchstone', 'Bones', 'Kimmons', 'Prisco', 'Kaser', 'Drysdale', 'Jelks', 'Cerrone', 'Wolfenbarger', 'Deckert', 'Ganley', 'Fleeman', 'Cubbage', 'Woodie', 'Schwan', 'Siefert', 'Rizvi', 'Heier', 'Khanna', 'Leet', 'Gratz', 'Mullan', 'Moorefield', 'Fishback', 'Whittenburg', 'Casson', 'Statham', 'Red', 'Coldiron', 'Keplinger', 'Reichman', 'Brier', 'Vavra', 'Housman', 'Kitson', 'Fekete', 'Rotella', 'Onofre', 'Orvis', 'Beutler', 'Cadwallader', 'Gabor', 'Emmanuel', 'Moretz', 'Suniga', 'Mcmath', 'Kinlaw', 'Beringer', 'Gaudreau', 'Lirette', 'Drye', 'Oubre', 'Gardella', 'Reigle', 'Zubia', 'Mccardle', 'Ambler', 'Lucius', 'Fizer', 'Hilley', 'Fischbach', 'Borelli', 'Gies', 'Barks', 'Sheard', 'Hammontree', 'Hogle', 'Fagg', 'Buitron', 'Eiler', 'Grandstaff', 'Hank', 'Wark', 'Decoteau', 'Depina', 'Clabaugh', 'Desiderio', 'Kuchta', 'Trang', 'Abril', 'Smathers', 'Kaspar', 'Melia', 'Sandman', 'Maltese', 'Mccasland', 'Rayl', 'Meche', 'Wiggin', 'Saint', 'Dorner', 'Columbus', 'Boatner', 'Fresquez', 'Sykora', 'Shriner', 'Drumheller', 'Mahony', 'Redinger', 'Radloff', 'Mitts', 'Casperson', 'Gammill', 'Moraga', 'Baratta', 'Tow', 'Ocon', 'Cruce', 'Bohannan', 'Hurtt', 'Mose', 'Caines', 'Heisey', 'Pitcock', 'Swiderski', 'Shu', 'Buda', 'Whidden', 'Busick', 'Simas', 'Croley', 'Morrisey', 'Saulsberry', 'Crudup', 'Bongiorno', 'Beem', 'Bunner', 'Rosemond', 'Freire', 'Casado', 'Merideth', 'Selden', 'Lamarre', 'Fullwood', 'Hartig', 'Kerlin', 'Lebowitz', 'Kibbe', 'Fannon', 'Hotz', 'Yerkes', 'Re', 'Waddington', 'Akbar', 'Baek', 'Closson', 'Miers', 'Bonomo', 'Wetherbee', 'Taranto', 'Henslee', 'Bartle', 'Hilger', 'Asaro', 'Mahr', 'Strozier', 'Agudelo', 'Kulick', 'Skoglund', 'Yamasaki', 'Schlemmer', 'Hefley', 'Waxman', 'Radley', 'Sanderlin', 'Arispe', 'Galang', 'Morejon', 'Stich', 'Cesario', 'Silvis', 'Gurganus', 'Shofner', 'Funderburg', 'Reddish', 'Rybak', 'Dingler', 'Mankin', 'Renna', 'Alban', 'Mckittrick', 'Lippman', 'Brenton', 'Liebman', 'Santillo', 'Crigger', 'Riney', 'Mccraney', 'Kluck', 'Sosnowski', 'Anspach', 'Bourdon', 'Modi', 'Heer', 'Mastroianni', 'Musial', 'Whiteaker', 'Summa', 'Herber', 'Roselli', 'Orris', 'Bert', 'Dedmon', 'Kelson', 'Paone', 'Barstow', 'Gerst', 'Bettinger', 'Castner', 'Penman', 'Broaddus', 'Ohman', 'Villalon', 'Carwile', 'Fluellen', 'Ort', 'Bommarito', 'Shuff', 'Cannata', 'Westgate', 'Bien', 'Driggs', 'Maisonet', 'Costin', 'Raine', 'Banton', 'Buterbaugh', 'Katzman', 'Coreas', 'Rosalez', 'Gose', 'Robie', 'Winburn', 'Glancy', 'Hild', 'Strock', 'Umanzor', 'Hoglund', 'Kesner', 'Lynam', 'Swayze', 'Grizzard', 'Fettig', 'Macko', 'Schrum', 'Sours', 'Yonker', 'Ebanks', 'Chiodo', 'Meaney', 'Paras', 'Struthers', 'Sicard', 'Leveille', 'Beckstead', 'Calero', 'Fuhrmann', 'Lybarger', 'Capo', 'Adolph', 'Raabe', 'Gran', 'Borel', 'Ary', 'Charland', 'Huh', 'Steinert', 'Stemple', 'Groat', 'Zang', 'Nath', 'Ogara', 'Pecina', 'Simoes', 'Breece', 'Nascimento', 'Usry', 'Gain', 'Brassfield', 'Lochner', 'Pietsch', 'Wechsler', 'Sum', 'Teneyck', 'Pelt', 'Burnley', 'Renzi', 'Mujica', 'Profitt', 'Body', 'Debusk', 'Robidoux', 'Pruneda', 'Pomerantz', 'Gonyea', 'Crosier', 'Currence', 'Newborn', 'Tolleson', 'Conlan', 'Dunsmore', 'Tansey', 'Clinard', 'Staudt', 'Oppenheim', 'Gossard', 'Osbourne', 'Gilyard', 'Lucido', 'Tonkin', 'Mitzel', 'Sola', 'Palombo', 'Duane', 'Mac', 'Kerry', 'Stills', 'Viveiros', 'Stallman', 'Moos', 'Follis', 'Maris', 'Hollier', 'Gundlach', 'Moler', 'Schweigert', 'Chartrand', 'Finkle', 'Meese', 'Nigh', 'Amundsen', 'Brocato', 'Dreier', 'Glessner', 'Weibel', 'Fritch', 'Retherford', 'Rahim', 'Markert', 'Ronk', 'Olmeda', 'Gosney', 'Keathley', 'Luby', 'Harrill', 'Dinges', 'Rocheleau', 'Meisel', 'Farrer', 'Lute', 'Apel', 'Pincus', 'Maida', 'Jimmerson', 'Baltz', 'Cuccia', 'Heenan', 'Thieme', 'Zoeller', 'Larocco', 'Abdalla', 'Classen', 'Hassinger', 'Filler', 'Pidgeon', 'Hanford', 'Espy', 'Goodlett', 'Jone', 'Ruggeri', 'Lisi', 'Spada', 'Gerrard', 'Allbritton', 'Brazelton', 'Boggan', 'Dufault', 'Espejo', 'Bodkin', 'Penix', 'Dockins', 'Rascoe', 'Swarthout', 'Tritt', 'Gouin', 'Lamberth', 'Bourn', 'Barnhouse', 'Guzzo', 'Netherton', 'Zamarron', 'Rosenberry', 'Dahms', 'Anwar', 'Whitesides', 'Tidmore', 'Longstreet', 'Claunch', 'Ehrhart', 'Hullinger', 'Xia', 'Heideman', 'Nicklas', 'Prins', 'Soni', 'Dominquez', 'Vogelsang', 'Pew', 'Chess', 'Simmerman', 'Brunell', 'Matthes', 'Kinnison', 'Cansler', 'Weekly', 'Eger', 'Garabedian', 'Milliman', 'Severns', 'Magnusson', 'Fossum', 'Salamon', 'Vandoren', 'Gillingham', 'Charney', 'Nokes', 'Lamon', 'Irick', 'Okeeffe', 'Zou', 'Kott', 'Quillin', 'Friar', 'Drummer', 'Catchings', 'Hamada', 'Scheck', 'Setser', 'Gobble', 'Condra', 'Bowley', 'Deschamps', 'Sylva', 'Bartolome', 'Warfel', 'Veltri', 'Speers', 'Butner', 'Delorme', 'Giesler', 'Sonntag', 'Wetherell', 'Ohagan', 'Torbert', 'Grandberry', 'Ronning', 'Howser', 'Soden', 'Rasco', 'Clauss', 'Beland', 'Nicola', 'Justiniano', 'Varnum', 'Fergus', 'Lazcano', 'Sartori', 'Carnley', 'Lucarelli', 'Bergh', 'Wellborn', 'Bow', 'Longshore', 'Marcel', 'Sumlin', 'Atilano', 'Dostal', 'Westendorf', 'Stiver', 'Morency', 'Herrod', 'Bologna', 'Valiente', 'Weinert', 'Gaertner', 'Prock', 'Spangenberg', 'Tineo', 'Cosio', 'Maass', 'Rist', 'Oatman', 'Waguespack', 'Cardiel', 'Grate', 'Behrends', 'Linger', 'Pozo', 'Scoggin', 'Jenkinson', 'Ake', 'Redick', 'Bonacci', 'Rivet', 'Declue', 'Swing', 'Chopra', 'Leib', 'Wallner', 'Grimmer', 'Wilmes', 'Pirkle', 'Stanhope', 'Knop', 'Culotta', 'Dipaola', 'Hipolito', 'Gerling', 'Sennett', 'Fulghum', 'Grothe', 'Krout', 'Onorato', 'Donis', 'Winbush', 'Aoki', 'Buscher', 'Jarquin', 'Lemanski', 'Mcgrane', 'Tardif', 'Segundo', 'Caba', 'Sease', 'Blinn', 'Losee', 'Kirschbaum', 'Baskett', 'Knights', 'Goudeau', 'Grondin', 'Harting', 'Szewczyk', 'Wieder', 'Conatser', 'Romanelli', 'Freshour', 'Brizendine', 'Rolen', 'Guynn', 'Laforest', 'Doris', 'Sandridge', 'Dublin', 'Blancas', 'Duryea', 'Naik', 'Paradiso', 'Scheele', 'Westra', 'Hassel', 'Bertucci', 'Fansler', 'Flohr', 'Solt', 'Suess', 'Keiper', 'Downard', 'Ivester', 'Darley', 'Seales', 'Kolesar', 'Overbeck', 'Subramanian', 'Panter', 'Parshall', 'Stannard', 'Gravley', 'Dhaliwal', 'Shippy', 'Dolphin', 'Lepper', 'Gorby', 'Delmonte', 'Piccirillo', 'Besaw', 'Alligood', 'Rhymes', 'Eisenman', 'Deveau', 'Tilden', 'Girton', 'Buser', 'Rentschler', 'Sopko', 'Uriostegui', 'Wasko', 'Noffsinger', 'Barkman', 'Dyck', 'Ferrero', 'Kiehl', 'Leffel', 'Rybicki', 'Hedstrom', 'Bracamontes', 'Zebrowski', 'Blundell', 'Brightman', 'Hegwood', 'Beecham', 'Kolbe', 'Bucy', 'Bondi', 'Borgen', 'Gibbens', 'Pullman', 'Letcher', 'Ferebee', 'Kitterman', 'Seefeldt', 'Upham', 'Thiede', 'Bolster', 'Bastin', 'Bondy', 'Mershon', 'Nickson', 'Drozd', 'Schroyer', 'Mcmenamin', 'Reith', 'Lovin', 'San', 'Henegar', 'Haislip', 'Barco', 'Arter', 'Malecki', 'Teeple', 'Walpole', 'Feil', 'Neitzel', 'Ostler', 'Parmar', 'Vinton', 'Jan', 'Weldy', 'Etherton', 'Joya', 'Saliba', 'Schnur', 'Belles', 'Mcgeorge', 'Olden', 'Rarick', 'Worrall', 'Degen', 'Froman', 'Odowd', 'Einhorn', 'Fimbres', 'Maresca', 'Rocker', 'Arend', 'Biermann', 'Guimond', 'Mcgurk', 'Goll', 'Santilli', 'Hadlock', 'Teer', 'Dillion', 'Jorden', 'Honore', 'Bromberg', 'Stoneman', 'Blossom', 'Guzik', 'Stockstill', 'Wax', 'Anello', 'Blasko', 'Frese', 'Berthold', 'Morefield', 'Baptist', 'Legault', 'Bouffard', 'Bebout', 'Darnall', 'Buscemi', 'Buentello', 'Scroggs', 'Gatton', 'Turnquist', 'Lucht', 'Remick', 'Godlewski', 'Bradt', 'Waldorf', 'Zeringue', 'Rowen', 'Mowbray', 'Parkey', 'Engram', 'Mazzarella', 'Kirkbride', 'Gridley', 'Kaster', 'Lorenzana', 'Wareham', 'Star', 'Marshburn', 'Everman', 'Wolfram', 'Zick', 'Hyun', 'Yerger', 'Baham', 'Gebhard', 'Ruf', 'Suchy', 'Tieman', 'Wenz', 'Schiro', 'Fout', 'Abdo', 'Hayter', 'Cleaves', 'Fritsche', 'Meurer', 'Riendeau', 'Ventimiglia', 'Cervera', 'Mallow', 'Allie', 'Hanscom', 'Viloria', 'Dubon', 'Leeson', 'Ruffing', 'Jonson', 'Fenimore', 'Gonzaga', 'Schriver', 'Traina', 'Mecca', 'Lantigua', 'Baril', 'Harford', 'Bartow', 'Asbell', 'Rumley', 'Brogden', 'Derryberry', 'Ketner', 'Dakin', 'Wass', 'Fallis', 'Wada', 'Studdard', 'Lecroy', 'Fetty', 'Nass', 'Chute', 'Parman', 'Bevans', 'Headen', 'Hysell', 'Merten', 'Most', 'Fuss', 'Schrank', 'Last', 'Even', 'Vaz', 'Sifford', 'Streets', 'Claude', 'Bronstein', 'Sherburne', 'Wadkins', 'Gascon', 'Seiter', 'Steffan', 'Cardozo', 'Henricks', 'Claflin', 'Etzel', 'Kulas', 'Trinkle', 'Ortegon', 'Phaneuf', 'Langworthy', 'Barb', 'Mazon', 'Veney', 'Redondo', 'Tieu', 'Laursen', 'Nanez', 'Votaw', 'Walraven', 'Abella', 'Dsouza', 'Bayley', 'Townson', 'Applebaum', 'Mazzei', 'Piche', 'Rivenbark', 'Urrea', 'Dolph', 'Bonifacio', 'Shehan', 'Glascock', 'Verde', 'Gadberry', 'Trimm', 'Dowe', 'Khang', 'Mulhall', 'Selzer', 'Raub', 'Ore', 'Copes', 'Masuda', 'Moscoso', 'Zeitler', 'Mollica', 'Iler', 'Leventhal', 'Manders', 'Prue', 'Fergerson', 'Brose', 'Phu', 'Debellis', 'Haan', 'Schoening', 'Stager', 'Demos', 'Rumble', 'Brunt', 'Nivens', 'Manigault', 'Buendia', 'Deschenes', 'Wittmer', 'Hamon', 'Hentz', 'Loud', 'Oseguera', 'Rayo', 'Macfarland', 'Mimms', 'Grunewald', 'Hartness', 'Wynkoop', 'Wallingford', 'Juergens', 'Meszaros', 'Riehle', 'Trego', 'Neece', 'Coggin', 'Burrill', 'Laurel', 'Routt', 'Rodger', 'Krum', 'Faulkenberry', 'Labadie', 'Hemming', 'Fulp', 'Jamal', 'Deloney', 'Fells', 'Bohnert', 'Kapadia', 'Guill', 'Coop', 'Broadhurst', 'Mccrimmon', 'Bonfiglio', 'Capetillo', 'Chamorro', 'Gargiulo', 'Stoehr', 'Schlecht', 'Karlson', 'Garten', 'Remer', 'Mebane', 'Finnigan', 'Bourdeau', 'Espindola', 'Shukla', 'Petras', 'Steinberger', 'Casner', 'Carico', 'Seevers', 'Westwood', 'Hosea', 'Mcphillips', 'Nygren', 'Wagaman', 'Coghlan', 'Sutherlin', 'Sellman', 'Bashore', 'Mullican', 'Stoneburner', 'Montag', 'Karst', 'Murch', 'Puffer', 'Sabala', 'Pauli', 'Odonoghue', 'Lassen', 'Mattera', 'Mcaninch', 'Portugal', 'Clingan', 'Michener', 'Munsell', 'Streetman', 'Harton', 'Swarts', 'Honig', 'Jesus', 'Rentas', 'Trosper', 'Coffield', 'Burket', 'Donaghy', 'Byun', 'Riess', 'Mcqueary', 'Stayton', 'Ferron', 'Wedding', 'Tibbitts', 'Frisbee', 'Reinoso', 'Lama', 'Allyn', 'Sheen', 'Tyra', 'Golder', 'Veasey', 'Schroth', 'Kukla', 'Narayan', 'Vandemark', 'Horace', 'Kadlec', 'Portnoy', 'Reynosa', 'Surprenant', 'Savell', 'Seagle', 'Vandervort', 'Eye', 'Eccleston', 'Blaise', 'Glaspie', 'Cressman', 'Lahti', 'Yocom', 'Leppert', 'Brendle', 'Greenough', 'Relyea', 'Marinez', 'Bouley', 'Fincham', 'Highley', 'Goza', 'Norrell', 'Yusuf', 'Ohm', 'Thakkar', 'Cosenza', 'Efird', 'Heger', 'Dysart', 'Mango', 'Fitchett', 'Kring', 'Paolucci', 'Menges', 'Layden', 'Mccleery', 'Benko', 'Sandor', 'Blakney', 'Zanders', 'Gengler', 'Fujita', 'Huls', 'Basquez', 'Trepanier', 'Spadaro', 'Ankney', 'Damiani', 'Games', 'Cherney', 'Fitzsimons', 'Dearmas', 'Bonet', 'Diem', 'Shimp', 'Agrawal', 'Gaw', 'Gahagan', 'Fossett', 'Kafka', 'Dedios', 'Coryell', 'Bahe', 'Wurm', 'Wishart', 'Dray', 'Armer', 'Khalid', 'Gassaway', 'Vawter', 'Loew', 'Coello', 'Curren', 'Gilder', 'Letendre', 'Sprecher', 'Rexrode', 'Minich', 'Koepp', 'Mulloy', 'Bohman', 'Gambrel', 'Hackley', 'Demasi', 'Hoffert', 'Kittredge', 'Maltby', 'Nyquist', 'Schieber', 'Kennell', 'Calderwood', 'Compean', 'Romines', 'Simonelli', 'Pico', 'Oda', 'Holte', 'Bate', 'Learn', 'Lowenstein', 'Holtman', 'Mingus', 'Sessa', 'Legendre', 'Gerrish', 'Schoenberg', 'Liberman', 'Mclachlan', 'Higginson', 'Vince', 'Mallery', 'Delamora', 'Difranco', 'Lein', 'Haltom', 'Dority', 'Marcellus', 'Heskett', 'Harward', 'Spinney', 'Darwin', 'Baylis', 'Amodeo', 'Schwandt', 'Mcmorrow', 'Foraker', 'Fyfe', 'Shingleton', 'Blandon', 'Waddy', 'Ricca', 'Scheffer', 'Balliet', 'Philipp', 'Rish', 'Hattaway', 'Krejci', 'Orduno', 'Passarelli', 'Skala', 'Oram', 'Raynes', 'Hiett', 'Tolan', 'Kimbell', 'Delara', 'Farhat', 'Kamps', 'Mohney', 'Escarcega', 'Mell', 'Mcquay', 'Cannizzaro', 'Deuel', 'Losoya', 'Goldin', 'Zaidi', 'Gillmore', 'Buelow', 'Maust', 'Guerrera', 'Bouck', 'Bick', 'Kelty', 'Pines', 'Braziel', 'Bruening', 'Frenzel', 'Kenna', 'Loria', 'Koren', 'Cornelio', 'Poisson', 'Raker', 'Ptak', 'Bohr', 'Coury', 'Failla', 'Cipriani', 'Delany', 'Marmon', 'Kinch', 'Figgins', 'Delfino', 'Risser', 'Hickox', 'Fager', 'Turpen', 'Dalzell', 'Falvey', 'Leiker', 'Mcgonigal', 'Vaquera', 'Weisser', 'Viviano', 'Shrock', 'Minaya', 'Chitty', 'Costley', 'Granberry', 'Dimaria', 'Roma', 'Ortis', 'Burnam', 'Burruss', 'Stoughton', 'Cales', 'Burrage', 'Vanwagner', 'Espada', 'Mccuen', 'Baize', 'Pullum', 'Gerrity', 'Vicari', 'Heuser', 'Semler', 'Fear', 'Havener', 'Kash', 'Thibodaux', 'Hadaway', 'Smithwick', 'Eisenhart', 'Hodgin', 'Cluck', 'Godby', 'Belli', 'Demaree', 'Beyers', 'Jared', 'Mall', 'Defoe', 'Chmura', 'Hepworth', 'Hintze', 'Luk', 'Vanriper', 'Solari', 'Atlas', 'Outland', 'Hanselman', 'Scharff', 'Rhein', 'Milone', 'Rochford', 'Mynatt', 'Lambdin', 'Sandell', 'Grounds', 'Tabler', 'Smartt', 'Dejean', 'Clayborne', 'Vangorder', 'Eastin', 'Hiler', 'Lisle', 'Gramling', 'Degarmo', 'Malec', 'Tinkham', 'Vanauken', 'Andrzejewski', 'Rundell', 'Happel', 'Strine', 'Koerber', 'Haner', 'Ashcroft', 'Hille', 'Cairo', 'Upson', 'Mooring', 'Koury', 'Vito', 'Oberlin', 'Christiano', 'Redfearn', 'Trower', 'Hibbler', 'Sumter', 'Raftery', 'Geise', 'Wohl', 'Gorney', 'Peasley', 'Heap', 'Brazeal', 'Mccleskey', 'Yard', 'Mcroy', 'Amend', 'Cutshaw', 'Kazmierczak', 'Strandberg', 'Lasko', 'Newlon', 'File', 'Bevill', 'Silvera', 'Arakaki', 'Kelsch', 'Ostendorf', 'Cowie', 'Hove', 'Doles', 'Bouvier', 'Fecteau', 'Hasegawa', 'Paschke', 'Taing', 'Heldt', 'Allaire', 'Ochsner', 'Giusti', 'Reisner', 'Swim', 'Laidlaw', 'Vanderbilt', 'Atterberry', 'Barthelemy', 'Chalker', 'Degregorio', 'Mastro', 'Patlan', 'Gipe', 'Roosa', 'Filkins', 'Styron', 'Bryer', 'Blackston', 'Hagel', 'Fralick', 'Linhart', 'Moura', 'Pavia', 'Pavao', 'Furry', 'Petrus', 'Fairweather', 'Blystone', 'Co', 'Divito', 'Villicana', 'Winch', 'Tome', 'Lanoue', 'Biron', 'Noell', 'Mckeel', 'Worthey', 'Aten', 'Eyer', 'Zhen', 'Tischler', 'Luoma', 'Opp', 'Riggin', 'Furness', 'Wolbert', 'Penning', 'Draves', 'Whitehill', 'Dudgeon', 'Kinkead', 'Luca', 'Rosell', 'Macauley', 'Goldner', 'Ishikawa', 'Kirchhoff', 'Lamarca', 'Miyashiro', 'Weger', 'Wuest', 'Kreis', 'Urbanek', 'Palko', 'Victorino', 'Morado', 'Burchette', 'Holyfield', 'Tulloch', 'Twombly', 'Munk', 'Woolford', 'Knisely', 'Locher', 'Eckart', 'Rancourt', 'Pyron', 'Edney', 'Besser', 'Truex', 'Monterroso', 'Bruneau', 'Province', 'Permenter', 'Nims', 'Rollison', 'Cabell', 'Sylvain', 'Salman', 'Signorelli', 'Vegas', 'Maddy', 'Bachelder', 'Sevigny', 'Stolte', 'Chavarin', 'Lukes', 'Rather', 'Gartland', 'Kurek', 'Nantz', 'Savard', 'Finegan', 'No', 'Chichester', 'Newbill', 'Mahnke', 'Sax', 'Sowinski', 'Wendler', 'Cadiz', 'Male', 'Mealey', 'Brookes', 'Enderle', 'Valenta', 'Tooker', 'Whitbeck', 'Threet', 'Cavitt', 'Murtagh', 'Phalen', 'Errico', 'Merkley', 'Ju', 'Zachery', 'Bramer', 'Henline', 'Noga', 'Woelfel', 'Deras', 'Amen', 'Aldape', 'Bartling', 'Claros', 'Spurrier', 'Ginder', 'Fred', 'Giberson', 'Ryba', 'Sommerfeld', 'Dahle', 'Endo', 'Haddon', 'Bowlby', 'Wagener', 'Ketter', 'Balint', 'Goheen', 'Motsinger', 'Celentano', 'Drawdy', 'Dennehy', 'Mcelligott', 'Nakamoto', 'Deines', 'Goldsby', 'Drakeford', 'Steffy', 'Streich', 'Villasana', 'Cermak', 'Prill', 'Ellzey', 'Gartrell', 'Duffie', 'Rother', 'Buse', 'Luz', 'Groen', 'Laviolette', 'Roles', 'Days', 'Eash', 'Haefner', 'Font', 'Mcree', 'Bustillo', 'Coughlan', 'Bax', 'Hoxie', 'Barre', 'Scaife', 'Nowacki', 'Reichardt', 'Rogel', 'Ivins', 'Vanderburg', 'Etchison', 'Chesson', 'Molden', 'Giuliani', 'Goodpaster', 'Kriner', 'Sturtz', 'Tschida', 'Henschel', 'Asselin', 'Kocsis', 'Kroger', 'Swayne', 'Gallop', 'Fraker', 'Lauro', 'Tuohy', 'Scholes', 'Croxton', 'Fertig', 'Gregerson', 'Gundersen', 'Lehrer', 'Monsivais', 'Pilla', 'Weishaar', 'Gutshall', 'Winget', 'Human', 'Oberry', 'Learned', 'Marburger', 'Teed', 'Parrilla', 'Due', 'Hartzler', 'Cieslak', 'Feltz', 'Geren', 'Wile', 'Waldrip', 'Clore', 'Stutler', 'Feehan', 'Lacher', 'Felter', 'Barakat', 'Flippen', 'Holsey', 'Finkbeiner', 'Istre', 'Lengyel', 'Lupercio', 'Beegle', 'Habel', 'Hammill', 'Kifer', 'Buswell', 'Deboard', 'Guilliams', 'Ahlstrom', 'Beliveau', 'Sasse', 'Delker', 'Letterman', 'Avey', 'Bohlen', 'Piner', 'Folmar', 'Barile', 'Komar', 'Bonelli', 'Lamay', 'Cora', 'Deere', 'Sanon', 'Deppe', 'Emmerich', 'Giannone', 'Navarra', 'Hudock', 'Seaborn', 'Burda', 'Faz', 'Stefani', 'Beemer', 'Vose', 'Calandra', 'Eno', 'Figueredo', 'Lauck', 'Schwindt', 'Dumais', 'Hedger', 'Capp', 'Barreiro', 'Buker', 'Spruell', 'Bertolini', 'Hoar', 'Tiemann', 'Vandenbosch', 'Winebrenner', 'Maio', 'Winship', 'Brissette', 'Hansell', 'Elsey', 'Hansard', 'Gildersleeve', 'Hambright', 'Borba', 'Konieczny', 'Lundell', 'Tiedemann', 'Siegler', 'Ying', 'Mckinsey', 'Olah', 'Boersma', 'Younkin', 'Evanoff', 'Nakashima', 'Scalia', 'Piro', 'Colorado', 'Felan', 'Fuentez', 'Blea', 'Gowin', 'Hanning', 'Byrom', 'Morant', 'Bachand', 'Mcsorley', 'Peaslee', 'Bardsley', 'Stilson', 'Severs', 'Kincheloe', 'Kyler', 'Aurand', 'Bento', 'Hoeppner', 'Mertes', 'Pickrell', 'Rustad', 'Millikan', 'Celestino', 'Hovland', 'Kurowski', 'Zollinger', 'Tallon', 'Junkins', 'Mizrahi', 'Bomberger', 'Farrand', 'Curto', 'Bona', 'Donatelli', 'Eppley', 'Schurman', 'Henao', 'Tomberlin', 'Provencio', 'Speidel', 'Cree', 'Inskeep', 'Yeates', 'Hoggatt', 'Hinkson', 'Ficklin', 'Mcnealy', 'Cabanas', 'Laycock', 'Theroux', 'Weymouth', 'Mabie', 'Hatchell', 'Bohanon', 'Bilger', 'Nazarian', 'Weist', 'Depue', 'Mangini', 'Gelb', 'Luman', 'Blass', 'Desroches', 'Hearon', 'Mcmiller', 'Stoltenberg', 'Parenti', 'Daulton', 'Smail', 'Chisum', 'Benefiel', 'Tetrault', 'Foland', 'Reddington', 'Mattei', 'Custis', 'Fransen', 'Zylstra', 'Salvaggio', 'Factor', 'Deshong', 'Biederman', 'Sirianni', 'Steckler', 'Thrall', 'Dorsch', 'Harpe', 'Tell', 'Galusha', 'Guttman', 'Raposa', 'Jaros', 'Lipka', 'Shive', 'Shand', 'Brizuela', 'Horvat', 'Pisciotta', 'Sorge', 'Riebe', 'Vanderlaan', 'Isenhour', 'Franson', 'Goslin', 'Amore', 'Leachman', 'Foulks', 'Alamillo', 'Scarpa', 'Tickle', 'Pettitt', 'Orrell', 'Fleckenstein', 'Sapien', 'Roye', 'Mcmeans', 'Sligh', 'Landgraf', 'Cecere', 'Aune', 'Ketron', 'Welcher', 'Tilford', 'Maston', 'Overall', 'Fails', 'Bah', 'Ketterman', 'Lindauer', 'Saxe', 'Majka', 'Goodenough', 'Panella', 'Ramm', 'Caley', 'Christine', 'Kinsler', 'Pippen', 'Murph', 'Ammann', 'Falkowski', 'Madonna', 'Seligman', 'Rommel', 'Lareau', 'Melone', 'Frasure', 'Joyal', 'Piekarski', 'Porcelli', 'Kennington', 'Pica', 'Ankrom', 'Capron', 'Chatmon', 'Horrigan', 'Morelos', 'Noren', 'Paolini', 'Wildermuth', 'Rossow', 'Dorgan', 'Pawlik', 'Reiber', 'Rothenberger', 'Mcgonigle', 'Oren', 'Jeans', 'Vivas', 'Gerner', 'Brzozowski', 'Croyle', 'Klick', 'Vidaurri', 'Wollman', 'Brouillard', 'Dejohn', 'Meikle', 'Grochowski', 'Kaczor', 'Philbin', 'Sperber', 'Vancil', 'Zornes', 'Strope', 'Housel', 'Minks', 'Dike', 'Jasmin', 'Denicola', 'Gokey', 'Dominy', 'Gillham', 'Viray', 'Herz', 'Hursh', 'Koeller', 'Caicedo', 'Near', 'Harrel', 'Veale', 'Gustavson', 'Lopiccolo', 'Goldschmidt', 'Loder', 'Vannorman', 'Maske', 'Randel', 'Pinner', 'Buntin', 'Roache', 'Pinnock', 'Dimaio', 'Heckert', 'Perrigo', 'Schank', 'Lisowski', 'Brownstein', 'Sharer', 'Hambleton', 'Maker', 'Hursey', 'Aguado', 'Tian', 'Rheaume', 'Becraft', 'Sowders', 'Bratt', 'Tebo', 'Eid', 'Reinecke', 'Storck', 'Pech', 'Alspaugh', 'Grell', 'Purdue', 'Jennette', 'Pauling', 'Wint', 'Knupp', 'Madewell', 'Schwanke', 'Tellier', 'Washer', 'Staff', 'Keely', 'Lisenby', 'Walder', 'Kennerly', 'Ip', 'Michalik', 'Eichner', 'Disbrow', 'Bellomy', 'Boesch', 'Chirico', 'Lietz', 'Ploof', 'Dyar', 'Bai', 'Lary', 'Corbo', 'Danaher', 'Schiavo', 'Giacalone', 'Pentz', 'Studley', 'Doyal', 'Edie', 'Nathaniel', 'Cambra', 'Fenstermacher', 'Garst', 'Gaudio', 'Zavaleta', 'Castilla', 'Griffeth', 'Warthen', 'Derringer', 'Samsel', 'Mattia', 'Boelter', 'Mathieson', 'Estelle', 'Frisk', 'Hipple', 'Garceau', 'Ehrman', 'Buchner', 'Frailey', 'Ganey', 'Belser', 'Leiby', 'Schwind', 'Hagberg', 'Hooley', 'Rafter', 'Hasting', 'Mcnab', 'Piggott', 'Millhouse', 'Brescia', 'Giancola', 'Grob', 'Uresti', 'Tawney', 'Huot', 'Mizer', 'Storrs', 'Shobe', 'Blade', 'Baumbach', 'Eppler', 'Henningsen', 'Kmetz', 'Sepeda', 'Pangburn', 'Falgout', 'Hurn', 'Sholar', 'Kendricks', 'Brimhall', 'Bucklin', 'Hruby', 'Hunziker', 'Krenz', 'Schwager', 'Murley', 'Crittendon', 'Broady', 'Kintz', 'Entrekin', 'Estey', 'Sharrow', 'Quarterman', 'Gumbs', 'Steely', 'Machin', 'Difiore', 'Desch', 'Wiedemann', 'Tonn', 'Villines', 'Mcdole', 'Bashir', 'Beauford', 'Crary', 'Gallina', 'Wolak', 'Aburto', 'Hasler', 'Gullion', 'Bracewell', 'Rusher', 'Sarvis', 'Dargan', 'Garbarino', 'Pigeon', 'Blasi', 'Viens', 'Reising', 'Vosburgh', 'Canipe', 'Mcnett', 'Bruss', 'Shiflet', 'Pinard', 'Lattin', 'Armbrust', 'Peffer', 'Shotts', 'Arbaugh', 'Hux', 'First', 'Bolds', 'Ceaser', 'Cephas', 'Bormann', 'Broadwell', 'Qian', 'Talamantez', 'Vandermolen', 'Maza', 'Kinnear', 'Bullins', 'Arant', 'Brodbeck', 'Rolfes', 'Wisneski', 'Dague', 'Dudas', 'Greener', 'Noguera', 'Greeno', 'Daddario', 'Giambrone', 'Menon', 'Sherrick', 'Spier', 'Semon', 'Fendley', 'Crichton', 'Moree', 'Stratford', 'Zobel', 'Halladay', 'Keesler', 'Prewett', 'Deavers', 'Kamal', 'Bottom', 'Caves', 'Harshaw', 'Fretz', 'Secord', 'Seibold', 'Pantaleon', 'Greek', 'Baumeister', 'Kleven', 'Kos', 'Orban', 'Papke', 'Shatto', 'Cui', 'Boan', 'Nevitt', 'Hultgren', 'Kreiser', 'Veres', 'Jent', 'Merck', 'Gibby', 'Hosch', 'Mallet', 'Dock', 'Dallman', 'Loiacono', 'Tetzlaff', 'Arboleda', 'Mclelland', 'Willing', 'Coonrod', 'Cappiello', 'Courtemanche', 'Halperin', 'Odegard', 'Hornyak', 'Stem', 'Doner', 'Saffold', 'Hochman', 'Ing', 'Knudtson', 'Laabs', 'Selleck', 'Bassler', 'Kamin', 'Hur', 'Forward', 'Finnie', 'Blubaugh', 'Hitz', 'Litteral', 'Mansur', 'Rosenow', 'Vermeulen', 'Markarian', 'Marceau', 'Weisner', 'Sharpless', 'Cunniff', 'Guilfoyle', 'Lauver', 'Lukasik', 'Ripp', 'Wierzbicki', 'Wunsch', 'Boothby', 'Selfridge', 'Mckey', 'Vandermeer', 'Vanhoy', 'Edlund', 'Eggen', 'Bickett', 'Hallum', 'Brow', 'Rhymer', 'Buckalew', 'Haughey', 'Hentges', 'Matthies', 'Mccloy', 'Simmon', 'Concha', 'Feingold', 'Maglio', 'Olaughlin', 'Tassone', 'Abbasi', 'Oyola', 'Mook', 'Makin', 'Carnegie', 'Yue', 'Sethi', 'Duchene', 'Mcnear', 'Bartolo', 'Hegedus', 'Knoblauch', 'Orner', 'Hottinger', 'Lovitt', 'Harkless', 'Anastasio', 'Hohmann', 'Mangione', 'Dalby', 'Urich', 'Shuttleworth', 'Guilbeau', 'Bausch', 'Demartini', 'Difrancesco', 'Schwalm', 'Steere', 'Guel', 'Blanford', 'Flax', 'Fearon', 'Severe', 'Canto', 'Krogh', 'Meola', 'Dykema', 'Angelini', 'Pooley', 'Raff', 'Rister', 'Baehr', 'Daubert', 'Dechant', 'Kliewer', 'Hamdan', 'Gaiser', 'Lichty', 'Pomerleau', 'Uhler', 'Membreno', 'Printz', 'Worman', 'Thornley', 'Burbridge', 'Burdge', 'Schnitzer', 'Swanberg', 'Steinkamp', 'Heidel', 'Karch', 'Igo', 'Mccausland', 'Huskins', 'Kuss', 'Newbern', 'Peete', 'Godbolt', 'Climer', 'Neuenschwander', 'Then', 'Tietjen', 'Trombetta', 'Hawke', 'Hazlewood', 'Mayse', 'Patillo', 'Banos', 'Kuck', 'Lashbrook', 'Sarkisian', 'Goldberger', 'Moravec', 'Arey', 'Crosswhite', 'Elders', 'Fricks', 'Hercules', 'Bester', 'Erhart', 'Kuper', 'Sickels', 'Mun', 'Beddingfield', 'Panetta', 'Poplawski', 'Lansford', 'Negri', 'Dawe', 'Belair', 'Lattimer', 'Betty', 'Raye', 'Gobert', 'Dragoo', 'Horney', 'Strawbridge', 'Howery', 'Bosarge', 'Panzer', 'Labrador', 'Ransdell', 'Trumbo', 'Aubry', 'Fenderson', 'Fukuda', 'Grosz', 'Jacome', 'Slick', 'Kogut', 'Haig', 'Fouse', 'Hufnagel', 'Kehr', 'Musselwhite', 'Otwell', 'Raddatz', 'Oliverio', 'Sluss', 'Crossen', 'Guidroz', 'Mollett', 'Sumler', 'Chmiel', 'Guinan', 'Vita', 'Wieser', 'Ohlson', 'Bubb', 'Stennett', 'Bugbee', 'Minchew', 'Grado', 'Calcagno', 'Losh', 'Witzel', 'Brandl', 'Geoghegan', 'Vanbrunt', 'Smalling', 'Carignan', 'Schuelke', 'Sienkiewicz', 'Sollars', 'Dames', 'Malkin', 'Rodriges', 'Rozanski', 'Tews', 'Aust', 'Bardin', 'Voorhies', 'Rines', 'Courts', 'Bannerman', 'Martinsen', 'Malick', 'Collar', 'Twilley', 'Freiberg', 'Latiolais', 'Zehnder', 'Mannon', 'Becnel', 'Cowans', 'Arrigo', 'Crago', 'Curtsinger', 'Gassman', 'Marcelo', 'Rosendahl', 'Benito', 'Cortright', 'Carlon', 'Kenton', 'Hemminger', 'Martinek', 'Galeana', 'Cobble', 'Ruffino', 'Wittrock', 'Aberle', 'Catanese', 'Huezo', 'Soules', 'Ashraf', 'Mera', 'Gash', 'Agnello', 'Hauk', 'Hayek', 'Rahm', 'Higham', 'Kondo', 'Almon', 'Earwood', 'Kriebel', 'Philbrook', 'Rimer', 'Cuffee', 'Wolfgram', 'Wardwell', 'Ridder', 'Runner', 'Houchens', 'Vasser', 'Charlesworth', 'Dierks', 'Molter', 'Orosz', 'Roudebush', 'Coca', 'Brost', 'Lovern', 'Brott', 'Baudoin', 'Prophet', 'Bermea', 'Ulm', 'Bahl', 'Ulery', 'Caraveo', 'Maez', 'Corchado', 'Baillie', 'Colmenero', 'Rebolledo', 'Shevlin', 'Mehaffey', 'Hedin', 'Pickell', 'Spiro', 'Coatney', 'Gentner', 'Fuhr', 'Zeh', 'Fuerte', 'Knerr', 'Nakata', 'Voll', 'Zach', 'Gatica', 'Rabalais', 'Macek', 'Petti', 'Dickison', 'Sheley', 'Kinner', 'Effinger', 'Axelson', 'Overbay', 'Vancleve', 'Speegle', 'Muntz', 'Sang', 'Mcleroy', 'Aleshire', 'Holdridge', 'Knouse', 'Saling', 'Zacher', 'Zambrana', 'Neblett', 'Cichon', 'Herdman', 'Poli', 'Schisler', 'Antrim', 'Babineau', 'Coplin', 'Straughn', 'Watlington', 'Burbach', 'Campanelli', 'Coletta', 'Tennis', 'Dymond', 'Darosa', 'Chard', 'Delcampo', 'Lyden', 'Piland', 'Eslick', 'Beets', 'Ransome', 'Schuett', 'Styers', 'Fegley', 'Corning', 'Crume', 'Villeneuve', 'Schmeling', 'Zeiger', 'Blaker', 'Ramsden', 'Carol', 'Roseboro', 'Egner', 'Filip', 'Poitras', 'Flanery', 'Cothren', 'Bridger', 'Hoose', 'Demas', 'Kozel', 'Marzano', 'Penwell', 'Rast', 'Whicker', 'Haslett', 'Bibby', 'Keese', 'Montilla', 'Sultana', 'Resendes', 'Vanscoy', 'Dinan', 'Bala', 'Dirksen', 'Ek', 'Shimer', 'Doshi', 'Mayeux', 'Streater', 'Dattilo', 'Marlar', 'Senft', 'Vanalstine', 'Rehberg', 'Vanderhoff', 'Brenes', 'Motto', 'Sproles', 'Toone', 'Royall', 'Beaudette', 'Belding', 'Berta', 'Carmean', 'Simonian', 'Avera', 'Martina', 'Kind', 'Buchheit', 'Corrao', 'Crumrine', 'Wertman', 'Lininger', 'Pressman', 'Slane', 'Manges', 'Theus', 'Canizales', 'Eugenio', 'Ferrigno', 'Ellard', 'Stilley', 'Crabbe', 'Procter', 'Baccus', 'Hellmann', 'Risk', 'Schild', 'Tostado', 'Fessenden', 'Glines', 'Perone', 'Carns', 'Belote', 'Deshotel', 'Bottomley', 'Delbosque', 'Dubinsky', 'Flinchum', 'Berlanga', 'Darland', 'Daniele', 'Jess', 'Mungia', 'Harlin', 'Rocca', 'Saltsman', 'Trovato', 'Dionisio', 'Erbe', 'Dauzat', 'Laferriere', 'Kear', 'Brannigan', 'Guard', 'Roquemore', 'Brehmer', 'Kappes', 'Kepley', 'Labounty', 'Sudol', 'Walburn', 'Bibeau', 'Euler', 'Brawn', 'Pilot', 'Bunger', 'Earnhardt', 'Fischetti', 'Buitrago', 'Calo', 'Surette', 'Martyn', 'Tollett', 'Tuller', 'Noakes', 'Marson', 'Bongiovanni', 'Novello', 'Werling', 'Wyland', 'Palen', 'Sigmund', 'Salzer', 'Abels', 'Penson', 'Cazarez', 'Diblasi', 'Jantzen', 'Kittleson', 'Hurlbert', 'Shepardson', 'Munz', 'Bozek', 'Woll', 'Forth', 'Colvard', 'Baginski', 'Beirne', 'Lemmer', 'Shover', 'Lucci', 'Hockensmith', 'Mayhall', 'Faucette', 'Soloman', 'Lembo', 'Tarnowski', 'Westerlund', 'Gossage', 'Bold', 'Davi', 'Crater', 'Saia', 'Spisak', 'Zerr', 'Penate', 'Piel', 'Raja', 'Farney', 'Cutrer', 'Liverman', 'Brar', 'Nocera', 'Coutu', 'Rishel', 'Spurr', 'Kail', 'Molino', 'Favreau', 'Mullinix', 'Pospisil', 'Rohloff', 'Slavens', 'Stumbo', 'Ahl', 'Hosking', 'Speaker', 'Tarkington', 'Majeski', 'Skoog', 'Kirch', 'Vannostrand', 'Olmo', 'Dorrell', 'Newcombe', 'Halls', 'Riffel', 'Luque', 'Rolston', 'Lokey', 'Nicholes', 'Gula', 'Schrage', 'Goshorn', 'Woodell', 'Ahmadi', 'Austria', 'Shaul', 'Berwick', 'Graczyk', 'Lacourse', 'Porcaro', 'Rexroad', 'Chrzanowski', 'Abele', 'Woodin', 'Gillan', 'Lone', 'Orzechowski', 'Fader', 'Regina', 'Ban', 'Morriss', 'Rickards', 'Gannaway', 'Tassin', 'Accardi', 'Engelke', 'Kruk', 'Mantilla', 'Soderstrom', 'Kriz', 'Cantley', 'Cangelosi', 'Kalin', 'Sobolewski', 'Prinz', 'Bessey', 'Chittum', 'Marcucci', 'Annunziata', 'Hegg', 'Mishra', 'Heppner', 'Benningfield', 'Rhoten', 'Smolen', 'Lewellyn', 'Tall', 'Comiskey', 'Gobel', 'Klump', 'Stauber', 'Tocci', 'Gosser', 'Tussey', 'Summitt', 'Ottman', 'Vester', 'Pasko', 'Latshaw', 'Kies', 'Valderrama', 'Leese', 'Orduna', 'Gilcrease', 'Alli', 'Berberich', 'Delariva', 'Harb', 'Schmuck', 'Spang', 'Uecker', 'Garfinkel', 'Mcalexander', 'Monty', 'Leonetti', 'Knipe', 'Loudon', 'Leisure', 'Swearengin', 'Tinnin', 'Engelmann', 'Noblitt', 'Ruhland', 'Shewmaker', 'Smetana', 'Vangundy', 'Yzaguirre', 'Nehls', 'Sullens', 'Mahurin', 'Ferman', 'Lenhardt', 'Littman', 'Udell', 'Coutts', 'Mcginness', 'Nakayama', 'Goguen', 'Lass', 'Tibbits', 'Pafford', 'Fett', 'Ruis', 'Trogdon', 'Tarleton', 'Isabell', 'Paylor', 'Grandison', 'Bejar', 'Highfield', 'Peplinski', 'Hammitt', 'Mitton', 'Dashiell', 'Turrentine', 'Rusin', 'Sheeran', 'Barrs', 'Grund', 'Kowalsky', 'Mccaughey', 'Orantes', 'Oshields', 'Tourville', 'Szymczak', 'Gagner', 'Kemble', 'Delangel', 'Kaler', 'Treanor', 'Deems', 'Ours', 'Loss', 'Remley', 'Welles', 'Bogardus', 'Feher', 'Grzybowski', 'Meinert', 'Mickelsen', 'Opitz', 'Osowski', 'Paglia', 'Srivastava', 'Hirata', 'Vandermark', 'Maggi', 'Gautreau', 'Fonte', 'Meck', 'Mcquinn', 'Criddle', 'Hulin', 'Fulmore', 'Baldino', 'Neugebauer', 'Sletten', 'Talcott', 'Tessmer', 'Vrooman', 'Whitlatch', 'Miano', 'Arauz', 'Lafon', 'Cashin', 'Carrow', 'Feely', 'Provo', 'Botsford', 'Chojnacki', 'Pritts', 'Duby', 'Danos', 'Mundo', 'Strum', 'Bealer', 'Barmore', 'Birkholz', 'Hedgecock', 'Vides', 'Mcjunkin', 'Paley', 'Dennie', 'Cosey', 'Trombly', 'Wagar', 'Tope', 'Venters', 'Neptune', 'Allshouse', 'Kuczynski', 'Beams', 'Kilbourne', 'Troxler', 'Mcgahee', 'Latson', 'Miraglia', 'Suda', 'Prall', 'Searls', 'Tevis', 'Vales', 'Coberly', 'Eichman', 'Hiltz', 'Mancera', 'Mrozek', 'Obermeyer', 'Wiedeman', 'Yoshimura', 'Pascucci', 'Denk', 'Pita', 'Abdul', 'Schurr', 'Huntoon', 'Sund', 'Blose', 'Agostini', 'Cogdell', 'Hamburger', 'Orwig', 'Pelley', 'Mcnelly', 'Litten', 'Osterberg', 'Zepp', 'Mathur', 'Ardon', 'Petre', 'Schroeter', 'Christoff', 'Ridenhour', 'Hibler', 'Coachman', 'Tadeo', 'Vanderploeg', 'Ference', 'Connery', 'Albro', 'Bublitz', 'Fagundes', 'Purpura', 'Deeb', 'Melzer', 'Haus', 'Huffine', 'Groner', 'Laforce', 'Burriss', 'Longino', 'Seldon', 'Chicoine', 'Neira', 'Pintor', 'Trager', 'Garg', 'Camilleri', 'Limbaugh', 'Marinello', 'Sanz', 'Hankey', 'Aylor', 'Homes', 'Marro', 'Stalder', 'Creasey', 'Blankinship', 'Waldrup', 'Aubert', 'Quintanar', 'Tarbell', 'Mayton', 'Baba', 'Voltz', 'Cuba', 'Bracco', 'Dimeo', 'Cauble', 'Rodela', 'Sambrano', 'Doten', 'Jobes', 'Laura', 'Farrier', 'Mixson', 'Bassi', 'Kroening', 'Papineau', 'Scheuerman', 'Zertuche', 'Cardella', 'Taube', 'Bazzi', 'Sautter', 'Tobon', 'Venditti', 'Nordman', 'Loken', 'Fortino', 'Godbout', 'Knaub', 'Larabee', 'Meserve', 'Slama', 'Junge', 'Stamand', 'Daigneault', 'Fredericksen', 'Loveall', 'Clothier', 'Kuehne', 'Delahoussaye', 'Bosquez', 'Hildenbrand', 'Muto', 'Vanvliet', 'Frederiksen', 'Mero', 'Rapier', 'Feldt', 'Mcpartland', 'Stegner', 'Veenstra', 'Yeater', 'Yeatts', 'Rosenbloom', 'Shepperd', 'Marchbanks', 'Tapscott', 'Baynard', 'Osby', 'Cumberbatch', 'Brassard', 'Dahlman', 'Doi', 'Katona', 'Niesen', 'Slavik', 'Macneill', 'Marsala', 'Fazekas', 'Cudd', 'Ocana', 'Brimer', 'Lachman', 'Balla', 'Shahid', 'Gammage', 'Canez', 'Fickes', 'Goldblatt', 'Mcgeehan', 'Westerberg', 'Legler', 'Stanberry', 'Hillery', 'Colosimo', 'Florek', 'Heckathorn', 'Lenart', 'Mcneilly', 'Viles', 'Davin', 'Pierro', 'Edman', 'Patron', 'Tipps', 'Ardis', 'Hassen', 'Crase', 'Gebert', 'Predmore', 'Entwistle', 'Lourenco', 'Snively', 'Chivers', 'Byas', 'Edsall', 'Sneddon', 'Kloster', 'Luedke', 'Barcelo', 'Corns', 'Paula', 'Tacker', 'Marton', 'Lyke', 'Huitt', 'Tinch', 'Tagle', 'Linnell', 'Loden', 'Witman', 'Condrey', 'Swindler', 'Denby', 'Mcdow', 'Bennion', 'Berkman', 'Esguerra', 'Kohli', 'Leicht', 'Platero', 'Purtell', 'Sarro', 'Spera', 'Wasielewski', 'Nold', 'Gander', 'Coster', 'Burn', 'Sindelar', 'Spivak', 'Stangl', 'Eakes', 'Host', 'Raybon', 'Stickle', 'Vitiello', 'Borntrager', 'Glorioso', 'Winnie', 'Blocher', 'Che', 'Godbold', 'Blumenfeld', 'Hallford', 'Nuckolls', 'Rasor', 'Tardy', 'Hayslett', 'Kivett', 'Pettry', 'Klopfenstein', 'Martelli', 'Dunker', 'Klass', 'Denn', 'Vessels', 'Stukes', 'Iannone', 'Kovarik', 'Perlmutter', 'Som', 'Kump', 'Tack', 'Warf', 'Coffer', 'Baas', 'Balli', 'Fleishman', 'Lyall', 'Meli', 'Petrovic', 'Sego', 'Tignor', 'Maule', 'Stinchcomb', 'Doxey', 'Garbutt', 'Drewes', 'Prestridge', 'Vivanco', 'Weinmann', 'Amrhein', 'Schluter', 'Cleek', 'Rossignol', 'Rezendes', 'Marone', 'Sloss', 'Weary', 'Leishman', 'Searfoss', 'Springman', 'Wolfer', 'Hires', 'Mccampbell', 'Casselman', 'Frasca', 'Lintner', 'Preiss', 'Neilsen', 'Twiss', 'Boughner', 'Donnellan', 'Rech', 'Mccaulley', 'Massenburg', 'Dermody', 'Neuberger', 'Rifkin', 'Ullom', 'Marth', 'Blacker', 'Kase', 'Garon', 'Calaway', 'Grange', 'Yopp', 'Service', 'Blassingame', 'Lockley', 'Straughter', 'Porath', 'Situ', 'Stansfield', 'Eves', 'Cianci', 'Colindres', 'Killam', 'Luiz', 'Stahlman', 'Silvernail', 'Moorhouse', 'Langner', 'Soucie', 'Lucke', 'Manly', 'Huggard', 'Higareda', 'Matarazzo', 'Jusino', 'Winnett', 'Matheney', 'Bufkin', 'Bilbo', 'Levingston', 'Auxier', 'Guevarra', 'Triolo', 'Roder', 'Clever', 'Moodie', 'Cabana', 'Kiesling', 'Lindblom', 'Reuther', 'Rubi', 'Brinkmann', 'Donati', 'Cresswell', 'Fortes', 'Bayard', 'Grayer', 'Malveaux', 'Hauger', 'Hirschman', 'Soroka', 'Witek', 'Pugsley', 'Eoff', 'Alewine', 'Hastie', 'Budzinski', 'Burgard', 'Hebel', 'Kleist', 'Lawhead', 'Saporito', 'Sugarman', 'Sechler', 'Cohoon', 'Treadaway', 'Silliman', 'Horsey', 'Chauhan', 'Jovel', 'Giorgio', 'Waltrip', 'Templeman', 'Morning', 'Fava', 'Mcinturff', 'Migliaccio', 'Moncayo', 'Pesek', 'Olivero', 'Devall', 'Dauphin', 'Banerjee', 'Benway', 'Bermejo', 'Dacey', 'Pilarski', 'Pinnell', 'Chia', 'Pung', 'Rahe', 'Greenhaw', 'Byrns', 'Ancona', 'Granato', 'Luciani', 'Shryock', 'Sloop', 'Murcia', 'Croll', 'Congleton', 'Okelly', 'Norville', 'Flesch', 'Murad', 'Seddon', 'Waybright', 'Cremer', 'Hagman', 'Largo', 'Solar', 'Costales', 'Gier', 'Tober', 'Reeb', 'Lands', 'Hoback', 'Ingrassia', 'Youngquist', 'Tyrell', 'Profit', 'Collura', 'Oldaker', 'Vogl', 'Spafford', 'Laughman', 'Goris', 'Coghill', 'Sweatman', 'Rozelle', 'Chatelain', 'Fouch', 'Legros', 'Koza', 'Vialpando', 'Subia', 'Danz', 'Dosch', 'Debruin', 'Stefanik', 'Gamber', 'Saylors', 'Cost', 'Bernat', 'Eastburn', 'Getman', 'Maillet', 'Dogan', 'Finklea', 'Alongi', 'Ballas', 'Konkel', 'Ryu', 'Scoles', 'Oles', 'Algarin', 'Seago', 'Delaune', 'Pettey', 'Gettys', 'Blanch', 'Kea', 'Cambridge', 'Ciesielski', 'Pribble', 'Mayhugh', 'Dery', 'Allsup', 'Hauptman', 'Shoff', 'Spath', 'Lipsky', 'Lakhani', 'Lona', 'Andrea', 'Heist', 'Herzig', 'Insley', 'Frasher', 'Muise', 'Kettle', 'Catano', 'Harkleroad', 'Rominger', 'Schreffler', 'Bielecki', 'Knarr', 'Arvidson', 'Harnden', 'Galyon', 'Rando', 'Delima', 'Constance', 'Bosman', 'Meinke', 'Rosenquist', 'Stickles', 'Batz', 'Eitel', 'Kouba', 'Marmol', 'Rini', 'Kinyon', 'Munns', 'Hilts', 'Verrett', 'Shead', 'Staggers', 'Naccarato', 'Shupp', 'Willeford', 'Gayer', 'Bran', 'Krider', 'Cue', 'Dubiel', 'Kawamoto', 'Quayle', 'Meckley', 'Weingart', 'Ivan', 'Aller', 'Pattee', 'Pile', 'Shinault', 'Alzate', 'Goudreau', 'Weitzman', 'Zurek', 'Portman', 'Tellis', 'Achenbach', 'Cranfill', 'Scheib', 'Rud', 'Forgey', 'Sardina', 'Hayslip', 'Fadden', 'Ethington', 'Jette', 'Maberry', 'Stecher', 'Mcgahan', 'Buffa', 'Lehto', 'Lesch', 'Minier', 'Niblett', 'Behar', 'Gochenour', 'Thole', 'Woodmansee', 'Guse', 'Breunig', 'Deibert', 'Levario', 'Liming', 'Oltman', 'Vought', 'Higby', 'Lummus', 'Casimir', 'Grabow', 'Helzer', 'Madero', 'Panico', 'Ruud', 'Beas', 'Knebel', 'Lorence', 'Sizer', 'Goodwill', 'Darrell', 'Dismukes', 'Wimbish', 'Kleine', 'Prohaska', 'Freeborn', 'Caso', 'Meis', 'Bise', 'Maxim', 'Chumbley', 'Eaglin', 'Bergey', 'Hillenbrand', 'Pacifico', 'Plath', 'Rio', 'Ristau', 'Zych', 'Whang', 'Fister', 'Forbush', 'Lagarde', 'Atha', 'Hallinan', 'Hesser', 'Hoak', 'Kohr', 'Longnecker', 'Nomura', 'Raia', 'Seybold', 'Spagnola', 'Majano', 'Sanmartin', 'Mangual', 'Stanback', 'Gangi', 'Lauritzen', 'Seeber', 'Disla', 'Frain', 'Besse', 'Makris', 'Ducker', 'Demps', 'Laporta', 'Pavey', 'Reineke', 'Najjar', 'Mcclaskey', 'Luff', 'Vanderveer', 'Mccoll', 'Leamon', 'Meinhardt', 'Dinatale', 'Laffoon', 'Jenny', 'Skipworth', 'Folds', 'Burstein', 'Freas', 'Lizardo', 'Selle', 'Vrabel', 'Beranek', 'Hakala', 'Spataro', 'Prahl', 'Meas', 'Haston', 'Croker', 'Carmouche', 'Doolan', 'Guerrieri', 'Poulton', 'Mauger', 'Klose', 'Husk', 'Pharis', 'Dipalma', 'Hamaker', 'Simek', 'Strube', 'Corl', 'Bence', 'Meigs', 'Gillaspie', 'Moring', 'Eli', 'Mccullers', 'Erving', 'Dopp', 'Falbo', 'Gensler', 'Heroux', 'Hertzler', 'Muscarella', 'Wittmann', 'Willner', 'Howton', 'Brummitt', 'Demar', 'Hardrick', 'Benavente', 'Choo', 'Tiscareno', 'Bunge', 'Helle', 'Ogan', 'Allbright', 'Jervis', 'Tompson', 'Sheats', 'Hebron', 'Esters', 'Fiorillo', 'Narciso', 'Slowik', 'Kush', 'Sole', 'Bitting', 'Bradham', 'Goggans', 'Rushin', 'Huguley', 'Kittelson', 'Nadel', 'Noggle', 'Xue', 'Alameda', 'Hege', 'Liberto', 'Maron', 'Aber', 'Brodersen', 'Clasen', 'Couturier', 'Godines', 'Ozment', 'Parga', 'Rohm', 'Voris', 'Leaver', 'Newhart', 'Sabourin', 'Kelling', 'Repass', 'Wigington', 'Prioleau', 'Antle', 'Goucher', 'Kreitzer', 'Reuss', 'Rosenfield', 'Sliva', 'Nolting', 'Radel', 'Quintal', 'Lisa', 'Temples', 'Cavins', 'Gazaway', 'Hopewell', 'Albury', 'Broberg', 'Khuu', 'Zelinski', 'Kurian', 'Treacy', 'Rake', 'Tirrell', 'Macdowell', 'Smead', 'Edgerly', 'Fowles', 'Yorke', 'Goodwyn', 'Sciacca', 'Breitenbach', 'Charity', 'Greenidge', 'Kendig', 'Navarette', 'Doremus', 'Marcelino', 'Ribera', 'Luse', 'Hasley', 'Halton', 'Jakes', 'Balas', 'Cheema', 'Dettman', 'Schachter', 'Weisenberger', 'Lehn', 'Sailors', 'Alcott', 'Mancino', 'Mineo', 'Montz', 'Stettler', 'Brannock', 'Shumake', 'Blunk', 'Feuerstein', 'Mangino', 'Bitzer', 'Padden', 'Wetter', 'Blase', 'Helvey', 'Sabia', 'Folden', 'Wyllie', 'Hoosier', 'Gehringer', 'Peifer', 'Schneiderman', 'Raj', 'Gift', 'Sue', 'Wedgeworth', 'Bischof', 'Coviello', 'Flor', 'Barrentine', 'Ells', 'Dundas', 'Baine', 'Bouknight', 'Koning', 'Mallari', 'Monje', 'Wingler', 'Stainbrook', 'Mari', 'Hemby', 'Boateng', 'Enfinger', 'Esquer', 'Salvatierra', 'Tercero', 'Porta', 'Speth', 'Plate', 'Rockhold', 'Hampshire', 'Stipe', 'Buescher', 'Denault', 'Fahnestock', 'Vandehey', 'Brouse', 'Ciaccio', 'Hund', 'Wire', 'Sherron', 'Fairfax', 'Owusu', 'Cuervo', 'Minjarez', 'Zarco', 'Vandyne', 'Gedeon', 'Kegler', 'Ebron', 'Murtaugh', 'Pariseau', 'Morvant', 'Ellwood', 'Beazley', 'Farrelly', 'Mccollom', 'Alegre', 'Dussault', 'Goulette', 'Hession', 'Regier', 'Speranza', 'Spinella', 'Maloof', 'Nogueira', 'Beaudin', 'Sable', 'Samford', 'Marchan', 'Rodriques', 'Rhines', 'Aldrete', 'Creedon', 'Laberge', 'Sandel', 'Spady', 'Horsman', 'Schimpf', 'Sottile', 'Than', 'Ybanez', 'Sagastume', 'Vosburg', 'Langlais', 'Windley', 'Bielski', 'Meyerson', 'Rizk', 'Sparacino', 'Winebarger', 'Helsley', 'Alward', 'Wilker', 'Clyne', 'Bergren', 'Gin', 'Heberling', 'Noh', 'Rotz', 'Laffey', 'Zurawski', 'Aliff', 'Coover', 'Steves', 'Brain', 'Greggs', 'Burts', 'Culwell', 'Halbrook', 'Marcantel', 'Alsip', 'Esslinger', 'Kinnaird', 'Rew', 'Wimbley', 'Dalal', 'Litke', 'Ostlund', 'Petersheim', 'Vezina', 'Vickrey', 'Vida', 'Stachowiak', 'Santizo', 'Stow', 'Hoel', 'Parrino', 'Elsberry', 'Pharris', 'Chiarello', 'Konen', 'Ogata', 'Tousignant', 'Turano', 'Zoll', 'Reser', 'Ribble', 'Dally', 'Kersh', 'Crivello', 'Glantz', 'Vanvleet', 'Dy', 'Woolwine', 'Ager', 'Romney', 'Dedeaux', 'Ringgold', 'Mir', 'Rexford', 'Whitehair', 'Wilczynski', 'Kleinsasser', 'Siemens', 'Kindig', 'Kemmer', 'Fonda', 'Litt', 'Mcferrin', 'Riche', 'Beaudet', 'Lasala', 'Maglione', 'Milani', 'Moscato', 'Pangilinan', 'Haycraft', 'Camilo', 'Trafton', 'Stroble', 'Dollard', 'Consiglio', 'Kinnaman', 'Mumaw', 'Mustard', 'Nees', 'Rupprecht', 'Gimbel', 'Chamberland', 'Lish', 'Beedle', 'Minder', 'Broxton', 'Cocco', 'Vore', 'Slough', 'Pehrson', 'Graney', 'Reade', 'Cozzi', 'Mowrer', 'Necaise', 'Notaro', 'Vanderwall', 'Jeffs', 'Lynd', 'Perino', 'Poyner', 'Oscar', 'Mihalik', 'Coscia', 'Zoellner', 'Shippee', 'Casimiro', 'Phillippe', 'Bartolotta', 'Graciano', 'Schnoor', 'Aube', 'Duguay', 'Dickerman', 'Santi', 'Cude', 'Haver', 'Heidelberg', 'Farquharson', 'Bianchini', 'Kasprzak', 'Pizzi', 'Urquiza', 'Knee', 'Lust', 'Strayhorn', 'Ader', 'Canup', 'Mira', 'Saulnier', 'Stalvey', 'Takeuchi', 'Updegraff', 'Barletta', 'Mikhail', 'Abadie', 'Cohee', 'Sones', 'Hird', 'Mizelle', 'Graddy', 'Demay', 'Escandon', 'Kozar', 'Lecuyer', 'Tredway', 'Danks', 'Pry', 'Mathena', 'Gomer', 'Moussa', 'Journey', 'Brison', 'Denardo', 'Digiorgio', 'Worster', 'Kottke', 'Sayegh', 'Aday', 'Chain', 'Digby', 'Beeks', 'Malpass', 'Toft', 'Fucci', 'Stam', 'Smoker', 'Willms', 'Bohner', 'Sugar', 'Tay', 'Faye', 'Melnik', 'Pankow', 'Stehle', 'Vecchione', 'Weatherwax', 'Monterrosa', 'Bodily', 'Serino', 'Jerkins', 'Bosma', 'Luczak', 'Serafini', 'Baze', 'Hemmings', 'Darrington', 'Fraizer', 'Henrikson', 'Kok', 'Larrison', 'Mirabella', 'Newhall', 'Hollenbach', 'Formica', 'Haake', 'Seim', 'Zeledon', 'Crabill', 'Mensch', 'Prevatt', 'Riggan', 'Gallien', 'Erby', 'Running', 'Shisler', 'Sidebottom', 'Sladek', 'Alejos', 'Momin', 'Bickers', 'Smither', 'Ahart', 'Huseman', 'Cantero', 'Reiley', 'Mcneeley', 'Quill', 'Binger', 'Ellerbee', 'Cearley', 'Guilmette', 'Helbig', 'Nuzum', 'Gravatt', 'Turlington', 'Deramus', 'Casados', 'Harrop', 'Kardos', 'Krehbiel', 'Homa', 'Agostino', 'Candia', 'Byerley', 'Kincer', 'Vitello', 'Backhaus', 'Burzynski', 'Zaborowski', 'Puebla', 'Pedrick', 'Hyson', 'Mazyck', 'Deno', 'Yutzy', 'Dubbs', 'Shimek', 'Saha', 'Philipps', 'Chretien', 'Bramwell', 'Mccalister', 'Ebright', 'Parkhill', 'Rieke', 'Karras', 'Mcbain', 'Gibbon', 'Beckler', 'Nordby', 'Sipos', 'Swider', 'Treiber', 'Weakland', 'Zagorski', 'Peavler', 'Cirino', 'Corzine', 'Barbier', 'Dolby', 'Sheperd', 'Vanderhorst', 'Cornman', 'Dippel', 'Gramlich', 'Hoffmeister', 'Markwell', 'Milks', 'Schriner', 'Cusimano', 'Emberton', 'Kimbler', 'Merrow', 'Huard', 'Paulo', 'Durrance', 'Faherty', 'Palmatier', 'Rezac', 'Speir', 'Streicher', 'Ackman', 'Veitch', 'Bedgood', 'Pantano', 'Raman', 'Eusebio', 'Coldwell', 'Omer', 'Swanigan', 'Stepney', 'Breiner', 'Casebolt', 'Deblasio', 'Mascaro', 'Maselli', 'Overfield', 'Enyart', 'Litman', 'Borer', 'Dudash', 'Mcniff', 'Cherian', 'Scearce', 'Brakefield', 'Hamed', 'Cooperman', 'Kinzel', 'Mchargue', 'Schiefelbein', 'Varughese', 'Brumm', 'Novy', 'Vicars', 'Barratt', 'Titsworth', 'Mole', 'Crisafulli', 'Deitch', 'Slager', 'Tokarz', 'Speelman', 'Tunney', 'Peal', 'Chenevert', 'Haggins', 'Heitmann', 'Scheuer', 'Stuhr', 'Zenner', 'Wishon', 'Arno', 'Lauder', 'Goertz', 'Jew', 'Knapik', 'Lococo', 'Murnane', 'Pawloski', 'Contino', 'Holbrooks', 'Carlstrom', 'Heitkamp', 'Muszynski', 'Shelnutt', 'Tortora', 'Dietrick', 'Kyzer', 'Colt', 'Propes', 'Caffee', 'Fankhauser', 'Liotta', 'Patil', 'Broder', 'Disher', 'Telfer', 'Lampkins', 'Bartman', 'Beauchemin', 'Gatz', 'Pedrosa', 'Schuch', 'Zorrilla', 'Capote', 'Vanderslice', 'Boulden', 'Kirkendoll', 'Fausto', 'Krom', 'Ngai', 'Sepe', 'Domenech', 'Dines', 'Aschenbrenner', 'Carias', 'Inoue', 'Montagna', 'Pulsifer', 'Rieman', 'Seelye', 'Yochum', 'Defilippis', 'Lacross', 'Betances', 'Jenne', 'Rousey', 'Brunswick', 'Wadlington', 'Brainerd', 'Dauria', 'Dinicola', 'Fath', 'Gemmell', 'Rudman', 'Urbaniak', 'Fillion', 'Brandel', 'Devin', 'Derrickson', 'Jenkin', 'Ebling', 'Ferranti', 'Lueders', 'Alvear', 'Gero', 'Maury', 'Estill', 'Beadles', 'Philyaw', 'Tann', 'Bednarski', 'Nagata', 'Partington', 'Sobol', 'Soohoo', 'Welliver', 'Yam', 'Popejoy', 'Berthelot', 'Manwaring', 'Cahn', 'Layer', 'Poarch', 'Tee', 'Arellanes', 'Ehler', 'Montalto', 'Pavlick', 'Rauh', 'Mcnees', 'Balke', 'Alles', 'Caperton', 'Frier', 'Thweatt', 'Whitely', 'Demby', 'Kowalik', 'Loffredo', 'Solem', 'Clampitt', 'Dossey', 'Fauver', 'Toto', 'Corlett', 'Nickols', 'Golston', 'Graef', 'Salsman', 'Hartl', 'Towell', 'Lasseter', 'Arata', 'Diver', 'Malan', 'Lanter', 'Justis', 'Prime', 'Ditzler', 'Engelhart', 'Plouffe', 'Zaldivar', 'Elser', 'Witherow', 'Mateer', 'Rikard', 'Dolson', 'Mariner', 'Amis', 'Toby', 'Evins', 'Midgette', 'Pinnix', 'Blackard', 'Huisman', 'Lager', 'Deloera', 'Dutt', 'Goodrow', 'Morphis', 'Quin', 'Frankenfield', 'Craycraft', 'Mazer', 'Meloy', 'Lebouef', 'Beresford', 'Spiva', 'Michie', 'Jarreau', 'Vallier', 'Dunmore', 'Cerra', 'Ciulla', 'Dauer', 'Helling', 'Jackowski', 'Taboada', 'Balistreri', 'Blattner', 'Cabot', 'Lawver', 'Cornette', 'Arline', 'Amsden', 'Degner', 'Ungar', 'Birney', 'Goldie', 'Croston', 'Wixon', 'Alan', 'Garneau', 'Kolakowski', 'Vitek', 'Witherell', 'Licari', 'Badeaux', 'Sammon', 'Greenland', 'Corlew', 'Cashwell', 'Aldinger', 'Bilderback', 'Kleeman', 'Sisto', 'Menz', 'Bakos', 'Ebbert', 'Berliner', 'Kin', 'Cabaniss', 'Ouzts', 'Mccook', 'Campfield', 'Gulino', 'Odriscoll', 'Weyand', 'Mcguckin', 'Crean', 'Boyington', 'Bracero', 'Carini', 'Chawla', 'Chaudhary', 'Koehl', 'Wahlstrom', 'Francoeur', 'Leveque', 'Ledgerwood', 'Paluch', 'Wyble', 'Latif', 'Koen', 'Eddie', 'Mcgirt', 'Boxley', 'Exline', 'Lujano', 'Michalowski', 'Rottman', 'Throop', 'Zech', 'Baros', 'Bohne', 'Mule', 'Monica', 'Lasiter', 'Alsop', 'Pittard', 'Whitefield', 'Mccaskey', 'Paek', 'Reilley', 'Wasik', 'Bouma', 'Garrigan', 'Nett', 'Mclarty', 'Flemings', 'Alcorta', 'Spoor', 'Mccranie', 'Coverdale', 'Guaman', 'Jenness', 'Knoop', 'Scarpelli', 'Schrecengost', 'Toews', 'Caughey', 'Laska', 'Helfer', 'Bevers', 'Forbus', 'Mccrady', 'Reasor', 'Aggarwal', 'Locicero', 'Uber', 'Vadnais', 'Budnick', 'Duhamel', 'Stelling', 'Kicklighter', 'Basco', 'Otts', 'Tippins', 'Bliven', 'Gayheart', 'Knauf', 'Lalli', 'Quigg', 'Kingman', 'Boros', 'Henneman', 'Lofland', 'Pendarvis', 'Keitt', 'Gelfand', 'Greaney', 'Kindt', 'Stimac', 'Kirn', 'Tokar', 'Miura', 'Wendorf', 'Vigue', 'Dorey', 'Fegan', 'Meares', 'Thierry', 'Ambrosino', 'Coenen', 'Kersting', 'Leas', 'Millward', 'Petzold', 'Morphew', 'Filippone', 'Stoffer', 'Mani', 'Clairmont', 'Mccreight', 'Cully', 'Bissonette', 'Kochan', 'Linneman', 'Parlier', 'Bergner', 'Sterns', 'Steveson', 'Clingerman', 'Karg', 'Medved', 'Prakash', 'Ulman', 'Petroski', 'Hagaman', 'Huddle', 'Auclair', 'Shives', 'Dunavant', 'Glade', 'Chauncey', 'Pough', 'Burgoon', 'Pluta', 'Couey', 'Punch', 'Colmenares', 'Fosdick', 'Henze', 'Kaczynski', 'Lomonaco', 'Roepke', 'Schenkel', 'Schlatter', 'Schoenherr', 'Tripodi', 'Zeiler', 'Bunt', 'Dolly', 'Boyland', 'Bickle', 'Cincotta', 'Crull', 'Enfield', 'Saltz', 'Skelley', 'Younts', 'Bussiere', 'Latona', 'Sensabaugh', 'Grosvenor', 'Woolbright', 'Shorty', 'Brungardt', 'Cardon', 'Carlberg', 'Clevinger', 'Rucinski', 'Vanhooser', 'Westling', 'Imperial', 'Tyer', 'Elzey', 'Aslam', 'Fesler', 'Leiser', 'Smitley', 'Orgeron', 'Scuderi', 'Flatley', 'Whiteford', 'Tison', 'Laurin', 'Fortman', 'Whitty', 'Kirton', 'Cassella', 'Flom', 'Seigel', 'Cossette', 'Bryden', 'Gobin', 'Hieb', 'Marzullo', 'Matuszak', 'Rolph', 'Spilman', 'Vanvoorhis', 'Sande', 'Suydam', 'Gledhill', 'Krill', 'Mackiewicz', 'Templet', 'Friedrichs', 'Ruddell', 'Kats', 'Nourse', 'Millender', 'Wafer', 'Fauntleroy', 'Archibeque', 'Maslowski', 'Metzgar', 'Pizana', 'Mcguffey', 'Estridge', 'Vanalstyne', 'Decuir', 'Mcbean', 'Hardnett', 'Avilla', 'Spadafora', 'Weisel', 'Kann', 'Leyden', 'Purdom', 'Tappan', 'Gunnells', 'Slaten', 'Hansley', 'Chiappetta', 'Rozek', 'Tiede', 'Winland', 'Dubuque', 'Heslin', 'Bradway', 'Eckels', 'Saffell', 'Germaine', 'Apolinar', 'Coloma', 'Gawlik', 'Chipps', 'Hicklin', 'Glanton', 'Dalke', 'Denlinger', 'Kuipers', 'Houpt', 'Parcell', 'Claeys', 'Ferreri', 'Greif', 'Lucente', 'Siems', 'Yousef', 'Llerena', 'Rote', 'Suero', 'Malmberg', 'Touchette', 'Luton', 'Wess', 'Height', 'Stampley', 'Anastasi', 'Bulman', 'Deharo', 'Laube', 'Severt', 'Midgley', 'Colling', 'Ell', 'Burbage', 'Commander', 'Hubner', 'Zurcher', 'Arocha', 'Nobile', 'Tingler', 'Ellman', 'Lolley', 'Pewitt', 'Mcduff', 'Hyler', 'Goltz', 'Kubota', 'Lamberti', 'Ohern', 'Uhrig', 'Dummer', 'Keesling', 'Litzinger', 'Moriarity', 'Servantes', 'Rohe', 'Stokely', 'Weedon', 'Pippins', 'Dehner', 'Krogman', 'Luecke', 'Rosete', 'Zona', 'Lowy', 'Applebee', 'Heather', 'Cruikshank', 'Linson', 'Brandy', 'Koser', 'Ruel', 'Ruppe', 'Saeteurn', 'Dewolfe', 'Sawtelle', 'Rudin', 'Raver', 'Bassham', 'Yaw', 'Segrest', 'Belfiore', 'Heeren', 'Kotowski', 'Luken', 'Makela', 'Ranallo', 'Schug', 'Seery', 'Payson', 'Caufield', 'Lacefield', 'Bratten', 'Jr', 'Buske', 'Ternes', 'Bivona', 'Felber', 'Rott', 'Pitkin', 'Pridmore', 'Oyer', 'Astle', 'Jeppesen', 'Shimabukuro', 'Soltys', 'Vieth', 'Rasnick', 'Calfee', 'Brignac', 'Lamy', 'Facey', 'Alper', 'Borquez', 'Cavalieri', 'Niswonger', 'Pajak', 'Schwabe', 'Ringel', 'Abbe', 'Fenley', 'Churchman', 'Haydel', 'Stockard', 'Adamek', 'Ellerman', 'Torpey', 'Waldroup', 'Hunte', 'Bienaime', 'Lazzara', 'Nemitz', 'Wingerter', 'Boer', 'Franken', 'Lebow', 'Manger', 'Baisley', 'Pane', 'Gayden', 'Bertelsen', 'Curfman', 'Leanos', 'Nissley', 'Odwyer', 'Manzer', 'Kollman', 'Quon', 'Holgate', 'Cola', 'Mckissack', 'Cousar', 'Bilski', 'Boehler', 'Kawamura', 'April', 'Mckelvy', 'Lanni', 'Roehm', 'Salva', 'Stackpole', 'Stracener', 'Masiello', 'Barrus', 'Tubb', 'Brummel', 'Devereux', 'Foushee', 'Corado', 'Gladfelter', 'Grewe', 'Hodapp', 'Swartwood', 'Vacek', 'Wrona', 'Shaffner', 'Ullah', 'Heslop', 'Mungo', 'Haymon', 'Behrend', 'Falter', 'Feola', 'Gruner', 'Picklesimer', 'Riedl', 'Stegeman', 'Harpole', 'Moyes', 'Boulay', 'Brighton', 'Guise', 'Laury', 'Badilla', 'Cypher', 'Houdek', 'Juhasz', 'Klingbeil', 'Pinales', 'Fellman', 'Daher', 'Allmond', 'Bal', 'Crager', 'Hillebrand', 'Menezes', 'Serpas', 'Zager', 'Alvardo', 'Summerford', 'Stillings', 'Vandergrift', 'Hanchett', 'Minto', 'Daughtery', 'Gillon', 'Rajan', 'Vasko', 'Wirick', 'Woolever', 'Caserta', 'Welle', 'Kimbrel', 'Traywick', 'Hands', 'Spratley', 'Iannuzzi', 'Krikorian', 'Runk', 'Sood', 'Riese', 'Antunes', 'Winsett', 'Mans', 'Capel', 'Condron', 'Nilles', 'Petz', 'Salemi', 'Bainter', 'Patchett', 'Hirschfeld', 'Murrin', 'Lamey', 'Mcglothin', 'Hodo', 'Hirth', 'Kaltenbach', 'Kensinger', 'Leidy', 'Shurtz', 'Braatz', 'Brafford', 'Willet', 'Clendening', 'Basch', 'Brockwell', 'Oberman', 'Palmateer', 'Osornio', 'Gehl', 'Staker', 'Mattila', 'Dawn', 'Cowherd', 'Appleman', 'Carbonaro', 'Castruita', 'Pilling', 'Wenrich', 'Christoffersen', 'Hinzman', 'Kaup', 'Pettersen', 'Jue', 'Khalsa', 'Mutz', 'Remus', 'Arch', 'Shands', 'Borek', 'Buresh', 'Egli', 'Feldkamp', 'Hampel', 'Lichtenberg', 'Morimoto', 'Brasel', 'Demelo', 'Royalty', 'Averitt', 'Metivier', 'Bradsher', 'Avallone', 'Demeter', 'Masucci', 'Musil', 'Wichmann', 'Broman', 'Taunton', 'Blewett', 'Duhart', 'Goo', 'Hanus', 'Mathai', 'Shutts', 'Taniguchi', 'Vanleeuwen', 'Delvillar', 'Hane', 'Givan', 'Croskey', 'Elamin', 'Deffenbaugh', 'Miklos', 'Passalacqua', 'Woessner', 'Lapan', 'Miah', 'Coty', 'Baksh', 'Beehler', 'Goel', 'Wolfinger', 'Goodhue', 'Toal', 'Mattoon', 'Haq', 'Nida', 'Dant', 'Varnadore', 'Tippit', 'Every', 'Bohling', 'Lichtenberger', 'Louk', 'Soderquist', 'Werkheiser', 'Willbanks', 'Whitis', 'Millikin', 'Dietzel', 'Frase', 'Ishida', 'Pilger', 'Grajales', 'Kole', 'Roff', 'Ballantine', 'Basden', 'Cadenas', 'Caliendo', 'Hotard', 'Vidrio', 'Lichtman', 'Devinney', 'Fugitt', 'Proud', 'Hults', 'Galey', 'Verna', 'Newburn', 'Lafortune', 'Fobbs', 'Azure', 'Cheong', 'Heft', 'Aispuro', 'Longstreth', 'Lajeunesse', 'Howle', 'Galley', 'Lovan', 'Convery', 'Malatesta', 'Warnecke', 'Glavin', 'Reil', 'Filson', 'Poage', 'Fountaine', 'Nolley', 'Raglin', 'Backlund', 'Doerfler', 'Faunce', 'Hooton', 'Lightcap', 'Stepanek', 'Grosser', 'Weld', 'Filippi', 'Youn', 'Matis', 'Harnett', 'Ferrill', 'Segers', 'Ponds', 'Cuyler', 'Faile', 'Flaugher', 'Kuehner', 'Giorgi', 'Eckler', 'Sergeant', 'Twiggs', 'Boeck', 'Flach', 'Iliff', 'Mcmurtrey', 'Mcnelis', 'Steckel', 'Rouillard', 'Folkerts', 'Mechling', 'Whitcher', 'Daws', 'Joly', 'Abt', 'Eells', 'Niccum', 'Twining', 'Grinder', 'Melrose', 'Yarbro', 'Degenhardt', 'Dimeglio', 'Okamura', 'Kriss', 'Payette', 'Chui', 'Mowers', 'Foose', 'Kinzie', 'Blick', 'Rizer', 'Alcock', 'Sirmans', 'Behrman', 'Carsten', 'Kopacz', 'Randhawa', 'Schwing', 'Burkhard', 'Cunanan', 'Exley', 'Balducci', 'Leman', 'Hyslop', 'Burtch', 'Hadnot', 'Lanphear', 'Finchum', 'Voit', 'Jock', 'Wilhoite', 'Officer', 'Mayweather', 'Ravenell', 'Arehart', 'Bonetti', 'Cloer', 'Galliher', 'Niven', 'Uyeda', 'Coughenour', 'Siddiqi', 'Karimi', 'Cupit', 'Loupe', 'Hammell', 'Antley', 'Ally', 'Southers', 'Haymond', 'Hosley', 'Broz', 'Kinoshita', 'Kohout', 'Lipke', 'Ostrow', 'Teves', 'Gaus', 'Meiser', 'Cravey', 'Noss', 'Drayer', 'Crooms', 'Carrano', 'Mckechnie', 'Uhrich', 'Villalva', 'Wilkening', 'Benevides', 'Kepple', 'Pon', 'Randol', 'Leadbetter', 'Russom', 'Locklin', 'Battiste', 'Abundis', 'Agosta', 'Bartek', 'Brillhart', 'Hoffmaster', 'Mehr', 'Spanos', 'Denker', 'Kimberling', 'Schon', 'Felten', 'Lightle', 'Ramseur', 'Branning', 'Deblois', 'Inocencio', 'Maricle', 'Nishimoto', 'Oviatt', 'Shunk', 'Taddeo', 'Villarruel', 'Otterson', 'Clune', 'Seamster', 'Dandy', 'Cybulski', 'Daza', 'Eastep', 'Faulhaber', 'Friedberg', 'Gentz', 'Scola', 'Sebesta', 'Glinski', 'Schoon', 'Graeber', 'Sinks', 'Wee', 'Summerall', 'Deets', 'Furnish', 'Kelemen', 'Maiorano', 'Teachout', 'Paquet', 'Mcgahey', 'Kill', 'Horman', 'Selders', 'Cottman', 'Delfin', 'Fronk', 'Seelig', 'Visco', 'Briles', 'Castillon', 'Suire', 'Havey', 'Arner', 'Farver', 'Marts', 'Gean', 'Hugh', 'Stoney', 'Townsel', 'Sandquist', 'Neidig', 'Miser', 'Leeth', 'Hocutt', 'Balcazar', 'Caporale', 'Guymon', 'Horstmann', 'Miedema', 'Zickefoose', 'Casterline', 'Pfannenstiel', 'Becht', 'Myres', 'Ried', 'Vallery', 'Bator', 'Calise', 'Cotterman', 'Desautels', 'Hinchey', 'Kostka', 'Orenstein', 'Rosenau', 'Skow', 'Cuello', 'Herder', 'Cure', 'Eadie', 'Claggett', 'Batie', 'Kirwin', 'Troia', 'Sinnett', 'Books', 'Maize', 'Tremble', 'Sinkler', 'Gallon', 'Winkles', 'Zion', 'Walt', 'Pearse', 'Gathright', 'Isakson', 'Saeger', 'Siegle', 'Wittwer', 'Modesto', 'Bensen', 'Royals', 'Mccane', 'Begaye', 'Matuszewski', 'Schrier', 'Shimko', 'Torchia', 'Ausmus', 'Casazza', 'Mealer', 'Yant', 'Amar', 'Callas', 'Depaola', 'Kintner', 'Lech', 'Marsico', 'Boerger', 'Rak', 'Kellen', 'Kennemer', 'Carbo', 'Rennick', 'Brennen', 'Dorrough', 'Shealey', 'Breyer', 'Dilks', 'Geske', 'Hundt', 'Occhipinti', 'Strauser', 'Schult', 'Transue', 'Holding', 'Vanhorne', 'Critchlow', 'Steptoe', 'Buerger', 'Claassen', 'Farinas', 'Ruland', 'Holsapple', 'Mcclintic', 'Bendel', 'Muriel', 'Mckeithan', 'Shellman', 'Balzano', 'Bement', 'Montesinos', 'Ringle', 'Sobotka', 'Donahoo', 'Dicker', 'Harling', 'Burkley', 'Browner', 'Iovino', 'Kubala', 'Labriola', 'Morra', 'Orloff', 'Patchen', 'Recchia', 'Budge', 'Glendenning', 'Nethery', 'Scholtz', 'Aybar', 'Buis', 'Mattie', 'Bonsall', 'Conine', 'Dettmer', 'Gerding', 'Plantz', 'Vandorn', 'Tremaine', 'Ruddick', 'Murrow', 'Mceachin', 'Bridgeforth', 'Docherty', 'Hultman', 'Liechty', 'Touchton', 'Yokoyama', 'Borth', 'Daoud', 'Mealy', 'Hearst', 'Stalling', 'Drapeau', 'Hellwig', 'Longtin', 'Rappa', 'Tormey', 'Vanantwerp', 'Sabel', 'Neagle', 'Duet', 'Liebert', 'Lush', 'Aly', 'Behn', 'Brereton', 'Atienza', 'Dubey', 'Gennaro', 'Miltenberger', 'Nitschke', 'Ragle', 'Schumm', 'Tangen', 'Waibel', 'Whitham', 'Stallone', 'Perritt', 'Coody', 'Hinch', 'Depuy', 'Dunkelberger', 'Texeira', 'Tomita', 'Diers', 'Elsasser', 'Neve', 'Clendenen', 'Pettibone', 'Dobyns', 'Ciotti', 'Dodrill', 'Fridman', 'Lepine', 'Nygard', 'Shreves', 'Sollenberger', 'Leinbach', 'Diazdeleon', 'Bourget', 'Ramadan', 'Allensworth', 'Scarboro', 'Prowell', 'Ghee', 'Edouard', 'Duca', 'Ziebell', 'Kercher', 'Greger', 'Mas', 'Shier', 'Branca', 'Melchior', 'Cast', 'Saner', 'Beswick', 'Carone', 'Sobieski', 'Zweifel', 'Beahm', 'Defrank', 'Krebsbach', 'Mericle', 'Mcinnes', 'Lown', 'Brumback', 'Clause', 'Claborn', 'Rollin', 'Montford', 'Beckles', 'Grebe', 'Groesbeck', 'Guidi', 'Mathisen', 'Mukherjee', 'Rotolo', 'Seybert', 'Odegaard', 'Mackley', 'Glatt', 'Going', 'Perks', 'Sansbury', 'Prude', 'Bequette', 'Difilippo', 'Dodgen', 'Terpening', 'Vanepps', 'Poncedeleon', 'Qu', 'Ullery', 'Wisener', 'Lok', 'Lutton', 'Bellah', 'Kinsel', 'Tone', 'Carabajal', 'Koll', 'Shankar', 'Edick', 'Donathan', 'Andree', 'Perrino', 'Moffit', 'Gaddie', 'Breidenbach', 'Jespersen', 'Larrick', 'Mauriello', 'Morgado', 'Roh', 'Svec', 'Tebbe', 'Thieman', 'Cerezo', 'Perkowski', 'Colville', 'Yarnall', 'Chason', 'Brach', 'Meller', 'Brayboy', 'Salaam', 'Keleher', 'Kilbourn', 'Lowenthal', 'Rispoli', 'Vanzee', 'Vlahos', 'Trojan', 'Birdsell', 'Defoor', 'Mcclusky', 'Barret', 'Smoke', 'Berkeley', 'Cuadrado', 'Galyean', 'Gruen', 'Gualtieri', 'Kurland', 'Sposato', 'Stieber', 'Weatherman', 'Strausser', 'Miera', 'Edlin', 'Gilford', 'Mouzon', 'Buczek', 'Krapf', 'Lucatero', 'Amburn', 'Peddicord', 'Forero', 'Domer', 'Farish', 'Segraves', 'Sant', 'Engles', 'Douthitt', 'Lall', 'Wormley', 'Geisel', 'Hao', 'Polhemus', 'Slifer', 'Mowen', 'Markin', 'Rape', 'Bollin', 'Bulloch', 'Pouncey', 'Rufus', 'Goodlow', 'Dammann', 'Delgrosso', 'Gadbois', 'Leap', 'Lorentzen', 'Sprankle', 'Stucki', 'Vitela', 'Walck', 'Winkelmann', 'Mund', 'Bley', 'Channel', 'Griebel', 'Nordberg', 'Slinkard', 'Orrick', 'Crooker', 'Groll', 'Maradiaga', 'Jolin', 'Boni', 'Prom', 'Reder', 'Easler', 'Totty', 'Arnaud', 'Bohler', 'Heikkila', 'Kehler', 'Klingenberg', 'Matera', 'Striegel', 'Urzua', 'Baldi', 'Burling', 'Osmond', 'Rucks', 'Diel', 'Kassel', 'Schewe', 'Conkling', 'Ricke', 'Schack', 'Shirah', 'Brauner', 'Carriker', 'Mcduffy', 'Bieker', 'Credeur', 'Fabry', 'Holdeman', 'Jeansonne', 'Klett', 'Kolstad', 'Mustain', 'Strub', 'Ricketson', 'Fairbairn', 'Langel', 'Fenster', 'Slatton', 'Ehrenberg', 'Espinola', 'Hannaford', 'Hinderliter', 'Siqueiros', 'Ange', 'Gillin', 'Battin', 'Belue', 'Spigner', 'Simien', 'Gervasi', 'Pallares', 'Plotner', 'Puri', 'Swiatek', 'Vanmatre', 'Corp', 'Devillier', 'Bucholtz', 'Bremner', 'Jen', 'Evanson', 'Ghent', 'Eastland', 'Kappler', 'Grahn', 'Shadrick', 'Kibby', 'Chaires', 'Kontos', 'Petrov', 'Pillai', 'Chadbourne', 'Sotolongo', 'Allende', 'Kells', 'Hayford', 'Hempstead', 'Livers', 'Farrior', 'Authement', 'Bitz', 'Corkery', 'Klawitter', 'Mongold', 'Somma', 'Topham', 'Defrancisco', 'Noda', 'Breon', 'Thetford', 'Rod', 'Kisling', 'Drouillard', 'Dotts', 'Gramajo', 'Masek', 'Volkert', 'Vora', 'Pietras', 'Sheffler', 'Shrestha', 'Kono', 'Panza', 'Brunn', 'Tatom', 'Nasir', 'Barris', 'Bursey', 'Elsea', 'Kettner', 'Martorana', 'Lindow', 'Chevez', 'Pater', 'Hennis', 'Iman', 'Stembridge', 'Satcher', 'Britz', 'Hommel', 'Llanas', 'Pathak', 'Schwartzman', 'Janz', 'Hickle', 'Deakins', 'Mantle', 'Billing', 'Veiga', 'Darbonne', 'Angelle', 'Granderson', 'Odoms', 'Mondesir', 'Ducksworth', 'Anker', 'Deneen', 'Follmer', 'Norred', 'Whitecotton', 'Halsted', 'Schiele', 'Reddin', 'Pichon', 'Eustice', 'Finelli', 'Kawasaki', 'Kerekes', 'Surrett', 'Divers', 'Kerney', 'Bohlman', 'Oberst', 'Prough', 'Tarwater', 'Wangler', 'Piceno', 'Persico', 'Lastra', 'Fillman', 'Barlett', 'Cort', 'Kuchar', 'Plaisted', 'Rufo', 'Whitmarsh', 'Fusaro', 'Bajwa', 'Belter', 'Aldama', 'Conlee', 'Tweedie', 'Greear', 'Riviera', 'Stormer', 'Flannagan', 'Heatley', 'Feazell', 'Bastidas', 'Benninger', 'Canseco', 'Hanners', 'Kreiner', 'Pestana', 'Simerly', 'Such', 'Tiedeman', 'Weible', 'Zawadzki', 'Rayman', 'Crose', 'Sheeler', 'Kirven', 'Winford', 'Mackall', 'Balderson', 'Calleja', 'Klinefelter', 'Lauffer', 'Probert', 'Melero', 'Ravelo', 'Degroff', 'Pylant', 'Ricco', 'Varona', 'Pickney', 'Bachmeier', 'Dulay', 'Hanover', 'Virgilio', 'Spino', 'Bohon', 'Cantin', 'Pettijohn', 'Branigan', 'Duhe', 'Perine', 'Thedford', 'Shamburger', 'Guarnieri', 'Guptill', 'Nyland', 'Setliff', 'Shreffler', 'Viggiano', 'Pries', 'Sunde', 'Bulmer', 'Platts', 'Jeremiah', 'Fawley', 'Jansson', 'Rebelo', 'Prochnow', 'Waldeck', 'Citron', 'Roughton', 'Ryckman', 'Molano', 'Cannaday', 'Ned', 'Beckerman', 'Galaz', 'Graziani', 'Kawakami', 'Limones', 'Mousseau', 'Riha', 'Huser', 'Casady', 'Kirker', 'Benish', 'Tomczyk', 'Hallahan', 'Kue', 'Siple', 'Kandel', 'Maring', 'Bosak', 'Gandolfo', 'Reichart', 'Robarge', 'Shufelt', 'Forry', 'Richart', 'Shireman', 'Tozzi', 'Trudel', 'Tat', 'Maday', 'Faw', 'Lawrie', 'Mingle', 'Yasin', 'Cutrone', 'Fairbrother', 'Ficken', 'Kluesner', 'Lagana', 'Schoenborn', 'Greb', 'Stromain', 'Mcpeters', 'Toepfer', 'Wehrman', 'Kozma', 'Rohner', 'Kittel', 'Louderback', 'Daughtrey', 'Philippe', 'Bargo', 'Cullinane', 'Fama', 'Fredenburg', 'Pedone', 'Santillanes', 'Zahner', 'Zupan', 'Dundon', 'Gilfillan', 'Grego', 'Otter', 'Jamil', 'Beaubien', 'Collingwood', 'Quinney', 'Botero', 'Edstrom', 'Flink', 'Ortner', 'Schmidtke', 'Reichle', 'Leder', 'Pelosi', 'Fiorito', 'Berber', 'Hislop', 'Dunstan', 'Favorite', 'Wooding', 'Gariepy', 'Gottesman', 'Guercio', 'Konz', 'Kothari', 'Laguardia', 'Lamphier', 'Puetz', 'Casagrande', 'Quay', 'Rieth', 'Vowell', 'Mcanulty', 'Mian', 'Lucus', 'Alvizo', 'Domanski', 'Elling', 'Maniaci', 'Neumeyer', 'Piraino', 'Schroll', 'Willsey', 'Avellaneda', 'Wilcoxen', 'Murrey', 'Bennette', 'Boyajian', 'Distler', 'Lindamood', 'Maclaren', 'Onken', 'Stefano', 'Uselton', 'Wilgus', 'Rardin', 'Boen', 'Stillwagon', 'Satter', 'Allis', 'Capell', 'Nedd', 'Arcand', 'Breit', 'Horwath', 'Lakatos', 'Roling', 'Hessel', 'Cusson', 'Rockefeller', 'Shiffer', 'Briney', 'Celeste', 'Sayed', 'Revelle', 'Corker', 'Baldonado', 'Lokken', 'Plymale', 'Sugden', 'Twist', 'Parten', 'Geil', 'Sime', 'Grisby', 'Jeanty', 'Baroni', 'Ditullio', 'Domenico', 'Geiss', 'Gemmill', 'Leng', 'Lewicki', 'Weyandt', 'Haycock', 'Coonce', 'Pillar', 'Medcalf', 'Sall', 'Goldsborough', 'Bergerson', 'Daffron', 'Hinchman', 'Leibold', 'Sarkissian', 'Serratos', 'Uhlig', 'Wurth', 'Ost', 'Steinmann', 'Saum', 'Bullion', 'Dejonge', 'Assad', 'Adelson', 'Sholes', 'Clermont', 'Tabron', 'Kilduff', 'Millspaugh', 'Partyka', 'Santore', 'Wensel', 'Zima', 'Raschke', 'Simonis', 'Tuell', 'Obriant', 'Lewter', 'Nealey', 'Baranski', 'Bloomberg', 'Franchi', 'Klemme', 'Raborn', 'Wohlgemuth', 'Basta', 'Bernardini', 'Canlas', 'Yeargin', 'Stingley', 'Crosland', 'Bob', 'Ascher', 'Dibona', 'Farabaugh', 'Kilcoyne', 'Poblete', 'Beato', 'Teasdale', 'Rossell', 'Lawhorne', 'Jama', 'Behringer', 'Hallstrom', 'Kitzman', 'Klenk', 'Mctigue', 'Onate', 'Rodda', 'Siegal', 'Pepple', 'Tash', 'Gager', 'Hing', 'Yokley', 'Epting', 'Mangham', 'Zackery', 'Blackerby', 'Canedo', 'Glatz', 'Hilker', 'Hummell', 'Mangels', 'Gamel', 'Gang', 'Hooser', 'Moates', 'Mutch', 'Lyerly', 'Vesey', 'Satterthwaite', 'Calcote', 'Saulsbury', 'Averette', 'Ates', 'Rita', 'Vicencio', 'Wismer', 'Mayoral', 'Crader', 'Levens', 'Joel', 'Haye', 'Drager', 'Eiden', 'Escutia', 'Inzunza', 'Moroz', 'Sepulvado', 'Tomaselli', 'Zartman', 'Isaak', 'Philippi', 'Mcgeary', 'Taha', 'Buttler', 'Crisci', 'Kot', 'Micek', 'Mondello', 'Petrarca', 'Rossini', 'Villalvazo', 'Weedman', 'Mitten', 'Favre', 'Varnes', 'Betancur', 'Bevington', 'Bockman', 'Feldstein', 'Kujawski', 'Siemer', 'Soderlund', 'Fricker', 'Gerstein', 'Kick', 'Haff', 'Brackman', 'Hulen', 'Nephew', 'Birkett', 'Gardenhire', 'Garn', 'Kellenberger', 'Mogensen', 'Murata', 'Weisbrod', 'Vilchis', 'Meder', 'Akey', 'Mcmanis', 'Delatte', 'Guiles', 'Turnbough', 'Murrah', 'Kilgo', 'Marcelin', 'Cecchini', 'Chrysler', 'Eick', 'Fletes', 'Luevanos', 'Kurt', 'Firman', 'Hensen', 'Champine', 'Holford', 'Appelbaum', 'Ciampa', 'Florentino', 'Lorton', 'Lubinski', 'Moquin', 'Welke', 'Grinberg', 'Bolstad', 'Ade', 'Outten', 'Grear', 'Haith', 'Borntreger', 'Steinhauser', 'Facio', 'Preslar', 'Speirs', 'Grasser', 'Zuck', 'Deslauriers', 'Frates', 'Mayville', 'Suddeth', 'Littlepage', 'Aversa', 'Chagolla', 'Godshall', 'Jordahl', 'Oakland', 'Monsen', 'Rudolf', 'Mccollister', 'Mickles', 'Flaig', 'Friberg', 'Grubaugh', 'Sliwinski', 'Stach', 'Bechtol', 'Pasch', 'Keebler', 'Fagin', 'Mister', 'Wynter', 'Bednarek', 'Blansett', 'Crossett', 'Kettering', 'Lafata', 'Raffa', 'Roig', 'Schopp', 'Voegele', 'Waldschmidt', 'Clatterbuck', 'Amer', 'Kraut', 'Furniss', 'Edgecomb', 'Aspinwall', 'Buckelew', 'Loranger', 'Koppel', 'Vernier', 'Latino', 'Hayton', 'Girod', 'Primrose', 'Jetter', 'Hyche', 'Ottley', 'Isidro', 'Kort', 'Mulroy', 'Reznik', 'Tozer', 'Vanderheyden', 'Kassab', 'Paro', 'Belen', 'Vandever', 'Harsch', 'Rawley', 'Gonder', 'Delbridge', 'Alumbaugh', 'Basulto', 'Hoehne', 'Mccaig', 'Qin', 'Rasnake', 'Tewksbury', 'Ratajczak', 'Reinbold', 'Mcgillivray', 'Nuccio', 'Steinbeck', 'Deland', 'Callow', 'Wootten', 'Lytton', 'Calix', 'Stinger', 'Slider', 'Cadman', 'Faulconer', 'Higashi', 'Lamping', 'Sellner', 'Walko', 'Kilkenny', 'Charter', 'Gauntt', 'Bronk', 'Legare', 'Hukill', 'Kulikowski', 'Kunde', 'Michelsen', 'Mottola', 'Pasion', 'Stimmel', 'Deavila', 'Lian', 'Koga', 'Kitchin', 'Whitner', 'Bucholz', 'Kilbride', 'Klumpp', 'Osinski', 'Petrich', 'Saar', 'Robards', 'Flakes', 'Accardo', 'Gebauer', 'Matyas', 'Montesano', 'Schiefer', 'Zuehlke', 'Swartout', 'Gidley', 'Burghardt', 'Delcambre', 'Jerman', 'Laufenberg', 'Paterno', 'Piccione', 'Wenning', 'Wilhelmi', 'Rathjen', 'Bauch', 'Hiott', 'Bagnall', 'Miskell', 'Snellings', 'Sally', 'Bjornson', 'Din', 'Kroeker', 'Mitra', 'Saxena', 'Hausler', 'Scogin', 'Jeronimo', 'Holderfield', 'Cruze', 'Christina', 'Beville', 'Whitehorn', 'Bembry', 'Fludd', 'Abboud', 'Blomgren', 'Friddle', 'Jarvi', 'Nastasi', 'Tomich', 'Peinado', 'Rinaldo', 'Proudfoot', 'Down', 'Lawry', 'Noor', 'Bachelor', 'Mullenax', 'Pocock', 'Resler', 'Sprunger', 'Wiegel', 'Wohlers', 'Niedzwiecki', 'Bourgoin', 'Grist', 'Nora', 'Gude', 'Mcgaughy', 'Borror', 'Bushee', 'Crego', 'Engberg', 'Karle', 'Raso', 'Rayas', 'Roehrig', 'Villamil', 'Croucher', 'Candido', 'Rockhill', 'Dahn', 'Philp', 'Grasty', 'Basnight', 'Cacioppo', 'Heavener', 'Hoenig', 'Janisch', 'Labombard', 'Sheng', 'Wettstein', 'Wymore', 'Zuluaga', 'Canova', 'Maclennan', 'Tuley', 'Geddings', 'Cayetano', 'Bogar', 'Malbrough', 'Bradish', 'Chiaramonte', 'Eguia', 'Loux', 'Nemecek', 'Ouimet', 'Roxas', 'Yoshioka', 'Cossio', 'Sleight', 'Walla', 'Younan', 'Hee', 'Bartlow', 'Parchman', 'Leaks', 'Folz', 'Knittel', 'Lovvorn', 'Melick', 'Weingartner', 'Eustace', 'Robbs', 'Jacquet', 'Direnzo', 'Domke', 'Kestler', 'Pavelka', 'Pileggi', 'Silvestro', 'Leedom', 'Kyte', 'Espey', 'Kincannon', 'Robicheaux', 'Lard', 'Falkenstein', 'Fino', 'Kotz', 'Lammert', 'Markovic', 'Mcwaters', 'Shibata', 'Garoutte', 'Brum', 'Hora', 'Gundrum', 'Leer', 'Coller', 'Delsignore', 'Ebarb', 'Heras', 'Skolnick', 'Sponseller', 'Baltes', 'Rabinovich', 'Welden', 'Papas', 'Bingman', 'Neto', 'Burrough', 'Ollie', 'Deitrick', 'Hermansen', 'Datta', 'Gebo', 'Bulla', 'Rippey', 'Solon', 'Draughon', 'Sylvestre', 'Outen', 'Westfield', 'Daoust', 'Kuan', 'Kubat', 'Labuda', 'Olejniczak', 'Radomski', 'Scheuermann', 'Schunk', 'Tuazon', 'Wineland', 'Gizzi', 'Millay', 'Hamp', 'Murdaugh', 'Hayles', 'Plowden', 'Lesure', 'Artrip', 'Kenneally', 'Piehl', 'Vandermeulen', 'Camberos', 'Hochberg', 'Sinner', 'Crass', 'Gade', 'Tedrick', 'Nicholl', 'Speece', 'Chatterjee', 'Gillihan', 'Luzzi', 'Obyrne', 'Uchida', 'Kidney', 'Dorough', 'Dangler', 'Mcneel', 'Ruley', 'Mcloud', 'Smarr', 'Gayles', 'Janiszewski', 'Kubo', 'Mckibbin', 'Szatkowski', 'Lehnert', 'Mcilvain', 'Mcclish', 'Mcentyre', 'Strawder', 'Briere', 'Headlee', 'Leszczynski', 'Mauser', 'Rask', 'Wisler', 'Burba', 'Shaulis', 'Showman', 'Proto', 'Creasman', 'Slye', 'Dunwoody', 'Ellingsworth', 'Linebaugh', 'Riva', 'Um', 'Muldowney', 'Burlew', 'Gettings', 'Clingman', 'Shield', 'Trollinger', 'Stiger', 'Kellman', 'Arviso', 'Boettger', 'Deak', 'Deiter', 'Hackenberg', 'Langone', 'Lichter', 'Siano', 'Wrinkle', 'Dickert', 'Boor', 'Ludington', 'Griffing', 'Perin', 'Woodby', 'Quail', 'Harriss', 'Bilotta', 'Chino', 'Cocke', 'Corbell', 'Dearden', 'Facundo', 'Gaskell', 'Grieser', 'Houts', 'Zuk', 'Yamauchi', 'Caouette', 'Perham', 'Hewson', 'Keppel', 'Artiaga', 'Sa', 'Ginger', 'Goosby', 'Bollig', 'Grippo', 'Hoffmeyer', 'Klaas', 'Rohlfing', 'Stolp', 'Vielma', 'Gresh', 'Mignone', 'Parsell', 'Sprout', 'Hase', 'Nadal', 'Joye', 'Butkus', 'Donlan', 'Fuhrer', 'Grobe', 'Haverkamp', 'Janecek', 'Pancoast', 'Rathke', 'Scheibe', 'Schneller', 'Scally', 'Valeriano', 'Fail', 'Everage', 'Murff', 'Demayo', 'Dieterich', 'Kramp', 'Macchia', 'Ruyle', 'Zuidema', 'Tischer', 'Palo', 'Bahn', 'Hartson', 'Rosborough', 'Hartke', 'Hixenbaugh', 'Matlack', 'Hoefler', 'Hsia', 'Cech', 'Donham', 'Szafranski', 'Jennison', 'Emmer', 'Christians', 'Swigert', 'Mclawhorn', 'Costas', 'Culligan', 'Eisenstein', 'Joos', 'Villacorta', 'Majerus', 'Lukowski', 'Byford', 'Canepa', 'Jeppson', 'Larison', 'Waechter', 'Bleich', 'Trigo', 'Lill', 'Mcisaac', 'Oflaherty', 'Dedman', 'Lynes', 'Everidge', 'Armfield', 'Cadieux', 'Dembowski', 'Flewelling', 'Guadagno', 'Lamendola', 'Meidinger', 'Muzzy', 'Pacelli', 'Pangle', 'Denzer', 'Sharman', 'Venzor', 'Shadwick', 'Saine', 'Lighty', 'Twine', 'Buehner', 'Caruana', 'Filipiak', 'Fiori', 'Kellison', 'Odonovan', 'Ragone', 'Enyeart', 'Coale', 'Coombes', 'Yarrington', 'Leno', 'Coad', 'Well', 'Labranche', 'Banaszak', 'Jovanovic', 'Junk', 'Kratochvil', 'Marchi', 'Mcnitt', 'Monnin', 'Portales', 'Nazzaro', 'Laramie', 'Kohlman', 'Pinette', 'Craw', 'Aldred', 'Jolicoeur', 'Nevers', 'Boseman', 'Apostol', 'Barbaro', 'Dirienzo', 'Kimrey', 'Knaack', 'Marenco', 'Meixner', 'Placek', 'Prigge', 'Sablan', 'Stoecker', 'Ulrey', 'Madonia', 'Mariotti', 'Hypes', 'Teti', 'Pothier', 'Duer', 'Reay', 'Charlie', 'Alix', 'Cropp', 'Wellons', 'Haugland', 'Malkowski', 'Powley', 'Query', 'Stolle', 'Twedt', 'Grech', 'Musson', 'Larrimore', 'Esper', 'Suleiman', 'Gillie', 'Aaronson', 'Brueggeman', 'Kupfer', 'Orf', 'Pozzi', 'Rayos', 'Scheiner', 'Schmoll', 'Sirota', 'Trickey', 'Ahuja', 'Halm', 'Jaycox', 'Carithers', 'Bjorkman', 'Klar', 'Lembke', 'Nordyke', 'Primeau', 'Wachs', 'Wissinger', 'Doonan', 'Mikulski', 'Murthy', 'Raju', 'Thrailkill', 'Splawn', 'Lockamy', 'Brassell', 'Mcshan', 'Hawbaker', 'Kracht', 'Lahman', 'Lauritsen', 'Metzner', 'Presser', 'Rapoport', 'Romani', 'Wolken', 'Bertone', 'Bhat', 'Lenzi', 'Lefort', 'Makar', 'Melnyk', 'Siguenza', 'Ristow', 'Piller', 'Mcgaugh', 'Lampton', 'Delva', 'Gethers', 'Leday', 'Bateson', 'Beckstrom', 'Bedsole', 'Hauber', 'Hodgkinson', 'Croghan', 'Glanz', 'Gaver', 'Pinkley', 'Traynham', 'Heffley', 'Indelicato', 'Lindblad', 'Petrik', 'Ptacek', 'Capen', 'Carrara', 'Ortuno', 'Lobue', 'Corella', 'Lybrand', 'Myler', 'Steer', 'Mckamey', 'Coman', 'Auker', 'Escue', 'Knell', 'Mahood', 'Tillinghast', 'Deremer', 'Janak', 'Naegele', 'Patnaude', 'Leahey', 'Pupo', 'Bouse', 'Bradstreet', 'Symes', 'Callies', 'Duncanson', 'Blanche', 'Span', 'Shakir', 'Finneran', 'Lenker', 'Mendola', 'Navin', 'Palka', 'Spanier', 'Stahler', 'Vannatter', 'Botta', 'Gonser', 'Edelson', 'Brashier', 'Golla', 'Parramore', 'Bigby', 'El', 'Habeck', 'Kleinhans', 'Knobel', 'Pekar', 'Remmers', 'Dea', 'Foo', 'Plumer', 'Combest', 'Godbee', 'Hilaire', 'Lepak', 'Sgro', 'Vierling', 'Harm', 'Holtsclaw', 'Gaetano', 'Kindler', 'Sabbagh', 'Politte', 'Amor', 'Tilly', 'Trone', 'Callaham', 'Roussell', 'Asplund', 'Cacciatore', 'Dries', 'Friedl', 'Hartranft', 'Kimmell', 'Lengacher', 'Scardino', 'Werley', 'Zappa', 'Hust', 'Seiden', 'Bultman', 'Withey', 'Brandow', 'Oler', 'Ladouceur', 'Celli', 'Condie', 'Egge', 'Kleman', 'Krafft', 'Margulies', 'Weier', 'Mikels', 'Pavel', 'Sigel', 'Foulke', 'Kluttz', 'Mcgown', 'Acero', 'Gering', 'Knauff', 'Ruesch', 'Rydberg', 'Shonk', 'Weisgerber', 'Wieber', 'Zinser', 'Lilienthal', 'Crosbie', 'Luckie', 'Chenier', 'Aceto', 'Atnip', 'Hisey', 'Imhof', 'Klocke', 'Renderos', 'Schaad', 'Shoults', 'Slevin', 'Tenenbaum', 'Vrana', 'Dicesare', 'Colarusso', 'Killgore', 'Courtois', 'Tysinger', 'Agard', 'Brutus', 'Woodfork', 'Boeckman', 'Breitenstein', 'Downen', 'Franzese', 'Garbe', 'Iannucci', 'Kist', 'Mccolgan', 'Seib', 'Sereno', 'Varma', 'Fought', 'Barcomb', 'Happ', 'Yeaton', 'Sharples', 'Huson', 'Askin', 'Elliston', 'Birks', 'Allums', 'Richarson', 'Arterburn', 'Auyeung', 'Engman', 'Segall', 'Sjoberg', 'Sturman', 'Buys', 'Basford', 'Gaut', 'Hollomon', 'Antal', 'Groseclose', 'Motyka', 'Reddell', 'Ansel', 'Fausett', 'Girgis', 'Brownson', 'Pouncy', 'Behler', 'Ciesla', 'Dewall', 'Helmers', 'Pizzuto', 'Sao', 'Hourigan', 'Novelli', 'Kuta', 'Gau', 'Verville', 'Parkison', 'Souter', 'Whitelaw', 'Vercher', 'Coger', 'Issac', 'Cardamone', 'Heneghan', 'Herrero', 'Plancarte', 'Reach', 'Sarinana', 'Zweig', 'Berkheimer', 'Brosseau', 'Angstadt', 'Popoca', 'Brode', 'Presswood', 'Hannibal', 'Pigford', 'Argento', 'Dieringer', 'Kinnett', 'Maclachlan', 'Perko', 'Rosenkranz', 'Kobus', 'Merk', 'Prevatte', 'Kaya', 'Didio', 'Thong', 'Cowin', 'Tumlin', 'Lopp', 'Callier', 'Sesay', 'Beerman', 'Creger', 'Eyster', 'Libbey', 'Minear', 'Pontious', 'Stemen', 'Strahl', 'Trillo', 'Dively', 'Lackner', 'Welte', 'Likes', 'Mazzoni', 'Resh', 'Oser', 'Dilday', 'Requena', 'Bail', 'Ellen', 'Buchanon', 'Almeda', 'Dimino', 'Griess', 'Wetzler', 'Kriegel', 'Attanasio', 'Reighard', 'Alling', 'Wiginton', 'Penfield', 'Barbe', 'Alred', 'Ridout', 'Lucien', 'Cerullo', 'Esterline', 'Garriott', 'Hendershott', 'Kaczmarczyk', 'Pazos', 'Racicot', 'Kowaleski', 'Lippold', 'Bankert', 'Emigh', 'Cupps', 'Jagger', 'Leavens', 'Lies', 'Ater', 'Bleau', 'Pellot', 'Crosslin', 'Faulks', 'Antwine', 'Calixte', 'Brod', 'Hamad', 'Junkin', 'Koeppel', 'Leifer', 'Vannest', 'Olcott', 'Delange', 'Hillen', 'Merlin', 'Gundy', 'Hogans', 'Arseneau', 'Buzard', 'Ewalt', 'Persing', 'Pursel', 'Rohrs', 'Sisemore', 'Vilchez', 'Bernath', 'Rosenbalm', 'Woolverton', 'Gibbins', 'Like', 'Larsson', 'Savidge', 'Strohmeyer', 'Trentham', 'Wotring', 'Boster', 'Sewall', 'Glore', 'Burtis', 'Marchman', 'Fouche', 'Okafor', 'Khatri', 'Lengel', 'Pribyl', 'Rodewald', 'Cafaro', 'Mattix', 'Shingler', 'Seawell', 'Square', 'Belnap', 'Heidemann', 'Kretz', 'Nebeker', 'Zemke', 'Reiners', 'Cassels', 'Hout', 'Favor', 'Rattray', 'Custard', 'Bellucci', 'Bucklew', 'Casavant', 'Davanzo', 'Kleber', 'Koeppen', 'Kulpa', 'Ledonne', 'Scarano', 'Schaar', 'Staiger', 'Trigueros', 'Trobaugh', 'Tufano', 'Tschetter', 'Labra', 'Beverage', 'Hulet', 'Stairs', 'Waggener', 'Candy', 'Kaba', 'Feiner', 'Ipock', 'Nelligan', 'Pottorff', 'Beno', 'Beausoleil', 'Mayen', 'Kalil', 'Deller', 'Cormack', 'Hayne', 'Below', 'Bundick', 'Avakian', 'Desmet', 'Dobler', 'Dykeman', 'Eckstrom', 'Mahle', 'Meers', 'Bortner', 'Kroon', 'Lindenmuth', 'Mcnichol', 'Sechrest', 'Abdulla', 'Gaudin', 'Lamers', 'Luffman', 'Marchione', 'Paredez', 'Polster', 'Maresh', 'Kristoff', 'Rickel', 'Frary', 'Lorance', 'Round', 'Toye', 'Claybrook', 'Senegal', 'Gayhart', 'Mcmackin', 'Sagan', 'Sarkar', 'Whistler', 'Stutsman', 'Alderfer', 'Spainhour', 'Karol', 'Ke', 'Mifflin', 'Salah', 'Alberty', 'Hynson', 'Beisel', 'Castelo', 'Dau', 'Diliberto', 'Dollins', 'Fiorini', 'Fritzler', 'Hanan', 'Hauschild', 'Overholser', 'Wrobleski', 'Peil', 'Bellon', 'Buice', 'Rolls', 'Shack', 'Arakelian', 'Carpino', 'Liou', 'Lydick', 'Supple', 'Tammaro', 'Walbridge', 'Jandreau', 'Riter', 'Roeser', 'Merson', 'Bole', 'Franey', 'Berrett', 'Carton', 'Mcnish', 'Earnhart', 'Lehrman', 'Lipski', 'Mandelbaum', 'Tanabe', 'Mirabile', 'Ocegueda', 'Clementi', 'Shake', 'Buckle', 'Rowsey', 'Eifert', 'Giesen', 'Standiford', 'Vallecillo', 'Walworth', 'Berkshire', 'Feit', 'Lande', 'Fiddler', 'Deputy', 'Feemster', 'Evelyn', 'Bocchino', 'Cozza', 'Dirocco', 'Kock', 'Luisi', 'Marcantonio', 'Presti', 'Rahimi', 'Ridinger', 'Sergi', 'Viana', 'Kabat', 'Suriel', 'Mester', 'Bozman', 'Huffines', 'Linck', 'Lodato', 'Ownbey', 'Pietz', 'Rudnicki', 'Schoener', 'Schrag', 'Spicher', 'Sze', 'Villella', 'Steinle', 'Seaberg', 'Derks', 'Mavis', 'Luellen', 'Garlington', 'Nimmons', 'Brevard', 'Seabrooks', 'Ahlquist', 'Golembiewski', 'Kochis', 'Popov', 'Poulter', 'Redington', 'Wingrove', 'Krepps', 'Viars', 'Gallatin', 'Gilham', 'Jimison', 'Glosson', 'Campeau', 'Goodhart', 'Koth', 'Lettieri', 'Siragusa', 'Sojka', 'Tichy', 'Viar', 'Carrozza', 'Chaffins', 'Eagleson', 'Prestwood', 'Deshazer', 'Ike', 'Kubacki', 'Minogue', 'Sunseri', 'Turnbaugh', 'Heminger', 'Delira', 'Jani', 'Platte', 'Waterson', 'Keeble', 'Kiper', 'Crigler', 'Swaby', 'Brisbin', 'Galiano', 'Negley', 'Regal', 'Stottlemyer', 'Volkmann', 'Herrold', 'Cypert', 'Markman', 'Laman', 'Williard', 'Terrio', 'Raulston', 'Harrow', 'Humiston', 'Kantner', 'Mcmonagle', 'Polasek', 'Ruocco', 'Schelling', 'Seip', 'Woller', 'Despres', 'Melius', 'Keiffer', 'Voges', 'Figg', 'Judice', 'Henery', 'Dejarnette', 'Prosper', 'Duenez', 'Frenette', 'Jaimez', 'Krist', 'Kuch', 'Schlachter', 'Traeger', 'Mrozinski', 'Colberg', 'Lade', 'Been', 'Revere', 'Greely', 'Belizaire', 'Amberg', 'Cerniglia', 'Lattanzio', 'Leitz', 'Ocker', 'Ratto', 'Thornburgh', 'Yule', 'Hibner', 'Puerto', 'Shoultz', 'Baley', 'Linley', 'Alfrey', 'Bazaldua', 'Deniz', 'Lohnes', 'Marder', 'Pelland', 'Urick', 'Loberg', 'Rempel', 'Faux', 'Tomkins', 'Gail', 'Mccardell', 'Reuben', 'Brabant', 'Hutzler', 'Liedtke', 'Nowack', 'Pittsley', 'Pelc', 'Darragh', 'Pae', 'Blanke', 'Brinks', 'Delap', 'Brea', 'Milsap', 'Borneman', 'Crofts', 'Nakai', 'Silguero', 'Speciale', 'Martindelcampo', 'Vandenburg', 'Wimsatt', 'Harbor', 'Mccorvey', 'Bensinger', 'Carhart', 'Condo', 'Lemen', 'Malchow', 'Vandewater', 'Ventresca', 'Morena', 'Mendell', 'Faustino', 'Kleiber', 'Alberson', 'Lamonte', 'Kiner', 'Belgrave', 'Blitz', 'Dildine', 'Gosch', 'Grabill', 'Klemp', 'Larrea', 'Pallas', 'Leonhard', 'Littler', 'Dilling', 'Weatherbee', 'Robnett', 'Lacount', 'Brackins', 'Counterman', 'Divincenzo', 'Dobrowolski', 'Eppard', 'Estepp', 'Gahan', 'Steininger', 'Tancredi', 'Wixom', 'Combes', 'Dena', 'Warn', 'Teems', 'Askey', 'Delmar', 'Ogles', 'Herriott', 'Aguinaldo', 'In', 'Kinter', 'Moul', 'Santaniello', 'Tringali', 'Vanasse', 'Vanwagoner', 'Whitesel', 'Vanderwal', 'Friedmann', 'Kalis', 'Cayer', 'Para', 'Wander', 'Cothron', 'Betters', 'Cloward', 'Cusano', 'Encinias', 'Imai', 'Lalone', 'Saks', 'Nosal', 'Crossan', 'Caverly', 'Tewell', 'Lowney', 'Merle', 'Meighan', 'Labat', 'Pou', 'Linsey', 'Gaviria', 'Manthei', 'Marquina', 'Siegert', 'Blondin', 'Maskell', 'Kimpel', 'Cappel', 'Tootle', 'Folkes', 'Mainor', 'Offord', 'Clagg', 'Minshew', 'Niebuhr', 'Schanz', 'Stotz', 'Takeda', 'Huelsman', 'Madril', 'Monico', 'Stradley', 'Thein', 'Cannell', 'Malson', 'Ludden', 'Couts', 'Mishoe', 'Dales', 'Slemp', 'Stueve', 'Ziemann', 'Fluke', 'Vitali', 'Monn', 'Dooling', 'Lambe', 'Cail', 'Louder', 'Lotts', 'Augusta', 'Ando', 'Depaolo', 'Egolf', 'Hibdon', 'Marzan', 'Mccawley', 'Mcgivern', 'Minjares', 'Mullally', 'Portner', 'Vinciguerra', 'Wolpert', 'Yingst', 'Checo', 'Starck', 'Ra', 'Credle', 'Baldauf', 'Bamberger', 'Besch', 'Caulkins', 'Huyck', 'Portela', 'Walberg', 'Kutcher', 'Hunger', 'Trant', 'Cumbee', 'Cheadle', 'Drewery', 'Andrada', 'Dollinger', 'Dondero', 'Salvati', 'Sefton', 'Siemers', 'Sitz', 'Smale', 'Wenk', 'Reschke', 'Puglia', 'Koob', 'Overland', 'Furrer', 'Gohl', 'Hegge', 'Hentschel', 'Huberty', 'Krise', 'Stasiak', 'Tripoli', 'Palomera', 'Norling', 'Smucker', 'Hennes', 'Metro', 'Himmel', 'Paolino', 'Prato', 'Wommack', 'Mcpheeters', 'Ronald', 'Eppinger', 'Cantey', 'Appell', 'Capellan', 'Fielden', 'Garfias', 'Heit', 'Janusz', 'Pagliaro', 'Pitz', 'Winegardner', 'Gregorich', 'Schlager', 'Selvidge', 'Shultis', 'Severn', 'Buffum', 'Crafts', 'Antony', 'Timpson', 'Deveaux', 'Maese', 'Merlos', 'Mojarro', 'Policastro', 'Tawil', 'Flamm', 'Aasen', 'Lipkin', 'Dyches', 'Caulk', 'Rampersad', 'Pettie', 'Hagwood', 'Jedlicka', 'Paoli', 'Perkey', 'Shaub', 'Vires', 'Glad', 'Mandrell', 'Angeli', 'Antuna', 'Bessler', 'Cebula', 'Heagy', 'Mankowski', 'Sitler', 'Vanleuven', 'Blanck', 'Dannenberg', 'Moren', 'Hites', 'Leckie', 'Tham', 'Dower', 'Beans', 'Alls', 'Sipp', 'Dygert', 'Kubicek', 'Matsumura', 'Shiroma', 'Smiddy', 'Szilagyi', 'Winkleman', 'Zentz', 'Niehoff', 'Boedeker', 'Dimmitt', 'Trew', 'Wilner', 'Traughber', 'Bardales', 'Borbon', 'Bramhall', 'Crofoot', 'Desilets', 'Disch', 'Kehrer', 'Leffingwell', 'Olalde', 'Wawrzyniak', 'Jagodzinski', 'Schwerin', 'Heiney', 'Hirano', 'Rueter', 'Sarris', 'Magnan', 'Rigsbee', 'Blay', 'Edgeworth', 'Hafford', 'Legrande', 'Netter', 'Dulac', 'Etherington', 'Gaede', 'Matranga', 'Misch', 'Gryder', 'Kolman', 'Reyer', 'Landsman', 'Huppert', 'Steagall', 'Heims', 'Baldini', 'Breithaupt', 'Claypoole', 'Feuer', 'Heishman', 'Pallotta', 'Sponaugle', 'Pershing', 'Spaid', 'Salt', 'Giger', 'Whetsel', 'Balaban', 'Baus', 'Croke', 'Heimer', 'Milnes', 'Onstott', 'Wagman', 'Magro', 'Havlik', 'Menge', 'Talmage', 'Aungst', 'Dichiara', 'Kuhr', 'Milstein', 'Sinatra', 'Speiser', 'Vise', 'Panther', 'Phair', 'Commons', 'Mincy', 'Ashline', 'Eagen', 'Enns', 'Epler', 'Giltner', 'Rexroat', 'Schein', 'Wellner', 'Wickert', 'Ardito', 'Ihrig', 'Schuerman', 'Wentland', 'Wohlford', 'Stoy', 'Kohan', 'Ratley', 'Hazell', 'Coppin', 'Blackshire', 'Coolbaugh', 'Essman', 'Gandee', 'Moccia', 'Mullarkey', 'Sugrue', 'Woomer', 'Arriaza', 'Pipitone', 'Heart', 'Prothro', 'Connaughton', 'Covelli', 'Lunger', 'Mcilroy', 'Morataya', 'Swedberg', 'Trembley', 'Wiederhold', 'Zappia', 'Perret', 'Glander', 'Snedden', 'Stonestreet', 'Archey', 'Arbour', 'Cordaro', 'Diskin', 'Dumlao', 'Fravel', 'Spagnuolo', 'Derossett', 'Grigorian', 'Mercadante', 'Harcourt', 'Norgaard', 'Terhaar', 'Touch', 'Mccubbins', 'Tadros', 'Zabriskie', 'Fontanilla', 'Ruse', 'Springsteen', 'Getter', 'Berrian', 'Louissaint', 'Cobbins', 'Dorney', 'Kugel', 'Luth', 'Poffenberger', 'Sidoti', 'Steinfeld', 'Poley', 'Dreger', 'Ertl', 'Capper', 'Laswell', 'Spragg', 'Coltrane', 'Winborne', 'Langhorne', 'Fambro', 'Berkebile', 'Bosserman', 'Cygan', 'Debonis', 'Munsch', 'Pflug', 'Skowron', 'Ediger', 'Bosler', 'Morden', 'Virtue', 'Orso', 'Claire', 'Damas', 'Eichenlaub', 'Gatchell', 'Mikus', 'Tjaden', 'Tremper', 'Tusing', 'Longest', 'Baires', 'Dobos', 'Deforge', 'Kawa', 'Hodder', 'Thornell', 'Mcgarrity', 'Gotcher', 'Judah', 'Busey', 'Perrier', 'Hawthorn', 'Captain', 'Costlow', 'Frohlich', 'Gulla', 'Hildebrant', 'Hilgendorf', 'Ramachandran', 'Reaume', 'Vollrath', 'Lambertson', 'Wyer', 'Coit', 'Dietsch', 'Struve', 'Vicario', 'Ahlberg', 'Warshaw', 'Ryon', 'Evatt', 'Mobbs', 'Gartin', 'Kenley', 'Marcell', 'Bumpers', 'Jans', 'Karczewski', 'Mazurkiewicz', 'Nadolny', 'Verrill', 'Sitter', 'Freyer', 'Hindle', 'Hergert', 'Inda', 'Magwood', 'Basa', 'Covello', 'Pacini', 'Ruoff', 'Schenker', 'Zwicker', 'Popovic', 'Augustyn', 'Sutera', 'Almy', 'Keisler', 'Vowels', 'Lemond', 'Abila', 'Beardslee', 'Benvenuto', 'Deschaine', 'Hodel', 'Turbyfill', 'Vejar', 'Iddings', 'Labrada', 'Bowne', 'Seel', 'Stretch', 'Haswell', 'Rickerson', 'Speas', 'Southward', 'Tony', 'Burrier', 'Casco', 'Lorch', 'Pietrowski', 'Rabbitt', 'Sefcik', 'Trenary', 'Trisler', 'Zarazua', 'Kube', 'Riera', 'Stmarie', 'Starns', 'Carmel', 'Shire', 'Britto', 'Lacks', 'Cifelli', 'Dusenberry', 'Lusher', 'Mattioli', 'Quiring', 'Regner', 'Shetty', 'Stober', 'Winemiller', 'Zinke', 'Heffington', 'Santelli', 'Figeroa', 'Dishon', 'Doble', 'Canino', 'Tahir', 'Stamant', 'Sharpton', 'Sancho', 'Linzy', 'Ba', 'Bonebrake', 'Frenkel', 'Irion', 'Marines', 'Lacava', 'Drennon', 'Fallen', 'Whiten', 'Bielawski', 'Brasch', 'Eichorn', 'Gattuso', 'Neis', 'Tkach', 'Usrey', 'Walkowiak', 'Dorame', 'Orem', 'Crombie', 'Lowes', 'Truscott', 'Marlette', 'Bushell', 'Gosa', 'Hillary', 'Byfield', 'Engdahl', 'Ganser', 'Hollars', 'Lambros', 'Matzen', 'Moldovan', 'Najarian', 'Schoff', 'Soo', 'Spargo', 'Wierenga', 'Maysonet', 'Dewan', 'Bardo', 'Figgs', 'Bostian', 'Graser', 'Pecor', 'Rodrigo', 'Spilker', 'Suen', 'Nafziger', 'Khouri', 'Milling', 'Benke', 'Chapdelaine', 'Darwish', 'Merrigan', 'Narayanan', 'Neuner', 'Wallman', 'Caracciolo', 'Uren', 'Borge', 'Garside', 'Veasley', 'Arquette', 'Gastineau', 'Helbling', 'Maggiore', 'Prell', 'Vangelder', 'Giaquinto', 'Macha', 'Jonsson', 'Febus', 'Lady', 'Hughson', 'Wickliffe', 'Archila', 'Bearce', 'Harstad', 'Krein', 'Kulesza', 'Levitan', 'Nakasone', 'Saraceno', 'Stankus', 'Shelden', 'Hopping', 'Diab', 'Agar', 'Mcpike', 'Betterton', 'Buzbee', 'Dieguez', 'Lins', 'Phuong', 'Pinegar', 'Postel', 'Beatrice', 'Biddy', 'Over', 'Riding', 'Rials', 'Rance', 'Simington', 'Degraffenreid', 'Sherard', 'Clum', 'Harkin', 'Mallen', 'Messerschmidt', 'Patz', 'Shatzer', 'Stetz', 'Beckert', 'Worm', 'Belmontes', 'Narron', 'Lyne', 'Mckendrick', 'Rester', 'Archbold', 'Whorley', 'Monts', 'Crapo', 'Gribbin', 'Lamborn', 'Leverenz', 'Mccarville', 'Nishida', 'Ryberg', 'Smeal', 'Piontek', 'Routhier', 'Willmon', 'Proffit', 'Sharrock', 'Gasque', 'Minott', 'Corpening', 'Capizzi', 'Dubuc', 'Gurevich', 'Hohenstein', 'Kotch', 'Peper', 'Rehbein', 'Stortz', 'Corvin', 'Savant', 'Ryle', 'Madere', 'Firmin', 'Bitterman', 'Bruso', 'Guzzi', 'Hefty', 'Almada', 'Mcninch', 'Mangin', 'On', 'Hardage', 'Garson', 'Hisle', 'Dease', 'Critelli', 'Digennaro', 'Ehle', 'Freestone', 'Grieb', 'Haubert', 'Kelsay', 'Loughman', 'Neth', 'Pen', 'Ranta', 'Sater', 'Tomei', 'Castiglia', 'Kosek', 'Zentner', 'Nowland', 'Klinedinst', 'Karls', 'Charon', 'Cart', 'Umphrey', 'Laramore', 'Mckenny', 'Hamler', 'Stoudemire', 'Diercks', 'Hodzic', 'Huntzinger', 'Runde', 'Scavone', 'Halbach', 'Banales', 'Thiry', 'Waterfield', 'Bebee', 'Dass', 'Caughman', 'Admire', 'Attebery', 'Faubion', 'Friess', 'Goldsworthy', 'Raburn', 'Vantine', 'Newswanger', 'Manhart', 'Grecco', 'Meany', 'Rumpf', 'Dunlevy', 'Franceschi', 'Romanski', 'Alwine', 'Cahall', 'Czaja', 'Krawiec', 'Mikolajczyk', 'Neyman', 'Perrotti', 'Weideman', 'Coppa', 'Ingerson', 'Avena', 'Crunk', 'Cadenhead', 'Gittings', 'Gloss', 'Trowell', 'Denard', 'Funchess', 'Kinnamon', 'Mailhot', 'Mollohan', 'Polacek', 'Pozos', 'Rempe', 'Schutter', 'Shimkus', 'Bedrosian', 'Beede', 'Conry', 'Legan', 'Pickford', 'Chamblin', 'Depinto', 'Geibel', 'Gilpatrick', 'Hashmi', 'Hermsen', 'Petruzzi', 'Robben', 'Sorkin', 'Gambardella', 'Podgorski', 'Langenfeld', 'Yanke', 'Zipperer', 'Tillson', 'Ariola', 'Kelman', 'Hert', 'Fearn', 'Goods', 'Cervenka', 'Kreft', 'Kreidler', 'Kuhar', 'Leffew', 'Maziarz', 'Vollmar', 'Zmuda', 'Eisenhower', 'Yelle', 'Bhagat', 'Kirst', 'Gilkerson', 'Kindel', 'Argyle', 'Bedingfield', 'Manney', 'Guion', 'Rencher', 'Plater', 'Beitzel', 'Camero', 'Delaluz', 'Fennelly', 'Keenum', 'Kingrey', 'Mckillop', 'Munyon', 'Rorick', 'Schrimsher', 'Sohl', 'Torbett', 'Lynde', 'Reiland', 'Shepley', 'Cudney', 'Cather', 'Abed', 'Holen', 'Jobson', 'Husbands', 'Marc', 'Blatz', 'Feucht', 'Gunkel', 'Margolin', 'Messerly', 'Womer', 'Teston', 'Ditch', 'Marta', 'Osier', 'Awan', 'Marcella', 'Silvester', 'Baugus', 'Wilcoxon', 'Nowling', 'Torain', 'Badalamenti', 'Bartosh', 'Czajka', 'Savedra', 'Shaker', 'Shambaugh', 'Stapley', 'Goeke', 'Schepers', 'Tyo', 'Rhodus', 'Arencibia', 'Kara', 'Aitchison', 'Parlin', 'Benny', 'Shakespeare', 'Altomare', 'Axe', 'Bednarczyk', 'Feasel', 'Heikkinen', 'Heyl', 'Konecny', 'Montalbo', 'Semones', 'Zuercher', 'Dorrance', 'Gehrig', 'Kretzer', 'Puchalski', 'Asche', 'Astacio', 'Steers', 'Jeanes', 'Bamberg', 'Matthis', 'Maultsby', 'Bunkley', 'Afonso', 'Danielsen', 'Freier', 'Graeff', 'Gutknecht', 'Jansky', 'Lindenberg', 'Macphee', 'Pequeno', 'Petrocelli', 'Petrowski', 'Prete', 'Igoe', 'Demonte', 'Khatib', 'Agin', 'Siddall', 'Mcdill', 'Higginbottom', 'Gallow', 'Inniss', 'Ballman', 'Bieniek', 'Casino', 'Garringer', 'Griese', 'Heritage', 'Zeitz', 'Montanaro', 'Qi', 'Belcastro', 'Brautigam', 'Wakeland', 'Keasler', 'Oglesbee', 'Saye', 'Steppe', 'Cichocki', 'Melgarejo', 'Primavera', 'Rippe', 'Sieger', 'Stutes', 'Tustin', 'Vanloon', 'Konkol', 'Altmann', 'Anderegg', 'Bun', 'Mcduffee', 'Deo', 'Persad', 'Kindell', 'Antillon', 'Ast', 'Kumm', 'Lauricella', 'Minkler', 'Pilch', 'Porreca', 'Shoopman', 'Skeels', 'Chanthavong', 'Hounshell', 'Pitner', 'Space', 'Blackley', 'Groomes', 'Bleeker', 'Duddy', 'Inlow', 'Knabe', 'Lehmkuhl', 'Salais', 'Statz', 'Sundin', 'Woolston', 'Hojnacki', 'Drolet', 'Gallivan', 'Viner', 'Hafley', 'Hollan', 'Phillis', 'Montrose', 'Colclough', 'Coaxum', 'Basel', 'Campoverde', 'Cirelli', 'Delmonico', 'Goh', 'Goyal', 'Hungate', 'Lufkin', 'Passaro', 'Penta', 'Quispe', 'Ovalles', 'Bulkley', 'Show', 'Purington', 'Sockwell', 'Mccluney', 'Asato', 'Buchta', 'Cassara', 'Cesena', 'Empey', 'Fass', 'Gazda', 'Giannetti', 'Giuffre', 'Jahns', 'Jong', 'Ruh', 'Schmieder', 'Sheerin', 'Weinheimer', 'Iwamoto', 'Ouyang', 'Uranga', 'Ranalli', 'Woolum', 'Calabria', 'Arrowsmith', 'Cashen', 'Vogan', 'Giffen', 'Sherk', 'Denner', 'Lanclos', 'Whittlesey', 'Dora', 'Plain', 'Bransford', 'Bradwell', 'Davitt', 'Dehoff', 'Lotito', 'Roell', 'Satterly', 'Stahr', 'Thiem', 'Helberg', 'Vause', 'Willmore', 'Seid', 'Linebarger', 'Geddis', 'Bringhurst', 'Damelio', 'Fetterolf', 'Galban', 'Henkle', 'Kamen', 'Kaneko', 'Kissane', 'Rua', 'Tehrani', 'Tingey', 'Lizardi', 'Strick', 'Halper', 'Striker', 'Amason', 'Lesueur', 'Tatem', 'Bulluck', 'Hobdy', 'Flythe', 'Brookover', 'Fishbein', 'Hartless', 'Snelgrove', 'Weikert', 'Wissman', 'Bourbeau', 'Colclasure', 'Sampley', 'Shubin', 'Rhoda', 'Mcclane', 'Meals', 'Peets', 'Anding', 'Clewis', 'Gaymon', 'Bierly', 'Brockmeyer', 'Burnworth', 'Dierking', 'Patzer', 'Seipel', 'Shieh', 'Pazmino', 'Bailie', 'Ducey', 'Sessler', 'Hornaday', 'Andry', 'Mowatt', 'Charlot', 'Buchholtz', 'Gaulke', 'Gondek', 'Grossmann', 'Hammerschmidt', 'Heinle', 'Huckabay', 'Neathery', 'Vanzile', 'Vossler', 'Schillaci', 'Lem', 'Paff', 'Oja', 'Broker', 'Marlett', 'Innocent', 'Adsit', 'Begg', 'Kocian', 'Maddalena', 'Melamed', 'Mikos', 'Pio', 'Poth', 'Richwine', 'Ruda', 'Sackman', 'Querry', 'Padro', 'Sober', 'Ayscue', 'Puff', 'Hunton', 'Woltz', 'Alsobrook', 'Baskins', 'Daggs', 'Brands', 'Buechel', 'Gonda', 'Haberkorn', 'Hartel', 'Hazeltine', 'Lantrip', 'Leoni', 'Licona', 'Stanke', 'Zwart', 'Aplin', 'Leatham', 'Ace', 'Ganter', 'Bartolomeo', 'Colgrove', 'Halling', 'Hesler', 'Hainline', 'Susi', 'Kroner', 'Sanden', 'Rylander', 'Basaldua', 'Fujiwara', 'Hengst', 'Kapur', 'Kienzle', 'Miao', 'Mutschler', 'Orsi', 'Pais', 'Termini', 'Yamane', 'Zipp', 'Wildey', 'Bauerle', 'Rehn', 'Hipsher', 'Staubin', 'Esquilin', 'Goley', 'Buenaventura', 'Frutos', 'Gaugler', 'Maclellan', 'Mehring', 'Stiers', 'Gearheart', 'Bong', 'Maddocks', 'Canary', 'Urie', 'Skillings', 'Amir', 'Bogus', 'Oakman', 'Barresi', 'Cappelli', 'Clausing', 'Genest', 'Grella', 'Mulherin', 'Roettger', 'Corle', 'Mantel', 'Mody', 'Delapp', 'Dunnington', 'Harvard', 'Berquist', 'Foglia', 'Gilbride', 'Krenek', 'Gagnier', 'Berney', 'Bazzell', 'Selvage', 'Gullette', 'Lavan', 'Gunderman', 'Holaday', 'Horine', 'Salata', 'Slaybaugh', 'Tobia', 'Knick', 'Tinkle', 'Calcaterra', 'Fauth', 'Helmke', 'Margiotta', 'Mejorado', 'Salomone', 'Sevy', 'Suri', 'Vasconcellos', 'Vetrano', 'Flaten', 'Sweetser', 'Logston', 'Varon', 'Allsop', 'Mickler', 'Swails', 'Conejo', 'Derosia', 'Hamre', 'Hanvey', 'Holscher', 'Interiano', 'Kleinberg', 'Kravetz', 'Reinking', 'Schow', 'Schur', 'Vanbrocklin', 'Yinger', 'Zelenka', 'Chagoya', 'Sieben', 'Devora', 'Archambeau', 'Burpee', 'Shamp', 'Stander', 'Weaks', 'Viney', 'Halloway', 'Artiga', 'Clinkenbeard', 'Kenison', 'Loeza', 'Schaap', 'Simoni', 'Frock', 'Galea', 'Graven', 'Brookhart', 'Gurr', 'Mackintosh', 'Arjona', 'Busche', 'Salvi', 'Bedenbaugh', 'Duan', 'Clara', 'Brundidge', 'Akhter', 'Amsler', 'Bolz', 'Bonura', 'Brumbelow', 'Droste', 'Lohmeyer', 'Lorah', 'Louthan', 'Botti', 'Feigenbaum', 'Thon', 'Osbourn', 'Peugh', 'Viau', 'Elsayed', 'Hilyard', 'Coram', 'Alvin', 'Milbourne', 'Hickmon', 'Basu', 'Fasnacht', 'Heathcock', 'Matsui', 'Oyama', 'Stransky', 'Blakesley', 'Antes', 'Flury', 'Lacrosse', 'Lull', 'Clelland', 'Rugh', 'Hamelin', 'Reta', 'Barnet', 'Ballow', 'Pyburn', 'Slayden', 'Freshwater', 'Fomby', 'Bourquin', 'Bowersock', 'Calleros', 'Dallmann', 'Gootee', 'Koelling', 'Parfitt', 'Pruss', 'Tretter', 'Bellini', 'Gulden', 'Pett', 'Mcglasson', 'Yerby', 'Buth', 'Curnow', 'Goller', 'Halderman', 'Kulig', 'Laue', 'Roesner', 'Samra', 'Sorrow', 'Vanbibber', 'Mellin', 'Villacis', 'Hilborn', 'Ditty', 'Vasey', 'Crall', 'Sera', 'Honeywell', 'Blanchet', 'Halim', 'Nevius', 'Ines', 'Stuard', 'Birr', 'Curnutt', 'Deibler', 'Jaster', 'Ouk', 'Poppell', 'Provence', 'Rebman', 'Schmick', 'Terra', 'Zea', 'Hoven', 'Loth', 'Arreaga', 'Cambre', 'Roots', 'Gains', 'Jeancharles', 'Cerritos', 'Ihle', 'Zambito', 'Brueggemann', 'Kluth', 'Schwartzkopf', 'Shott', 'Mcglaughlin', 'Decoster', 'Northam', 'Esau', 'Fling', 'Castile', 'Milledge', 'Desjarlais', 'Laframboise', 'Remigio', 'Rudloff', 'Utecht', 'Enrique', 'Wygant', 'Fairbank', 'Behl', 'Meuse', 'Pyke', 'Fury', 'Chowning', 'Hyndman', 'Donat', 'Nuckles', 'Cartledge', 'Bilal', 'Antonacci', 'Huether', 'Kha', 'Mascia', 'Rothberg', 'Sieck', 'Younes', 'Sassaman', 'Amparan', 'Benesh', 'Faraci', 'Gaber', 'Lehew', 'Belzer', 'Segoviano', 'Teagle', 'Burian', 'Menne', 'Niemeier', 'Old', 'Olenick', 'Takemoto', 'Tepe', 'Test', 'Zahler', 'Matsuoka', 'Hopf', 'Misenheimer', 'Mings', 'Hullett', 'Beutel', 'Criscuolo', 'Fedak', 'Holtkamp', 'Kretschmer', 'Mongillo', 'Mulrooney', 'Panganiban', 'Pollick', 'Sgroi', 'Shirkey', 'Stodola', 'Tozier', 'Weidler', 'Puskar', 'Fiorello', 'Stille', 'Pomales', 'Gladding', 'Griffie', 'Warmack', 'Uzzell', 'Stennis', 'Buttrey', 'Ekberg', 'Harmsen', 'Lieske', 'Madriz', 'Mohs', 'Reininger', 'Edgin', 'Galla', 'Chattin', 'Frayer', 'Brents', 'Lasker', 'Angelone', 'Boulter', 'Burritt', 'Choudhry', 'Claffey', 'Elizarraras', 'Gaumer', 'Gawronski', 'Henwood', 'Lapine', 'Bitar', 'Himel', 'Almand', 'Brase', 'Lala', 'Salama', 'Essick', 'Longman', 'Mone', 'Reynard', 'Brackney', 'Cottam', 'Donadio', 'Geesey', 'Laudenslager', 'Mcgilvray', 'Yano', 'Bueche', 'Irey', 'Carneal', 'Tinder', 'Walke', 'Baston', 'Segar', 'Brisbane', 'Venson', 'Arguijo', 'Beitler', 'Burek', 'Burgener', 'Collyer', 'Donlin', 'Duhaime', 'Dworak', 'Frech', 'Kozik', 'Montejo', 'Nhan', 'Quirarte', 'Tram', 'Deshpande', 'Silverthorn', 'Leard', 'Sheller', 'Alphin', 'Boxer', 'Shawn', 'Pinnick', 'Stigler', 'Arpin', 'Falkenberg', 'Gerig', 'Lemonds', 'Salm', 'Sarkis', 'Paprocki', 'Probasco', 'Haithcock', 'Carn', 'Farrish', 'Haliburton', 'Copen', 'Pieri', 'Slaymaker', 'Cardarelli', 'Veneziano', 'Melfi', 'Solley', 'Hymer', 'Pleitez', 'Hinsley', 'Bruen', 'Arita', 'Dreisbach', 'Fichtner', 'Keckler', 'Slaby', 'Tanguma', 'Wiberg', 'Ferrucci', 'Lick', 'Maginnis', 'Quaranta', 'Bera', 'Maybee', 'Hennessee', 'Kerrick', 'Kabir', 'Branscome', 'Fullington', 'Menser', 'Brooking', 'Patridge', 'Gue', 'Gowens', 'Redus', 'Ector', 'Distasio', 'Kissner', 'Prada', 'Sponsler', 'Tempel', 'Wedemeyer', 'Degler', 'Bodenhamer', 'Sherbert', 'Jefferis', 'Belgarde', 'Bevel', 'Figaro', 'Bertino', 'Fabbri', 'Kovacevic', 'Kunst', 'Leja', 'Ruffo', 'Stearman', 'Trickett', 'Zafar', 'Valdivieso', 'Curbelo', 'Mabee', 'Emma', 'Arman', 'Swasey', 'Lyday', 'Muff', 'Rideaux', 'Ahlgren', 'Cobo', 'Hanratty', 'Litwiller', 'Mallonee', 'Glunt', 'Moudy', 'Hickam', 'Mahmud', 'Fate', 'Hemsley', 'Biery', 'Buechner', 'Fragale', 'Hornbaker', 'Lacorte', 'Mateos', 'Mickley', 'Reusch', 'Sabado', 'Schnurr', 'Gasior', 'Konkle', 'Okazaki', 'Doubleday', 'Couvillion', 'Lupien', 'Oder', 'Ohair', 'Win', 'Quaintance', 'Diltz', 'Poythress', 'Percell', 'Weatherall', 'Ainslie', 'Brandner', 'Byrge', 'Cawood', 'Heatwole', 'Kerschner', 'Looker', 'Racz', 'Skirvin', 'Steitz', 'Svenson', 'Vermette', 'Zupancic', 'Monnier', 'Scafidi', 'Trousdale', 'Bares', 'Costantini', 'Frees', 'Kallio', 'Methvin', 'Prudencio', 'Hayse', 'Mahabir', 'Wafford', 'Borgmann', 'Cogley', 'Gigante', 'Kurkowski', 'Lavoy', 'Wertheimer', 'Wienke', 'Goodling', 'Danek', 'Brinley', 'Charlson', 'Whitsell', 'Lowen', 'Minnix', 'Lowers', 'Palin', 'Burgher', 'Lorick', 'Sobers', 'Gavigan', 'Italiano', 'Liebl', 'Prevette', 'Wehunt', 'Radin', 'Guillotte', 'Mode', 'Halfacre', 'Stjames', 'Isabelle', 'Meggs', 'Burkard', 'Giannotti', 'Justo', 'Kasprzyk', 'Kuba', 'Mino', 'Morganti', 'Schnelle', 'Serfass', 'Yacoub', 'Thode', 'Wykoff', 'Macbeth', 'Oxner', 'Mayhue', 'Saulter', 'Budnik', 'Gandarilla', 'Michalec', 'Eisel', 'Newmark', 'Placido', 'Bellar', 'Dollarhide', 'Huett', 'Copher', 'Lacaze', 'Dominic', 'Bibler', 'Boydstun', 'Faas', 'Grana', 'Guardino', 'Illig', 'Luebbert', 'Lyford', 'Mcgettigan', 'Repko', 'Widmann', 'Trevathan', 'Ewan', 'Mcray', 'Footman', 'Kerchner', 'Leggio', 'Bullinger', 'Rushford', 'Edel', 'Leandro', 'Burkman', 'Grattan', 'Tench', 'Dartez', 'Lemar', 'Fane', 'Zenon', 'Sabb', 'Blatchford', 'Chilcoat', 'Hahne', 'Hanssen', 'Mawhinney', 'Pflueger', 'Pol', 'Vitelli', 'Brierley', 'Zundel', 'Mcgillicuddy', 'Adriano', 'Mate', 'Wilkison', 'Ramnarine', 'Peaks', 'Bacote', 'Barretto', 'Benevento', 'Gubler', 'Koelsch', 'Naas', 'Patane', 'Schnitzler', 'Sprenkle', 'Ulbrich', 'Violante', 'Rench', 'Najarro', 'Kristensen', 'Poma', 'Sara', 'Jerrell', 'Sarratt', 'Mondy', 'Antill', 'Belleville', 'Dworkin', 'Holdaway', 'Lenderman', 'Murga', 'Reiling', 'Stasko', 'Topel', 'Verity', 'Vinas', 'Ziebarth', 'Vanguilder', 'Stoots', 'Yantis', 'Faries', 'Tulley', 'Baucum', 'Fugett', 'Harring', 'Semien', 'Dauphinais', 'Furukawa', 'Grilli', 'Ohanian', 'Ormiston', 'Osegueda', 'Wiegert', 'Zier', 'Chiesa', 'Radecki', 'Mongeon', 'Stake', 'Sweetland', 'Shearon', 'Lamore', 'Mccuiston', 'Minson', 'Burditt', 'Mcferren', 'Covin', 'Straker', 'Elzy', 'Althaus', 'Anzures', 'Glaeser', 'Huseby', 'Nitta', 'Ribaudo', 'Sobota', 'Spieker', 'Stefaniak', 'Valois', 'Vanwie', 'Venturini', 'Beltre', 'Ewer', 'Hartt', 'Keaney', 'Throne', 'Edrington', 'Inmon', 'Isabel', 'Brayman', 'Devilbiss', 'Krasner', 'Malak', 'Tito', 'Vermeer', 'Benigno', 'Bosque', 'Berridge', 'Clines', 'Brite', 'Mcbeath', 'Gleaves', 'Koenen', 'Kubicki', 'Kudla', 'Seiple', 'Warkentin', 'Choiniere', 'Nassif', 'Banko', 'Muncie', 'Garling', 'Causby', 'Mcgaw', 'Burkeen', 'Balan', 'Georgia', 'Hick', 'Tumblin', 'Badon', 'Warrior', 'Yearby', 'Hiestand', 'Hughart', 'Proffer', 'Sult', 'Yepes', 'Zachman', 'Beddow', 'Molyneux', 'Camejo', 'Stephany', 'Cadogan', 'Gosha', 'Goodwine', 'Harewood', 'Burnsed', 'Frappier', 'Minardi', 'Rieser', 'Tabbert', 'Marietta', 'Butch', 'Steil', 'Canal', 'Brundige', 'Comas', 'Hopkinson', 'Shomo', 'Kendle', 'Bowsher', 'Illingworth', 'Kampa', 'Manasco', 'Mcdorman', 'Theurer', 'Widger', 'Carbonneau', 'Stachura', 'Eriksson', 'Trostle', 'Foxworthy', 'Lex', 'Belman', 'Isola', 'Mckane', 'Gearing', 'Rimes', 'Couillard', 'Emanuele', 'Pho', 'Scimeca', 'Skaar', 'Vibbert', 'Bilby', 'Hink', 'Gohn', 'Nguy', 'Perrett', 'Bowland', 'Comes', 'Moffet', 'Pauline', 'Donalson', 'Tilman', 'Hansberry', 'Acedo', 'Camarda', 'Devivo', 'Eurich', 'Jojola', 'Railsback', 'Rumfelt', 'Stastny', 'Strittmatter', 'Houseknecht', 'Rynearson', 'Weinrich', 'Kinghorn', 'Astin', 'Aguillard', 'Hameed', 'Drone', 'Lonon', 'Burgio', 'Klimas', 'Riegler', 'Schiano', 'Slonaker', 'Deery', 'Weissinger', 'Cea', 'Grenz', 'Arent', 'Sopher', 'Jarratt', 'Mitchener', 'Conigliaro', 'Dohm', 'Feenstra', 'Meiers', 'Hetland', 'Kinsinger', 'Kmiec', 'Teich', 'Fukushima', 'Kerins', 'Cienfuegos', 'Orlandi', 'Bonser', 'Okun', 'Coate', 'Rittenberry', 'Mcclaine', 'Dunklin', 'Citizen', 'Danzy', 'Geers', 'Georgeson', 'Kikuchi', 'Macinnis', 'Malizia', 'Mukai', 'Plants', 'Ehmann', 'Haren', 'Lachney', 'Duchesne', 'Collinson', 'Connett', 'Hostler', 'Farnell', 'Osler', 'Triche', 'Ballweg', 'Bansal', 'Galo', 'Hollabaugh', 'Hultquist', 'Mcbrien', 'Pelz', 'Picciano', 'Tashjian', 'Thresher', 'Uphoff', 'Shawley', 'Tomasek', 'Aldaz', 'Harig', 'Kullman', 'Vaness', 'Isabella', 'Munley', 'Bissette', 'Thackston', 'Borgia', 'Camire', 'Charters', 'Feiler', 'Geisinger', 'Racca', 'Rasmusson', 'Stonesifer', 'Vidmar', 'Arciga', 'Bialek', 'Baruch', 'Kornfeld', 'Harmeyer', 'Picon', 'Suppa', 'Strate', 'Hyre', 'Verdon', 'Reily', 'Castell', 'Foard', 'Exner', 'Furnari', 'Guereca', 'Hallgren', 'Holsclaw', 'Ketelsen', 'Magnani', 'Mehling', 'Naser', 'Seder', 'Sparr', 'Strnad', 'Tatar', 'Crecelius', 'Knicely', 'Vantassell', 'Balsley', 'Babbs', 'Gowans', 'Mcclam', 'Batdorf', 'Belsky', 'Gull', 'Letizia', 'Ludlum', 'Mascari', 'Scheffel', 'Spurgin', 'Dignan', 'Steffensen', 'Freeberg', 'Honan', 'Hamric', 'Woolman', 'Valeri', 'Saab', 'Boyers', 'Pardon', 'Deasy', 'Forshey', 'Juntunen', 'Kamel', 'Macisaac', 'Marinaro', 'Milroy', 'Parillo', 'Rappold', 'Schippers', 'Smola', 'Staniszewski', 'Strasburg', 'Epple', 'Dewitte', 'Hubley', 'Queener', 'Stoddart', 'Briant', 'Mcclurkin', 'Binkowski', 'Eberts', 'Kilbane', 'Kiraly', 'Monsalve', 'Othman', 'Pasek', 'Rinke', 'Steinbrecher', 'Trees', 'Winther', 'Boal', 'Eber', 'Funez', 'Harryman', 'Boyter', 'Rill', 'Jolliffe', 'Dorian', 'Demore', 'Sebree', 'Jeff', 'Jolivette', 'Elko', 'Jividen', 'Lenzen', 'Marsee', 'Milbrandt', 'Orihuela', 'Osterhoudt', 'Parras', 'Schnepp', 'Tenaglia', 'Thoren', 'Diosdado', 'Pingree', 'Rutigliano', 'Filbert', 'Babel', 'Stollings', 'Hopes', 'Bynes', 'Brockmann', 'Carta', 'Deleeuw', 'Demo', 'Margeson', 'Mckitrick', 'Reyez', 'Sidor', 'Strehlow', 'Timlin', 'Wegrzyn', 'Burgdorf', 'Benzing', 'Bonneville', 'Clonts', 'Camps', 'Graydon', 'Pasha', 'Andreoli', 'Cockerill', 'Covino', 'Hajjar', 'Korpi', 'Pohlmann', 'Wente', 'Wickwire', 'Schaber', 'Vonderhaar', 'Manser', 'Fitton', 'Galindez', 'Ares', 'Longmore', 'Buchert', 'Delisi', 'Gaulin', 'Genco', 'Helgerson', 'Khawaja', 'Radosevich', 'Sannicolas', 'Sterk', 'Theberge', 'Voiles', 'Warchol', 'Potthoff', 'Runkel', 'Stachowski', 'Snay', 'Share', 'Conkey', 'Pontes', 'Mathies', 'Brittian', 'Allgeier', 'Daughenbaugh', 'Glock', 'Meisinger', 'Pantaleo', 'Saitta', 'Weick', 'Burak', 'Borda', 'Rim', 'Bunyard', 'Neaves', 'Mcilvaine', 'Zee', 'Buskey', 'Roseborough', 'Bellin', 'Fasulo', 'Grab', 'Jia', 'Knab', 'Skalski', 'Stensland', 'Zajicek', 'Echeverry', 'Kolenda', 'Cadden', 'Delawder', 'Propp', 'Scheeler', 'Clukey', 'Loven', 'Bogen', 'Whittingham', 'Barcelona', 'Braasch', 'Haubrich', 'Kolberg', 'Vendetti', 'Sheesley', 'Bartoli', 'Knierim', 'Amparo', 'Lauth', 'Rosero', 'Burry', 'Guynes', 'Cumbo', 'Pridgeon', 'Aarons', 'Alarid', 'Arakawa', 'Benzel', 'Bywater', 'Grosch', 'Heth', 'Logiudice', 'Maisel', 'Morquecho', 'Wahlberg', 'Teigen', 'Bockelman', 'Rehak', 'Bitler', 'Brion', 'Niece', 'Selvey', 'Sudderth', 'Ruddock', 'Sandiford', 'Aguas', 'Folan', 'Herwig', 'Krupinski', 'Mccarrick', 'Mudgett', 'Pancake', 'Redner', 'Wentzell', 'Soliday', 'Marschall', 'Krakowski', 'Rebholz', 'Dold', 'Giller', 'Gassett', 'Brazzell', 'Bellow', 'Tolen', 'Gloster', 'Gagliardo', 'Harbuck', 'Lorber', 'Natarajan', 'Sarna', 'Schrack', 'Vena', 'Witzke', 'Minassian', 'Loi', 'Rogue', 'Trace', 'Bomba', 'Cozzens', 'Evett', 'Boze', 'Petros', 'Cotta', 'Eisenmann', 'Florea', 'Hammersley', 'Keohane', 'Necessary', 'Nodine', 'Pekarek', 'Sjogren', 'Ruybal', 'Arabie', 'Huntsinger', 'Eiseman', 'Mehler', 'Craner', 'Vandine', 'Gaffey', 'Menna', 'Royle', 'Cordrey', 'Gala', 'Gauss', 'Dacruz', 'Cardell', 'Devan', 'Calmes', 'Humber', 'Stoute', 'Balko', 'Cera', 'Griesbach', 'Kissick', 'Kloos', 'Oertel', 'Sedlock', 'Stellato', 'Tuite', 'Bero', 'Rinard', 'Dambra', 'Cinelli', 'Tea', 'Hicken', 'Linch', 'Dials', 'Bennefield', 'Hillsman', 'Flemister', 'Alvaro', 'Goranson', 'Henk', 'Ryden', 'Verhagen', 'Wessling', 'Willetts', 'Neidlinger', 'Pereida', 'Lainhart', 'Nemes', 'Rudzinski', 'Sward', 'Rom', 'Rosko', 'Runions', 'Henney', 'Ridgely', 'Tomson', 'Arballo', 'Bohorquez', 'Brixey', 'Durling', 'Espina', 'Esquivias', 'Nungaray', 'Ovando', 'Zapf', 'Pizza', 'Arel', 'Ballin', 'Heathman', 'Morison', 'Troop', 'Monfort', 'Copland', 'Harriott', 'Mcwhite', 'Amini', 'Cirilo', 'Gassner', 'Gulbranson', 'Kovatch', 'Venne', 'Terriquez', 'Savin', 'Amo', 'Moris', 'Crable', 'Delaughter', 'Greenhouse', 'Eckardt', 'Hendrixson', 'Manansala', 'Mongeau', 'Panko', 'Pichette', 'Sliwa', 'Tabak', 'Determan', 'Freeburg', 'Portell', 'Steller', 'Buffkin', 'Righter', 'Mcguinn', 'Corrie', 'Tatham', 'Smelley', 'Terrel', 'Selmon', 'Blecha', 'Eisler', 'Engelking', 'Goen', 'Krey', 'Mceldowney', 'Plamondon', 'Slovak', 'Sorce', 'Spagnolo', 'Wambold', 'Colborn', 'Englander', 'Monsour', 'Pait', 'Perricone', 'Loveridge', 'Cragg', 'Dies', 'Holsten', 'Dagley', 'Beverley', 'Bayona', 'Cam', 'Chock', 'Coppersmith', 'Donath', 'Guillemette', 'Iannelli', 'Potratz', 'Selander', 'Suk', 'Waldvogel', 'Olberding', 'Giaimo', 'Spoto', 'Crocco', 'Waskiewicz', 'Krizan', 'Vigo', 'Boarman', 'Ron', 'Facer', 'Garlow', 'Filsaime', 'Andersson', 'Demski', 'Derouin', 'Diegel', 'Feria', 'Foth', 'Hertzberg', 'Jillson', 'Kram', 'Mammen', 'Melhorn', 'Monjaras', 'Oslund', 'Petrin', 'Pinho', 'Scheerer', 'Shadden', 'Sitzman', 'Stumbaugh', 'Wengert', 'Gershon', 'Mcelhinney', 'Batterson', 'Macqueen', 'Janas', 'Gladson', 'Aull', 'Wasinger', 'Shemwell', 'Seats', 'Colas', 'Allbee', 'Fithian', 'Fonner', 'Gergen', 'Lubrano', 'Mannarino', 'Piscopo', 'Sydow', 'Werle', 'Aumiller', 'Coplen', 'Dardar', 'Morrisette', 'Mchaney', 'Simes', 'Gillison', 'Emmel', 'Klunk', 'Luber', 'Madeira', 'Schlicht', 'Tremain', 'Cleaveland', 'Boulet', 'Golladay', 'Enck', 'Fera', 'Hammar', 'Hebner', 'Ishee', 'Nanni', 'Palomar', 'Pangborn', 'Rogala', 'Rushlow', 'Wiedman', 'Laber', 'Schoenfelder', 'Sonner', 'Duffer', 'Granier', 'Sawin', 'Dwiggins', 'Jaso', 'Popplewell', 'Loren', 'Ord', 'Dearmon', 'Hammen', 'Misra', 'Reindl', 'Siordia', 'Woodhead', 'Yasuda', 'Dockstader', 'Kobs', 'Tokarski', 'Villers', 'Mase', 'Arrant', 'Hedgpeth', 'Eggleton', 'Frederic', 'Victorian', 'Akerman', 'Balazs', 'Brandau', 'Depietro', 'Dillenbeck', 'Goodnow', 'Larner', 'Mcmurtrie', 'Salameh', 'Swicegood', 'Koshy', 'Stdenis', 'Deakin', 'Izzi', 'Teater', 'Gramm', 'Doig', 'Blacklock', 'Haymore', 'Heggie', 'Kirklin', 'Kassa', 'Ryles', 'Tenner', 'Ndiaye', 'Burrola', 'Faires', 'Grega', 'Krentz', 'Needles', 'Portz', 'Ruedas', 'Sitko', 'Viernes', 'Setter', 'Tricarico', 'Prest', 'Olivar', 'Whitsitt', 'Labossiere', 'Bellomo', 'Burgeson', 'Capriotti', 'Drinnon', 'Gulati', 'Haffey', 'Lasota', 'Laughery', 'Mees', 'Melander', 'Paoletti', 'Petermann', 'Zerby', 'Burhans', 'Lasseigne', 'Vannote', 'Wai', 'Berson', 'Gritton', 'Searl', 'Toller', 'Brackeen', 'Screws', 'Hagens', 'Billingslea', 'Hyppolite', 'Asmussen', 'Bitton', 'Diiorio', 'Grigoryan', 'Hauenstein', 'Krukowski', 'Mulcahey', 'Perras', 'Prak', 'Reitzel', 'Spackman', 'Valenciano', 'Wieck', 'Yeagley', 'Zanetti', 'Goeller', 'Azizi', 'Grise', 'Mogan', 'Traverso', 'Nangle', 'Saladin', 'Hardgrove', 'Osei', 'Fehrenbach', 'Giesbrecht', 'Halas', 'Hetzler', 'Orsak', 'Salaz', 'Surace', 'Whipp', 'Charlebois', 'Stayer', 'Stelmach', 'Hitchings', 'Senters', 'Mcnaught', 'Cordier', 'Dawsey', 'Barhorst', 'Clauser', 'Dibernardo', 'Hawkey', 'Hritz', 'Patchin', 'Raatz', 'Seubert', 'Slingerland', 'Vanderwoude', 'Aquilino', 'Goertzen', 'Navratil', 'Mccuistion', 'Vallin', 'Moors', 'Connely', 'Fedrick', 'Bontempo', 'Dishong', 'Felch', 'Laino', 'Minshall', 'Montroy', 'Plotts', 'Radice', 'Sachse', 'Safran', 'Schecter', 'Traut', 'Vasile', 'Yadon', 'Gorka', 'Roelofs', 'Suit', 'Asbill', 'Torrens', 'Kimmey', 'Ruger', 'Vinzant', 'Watkin', 'Rawles', 'Cubero', 'Duch', 'Endress', 'Fangman', 'Holben', 'Holzapfel', 'Karner', 'Otteson', 'Stangel', 'Terrebonne', 'Wagley', 'Wisecup', 'Bengston', 'Leck', 'Coalson', 'Farooq', 'Safi', 'Smyers', 'All', 'Else', 'Wason', 'Nairn', 'Panton', 'Ahrendt', 'Arvizo', 'Klahn', 'Robak', 'Schier', 'Start', 'Tiano', 'Kraatz', 'Corzo', 'Maranto', 'Elm', 'Eagles', 'Acres', 'Schoolfield', 'Ancrum', 'Ahner', 'Augsburger', 'Berna', 'Danh', 'Fruth', 'Galluzzo', 'Racette', 'Selva', 'Szekely', 'Zirbel', 'Hauff', 'Markgraf', 'Wonderly', 'Rydell', 'Julia', 'Chris', 'Simson', 'Bridgeford', 'Jeffress', 'Brailsford', 'Bluford', 'Boser', 'Fichera', 'Meininger', 'Meyerhoff', 'Modzelewski', 'Niese', 'Pavlovich', 'Radovich', 'Ratz', 'Frankowski', 'Berti', 'Geno', 'Fares', 'Marney', 'Harwick', 'Tata', 'Bobby', 'Dobbin', 'Roosevelt', 'Greenaway', 'Janvier', 'Oatis', 'Beilke', 'Brelsford', 'Dowty', 'Giudice', 'Hetzer', 'Imboden', 'Irelan', 'Nie', 'Ramberg', 'Rega', 'Sproat', 'Sytsma', 'Unrein', 'Davignon', 'Ganoe', 'Leinweber', 'Mantell', 'Troisi', 'Sahr', 'Esperanza', 'Asper', 'Lathem', 'Eagleton', 'Lamons', 'Gaulden', 'Bloodgood', 'Cerone', 'Claro', 'Durfey', 'Enamorado', 'Herrada', 'Maw', 'Schlagel', 'Signor', 'Reisch', 'Gruenwald', 'Helbert', 'Lorenzi', 'Woodlief', 'Huval', 'Batman', 'Meadow', 'Croswell', 'Bordonaro', 'Earnshaw', 'Freiburger', 'Gunnoe', 'Lamberton', 'Martella', 'Mischke', 'Shelor', 'Venuti', 'Bilek', 'Mcmains', 'Balding', 'Mestre', 'Mcconnaughey', 'Manso', 'Decoste', 'Egerton', 'Alvino', 'Arizpe', 'Blaschke', 'Foglesong', 'Heyn', 'Irigoyen', 'Komorowski', 'Lesinski', 'Nghiem', 'Rund', 'Santiesteban', 'Strahm', 'Hendel', 'Capes', 'Carls', 'Bon', 'Sires', 'Nichelson', 'Brimm', 'Aikins', 'Berra', 'Brazee', 'Burkert', 'Capalbo', 'Criscione', 'Feddersen', 'Hofbauer', 'Jacobowitz', 'Mackowiak', 'Mcenroe', 'Philbeck', 'Shimada', 'Ticknor', 'Wozny', 'Biernacki', 'Hirschi', 'Polich', 'Sokoloski', 'Dolores', 'Knoch', 'Ge', 'Groome', 'Markell', 'Fearing', 'Mcclaren', 'Hadsell', 'Rumple', 'Samudio', 'Scardina', 'Spinosa', 'Abramov', 'Siracusa', 'Goren', 'Rocchio', 'Bibi', 'Lamer', 'Liddy', 'Anna', 'Coxe', 'De', 'Rodes', 'Cheshier', 'Coulon', 'Closs', 'Tigue', 'Seville', 'Hopkin', 'Rodwell', 'Bibbins', 'Baldree', 'Bawden', 'Bishoff', 'Costabile', 'Dec', 'Hillegass', 'Infantino', 'Mantia', 'Mcamis', 'Northcott', 'Ruprecht', 'Sanpedro', 'Campione', 'Muchow', 'Ostby', 'Mohl', 'Pulice', 'Vigna', 'Thomann', 'Lillibridge', 'Manville', 'Vives', 'Bellanger', 'Desormeaux', 'Lovingood', 'Stjulien', 'Echeverri', 'Florey', 'Gieseke', 'Maeder', 'Marcinko', 'Nuncio', 'Quirino', 'Versteeg', 'Voelkel', 'Wanless', 'Morocho', 'Monteagudo', 'Aikin', 'Bramley', 'Bartleson', 'Skeete', 'Batra', 'Dolloff', 'Gehr', 'Hellyer', 'Hersch', 'Hier', 'Lannan', 'Reffitt', 'Carboni', 'Schouten', 'Burkle', 'Riches', 'Busa', 'Rademaker', 'Hult', 'Synder', 'Bossard', 'Tunis', 'Pamplin', 'Oats', 'Mcphaul', 'Baik', 'Kieser', 'Pareja', 'Raffaele', 'Erhard', 'Iwasaki', 'Tonelli', 'Mabey', 'Debruyn', 'Carrel', 'Myron', 'Arai', 'Vallo', 'Points', 'Buteau', 'Becknell', 'Lue', 'Antos', 'Folkers', 'Galletta', 'Hissong', 'Knoche', 'Kundert', 'Larussa', 'Lobos', 'Poitra', 'Rinn', 'Seamons', 'Senko', 'Villaverde', 'Weatherholt', 'Maliszewski', 'Jurkowski', 'Scism', 'Hallas', 'Collet', 'Capello', 'Lena', 'Popper', 'Aikman', 'Blakes', 'Cadigan', 'Dupler', 'Kazi', 'Masri', 'Matejka', 'Mcgirr', 'Pistone', 'Prenger', 'Ranes', 'Thiemann', 'Voeller', 'Cockman', 'Burtt', 'Looby', 'Bonnie', 'Mcclenny', 'Ridgell', 'Nails', 'Lesane', 'Bertolino', 'Doheny', 'Fechter', 'Holshouser', 'Kierstead', 'Krewson', 'Lanahan', 'Vig', 'Wiswell', 'Freytag', 'Haselden', 'Kuras', 'Navar', 'Raisor', 'Finamore', 'Kipper', 'Morissette', 'Laughton', 'Awe', 'Manier', 'Cumby', 'Cabada', 'Hafen', 'Kojima', 'Massari', 'Mctague', 'Stehr', 'Vandevelde', 'Voong', 'Wisely', 'Girardin', 'Bies', 'Demaris', 'Galles', 'Goldstone', 'Kai', 'Cord', 'Brigance', 'Gomillion', 'Drakes', 'Bartkowiak', 'Chica', 'Draheim', 'Honeyman', 'Klapper', 'Kniffen', 'Knoblock', 'Scherzer', 'Tougas', 'Toyama', 'Urbach', 'Walia', 'Wattenbarger', 'Marz', 'Cesare', 'Miro', 'Kervin', 'Godard', 'Beiter', 'Betcher', 'Evarts', 'Evensen', 'Gaff', 'Griffitts', 'Grunden', 'Hoffart', 'Kroupa', 'Maiers', 'Mckendry', 'Puett', 'Shoe', 'Stermer', 'Wineinger', 'Brocious', 'Chudy', 'Spofford', 'Wessinger', 'Weich', 'Croff', 'Ephraim', 'Sallis', 'Blasco', 'Burningham', 'Buschmann', 'Forget', 'Kulak', 'Panozzo', 'Pierpont', 'Priolo', 'Puhl', 'Ruffolo', 'Voisine', 'Mancinelli', 'Santacroce', 'Vanvalkenburgh', 'Veverka', 'Desena', 'Agner', 'Boron', 'Wheeling', 'Plato', 'Tonge', 'Deibel', 'Herriman', 'Holroyd', 'Huitron', 'Hum', 'Kreamer', 'Lada', 'Lucena', 'Pao', 'Planck', 'Vanroekel', 'Bodell', 'Francia', 'Anastasia', 'Haxton', 'Maile', 'Warning', 'Labeau', 'Pujol', 'Done', 'Minney', 'Hogsett', 'Tayler', 'Delancy', 'Philson', 'Allemand', 'Buhrman', 'Diefenbach', 'Gawel', 'Kovacic', 'Kralik', 'Lazor', 'Mcnemar', 'Warth', 'Glanzer', 'Keep', 'Hochstein', 'Febles', 'Morneau', 'Agostinelli', 'Galeas', 'Landen', 'Lion', 'Attwood', 'Capshaw', 'Willy', 'Dekle', 'Murrill', 'Coby', 'Falvo', 'Kanagy', 'Mihalko', 'Schellenberg', 'Sugimoto', 'Lippard', 'Sardo', 'Suckow', 'Demichele', 'Kath', 'Lappe', 'Lego', 'Schleifer', 'Vold', 'Kingsland', 'Mitch', 'Manlove', 'Cuozzo', 'Dauber', 'Deininger', 'Goldbach', 'Halfmann', 'Kazarian', 'Marksberry', 'Marzec', 'Mcmurphy', 'Oregan', 'Paczkowski', 'Pinsky', 'Poynor', 'Schertz', 'Tetrick', 'Umali', 'Valenza', 'Witherington', 'Kesselring', 'Nylund', 'Cinnamon', 'Rielly', 'Surman', 'Fowle', 'Hains', 'Sharlow', 'Lones', 'Durgan', 'Savory', 'Minger', 'Okon', 'Berends', 'Binning', 'Malina', 'Loeser', 'Marthaler', 'Pacella', 'Vasta', 'Hinerman', 'Goodchild', 'Chuck', 'Linney', 'Beckworth', 'Carrie', 'Lovings', 'Ginyard', 'Bredeson', 'Debiase', 'Gorder', 'Noce', 'Redlin', 'Schwinn', 'Zins', 'Burtner', 'Kosakowski', 'Erler', 'Altom', 'Husman', 'Markos', 'Thorman', 'Fagen', 'Voisin', 'Gauldin', 'Pressey', 'Calbert', 'Holness', 'Alspach', 'Broeker', 'Danziger', 'Klenke', 'Popescu', 'Schoenrock', 'Schreckengost', 'Syme', 'Trick', 'Plautz', 'Beckel', 'Dealmeida', 'Winne', 'Moron', 'Seed', 'Capozzoli', 'Gawron', 'Kobel', 'Kouns', 'Nunemaker', 'Steinbacher', 'Stookey', 'Vidana', 'Zoch', 'Ohlinger', 'Hudkins', 'Ferren', 'Gille', 'Sheckler', 'Kittell', 'Roath', 'Ziglar', 'Brecher', 'Coldren', 'Degraaf', 'Eddinger', 'Joffe', 'Luthy', 'Metzinger', 'Nayak', 'Paule', 'Prudente', 'Wooddell', 'Zuccaro', 'Rineer', 'Soos', 'Manka', 'Vandervoort', 'Kitchell', 'Casserly', 'Watchman', 'Poteete', 'Dopson', 'Mathurin', 'Cataldi', 'Crepeau', 'Fackrell', 'Goben', 'Macinnes', 'Scherf', 'Shaddix', 'Sorber', 'Teichman', 'Wydra', 'Holzworth', 'Baade', 'Tinnell', 'Tinkler', 'Mauzy', 'Alphonse', 'Fullard', 'Adger', 'Akiyama', 'Bloxham', 'Coultas', 'Esler', 'Giebel', 'Goswick', 'Heikes', 'Javed', 'Linan', 'Mooers', 'Nemetz', 'Pradhan', 'Rainone', 'Romito', 'Treichel', 'Vohs', 'Grosskopf', 'Weisinger', 'Ruple', 'Naff', 'Meaders', 'Lamarr', 'Toppin', 'Apicella', 'Beecroft', 'Boshears', 'Breier', 'Cuadros', 'Umbarger', 'Alioto', 'Ravenscroft', 'Vesper', 'Oak', 'Tigges', 'Simmer', 'Hanby', 'Webre', 'Lenk', 'Mcelvain', 'Boy', 'Debarros', 'Hickenbottom', 'Quincy', 'Billips', 'Ollison', 'Barbuto', 'Clearwater', 'Cronkhite', 'Groleau', 'Mehra', 'Tessler', 'Kegel', 'Borenstein', 'Newnam', 'Crofton', 'Phenix', 'Dankert', 'Hymas', 'Lobel', 'Marszalek', 'Moceri', 'Ottaviano', 'Papazian', 'Roedel', 'Jochum', 'Urquidez', 'Lapin', 'Garro', 'Lamond', 'Sessums', 'Tooke', 'Steadham', 'Azam', 'Bleier', 'Buelna', 'Bupp', 'Burridge', 'Derderian', 'Derstine', 'Halberg', 'Katzer', 'Meegan', 'Ortmann', 'Herschberger', 'Sanroman', 'Winiarski', 'Alcon', 'Picker', 'Demille', 'Huron', 'Hankin', 'Dahmen', 'Fronczak', 'Klingman', 'Perugini', 'Pettinato', 'Powelson', 'Saffer', 'Schwenke', 'Pals', 'Estremera', 'Sofia', 'Arvelo', 'Terrero', 'Bankes', 'Sais', 'Netherland', 'Odeh', 'Sutphen', 'Caddy', 'Dorval', 'Glaude', 'Mcadory', 'Eichinger', 'Lesniewski', 'Petito', 'Pfohl', 'Presler', 'Rys', 'Sano', 'Willenborg', 'Seppala', 'Shibley', 'Cajigas', 'Gal', 'Farag', 'Pickles', 'Rump', 'Grills', 'Mikes', 'Adderley', 'Altland', 'Araki', 'Beitz', 'Brotzman', 'Buonocore', 'Fayard', 'Gelber', 'Jurewicz', 'Lezcano', 'Marsteller', 'Minarik', 'Opsahl', 'Pranger', 'Tiburcio', 'Zollo', 'Engh', 'Henault', 'Barrineau', 'Pilkinton', 'Pratte', 'Niland', 'Warda', 'Southwood', 'Clinch', 'Halsell', 'Mccaa', 'Isreal', 'Pinkett', 'Asch', 'Beauchesne', 'Bruemmer', 'Doebler', 'Ehlinger', 'Goelz', 'Hashemi', 'Karel', 'Magiera', 'Martorano', 'Mooneyhan', 'Cibrian', 'Cavey', 'Kosko', 'Christo', 'Cockrill', 'Mansker', 'Balls', 'Degree', 'Tiggs', 'Alberico', 'Clugston', 'Elman', 'Frueh', 'Kampf', 'Kochanski', 'Leider', 'Marsella', 'Mckendree', 'Moffa', 'Quattrocchi', 'Raval', 'Snoke', 'Akopyan', 'Barrilleaux', 'Cambria', 'Kawaguchi', 'Bonde', 'Dawdy', 'Willig', 'Kazee', 'Debow', 'Beachum', 'Vicks', 'Aurelio', 'Barocio', 'Bonesteel', 'Ezzo', 'Gesell', 'Krzeminski', 'Madan', 'Magda', 'Manring', 'Mcfaul', 'Morera', 'Purinton', 'Retzer', 'Schonfeld', 'Staszak', 'Stubbe', 'Talerico', 'Wikoff', 'Zia', 'Seyfried', 'Diangelo', 'Keach', 'Shipton', 'Shewmake', 'Behrmann', 'Hopps', 'Paster', 'Augenstein', 'Castaldi', 'Ferrufino', 'Gregersen', 'Hosseini', 'Keniston', 'Nadolski', 'Ouimette', 'Pellett', 'Riebel', 'Schwark', 'Spelman', 'Tesar', 'Yahn', 'Grossnickle', 'Rosillo', 'Dostie', 'Noa', 'Khalaf', 'Cardosa', 'Afzal', 'Mercure', 'Wheless', 'Tailor', 'Mcgarrah', 'Miler', 'Norfolk', 'Crapps', 'Dansereau', 'Jenney', 'Keast', 'Lieser', 'Mihm', 'Porco', 'Zelinsky', 'Sleeth', 'Mcelreath', 'Hemann', 'Capaldi', 'Huggett', 'Reagle', 'Mayotte', 'Liller', 'Leen', 'Demmer', 'Tunison', 'Woodbridge', 'Haymes', 'Cunning', 'Blaze', 'Eatman', 'Ulysse', 'Bagshaw', 'Buczkowski', 'Cardello', 'Decola', 'Diloreto', 'Evola', 'Glassburn', 'Hazelbaker', 'Holycross', 'Minasian', 'Regula', 'Ruge', 'Uhlman', 'Lamprecht', 'Shifflet', 'Weikle', 'Coupe', 'Isherwood', 'Dimon', 'Pop', 'Willhoite', 'Bari', 'Boise', 'Doom', 'Mccolley', 'Bircher', 'Wannamaker', 'Eppes', 'Pea', 'Okeke', 'Alpizar', 'Arista', 'Barbagallo', 'Baumert', 'Bhattacharya', 'Gheen', 'Hutchcraft', 'Karlen', 'Klier', 'Ladnier', 'Marrujo', 'Reister', 'Rorrer', 'Tarpey', 'Wisecarver', 'Beydoun', 'Fillinger', 'Kemnitz', 'Takata', 'Leight', 'Kross', 'Junco', 'Holmer', 'Sando', 'Biddix', 'Dawood', 'Frisco', 'Flagler', 'Arntz', 'Bache', 'Bundrick', 'Glasson', 'Los', 'Scheiber', 'Shellenbarger', 'Steinmeyer', 'Sura', 'Tanski', 'Teodoro', 'Vanaken', 'Jodoin', 'Klinker', 'Szydlowski', 'Yamashiro', 'Kutch', 'Hoth', 'Edwardson', 'Gess', 'Mohamad', 'Goodine', 'Carolina', 'Blauser', 'Emerich', 'Flook', 'Graul', 'Gribben', 'Herbold', 'Kreutz', 'Lavey', 'Lukacs', 'Maiorana', 'Openshaw', 'Plattner', 'Sauro', 'Schardt', 'Tortorici', 'Wendlandt', 'Danowski', 'Mcnellis', 'Pinkowski', 'Linz', 'Virga', 'Jardin', 'Maclaughlin', 'Rama', 'Deline', 'Kimbel', 'Hagin', 'Pottinger', 'Detmer', 'Ferrone', 'Matthiesen', 'Melchert', 'Ruehl', 'Takach', 'Briese', 'Elmendorf', 'Valentini', 'Hersom', 'Bordeau', 'Linsley', 'Keatts', 'Dina', 'Boye', 'Riviere', 'Stodghill', 'Madry', 'Angelos', 'Bou', 'Ketterling', 'Niemczyk', 'Pardini', 'Rippel', 'Schieffer', 'Schnee', 'Shogren', 'Sholl', 'Ullmann', 'Ure', 'Curless', 'Gonnella', 'Tholen', 'Valladolid', 'Silbernagel', 'Cohrs', 'Shahin', 'Beth', 'Holmen', 'Tippie', 'Opie', 'Sprowl', 'Byam', 'Bethany', 'Saintil', 'Auriemma', 'Blust', 'Dibello', 'Digangi', 'Farnam', 'Farnan', 'Linford', 'Mcgroarty', 'Meisenheimer', 'Pagels', 'Sauber', 'Schwalbe', 'Seemann', 'Slivka', 'Twardowski', 'Wickey', 'Zettler', 'Zuchowski', 'Feldhaus', 'Baldock', 'Cowman', 'Carp', 'Camera', 'Balon', 'Neveu', 'Caminiti', 'Carreira', 'Gura', 'Hershkowitz', 'Killoran', 'Narducci', 'Reigel', 'Saccone', 'Tomasi', 'Wieneke', 'Sibrian', 'Hashem', 'Kellems', 'Stouder', 'Villamar', 'Piette', 'Wand', 'Battey', 'Staunton', 'Bedore', 'Hanel', 'Jutras', 'Kanner', 'Mathiesen', 'Northway', 'Privitera', 'Reichelt', 'Zucco', 'Roys', 'Aderholt', 'Lampson', 'Olen', 'Mcgarr', 'Schools', 'Leaphart', 'Lykes', 'Brightbill', 'Koos', 'Lahue', 'Laplaca', 'Naqvi', 'Novo', 'Puerta', 'Siers', 'Strutz', 'Trimboli', 'Waldie', 'Goold', 'Falke', 'Corter', 'Cartmell', 'Brazel', 'Farabee', 'Majeed', 'Hilden', 'Kealoha', 'Neider', 'Parodi', 'Rizza', 'Rong', 'Silberstein', 'Snellgrove', 'Trojanowski', 'Warneke', 'Wissler', 'Yiu', 'Grein', 'Sak', 'Daines', 'Monzo', 'Emmerson', 'Lorraine', 'Samaroo', 'Edmund', 'Cacace', 'Dornan', 'Eyman', 'Hovanec', 'Jeschke', 'Limberg', 'Maturo', 'Pandey', 'Somoza', 'Streiff', 'Wiemer', 'Zablocki', 'Crace', 'Leinen', 'Rucci', 'Blyth', 'Clemans', 'Magid', 'Ferrick', 'Garriga', 'Martir', 'Tanton', 'Hoon', 'Echard', 'Borrell', 'Howden', 'Gravett', 'Lando', 'Amacher', 'Dalman', 'Hollenbaugh', 'Sigrist', 'Tamashiro', 'Therriault', 'Villafranca', 'Matthys', 'Salois', 'Sforza', 'Swager', 'Borah', 'Sentell', 'Besson', 'Ghani', 'Bilinski', 'Holzinger', 'Kus', 'Lobianco', 'Morawski', 'Perz', 'Sada', 'Wollenberg', 'Yusko', 'Caughron', 'Diffenderfer', 'Slowinski', 'Skiver', 'Galland', 'Hodes', 'Boyne', 'Towry', 'Alers', 'Hellums', 'Certain', 'Megginson', 'Creer', 'Coutee', 'Strothers', 'Stfleur', 'Barga', 'Bina', 'Cellini', 'Digiulio', 'Douma', 'Klement', 'Mccambridge', 'Parmeter', 'Presto', 'Salmi', 'Seabaugh', 'Barreda', 'Nepomuceno', 'Zent', 'Yonce', 'Loreto', 'Honer', 'Conquest', 'Gathings', 'Wims', 'Upshur', 'Aeschliman', 'Casaus', 'Dumke', 'Earlywine', 'Ferreyra', 'Heyne', 'Hudon', 'Kuder', 'Malia', 'Brueckner', 'Luchsinger', 'Ornellas', 'Ramseyer', 'Weidemann', 'Walbert', 'Zola', 'Linquist', 'Storts', 'Dente', 'Lebleu', 'Stockham', 'Rollinson', 'Auzenne', 'Abebe', 'Bartol', 'Cozzolino', 'Der', 'Fata', 'Gorr', 'Janousek', 'Moschella', 'Riedy', 'Dust', 'Malmgren', 'Puterbaugh', 'Sacchetti', 'Lascano', 'Begnaud', 'Duling', 'Porteous', 'Debnam', 'Abron', 'Delehanty', 'Fazenbaker', 'Flener', 'Gora', 'Herter', 'Johann', 'Keiter', 'Lucca', 'Passman', 'Saindon', 'Schoppe', 'Skibinski', 'Stueber', 'Tegeler', 'Jochim', 'Buttner', 'Crilly', 'Swanton', 'Muncey', 'Negrin', 'Thorburn', 'Delpino', 'Kinn', 'Gaiter', 'Obi', 'Hohensee', 'Rollman', 'Scheff', 'Shor', 'Tumbleson', 'Mccrum', 'Knack', 'Llano', 'Saber', 'Rosman', 'Bankson', 'Atkisson', 'Kennel', 'Cammon', 'Bangura', 'Cichy', 'Gillikin', 'Hiltner', 'Lubben', 'Mcqueeney', 'Nasca', 'Nordgren', 'Ostermann', 'Quito', 'Sakowski', 'Schut', 'Stobaugh', 'Alessio', 'Gorelik', 'Heinzman', 'Westrich', 'Nardella', 'Cruzado', 'Lansberry', 'Dubreuil', 'Nylander', 'Rabel', 'Moret', 'Crout', 'Ardrey', 'Rolley', 'Finks', 'Cliett', 'Caito', 'Clingenpeel', 'Delprete', 'Dolen', 'Heidrich', 'Hinrichsen', 'Jindra', 'Madej', 'Panzarella', 'Sandin', 'Seekins', 'Shilts', 'Sokoloff', 'Maggart', 'Pigman', 'Travieso', 'Denbow', 'Dollison', 'Gaye', 'Binette', 'Dutta', 'Grandinetti', 'Kitch', 'Tangeman', 'Finstad', 'Rodkey', 'Servis', 'Tiwari', 'Rodd', 'Parfait', 'Seck', 'Delaurentis', 'Dragan', 'Fleig', 'Giacobbe', 'Hilligoss', 'Kroh', 'Lippe', 'Maleski', 'Perini', 'Rutten', 'Stauss', 'Yoshikawa', 'Dibattista', 'Gilsdorf', 'Riemenschneider', 'Streck', 'Gessler', 'Springstead', 'Zaki', 'Lambie', 'Barczak', 'Ellerbrock', 'Foresman', 'Holstine', 'Lemm', 'Santillana', 'Trautwein', 'Unsworth', 'Valderas', 'Vaquero', 'Vetsch', 'Wadleigh', 'Yonts', 'Mcguiness', 'Auvil', 'Leeder', 'Sprowls', 'Cala', 'Portalatin', 'Casso', 'Chirinos', 'Less', 'Baltzell', 'Bo', 'Whetsell', 'Ledlow', 'Fullbright', 'Arnell', 'Stainback', 'Mcleish', 'Lyn', 'Bermeo', 'Billet', 'Craun', 'Gladwell', 'Goral', 'Herbig', 'Kluver', 'Mermelstein', 'Odette', 'Poggi', 'Schacher', 'Thielman', 'Cianciolo', 'Ferrie', 'Kapusta', 'Kreager', 'Messineo', 'Rovira', 'Stricklen', 'Wansley', 'Amell', 'Baena', 'Depaula', 'Fickett', 'Housewright', 'Kreiger', 'Legate', 'Lutterman', 'Men', 'Pautz', 'Swecker', 'Tantillo', 'Dudeck', 'Bellas', 'Marian', 'Bienvenu', 'Riden', 'Hosein', 'Couser', 'Batterton', 'Desantos', 'Dieterle', 'Drabek', 'Grennan', 'Greulich', 'Ludlam', 'Maltos', 'Marcin', 'Ostertag', 'Rednour', 'Tippetts', 'Updyke', 'Ormsbee', 'Reutter', 'Uyehara', 'Musumeci', 'Antonini', 'Thistle', 'Marcia', 'Renne', 'Jines', 'Dorothy', 'Menter', 'Crosser', 'Ditommaso', 'Glueck', 'Malta', 'Mcgranahan', 'Mensing', 'Ostroff', 'Rota', 'Rothfuss', 'Borcherding', 'Haveman', 'Swallows', 'Heltzel', 'Aloi', 'Stipp', 'Broda', 'Darter', 'Gressett', 'Brasier', 'Lana', 'Crooke', 'Seegers', 'Sirmons', 'Berberian', 'Goers', 'Losch', 'Memon', 'Paternoster', 'Rierson', 'Miyake', 'Barndt', 'Kirstein', 'Azua', 'Zeck', 'Britain', 'Lanman', 'Gorges', 'Clock', 'Alman', 'Callicutt', 'Walford', 'Searight', 'Eakle', 'Federici', 'Hosack', 'Jarecki', 'Kauffmann', 'Maras', 'Nisley', 'Sandahl', 'Shidler', 'Wnek', 'Moneymaker', 'Santander', 'Schneeberger', 'Luviano', 'Gorin', 'Negus', 'Coulston', 'Polin', 'Winslett', 'Anstett', 'Cowsert', 'Dipiazza', 'Fitting', 'Forslund', 'Poquette', 'Tibbets', 'Tomasini', 'Toor', 'Starry', 'Venema', 'Cedano', 'Carro', 'Samons', 'Matty', 'Ellenwood', 'Kilcrease', 'Noblin', 'Decatur', 'Heckard', 'Nard', 'Beighley', 'Delamater', 'Eblen', 'Heninger', 'Kehn', 'Rotunno', 'Uppal', 'Hynek', 'Zenk', 'Brasil', 'Mu', 'Julio', 'Cassar', 'Crisco', 'Oriley', 'Turton', 'Goens', 'Cargo', 'Toure', 'Breitbach', 'Cahalan', 'Chadha', 'Kittinger', 'Marnell', 'Masias', 'Matousek', 'Mittal', 'Nieblas', 'Onan', 'Purdum', 'Tursi', 'Esplin', 'Etsitty', 'Fratto', 'Przybyla', 'Cassin', 'Nitti', 'Arshad', 'Sandoz', 'Walzer', 'Everton', 'Russum', 'Morland', 'Fennel', 'Viel', 'Jarrells', 'Vassell', 'Frigo', 'Kodama', 'Naron', 'Oelke', 'Remaley', 'Shean', 'Cloonan', 'Clayman', 'Lasch', 'Lepard', 'Rewis', 'Vankeuren', 'Lightbody', 'Houseworth', 'Caison', 'Denmon', 'Rauls', 'Sallie', 'Humphery', 'Showell', 'Raysor', 'Angotti', 'Barbero', 'Buxbaum', 'Capella', 'Horsch', 'Kunselman', 'Nishikawa', 'Perotti', 'Sprung', 'Szucs', 'Emch', 'Kotula', 'Mendizabal', 'Yeaman', 'Beste', 'Kader', 'Forker', 'Wiggers', 'Cotham', 'Primo', 'Fetterhoff', 'Giarrusso', 'Glosser', 'Lumbreras', 'Rosano', 'Strohecker', 'Wanek', 'Waycaster', 'Worthley', 'Salasar', 'Boulos', 'Pulsipher', 'Scheider', 'Lorimer', 'Alamilla', 'Zapp', 'Deis', 'Tariq', 'Kasey', 'Famiglietti', 'Flansburg', 'Georgiou', 'Groft', 'Heistand', 'Merker', 'Stoeckel', 'Tackitt', 'Verbeck', 'Weyers', 'Wiltrout', 'Brabec', 'Caligiuri', 'Dudzinski', 'Grieger', 'Benfer', 'Pesta', 'Wool', 'Sunshine', 'Oka', 'Stamour', 'Barrio', 'Mathe', 'Vanduyne', 'Brager', 'Mcphatter', 'Ahluwalia', 'Borys', 'Dreibelbis', 'Kalmbach', 'Karwoski', 'Moomaw', 'Youngren', 'Offerman', 'Nine', 'Symington', 'Branan', 'Turberville', 'Heber', 'Loughridge', 'Vanderberg', 'Mccannon', 'Linda', 'Dupee', 'Cottom', 'Mcphearson', 'Razor', 'Buchwald', 'Fraze', 'Grannis', 'Krolikowski', 'Lapidus', 'Madruga', 'Mcmartin', 'Quinlivan', 'Riaz', 'Spittler', 'Zahm', 'Zender', 'Eisman', 'Hourihan', 'Shirazi', 'Herendeen', 'Perdew', 'Pendell', 'Chernoff', 'Lyell', 'Clarey', 'Macken', 'Guthridge', 'Redditt', 'Bedi', 'Debenedictis', 'Distel', 'Gapinski', 'Iwanski', 'Medici', 'Schmutz', 'Tuel', 'Verburg', 'Galgano', 'Skogen', 'Aymond', 'Raymo', 'Croney', 'Carry', 'Rhynes', 'Lamour', 'Shedrick', 'Tookes', 'Baltierra', 'Leitzel', 'Letchworth', 'Montesino', 'Preis', 'Sanzone', 'Shantz', 'Teo', 'Twohig', 'Wajda', 'Windisch', 'Zinck', 'Fiero', 'Hornby', 'Paget', 'Serano', 'Rodrick', 'Lewison', 'Dyas', 'Delcarmen', 'Garske', 'Hontz', 'Mcquown', 'Melling', 'Rolando', 'Rosencrans', 'Steichen', 'Teeples', 'Forseth', 'Quijas', 'Schraeder', 'Vaidya', 'Ventre', 'Mountjoy', 'Morr', 'Leviner', 'Paulette', 'Dobie', 'Brue', 'Prier', 'Biffle', 'Neyland', 'Valcourt', 'Mckeithen', 'Lemelle', 'Alviar', 'Auth', 'Bahm', 'Bierbaum', 'Cazier', 'Eschbach', 'Etzler', 'Nowlan', 'Sahota', 'Vanaman', 'Zaugg', 'Hogeland', 'Choat', 'Walmer', 'Cepero', 'Michal', 'Foxwell', 'Decoursey', 'Molyneaux', 'Peat', 'Jeanfrancois', 'Arevalos', 'Bachert', 'Beachler', 'Berrones', 'Clavijo', 'Elsen', 'Fuhs', 'Hooven', 'Johannessen', 'Klausner', 'Masso', 'Puzio', 'Sekula', 'Smyser', 'Stepanian', 'Barg', 'Trueman', 'Constante', 'Cubas', 'Dowers', 'Pratts', 'Cockburn', 'Counce', 'Nappier', 'Lindon', 'Burrowes', 'Cokley', 'Tillmon', 'Bao', 'Inks', 'Liberato', 'Moehring', 'Ryker', 'Sar', 'Swartzendruber', 'Torgersen', 'Treto', 'Tungate', 'Ricotta', 'Weesner', 'Willyard', 'Callicoat', 'Hoque', 'Atkison', 'Mcwherter', 'Dubuisson', 'Wanzer', 'Stradford', 'Abruzzo', 'Amerman', 'Bame', 'Bantz', 'Bleakley', 'Galt', 'Hoobler', 'Jaquith', 'Lessman', 'Polinski', 'Rasche', 'Roeber', 'Rubright', 'Sarnowski', 'Signore', 'Solum', 'Vankampen', 'Vath', 'Malmquist', 'Mittelstadt', 'Belyea', 'Haverty', 'Wickett', 'Sansing', 'Yeatman', 'Brocker', 'Wonders', 'Both', 'Rabun', 'Rocke', 'Meachum', 'Blane', 'Lapsley', 'Biswas', 'Derocher', 'Haran', 'Hehn', 'Keshishian', 'Kniffin', 'Lacina', 'Skolnik', 'Spiewak', 'Wileman', 'Eble', 'Kraynak', 'Wiesen', 'Micheli', 'Scroggin', 'Roch', 'Denise', 'Altenburg', 'Hornstein', 'Netto', 'Opel', 'Passey', 'Roeske', 'Schrantz', 'Abrahamsen', 'Powless', 'Callais', 'Desjardin', 'Pirro', 'Yonkers', 'Macallister', 'Dady', 'Ruskin', 'Escott', 'Abbot', 'Sankar', 'Bolar', 'Angelucci', 'Biegel', 'Cirone', 'Damewood', 'Flett', 'Kronenberg', 'Ky', 'Nagler', 'Perlstein', 'Saperstein', 'Tenbrink', 'Vana', 'Wnuk', 'Bonnema', 'Schoenecker', 'Pichler', 'Armendarez', 'Oiler', 'Rouch', 'Boas', 'Laracuente', 'Milbourn', 'Summy', 'Counter', 'Gracie', 'Belfield', 'Bynoe', 'Jalloh', 'Blazier', 'Bochenek', 'Broughman', 'Chuong', 'Cregger', 'Estacio', 'Kaleta', 'Lanctot', 'Mish', 'Novosel', 'Passero', 'Ripplinger', 'Vitt', 'Walborn', 'Friscia', 'Memmott', 'Tripi', 'Weinhold', 'Honn', 'Gianni', 'Poch', 'Sagar', 'Markum', 'Primmer', 'Belmore', 'Rain', 'Bevard', 'Skyles', 'Farland', 'Mccleese', 'Teachey', 'Moulden', 'Antolin', 'Augello', 'Borrayo', 'Effler', 'Hornak', 'Hosman', 'Leingang', 'Limbach', 'Oregel', 'Ritzman', 'Rochefort', 'Schimke', 'Stefanelli', 'Vien', 'Zurn', 'Badolato', 'Bieri', 'Clarkin', 'Folino', 'Kelchner', 'Pote', 'Brahm', 'Hoop', 'Macbride', 'Hunting', 'Brule', 'Wainright', 'Rolison', 'Bennie', 'Banghart', 'Bertke', 'Bozzo', 'Gadomski', 'Granberg', 'Kostecki', 'Lemelin', 'Levengood', 'Puskas', 'Swanstrom', 'Willcutt', 'Deitrich', 'Grieves', 'Ferran', 'Boileau', 'Kendra', 'Trippe', 'Mcconnel', 'Cara', 'Stephans', 'Bachus', 'Applin', 'Utsey', 'Auston', 'Arras', 'Bencosme', 'Berntsen', 'Decarolis', 'Dettloff', 'Duerksen', 'Pavlovic', 'Schwantes', 'Sjostrom', 'Sugiyama', 'Sulak', 'Virani', 'Winberg', 'Yoshimoto', 'Comito', 'Pandolfo', 'Cathers', 'Hardisty', 'Collom', 'Wain', 'Worthing', 'Leep', 'Simo', 'Boom', 'Bald', 'Applegarth', 'Gilbreth', 'Griest', 'Jobin', 'Matsuura', 'Misko', 'Scerbo', 'Scheidler', 'Sterba', 'Tomaino', 'Wixson', 'Yadao', 'Hietpas', 'Gruss', 'Fors', 'Gosse', 'Katt', 'Virk', 'Quebedeaux', 'Barkey', 'Salam', 'Willford', 'Tarry', 'Chancy', 'Beynon', 'Eckes', 'Eischen', 'Felger', 'Kimm', 'Labate', 'Mehan', 'Netzer', 'Strosnider', 'Trezza', 'Vial', 'Waugaman', 'Zieman', 'Ankeny', 'Digman', 'Farino', 'Faro', 'Vasconcelos', 'Nevill', 'Rave', 'Sabine', 'Hagg', 'Weightman', 'Berton', 'Fipps', 'Knapper', 'Camel', 'Gilkes', 'Aldous', 'Delucca', 'Dicke', 'Evitts', 'Hachey', 'Rinck', 'Treese', 'Uher', 'Victorio', 'Vignola', 'Willert', 'Baun', 'Wever', 'Varn', 'Yokum', 'Dunk', 'Maben', 'Arzu', 'Guider', 'Bonhomme', 'Majette', 'Crislip', 'Gresko', 'Luppino', 'Posch', 'Potenza', 'Rial', 'Ruderman', 'Shaff', 'Balboni', 'Solheim', 'Mey', 'Sittig', 'Perman', 'Sumners', 'Deaner', 'Keizer', 'Reves', 'Glanville', 'Menzie', 'Mccowen', 'Steib', 'Portee', 'Azad', 'Dallaire', 'Denno', 'Deptula', 'Fischman', 'Guilbault', 'Imperato', 'Koehne', 'Menning', 'Mirelez', 'Stanislawski', 'Streb', 'Sumida', 'Wolke', 'Kerfoot', 'Pirie', 'Saracino', 'Maslanka', 'Slominski', 'Nienaber', 'Serena', 'Kamper', 'Matheis', 'Westin', 'Ishman', 'Biagi', 'Chiou', 'Dieckmann', 'Frieden', 'Huestis', 'Presutti', 'Ribas', 'Siedlecki', 'Steege', 'Uehara', 'Petrosyan', 'Siebold', 'Turi', 'Rady', 'Vanorman', 'Arif', 'Hiland', 'Naidu', 'Clagett', 'Ludy', 'Bodley', 'Avelino', 'Citro', 'Cuda', 'Derbyshire', 'Kruszewski', 'Kupper', 'Mahl', 'Muratore', 'Noecker', 'Osmer', 'Pasquariello', 'Schlick', 'Snover', 'Strzelecki', 'Studt', 'Sunga', 'Belmares', 'Seifried', 'Urioste', 'Housh', 'Babu', 'Bures', 'Augusto', 'Faddis', 'Pun', 'Chopp', 'Tullock', 'Sea', 'Boisseau', 'Herbin', 'Balcer', 'Copus', 'Eichenberger', 'Enterline', 'Gamarra', 'Gursky', 'Hovsepian', 'Laffin', 'Melena', 'Rappe', 'Soma', 'Spira', 'Spraker', 'Teuscher', 'Hochhalter', 'Brenden', 'Snee', 'Polan', 'Hataway', 'Tirey', 'Cobler', 'Marren', 'Ress', 'Bennis', 'Busha', 'Galler', 'Orea', 'Nailor', 'Magby', 'Bridgett', 'Island', 'Camino', 'Coderre', 'Gangloff', 'Gillilan', 'Goergen', 'Henthorne', 'Heverly', 'Loughry', 'Records', 'Schweikert', 'Seeds', 'Vanderwerf', 'Westall', 'Cristiano', 'Biser', 'Cartmill', 'Greenly', 'Kountz', 'Craney', 'Sheffey', 'Gelin', 'Gourdine', 'Canham', 'Edgmon', 'Enz', 'Feldpausch', 'Hestand', 'Kaus', 'Kostelnik', 'Ocanas', 'Riggi', 'Rohl', 'Scheurer', 'Sleeman', 'Tosi', 'Phegley', 'Abelson', 'Mclees', 'Sinor', 'Babson', 'Whalley', 'Manton', 'Patteson', 'Doyen', 'Asad', 'Thurmon', 'Cassese', 'Ditmore', 'Duva', 'Pilato', 'Polaski', 'Rzepka', 'Sevin', 'Sivak', 'Speckman', 'Stepien', 'Switalski', 'Valletta', 'Knoth', 'Niver', 'Ciancio', 'Giza', 'Liebowitz', 'Orengo', 'Rothgeb', 'Witz', 'Airhart', 'Gayman', 'Belland', 'Eury', 'Randal', 'Mcghie', 'Briganti', 'Hoopingarner', 'Lugar', 'Manfre', 'Mongelli', 'Squibb', 'Vasil', 'Cap', 'Veillon', 'Ege', 'Spice', 'Nevel', 'Vanleer', 'Petway', 'Petitfrere', 'Barcena', 'Belville', 'Brezina', 'Ketcherside', 'Knodel', 'Krinsky', 'Lundahl', 'Mescher', 'Pilat', 'Sneller', 'Staller', 'Steinhaus', 'Stensrud', 'Szalay', 'Tani', 'Saviano', 'Genna', 'Emry', 'Allin', 'Harvel', 'Harth', 'Pay', 'Harries', 'Brannum', 'Elijah', 'Hoyte', 'Bazinet', 'Bhandari', 'Brozek', 'Cava', 'Dalbey', 'Delgiudice', 'Klages', 'Riffey', 'Straube', 'Zagar', 'Zientek', 'Dilger', 'Hof', 'Karwowski', 'Rybarczyk', 'Spiering', 'Stamos', 'Gangemi', 'Olavarria', 'Sardinas', 'Magin', 'Payano', 'Deady', 'Henricksen', 'Kary', 'Garnier', 'Babic', 'Behymer', 'Billig', 'Huegel', 'Ishihara', 'Mcglinchey', 'Misuraca', 'Petrosino', 'Zizzo', 'Reierson', 'Wadman', 'Brander', 'Risko', 'Basye', 'Mcmakin', 'Straughan', 'Chesnutt', 'Sima', 'Ree', 'Mankins', 'Soberanis', 'Greenup', 'Commodore', 'Carucci', 'Defibaugh', 'Finfrock', 'Funston', 'Grantz', 'Guiney', 'Ohrt', 'Tinsman', 'Godek', 'Mcgrory', 'Mikeska', 'Kamer', 'Lovas', 'Kirshner', 'Bevacqua', 'Franqui', 'Walts', 'Doke', 'Orsborn', 'Tavernier', 'Kibble', 'Scipio', 'Diop', 'Antczak', 'Bastida', 'Callister', 'Dusseau', 'Ficarra', 'Garcilazo', 'Hughett', 'Liebel', 'Rodenbaugh', 'Rosselli', 'Teresi', 'Bohnsack', 'Steidl', 'Vanderheiden', 'Demma', 'Dutson', 'Mcmeekin', 'Glassford', 'Serrao', 'Marriner', 'Mcchristian', 'Lias', 'Blahnik', 'Brunke', 'Daleo', 'Fullam', 'Goetzinger', 'Leva', 'Rehder', 'Ripperger', 'Shindler', 'Tussing', 'Mayr', 'Rozzi', 'Bonsignore', 'Te', 'Graft', 'Ok', 'Clink', 'Mccamey', 'Goldring', 'Tartt', 'Fullilove', 'Amodio', 'Arkin', 'Dettmann', 'Ellingwood', 'Figura', 'Fritzinger', 'Heilmann', 'Hillstrom', 'Marasigan', 'Pavlov', 'Totman', 'Dokken', 'Serpico', 'Shumard', 'Rathman', 'Siegmund', 'Woodhull', 'Oregon', 'Roselle', 'Taul', 'Maddix', 'Nwosu', 'Bavaro', 'Carella', 'Cowdrey', 'Goodnough', 'Koffler', 'Mahajan', 'Montalvan', 'Morga', 'Parrella', 'Quiggle', 'Rehrig', 'Rotondi', 'Tavenner', 'Wigger', 'Yax', 'Bartko', 'Netzel', 'Zechman', 'Socia', 'Vea', 'Wemple', 'Matti', 'Striplin', 'Hollin', 'Geddie', 'Nolden', 'Freeney', 'Jeanjacques', 'Bermudes', 'Castrellon', 'Catino', 'Feeser', 'Kreitz', 'Maisano', 'Melkonian', 'Toste', 'Vancura', 'Bylsma', 'Wiant', 'Mcpheron', 'Gere', 'Geoffroy', 'Fuston', 'Petteway', 'Barsky', 'Bovard', 'Buttars', 'Christophersen', 'Dudzik', 'Ganger', 'Hilgers', 'Holzhauer', 'Minervini', 'Pong', 'Rozycki', 'Sulzer', 'Tauscher', 'Upright', 'Verastegui', 'Lobello', 'Sandt', 'Timbrook', 'Yniguez', 'Nuzzi', 'Sakata', 'Koran', 'Veloso', 'Cullers', 'Culton', 'Reynold', 'Feagins', 'Amaker', 'Cafferty', 'Coontz', 'Iden', 'Mazzotta', 'Montanye', 'Wandell', 'Weiman', 'Vik', 'Staib', 'Lasso', 'Waynick', 'Boniface', 'Massingale', 'Gainous', 'Sharper', 'Columbia', 'Felkins', 'Gatzke', 'Heindel', 'Ludeman', 'Mcmunn', 'Mogavero', 'Ratti', 'Rickabaugh', 'Ripper', 'Tessman', 'Triano', 'Vanderpol', 'Langille', 'Holten', 'Steeley', 'Solan', 'Devaul', 'Lindler', 'Armor', 'Fambrough', 'Golliday', 'Bognar', 'Gamba', 'Gettinger', 'Hanzel', 'Krumwiede', 'Marcinkowski', 'Nicolay', 'Peppard', 'Sisti', 'Sundeen', 'Senatore', 'Diebel', 'Demarais', 'Letellier', 'Goon', 'Texidor', 'Baughan', 'Gunder', 'Lalor', 'Wigglesworth', 'Aird', 'Basey', 'Afshar', 'Anhalt', 'Bondoc', 'Bunten', 'Daniello', 'Kazmierski', 'Marcott', 'Petruska', 'Trejos', 'Droege', 'Fukumoto', 'Harju', 'Hauf', 'Yagi', 'Mccallie', 'Moulds', 'Singleterry', 'Ramkissoon', 'Sanks', 'Siggers', 'Myrie', 'Conteh', 'Biss', 'Brees', 'Collopy', 'Dashner', 'Dehaas', 'Delzer', 'Fees', 'Finocchiaro', 'Forsgren', 'Giampietro', 'Levandowski', 'Mallick', 'Maudlin', 'Micheletti', 'Newhard', 'Parmentier', 'Pintado', 'Pliego', 'Radigan', 'Selke', 'Uptain', 'Wigton', 'Zabinski', 'Becenti', 'Guthmiller', 'Malecha', 'Eardley', 'Muscat', 'Ruhe', 'Battersby', 'Lamie', 'Stan', 'Dutch', 'Duplechain', 'Dildy', 'Auch', 'Baltzer', 'Degaetano', 'Mileski', 'Parrillo', 'Schoof', 'Stires', 'Villescas', 'Knittle', 'Degrave', 'Deihl', 'Moseman', 'Prillaman', 'Wakeley', 'Jake', 'Murden', 'Shareef', 'Yarbough', 'Bothe', 'Boutilier', 'Breck', 'Buschman', 'Coccia', 'Eberlein', 'Harriger', 'Neas', 'Sullenger', 'Walp', 'Yaple', 'Zinger', 'Zufelt', 'Marinaccio', 'Viele', 'Markee', 'Melody', 'Rooke', 'Ales', 'Mumphrey', 'Bessinger', 'Bialas', 'Brugh', 'Chum', 'Diehm', 'Frieze', 'Hieber', 'Malouf', 'Maltz', 'Mcmanaway', 'Musante', 'Pester', 'Roda', 'Snarr', 'Tovey', 'Buchmann', 'Fluck', 'Sadowsky', 'Viteri', 'Loewe', 'Mullaly', 'Lamboy', 'Bouman', 'Provencal', 'Siddons', 'Chelette', 'Rachels', 'Dynes', 'Nobel', 'Desselle', 'Tillison', 'Bajaj', 'Bresee', 'Hisel', 'Mallo', 'Meints', 'Potocki', 'Spore', 'Steier', 'Toothaker', 'Wildt', 'Darcangelo', 'Karbowski', 'Scaccia', 'Lascola', 'Duman', 'Mccaul', 'Rowton', 'Setters', 'Hendryx', 'Belson', 'Manny', 'Winckler', 'Longe', 'Mclucas', 'Lenon', 'Linen', 'Anstine', 'Belkin', 'Drozdowski', 'Ender', 'Ferra', 'Lessig', 'Marucci', 'Nardo', 'Nipp', 'Passarella', 'Roecker', 'Siddique', 'Stanczak', 'Stavros', 'Tomasetti', 'Lagreca', 'Seegmiller', 'Keena', 'Suddarth', 'Wayt', 'Matas', 'Ryer', 'Mortimore', 'Durnell', 'Pieters', 'Slocumb', 'Andaya', 'Brymer', 'Dufek', 'Ekman', 'Espericueta', 'Feltes', 'Hammann', 'Heydt', 'Inthavong', 'Jagielski', 'Nast', 'Petrucelli', 'Phippen', 'Vanderzanden', 'Whinery', 'Zatarain', 'Zelenak', 'Aquilina', 'Hougland', 'Isais', 'Canney', 'Flath', 'Ragon', 'Len', 'Violet', 'Carra', 'Everetts', 'Lockey', 'Dahmer', 'Fuquay', 'Alpers', 'Borromeo', 'Bringas', 'Brumit', 'Campanile', 'Folts', 'Hirai', 'Kiessling', 'Krogstad', 'Ovitt', 'Bhardwaj', 'Hlavaty', 'Monceaux', 'Spatola', 'Trunzo', 'Girvin', 'Shady', 'Grimley', 'Tagg', 'Weddell', 'Mcfadyen', 'Reagin', 'Philo', 'Emily', 'Codd', 'Cherrington', 'Skates', 'Deary', 'Ballester', 'Barilla', 'Cicchetti', 'Dyche', 'Goossen', 'Graveline', 'Hajduk', 'Halliwell', 'Kohnen', 'Kupiec', 'Machacek', 'Manship', 'Slinker', 'Mallozzi', 'Dotter', 'Brazeau', 'Manon', 'Crofford', 'Gauthreaux', 'Petillo', 'Bailor', 'Ganesh', 'Reaser', 'Barren', 'Adachi', 'Aguiniga', 'Cartrette', 'Crady', 'Hegland', 'Isner', 'Karasek', 'Labrum', 'Maroon', 'Rullo', 'Schull', 'Stawicki', 'Withee', 'Penfold', 'Foronda', 'Claridge', 'Coiner', 'Guimaraes', 'Mawyer', 'Rivkin', 'Kiggins', 'Hackel', 'Wey', 'Fairhurst', 'Albertini', 'Gaal', 'Flurry', 'Patricia', 'Savery', 'Colen', 'Cuthrell', 'Maffett', 'Dungey', 'Luter', 'Hurston', 'Ahles', 'Czapla', 'Gallas', 'Kotecki', 'Lazzari', 'Marcellino', 'Valvo', 'Vukovich', 'Wisor', 'Agler', 'Wease', 'Gallentine', 'Christoph', 'Poyer', 'Norment', 'Rhett', 'Amabile', 'Barish', 'Heifner', 'Kolarik', 'Mcquarrie', 'Morua', 'Nahas', 'Razzano', 'Riegle', 'Torralba', 'Perfetti', 'Stalzer', 'Killman', 'Lenning', 'Wyler', 'Soward', 'Releford', 'Battisti', 'Bergum', 'Catapano', 'Doerner', 'Ehlen', 'Finken', 'Genereux', 'Hillegas', 'Hopple', 'Kaatz', 'Lacson', 'Macario', 'Marzolf', 'Muha', 'Picha', 'Springston', 'Stooksbury', 'Weide', 'Glodowski', 'Lueth', 'Assaf', 'Robuck', 'Lamaster', 'Foulkes', 'Swopes', 'Winkfield', 'Aristizabal', 'Aylesworth', 'Bellotti', 'Bittick', 'Capistran', 'Cizek', 'Dinneen', 'Ellender', 'Friske', 'Hoffa', 'Klinge', 'Kuklinski', 'Luzier', 'Martensen', 'Rolin', 'Shankles', 'Siska', 'Wiegman', 'Winterbottom', 'Crookston', 'Gorospe', 'Curci', 'Lamberty', 'Antonetti', 'Sheer', 'Durning', 'Hootman', 'Doub', 'Klaiber', 'Mayeaux', 'Domingos', 'Wheeless', 'Vantrease', 'Summerhill', 'Agresta', 'Annas', 'Aquilar', 'Crea', 'Froese', 'Medlen', 'Peeters', 'Rhudy', 'Risse', 'Schor', 'Zimmerer', 'Bombardier', 'Halfhill', 'Koppenhaver', 'Kruckenberg', 'Boccia', 'Rella', 'Carelli', 'Overson', 'Tamburro', 'Rosamond', 'Lie', 'Mesquita', 'Jennett', 'Jewel', 'Waye', 'Bogucki', 'Colpitts', 'Galpin', 'Hrdlicka', 'Kading', 'Kushnir', 'Leano', 'Liebig', 'Mceuen', 'Nestler', 'Payer', 'Santarelli', 'Schrupp', 'Schwarze', 'Semrau', 'Solanki', 'Terzian', 'Treloar', 'Ureno', 'Vohra', 'Voshell', 'Nakanishi', 'Senese', 'Dierker', 'Quinley', 'Monier', 'Rounsaville', 'Mcfaddin', 'Defrance', 'Joynes', 'Levert', 'Adragna', 'Buczynski', 'Cranor', 'Englebert', 'Furney', 'Gorny', 'Mockler', 'Pavlicek', 'Petrini', 'Schadt', 'Slagel', 'Cumpston', 'Priore', 'Paonessa', 'Carling', 'Espaillat', 'Hem', 'Griffo', 'Tomer', 'Venn', 'Giraud', 'Becks', 'Mungin', 'Attard', 'Brucato', 'Dreyfus', 'Droz', 'Falck', 'Firebaugh', 'Fiser', 'Hemmelgarn', 'Hofacker', 'Kreeger', 'Rippee', 'Ruehle', 'Saputo', 'Scovill', 'Silbaugh', 'Smolenski', 'Spickler', 'Swango', 'Kaehler', 'Mootz', 'Noblett', 'Zarcone', 'Katzenberger', 'Kita', 'Brezinski', 'Castles', 'Padin', 'Hinde', 'Barretta', 'Amiri', 'Shelburne', 'Mccoin', 'Heaston', 'Aldredge', 'Milhouse', 'Wilbon', 'Cephus', 'Barsness', 'Belch', 'Blatter', 'Boyum', 'Corvino', 'Dagenais', 'Doscher', 'Elizarraraz', 'Gierke', 'Habegger', 'Ketcher', 'Kristiansen', 'Oldroyd', 'Sandage', 'Tesoriero', 'Unzueta', 'Wollam', 'Cefalu', 'Achey', 'Wegmann', 'Lessner', 'Bunk', 'Mallin', 'Polis', 'Aronoff', 'Portal', 'Crock', 'Escher', 'Medler', 'Pretty', 'Younge', 'Agbayani', 'Brinkmeyer', 'Castrillon', 'Feick', 'Gutmann', 'Hagenbuch', 'Hesseltine', 'Houska', 'Kimzey', 'Kolasa', 'Lentine', 'Lobaugh', 'Maimone', 'Meshell', 'Nardini', 'Rosetti', 'Siefker', 'Sileo', 'Silveria', 'Argumedo', 'Lesmeister', 'Donnan', 'Hermans', 'Raggio', 'Dupras', 'Empson', 'Bevier', 'Tumey', 'Donn', 'Darville', 'Douse', 'Cheyne', 'Dewing', 'Jansma', 'Mayeda', 'Nield', 'Obermiller', 'Opfer', 'Surma', 'Tiffin', 'Tirpak', 'Wassel', 'Blickenstaff', 'Dorland', 'Kulhanek', 'Andras', 'Estupinan', 'Gonce', 'Weast', 'Souto', 'Guirguis', 'Glazebrook', 'Dain', 'Loyer', 'Bensley', 'Verge', 'Tubman', 'Onley', 'Dais', 'Barash', 'Bullman', 'Crispino', 'Davino', 'Isenhart', 'Kneller', 'Loschiavo', 'Opper', 'Pfleger', 'Wahler', 'Zelasko', 'Havrilla', 'Mintzer', 'Devoll', 'Giannelli', 'Sees', 'Barritt', 'Mesta', 'Sostre', 'Rohman', 'Padget', 'Edds', 'Slinger', 'Borowicz', 'Bregman', 'Bubar', 'Debartolo', 'Desposito', 'Grieshaber', 'Ludtke', 'Pagani', 'Quiambao', 'Schapiro', 'Winward', 'Bouska', 'Olstad', 'Rough', 'Genz', 'Husby', 'Nealis', 'Hyams', 'Andrades', 'Mcgibbon', 'Edwin', 'Buckhalter', 'Baylon', 'Fiene', 'Fillingim', 'Fiorenza', 'Greenstreet', 'Krager', 'Laxson', 'Noreen', 'Roberds', 'Rundquist', 'Smelcer', 'Tabone', 'Train', 'Zeoli', 'Defries', 'Kolp', 'Maahs', 'Mcnall', 'Ehman', 'Keeth', 'Shackleton', 'Hogarth', 'Westbury', 'Gulliver', 'Oquin', 'Holiman', 'Saintlouis', 'Vaughns', 'Aichele', 'Arbelaez', 'Bathurst', 'Bresler', 'Cecena', 'Drollinger', 'Fellner', 'Griesemer', 'Harnois', 'Hire', 'Kraker', 'Roylance', 'Zaccaria', 'Dinunzio', 'Foisy', 'Nordlund', 'Peppler', 'Kishbaugh', 'Marcil', 'Mcfarren', 'Puello', 'Supplee', 'Boyea', 'Depp', 'Tift', 'Wince', 'Pam', 'Ifill', 'Brodt', 'Caamano', 'Gibler', 'Litherland', 'Miesner', 'Pixler', 'Schwimmer', 'Suriano', 'Abendroth', 'Gillaspy', 'Kumpf', 'Schroepfer', 'Boals', 'Seneca', 'Sasson', 'Hindes', 'Posten', 'Lann', 'Anctil', 'Arebalo', 'Beacom', 'Boberg', 'Coufal', 'Didion', 'Fromme', 'Greenan', 'Guerrette', 'Hudec', 'Kazmi', 'Lucchese', 'Mouw', 'Savastano', 'Schomer', 'Shorb', 'Storz', 'Finazzo', 'Knigge', 'Pawlikowski', 'Cercone', 'Sutfin', 'Valdespino', 'Mccartin', 'Yurko', 'Treaster', 'Peaden', 'Russin', 'Dibartolo', 'Dona', 'Skillern', 'Brackens', 'Amyx', 'Bornemann', 'Comtois', 'Kaestner', 'Kallenbach', 'Krupka', 'Lineback', 'Lopata', 'Mcclenahan', 'Monteverde', 'Otani', 'Panchal', 'Pawlicki', 'Suman', 'Vallance', 'Zammit', 'Liszewski', 'Trunk', 'Sharifi', 'Lents', 'Watkinson', 'Willow', 'Flaming', 'Sol', 'Dory', 'Purchase', 'Haris', 'Bigsby', 'Boonstra', 'Emge', 'Goodpasture', 'Iwata', 'Kau', 'Syring', 'Vlach', 'Klaassen', 'Vicuna', 'Wasden', 'Cattell', 'Ridlon', 'Fassler', 'Scullion', 'Hibbitts', 'Mcgillis', 'Pla', 'Mustin', 'Darty', 'Minniefield', 'Bloyd', 'Calnan', 'Casal', 'Fickel', 'Gamero', 'Higuchi', 'Huante', 'Knies', 'Letner', 'Quang', 'Teufel', 'Topolski', 'Tumminello', 'Vanorder', 'Slawinski', 'Nyce', 'Asmar', 'Loudin', 'Karen', 'Budden', 'Mothershed', 'Fenelon', 'Mccrorey', 'Ashenfelter', 'Auge', 'Christison', 'Cilley', 'Corsetti', 'Coxwell', 'Critchley', 'Griep', 'Hausner', 'Hiemstra', 'Koprowski', 'Kozicki', 'Marling', 'Marmo', 'Noller', 'Pich', 'Recendez', 'Renegar', 'Rinne', 'Zeis', 'Buzzelli', 'Lipham', 'Schaner', 'Kartchner', 'Kealy', 'Sinopoli', 'Krishna', 'Brinn', 'Zachry', 'Barbre', 'Sharber', 'Fritze', 'Hanshew', 'Lemere', 'Maruyama', 'Masker', 'Melendy', 'Pelto', 'Rigo', 'Rohling', 'Scobee', 'Sundell', 'Tranter', 'Vancuren', 'Augustyniak', 'Mehringer', 'Sulkowski', 'Gittins', 'Twiford', 'Dumm', 'Jacklin', 'Mcquaig', 'Richison', 'Jex', 'Meritt', 'Hegler', 'Duboise', 'Houze', 'Akana', 'Corsaro', 'Delosangeles', 'Guidice', 'Maccallum', 'Moes', 'Steinhardt', 'Stirewalt', 'Wooters', 'Schissler', 'Sobeck', 'Boyte', 'Jilek', 'Suder', 'Kellis', 'Blankenbaker', 'Lank', 'Mandigo', 'Fremont', 'Rideau', 'Beidler', 'Boda', 'Gulotta', 'Havelka', 'Herberger', 'Isenhower', 'Lattanzi', 'Pandolfi', 'Shearman', 'Wilmarth', 'Dutkiewicz', 'Mazzuca', 'Tabarez', 'Vermilyea', 'Kray', 'Vitti', 'Packwood', 'Paulos', 'Howson', 'Collman', 'Ameen', 'Berisha', 'Capece', 'Fantasia', 'Galas', 'Laszlo', 'Luthi', 'Maietta', 'Mcconaghy', 'Naab', 'Nerio', 'Pineau', 'Rossbach', 'Senne', 'Unangst', 'Kautzman', 'Muhs', 'Ripka', 'Wehling', 'Hoot', 'Jee', 'Megna', 'Tirone', 'Walle', 'Brandi', 'Lutter', 'Mona', 'Roley', 'Mcfann', 'Swader', 'Cavett', 'Delmore', 'Walthour', 'Goldson', 'Biddinger', 'Bjornstad', 'Buesing', 'Cerino', 'Diede', 'Hagle', 'Hodgman', 'Killmer', 'Loa', 'Matsunaga', 'Micciche', 'Newquist', 'Poppen', 'Shellhammer', 'Tienda', 'Tino', 'Mihelich', 'Garsia', 'Orzel', 'Ericsson', 'Dose', 'Kotter', 'Amante', 'Hanif', 'Huckleberry', 'Blandin', 'Carvin', 'Axton', 'Delosrios', 'Diekmann', 'Failing', 'Filipek', 'Otting', 'Rozman', 'Sadeghi', 'Slutsky', 'Speake', 'Szostak', 'Tacy', 'Kmiecik', 'Macgillivray', 'Yeakel', 'Dykman', 'Gorey', 'Dowding', 'Revel', 'Geathers', 'Cappa', 'Davidoff', 'Lukehart', 'Mccutchan', 'Neeb', 'Nikolic', 'Piorkowski', 'Sandvig', 'Schmidgall', 'Stockbridge', 'Thornock', 'Valk', 'Wiechmann', 'Chait', 'Gacek', 'Schupbach', 'Gemma', 'Rus', 'Barch', 'Wyles', 'Scrivener', 'Salls', 'Akram', 'Mcclatchey', 'Bromfield', 'Burl', 'Redwood', 'Starkes', 'Beaston', 'Boggio', 'Cantillo', 'Cina', 'Cryan', 'Dubs', 'Edmisten', 'Fitzer', 'Fugere', 'Fundora', 'Galvis', 'Jafri', 'Nalepa', 'Peri', 'Pippenger', 'Rheault', 'Rohrbacher', 'Romberg', 'Samek', 'Stehlik', 'Stepan', 'Torrisi', 'Wessner', 'Zappala', 'Bangerter', 'Czerniak', 'Mcshea', 'Raczkowski', 'Rohwer', 'Spehar', 'Lague', 'Messman', 'Angst', 'Temme', 'Tolles', 'Lawn', 'Ayars', 'Austen', 'Stansel', 'Fairclough', 'Tribbett', 'Peevy', 'Fraiser', 'Caradine', 'Fiegel', 'Gignac', 'Halpert', 'Karels', 'Knappenberger', 'Prezioso', 'Rohlfs', 'Szot', 'Varano', 'Weinreich', 'Butterbaugh', 'Heying', 'Vandewalle', 'Yandle', 'Thede', 'Astor', 'Blanchfield', 'Hegeman', 'Fels', 'Miniard', 'Lorio', 'Muhammed', 'Lazard', 'Ehmke', 'Hulst', 'Imlay', 'Kinzler', 'Knaak', 'Poehler', 'Prusak', 'Rakow', 'Raupp', 'Sucher', 'Tanenbaum', 'Burich', 'Macmaster', 'Shapley', 'Thurgood', 'Mires', 'Gotay', 'Attia', 'Martis', 'Greenley', 'Fothergill', 'Bonvillain', 'Buffalo', 'Dues', 'Crute', 'Cantone', 'Dewit', 'Dovel', 'Klopfer', 'Philhower', 'Piatek', 'Pion', 'Rapaport', 'Vanwert', 'Wikstrom', 'Graffeo', 'Kissling', 'Niday', 'Soong', 'Adami', 'Hammersmith', 'Keir', 'Yo', 'Grizzell', 'Stclaire', 'Swales', 'Nole', 'Pole', 'Hartgrove', 'Carrothers', 'Carlone', 'Ciano', 'Finucane', 'Fitterer', 'Gellman', 'Hakimi', 'Janos', 'Krings', 'Malmstrom', 'Markwardt', 'Rodin', 'Schau', 'Scheible', 'Orick', 'Dine', 'Tremmel', 'Shon', 'Wilms', 'Bren', 'Bertin', 'Poster', 'Jeng', 'Stcharles', 'Jenning', 'Eutsey', 'Fayne', 'Gustave', 'Mccargo', 'Boruff', 'Boschert', 'Burmester', 'Colello', 'Conchas', 'Devi', 'Dishaw', 'Funaro', 'Gallen', 'Hsueh', 'Lanser', 'Macaraeg', 'Munster', 'Petsch', 'Routon', 'Werkmeister', 'Woznicki', 'Boroff', 'Cochenour', 'Dibartolomeo', 'Elzinga', 'Heyen', 'Lapaglia', 'Schiel', 'Rauda', 'Woltman', 'Carll', 'Kanda', 'Runnells', 'Hazelett', 'Arnwine', 'Sherfield', 'Borthwick', 'Coyner', 'Ensey', 'Feinman', 'Leyendecker', 'Lickteig', 'Lubeck', 'Maccarone', 'Minahan', 'Plew', 'Saur', 'Schleich', 'Sixtos', 'Soller', 'Valek', 'Umland', 'Swogger', 'Iannacone', 'Tomey', 'Venuto', 'Peru', 'Adolf', 'Lemme', 'Bureau', 'River', 'Buffaloe', 'Leacock', 'Threat', 'Boza', 'Constancio', 'Dandurand', 'Hiscock', 'Kaley', 'Michaelsen', 'Roberti', 'Sicilia', 'Sliker', 'Sooter', 'Steyer', 'Tabora', 'Vanderbeek', 'Vanscyoc', 'Piercey', 'Sabater', 'Bride', 'Tippens', 'Acquaviva', 'Baublitz', 'Mccanna', 'Mckaig', 'Merenda', 'Obermeier', 'Pechacek', 'Pugmire', 'Shaneyfelt', 'Steuer', 'Zeidler', 'Bodenheimer', 'Gaglio', 'Maceachern', 'Munsterman', 'Rayle', 'Wisnewski', 'Baar', 'Thi', 'Foulds', 'Rufino', 'Chrisco', 'Barrientez', 'Lare', 'Munnerlyn', 'Pitter', 'Koroma', 'Caisse', 'Espe', 'Kerin', 'Melchiorre', 'Mentz', 'Paasch', 'Parrales', 'Rhew', 'Sigley', 'Skiff', 'Stockert', 'Viglione', 'Kraska', 'Botto', 'Ponzio', 'Wolfley', 'Wack', 'Kilborn', 'Dunnavant', 'Pitney', 'Dolman', 'Biscoe', 'Michelle', 'Azcona', 'Brasington', 'Fazzino', 'Hoefs', 'Kohlmeyer', 'Laser', 'Morea', 'Morrin', 'Neuwirth', 'Nicklaus', 'Pennypacker', 'Rueckert', 'Schriefer', 'Scovel', 'Swyers', 'Thebeau', 'Mijangos', 'Douville', 'Tidball', 'Smullen', 'Lecount', 'Pruiett', 'Branche', 'Arment', 'Babiarz', 'Char', 'Granlund', 'Hillock', 'Kahrs', 'Khong', 'Lalley', 'Laspina', 'Pietila', 'Ponciano', 'Rosengren', 'Slee', 'Snowberger', 'Weglarz', 'Camarata', 'Villalovos', 'Buza', 'Kenning', 'Rohrig', 'Sedor', 'Perretta', 'Hamberg', 'Mongan', 'Formby', 'Portier', 'Silcott', 'Levell', 'Barrantes', 'Bellefeuille', 'Beneke', 'Bilbao', 'Danahy', 'Delahanty', 'Deppen', 'Dicostanzo', 'Dudding', 'Elmquist', 'Handa', 'Hatem', 'Loverde', 'Mesick', 'Onofrio', 'Ramesh', 'Tiberio', 'Trachtenberg', 'Vanwagenen', 'Cassada', 'Pepitone', 'Stillson', 'Pfarr', 'Radle', 'Scallan', 'Carlen', 'Bermingham', 'Sagers', 'Llorens', 'Turay', 'Beamish', 'Carlini', 'Galipeau', 'Heavey', 'Kempker', 'Masser', 'Montellano', 'Peine', 'Pietro', 'Plitt', 'Pollman', 'Rike', 'Spees', 'Vandervelde', 'Vanwey', 'Grundman', 'Marinucci', 'Molenda', 'Shideler', 'Turrubiartes', 'Schaer', 'Firkins', 'Haid', 'Parnes', 'Pulse', 'Masone', 'Burpo', 'Tharrington', 'Winborn', 'Petite', 'Buttry', 'Clason', 'Eutsler', 'Haberer', 'Haft', 'Kotler', 'Meloche', 'Raether', 'Rengifo', 'Roback', 'Stangle', 'Wilderman', 'Chickering', 'Gervacio', 'Penaranda', 'Schnieders', 'Coyer', 'Laramee', 'Curts', 'Bailiff', 'Truby', 'Molder', 'Hedley', 'Carbon', 'Gudger', 'Fontenette', 'Askren', 'Deshane', 'Enriques', 'Fake', 'Jungers', 'Krech', 'Niemela', 'Perfetto', 'Ritt', 'Soldano', 'Stanish', 'Strege', 'Wichert', 'Wolz', 'Zimbelman', 'Abplanalp', 'Nikkel', 'Oravec', 'Coile', 'Mizuno', 'Fenlon', 'Vanloo', 'Callery', 'Hortman', 'Hashim', 'Sorey', 'Ajayi', 'Alesi', 'Alessandro', 'Avants', 'Bachtel', 'Bonine', 'Butkovich', 'Cerros', 'Colina', 'Dayhoff', 'Favata', 'Haning', 'Kamath', 'Kosik', 'Loughrey', 'Mollo', 'Nagi', 'Nesler', 'Nosek', 'Ordoyne', 'Politis', 'Zwolinski', 'Yaffe', 'Sigal', 'Burow', 'Scarbro', 'Buckel', 'Broxson', 'Goyer', 'Goding', 'Delee', 'Jefferys', 'Blissett', 'Balian', 'Brader', 'Curreri', 'Dickmann', 'Eckerle', 'Erives', 'Fedewa', 'Frisina', 'Gropp', 'Hinck', 'Lamorte', 'Litzenberger', 'Proehl', 'Struss', 'Tamburello', 'Digioia', 'Galarneau', 'Jurkiewicz', 'Macnaughton', 'Talsma', 'Vlasak', 'Weyrauch', 'Yontz', 'Kho', 'Stgermaine', 'Grauer', 'Benware', 'Rearden', 'Molin', 'Pendergrast', 'Sivils', 'Ellery', 'Ikner', 'Metayer', 'Toran', 'Seaberry', 'Banderas', 'Bannan', 'Critzer', 'Doescher', 'Haakenson', 'Hignite', 'Hoeksema', 'Inserra', 'Korbel', 'Kruzel', 'Langen', 'Mittelstaedt', 'Popkin', 'Schwarting', 'Toral', 'Ilagan', 'Lamica', 'Lierman', 'Zimmerly', 'Fosse', 'Pagnotta', 'Trenholm', 'Clayson', 'Cerutti', 'Wollard', 'Mcburnett', 'Stallcup', 'Magan', 'Wonder', 'Gillock', 'Ellisor', 'Clayburn', 'Mabery', 'Cariaga', 'Crail', 'Dieckman', 'Joynt', 'Kleinert', 'Kutner', 'Milla', 'Nauta', 'Rende', 'Robare', 'Santella', 'Scianna', 'Sevcik', 'Smolik', 'Staudinger', 'Cedillos', 'Shroff', 'Ueda', 'Yearout', 'Zuno', 'Pottle', 'Klabunde', 'Tusa', 'Schomburg', 'Alto', 'Packett', 'Muns', 'Dante', 'Jarnigan', 'Londo', 'Bigbee', 'Isles', 'Nembhard', 'Appiah', 'Hypolite', 'Acebedo', 'Arlt', 'Champney', 'Kawahara', 'Lehan', 'Pavlak', 'Ritacco', 'Seckinger', 'Turvey', 'Vanevery', 'Wronski', 'Bahnsen', 'Clites', 'Ellwanger', 'Husak', 'Lydic', 'Zubiate', 'Muehlbauer', 'Neumeister', 'Wellnitz', 'Langstaff', 'Gort', 'Eve', 'Stones', 'Stanard', 'Whichard', 'Cheers', 'Baldus', 'Bertoni', 'Chesebro', 'Dino', 'Dubray', 'Icenhour', 'Marquard', 'Mette', 'Potash', 'Winterhalter', 'Crupi', 'Lascala', 'Tauer', 'Vandenburgh', 'Mende', 'Swarey', 'Sarles', 'Platter', 'Dekeyser', 'Jaye', 'Pelle', 'Caroll', 'Rosette', 'Shepperson', 'Fooks', 'Kennerson', 'Bolser', 'Chim', 'Diefenderfer', 'Frosch', 'Holzwarth', 'Kjos', 'Langland', 'Meland', 'Stufflebeam', 'Worland', 'Barrales', 'Chhay', 'Corkern', 'Creegan', 'Golan', 'Marceaux', 'Matsuo', 'Micallef', 'Otsuka', 'Rinella', 'Creveling', 'Krane', 'Mcnay', 'Detter', 'Drexel', 'Kibodeaux', 'Shippey', 'Medearis', 'Samms', 'Drzewiecki', 'Fariss', 'Glandon', 'Heinecke', 'Hendler', 'Jungwirth', 'Panepinto', 'Rohleder', 'Saragosa', 'Stuller', 'Wissel', 'Atwal', 'Tisch', 'Esterly', 'Mourad', 'Brickell', 'Bough', 'Rubens', 'Angevine', 'Tolin', 'Sago', 'Apfel', 'Ashdown', 'Derusha', 'Fiorino', 'Koyama', 'Matteucci', 'Newbrough', 'Seufert', 'Stahley', 'Tyburski', 'Zaino', 'Cdebaca', 'Hormann', 'Wangen', 'Winterton', 'Beagley', 'Sowden', 'Daul', 'Errington', 'Steber', 'Emfinger', 'Olan', 'Fiveash', 'Carriger', 'Breakfield', 'Ezekiel', 'Wallington', 'Hollimon', 'Izzard', 'Lyde', 'Bellmore', 'Benkert', 'Bhargava', 'Dacanay', 'Dano', 'Diprima', 'Garlitz', 'Hannemann', 'Janiak', 'Klann', 'Kunce', 'Malicki', 'Mcgivney', 'Nordeen', 'Procell', 'Rands', 'Smeltz', 'Sutch', 'Wach', 'Wentling', 'Karapetyan', 'Mcvicar', 'Pennisi', 'Perley', 'Graner', 'Hartney', 'Shadley', 'Pennebaker', 'Cayce', 'Marris', 'Burges', 'Odem', 'Charvat', 'Delgreco', 'Diven', 'Latu', 'Mccallion', 'Mcfeely', 'Mon', 'Nagai', 'Obrecht', 'Opdyke', 'Pearlstein', 'Pomroy', 'Prothero', 'Rado', 'Roehr', 'Seiffert', 'Spake', 'Stech', 'Thakur', 'Trzcinski', 'Uvalle', 'Vazques', 'Anschutz', 'Boecker', 'Descoteaux', 'Idol', 'Stanzione', 'Welp', 'Schumer', 'Ridner', 'Kasner', 'Auton', 'Barca', 'Ocheltree', 'Biernat', 'Mercuri', 'Truslow', 'Witters', 'Mcelhannon', 'Mccrackin', 'Brabson', 'Baumberger', 'Double', 'Garis', 'Kasparian', 'Kooistra', 'Loser', 'Mangone', 'Massman', 'Raimondo', 'Sparacio', 'Valli', 'Viets', 'Wessell', 'Kieu', 'Vonderheide', 'Wojnar', 'Furbee', 'Heyden', 'Lackie', 'Ehrich', 'Roupe', 'Holy', 'Care', 'Isa', 'Samad', 'Rougeau', 'Chavous', 'Rattler', 'Wedderburn', 'President', 'Blackham', 'Bobak', 'Crimi', 'Durland', 'Gargus', 'Gitlin', 'Levandoski', 'Niu', 'Piccirilli', 'Sauvageau', 'Schweers', 'Talty', 'Uthe', 'Verga', 'Warzecha', 'Erisman', 'Gallacher', 'Shanholtz', 'Fulgencio', 'Migues', 'Garin', 'Heisel', 'Stong', 'Christiana', 'Bonenfant', 'Clancey', 'Kindley', 'Nill', 'Mood', 'Atterbury', 'Tobe', 'Eisenhardt', 'Franceschini', 'Heiland', 'Kreuzer', 'Lockaby', 'Scarola', 'Tessitore', 'Warehime', 'Kukowski', 'Ruhlman', 'Frymire', 'Bartone', 'Wrightson', 'Langlinais', 'Planas', 'Darsey', 'Darin', 'Gammel', 'Giroir', 'Aspinall', 'Hollywood', 'Childres', 'Copelin', 'Teamer', 'Okoro', 'Abshier', 'Arizaga', 'Berenson', 'Biegler', 'Dugdale', 'Erlich', 'Gavino', 'Haaland', 'Lautenschlager', 'Lilja', 'Livingood', 'Lockner', 'Pyeatt', 'Reist', 'Rummell', 'Schadler', 'Snare', 'Zawada', 'Dumler', 'Moncivais', 'Sammarco', 'Laraway', 'Voorhis', 'Detty', 'Manko', 'Zale', 'Autin', 'Quaid', 'Denver', 'Demario', 'Nearing', 'Amerine', 'Bea', 'Carraher', 'Dierkes', 'Dutko', 'Hosek', 'Kassner', 'Meo', 'Mesler', 'Norquist', 'Pacetti', 'Pellerito', 'Ryser', 'Turnmire', 'Caniglia', 'Zollman', 'Gerwig', 'Denslow', 'Stapler', 'Majid', 'Prestage', 'Eargle', 'Spight', 'Argabright', 'Borgeson', 'Cipollone', 'Dippold', 'Korf', 'Milhoan', 'Pinelli', 'Roblero', 'Scolaro', 'Sperl', 'Svensson', 'Bauguess', 'Freimuth', 'Luquin', 'Barman', 'Solivan', 'Buel', 'Birkeland', 'Cafiero', 'Degollado', 'Demeyer', 'Hoberg', 'Homola', 'Kadel', 'Koslowski', 'Lefrancois', 'Macconnell', 'Madill', 'Nudelman', 'Raucci', 'Reidenbach', 'Schermer', 'Sergio', 'Bucko', 'Haegele', 'Nibert', 'Sidell', 'Slape', 'Hellard', 'Russi', 'Wilcock', 'Verdejo', 'Lessley', 'Camille', 'Topps', 'Acampora', 'Blacketer', 'Clapham', 'Efaw', 'Louks', 'Mersch', 'Odden', 'Schettler', 'Schnarr', 'Sieracki', 'Skog', 'Zobrist', 'Corless', 'Zunker', 'Bega', 'Victoriano', 'Singler', 'Keltz', 'Valcarcel', 'Curet', 'Harvison', 'Mccullah', 'Cranfield', 'Gardin', 'Mewborn', 'Bisel', 'Carfagno', 'Carli', 'Chirino', 'Fairless', 'Gaboury', 'Goetze', 'Guitron', 'Haut', 'Krupski', 'Lata', 'Misiak', 'Sawaya', 'Schomaker', 'Schulke', 'Tin', 'Dewhurst', 'Krummel', 'Hannahs', 'Carlow', 'Hemp', 'Bowdoin', 'Breda', 'Chriss', 'Kebede', 'Binney', 'Brasseaux', 'Cunliffe', 'Gantner', 'Gillick', 'Hottle', 'Hren', 'Irani', 'Klitzke', 'Luhrs', 'Micale', 'Oien', 'Oppelt', 'Rallo', 'Ringwald', 'Stonerock', 'Strebel', 'Tiberi', 'Volner', 'Whetstine', 'Wrubel', 'Brakebill', 'Fechner', 'Geurts', 'Hoefling', 'Misener', 'Andros', 'Dimock', 'Rosendo', 'Megill', 'Gloyd', 'Garney', 'Andries', 'Esco', 'Rhames', 'Draine', 'Plair', 'Jiggetts', 'Atcheson', 'Brienza', 'Cerveny', 'Depaoli', 'Deroo', 'Dorf', 'Guidotti', 'Heimlich', 'Insalaco', 'Kaczorowski', 'Kinnunen', 'Loureiro', 'Lyster', 'Pia', 'Piccoli', 'Quale', 'Sadek', 'Stenstrom', 'Strause', 'Tortorella', 'Traweek', 'Vanderwerff', 'Varian', 'Vink', 'Waxler', 'Wynia', 'Annese', 'Economou', 'Whitsel', 'Dougher', 'Schnieder', 'Cosman', 'Farra', 'Osmon', 'Bardon', 'Rampersaud', 'Jane', 'Kirts', 'Chennault', 'Thomison', 'Graig', 'Narine', 'Gunner', 'Aamodt', 'Adinolfi', 'Adolphson', 'Aki', 'Alderton', 'Aloisio', 'Bellavia', 'Clutts', 'Coughran', 'Frasco', 'Guinta', 'Hatala', 'Ibach', 'Mecum', 'Medero', 'Neria', 'Nery', 'Pignataro', 'Podesta', 'Statzer', 'Stombaugh', 'Szczesny', 'Kovaleski', 'Ades', 'Bauers', 'Bern', 'Horsfall', 'Masood', 'Cinque', 'Stay', 'Beare', 'Donavan', 'Ikerd', 'Seney', 'Layson', 'Coler', 'Tuft', 'Tamplin', 'Billinger', 'Scrivens', 'Bartolomei', 'Baza', 'Dimattia', 'Dotterer', 'Dushane', 'Fulop', 'Iacovelli', 'Macnamara', 'Mahlum', 'Noteboom', 'Rebstock', 'Drechsler', 'Itzkowitz', 'Rigler', 'Schrom', 'Pirozzi', 'Ferre', 'Shiley', 'Villanova', 'Barona', 'Farrel', 'Shelman', 'Nute', 'Rowlette', 'Tarrance', 'Cadorette', 'Christenberry', 'Deocampo', 'Farace', 'Fesmire', 'Kallman', 'Koogler', 'Pitsch', 'Salce', 'Schnepf', 'Totaro', 'Towey', 'Urdiales', 'Gotschall', 'Brunett', 'Dier', 'Hainsworth', 'Seabury', 'Cornelious', 'Altobelli', 'Andreozzi', 'Bohlmann', 'Carranco', 'Daubenspeck', 'Delagrange', 'Delo', 'Faler', 'Ficke', 'Hellinger', 'Hudman', 'Ihde', 'Landolfi', 'Leiner', 'Mosman', 'Rang', 'Tarbet', 'Wineman', 'Fehrman', 'Guinto', 'Icenogle', 'Tomasik', 'Looman', 'Iriarte', 'Denaro', 'Montross', 'Piersall', 'Lauren', 'Lablanc', 'Kindrick', 'Deriso', 'Manker', 'Maycock', 'Cullens', 'Frieson', 'Clippinger', 'Colavito', 'Fassbender', 'Fennessy', 'Granada', 'Gugliotta', 'Guiliano', 'Hirschberg', 'Kerbs', 'Kusch', 'Limmer', 'Malpica', 'Mcaloon', 'Morken', 'Pytel', 'Resnik', 'Spangle', 'Worstell', 'Kerkhoff', 'Kupka', 'Stanczyk', 'Storlie', 'Thurow', 'Caetano', 'Ernandez', 'Males', 'Coopersmith', 'Everest', 'Leander', 'Demeritt', 'Thomes', 'Codner', 'Livsey', 'Alcoser', 'Arico', 'Balestrieri', 'Cavalli', 'Florendo', 'Gottshall', 'Hinesley', 'Lafuente', 'Landess', 'Ornstein', 'Pettingill', 'Romesburg', 'Tokunaga', 'Wiersema', 'Janeway', 'Pecha', 'Steimel', 'Sproule', 'Sommerfield', 'Mirsky', 'Staines', 'Pu', 'Corbit', 'Mcelmurry', 'Wickes', 'Yell', 'Mordecai', 'Aye', 'Boldin', 'China', 'Fason', 'Thibeaux', 'Nesby', 'Bergevin', 'Besecker', 'Dohrmann', 'Fujioka', 'Fyock', 'Goralski', 'Kirschenbaum', 'Knipper', 'Menor', 'Mischler', 'Nolder', 'Odoherty', 'Pickerill', 'Poremba', 'Swantek', 'Difabio', 'Kulka', 'Servais', 'Wickizer', 'Melecio', 'Zeek', 'Fruit', 'Agnes', 'Bar', 'Mccarrell', 'Hopgood', 'Califano', 'Cratty', 'Dishner', 'Gabrielli', 'Hamacher', 'Hinote', 'Jakob', 'Klinkhammer', 'Krasinski', 'Krysiak', 'Pardi', 'Petrilli', 'Razon', 'Reifsnyder', 'Reisig', 'Reller', 'Sassano', 'Steinhart', 'Wrede', 'Zevallos', 'Coombe', 'Quesnel', 'Rebuck', 'Wantz', 'Bendele', 'Lacomb', 'Hagge', 'Donelan', 'Kempe', 'Po', 'Varnadoe', 'Constantin', 'Deon', 'Motte', 'Beckum', 'Parchment', 'Meriweather', 'Borucki', 'Fatima', 'Gerkin', 'Guglielmi', 'Hettich', 'Hoerr', 'Karlsson', 'Kenealy', 'Paolillo', 'Pfenning', 'Rueger', 'Schildt', 'Sem', 'Vilches', 'Dornbusch', 'Erdahl', 'Kleinhenz', 'Moneypenny', 'Tomasko', 'Vandevender', 'Cromley', 'Tun', 'Velasques', 'Roble', 'Burgo', 'Waples', 'Mabon', 'Benincasa', 'Buttermore', 'Dalbec', 'Eikenberry', 'Fuehrer', 'Hossler', 'Lepp', 'Opheim', 'Sarsfield', 'Strobl', 'Strouth', 'Tousley', 'Wilczek', 'Kleppe', 'Muraoka', 'Wiencek', 'Pinckard', 'Ahsan', 'Welder', 'Forton', 'Lorden', 'Stlawrence', 'Marina', 'Mcquire', 'Randleman', 'Pates', 'Fluitt', 'Scotland', 'Clerk', 'Townsell', 'Arrasmith', 'Baisch', 'Berling', 'Busler', 'Curtice', 'Ebinger', 'Fleeger', 'Geng', 'Goettsch', 'Henneberry', 'Johannesen', 'Mcilrath', 'Perigo', 'Phibbs', 'Riske', 'Scarcella', 'Vandyken', 'Barstad', 'Dicamillo', 'Ernsberger', 'Guebara', 'Peetz', 'Newcome', 'Alterman', 'Weik', 'Trier', 'Yeats', 'Hugg', 'Crayne', 'Ige', 'Coach', 'Archuletta', 'Bodi', 'Cadavid', 'Ceccarelli', 'Derksen', 'Deutscher', 'Genter', 'Gogel', 'Gorczyca', 'Grohs', 'Koplin', 'Kozloski', 'Lillo', 'Oplinger', 'Pulis', 'Renk', 'Repka', 'Scavo', 'Vitagliano', 'Weinkauf', 'Yellin', 'Boehlke', 'Montecalvo', 'Castrillo', 'Grenon', 'Wellen', 'Keelan', 'Coville', 'Rison', 'Jourdain', 'Chestnutt', 'Sharpley', 'Acharya', 'Bartles', 'Burruel', 'Capelle', 'Contos', 'Friedrichsen', 'Heaberlin', 'Hermiz', 'Iracheta', 'Klutts', 'Koziel', 'Salto', 'Scaturro', 'Stasik', 'Stitzel', 'Wiseley', 'Paccione', 'Squyres', 'Leverich', 'Holderness', 'Elvin', 'Morand', 'Lizana', 'Woolen', 'Amarante', 'Arn', 'Biedermann', 'Daddio', 'Davilla', 'Forti', 'Gripp', 'Hanzlik', 'Iannotti', 'Larin', 'Nakajima', 'Novacek', 'Pesch', 'Regino', 'Rosengarten', 'Schleif', 'Searing', 'Sikkema', 'Walstrom', 'Guastella', 'Hemstreet', 'Rorabaugh', 'Weisenburger', 'Cannan', 'Band', 'Fowkes', 'Bennetts', 'Purviance', 'Tippin', 'Brossard', 'Seigle', 'Babyak', 'Billiter', 'Cartner', 'Deetz', 'Dorow', 'Laur', 'Leblond', 'Lecomte', 'Morando', 'Reitman', 'Sarria', 'Scheu', 'Timmermann', 'Vaneck', 'Vangorp', 'Windhorst', 'Kaeser', 'Kosloski', 'Cappuccio', 'Knitter', 'Evon', 'Garbett', 'Wickens', 'Ruston', 'Fregia', 'Ashurst', 'Ede', 'Strider', 'Reaux', 'Castellani', 'Debus', 'Degracia', 'Hineman', 'Laning', 'Litts', 'Losito', 'Massi', 'Mazzara', 'Schriber', 'Seyfert', 'Strength', 'Treptow', 'Yuhasz', 'Kamrath', 'Krigbaum', 'Marrocco', 'Wanta', 'Yakubov', 'Hy', 'Sabedra', 'Belling', 'Deats', 'Mahaffy', 'Brodrick', 'Mcneece', 'Madding', 'Mottley', 'Asp', 'Borgerding', 'Conrady', 'Dagenhart', 'Defusco', 'Duensing', 'Ensz', 'Fockler', 'Gajda', 'Masino', 'Minster', 'Naso', 'Nifong', 'Ohlsen', 'Prairie', 'Rosendale', 'Rotman', 'Salzano', 'Samet', 'Takagi', 'Vandagriff', 'Vespa', 'Zaragosa', 'Howdyshell', 'Kilburg', 'Mellado', 'Mollet', 'Varone', 'Benne', 'Dillehay', 'Ruther', 'Gullick', 'Lasure', 'Wilkenson', 'Lawrance', 'Amacker', 'Wisher', 'Pryer', 'Torian', 'Aragona', 'Dains', 'Darrigo', 'Escajeda', 'Fertitta', 'Futral', 'Kielty', 'Kightlinger', 'Lanuza', 'Marich', 'Mcenaney', 'Mohrman', 'Pressnell', 'Prestia', 'Scullin', 'Seidner', 'Steigerwalt', 'Wassell', 'Bonavita', 'Bourgault', 'Sentz', 'Viswanathan', 'Hanchey', 'Volpi', 'Wilensky', 'Mathey', 'Mages', 'Raimo', 'Cozine', 'Sprow', 'Petties', 'Bracht', 'Cayabyab', 'Comp', 'Flamenco', 'Friederich', 'Hori', 'Husmann', 'Isidoro', 'Ketchem', 'Krishnamurthy', 'Kucinski', 'Lalani', 'Lamacchia', 'Lecher', 'Morante', 'Schrieber', 'Sciarra', 'Vandamme', 'Welz', 'Bozich', 'Cancilla', 'Panduro', 'Mcglade', 'Wasmund', 'Riso', 'Moronta', 'Kemple', 'Rocks', 'Sainsbury', 'Solo', 'Harnage', 'Sturkie', 'Hollingworth', 'Denley', 'Bumpass', 'Lovick', 'Bribiesca', 'Dewilde', 'Drohan', 'Geringer', 'Kokoszka', 'Kronberg', 'Lewinski', 'Lunney', 'Morehart', 'Ty', 'Vasseur', 'Vona', 'Wriston', 'Casarrubias', 'Copsey', 'Rochette', 'Macwilliams', 'Natali', 'Milanes', 'Rux', 'Woodcox', 'Bernett', 'Bronaugh', 'Fulwood', 'Bhalla', 'Depalo', 'Hench', 'Huckeba', 'Kasch', 'Kisor', 'Marinos', 'Nakahara', 'Parrent', 'Rantz', 'Schoenbeck', 'Schwieterman', 'Selk', 'Swonger', 'Walding', 'Nunamaker', 'Schuchardt', 'Leverton', 'Fiallo', 'Viruet', 'Fadel', 'Robel', 'Calley', 'Renton', 'Rack', 'Brin', 'Cocks', 'Mcivor', 'Bois', 'Demary', 'Bason', 'Dowlen', 'Prophete', 'Collymore', 'Beisner', 'Briand', 'Cumberledge', 'Curro', 'Cutcher', 'Daponte', 'Eckroth', 'Edgemon', 'Farinella', 'Kobe', 'Muilenburg', 'Osiecki', 'Cutsinger', 'Biggar', 'Maciver', 'Quesinberry', 'Rippetoe', 'Baswell', 'Caven', 'Mimbs', 'Hurlock', 'Cham', 'Cypress', 'Emile', 'Beitel', 'Bellavance', 'Casada', 'Fandel', 'Gillentine', 'Gorelick', 'Kassis', 'Klim', 'Kohnke', 'Lutgen', 'Nalbandian', 'Schepis', 'Troester', 'Hartje', 'Hippensteel', 'Kiehn', 'Kuenzi', 'Greenburg', 'Boroughs', 'Catton', 'Adney', 'Olivencia', 'Mcdermitt', 'Ashwell', 'Leazer', 'Poag', 'Prevo', 'Porcher', 'Hugley', 'Salone', 'Jupiter', 'Bratz', 'Ehresman', 'Fauber', 'Filippelli', 'Kesling', 'Kronk', 'Mcelhiney', 'Mcgreal', 'Miyasato', 'Moomey', 'Nicolini', 'Osberg', 'Ostroski', 'Sanzo', 'Sybert', 'Dimichele', 'Gerrits', 'Shatley', 'Weider', 'Faraj', 'Paules', 'Yarberry', 'Lege', 'Pembroke', 'Clipper', 'Filmore', 'Crichlow', 'Blaustein', 'Boak', 'Canzoneri', 'Crescenzo', 'Ebaugh', 'Feig', 'Jens', 'Knoebel', 'Mohammadi', 'Montour', 'Norgren', 'Pasquini', 'Prost', 'Reh', 'Rosal', 'Thesing', 'Titcomb', 'Wolinski', 'Zeitlin', 'Depoy', 'Guccione', 'Ritsema', 'Valent', 'Drey', 'Govoni', 'Lonsdale', 'Hultz', 'Harvie', 'Levison', 'Colomb', 'Dace', 'Cleckley', 'Godette', 'Brentlinger', 'Fetrow', 'Giuffrida', 'Kopka', 'Kurtzman', 'Panameno', 'Pannone', 'Parzych', 'Seipp', 'Stobbe', 'Thulin', 'Torosian', 'Trani', 'Zietlow', 'Montufar', 'Stohr', 'Woloszyn', 'Cimini', 'Angles', 'Nicasio', 'Vi', 'Em', 'Couchman', 'Hobbie', 'Bluestein', 'Phillipson', 'Shiels', 'Altice', 'Williston', 'Kone', 'Tadesse', 'Abbruzzese', 'Badders', 'Duxbury', 'Egeland', 'Freyre', 'Haen', 'Hineline', 'Kniss', 'Kothe', 'Kyker', 'Popelka', 'Sanjose', 'Slaugh', 'Wecker', 'Wiechman', 'Bilello', 'Keezer', 'Knode', 'Longhurst', 'Wisser', 'Cease', 'Contrera', 'Berroa', 'Aguon', 'Pott', 'Blitch', 'Suares', 'Bein', 'Acre', 'Ailes', 'Tutwiler', 'Porte', 'Ashwood', 'Blackson', 'Viverette', 'Balthazar', 'Kidane', 'Allegretti', 'Corbeil', 'Crossno', 'Cudworth', 'Federspiel', 'Hamstra', 'Kibbey', 'Lefevers', 'Loomer', 'Losada', 'Medema', 'Palmerin', 'Peregoy', 'Previte', 'Riedinger', 'Schlossberg', 'Wilemon', 'Lepkowski', 'Mcdanel', 'Commisso', 'Baiza', 'Fones', 'Divis', 'Diedrick', 'Grave', 'Bonkowski', 'Cerami', 'Drinkwine', 'Hauke', 'Heun', 'Keilman', 'Klemmer', 'Mella', 'Olarte', 'Ryall', 'Veltman', 'Wlodarczyk', 'Bashor', 'Kubinski', 'Vanacker', 'Prouse', 'Perrott', 'Berrio', 'Mccarney', 'Seiders', 'Jafari', 'Louque', 'Melder', 'Grazier', 'Gabay', 'Hardway', 'Sadiq', 'Sully', 'Durrell', 'Barno', 'Maybin', 'Brazile', 'Asante', 'Awalt', 'Badal', 'Cucinotta', 'Grenfell', 'Hartis', 'Herbster', 'Hesch', 'Klosowski', 'Overfelt', 'Pangelinan', 'Pflum', 'Rozema', 'Spivack', 'Vallez', 'Vetere', 'Villamor', 'Wedekind', 'Bobrowski', 'Nguyenthi', 'Nowaczyk', 'Vis', 'Pownall', 'Susan', 'Yanni', 'Gest', 'Balthrop', 'Treasure', 'Harston', 'Frett', 'Buttery', 'Chiarelli', 'Colledge', 'Czaplicki', 'Fahringer', 'Fedder', 'Gerstenberger', 'Gretz', 'Hallquist', 'Hemme', 'Kolling', 'Krauth', 'Liquori', 'Podolsky', 'Scheirer', 'Sehgal', 'Selinger', 'Wintermute', 'Chokshi', 'Dimarzio', 'Santoni', 'Wetherby', 'Flis', 'Comley', 'Boyt', 'Farrah', 'Mario', 'Mcquilkin', 'Tim', 'Cusic', 'Enge', 'Millage', 'Waheed', 'Kenan', 'Silmon', 'Mcconico', 'Bougher', 'Braly', 'Coriell', 'Daignault', 'Henschen', 'Holsomback', 'Johal', 'Kellams', 'Schaumburg', 'Stockinger', 'Urquidi', 'Cabanillas', 'Lindbloom', 'Willinger', 'Redpath', 'Baller', 'Juarbe', 'Badia', 'Elderkin', 'Dessert', 'Retter', 'Mccollam', 'Rivette', 'Devins', 'Hewell', 'Penniman', 'Arbuthnot', 'Cotman', 'Tezeno', 'Albo', 'Beezley', 'Can', 'Chesler', 'Dehne', 'Demchak', 'Edberg', 'Gotham', 'Ingels', 'Kaercher', 'Kwiecinski', 'Landolt', 'Macdonnell', 'Malicoat', 'Meinen', 'Niswander', 'Pandit', 'Pettet', 'Pliska', 'Ploch', 'Ratigan', 'Sampsel', 'Sick', 'Ciampi', 'Mctighe', 'Riester', 'Salvucci', 'Tornow', 'Vencill', 'Racey', 'Haroldson', 'Finder', 'Dennen', 'Stano', 'Boys', 'Camillo', 'Woodfield', 'Turrell', 'Sami', 'Annan', 'Yeldell', 'Madlock', 'Manigo', 'Arcila', 'Bauza', 'Bisceglia', 'Crouthamel', 'Debenedetto', 'Delude', 'Dorta', 'Fairburn', 'Garciagarcia', 'Geeslin', 'Kazanjian', 'Loescher', 'Mccarl', 'Mulqueen', 'Pultz', 'Shutter', 'Spacek', 'Yamanaka', 'Borkholder', 'Halko', 'Pieroni', 'Proano', 'Sarkisyan', 'Riopelle', 'Routson', 'Fogelman', 'Sou', 'Tress', 'Altemus', 'Bosh', 'Laroque', 'Hueston', 'Latin', 'Taitt', 'Lymon', 'Chadd', 'Challis', 'Comella', 'Drabik', 'Entz', 'Hagner', 'Knobbe', 'Luckenbill', 'Macphail', 'Mogg', 'Paustian', 'Rimel', 'Schilke', 'Folkman', 'Lemery', 'Quinby', 'Cliburn', 'Rowand', 'Wambach', 'Gammell', 'Nobrega', 'Hoggan', 'Nightengale', 'Alison', 'Batte', 'Borner', 'Hudnell', 'Casseus', 'Boteler', 'Cantos', 'Contois', 'Coventry', 'Dezarn', 'Eisenbarth', 'Hegel', 'Jahr', 'Joss', 'Lober', 'Marcks', 'Portilla', 'Reinders', 'Scouten', 'Siri', 'Sobocinski', 'Tesh', 'Veno', 'Wheeldon', 'Yankee', 'Wanke', 'Wollin', 'Longobardi', 'Mccarson', 'Sampsell', 'Harrer', 'Bakewell', 'Mcgalliard', 'Truluck', 'Bremmer', 'Lois', 'Goody', 'Kassim', 'Conniff', 'Elenes', 'Esker', 'Groshong', 'Hallisey', 'Loree', 'Marken', 'Molle', 'Muntean', 'Ozaki', 'Roen', 'Rumer', 'Shorr', 'Tanzer', 'Varady', 'Hillmer', 'Macari', 'Schuld', 'Swartzlander', 'Tsuji', 'Holahan', 'Abee', 'Rowse', 'Pawley', 'Samp', 'Shad', 'Wintz', 'Rainford', 'Cellucci', 'Cumpton', 'Dando', 'Dress', 'Funari', 'Gouker', 'Hemberger', 'Latz', 'Meckes', 'Parrinello', 'Picardi', 'Pilz', 'Pretzer', 'Schriever', 'Sodano', 'Stetter', 'Storti', 'Tiu', 'Zimmerle', 'Dragone', 'Engert', 'Fullenkamp', 'Rockafellow', 'Siwek', 'Zillmer', 'Devol', 'Milke', 'Taira', 'Richner', 'Aros', 'Mancil', 'Yetman', 'Hanney', 'Kinion', 'Ferrand', 'Conyer', 'Chahal', 'Fulfer', 'Gurski', 'Horseman', 'Liebe', 'Nyhus', 'Pernice', 'Pesqueira', 'Rieker', 'Trautmann', 'Yellowhair', 'Schwanz', 'Salinger', 'Carvell', 'Heymann', 'Grad', 'Pharo', 'Pipher', 'Magalhaes', 'Kissee', 'Winthrop', 'Leid', 'Sledd', 'Bladen', 'Rahaman', 'Holdman', 'Goldwire', 'Lawal', 'Sinkfield', 'Bryk', 'Butkiewicz', 'Gagen', 'Gettle', 'Goede', 'Hardenbrook', 'Heinsohn', 'Kovalcik', 'Needleman', 'Obeso', 'Parziale', 'Schaus', 'Wadlow', 'Haluska', 'Stiteler', 'Zaruba', 'Tschirhart', 'Biscardi', 'Gopal', 'Avella', 'Ponto', 'Levit', 'Trevor', 'Pimienta', 'Plass', 'Guthery', 'Cordy', 'Tuff', 'Zellars', 'Altier', 'Berges', 'Connick', 'Deruyter', 'Divita', 'Frankovich', 'Ingenito', 'Kosman', 'Lantis', 'Lovering', 'Sortino', 'Waage', 'Wildrick', 'Barberio', 'Domin', 'Meisels', 'Sender', 'Giovanni', 'Sanguinetti', 'Beary', 'Helmstetter', 'Joens', 'Beaven', 'Kines', 'Surrency', 'Sheilds', 'Chamber', 'Albarez', 'Ambrocio', 'Arrellano', 'Berrigan', 'Bookwalter', 'Caravella', 'Higbie', 'Lotter', 'Lougee', 'Manganiello', 'Nobriga', 'Roorda', 'Serr', 'Squillace', 'Tejera', 'Tipping', 'Wohler', 'Carreto', 'Deignan', 'Luebbers', 'Engelhard', 'Hollenback', 'Baldo', 'Gearin', 'Bia', 'Figueras', 'Lule', 'Libert', 'Florida', 'Wyne', 'Mccright', 'Jacko', 'Cawthorne', 'Rhue', 'Betton', 'Cisse', 'Arth', 'Bendickson', 'Cangialosi', 'Coltharp', 'Cubias', 'Czarnik', 'Erpelding', 'Erway', 'Heister', 'Mergen', 'Murrietta', 'Nachman', 'Nusser', 'Ostrem', 'Pei', 'Pescatore', 'Reim', 'Shaull', 'Spranger', 'Uphold', 'Yslas', 'Heinold', 'Lindemuth', 'Redeker', 'Rochin', 'Wisehart', 'Carsey', 'Nocella', 'Combe', 'Thacher', 'Hammad', 'Bene', 'Yelvington', 'Mccrone', 'Driessen', 'Saxby', 'Maull', 'Jeune', 'Amorim', 'Degrazia', 'Doege', 'Flinchbaugh', 'Goodreau', 'Hanisch', 'Hoaglund', 'Imamura', 'Lafler', 'Linne', 'Profeta', 'Reifschneider', 'Santaana', 'Scaffidi', 'Shreeve', 'Stadelman', 'Dippolito', 'Pizzuti', 'Rodenberg', 'Schartz', 'Reiger', 'Solie', 'Willen', 'Atallah', 'Wyers', 'Harpel', 'Cleckler', 'Fobes', 'Sniffen', 'Pedroso', 'Samara', 'Malcomb', 'Penry', 'Stearn', 'Seller', 'Abeita', 'Bilotti', 'Brosky', 'Clewell', 'Fraijo', 'Gaskey', 'Goodfriend', 'Mesaros', 'Musch', 'Nulph', 'Obarr', 'Roat', 'Sabato', 'Sauerwein', 'Schum', 'Silsby', 'Weyenberg', 'Corrente', 'Egloff', 'Kohrs', 'Sammartino', 'Thoennes', 'Carmer', 'Madura', 'Shang', 'Faxon', 'Monell', 'Laden', 'Yousuf', 'Mcgauley', 'Salmond', 'Berhane', 'Abood', 'Bondar', 'Buehrer', 'Capelli', 'Gersten', 'Hambly', 'Haymaker', 'Kosar', 'Lahaie', 'Lecrone', 'Lippy', 'Pohle', 'Shimmel', 'Viall', 'Yother', 'Deviney', 'Kosiba', 'Wiederholt', 'Sivley', 'Wheelis', 'Kanipe', 'Braz', 'Peacher', 'Quadri', 'Hancox', 'Paye', 'Curlin', 'Broden', 'Mckeller', 'Baltodano', 'Baquero', 'Bolek', 'Brede', 'Bulson', 'Christmann', 'Cisler', 'Delio', 'Duffee', 'Duzan', 'Kuschel', 'Mohon', 'Nedrow', 'Sengupta', 'Timpe', 'Veeder', 'Zollner', 'Zummo', 'Hribar', 'Laredo', 'Mcdivitt', 'Nazari', 'Davern', 'Heizer', 'Orejel', 'Haggett', 'Flore', 'Soley', 'Bardell', 'Comegys', 'Bessent', 'Shaheed', 'Brugman', 'Choudhary', 'Fehl', 'Fogt', 'Heckmann', 'Iacobucci', 'Klaver', 'Lumbert', 'Mussman', 'Pierotti', 'Pihl', 'Sandrock', 'Scritchfield', 'Siefken', 'Stavropoulos', 'Thomley', 'Zenker', 'Enke', 'Knoke', 'Rung', 'Mikita', 'Kunkler', 'Deskin', 'Egnor', 'Vader', 'Allers', 'Pi', 'Sproull', 'Peller', 'Kendell', 'Jinkins', 'Iglehart', 'Brookens', 'Darrough', 'Winzer', 'Amenta', 'Aughenbaugh', 'Barnick', 'Conaty', 'Eichmann', 'Gilday', 'Guhl', 'Koskela', 'Makuch', 'Osoria', 'Pujols', 'Reinsch', 'Reiswig', 'Rosebrock', 'Sahli', 'Seitzinger', 'Shermer', 'Vasbinder', 'Zanghi', 'Flahive', 'Mieczkowski', 'Osmundson', 'Willmann', 'Agramonte', 'Aven', 'Vanderzee', 'Fraher', 'Kannan', 'Shira', 'Zetina', 'Gilden', 'Hingle', 'Boutros', 'Scutt', 'Foree', 'Gillins', 'Screen', 'Birden', 'Guinyard', 'Berreth', 'Bertini', 'Bousman', 'Butchko', 'Caras', 'Donoso', 'Gavilanes', 'Karow', 'Kouri', 'Rediger', 'Rininger', 'Rosecrans', 'Toops', 'Vigliotti', 'Cancio', 'Karger', 'Milholland', 'Thielke', 'Amster', 'Rosch', 'Elks', 'Vasco', 'Doshier', 'Belasco', 'Lean', 'Dickason', 'Suitt', 'Tipler', 'Obey', 'Crear', 'Redic', 'Agredano', 'Amarillas', 'Arnesen', 'Celedon', 'Clapsaddle', 'Coveney', 'Demorest', 'Gleich', 'Guenthner', 'Haverland', 'Jaffee', 'Kusek', 'Manni', 'Mysliwiec', 'Nakama', 'Ngan', 'Ohmer', 'Romanoff', 'Salaiz', 'Zeiders', 'Bartholow', 'Budke', 'Centanni', 'Koppelman', 'Liberti', 'Gatta', 'Lovegrove', 'Maggs', 'Malay', 'Blind', 'Kerman', 'Frans', 'Rendleman', 'Tyrone', 'Ambers', 'Rambert', 'Killings', 'Balicki', 'Bohac', 'Brisbois', 'Cervone', 'Curtner', 'Ertle', 'Fantozzi', 'Feger', 'Fineman', 'Garate', 'Goldy', 'Gudmundson', 'Harcrow', 'Herdt', 'Klapp', 'Mirra', 'Radu', 'Saiki', 'Unser', 'Valko', 'Verhoff', 'Candelas', 'Ireton', 'Vanhuss', 'Wierman', 'Zawistowski', 'Geiman', 'Mess', 'Full', 'Fuertes', 'Derickson', 'Mccole', 'Godden', 'Mizzell', 'Sane', 'Shirer', 'Fickling', 'Marcelle', 'Tramble', 'Cappelletti', 'Catterton', 'Champeau', 'Czyzewski', 'Dirusso', 'Herget', 'Heupel', 'Hinchliffe', 'Levitsky', 'Maheu', 'Nakao', 'Petsche', 'Pilkenton', 'Raska', 'Rief', 'Scheidegger', 'Schmeltzer', 'Sherlin', 'Skarda', 'Strassburg', 'Sundaram', 'Wuertz', 'Bonanni', 'Montante', 'Ottesen', 'Nading', 'Bram', 'Debell', 'Sia', 'Latch', 'Largen', 'Nack', 'Smillie', 'Debold', 'Bruer', 'Steedley', 'Mckinny', 'Radney', 'Amadio', 'Bearman', 'Canny', 'Cansino', 'Cupo', 'Ekstrand', 'Forrer', 'Imm', 'Kawano', 'Klingaman', 'Kovacevich', 'Lukasiewicz', 'Mcdermid', 'Michon', 'Mincks', 'Piano', 'Ronayne', 'Schaum', 'Sciandra', 'Villafan', 'Wolin', 'Schrager', 'Strawderman', 'Hable', 'Skees', 'Persky', 'Defore', 'Edmonston', 'Base', 'Barrell', 'Cressey', 'Husser', 'Matin', 'Mckennon', 'Barak', 'Buffone', 'Clemence', 'Delaguila', 'Eberwein', 'Eichholz', 'Faraone', 'Herington', 'Kempa', 'Kenefick', 'Lahaye', 'Larusso', 'Osterloh', 'Pfluger', 'Pomponio', 'Shiu', 'Stokke', 'Trembly', 'Weck', 'Alire', 'Babayan', 'Hustad', 'Stumph', 'Zwiebel', 'Wicke', 'Brauch', 'Milos', 'Haggart', 'Mento', 'Kennamer', 'Thibeau', 'Winge', 'Lords', 'Debaun', 'Haw', 'Mould', 'Elison', 'Etling', 'Froemming', 'Ghazarian', 'Justesen', 'Kawai', 'Lensing', 'Lindhorst', 'Poveda', 'Rabadan', 'Vigeant', 'Warnken', 'Bermel', 'Manry', 'Suppes', 'Stauder', 'Dayley', 'Lose', 'Tappe', 'Harle', 'Mcquain', 'Bettes', 'Carline', 'Cordner', 'Habeeb', 'Sisney', 'Kyer', 'Bruins', 'Prosise', 'Molton', 'Blye', 'Mccuin', 'Babler', 'Caiazzo', 'Cereceres', 'Ciaramitaro', 'Corkran', 'Crawshaw', 'Degan', 'Dunlavy', 'Gronewold', 'Hartner', 'Kornacki', 'Lapolla', 'Mountz', 'Mumpower', 'Orefice', 'Prats', 'Repasky', 'Schlee', 'Sekhon', 'Stanich', 'Yilmaz', 'Desisto', 'Hanko', 'Nichter', 'Risenhoover', 'Tomasso', 'Blome', 'Carda', 'Ebrahimi', 'Devor', 'Pappa', 'Caravello', 'Lunday', 'Slim', 'Praytor', 'Pickerel', 'Wahab', 'Breeland', 'Flowe', 'Brodnax', 'Monds', 'Sylla', 'Bekele', 'Mozee', 'Beechy', 'Birky', 'Dellavalle', 'Delmastro', 'Dematteis', 'Eckberg', 'Eisenbraun', 'Englehardt', 'Fazzio', 'Gedney', 'Hana', 'Keeran', 'Lallier', 'Martenson', 'Mcelheny', 'Paar', 'Suski', 'Vossen', 'Westergaard', 'Westermann', 'Wiemann', 'Golz', 'Lofquist', 'Pracht', 'Tifft', 'Ruhnke', 'Schnider', 'How', 'Knaggs', 'Bleck', 'Whitelock', 'Berringer', 'Clepper', 'Birkhead', 'Pilson', 'Inabinet', 'Gentles', 'Respress', 'Crumble', 'Bandera', 'Bartunek', 'Buerkle', 'Dulong', 'Eisinger', 'Favero', 'Giusto', 'Guisinger', 'Kiddy', 'Krisher', 'Lounsberry', 'Morikawa', 'Mowdy', 'Penaflor', 'Picariello', 'Quirion', 'Scali', 'Scheibel', 'Schlitt', 'Sermeno', 'Thalman', 'Barraclough', 'Boshart', 'Glatfelter', 'Hjelm', 'Horlacher', 'Muratalla', 'Schepp', 'Fogerty', 'Mulero', 'Manner', 'Creecy', 'Leftridge', 'Ancira', 'Anselmi', 'Blew', 'Coykendall', 'Dembinski', 'Emmerling', 'Fawver', 'Giard', 'Heinzen', 'Kasson', 'Linam', 'Lofaro', 'Magnotta', 'Pitzen', 'Ripa', 'Skowronek', 'Sliter', 'Stauch', 'Szczepaniak', 'Yerian', 'Baccam', 'Berres', 'Helstrom', 'Kocurek', 'Kostelecky', 'Corkins', 'Fesperman', 'Gibble', 'Liranzo', 'Karan', 'Lavely', 'Yorks', 'Lisenbee', 'Jerger', 'Cockroft', 'Brodhead', 'Weathersbee', 'Salih', 'Pore', 'Melbourne', 'Code', 'Scotton', 'Addie', 'Snipe', 'Cuffie', 'Haynesworth', 'Borawski', 'Borchard', 'Cacciola', 'Dedic', 'Grzyb', 'Hecox', 'Horacek', 'Nierman', 'Nofziger', 'Raup', 'Rissler', 'Segler', 'Serviss', 'Soon', 'Tesmer', 'Campanaro', 'Curnutte', 'Rabold', 'Schreyer', 'Siebenaler', 'Zenteno', 'Deveney', 'Kuchera', 'Ruden', 'Skaff', 'Sciulli', 'Howeth', 'Hanly', 'Gola', 'Forkner', 'Rosene', 'Beeker', 'Mazo', 'Lambson', 'Younis', 'Batch', 'Ayo', 'Ackles', 'Hansbrough', 'Terrance', 'Bacani', 'Cracraft', 'Ebben', 'Falzarano', 'Ferreras', 'Hovatter', 'Jaskiewicz', 'Killpack', 'Kwasniewski', 'Mahnken', 'Natera', 'Noboa', 'Rapson', 'Raybuck', 'Shima', 'Vahle', 'Sheeks', 'Laker', 'Krok', 'Debo', 'Oberly', 'Chelf', 'Catala', 'Airey', 'Osten', 'Golay', 'Eliot', 'Lebert', 'Swaggerty', 'Hue', 'Seavers', 'Bomer', 'Bouyer', 'Andazola', 'Blancarte', 'Brierly', 'Centofanti', 'Dalesandro', 'Dickstein', 'Kalas', 'Langman', 'Mouradian', 'Okubo', 'Overbaugh', 'Popek', 'Runnion', 'Sannes', 'Schamber', 'Silfies', 'Sinko', 'Sit', 'Cerrillo', 'Gayler', 'Kauth', 'Culkin', 'Peers', 'Spidle', 'Ballon', 'Rasmus', 'Queenan', 'Reynaud', 'Ambroise', 'Mcclenton', 'Adelmann', 'Avellino', 'Fickle', 'Humm', 'Hussong', 'Iturralde', 'Kritzer', 'Lautzenheiser', 'Linsky', 'Malarkey', 'Mallia', 'Marban', 'Mccance', 'Nawaz', 'Pallone', 'Rindfleisch', 'Schmall', 'Sowle', 'Stanco', 'Whelpley', 'Winning', 'Kopczynski', 'Pickup', 'Tsou', 'Phebus', 'Munter', 'Sisko', 'Fico', 'Mosco', 'Rani', 'Kon', 'Baggott', 'Brom', 'Valerius', 'Fines', 'Megee', 'Salsberry', 'Sheff', 'Mourning', 'Archambeault', 'Bhatnagar', 'Budreau', 'Dieffenbach', 'Gildner', 'Hevener', 'Hippler', 'Jonker', 'Keef', 'Kirlin', 'Litvak', 'Liz', 'Mulhearn', 'Popal', 'Samaha', 'Schwartzberg', 'Sotello', 'Weiskopf', 'Neitzke', 'Strelow', 'Nitsch', 'Lynne', 'Olver', 'Bange', 'Boot', 'Carmine', 'Bellville', 'Lafitte', 'Condry', 'Mccotter', 'Spruiell', 'Moman', 'Legree', 'Bongard', 'Deiss', 'Devoy', 'Gusler', 'Ianni', 'Kolker', 'Lagomarsino', 'Leier', 'Marbut', 'Minsky', 'Okumura', 'Roza', 'Siemon', 'Vescio', 'Wirkus', 'Huizinga', 'Lazalde', 'Morici', 'Ungaro', 'Detamore', 'Meer', 'Erman', 'Sherrow', 'Laforte', 'Pellman', 'Bostock', 'Lender', 'Peagler', 'Rhem', 'Brisbon', 'Angers', 'Azbill', 'Busto', 'Coggeshall', 'Cucci', 'Defino', 'Duey', 'Fecht', 'Grudzinski', 'Guarneri', 'Huesca', 'Kolbeck', 'Mennella', 'Nishi', 'Ohaver', 'Porth', 'Romanello', 'Serrata', 'Thoele', 'Thornsbury', 'Ulsh', 'Vanderlinde', 'Weninger', 'Bonaventura', 'Cura', 'Filley', 'Grabinski', 'Kloc', 'Kulinski', 'Maruca', 'Dantoni', 'Grohman', 'Starbird', 'Rach', 'Asman', 'Mosso', 'Slaney', 'Kall', 'Nevil', 'Blann', 'Frear', 'Mosey', 'Wrench', 'Balkcom', 'Liburd', 'Yeboah', 'Abbatiello', 'Creviston', 'Dunivan', 'Durnin', 'Eckerman', 'Fennimore', 'Gohlke', 'Holtan', 'Kochevar', 'Kraushaar', 'Landino', 'Maack', 'Montefusco', 'Noguchi', 'Norgard', 'Olafson', 'Paulick', 'Petropoulos', 'Principato', 'Qazi', 'Sammis', 'Sida', 'Sorum', 'Vandal', 'Vertrees', 'Votta', 'Wiesman', 'Fleagle', 'Panaro', 'Stolarski', 'Ogborn', 'Petta', 'Annett', 'Campas', 'Xing', 'Lorey', 'Restaino', 'Forgue', 'Rourk', 'Modisette', 'Aris', 'Vandunk', 'Dia', 'Alverio', 'Ancell', 'Bieler', 'Bouwman', 'Campillo', 'Cebreros', 'Chant', 'Cira', 'Cragun', 'Geppert', 'Hemmert', 'Kister', 'Luger', 'Ojala', 'Pfeifle', 'Piechocki', 'Saldarriaga', 'Skoda', 'Vangorden', 'Winberry', 'Zeeb', 'Gehm', 'Oshima', 'Tofte', 'Tsoi', 'Delman', 'Harsha', 'Finton', 'Triola', 'Bingle', 'Delise', 'Westergard', 'Aul', 'Celia', 'Headings', 'Mates', 'Coste', 'Venus', 'Shearn', 'Adell', 'Minnifield', 'Baxa', 'Cieri', 'Coppens', 'Delahoz', 'Fratus', 'Gribbins', 'Homann', 'Ilg', 'Majchrzak', 'Mcclard', 'Podolak', 'Pollan', 'Savio', 'Schloemer', 'Sesma', 'Tilbury', 'Torrico', 'Vanduyn', 'Eisert', 'Levalley', 'Silversmith', 'Zanoni', 'Grupe', 'Marmolejos', 'Marsch', 'Martes', 'Gorley', 'Furbush', 'Hughlett', 'Stcyr', 'Faustin', 'Bushaw', 'Cerbone', 'Equihua', 'Fiorella', 'Ganzer', 'Gugel', 'Hladik', 'Kalra', 'Leuenberger', 'Lusardi', 'Nogales', 'Schifano', 'Swalley', 'Tangney', 'Zakarian', 'Arenz', 'Bottcher', 'Gervasio', 'Peschel', 'Potteiger', 'Teruya', 'Tullier', 'Lenhard', 'Brusseau', 'Streett', 'Loan', 'Fahmy', 'Broadfoot', 'Shugars', 'Wilshire', 'Mohabir', 'Baye', 'Sean', 'Caruth', 'Arroyos', 'Campise', 'Capparelli', 'Desanti', 'Dunsworth', 'Fasching', 'Heldman', 'Keagle', 'Kulesa', 'Lawrenz', 'Monhollen', 'Niekamp', 'Nucci', 'Ostman', 'Salzmann', 'Schemmel', 'Selin', 'Stencel', 'Zilka', 'Friesner', 'Onstad', 'Poovey', 'Squillante', 'Tullo', 'Uriegas', 'Vigilante', 'Lasswell', 'Navedo', 'Dunnagan', 'Pevey', 'Santino', 'Waldren', 'Leven', 'Stinnette', 'Eleazer', 'Ragas', 'Cockfield', 'Lafontant', 'Babinski', 'Balash', 'Hadler', 'Kantz', 'Latini', 'Lavy', 'Mally', 'Maurin', 'Mifsud', 'Miguez', 'Muma', 'Needle', 'Orrico', 'Zalazar', 'Chinen', 'Coluccio', 'Gibboney', 'Knapke', 'Moczygemba', 'Leonguerrero', 'Punzalan', 'Lortz', 'Rosel', 'Mcclaran', 'Weatherhead', 'Mcgurn', 'Sanville', 'Goe', 'Phang', 'Briskey', 'Bluitt', 'Hapner', 'Lamadrid', 'Leuthold', 'Litchford', 'Scaduto', 'Smoyer', 'Stonehouse', 'Streng', 'Susman', 'Swoyer', 'Tempesta', 'Tiedt', 'Politi', 'Ruotolo', 'Schwendeman', 'Siegenthaler', 'Streff', 'Strite', 'Kroft', 'Lewey', 'Silbert', 'Frie', 'Bentson', 'Coin', 'Lupe', 'Mousa', 'Syler', 'Fester', 'Tenny', 'Surgeon', 'Blowe', 'Metellus', 'Borboa', 'Danker', 'Ferch', 'Fritzsche', 'Gudiel', 'Kilmartin', 'Nieland', 'Soffer', 'Yescas', 'Chappelear', 'Hincapie', 'Landowski', 'Barfoot', 'Hesketh', 'Mittelman', 'Escorcia', 'Meetze', 'Coral', 'Huddleson', 'Hoo', 'Googe', 'Munir', 'Reine', 'Studstill', 'Swims', 'Ganaway', 'Daise', 'Blando', 'Bream', 'Cangemi', 'Dicola', 'Difalco', 'Gleim', 'Goerke', 'Jauch', 'Lashway', 'Mckinlay', 'Mura', 'Polsky', 'Roehrich', 'Schwalbach', 'Tegtmeier', 'Theel', 'Wuthrich', 'Yabut', 'Zara', 'Ardizzone', 'Blasius', 'Deramo', 'Heffern', 'Rickels', 'Wojtas', 'Bue', 'Garant', 'Kitagawa', 'Vorhees', 'Randa', 'Seider', 'Bi', 'Womac', 'Santerre', 'Mesmer', 'Bailly', 'Argue', 'Spidell', 'Manu', 'General', 'Exantus', 'Neloms', 'Piggee', 'Agcaoili', 'Ambrosini', 'Balleza', 'Bhavsar', 'Brandstetter', 'Cascone', 'Deyton', 'Fette', 'Gershman', 'Hanni', 'Hitchner', 'Manthe', 'Marengo', 'Ockerman', 'Pergola', 'Ratterree', 'Shober', 'Swezey', 'Vadala', 'Waszak', 'Wishard', 'Zhuang', 'Bobst', 'Filippini', 'Giardino', 'Johanning', 'Kloepfer', 'Dahan', 'Rahmani', 'Hett', 'Sha', 'Spaugh', 'Darner', 'Dagen', 'Gaier', 'Musco', 'Holling', 'Keahey', 'Merricks', 'Nur', 'Andrick', 'Demauro', 'Haury', 'Hsiung', 'Kotarski', 'Kriesel', 'Leleux', 'Nazar', 'Oganesyan', 'Polivka', 'Sansoucie', 'Serafino', 'Stammer', 'Tamm', 'Wachowiak', 'Zinda', 'Goedde', 'Pedregon', 'Snader', 'Witczak', 'Kem', 'Prabhu', 'Purtle', 'Nola', 'Om', 'Finster', 'Bryans', 'Mateus', 'Bour', 'Santy', 'Mola', 'Guile', 'Denne', 'Bol', 'Mont', 'Perro', 'Haji', 'Swinger', 'Mitchelle', 'Creary', 'Leeks', 'Barsotti', 'Bolender', 'Dohner', 'Federman', 'Lancour', 'Lueken', 'Pettinger', 'Rathmann', 'Schiess', 'Schulenberg', 'Troyan', 'Dafoe', 'Delahunt', 'Domagala', 'Ganske', 'Grasmick', 'Guinther', 'Hlavac', 'Klumb', 'Susko', 'Vanhandel', 'Burget', 'Thaker', 'Winker', 'Castellucci', 'Guerette', 'Garde', 'Busher', 'Usery', 'Braker', 'Blan', 'Goar', 'Loiseau', 'Anderberg', 'Bamber', 'Biagini', 'Dack', 'Groeneveld', 'Habig', 'Howk', 'Kutsch', 'Mcgloin', 'Nevares', 'Piedrahita', 'Puffenbarger', 'Racer', 'Stanaland', 'Turck', 'Vanvleck', 'Velardi', 'Verhoeven', 'Wernick', 'Wherley', 'Zamzow', 'Binegar', 'Kaluza', 'Kudrna', 'Marbach', 'Schwichtenberg', 'Chay', 'Lanthier', 'Balling', 'Parcher', 'Venner', 'Nolette', 'Quant', 'Grierson', 'Quest', 'Level', 'Birkner', 'Evancho', 'Grinde', 'Horiuchi', 'Hoselton', 'Kuk', 'Maiello', 'Matuska', 'Melito', 'Northey', 'Pallante', 'Porzio', 'Rad', 'Rizzolo', 'Thull', 'Urenda', 'Dalfonso', 'Harbold', 'Kemerer', 'Knapton', 'Meeder', 'Ruckle', 'Segui', 'Behne', 'Bamburg', 'Galen', 'Hallen', 'Herandez', 'Chittick', 'Deshon', 'Verrier', 'Sorel', 'Neylon', 'Thatch', 'Bayly', 'Beever', 'Galka', 'Gruhn', 'Gsell', 'Happe', 'Hovan', 'Marter', 'Matarese', 'Mellema', 'Ollila', 'Schempp', 'Serda', 'Skenandore', 'Stemper', 'Toupin', 'Vandeven', 'Yauger', 'Koenigs', 'Mullendore', 'Ouellet', 'Sullenberger', 'Julson', 'Pelot', 'Clamp', 'Berte', 'Beese', 'Matkin', 'Erie', 'Rosenburg', 'Reap', 'Stelle', 'Rayon', 'Hoit', 'Hollyfield', 'Kindall', 'Agent', 'Glascoe', 'Holts', 'Wynder', 'Balderston', 'Bernardy', 'Blehm', 'Casebeer', 'Emler', 'Farrugia', 'Guzzardo', 'Johnsrud', 'Maffeo', 'Mccartan', 'Redburn', 'Reesman', 'Savas', 'Shamoon', 'Shown', 'Spinale', 'Tabaka', 'Wedell', 'Armato', 'Bassford', 'Bungard', 'Faerber', 'Freet', 'Oesterle', 'Vandeberg', 'Bacha', 'Stemm', 'Edgett', 'Karrick', 'Girten', 'Orgill', 'Meridith', 'Cullom', 'Hennington', 'Minns', 'Appleberry', 'Abare', 'Annen', 'Beierle', 'Berish', 'Cracchiolo', 'Dilullo', 'Kehm', 'Kuhne', 'Modglin', 'Norland', 'Petruzzelli', 'Schabel', 'Stauffacher', 'Villena', 'Wageman', 'Willden', 'Faiella', 'Mangiaracina', 'Petralia', 'Witwer', 'Tropp', 'Bores', 'Burkel', 'Stanifer', 'Teele', 'Cornick', 'Credit', 'Dorvil', 'Bonillas', 'Callinan', 'Colleran', 'Finer', 'Krach', 'Lubas', 'Lutman', 'Marien', 'Mccort', 'Merica', 'Mies', 'Nicotra', 'Novosad', 'Priem', 'Ramakrishnan', 'Zolman', 'Deitsch', 'Georgi', 'Haberstroh', 'Kofoed', 'Kreischer', 'Nazareno', 'Norkus', 'Steimle', 'Fellin', 'Ghanem', 'Kosch', 'Pages', 'Balthazor', 'Corte', 'Hoh', 'Shrewsberry', 'Beharry', 'Waight', 'Leconte', 'Clowney', 'Tesfaye', 'Andis', 'Brosch', 'Bruckman', 'Carducci', 'Erbes', 'Ferreiro', 'Gatten', 'Heggen', 'Kackley', 'Klamm', 'Korff', 'Lehane', 'Mech', 'Montanari', 'Pousson', 'Soderholm', 'Strey', 'Upp', 'Wahlen', 'Cedrone', 'Steuber', 'Vonfeldt', 'Deridder', 'Shams', 'Barnas', 'Bake', 'Brownrigg', 'Donohoo', 'Mccorry', 'Spruce', 'Masden', 'Porchia', 'Fofana', 'Bless', 'Caler', 'Calva', 'Carnero', 'Chakraborty', 'Clenney', 'Dockendorf', 'Dziak', 'Errickson', 'Ewoldt', 'Klippel', 'Krass', 'Luebbe', 'Parlett', 'Paternostro', 'Peterka', 'Petitti', 'Puthoff', 'Wessman', 'Brossman', 'Glotfelty', 'Grabau', 'Kortz', 'Sienko', 'Yonan', 'Fakhoury', 'Bunney', 'Sillas', 'Guerry', 'Sedwick', 'Okey', 'Virgo', 'Babers', 'Casali', 'Chiquito', 'Correnti', 'Doverspike', 'Fryberger', 'Golas', 'Golob', 'Hufstetler', 'Inoa', 'Lasser', 'Nesheim', 'Peveto', 'Reckner', 'Rydzewski', 'Shartzer', 'Smouse', 'Tipple', 'Wantland', 'Wolfert', 'Yordy', 'Zuleta', 'Heimerl', 'Mccarren', 'Cabeza', 'Neice', 'Kassem', 'Hodgen', 'Charrier', 'Duggar', 'Blacksmith', 'Cush', 'Trunnell', 'Laventure', 'Salahuddin', 'Batalla', 'Brahmbhatt', 'Breslow', 'Cua', 'Deatley', 'Digrazia', 'Divirgilio', 'Falin', 'Freiberger', 'Gladish', 'Holyoak', 'Lazos', 'Loader', 'Mcclafferty', 'Meloni', 'Muhr', 'Salzwedel', 'Schaab', 'Shehadeh', 'Suresh', 'Verdusco', 'Younglove', 'Damman', 'Fulco', 'Neikirk', 'Laver', 'Biro', 'Shill', 'Labarr', 'Kari', 'Mcclory', 'Torelli', 'Knock', 'Dormer', 'Papin', 'Stoneham', 'Weathington', 'Albus', 'Andel', 'Banville', 'Cassens', 'Chalifoux', 'Dellaquila', 'Depauw', 'Deschene', 'Genung', 'Greider', 'Luhman', 'Mastropietro', 'Mignogna', 'Pisarski', 'Terrien', 'Thomure', 'Tornabene', 'Beheler', 'Chimento', 'Engelbert', 'Gambone', 'Goettl', 'Jasperson', 'Kovalenko', 'Infinger', 'Timbs', 'Dasgupta', 'Purdon', 'Velie', 'Eland', 'Ankrum', 'Narain', 'Mcfarling', 'Creagh', 'Bunyan', 'Rattigan', 'Reddix', 'Aumann', 'Beilfuss', 'Bogosian', 'Bramel', 'Burlingham', 'Cruzan', 'Demel', 'Dorff', 'Figley', 'Friesz', 'Huffstutler', 'Mcdaris', 'Meinecke', 'Moench', 'Newville', 'Normile', 'Pfund', 'Pilar', 'Seckman', 'Szoke', 'Zyla', 'Freilich', 'Hammerle', 'Kopel', 'Liskey', 'Mesina', 'Schlicher', 'Dalen', 'Bettin', 'Malanga', 'Dern', 'Tuckey', 'Warder', 'Harren', 'Siner', 'Mahdi', 'Ahmann', 'Allor', 'Claywell', 'Corkill', 'Follansbee', 'Iseman', 'Lawter', 'Myslinski', 'Sauser', 'Tornatore', 'Bhasin', 'Governale', 'Karstens', 'Klocek', 'Stempien', 'Petrino', 'Kohlmeier', 'Igou', 'Sari', 'Mareno', 'Bouche', 'Romas', 'Urey', 'Sprott', 'Ponzo', 'Nevills', 'Affolter', 'Alleva', 'Allgaier', 'Azbell', 'Branagan', 'Fiebig', 'Geremia', 'Grabert', 'Grahl', 'Gruwell', 'Koebel', 'Krauter', 'Kuhnert', 'Kuperman', 'Laverdiere', 'Leuck', 'Masella', 'Mierzejewski', 'Platek', 'Samaan', 'Selsor', 'Vickroy', 'Whitenack', 'Zanella', 'Cavagnaro', 'Galioto', 'Schoeneman', 'Zanotti', 'Bort', 'Alpaugh', 'Culverhouse', 'Perona', 'Wheelwright', 'Amber', 'Bradner', 'Sedberry', 'Goethe', 'Swygert', 'Nisbett', 'Harts', 'Pendelton', 'Keita', 'Addair', 'Anania', 'Armagost', 'Brumett', 'Butala', 'Celmer', 'Forquer', 'Hagadorn', 'Jalomo', 'Koranda', 'Lemmond', 'Liske', 'Mcglamery', 'Ramiro', 'Tickner', 'Toso', 'Tosti', 'Beerbower', 'Bichler', 'Buege', 'Cadotte', 'Chiong', 'Romberger', 'Mandarino', 'Deter', 'Wallack', 'Bligh', 'Harer', 'Terral', 'Hobert', 'Doren', 'Affleck', 'Marquess', 'Lewton', 'Covel', 'Reff', 'Gowins', 'Claybrooks', 'Artiles', 'Brunelli', 'Campusano', 'Deshaies', 'Elpers', 'Fait', 'Heathcote', 'Katayama', 'Landreneau', 'Nardelli', 'Padovano', 'Pendry', 'Santillano', 'Ubaldo', 'Wurz', 'Bathke', 'Fillers', 'Reitano', 'Patrone', 'Mountford', 'Farran', 'Burdo', 'Danish', 'Windell', 'Amrine', 'Pilgreen', 'Pross', 'Bowery', 'Girdner', 'Stockley', 'Chisom', 'Bigos', 'Cavallero', 'Choma', 'Chorba', 'Doubek', 'Eynon', 'Fitzmorris', 'Gergely', 'Hilsabeck', 'Hime', 'Kafer', 'Kilday', 'Lairson', 'Mccanless', 'Meenan', 'Mossburg', 'Muscato', 'Raap', 'Ramp', 'Reali', 'Reinard', 'Rivadeneira', 'Schwenn', 'Serbin', 'Soeder', 'Wagle', 'Jablonowski', 'Vanni', 'Grapes', 'Hilleary', 'Mondor', 'Natalie', 'Seat', 'Heming', 'Waide', 'Haverly', 'Eva', 'Marshman', 'Mais', 'Portlock', 'Scoby', 'Sharps', 'Buday', 'Bumbalough', 'Burback', 'Carano', 'Eustis', 'Flaim', 'Fraticelli', 'Grimme', 'Heape', 'Hoaglin', 'Kreuser', 'Odgers', 'Pastorius', 'Pavek', 'Rogoff', 'Skorupski', 'Stene', 'Tomasino', 'Varble', 'Vasek', 'Woolums', 'Arcaro', 'Graley', 'Larkey', 'Ortlieb', 'Piccone', 'Verhey', 'Inch', 'Laroe', 'Brockmeier', 'Familia', 'Soll', 'Duplechin', 'Blevens', 'Gell', 'Hipkins', 'Kleinpeter', 'Swindall', 'Sabir', 'Kinloch', 'Muldrew', 'Clausell', 'Bouch', 'Casciano', 'Dewhirst', 'Draney', 'Fourman', 'Fuente', 'Ganci', 'Gentzler', 'Gerhold', 'Ingoglia', 'Jerabek', 'Keisling', 'Larivee', 'Negro', 'Pelchat', 'Quilty', 'Reinig', 'Rubeck', 'Rudick', 'Rulli', 'Spagnoli', 'Wiltsie', 'Vitolo', 'Neuhauser', 'Khurana', 'Vint', 'Kant', 'Nead', 'Deroy', 'Ransford', 'Stromer', 'Buley', 'Bloxom', 'Rieves', 'Bastos', 'Deckman', 'Duenes', 'Hessling', 'Kresse', 'Langdale', 'Penberthy', 'Polyak', 'Sagun', 'Salehi', 'Sas', 'Soja', 'Spieth', 'Verhulst', 'Walen', 'Woodling', 'Acierno', 'Bergsma', 'Biskup', 'Buonomo', 'Gores', 'Koffman', 'Redder', 'Ishak', 'Billow', 'Ratledge', 'Widder', 'Margerum', 'Bussing', 'Caccamo', 'Carozza', 'Cwik', 'Forner', 'Goeden', 'Greninger', 'Hartenstein', 'Hermida', 'Krutz', 'Kubes', 'Kulow', 'Lynott', 'Mank', 'Meinders', 'Mikrut', 'Moots', 'Patek', 'Pogorzelski', 'Reinstein', 'Ruiter', 'Rupard', 'Salvia', 'Sissom', 'Sligar', 'Spendlove', 'Vian', 'Wissing', 'Witucki', 'Brossart', 'Warhurst', 'Staron', 'Gilly', 'Borck', 'Mccarn', 'Stanbery', 'Aydelotte', 'Etters', 'Rho', 'Menzer', 'Knoble', 'Luallen', 'Meda', 'Myre', 'Nevils', 'Seide', 'Rouser', 'Bernas', 'Bressette', 'Dohn', 'Domina', 'Filion', 'Fossen', 'Grunder', 'Hofland', 'Larranaga', 'Launius', 'Lento', 'Mohrmann', 'Papenfuss', 'Polcyn', 'Pollina', 'Reinheimer', 'Rueb', 'Sacher', 'Sauseda', 'Whitwell', 'Caspers', 'Dejager', 'Kastelic', 'Kildow', 'Sappenfield', 'Schultes', 'Tucciarone', 'Gogan', 'Sarti', 'Percle', 'Cagney', 'Wasley', 'Getts', 'Sahm', 'Brandle', 'Osbon', 'Febres', 'Billett', 'Pall', 'Spearing', 'Thursby', 'Junious', 'Allenbaugh', 'Calamia', 'Cregan', 'Hostettler', 'Leete', 'Pirrone', 'Ploeger', 'Revak', 'Sarlo', 'Sayavong', 'Schlichter', 'Shonkwiler', 'Soots', 'Spak', 'Thien', 'Torgeson', 'Urbanczyk', 'Vredenburg', 'Wormuth', 'Yankovich', 'Badertscher', 'Holewinski', 'Kalinoski', 'Kwasny', 'Neidert', 'Remmel', 'Uram', 'Zettlemoyer', 'Sanna', 'Walthers', 'Kinkaid', 'Rummage', 'Vane', 'Morgen', 'Stum', 'Ainsley', 'Mckelvie', 'Barbin', 'Shariff', 'Blanchett', 'Mayon', 'Broadie', 'Millien', 'Azzarello', 'Bocock', 'Bohlander', 'Brennecke', 'Daman', 'Dixit', 'Goth', 'Kocur', 'Koslow', 'Loncar', 'Narez', 'Oleksy', 'Ouderkirk', 'Rathe', 'Sandmann', 'Scarpino', 'Siegman', 'Soloway', 'Tomeo', 'Vantuyl', 'Benesch', 'Doornbos', 'Gisler', 'Nistler', 'Pelzel', 'Piecuch', 'Schweiss', 'Zieba', 'Domangue', 'Curti', 'Iams', 'Viger', 'Sandefer', 'Maybury', 'Haneline', 'Shappell', 'Charlier', 'Belardo', 'Lynk', 'Ocain', 'Ismael', 'Blacksher', 'Lesesne', 'Blash', 'Fantroy', 'Bucciarelli', 'Deruiter', 'Fetner', 'Filla', 'Frontera', 'Furlan', 'Goepfert', 'Gorsline', 'Gugino', 'Kleis', 'Kriger', 'Lebarron', 'Lesnick', 'Losano', 'Macquarrie', 'Marczak', 'Mazariego', 'Moraes', 'Murano', 'Myint', 'Philley', 'Ruffalo', 'Salyards', 'Swab', 'Trester', 'Vlcek', 'Abramo', 'Kaczmarski', 'Mastronardi', 'Lafont', 'Tomerlin', 'Mchan', 'Blanda', 'Deandrade', 'Klien', 'Meno', 'Maia', 'Durall', 'Lansdowne', 'Cones', 'Adley', 'Taffe', 'Ikard', 'Sylve', 'Bartok', 'Farler', 'Farnworth', 'Gookin', 'Guijarro', 'Hazan', 'Hosterman', 'Klees', 'Knust', 'Leadingham', 'Lefeber', 'Maisch', 'Muchmore', 'Pini', 'Polinsky', 'Quakenbush', 'Rought', 'Ruta', 'Tingen', 'Urness', 'Valade', 'Wadle', 'Hietala', 'Hockenbury', 'Ivanoff', 'Mcevers', 'Miyazaki', 'Druckenmiller', 'Neisler', 'Vroom', 'Berland', 'Rizor', 'Caris', 'Jenison', 'Folmer', 'Si', 'Pulling', 'Houge', 'Snuggs', 'Enis', 'Peeks', 'Stacker', 'Destin', 'Ojo', 'Barraco', 'Childree', 'Ciszewski', 'Dicenzo', 'Gowing', 'Granquist', 'Kapinos', 'Khalili', 'Kienitz', 'Konrath', 'Kosa', 'Schilz', 'Sealock', 'Soucek', 'Stefanko', 'Trow', 'Udy', 'Fricano', 'Hunnewell', 'Sieler', 'Stranahan', 'Thammavongsa', 'Zettel', 'Cutrell', 'Balter', 'Clavel', 'Thibert', 'Ondo', 'Senna', 'Kun', 'Maximo', 'Wares', 'Caldeira', 'Furgerson', 'Franklyn', 'Christophe', 'Bady', 'Blanken', 'Boike', 'Cuen', 'Davidian', 'Glauser', 'Gleave', 'Guzy', 'Halleck', 'Kempfer', 'Kenkel', 'Kloth', 'Knable', 'Mcenery', 'Pizzolato', 'Schryver', 'Seminara', 'Shenoy', 'Somera', 'Stroop', 'Weirick', 'Yatsko', 'Evanko', 'Koegel', 'Lastinger', 'Schrenk', 'Vitullo', 'Holste', 'Susa', 'Pedley', 'Cove', 'Levett', 'Gillyard', 'Boeding', 'Delpozo', 'Denoyer', 'Farese', 'Floro', 'Gavina', 'Hargus', 'Kisiel', 'Konig', 'Krotz', 'Lundblad', 'Masoner', 'Mumper', 'Nolf', 'Sandgren', 'Schussler', 'Shallcross', 'Singhal', 'Standen', 'Teta', 'Vacanti', 'Yokota', 'Borski', 'Filice', 'Frankum', 'Kleinsmith', 'Plauche', 'Spohr', 'Goya', 'Rosensteel', 'Srey', 'Touhey', 'Launer', 'Dome', 'Mossey', 'Mclay', 'Sturgess', 'Demond', 'Buren', 'Millin', 'Riddles', 'Arps', 'Dugar', 'Carradine', 'Brasseur', 'Burchill', 'Champoux', 'Chojnowski', 'Cyphert', 'Devincentis', 'Donze', 'Gaspari', 'Harshberger', 'Merchan', 'Mulgrew', 'Parma', 'Pasqua', 'Pierpoint', 'Rozeboom', 'Rumery', 'Stahle', 'Stierwalt', 'Swander', 'Tiegs', 'Trabucco', 'Withington', 'Frericks', 'Kilman', 'Locastro', 'Samonte', 'Sanko', 'Wisman', 'Flecha', 'Coplan', 'Zafra', 'Art', 'Maxam', 'Cavaness', 'Willi', 'Vanliew', 'Fresh', 'Bauserman', 'Bergemann', 'Buchler', 'Curbow', 'Dimascio', 'Einstein', 'Favila', 'Galeno', 'Granat', 'Halteman', 'Janczak', 'Janicek', 'Jundt', 'Karren', 'Modesitt', 'Provance', 'Reasons', 'Riveron', 'Salts', 'Salvino', 'Sawhney', 'Shallenberger', 'Sirk', 'Tylka', 'Baumler', 'Mcmenamy', 'Territo', 'Thackeray', 'Much', 'Papageorge', 'Rynders', 'Bacigalupo', 'Fulwider', 'Hendricksen', 'Lepre', 'Mangel', 'Dering', 'Soda', 'Bazar', 'Dinning', 'Portera', 'Schatzman', 'Kernodle', 'Bashford', 'Ferrebee', 'Cortner', 'Sanker', 'Livings', 'Jemmott', 'Arzaga', 'Cihak', 'Cobarrubias', 'Coey', 'Coutinho', 'Deneau', 'Droll', 'Hickel', 'Hillmann', 'Kitto', 'Lefebre', 'Lev', 'Mato', 'Mcomber', 'Norlin', 'Renstrom', 'Rhyner', 'Sacca', 'Sangha', 'Sutor', 'Dwire', 'Huyser', 'Kressin', 'Moilanen', 'Picado', 'Schmidlin', 'Albor', 'Zaldana', 'Failor', 'Dubberly', 'Youse', 'Mohiuddin', 'Shiel', 'Loran', 'Hamme', 'Castine', 'Lanum', 'Mcelderry', 'Riggsbee', 'Madkins', 'Abts', 'Bekker', 'Boccio', 'Florin', 'Lienemann', 'Madara', 'Manganello', 'Mcfetridge', 'Medsker', 'Minish', 'Roberg', 'Sajdak', 'Schwall', 'Sedivy', 'Suto', 'Wieberg', 'Catherman', 'Ficker', 'Leckrone', 'Lieder', 'Rodak', 'Tomek', 'Everard', 'Spong', 'Glacken', 'Polka', 'Aley', 'Farro', 'Stanwood', 'Petter', 'Desrosier', 'Kerl', 'Goslee', 'Burston', 'Pennywell', 'Birchard', 'Federer', 'Flicker', 'Frangos', 'Korhonen', 'Kozikowski', 'Kyger', 'Mccoskey', 'Standing', 'Terada', 'Trierweiler', 'Trupiano', 'Urbanowicz', 'Viegas', 'Ybarbo', 'Brinlee', 'Daddona', 'Deisher', 'Schwieger', 'Farless', 'Slaght', 'Jarvie', 'Hunkins', 'Barrack', 'Bisset', 'Bruley', 'Molen', 'Milas', 'Matts', 'Wickware', 'Timbers', 'Minus', 'Kennebrew', 'Boorman', 'Faughn', 'Feight', 'Githens', 'Hazelrigg', 'Hutzell', 'Klang', 'Kogler', 'Lechtenberg', 'Malachowski', 'Orsburn', 'Retz', 'Saban', 'Tak', 'Underdahl', 'Veldman', 'Virnig', 'Wanat', 'Achord', 'Drenth', 'Heibel', 'Hendee', 'Raiche', 'Saunier', 'Wertheim', 'Forand', 'Stathis', 'Raider', 'Cassaro', 'Cly', 'Hagey', 'Moad', 'Rhody', 'Fogler', 'Hellen', 'Sweezy', 'Farid', 'Suddreth', 'Kenneth', 'Pindell', 'Corney', 'Monsanto', 'Laye', 'Lingard', 'Armwood', 'Asif', 'Axley', 'Barkan', 'Bittel', 'Boesen', 'Camilli', 'Champa', 'Dauenhauer', 'Ehrmann', 'Gangl', 'Gatrell', 'Gehret', 'Hankel', 'Kalbach', 'Kessell', 'Khatoon', 'Lanese', 'Manco', 'Masteller', 'Pruner', 'Remmert', 'Valasek', 'Vater', 'Yurick', 'Zavalza', 'Biesecker', 'Frankenberg', 'Hovorka', 'Poissant', 'Neises', 'Moog', 'Hadad', 'Wittenburg', 'Devere', 'Hynds', 'Sparkes', 'Brailey', 'Addo', 'Accetta', 'Altschuler', 'Amescua', 'Corredor', 'Didonna', 'Jencks', 'Keady', 'Keidel', 'Mancebo', 'Matusiak', 'Rakoczy', 'Reamy', 'Sardella', 'Slotnick', 'Fotheringham', 'Gettman', 'Kunzler', 'Manganaro', 'Manygoats', 'Huelskamp', 'Newsham', 'Kohen', 'Cong', 'Goulden', 'Timmers', 'Aderhold', 'Shinall', 'Cowser', 'Uzzle', 'Harps', 'Balster', 'Baringer', 'Bechler', 'Billick', 'Chenard', 'Ditta', 'Fiallos', 'Kampe', 'Kretzschmar', 'Manukyan', 'Mcbreen', 'Mcmanigal', 'Miko', 'Mol', 'Orrego', 'Penalosa', 'Ronco', 'Thum', 'Tupa', 'Vittitow', 'Wierzba', 'Gavitt', 'Hampe', 'Kowalkowski', 'Neuroth', 'Sterkel', 'Herling', 'Boldman', 'Camus', 'Drier', 'Arcia', 'Feagans', 'Thompsen', 'Maka', 'Villalona', 'Bonito', 'Buist', 'Dato', 'Yankey', 'Daluz', 'Hollands', 'Durio', 'Bradly', 'Daffin', 'Chhabra', 'Dettling', 'Dolinger', 'Flenniken', 'Henrichsen', 'Laduca', 'Lashomb', 'Leick', 'Luchini', 'Mcmanaman', 'Minkoff', 'Nobbe', 'Oyster', 'Quintela', 'Robar', 'Sakurai', 'Solak', 'Stolt', 'Taddei', 'Puopolo', 'Schwarzkopf', 'Stango', 'Mcparland', 'Schembri', 'Standefer', 'Dayan', 'Sculley', 'Bhuiyan', 'Delauder', 'Harrity', 'Bree', 'Haste', 'Mcbay', 'Garmany', 'Maison', 'Common', 'Hanton', 'Aigner', 'Aliaga', 'Boeve', 'Cromie', 'Demick', 'Filipowicz', 'Frickey', 'Garrigus', 'Heindl', 'Hilmer', 'Homeyer', 'Lanterman', 'Larch', 'Masci', 'Minami', 'Palmiter', 'Rufener', 'Saal', 'Sarmento', 'Seewald', 'Sestito', 'Somarriba', 'Sparano', 'Vorce', 'Wombles', 'Zarr', 'Antonson', 'Bruzzese', 'Chillemi', 'Dannunzio', 'Hogrefe', 'Mastandrea', 'Moynahan', 'Wangerin', 'Wedeking', 'Ziobro', 'Flegel', 'Axsom', 'Buzby', 'Slowey', 'Cuebas', 'App', 'Dar', 'Robers', 'Elting', 'Demus', 'Finkley', 'Taborn', 'Balogun', 'Binstock', 'Gebel', 'Hinnenkamp', 'Kosta', 'Lamphear', 'Linhares', 'Luzader', 'Mcconathy', 'Motl', 'Mroczkowski', 'Reznicek', 'Rieken', 'Sadlowski', 'Schlink', 'Snuffer', 'Tep', 'Vaske', 'Whisner', 'Amesquita', 'Demler', 'Macdonell', 'Rajala', 'Sandate', 'Kolk', 'Bickerton', 'Dugal', 'Kirtland', 'Neilan', 'Doughman', 'Crye', 'Depena', 'Quire', 'Hafeez', 'Rosse', 'Devon', 'Deboe', 'Kitchings', 'Blackett', 'Acey', 'Mcculler', 'Obie', 'Pleas', 'Lurry', 'Abid', 'Bierlein', 'Boisclair', 'Cabanilla', 'Celano', 'Conrow', 'Deeley', 'Frankhouser', 'Janowiak', 'Jarchow', 'Mcnicol', 'Peixoto', 'Pompeo', 'Reitmeyer', 'Scalera', 'Schnorr', 'Sideris', 'Solesbee', 'Stejskal', 'Strole', 'Tosto', 'Wenke', 'Dombek', 'Gottschall', 'Halbur', 'Kuchler', 'Kuyper', 'Wruck', 'Lorenc', 'Search', 'Chohan', 'Monda', 'Clowes', 'Farson', 'Rhoad', 'Clavin', 'Ramus', 'Hayley', 'Dolley', 'Menton', 'Dejarnett', 'Challenger', 'Branner', 'Shed', 'Dada', 'Flewellen', 'Volcy', 'Allphin', 'Barberena', 'Bencivenga', 'Bienkowski', 'Bossi', 'Corsini', 'Dardis', 'Falconi', 'Fitzhenry', 'Gaglione', 'Handlin', 'Kainz', 'Lorge', 'Nase', 'Pavich', 'Perelman', 'Shanafelt', 'Towsley', 'Weill', 'Zollars', 'Appelt', 'Cannizzo', 'Carrubba', 'Detar', 'Dobrzynski', 'Hashman', 'Maassen', 'Mccullagh', 'Rettinger', 'Roediger', 'Rybolt', 'Savoca', 'Wortmann', 'Boria', 'Mairs', 'Stream', 'Larmore', 'Sama', 'Graden', 'Hollinshead', 'Mandy', 'Gidney', 'Demming', 'Alexandra', 'Caleb', 'Abdullahi', 'Cabal', 'Dikeman', 'Ellenbecker', 'Fosnaugh', 'Funck', 'Heidorn', 'Housden', 'Ilic', 'Illescas', 'Kohlmann', 'Lagman', 'Larez', 'Penafiel', 'Pense', 'Ragonese', 'Reitan', 'Shetterly', 'Trapasso', 'Zank', 'Zecca', 'Grisanti', 'Hemker', 'Mascolo', 'Muhlenkamp', 'Riemann', 'Schindel', 'Uncapher', 'Zelman', 'Koper', 'Byrn', 'Calzadilla', 'Dilly', 'Beiser', 'Maller', 'Bagg', 'Winnick', 'Sillman', 'Bilton', 'Esmond', 'Sconyers', 'Lemma', 'Geralds', 'Lazare', 'Threats', 'Accurso', 'Boitnott', 'Calcagni', 'Chavera', 'Corda', 'Delisio', 'Demont', 'Eichel', 'Faulds', 'Ficek', 'Gappa', 'Graci', 'Hammaker', 'Heino', 'Katcher', 'Keslar', 'Larsh', 'Lashua', 'Leising', 'Magri', 'Manbeck', 'Mcwatters', 'Mixer', 'Moder', 'Noori', 'Pallo', 'Pfingsten', 'Plett', 'Prehn', 'Reyburn', 'Savini', 'Sebek', 'Thang', 'Lemberg', 'Xiang', 'Stiegler', 'Groman', 'Bowlen', 'Grignon', 'Morren', 'Dini', 'Mcaulay', 'Ngu', 'Bethell', 'Warring', 'Belyeu', 'Ramcharan', 'Mcjunkins', 'Alms', 'Ayub', 'Brem', 'Dresen', 'Flori', 'Geesaman', 'Haugan', 'Heppler', 'Hermance', 'Korinek', 'Lograsso', 'Madriaga', 'Milbrath', 'Radwan', 'Riemersma', 'Rivett', 'Roggenbuck', 'Stefanick', 'Storment', 'Ziegenfuss', 'Blackhurst', 'Daquila', 'Maruska', 'Rybka', 'Schweer', 'Tandon', 'Hersman', 'Galster', 'Lemp', 'Hantz', 'Enderson', 'Marchal', 'Conly', 'Bali', 'Canaan', 'Anstead', 'Savary', 'Andy', 'Tisdell', 'Livas', 'Grinage', 'Afanador', 'Alviso', 'Aracena', 'Denio', 'Dentremont', 'Eldreth', 'Gravois', 'Huebsch', 'Kalbfleisch', 'Labree', 'Mones', 'Reitsma', 'Schnapp', 'Seek', 'Shuping', 'Tortorice', 'Viscarra', 'Wahlers', 'Wittner', 'Yake', 'Zamani', 'Carriveau', 'Delage', 'Gargan', 'Goldade', 'Golec', 'Lapage', 'Meinhart', 'Mierzwa', 'Riggenbach', 'Schloesser', 'Sedam', 'Winkels', 'Woldt', 'Beckers', 'Teach', 'Feagan', 'Booe', 'Slates', 'Bears', 'Market', 'Moone', 'Verdun', 'Ibe', 'Jeudy', 'Agudo', 'Brisendine', 'Casillo', 'Chalupa', 'Daversa', 'Fissel', 'Fleites', 'Giarratano', 'Glackin', 'Granzow', 'Hawver', 'Hayashida', 'Hovermale', 'Huaman', 'Jezek', 'Lansdell', 'Loughery', 'Niedzielski', 'Orellano', 'Pebley', 'Rojek', 'Tomic', 'Yellen', 'Zerkle', 'Boettner', 'Decook', 'Digilio', 'Dinsdale', 'Germer', 'Kleve', 'Marcinek', 'Mendicino', 'Pehl', 'Revoir', 'Osmun', 'Bahner', 'Shone', 'Howald', 'Kanode', 'Amari', 'Enix', 'Levene', 'Joffrion', 'Lenis', 'Carmicheal', 'Njoku', 'Coffel', 'Ditter', 'Grupp', 'Kabel', 'Kanzler', 'Konop', 'Lupi', 'Mautz', 'Mccahill', 'Perella', 'Perich', 'Rion', 'Ruvolo', 'Torio', 'Vipperman', 'Bentivegna', 'Formanek', 'Smet', 'Tarquinio', 'Wesche', 'Dearinger', 'Makara', 'Duba', 'Iser', 'Nicklow', 'Wignall', 'Wanger', 'Goda', 'Huckstep', 'Basse', 'Debruhl', 'Hainey', 'Damour', 'Ebbs', 'Armond', 'Ewings', 'Rease', 'Okoye', 'Arentz', 'Baack', 'Bellantoni', 'Buckholz', 'Cirigliano', 'Colletta', 'Dutka', 'Everingham', 'Gilardi', 'Hudelson', 'Klimczak', 'Kneip', 'Papaleo', 'Peregrino', 'Piechowski', 'Prucha', 'Ryburn', 'Scholle', 'Scholtes', 'Socarras', 'Wrightsman', 'Yum', 'Campau', 'Dwinell', 'Haupert', 'Lotspeich', 'Madar', 'Casa', 'Michals', 'Rainier', 'Tenpenny', 'Lakeman', 'Spadoni', 'Cantrelle', 'Mangal', 'Chachere', 'Swoope', 'Carwell', 'Voltaire', 'Durrah', 'Roulhac', 'Aboytes', 'Apuzzo', 'Bielinski', 'Bollenbacher', 'Borjon', 'Croak', 'Dansie', 'Espin', 'Euceda', 'Garone', 'Garthwaite', 'Hata', 'Heu', 'Hogenson', 'Jahner', 'Keesey', 'Kotas', 'Labrake', 'Laitinen', 'Laumann', 'Miske', 'Nickless', 'Onishi', 'Setty', 'Shinnick', 'Takayama', 'Tassinari', 'Tribe', 'Bowdish', 'Friesenhahn', 'Hoffarth', 'Wachowski', 'Gudgel', 'Gautney', 'Matar', 'Ellenberg', 'Inghram', 'Bevil', 'Rasul', 'Niblack', 'Perkin', 'Goring', 'Potier', 'Bachrach', 'Bozza', 'Budz', 'Devens', 'Ditzel', 'Drahos', 'Ducat', 'Fahrner', 'Friedline', 'Geurin', 'Goodenow', 'Greenfeld', 'Grunow', 'Ingber', 'Kashani', 'Kochman', 'Kozub', 'Kukuk', 'Leppo', 'Liew', 'Metheney', 'Molony', 'Montemurro', 'Neiss', 'Postlethwait', 'Quaglia', 'Ruszkowski', 'Shertzer', 'Titone', 'Waldmann', 'Wenninger', 'Wheeland', 'Zorich', 'Mervine', 'Weatherholtz', 'Brotman', 'Malenfant', 'Nong', 'Rogness', 'Dibert', 'Gallahan', 'Gange', 'Chilcott', 'Axt', 'Wiler', 'Jacot', 'Ory', 'Abdon', 'Fenter', 'Bryars', 'Ramroop', 'Jacox', 'Mainer', 'Figures', 'Alig', 'Bires', 'Cassata', 'Cholewa', 'Dispenza', 'Eckmann', 'Gauer', 'Gloor', 'Hattori', 'Huster', 'Kopplin', 'Krugman', 'Lancon', 'Ledin', 'Limb', 'Marentes', 'Minges', 'Monacelli', 'Monteon', 'Naslund', 'Nitsche', 'Rapozo', 'Rimkus', 'Schwerdtfeger', 'Vandenbos', 'Balandran', 'Biehn', 'Briody', 'Hackmann', 'Kalka', 'Keranen', 'Lortie', 'Mannella', 'Shiffler', 'Stempel', 'Takaki', 'Tomassi', 'Reidel', 'Ciprian', 'Penza', 'Vite', 'Cormany', 'Derousse', 'Beus', 'Shurley', 'Courtwright', 'Donna', 'Karney', 'Keats', 'Harron', 'Stjacques', 'Regester', 'Stoke', 'Garron', 'Sulaiman', 'Fusilier', 'Hence', 'Altidor', 'Rollerson', 'Anfinson', 'Badua', 'Balmaceda', 'Bringman', 'Bystrom', 'Goffinet', 'Guindon', 'Hilling', 'Makepeace', 'Mooradian', 'Muzquiz', 'Newcom', 'Perrella', 'Postlewait', 'Raetz', 'Riveros', 'Saephanh', 'Scharer', 'Sheeder', 'Sitar', 'Umlauf', 'Voegeli', 'Yurkovich', 'Chaudhari', 'Chianese', 'Clonch', 'Gasparini', 'Giambalvo', 'Gindlesperger', 'Rauen', 'Riegert', 'Collingsworth', 'Stief', 'Zeisler', 'Kirsten', 'Vessey', 'Scherman', 'Ledwith', 'Goudie', 'Ayre', 'Salome', 'Knoles', 'Munyan', 'Corbet', 'Hagewood', 'Humphry', 'Bernardez', 'Drummonds', 'Lide', 'Veals', 'Andolina', 'Anzaldo', 'Aufiero', 'Bakshi', 'Berdan', 'Birrell', 'Colcord', 'Dutro', 'Eisenhour', 'Falgoust', 'Foertsch', 'Forlenza', 'Harroun', 'Kurtenbach', 'Livesey', 'Luka', 'Manseau', 'Mcdaid', 'Miska', 'Overley', 'Panzica', 'Reish', 'Riolo', 'Roseland', 'Shenberger', 'Splinter', 'Strupp', 'Sturgell', 'Swatzell', 'Totherow', 'Villarroel', 'Wenig', 'Zimny', 'Brunetto', 'Hiester', 'Kasinger', 'Laverde', 'Mihalek', 'Aquila', 'Moreton', 'Collums', 'Ergle', 'Keziah', 'Bourbon', 'Scaff', 'Leamy', 'Sublette', 'Winkley', 'Arlington', 'Cuffe', 'Guity', 'Mcmickle', 'Summerour', 'Baggerly', 'Biltz', 'Calma', 'Dephillips', 'Graffam', 'Holsopple', 'Izumi', 'Joerger', 'Kesselman', 'Kingdon', 'Kinkel', 'Knezevich', 'Liebler', 'Maceda', 'Qualey', 'Robeck', 'Sciarrino', 'Sooy', 'Stahly', 'Stieglitz', 'Strike', 'Unwin', 'Urizar', 'Volmer', 'Winterstein', 'Aronov', 'Czyz', 'Marrazzo', 'Seagren', 'Wiegmann', 'Yearsley', 'Brommer', 'Sterne', 'Armel', 'Kryger', 'Barten', 'Bodwell', 'Hollett', 'Sharron', 'Scobey', 'Croson', 'Gainor', 'Axel', 'Basore', 'Bengel', 'Chiem', 'Desanctis', 'Gillooly', 'Groulx', 'Hulings', 'Koenigsberg', 'Kuchinski', 'Pagaduan', 'Pataky', 'Rietz', 'Robello', 'Schuchman', 'Shek', 'Brattain', 'Gottwald', 'Klapperich', 'Kosky', 'Ruegg', 'Smid', 'Taillon', 'Klonowski', 'Attar', 'Mansoor', 'Daus', 'Falla', 'Guyot', 'Hasten', 'Mcdowall', 'Tugwell', 'Remo', 'Dishmon', 'Leggette', 'Sudler', 'Prescod', 'Benvenuti', 'Bittenbender', 'Burkland', 'Crehan', 'Donjuan', 'Ewbank', 'Fluegel', 'Freiman', 'Fuelling', 'Grabner', 'Gras', 'Horr', 'Jurich', 'Kentner', 'Laski', 'Minero', 'Olivos', 'Porro', 'Purves', 'Smethers', 'Spallone', 'Stangler', 'Gebbia', 'Fowers', 'Gaster', 'Fero', 'Gamon', 'Wiss', 'Strassner', 'Cott', 'Houp', 'Fidel', 'Parisien', 'Daisy', 'Calais', 'Boers', 'Bolle', 'Caccavale', 'Colantonio', 'Columbo', 'Goswami', 'Hakanson', 'Jelley', 'Kahlon', 'Lopezgarcia', 'Marier', 'Mesko', 'Monter', 'Mowell', 'Piech', 'Shortell', 'Slechta', 'Starman', 'Tiemeyer', 'Troutner', 'Vandeveer', 'Voorheis', 'Woodhams', 'Helget', 'Kalk', 'Kiester', 'Lagace', 'Obst', 'Parrack', 'Rennert', 'Rodeheaver', 'Schuermann', 'Warmuth', 'Wisnieski', 'Yahnke', 'Yurek', 'Faver', 'Belleau', 'Moan', 'Remsen', 'Bonano', 'Genson', 'Genis', 'Risen', 'Franze', 'Lauderback', 'Ferns', 'Cooler', 'Mcwilliam', 'Micheals', 'Gotch', 'Teat', 'Bacus', 'Banik', 'Bernhart', 'Doell', 'Francese', 'Gasbarro', 'Gietzen', 'Gossen', 'Haberle', 'Havlicek', 'Henion', 'Kevorkian', 'Liem', 'Loor', 'Moede', 'Mostafa', 'Mottern', 'Naito', 'Nofsinger', 'Omelia', 'Pirog', 'Pirone', 'Plucinski', 'Raghavan', 'Robaina', 'Seliga', 'Stade', 'Steinhilber', 'Wedin', 'Wieman', 'Zemaitis', 'Creps', 'Gumina', 'Inglett', 'Jhaveri', 'Kolasinski', 'Salvesen', 'Vallely', 'Weseman', 'Zysk', 'Gourlay', 'Zanger', 'Delorey', 'Sneider', 'Tacey', 'Valls', 'Ossman', 'Watton', 'Breau', 'Burell', 'Villard', 'Janice', 'Honor', 'Arterberry', 'Sow', 'Cucchiara', 'Diemert', 'Fluty', 'Guadiana', 'Ionescu', 'Kearley', 'Krzyzanowski', 'Lavecchia', 'Lundmark', 'Melichar', 'Mulkern', 'Odonohue', 'Payment', 'Pinnow', 'Popoff', 'Prus', 'Reinoehl', 'Scarlata', 'Schamp', 'Schowalter', 'Scinto', 'Semmler', 'Sheline', 'Sigg', 'Trauger', 'Bleiler', 'Carrino', 'Hauth', 'Kunsman', 'Reicks', 'Rotenberg', 'Soltesz', 'Wascher', 'Mattina', 'Tamblyn', 'Bellanca', 'Heward', 'Seif', 'Agha', 'Gosling', 'Defreese', 'Lyvers', 'Robley', 'Hadi', 'Ledyard', 'Mitchner', 'Berrien', 'Clinkscale', 'Affeldt', 'Aung', 'Azpeitia', 'Boehnlein', 'Cavan', 'Ekdahl', 'Ellyson', 'Fahl', 'Herrig', 'Hulick', 'Ihrke', 'Kaeding', 'Keagy', 'Mehlman', 'Minniear', 'Paniccia', 'Pleva', 'Prestidge', 'Pulford', 'Quattrone', 'Riquelme', 'Rombach', 'Sarwar', 'Sivertsen', 'Sprang', 'Wardrop', 'Anglemyer', 'Bobek', 'Scronce', 'Snethen', 'Stancliff', 'Booton', 'Pinal', 'Weihe', 'Bria', 'Lopresto', 'Awbrey', 'Fogal', 'Ning', 'Hydrick', 'Lumb', 'Pommier', 'Hendy', 'Armon', 'Spenser', 'Beachem', 'Decrescenzo', 'Heaphy', 'Kalata', 'Kastl', 'Kosel', 'Kunert', 'Laatsch', 'Lanpher', 'Malinski', 'Mazzie', 'Neuendorf', 'Salloum', 'Tays', 'Yackel', 'Calvario', 'Feese', 'Feldner', 'Kness', 'Kozuch', 'Magat', 'Pantalone', 'Rilling', 'Teska', 'Fantauzzi', 'Wartman', 'Stansbery', 'Sox', 'Napp', 'Schauf', 'Cumings', 'Coxon', 'Labor', 'Brash', 'Egleston', 'Quintin', 'Oki', 'Date', 'Tuckett', 'Devaux', 'Hewins', 'Abdelrahman', 'Schumpert', 'Dort', 'Limbrick', 'Linwood', 'Delaine', 'Liverpool', 'Azimi', 'Biever', 'Craigo', 'Eschete', 'Fortini', 'Francom', 'Giacomini', 'Girdler', 'Halasz', 'Hillin', 'Inglese', 'Isaza', 'Lewman', 'Ploetz', 'Rampley', 'Reifsteck', 'Rossano', 'Sanagustin', 'Sotak', 'Spainhower', 'Stecklein', 'Stolberg', 'Teschner', 'Thew', 'Blaszczyk', 'Caradonna', 'Cillo', 'Diluzio', 'Hagemeyer', 'Holstrom', 'Jewkes', 'Mcquaide', 'Osterhaus', 'Twaddle', 'Wenck', 'Yakel', 'Zeiner', 'Zulauf', 'Mirabelli', 'Gerold', 'Sherr', 'Ogando', 'Achilles', 'Woodlee', 'Underdown', 'Peirson', 'Abdelaziz', 'Bently', 'Junes', 'Furtick', 'Muckle', 'Freemon', 'Jamar', 'Scriber', 'Michaux', 'Cheatum', 'Hollings', 'Telfair', 'Amadeo', 'Bargar', 'Berchtold', 'Boomhower', 'Camba', 'Compo', 'Dellavecchia', 'Doring', 'Doyel', 'Geck', 'Giovannini', 'Herda', 'Kopko', 'Kuns', 'Maciag', 'Neenan', 'Neglia', 'Nienhuis', 'Niznik', 'Pieczynski', 'Quintos', 'Quirin', 'Ravi', 'Teaster', 'Tipsword', 'Troiani', 'Consuegra', 'Damaso', 'Garavaglia', 'Pischke', 'Prowse', 'Rumore', 'Simcoe', 'Slentz', 'Sposito', 'Sulser', 'Weichel', 'Sandobal', 'Siver', 'Vickerman', 'Sham', 'Gutridge', 'Gracy', 'Weatherington', 'Benett', 'Nottage', 'Myricks', 'Tukes', 'Alcaide', 'Curatolo', 'Dalziel', 'Fandrich', 'Fisette', 'Gianino', 'Grether', 'Hari', 'Ichikawa', 'Lantzy', 'Monteforte', 'Moskovitz', 'Porritt', 'Raz', 'Rodenbeck', 'Ryczek', 'Strehle', 'Vanduzer', 'Voge', 'Wiker', 'Yanik', 'Zangari', 'Cahue', 'Dellapenna', 'Gohr', 'Gurka', 'Imburgia', 'Langenberg', 'Kivi', 'Pikul', 'Sexson', 'Sharrer', 'Aramburo', 'Kadar', 'Casasola', 'Nina', 'Borras', 'Toledano', 'Wykle', 'Naeem', 'Bailer', 'Lalla', 'Booty', 'Turenne', 'Merrit', 'Duffus', 'Hemmingway', 'Asare', 'Ahlborn', 'Arroyave', 'Brandenberger', 'Carolus', 'Coonan', 'Dacunha', 'Dost', 'Filter', 'Freudenberg', 'Grabski', 'Hengel', 'Holohan', 'Kohne', 'Kollmann', 'Levick', 'Lupinacci', 'Meservey', 'Reisdorf', 'Rodabaugh', 'Shimon', 'Soth', 'Spall', 'Tener', 'Thier', 'Welshans', 'Chermak', 'Ciolino', 'Frantzen', 'Grassman', 'Okuda', 'Passantino', 'Schellinger', 'Sneath', 'Bolla', 'Bobe', 'Maves', 'Matey', 'Shafi', 'Rothchild', 'Ker', 'Verrette', 'Thorington', 'Lathers', 'Merriwether', 'Bendall', 'Bercier', 'Botz', 'Claybaugh', 'Creson', 'Dilone', 'Gabehart', 'Gencarelli', 'Ghormley', 'Hacking', 'Haefele', 'Haros', 'Holderby', 'Krotzer', 'Nanda', 'Oltmanns', 'Orndoff', 'Poniatowski', 'Rosol', 'Sheneman', 'Shifrin', 'Smay', 'Swickard', 'Thayne', 'Tripathi', 'Vonbehren', 'Pummill', 'Schnitker', 'Steines', 'Beechler', 'Faron', 'Villari', 'Spickard', 'Levings', 'Polack', 'Standre', 'Castel', 'Louise', 'Janey', 'Lindor', 'Bulthuis', 'Cantrall', 'Cisnero', 'Dangel', 'Deborde', 'Decena', 'Grandon', 'Gritz', 'Heberlein', 'Kestenbaum', 'Kubitz', 'Luers', 'Naiman', 'Ramaswamy', 'Sek', 'Slauson', 'Walsworth', 'Biehler', 'Capano', 'Casstevens', 'Forgette', 'Furnas', 'Gilkison', 'Janoski', 'Jerde', 'Mcclimans', 'Rohlf', 'Vliet', 'Heeney', 'Zapanta', 'Lighthall', 'Shallow', 'Neils', 'Raikes', 'Clarkston', 'Claud', 'Guilbeaux', 'Pennie', 'Arizola', 'Aud', 'Checketts', 'Corvera', 'Easterbrook', 'Gamm', 'Grassel', 'Guarin', 'Hanf', 'Hitsman', 'Lackman', 'Lubitz', 'Lupian', 'Olexa', 'Omori', 'Oscarson', 'Picasso', 'Plewa', 'Schmahl', 'Stolze', 'Todisco', 'Zarzycki', 'Baluyot', 'Cerrito', 'Elenbaas', 'Gavidia', 'Heisner', 'Karpowicz', 'Neidhardt', 'Silkwood', 'Taras', 'Dobias', 'Widen', 'Blandino', 'Fanguy', 'Probus', 'Guilbert', 'Shadowens', 'Keepers', 'Bruin', 'Hitson', 'Crymes', 'Roston', 'Beaubrun', 'Arrambide', 'Betti', 'Brockhaus', 'Bumanglag', 'Cabreja', 'Dicenso', 'Hartlaub', 'Hertlein', 'Lapenna', 'Rathje', 'Rotert', 'Salzberg', 'Siniard', 'Tomsic', 'Wondra', 'Zenger', 'Norrod', 'Opalka', 'Osment', 'Zhan', 'Lemcke', 'Meranda', 'Joles', 'Labay', 'Monserrate', 'Grime', 'Martha', 'Coltrain', 'Vardaman', 'Wragg', 'Frater', 'Offer', 'Elcock', 'Auble', 'Bistline', 'Chorney', 'Colgate', 'Dadamo', 'Deol', 'Discher', 'Ertz', 'Fletchall', 'Galletti', 'Geffre', 'Grall', 'Hoos', 'Iezzi', 'Kawecki', 'Madamba', 'Margolies', 'Mccreedy', 'Okimoto', 'Oum', 'Pangan', 'Pasternack', 'Plazola', 'Prochazka', 'Tewes', 'Tramontana', 'Yauch', 'Zarling', 'Zemanek', 'Altshuler', 'Bartkowski', 'Cuoco', 'Garcialopez', 'Kauzlarich', 'Shishido', 'Zaun', 'Hallin', 'Starliper', 'Belflower', 'Kneece', 'Genet', 'Palmero', 'Willmott', 'Riek', 'Belger', 'Abbitt', 'Statum', 'Jacque', 'Chisley', 'Habersham', 'Berardinelli', 'Bodle', 'Deshaw', 'Ingalsbe', 'Kirchgessner', 'Kuna', 'Pellow', 'Pickler', 'Pistole', 'Rosenstock', 'Salceda', 'Sawatzky', 'Schappell', 'Scholer', 'Shellabarger', 'Spader', 'Swadley', 'Travelstead', 'Varin', 'Villwock', 'Wiemers', 'Bedoy', 'Borowiak', 'Celio', 'Dornfeld', 'Juhnke', 'Livernois', 'Sakaguchi', 'Sandall', 'Sivertson', 'Whitcraft', 'Anda', 'Aprile', 'Kritz', 'Speier', 'Karman', 'Kise', 'Madia', 'Bodo', 'Madole', 'Harl', 'Gach', 'Stalls', 'Holme', 'Lomba', 'Tutton', 'Windon', 'Bines', 'Benoist', 'Cirrincione', 'Coday', 'Delrosso', 'Dlouhy', 'Domenick', 'Edelmann', 'Goos', 'Hamling', 'Huda', 'Hutzel', 'Lanasa', 'Loudenslager', 'Lueras', 'Magnussen', 'Mcferran', 'Nowinski', 'Pikula', 'Precht', 'Quilici', 'Robling', 'Rusche', 'Schettino', 'Scibelli', 'Soderman', 'Spirito', 'Teaford', 'Updegrove', 'Weygandt', 'Zervos', 'Brunker', 'Demuro', 'Eckenrod', 'Emley', 'Franek', 'Frankenberger', 'Longbrake', 'Magallanez', 'Stofko', 'Zenz', 'Galik', 'Crevier', 'Fina', 'Harari', 'Dudney', 'Inga', 'Sowles', 'Folker', 'Cressy', 'Eckerson', 'Gerringer', 'Capito', 'Huxtable', 'Arcement', 'Lansdown', 'Amara', 'Brazill', 'Flye', 'Currington', 'Buffin', 'Desta', 'Cheuvront', 'Fuoco', 'Gerbino', 'Hilyer', 'Hogsed', 'Kubis', 'Lautner', 'Loeber', 'Meyn', 'Mortell', 'Nunziato', 'Opdahl', 'Panebianco', 'Reffner', 'Repsher', 'Riccobono', 'Wik', 'Circle', 'Hovde', 'Keaveney', 'Landsberg', 'Pesavento', 'Bedel', 'Glas', 'Thurn', 'Jaffer', 'Dantin', 'Risi', 'Many', 'Egler', 'Craghead', 'Ann', 'Turnbo', 'Crumby', 'Faciane', 'Brummell', 'Bujak', 'Chaddock', 'Cullop', 'Eberling', 'Ennen', 'Frum', 'Gassert', 'Grothaus', 'Hucke', 'Lanphere', 'Lozon', 'Macadam', 'Mezo', 'Peretti', 'Perlin', 'Prestwich', 'Redmann', 'Ringley', 'Rivenburg', 'Sandow', 'Spreitzer', 'Stachnik', 'Szczesniak', 'Tworek', 'Wogan', 'Zygmunt', 'Austad', 'Chiappone', 'Gelineau', 'Lannom', 'Livezey', 'Monrroy', 'Norem', 'Oetting', 'Ostberg', 'Takeshita', 'Gorsky', 'Allcorn', 'Pemble', 'Josselyn', 'Lanzo', 'Hoare', 'Ticer', 'Netterville', 'Lawes', 'Lenton', 'Garraway', 'Cyprian', 'Alferez', 'Allocco', 'Aslanian', 'Brenna', 'Carachure', 'Devoss', 'Dubas', 'Garrabrant', 'Gerloff', 'Gerritsen', 'Hobaugh', 'Jasek', 'Kulis', 'Lenehan', 'Lodes', 'Mandich', 'Manter', 'Mcfatridge', 'Mikolajczak', 'Netz', 'Perrelli', 'Ribar', 'Sekerak', 'Shingledecker', 'Talamante', 'Taverna', 'Thoresen', 'Throneberry', 'Vanacore', 'Vieau', 'Wermuth', 'Zeidan', 'Counihan', 'Dircks', 'Markovitz', 'Panas', 'Steffel', 'Bergstedt', 'Mohar', 'Sonne', 'Mitsch', 'Aceituno', 'Loker', 'Treen', 'Prunier', 'Amberson', 'Allington', 'Artley', 'Caffery', 'Rhoney', 'Nimmer', 'Ledwell', 'Barkus', 'Fralin', 'Locks', 'Azzara', 'Bartosik', 'Bertelson', 'Birman', 'Brogna', 'Cachola', 'Dennington', 'Enea', 'Gallogly', 'Grafe', 'Jankowiak', 'Kaas', 'Karis', 'Kostick', 'Lentsch', 'Locken', 'Mathys', 'Maxcy', 'Monegro', 'Olano', 'Paske', 'Raible', 'Rowbotham', 'Vanderbeck', 'Vanosdol', 'Wenzler', 'Yglesias', 'Eisenberger', 'Grzelak', 'Hamidi', 'Hottel', 'Markoff', 'Santagata', 'Seefeld', 'Stachowicz', 'Stiehl', 'Staver', 'Raad', 'Sarber', 'Rudge', 'Connelley', 'Danser', 'Chumney', 'Hind', 'Desper', 'Fergusson', 'Ringwood', 'Byles', 'Alyea', 'Benzinger', 'Betzer', 'Brix', 'Chiarella', 'Chiriboga', 'Cicala', 'Cohick', 'Creeden', 'Delligatti', 'Garbacz', 'Grossberg', 'Habecker', 'Inscoe', 'Irias', 'Karlsen', 'Kilts', 'Koetter', 'Laflin', 'Laperle', 'Mizner', 'Navia', 'Nolet', 'Procaccini', 'Pula', 'Scarfo', 'Schmelz', 'Taaffe', 'Troth', 'Vanlaningham', 'Vosberg', 'Whitchurch', 'Benak', 'Hanawalt', 'Lindman', 'Moschetti', 'Rozas', 'Sporleder', 'Stopka', 'Turowski', 'Wahlgren', 'Youngstrom', 'Jabbour', 'Myerson', 'Perlow', 'Cannone', 'Kil', 'Stiverson', 'Cedar', 'Sweeden', 'Pourciau', 'Salina', 'Delmoral', 'Balle', 'Cohea', 'Bute', 'Rayne', 'Cawthorn', 'Conely', 'Cartlidge', 'Powel', 'Nwankwo', 'Centrella', 'Delaura', 'Deprey', 'Dulude', 'Garrod', 'Gassen', 'Greenberger', 'Huneke', 'Kunzman', 'Laakso', 'Oppermann', 'Radich', 'Rozen', 'Schoffstall', 'Swetnam', 'Vitrano', 'Wolber', 'Amirault', 'Przybysz', 'Trzeciak', 'Fontan', 'Mathie', 'Roswell', 'Mcquitty', 'Kye', 'Lucious', 'Chilcutt', 'Difazio', 'Diperna', 'Gashi', 'Goodstein', 'Gruetzmacher', 'Imus', 'Krumholz', 'Lanzetta', 'Leaming', 'Lehigh', 'Lobosco', 'Pardoe', 'Pellicano', 'Purtee', 'Ramanathan', 'Roszkowski', 'Satre', 'Steinborn', 'Stinebaugh', 'Thiesen', 'Tierno', 'Wrisley', 'Yazdani', 'Zwilling', 'Berntson', 'Gisclair', 'Golliher', 'Neumeier', 'Stohl', 'Swartley', 'Wannemacher', 'Wickard', 'Duford', 'Rosello', 'Merfeld', 'Arko', 'Cotney', 'Hai', 'Milley', 'Figueira', 'Willes', 'Helmes', 'Abair', 'Life', 'Izard', 'Duskin', 'Moland', 'Primer', 'Hagos', 'Anyanwu', 'Balasubramanian', 'Bluth', 'Calk', 'Chrzan', 'Constanza', 'Durney', 'Ekholm', 'Erny', 'Ferrando', 'Froberg', 'Gonyer', 'Guagliardo', 'Hreha', 'Kobza', 'Kuruvilla', 'Preziosi', 'Ricciuti', 'Rosiles', 'Schiesser', 'Schmoyer', 'Slota', 'Szeliga', 'Verba', 'Widjaja', 'Wrzesinski', 'Zufall', 'Bumstead', 'Dohrman', 'Dommer', 'Eisenmenger', 'Glogowski', 'Kaufhold', 'Kuiken', 'Ricklefs', 'Sinyard', 'Steenbergen', 'Schweppe', 'Chatwin', 'Dingee', 'Mittleman', 'Menear', 'Milot', 'Riccardo', 'Clemenson', 'Mellow', 'Gabe', 'Rolla', 'Vander', 'Casselberry', 'Hubbart', 'Colvert', 'Billingsly', 'Burgman', 'Cattaneo', 'Duthie', 'Hedtke', 'Heidler', 'Hertenstein', 'Hudler', 'Hustead', 'Ibsen', 'Krutsinger', 'Mauceri', 'Mersereau', 'Morad', 'Rentfro', 'Rumrill', 'Shedlock', 'Sindt', 'Soulier', 'Squitieri', 'Trageser', 'Vatter', 'Vollman', 'Wagster', 'Caselli', 'Dibacco', 'Gick', 'Kachel', 'Lukaszewski', 'Minniti', 'Neeld', 'Zarrella', 'Hedglin', 'Jahan', 'Nathe', 'Starn', 'Kana', 'Bernet', 'Rossa', 'Barro', 'Smylie', 'Bowlds', 'Mccalley', 'Oniel', 'Thaggard', 'Cayson', 'Sinegal', 'Bergfeld', 'Bickmore', 'Boch', 'Bushway', 'Carneiro', 'Cerio', 'Colbath', 'Eade', 'Eavenson', 'Epping', 'Fredricksen', 'Gramer', 'Hassman', 'Hinderer', 'Kantrowitz', 'Kaplowitz', 'Kelner', 'Lecates', 'Lothrop', 'Lupica', 'Masterman', 'Meeler', 'Neumiller', 'Newbauer', 'Noyce', 'Nulty', 'Shanker', 'Taheri', 'Timblin', 'Vitucci', 'Zappone', 'Femia', 'Hejl', 'Helmbrecht', 'Kiesow', 'Maschino', 'Brougher', 'Koff', 'Reffett', 'Langhoff', 'Milman', 'Sidener', 'Levie', 'Chaudry', 'Rattan', 'Finkler', 'Bollen', 'Booz', 'Shipps', 'Theall', 'Scallion', 'Furlough', 'Landfair', 'Albuquerque', 'Beckstrand', 'Colglazier', 'Darcey', 'Fahr', 'Gabert', 'Gertner', 'Gettler', 'Giovannetti', 'Hulvey', 'Juenger', 'Kantola', 'Kemmerling', 'Leclere', 'Liberati', 'Lopezlopez', 'Minerva', 'Redlich', 'Shoun', 'Sickinger', 'Vivier', 'Yerdon', 'Ziomek', 'Dechert', 'Delbene', 'Galassi', 'Rawdon', 'Wesenberg', 'Laurino', 'Grosjean', 'Levay', 'Zike', 'Stukey', 'Loft', 'Kool', 'Hatchel', 'Mainville', 'Salis', 'Greenslade', 'Mantey', 'Spratlin', 'Fayette', 'Marner', 'Rolan', 'Pain', 'Colquhoun', 'Brave', 'Locust', 'Sconiers', 'Bahler', 'Barrero', 'Bartha', 'Basnett', 'Berghoff', 'Bomgardner', 'Brindisi', 'Campoli', 'Carawan', 'Chhim', 'Corro', 'Crissey', 'Deterding', 'Dileonardo', 'Dowis', 'Hagemeier', 'Heichel', 'Kipfer', 'Lemberger', 'Maestri', 'Mauri', 'Nakatani', 'Notestine', 'Polakowski', 'Schlobohm', 'Segel', 'Socci', 'Stieg', 'Thorstad', 'Trausch', 'Whitledge', 'Wilkowski', 'Barkdull', 'Dubeau', 'Ellingsen', 'Hayduk', 'Lauter', 'Lizak', 'Machamer', 'Makarewicz', 'Shuffield', 'Heiserman', 'Sandeen', 'Plough', 'Stemler', 'Bossler', 'Catalina', 'Betley', 'Bonello', 'Pryde', 'Nickey', 'Schanck', 'Single', 'Mulberry', 'Point', 'Danson', 'Flemmings', 'Behnken', 'Catone', 'Cummiskey', 'Currens', 'Gersch', 'Kitamura', 'Meddaugh', 'Montagne', 'Nouri', 'Olejnik', 'Pintar', 'Placke', 'Quinter', 'Rakers', 'Stuteville', 'Sullo', 'Voelz', 'Barabas', 'Estock', 'Hultberg', 'Savitz', 'Treml', 'Vigneault', 'Jezierski', 'Zayed', 'Dewell', 'Yanko', 'Moulin', 'Whalin', 'Elsworth', 'Summit', 'Esty', 'Mahadeo', 'Shular', 'Amedee', 'Bellerose', 'Bendixen', 'Briski', 'Buysse', 'Desa', 'Dobry', 'Dufner', 'Fetterly', 'Finau', 'Gaudioso', 'Giangrande', 'Heuring', 'Kitchel', 'Latulippe', 'Pombo', 'Vancott', 'Woofter', 'Bojarski', 'Cretella', 'Heumann', 'Limpert', 'Mcneff', 'Pluff', 'Tumlinson', 'Widick', 'Yeargan', 'Hanft', 'Novinger', 'Ruddle', 'Wrye', 'Felde', 'Basic', 'Babington', 'Karson', 'Forgy', 'Rendall', 'Icard', 'Jann', 'Ady', 'Therrell', 'Sroufe', 'Maden', 'Ganus', 'Preddy', 'Marberry', 'Fonder', 'Latty', 'Leatherbury', 'Mentor', 'Brissett', 'Mcglory', 'Readus', 'Akau', 'Bellone', 'Berendt', 'Bok', 'Broten', 'Colosi', 'Corio', 'Gilani', 'Huffmaster', 'Kieler', 'Leonor', 'Lips', 'Madron', 'Missey', 'Nabozny', 'Panning', 'Reinwald', 'Ridener', 'Silvio', 'Soder', 'Spieler', 'Vaeth', 'Vincenti', 'Walczyk', 'Washko', 'Wiater', 'Wilen', 'Windish', 'Consalvo', 'Fioravanti', 'Hinners', 'Paduano', 'Ranum', 'Parlato', 'Dweck', 'Matern', 'Cryder', 'Rubert', 'Furgason', 'Virella', 'Boylen', 'Devenport', 'Perrodin', 'Hollingshed', 'Pennix', 'Bogdanski', 'Carretero', 'Cubillos', 'Deponte', 'Forrey', 'Gatchalian', 'Geisen', 'Gombos', 'Hartlage', 'Huddy', 'Kou', 'Matsko', 'Muffley', 'Niazi', 'Nodarse', 'Pawelek', 'Pyper', 'Stahnke', 'Udall', 'Baldyga', 'Chrostowski', 'Frable', 'Handshoe', 'Helderman', 'Lambing', 'Marolf', 'Maynez', 'Bunde', 'Coia', 'Piersol', 'Agne', 'Manwarren', 'Bolter', 'Kirsh', 'Limerick', 'Degray', 'Bossie', 'Frizell', 'Saulters', 'Staple', 'Raspberry', 'Arvie', 'Abler', 'Caya', 'Ceci', 'Dado', 'Dewoody', 'Hartzel', 'Haverstick', 'Kagel', 'Kinnan', 'Krock', 'Kubica', 'Laun', 'Leimbach', 'Mecklenburg', 'Messmore', 'Milich', 'Mor', 'Nachreiner', 'Novelo', 'Poer', 'Vaupel', 'Wery', 'Breisch', 'Cashdollar', 'Corbridge', 'Craker', 'Heiberger', 'Landress', 'Leichty', 'Wiedmann', 'Yankowski', 'Rigel', 'Eary', 'Riggen', 'Nazir', 'Shambo', 'Gingery', 'Guyon', 'Bogie', 'Kar', 'Manifold', 'Lafavor', 'Montas', 'Yeadon', 'Cutchin', 'Burkins', 'Achille', 'Bulls', 'Torry', 'Bartkus', 'Beshara', 'Busalacchi', 'Calkin', 'Corkum', 'Crilley', 'Cuny', 'Delgaudio', 'Devenney', 'Emanuelson', 'Fiel', 'Galanti', 'Gravina', 'Herzing', 'Huckaba', 'Jaquish', 'Kellermann', 'Ketola', 'Klunder', 'Kolinski', 'Kosak', 'Loscalzo', 'Moehle', 'Ressel', 'Skora', 'Steakley', 'Traugott', 'Volden', 'Berrong', 'Kehres', 'Loeffelholz', 'Mensinger', 'Nudo', 'Pargas', 'Endy', 'Corniel', 'Azzam', 'Soard', 'Flud', 'Shuffler', 'Hiley', 'Logwood', 'Ducre', 'Aikey', 'Ardolino', 'Bergstresser', 'Cen', 'Delpriore', 'Divelbiss', 'Fishkin', 'Gaucin', 'Hemmingsen', 'Inscore', 'Kathman', 'Kempen', 'Koble', 'Maestre', 'Mcmonigle', 'Merendino', 'Meske', 'Pietrzyk', 'Renfrew', 'Shevchenko', 'Wied', 'Digeronimo', 'Heberer', 'Himmelberger', 'Nordmeyer', 'Pocius', 'Sigurdson', 'Simic', 'Steury', 'Kealey', 'Sabat', 'Verstraete', 'Patchell', 'Finigan', 'Critz', 'Janelle', 'Cima', 'Zachariah', 'Lebon', 'Kellough', 'Whitehall', 'Jaudon', 'Civil', 'Dokes', 'Slappy', 'Bernacki', 'Castronovo', 'Douty', 'Formoso', 'Handelman', 'Hauswirth', 'Janowicz', 'Klostermann', 'Lochridge', 'Mcdiarmid', 'Schmale', 'Shaddox', 'Sitzes', 'Spaw', 'Urbanik', 'Voller', 'Fujikawa', 'Kimmet', 'Klingel', 'Stoffregen', 'Thammavong', 'Varelas', 'Whobrey', 'Mandella', 'Montuori', 'Safrit', 'Turan', 'Khokhar', 'Sircy', 'Sabio', 'Fill', 'Brandao', 'Avans', 'Mencer', 'Sherley', 'Mccadden', 'Sydney', 'Smack', 'Lastrapes', 'Rowser', 'Moultry', 'Faulcon', 'Arnall', 'Babiak', 'Balsam', 'Bezanson', 'Bocook', 'Bohall', 'Celi', 'Costillo', 'Crom', 'Crusan', 'Dibari', 'Donaho', 'Followell', 'Gaudino', 'Gericke', 'Gori', 'Hurrell', 'Jakubiak', 'Kazemi', 'Koslosky', 'Massoud', 'Niebla', 'Noffke', 'Panjwani', 'Papandrea', 'Patella', 'Plambeck', 'Plichta', 'Prinkey', 'Raptis', 'Ruffini', 'Shoen', 'Temkin', 'Thul', 'Vandall', 'Wyeth', 'Zalenski', 'Consoli', 'Gumbert', 'Milanowski', 'Musolf', 'Naeger', 'Okonski', 'Orrison', 'Solache', 'Verdone', 'Woehler', 'Yonke', 'Risdon', 'Orzech', 'Bergland', 'Collen', 'Bloodsworth', 'Furgeson', 'Moch', 'Callegari', 'Alphonso', 'Ozier', 'Paulding', 'Ringold', 'Yarde', 'Abbett', 'Axford', 'Capwell', 'Datz', 'Delillo', 'Delisa', 'Dicaprio', 'Dimare', 'Faughnan', 'Fehrenbacher', 'Gellert', 'Ging', 'Gladhill', 'Goates', 'Hammerstrom', 'Hilbun', 'Iodice', 'Kadish', 'Kilker', 'Lurvey', 'Maue', 'Michna', 'Parslow', 'Pawelski', 'Quenzer', 'Raboin', 'Sader', 'Sawka', 'Velis', 'Wilczewski', 'Willemsen', 'Zebley', 'Benscoter', 'Denhartog', 'Dolinsky', 'Malacara', 'Mccosh', 'Modugno', 'Tsay', 'Vanvoorst', 'Mincher', 'Nickol', 'Elster', 'Kerce', 'Brittan', 'Quilter', 'Spike', 'Mcintee', 'Boldon', 'Balderama', 'Cauffman', 'Chovanec', 'Difonzo', 'Fagerstrom', 'Galanis', 'Jeziorski', 'Krasowski', 'Lansdale', 'Laven', 'Magallan', 'Mahal', 'Mehrer', 'Naus', 'Peltzer', 'Petraitis', 'Pritz', 'Salway', 'Savich', 'Schmehl', 'Teniente', 'Tuccillo', 'Wahlquist', 'Wetz', 'Brozovich', 'Catalfamo', 'Dioguardi', 'Guzzetta', 'Hanak', 'Lipschutz', 'Sholtis', 'Bleecker', 'Sattar', 'Thivierge', 'Camfield', 'Heslep', 'Tree', 'Calvey', 'Mcgowin', 'Strickling', 'Manderson', 'Dieudonne', 'Bonini', 'Bruinsma', 'Burgueno', 'Cotugno', 'Fukunaga', 'Krog', 'Lacerda', 'Larrivee', 'Lepera', 'Pinilla', 'Reichenberger', 'Rovner', 'Rubiano', 'Saraiva', 'Smolka', 'Soboleski', 'Tallmadge', 'Wigand', 'Wikle', 'Bentsen', 'Bierer', 'Cohenour', 'Dobberstein', 'Holderbaum', 'Polhamus', 'Skousen', 'Theiler', 'Fornes', 'Sisley', 'Zingale', 'Nimtz', 'Prieur', 'Mccaughan', 'Fawaz', 'Hobbins', 'Killingbeck', 'Roads', 'Nicolson', 'Mcculloh', 'Verges', 'Badley', 'Shorten', 'Litaker', 'Laseter', 'Stthomas', 'Mcguffie', 'Depass', 'Flemons', 'Ahola', 'Armacost', 'Bearse', 'Downum', 'Drechsel', 'Farooqi', 'Filosa', 'Francesconi', 'Kielbasa', 'Latella', 'Monarch', 'Ozawa', 'Papadakis', 'Politano', 'Poucher', 'Roussin', 'Safley', 'Schwer', 'Tesoro', 'Tsan', 'Wintersteen', 'Zanni', 'Barlage', 'Brancheau', 'Buening', 'Dahlem', 'Forni', 'Gerety', 'Gutekunst', 'Leamer', 'Liwanag', 'Meech', 'Wigal', 'Bonta', 'Cheetham', 'Crespi', 'Fahs', 'Prow', 'Postle', 'Delacy', 'Dufort', 'Gallery', 'Romey', 'Aime', 'Molock', 'Dixion', 'Carstarphen', 'Appleyard', 'Aylsworth', 'Barberi', 'Contini', 'Cugini', 'Eiben', 'Faso', 'Hartog', 'Jelen', 'Loayza', 'Maugeri', 'Mcgannon', 'Osorno', 'Paratore', 'Sahagian', 'Sarracino', 'Scallon', 'Sypniewski', 'Teters', 'Throgmorton', 'Vogelpohl', 'Walkowski', 'Winchel', 'Niedermeyer', 'Jayroe', 'Montello', 'Neyer', 'Milder', 'Obar', 'Stanis', 'Pro', 'Pin', 'Fatheree', 'Cotterell', 'Reeds', 'Comrie', 'Zamor', 'Gradney', 'Poullard', 'Betker', 'Bondarenko', 'Buchko', 'Eischens', 'Glavan', 'Hannold', 'Heafner', 'Karaffa', 'Krabbe', 'Meinzer', 'Olgin', 'Raeder', 'Sarff', 'Senechal', 'Sette', 'Shovlin', 'Slife', 'Tallarico', 'Trivino', 'Wyszynski', 'Audia', 'Facemire', 'Januszewski', 'Klebba', 'Kovacik', 'Moroni', 'Nieder', 'Schorn', 'Sundby', 'Tehan', 'Trias', 'Kissler', 'Margo', 'Jefcoat', 'Bulow', 'Maire', 'Vizcarrondo', 'Ki', 'Ayuso', 'Mayhan', 'Usman', 'Blincoe', 'Whidby', 'Tinson', 'Calarco', 'Cena', 'Ciccarello', 'Cloe', 'Consolo', 'Davydov', 'Decristofaro', 'Delmundo', 'Dubrow', 'Ellinwood', 'Gehling', 'Halberstadt', 'Hascall', 'Hoeffner', 'Huettl', 'Iafrate', 'Imig', 'Khoo', 'Krausz', 'Kuether', 'Kulla', 'Marchesani', 'Ormonde', 'Platzer', 'Preusser', 'Rebel', 'Reidhead', 'Riehm', 'Robertshaw', 'Runco', 'Sandino', 'Spare', 'Trefethen', 'Tribby', 'Yamazaki', 'Ziesmer', 'Calamari', 'Deyoe', 'Marullo', 'Neidigh', 'Salveson', 'Senesac', 'Ausburn', 'Herner', 'Seagrave', 'Lormand', 'Niblock', 'Somes', 'Naim', 'Murren', 'Callander', 'Glassco', 'Henri', 'Jabbar', 'Bordes', 'Altemose', 'Bagnell', 'Belloso', 'Beougher', 'Birchall', 'Cantara', 'Demetriou', 'Galford', 'Hast', 'Heiny', 'Hieronymus', 'Jehle', 'Khachatryan', 'Kristof', 'Kubas', 'Mano', 'Munar', 'Ogas', 'Riccitelli', 'Sidman', 'Suchocki', 'Tortorello', 'Trombino', 'Vullo', 'Badura', 'Clerkin', 'Criollo', 'Dashnaw', 'Mednick', 'Pickrel', 'Mawson', 'Hockey', 'Alo', 'Frankland', 'Gaby', 'Hoda', 'Marchena', 'Fawbush', 'Cowing', 'Aydelott', 'Dieu', 'Rise', 'Morten', 'Gunby', 'Modeste', 'Balcerzak', 'Cutbirth', 'Dejoseph', 'Desaulniers', 'Dimperio', 'Dubord', 'Gruszka', 'Haske', 'Hehr', 'Kolander', 'Kusiak', 'Lampron', 'Mapel', 'Montie', 'Mumme', 'Naramore', 'Raffel', 'Ruter', 'Sawa', 'Sencion', 'Somogyi', 'Ventola', 'Zabawa', 'Alagna', 'Burmaster', 'Chirco', 'Gjerde', 'Hilgenberg', 'Huntress', 'Kochel', 'Nist', 'Schena', 'Toolan', 'Wurzer', 'Masih', 'Ritts', 'Rousse', 'Buckey', 'Sausedo', 'Dolle', 'Bena', 'Franca', 'Commins', 'Gago', 'Pattie', 'Brener', 'Verley', 'Griffy', 'Heiskell', 'Osley', 'Babula', 'Barbone', 'Berzins', 'Demirjian', 'Dietze', 'Haseltine', 'Heinbaugh', 'Henneke', 'Korba', 'Levitz', 'Lorenzini', 'Mansilla', 'Peffley', 'Poletti', 'Portelli', 'Rottinghaus', 'Scifres', 'Stadel', 'Stettner', 'Swauger', 'Vanwart', 'Vorhies', 'Worst', 'Yadav', 'Yebra', 'Kreiter', 'Mroczek', 'Pennella', 'Stangelo', 'Suchan', 'Weiand', 'Widhalm', 'Wojcicki', 'Gutzman', 'Griffee', 'Konicki', 'Moorehouse', 'Neighbor', 'Butte', 'Cooter', 'Humpherys', 'Morrish', 'Stockhausen', 'Slatter', 'Cheely', 'Yassin', 'Bazil', 'Mcsween', 'Anastos', 'Annunziato', 'Bora', 'Burkitt', 'Cino', 'Codding', 'Criado', 'Firestine', 'Goecke', 'Golda', 'Holloran', 'Homen', 'Laubscher', 'Memmer', 'Navejar', 'Peraino', 'Petrizzo', 'Pflieger', 'Pint', 'Porcello', 'Raffety', 'Riedesel', 'Salado', 'Scaletta', 'Schuring', 'Slaydon', 'Solecki', 'Spomer', 'Waldridge', 'Zawislak', 'Bottone', 'Helgesen', 'Knippel', 'Loutzenhiser', 'Mallinson', 'Malnar', 'Pethtel', 'Sissel', 'Thorstenson', 'Winokur', 'Dittmann', 'Fencl', 'Kernen', 'Gath', 'Hiney', 'Godman', 'Hopton', 'Tinley', 'Wamble', 'Greg', 'Garrette', 'Acoff', 'Ausman', 'Burggraf', 'Colliver', 'Dejulio', 'Fedorchak', 'Finocchio', 'Grasse', 'Harpold', 'Hopman', 'Kilzer', 'Losasso', 'Lovallo', 'Neumayer', 'Purohit', 'Reddinger', 'Scheper', 'Valbuena', 'Wenzl', 'Eilerman', 'Galbo', 'Haydu', 'Vipond', 'Wesselman', 'Yeagle', 'Boutelle', 'Odonnel', 'Morocco', 'Speak', 'Ruckel', 'Cornier', 'Burbidge', 'Esselman', 'Daisey', 'Juran', 'Henard', 'Trench', 'Hurry', 'Estis', 'Allport', 'Beedy', 'Blower', 'Bogacz', 'Caldas', 'Carriero', 'Garand', 'Gonterman', 'Harbeck', 'Husar', 'Lizcano', 'Lonardo', 'Meneely', 'Misiewicz', 'Pagliuca', 'Pember', 'Rybacki', 'Safar', 'Seeberger', 'Siharath', 'Spoerl', 'Tattersall', 'Birchmeier', 'Denunzio', 'Dustman', 'Franchini', 'Gettel', 'Goldrick', 'Goodheart', 'Keshishyan', 'Mcgrogan', 'Newingham', 'Scheier', 'Skorupa', 'Utech', 'Weidenbach', 'Chaloupka', 'Grater', 'Libman', 'Recore', 'Savona', 'Verbeke', 'Lunetta', 'Schlater', 'Staffieri', 'Troll', 'Leyton', 'Peto', 'Trella', 'Follin', 'Morro', 'Woodhall', 'Krauser', 'Salles', 'Brunty', 'Wadford', 'Shaddock', 'Minnie', 'Mountcastle', 'Butter', 'Galentine', 'Longsworth', 'Edgecombe', 'Babino', 'Printup', 'Humbles', 'Vessel', 'Relford', 'Taite', 'Aliberti', 'Brostrom', 'Budlong', 'Bykowski', 'Coursen', 'Darga', 'Doutt', 'Gomberg', 'Greaser', 'Hilde', 'Hirschy', 'Mayorquin', 'Mcartor', 'Mechler', 'Mein', 'Montville', 'Peskin', 'Popiel', 'Ricciardelli', 'Terrana', 'Urton', 'Cardiff', 'Foiles', 'Humann', 'Pokorney', 'Seehafer', 'Sporer', 'Timme', 'Tweten', 'Widrick', 'Harnack', 'Chamlee', 'Lafountaine', 'Lowdermilk', 'Akel', 'Maulden', 'Sloman', 'Odonald', 'Hitchman', 'Pendergraph', 'Klugh', 'Mctier', 'Stargell', 'Hailu', 'Kanu', 'Abrahamian', 'Ackerly', 'Belongia', 'Cudmore', 'Jaskolski', 'Kedzierski', 'Licciardi', 'Lowenberg', 'Meitzler', 'Metzer', 'Mitcheltree', 'Nishioka', 'Pascuzzi', 'Pelphrey', 'Ramones', 'Schuchard', 'Smithee', 'Bignell', 'Blaszak', 'Borello', 'Fiacco', 'Garrelts', 'Guzowski', 'Rychlik', 'Siebers', 'Speziale', 'Zauner', 'Corell', 'Welt', 'Koby', 'Auletta', 'Bursch', 'Luckman', 'Vanhoesen', 'Russian', 'Statton', 'Yahya', 'Boxx', 'Haltiwanger', 'Redhead', 'Mcgregory', 'Baccari', 'Berrey', 'Bogden', 'Braniff', 'Cafarelli', 'Clavette', 'Corallo', 'Dealy', 'Gilger', 'Gitter', 'Goldwasser', 'Hillesheim', 'Hulsizer', 'Jankovic', 'Limburg', 'Lopera', 'Mcaleese', 'Mcclintick', 'Montealegre', 'Mosko', 'Nogle', 'Ordones', 'Papesh', 'Peragine', 'Picco', 'Podraza', 'Ras', 'Rezek', 'Rork', 'Schraufnagel', 'Scipione', 'Terlizzi', 'Vanblarcom', 'Yoshino', 'Beaverson', 'Behunin', 'Isch', 'Janiga', 'Koeppe', 'Laurich', 'Vondrak', 'Walkley', 'Hottenstein', 'Garms', 'Macknight', 'Seagroves', 'Shehata', 'Arons', 'Liley', 'Pressly', 'Cowper', 'Branon', 'Abdella', 'Milord', 'Appenzeller', 'Ardila', 'Belgard', 'Boop', 'Burbano', 'Capitano', 'Carrig', 'Conrey', 'Donica', 'Fineberg', 'Gemberling', 'Harrier', 'Hufnagle', 'Kitner', 'Lessing', 'Manoukian', 'Menk', 'Repetto', 'Rhinesmith', 'Stechschulte', 'Yep', 'Zuhlke', 'Abundiz', 'Buccellato', 'Closser', 'Gielow', 'Nurmi', 'Pelka', 'Piscitello', 'Shoaff', 'Champlain', 'Conran', 'Leidig', 'Carel', 'Zahid', 'Dimitri', 'Sapia', 'Labauve', 'Khalifa', 'Gonsoulin', 'Parrot', 'Propps', 'Dunnaway', 'Cayo', 'Mccleod', 'Bonifas', 'Dirkes', 'Farruggia', 'Gut', 'Heacox', 'Herrejon', 'Ipina', 'Keatley', 'Kowitz', 'Kratky', 'Langseth', 'Nidiffer', 'Plimpton', 'Riesenberg', 'Sulewski', 'Tabar', 'Takara', 'Tomassetti', 'Tweet', 'Weltz', 'Youtsey', 'Franckowiak', 'Geffert', 'Glawe', 'Hillestad', 'Ladewig', 'Luckow', 'Radebaugh', 'Ransbottom', 'Stordahl', 'Weimar', 'Wiegers', 'Jowett', 'Tomb', 'Waitt', 'Beaudreau', 'Notter', 'Rijo', 'Denike', 'Mam', 'Vent', 'Gamage', 'Carre', 'Childrey', 'Heaven', 'Forge', 'Beckom', 'Collick', 'Bovell', 'Hardimon', 'Shells', 'Bolf', 'Canete', 'Cozby', 'Dunlavey', 'Febo', 'Lamke', 'Lant', 'Larned', 'Leiss', 'Lofthouse', 'Marohn', 'Stradling', 'Subramaniam', 'Vitug', 'Ziccardi', 'Akamine', 'Bellissimo', 'Bottini', 'Braund', 'Cavasos', 'Heltsley', 'Landstrom', 'Lisiecki', 'Navejas', 'Sobczyk', 'Trela', 'Yablonski', 'Yocham', 'Fier', 'Laiche', 'Zenor', 'Grew', 'Naval', 'Garratt', 'Sako', 'Zollicoffer', 'Momon', 'Bensman', 'Cirincione', 'Dimitrov', 'Domeier', 'Gaska', 'Gensel', 'Gernert', 'Groot', 'Guarisco', 'Llorente', 'Ludemann', 'Moisan', 'Muzio', 'Neiswender', 'Ottaway', 'Paslay', 'Readinger', 'Skok', 'Spittle', 'Sweany', 'Tanzi', 'Upadhyay', 'Valone', 'Varas', 'Benecke', 'Faulstich', 'Hebda', 'Jobst', 'Schleis', 'Shuart', 'Treinen', 'Fok', 'Dentler', 'Ginty', 'Ronda', 'Tess', 'Scantlin', 'Kham', 'Murin', 'Faubert', 'Ocarroll', 'Maranda', 'Gadsby', 'Mouse', 'Lunden', 'Asquith', 'Batley', 'Bazzle', 'Hooke', 'Macneal', 'Desnoyers', 'Verdier', 'Biglow', 'Leverson', 'Becherer', 'Cecilio', 'Correale', 'Ehinger', 'Erney', 'Fassnacht', 'Humpal', 'Korpela', 'Kratt', 'Kunes', 'Lockyer', 'Macho', 'Manfredo', 'Maturino', 'Raineri', 'Seiger', 'Stant', 'Tecson', 'Tempest', 'Traverse', 'Vonk', 'Wormington', 'Yeske', 'Erichsen', 'Fiorelli', 'Fouty', 'Hodgkiss', 'Lindenbaum', 'Matusik', 'Mazzocco', 'Oldani', 'Ronca', 'Amero', 'Ormand', 'Cagley', 'Teutsch', 'Likins', 'Blurton', 'Lapier', 'Rensch', 'Howitt', 'Kady', 'Broce', 'Gaba', 'Summerson', 'Faure', 'Densley', 'Matkins', 'Boleware', 'Rahming', 'Degrate', 'Broaden', 'Barbian', 'Brancaccio', 'Dimiceli', 'Doukas', 'Fredell', 'Fritchman', 'Gahr', 'Geerdes', 'Heidrick', 'Hernon', 'Ipsen', 'Koci', 'Lato', 'Lyng', 'Montella', 'Petraglia', 'Redlinger', 'Riedlinger', 'Rodier', 'Shenton', 'Smigiel', 'Spanbauer', 'Swetland', 'Sypolt', 'Taubert', 'Wallander', 'Willers', 'Ziller', 'Bielak', 'Careaga', 'Droddy', 'Girardot', 'Kanouse', 'Perusse', 'Schwier', 'Velo', 'Westrum', 'Bouza', 'Calverley', 'Shupert', 'Simi', 'Zieger', 'Nicole', 'Fergeson', 'Guerrant', 'Tongue', 'Amison', 'Darius', 'Banasiak', 'Cocca', 'Dannemiller', 'Frommer', 'Guardia', 'Herl', 'Lippa', 'Nappo', 'Olaya', 'Ozburn', 'Patry', 'Pontiff', 'Rauth', 'Reier', 'Rolfs', 'Sassone', 'Servidio', 'Shough', 'Tencza', 'Ernster', 'Helminiak', 'Mcmanamon', 'Ottens', 'Vinh', 'Bula', 'Elza', 'Serres', 'Holan', 'Wetherill', 'Balis', 'Schexnider', 'Harral', 'Dulany', 'Webley', 'Addleman', 'Antonopoulos', 'Badman', 'Czerwonka', 'Deweerd', 'Donaghey', 'Duszynski', 'Firkus', 'Foell', 'Goyne', 'Hattabaugh', 'Herbel', 'Liebelt', 'Lovera', 'Quenneville', 'Ramic', 'Rissmiller', 'Schlag', 'Selover', 'Seyer', 'Stangeland', 'Stutesman', 'Suminski', 'Sweger', 'Tetlow', 'Thornbury', 'Votava', 'Weberg', 'Canniff', 'Evetts', 'Gutterman', 'Kasparek', 'Krenzer', 'Luckenbaugh', 'Mainwaring', 'Vanderweide', 'Balladares', 'Riesterer', 'Salmen', 'Mirando', 'Rockman', 'Warnes', 'Crispell', 'Corban', 'Chrystal', 'Barlowe', 'Perot', 'Ka', 'Stockett', 'Montfort', 'Reagor', 'Coote', 'Christon', 'Dor', 'Apt', 'Bandel', 'Bibbee', 'Brunkhorst', 'Dexheimer', 'Disharoon', 'Engelstad', 'Glaza', 'Locey', 'Loughney', 'Minotti', 'Posa', 'Renzulli', 'Schlauch', 'Shadix', 'Sloboda', 'Topor', 'Vacha', 'Cerulli', 'Ciaravino', 'Cisek', 'Congrove', 'Domzalski', 'Fleitas', 'Helfand', 'Lehnen', 'Moleski', 'Walski', 'Dazey', 'Mckellips', 'Kanne', 'Deguire', 'Macmurray', 'Marcelli', 'Creach', 'Antrobus', 'Hykes', 'Barriere', 'Avinger', 'Handford', 'Beaufort', 'Abend', 'Bozzi', 'Burnsworth', 'Crosthwaite', 'Eilert', 'Frigon', 'Hanbury', 'Hoilman', 'Isaksen', 'Juday', 'Legarda', 'Mcgourty', 'Mittler', 'Olkowski', 'Pau', 'Pescador', 'Pinkerman', 'Renno', 'Rescigno', 'Salsgiver', 'Schlanger', 'Sobek', 'Stasi', 'Talaga', 'Tish', 'Tropea', 'Umphress', 'Weisheit', 'Bartolini', 'Dassow', 'Ferullo', 'Fetherolf', 'Kimery', 'Kurihara', 'Schneiter', 'Sramek', 'Swier', 'Weinzierl', 'Karrer', 'Hurta', 'Lodico', 'Conkright', 'Sandvik', 'Pash', 'Pinell', 'Dougal', 'Burnet', 'Hoe', 'Rann', 'Curvin', 'Route', 'Outler', 'Corprew', 'Berhe', 'Eleby', 'Acoba', 'Ante', 'Baio', 'Befort', 'Brueck', 'Chevere', 'Ciani', 'Farnes', 'Hamar', 'Hirschhorn', 'Imbrogno', 'Kegg', 'Leever', 'Mesker', 'Nodal', 'Olveda', 'Paletta', 'Pilant', 'Rissman', 'Sebold', 'Siebel', 'Smejkal', 'Stai', 'Vanderkolk', 'Allday', 'Canupp', 'Dieck', 'Hinders', 'Karcz', 'Shomaker', 'Tuinstra', 'Urquizo', 'Wiltgen', 'Withem', 'Yanda', 'Blizard', 'Christenbury', 'Helser', 'Jing', 'Stave', 'Waddill', 'Mairena', 'Rebert', 'Gara', 'Shipes', 'Hartsoe', 'Bargeron', 'Arne', 'Ebrahim', 'Basha', 'Rozar', 'Venter', 'Mounger', 'Marsalis', 'Gildon', 'Antkowiak', 'Brus', 'Cicalese', 'Einspahr', 'Faucheux', 'Frix', 'Gateley', 'Hamberger', 'Holdorf', 'Hollibaugh', 'Junod', 'Keaveny', 'Knechtel', 'Kuffel', 'Mcwhirt', 'Navis', 'Neave', 'Rackers', 'Romagnoli', 'Shawhan', 'Valvano', 'Vina', 'Wielgus', 'Wojtaszek', 'Bartnik', 'Fiebelkorn', 'Gertsch', 'Morgenthaler', 'Nambo', 'Nemmers', 'Nihart', 'Nilges', 'Pulgarin', 'Recktenwald', 'Vandenbrink', 'Wion', 'Cundy', 'Burby', 'Cu', 'Vansciver', 'Herne', 'Doughtie', 'Cowdery', 'Woodle', 'Lafosse', 'Hodgens', 'Mckune', 'Car', 'Callens', 'Corsey', 'Brimage', 'Westry', 'Arismendez', 'Benenati', 'Brine', 'Brookbank', 'Burfield', 'Charnock', 'Copado', 'Demilio', 'Elvira', 'Fantini', 'Ferko', 'Flanagin', 'Gotto', 'Hartsough', 'Heckart', 'Herskowitz', 'Hoene', 'Ishibashi', 'Kysar', 'Leaverton', 'Longfield', 'Mischel', 'Musleh', 'Neyra', 'Obeirne', 'Ostrum', 'Pedretti', 'Pilkerton', 'Plasse', 'Reesor', 'Roznowski', 'Rusinko', 'Sickle', 'Spiteri', 'Stash', 'Syracuse', 'Trachsel', 'Weinand', 'Gruenberg', 'Gutkowski', 'Morella', 'Morneault', 'Slivinski', 'Blessinger', 'Taketa', 'Hussaini', 'Obeid', 'Seebeck', 'Spayd', 'Keasling', 'Famularo', 'Carne', 'Lacosse', 'Morino', 'Gutzmer', 'Spinola', 'Deahl', 'Crumm', 'Folley', 'Lennard', 'Rowson', 'Pickron', 'Union', 'Abraha', 'Yohannes', 'Whidbee', 'Mccaster', 'Batzel', 'Borowy', 'Disanti', 'Druck', 'Elsbury', 'Eschmann', 'Fehn', 'Flesner', 'Grawe', 'Haapala', 'Helvie', 'Hudy', 'Joswick', 'Kilcullen', 'Mabus', 'Marzo', 'Obradovich', 'Oriordan', 'Phy', 'Scarff', 'Schappert', 'Scire', 'Vandevander', 'Weyland', 'Anstey', 'Feeback', 'Komarek', 'Kyllo', 'Manivong', 'Timberman', 'Tinkey', 'Zempel', 'Haselhorst', 'Herberg', 'Laris', 'Morter', 'Fredman', 'Reny', 'Ferrall', 'Silverthorne', 'Shuttlesworth', 'Stigers', 'Koker', 'Mollette', 'Mansel', 'Chrisp', 'Glymph', 'Preyer', 'Worlds', 'Arutyunyan', 'Carrizosa', 'Dambrosia', 'Dantuono', 'Delduca', 'Florencio', 'Garafola', 'Habermehl', 'Hanaway', 'Harmes', 'Heinonen', 'Hellstrom', 'Herzer', 'Klahr', 'Kobler', 'Korner', 'Lancia', 'Leask', 'Ledo', 'Manzanarez', 'Myung', 'Prestigiacomo', 'Serpe', 'Tonche', 'Ventrella', 'Walrod', 'Warga', 'Wasmer', 'Weins', 'Zaccaro', 'Bartus', 'Fiumara', 'Incorvaia', 'Khatun', 'Kisamore', 'Riesen', 'Santry', 'Schmierer', 'Talamo', 'Zaccone', 'Liddick', 'Mcclune', 'Hade', 'Calcutt', 'Gillet', 'Husein', 'Be', 'Lavell', 'Veley', 'Buckholtz', 'Naves', 'Debrosse', 'Palms', 'Lacewell', 'Tates', 'Tekle', 'Golphin', 'Asleson', 'Bartlebaugh', 'Benter', 'Bielefeld', 'Cappetta', 'Hanback', 'Heeg', 'Helf', 'Hibberd', 'Holsworth', 'Kowalchuk', 'Kruczek', 'Lieurance', 'Markwood', 'Muckey', 'Rasey', 'Rautio', 'Salek', 'Schwaller', 'Scibilia', 'Speltz', 'Stopper', 'Struckman', 'Surowiec', 'Texter', 'Venturi', 'Wolfenden', 'Zortman', 'Dehler', 'Gillogly', 'Hoelzel', 'Iida', 'Paparella', 'Petrea', 'Pflaum', 'Spampinato', 'Spaur', 'Umbaugh', 'Cerney', 'Athens', 'Salvas', 'Gardinier', 'Ammar', 'Arns', 'Calvi', 'Palazzola', 'Starlin', 'Quave', 'Rhame', 'Gulliford', 'Nettle', 'Picken', 'Warde', 'Pelissier', 'Mcteer', 'Freeny', 'Tappin', 'Bromell', 'People', 'Carthen', 'Battenfield', 'Bunte', 'Estrin', 'Fitzner', 'Flattery', 'Hlavacek', 'Holecek', 'Jorstad', 'Jurczak', 'Kraszewski', 'Lencioni', 'Mamula', 'Mater', 'Petrakis', 'Safranek', 'Santorelli', 'Speyer', 'Waterworth', 'Worner', 'Antonellis', 'Codispoti', 'Docken', 'Economos', 'Petrilla', 'Puccinelli', 'Rondinelli', 'Leibel', 'Santoya', 'Hader', 'Yeakley', 'Dowse', 'Hattan', 'Lia', 'Emel', 'Corse', 'Danes', 'Rambin', 'Dura', 'Kyne', 'Sanderford', 'Mincer', 'Rawl', 'Staves', 'Mccleave', 'Faniel', 'Abeln', 'Asta', 'Beymer', 'Cresap', 'Cryderman', 'Gutwein', 'Kaszuba', 'Maland', 'Marella', 'Mcmannis', 'Molenaar', 'Olivarria', 'Panfil', 'Pieratt', 'Ramthun', 'Resurreccion', 'Rosander', 'Rostad', 'Sallas', 'Santone', 'Schey', 'Shasteen', 'Spalla', 'Sui', 'Tannous', 'Tarman', 'Trayer', 'Wolman', 'Chausse', 'Debacker', 'Dozal', 'Hach', 'Klossner', 'Kruchten', 'Mahowald', 'Rosenlund', 'Steffenhagen', 'Vanmaanen', 'Wildasin', 'Winiecki', 'Dilauro', 'Wygal', 'Cadmus', 'Smallman', 'Sear', 'Berch', 'Nabor', 'Bro', 'Storr', 'Goynes', 'Chestang', 'Alvillar', 'Arya', 'Aton', 'Bors', 'Brydon', 'Castagno', 'Catena', 'Catterson', 'Chhun', 'Delrossi', 'Garnsey', 'Harbeson', 'Holum', 'Iglesia', 'Kleen', 'Lavallie', 'Lossing', 'Miyata', 'Myszka', 'Peth', 'Pyka', 'Radler', 'Roggenkamp', 'Sarra', 'Schmeltz', 'Schreifels', 'Schrimpf', 'Scrogham', 'Sieminski', 'Singson', 'Stichter', 'Vajda', 'Vilardo', 'Ziff', 'Cegielski', 'Fanara', 'Mefferd', 'Polanski', 'Reining', 'Roggow', 'Sassi', 'Wagenknecht', 'Roadcap', 'Tuman', 'Demesa', 'Surita', 'Armando', 'Macks', 'Megan', 'Angello', 'Bosher', 'Neugent', 'Croslin', 'Bumpas', 'Gladman', 'Demmons', 'Mcnairy', 'Sermons', 'Okonkwo', 'Alvira', 'Barfuss', 'Bersch', 'Bustin', 'Ciriello', 'Cords', 'Cuddeback', 'Debono', 'Delosh', 'Haeger', 'Ida', 'Kneer', 'Koppen', 'Kottwitz', 'Laib', 'Matsushita', 'Mckone', 'Meester', 'Ohashi', 'Pickert', 'Risso', 'Vannice', 'Vargason', 'Vorpahl', 'Gluth', 'Goossens', 'Kloeppel', 'Krolczyk', 'Lequire', 'Nila', 'Savoia', 'Wassmer', 'Bianca', 'Rousselle', 'Wittler', 'Gillean', 'Cervi', 'Fremin', 'Vanzanten', 'Varvel', 'Sween', 'Peron', 'Trudo', 'Welford', 'Scovil', 'Beazer', 'Cohill', 'Estime', 'Alcalde', 'Bugay', 'Bushard', 'Dethloff', 'Gahn', 'Gronau', 'Hogston', 'Kleinfelter', 'Ksiazek', 'Lyness', 'Marak', 'Munafo', 'Noorani', 'Plonski', 'Pontarelli', 'Presas', 'Ringenberg', 'Sabillon', 'Schaut', 'Shankland', 'Sheil', 'Shugrue', 'Soter', 'Stankovich', 'Arrants', 'Boeckmann', 'Boothroyd', 'Dysinger', 'Gersh', 'Monnig', 'Scheiderer', 'Slifka', 'Vilardi', 'Podell', 'Tarallo', 'Goodroe', 'Sardinha', 'Blish', 'Califf', 'Dorion', 'Dougall', 'Hamza', 'Boggus', 'Mccan', 'Branscomb', 'Baatz', 'Bendix', 'Hartstein', 'Hechler', 'Komatsu', 'Kooiman', 'Loghry', 'Lorson', 'Mcgoff', 'Moga', 'Monsees', 'Nigg', 'Pacitti', 'Shiffman', 'Shoupe', 'Snarski', 'Vrba', 'Wilmeth', 'Yurchak', 'Budney', 'Estok', 'Knipple', 'Krzywicki', 'Librizzi', 'Obringer', 'Poliquin', 'Severtson', 'Vecchiarelli', 'Zelazny', 'Eis', 'Wildeman', 'Gatt', 'Gordin', 'Dusenbury', 'Prew', 'Mander', 'Tine', 'Debarr', 'Bann', 'Mcguirt', 'Vanloan', 'Basdeo', 'Kosh', 'Bertha', 'Mcglothen', 'Youman', 'Hallums', 'Mcfield', 'Asano', 'Barbo', 'Braver', 'Bua', 'Buetow', 'Buttke', 'Estela', 'Kauk', 'Kosmicki', 'Kuecker', 'Lahm', 'Lienhard', 'Lombera', 'Menken', 'Niederhauser', 'Norcia', 'Petrelli', 'Phong', 'Piontkowski', 'Prihoda', 'Raffo', 'Sherpa', 'Shinsky', 'Skoczylas', 'Sosinski', 'Sua', 'Sur', 'Thorndike', 'Trease', 'Wessler', 'Witting', 'Ackroyd', 'Bartnick', 'Dziuba', 'Lisko', 'Muradyan', 'Pistilli', 'Riechers', 'Saxman', 'Rodi', 'Venables', 'Holway', 'Vargus', 'Oley', 'Delmont', 'Fuster', 'Wyndham', 'Whittenberg', 'Chustz', 'Swilling', 'Moncure', 'Housey', 'Mckiver', 'Shelvin', 'Aslin', 'Begeman', 'Capek', 'Christlieb', 'Colasanti', 'Daidone', 'Detlefsen', 'Elsass', 'Faus', 'Francke', 'Hensarling', 'Hollmann', 'Isaacks', 'Kocis', 'Kofman', 'Kwiatek', 'Osterkamp', 'Pickar', 'Prellwitz', 'Ramo', 'Steenson', 'Tomasulo', 'Weinreb', 'Wiard', 'Ambs', 'Baglio', 'Frayre', 'Hisaw', 'Justman', 'Morrical', 'Sherfey', 'Gera', 'Ilgenfritz', 'Silos', 'Boge', 'Darocha', 'Hennon', 'Hendriks', 'Purrington', 'Eunice', 'Kirks', 'Barbar', 'Guichard', 'Bonny', 'Lobban', 'Winrow', 'Alavi', 'Binner', 'Canan', 'Ciullo', 'Cyran', 'Doolen', 'Enquist', 'Fatzinger', 'Forsell', 'Harnisch', 'Hirose', 'Lunz', 'Mcbrearty', 'Mcgavin', 'Minkin', 'Ralphs', 'Ruegsegger', 'Shetter', 'Slagter', 'Tyminski', 'Ubben', 'Vanderschaaf', 'Wigfield', 'Zellman', 'Bettenhausen', 'Busker', 'Jabs', 'Mishkin', 'Sturdy', 'Vanstone', 'Tierce', 'Cormican', 'Mazzucco', 'Buenger', 'Gallier', 'Duma', 'Rainbow', 'Herlong', 'Chriswell', 'Litsey', 'Wyke', 'Kissoon', 'Sesler', 'Farve', 'Lalanne', 'Myhand', 'Heggs', 'Andujo', 'Arcilla', 'Bult', 'Caponigro', 'Commerford', 'Ditmars', 'Dressen', 'Eggemeyer', 'Forstner', 'From', 'Heldreth', 'Hevia', 'Leiphart', 'Mastrocola', 'Mcanelly', 'Mccrillis', 'Mellick', 'Mogle', 'Mummey', 'Nishiyama', 'Nordine', 'Picinich', 'Rafiq', 'Savo', 'Selvig', 'Sestak', 'Shafran', 'Smithhart', 'Soltani', 'Stillion', 'Szuch', 'Tigert', 'Trine', 'Un', 'Brest', 'Callari', 'Jaskowiak', 'Maneval', 'Sarchet', 'Szuba', 'Taubman', 'Wandel', 'Blok', 'Pasquarello', 'Sava', 'Diekman', 'Blight', 'Lovgren', 'Clemson', 'Lince', 'Kanady', 'Whipps', 'Coren', 'Coye', 'Patman', 'Souffrant', 'Bloodsaw', 'Amano', 'Cassaday', 'Cutillo', 'Dayrit', 'Deringer', 'Duwe', 'Favazza', 'Fennema', 'Hackleman', 'Harders', 'Imperiale', 'Kano', 'Kingma', 'Meuser', 'Neiger', 'Neitz', 'Nied', 'Prows', 'Riss', 'Rotundo', 'Scheurich', 'Stopa', 'Tonks', 'Veen', 'Volante', 'Maerz', 'Nunnelley', 'Sommerfeldt', 'Spoonemore', 'Wechter', 'Wehrli', 'Ackert', 'Begun', 'Dreyfuss', 'Frezza', 'Mako', 'Nagao', 'Lassetter', 'Linse', 'Raum', 'Graca', 'Enslow', 'Bruff', 'Hodgkin', 'Coone', 'Trippett', 'Tippitt', 'Sumerlin', 'Carelock', 'Whitelow', 'Beightol', 'Cappadona', 'Carrizal', 'Clendaniel', 'Cresci', 'Dietzman', 'Figge', 'Heyde', 'Jarema', 'Kyllonen', 'Laminack', 'Luddy', 'Monical', 'Mula', 'Picotte', 'Sandiego', 'Seki', 'Senner', 'Starkman', 'Stassi', 'Stuckert', 'Wiers', 'Wieting', 'Ziska', 'Ardelean', 'Hulslander', 'Loewenstein', 'Mearns', 'Roese', 'Sweaney', 'Winick', 'Zaring', 'Farry', 'Dulle', 'Gunnerson', 'Duden', 'Arts', 'Lame', 'Mcquerry', 'Smiles', 'Pennick', 'Adderly', 'Becka', 'Bluemel', 'Bocek', 'Bouwens', 'Deren', 'Dewitz', 'Doland', 'Ewton', 'Funnell', 'Gavel', 'Haidar', 'Kalkbrenner', 'Kawashima', 'Kueker', 'Lutze', 'Macareno', 'Nenninger', 'Schone', 'Seever', 'Sexauer', 'Sibilia', 'Sperrazza', 'Vanderhoef', 'Vanoss', 'Werre', 'Wotton', 'Behney', 'Bossart', 'Ellithorpe', 'Eyrich', 'Fosco', 'Fulginiti', 'Grumbles', 'Hoeger', 'Kizziah', 'Kloiber', 'Kudo', 'Majcher', 'Stickels', 'Stoler', 'Umholtz', 'Vasallo', 'Wenker', 'Wittmeyer', 'Telesco', 'Jha', 'Maulding', 'Campton', 'Verble', 'Mclure', 'Bernardin', 'Eison', 'Coffie', 'Ceesay', 'Balakrishnan', 'Barich', 'Bigman', 'Blumenstein', 'Bonafede', 'Cebulski', 'Chesbro', 'Cuaresma', 'Demarino', 'Derienzo', 'Donmoyer', 'Fairall', 'Gelpi', 'Giambra', 'Hasselman', 'Highlander', 'Hunker', 'Iyengar', 'Kulaga', 'Kuznicki', 'Labus', 'Limbert', 'Molchan', 'Neuharth', 'Overgaard', 'Paszkiewicz', 'Plescia', 'Redcay', 'Ritzer', 'Smirnov', 'Valiquette', 'Vannortwick', 'Warstler', 'Yantz', 'Beardall', 'Cimmino', 'Crnkovich', 'Konishi', 'Kosowski', 'Ragen', 'Sebert', 'Valla', 'Venancio', 'Maltez', 'Skehan', 'Abrantes', 'Colfer', 'Beman', 'Wilhelmsen', 'Wilking', 'Rorer', 'Shutes', 'Albany', 'Wearing', 'Assefa', 'Angeloni', 'Bisher', 'Blancett', 'Briel', 'Chiara', 'Clearman', 'Dengel', 'Detert', 'Fadely', 'Flinders', 'Garguilo', 'Goes', 'Hakimian', 'Henehan', 'Homewood', 'Kalla', 'Keirn', 'Kerwood', 'Laflam', 'Lynskey', 'Minhas', 'Mow', 'Olk', 'Ostergaard', 'Palecek', 'Poirrier', 'Raudenbush', 'Schlottman', 'Shatz', 'Sieloff', 'Stikeleather', 'Swavely', 'Tapanes', 'Teehan', 'Wendorff', 'Wollner', 'Bichsel', 'Brandenburger', 'Demattia', 'Eggebrecht', 'Koelzer', 'Landrigan', 'Morsch', 'Pittinger', 'Rewerts', 'Schopf', 'Tetro', 'Westenberger', 'Kieft', 'Overy', 'Cutrona', 'Misa', 'Erich', 'Swapp', 'Welchel', 'Messa', 'Ala', 'Witbeck', 'Mothershead', 'Stofer', 'Mcneice', 'Ayling', 'Zakaria', 'Bu', 'Rauf', 'Richbourg', 'Fristoe', 'Dorch', 'Mcclarin', 'Privott', 'Bonsu', 'Ayson', 'Bifulco', 'Brungard', 'Bub', 'Budzynski', 'Chizmar', 'Coriz', 'Corser', 'Daughdrill', 'Delre', 'Elfers', 'Fabrizi', 'Gunawan', 'Haecker', 'Hammac', 'Handwerk', 'Larcom', 'Liera', 'Littlewood', 'Luikart', 'Pasquarella', 'Radman', 'Ranft', 'Rigas', 'Santin', 'Sorbello', 'Tayag', 'Ureste', 'Weidinger', 'Yerena', 'Aase', 'Galyen', 'Halferty', 'Hindley', 'Kunath', 'Laprairie', 'Oza', 'Stohler', 'Tokarczyk', 'Yusupov', 'Nogueras', 'Jersey', 'Eastes', 'Agron', 'Boso', 'Kender', 'Couse', 'Moreta', 'Larrow', 'Degrace', 'Sonier', 'Tisdel', 'Creque', 'Esther', 'Girtman', 'Seraphin', 'Wesby', 'Kargbo', 'Adjei', 'Angeline', 'Biby', 'Brucks', 'Bucaro', 'Farman', 'Gerdeman', 'Hodsdon', 'Hoying', 'Kasperek', 'Keinath', 'Kidman', 'Kleier', 'Kuban', 'Lacko', 'Latourette', 'Leffert', 'Leonhart', 'Mathern', 'Ploss', 'Poblano', 'Raigoza', 'Santor', 'Schmitzer', 'Sirico', 'Skalsky', 'Spreen', 'Standlee', 'Vonbargen', 'Cederberg', 'Cornforth', 'Dercole', 'Diblasio', 'Fleer', 'Fredlund', 'Gehris', 'Guck', 'Lannen', 'Lurz', 'Mazzaferro', 'Neukam', 'Rookstool', 'Scharrer', 'Sevey', 'Sicairos', 'Skrocki', 'Sneeringer', 'Stefanowicz', 'Zuleger', 'Harmel', 'Sendejo', 'Bearer', 'Shur', 'Weers', 'Norell', 'Plotnick', 'Cecchi', 'Gandia', 'Bastone', 'Tole', 'Tramell', 'Willock', 'Rhome', 'Curington', 'Rapley', 'Hazley', 'Todman', 'Lathon', 'Alperin', 'Axtman', 'Boeke', 'Butson', 'Cestaro', 'Cosgriff', 'Docter', 'Eblin', 'Filsinger', 'Franzone', 'Gareau', 'Garfinkle', 'Gatch', 'Germosen', 'Grzywacz', 'Huesman', 'Kasel', 'Kazan', 'Manalang', 'Marando', 'Marchio', 'Massimino', 'Mcneer', 'Menger', 'Milanese', 'Monrreal', 'Moretto', 'Mulvany', 'Petkus', 'Rehling', 'Rubbo', 'Rudnik', 'Settlemire', 'Treon', 'Yaklin', 'Zittel', 'Betzold', 'Bohlin', 'Churilla', 'Conrath', 'Ozbun', 'Sciuto', 'Stitz', 'Sweigert', 'Tamanaha', 'Wallgren', 'Eplin', 'Ion', 'Liford', 'Orendorff', 'Wootan', 'Carmical', 'Mince', 'Stormes', 'Lantry', 'Sportsman', 'Corron', 'Padia', 'Cunnington', 'Pitta', 'Ori', 'Obara', 'Gaultney', 'Vanlue', 'Emmitt', 'Roddey', 'Payen', 'Elmi', 'Culmer', 'Mealing', 'Allegra', 'Bano', 'Batterman', 'Bickell', 'Dager', 'Drach', 'Duchesneau', 'Erdos', 'Fedorko', 'Fluhr', 'Gassmann', 'Gillig', 'Goedert', 'Golomb', 'Hatler', 'Jalali', 'Joosten', 'Koke', 'Lausch', 'Leisner', 'Mallinger', 'Marsolek', 'Mashek', 'Ognibene', 'Oishi', 'Outman', 'Paganelli', 'Passino', 'Petrak', 'Rosenwald', 'Schroader', 'Stehman', 'Tenuta', 'Todt', 'Tritz', 'Boerman', 'Doeden', 'Etcheverry', 'Grissinger', 'Gruenewald', 'Lijewski', 'Marcom', 'Niebauer', 'Rukavina', 'Sakuma', 'Woehrle', 'Amores', 'Krammes', 'Shontz', 'Bunning', 'Widdowson', 'Blankenburg', 'Goans', 'Longan', 'Aboud', 'Michelli', 'Rivere', 'Colla', 'Lory', 'Lougheed', 'Wadel', 'Chalkley', 'Gaubert', 'Goodlin', 'Bommer', 'Abbs', 'Rashad', 'Malachi', 'Abrigo', 'Akre', 'Antolik', 'Bachner', 'Blegen', 'Cona', 'Diantonio', 'Emde', 'Enrico', 'Follette', 'Hagarty', 'Hanser', 'Hulsman', 'Jelinski', 'Kalisz', 'Kolek', 'Kough', 'Ninneman', 'Offield', 'Perezgarcia', 'Plude', 'Printy', 'Rosengrant', 'Salminen', 'Schamberger', 'Teall', 'Zipfel', 'Bickler', 'Casanas', 'Holtzapple', 'Sachdeva', 'Scharnhorst', 'Schnack', 'Grode', 'Strough', 'Teare', 'Korona', 'Creelman', 'Simper', 'Marett', 'Nadeem', 'Pollet', 'Eduardo', 'Chipley', 'Vanrossum', 'Fabio', 'Colona', 'Whirley', 'Hider', 'Plaskett', 'Trabue', 'Gibert', 'Cabiness', 'Loyal', 'Rayson', 'Aloia', 'Aukerman', 'Broxterman', 'Cada', 'Catalanotto', 'Condos', 'Corriher', 'Eliopoulos', 'Furia', 'Girolamo', 'Haese', 'Israelson', 'Jaworowski', 'Jirik', 'Kalmar', 'Leipold', 'Lemmo', 'Loja', 'Loughmiller', 'Matelski', 'Mcrorie', 'Moeckel', 'Naill', 'Raczka', 'Rathgeber', 'Shamoun', 'Shannahan', 'Simler', 'Stamer', 'Stonehocker', 'Twersky', 'Voeltz', 'Willets', 'Wolgamott', 'Yamin', 'Acri', 'Dalgleish', 'Ehrenreich', 'Huish', 'Huxley', 'Pinkstaff', 'Rincones', 'Saric', 'Shreiner', 'Stitely', 'Tippets', 'Vanamburg', 'Zbikowski', 'Sharrett', 'Suther', 'Renta', 'Balles', 'Florentine', 'Chrisley', 'Offner', 'Matheus', 'Akens', 'Dugue', 'Rigaud', 'Mohamud', 'Magloire', 'Stigger', 'Andrist', 'Chaudoin', 'Clos', 'Cragin', 'Dinius', 'Duignan', 'Elk', 'Frenz', 'Frogge', 'Giammarino', 'Hackl', 'Jaeckel', 'Knieriem', 'Lajara', 'Lisak', 'Luxton', 'Merriott', 'Montini', 'Olender', 'Orebaugh', 'Orren', 'Osika', 'Sciascia', 'Selvaggio', 'Stoneback', 'Sweis', 'Torosyan', 'Trupp', 'Wardrip', 'Wigle', 'Beissel', 'Brakke', 'Carosella', 'Dobek', 'Eidem', 'Homolka', 'Kemery', 'Kinderman', 'Palla', 'Puccini', 'Szarek', 'Vandehei', 'Arca', 'Jou', 'Needs', 'Habermann', 'Hyle', 'Jagoda', 'Smigielski', 'Guttierrez', 'Awwad', 'Maccormack', 'Bassin', 'Achee', 'Demark', 'Jardon', 'Kelsoe', 'Olear', 'Comacho', 'Rosetta', 'Peddie', 'Delsol', 'Nwachukwu', 'Bagdasarian', 'Boehringer', 'Bunke', 'Burkhammer', 'Delahoya', 'Dietzen', 'Ditmer', 'Duchaine', 'Felske', 'Gumpert', 'Hansson', 'Hedeen', 'Jalil', 'Kalal', 'Kanan', 'Kaska', 'Kaufer', 'Knoff', 'Kornblum', 'Lanzi', 'Obenchain', 'Piatkowski', 'Prugh', 'Rima', 'Shadduck', 'Sodergren', 'Spitzley', 'Tauzin', 'Weigelt', 'Baldassarre', 'Biglin', 'Fuhriman', 'Gaumond', 'Ledvina', 'Meckler', 'Minteer', 'Nesser', 'Riederer', 'Ruelle', 'Turchi', 'Alberg', 'Vanderlip', 'Halder', 'Hop', 'Larmon', 'Bonfield', 'Ketch', 'Mannis', 'Mcallen', 'Alfonzo', 'Sampey', 'Guillet', 'Madaris', 'Lisby', 'Crowner', 'Frager', 'Coar', 'Crewe', 'Levier', 'Ligons', 'Abello', 'Brinsfield', 'Buccieri', 'Cantera', 'Cieslinski', 'Cragle', 'Flater', 'Grunert', 'Higinbotham', 'Janish', 'Kuennen', 'Lanners', 'Lesiak', 'Litvin', 'Madueno', 'Maffia', 'Manetta', 'Marschke', 'Mourer', 'Nordahl', 'Nordan', 'Pankowski', 'Petron', 'Qualley', 'Recht', 'Rosenbach', 'Ruttenberg', 'Saam', 'Savarino', 'Solana', 'Stumpff', 'Tsukamoto', 'Vanlanen', 'Wainer', 'Kasza', 'Kuehler', 'Landgren', 'Omahony', 'Paullin', 'Ramales', 'Schmelzle', 'Schnakenberg', 'Touma', 'Urgiles', 'Vorndran', 'Corne', 'Higman', 'Dutil', 'Reef', 'Racanelli', 'Gladwin', 'Jaspers', 'Crutchley', 'Homme', 'Hughbanks', 'Crismon', 'Burdin', 'Dise', 'Enzor', 'Hally', 'Mccone', 'Mckell', 'Belo', 'Moat', 'Ijames', 'Bussie', 'Papillion', 'Pratcher', 'Baranek', 'Bidlack', 'Boyadjian', 'Chern', 'Conahan', 'Dimuzio', 'Erker', 'Fregeau', 'Gelsinger', 'Gonzalo', 'Heo', 'Hoog', 'Jovanovich', 'Kaschak', 'Kasik', 'Katich', 'Laible', 'Mastel', 'Muellner', 'Pingleton', 'Rexroth', 'Schmitter', 'Stick', 'Strollo', 'Traficante', 'Veteto', 'Wampole', 'Winings', 'Amalfitano', 'Amiot', 'Camaj', 'Cuartas', 'Drotar', 'Eatherton', 'Fioretti', 'Fudala', 'Gehrman', 'Gittleman', 'Heppe', 'Maffucci', 'Tammen', 'Chovan', 'Ginley', 'Stipes', 'Antigua', 'Ironside', 'Kuroda', 'Lebar', 'Laske', 'Salay', 'Gisi', 'Mccormic', 'Veron', 'Robbin', 'Morain', 'Mayden', 'Vanputten', 'Triplet', 'Ravenel', 'Moragne', 'Bowdry', 'Agundez', 'Allinson', 'Bosko', 'Buehrle', 'Devey', 'Gasiorowski', 'Goettel', 'Halleran', 'Innocenti', 'Orser', 'Scarpati', 'Scherff', 'Schlott', 'Skilling', 'Speedy', 'Staal', 'Szafran', 'Szczech', 'Szczepanik', 'Venturella', 'Vert', 'Vogelgesang', 'Vollbrecht', 'Wiehe', 'Achterberg', 'Fadness', 'Groene', 'Halbrooks', 'Leavenworth', 'Pruski', 'Redifer', 'Schmiesing', 'Stanforth', 'Stepanski', 'Ziel', 'Hefter', 'Urman', 'Muela', 'Simpler', 'Elick', 'Shalabi', 'Cooner', 'Ferriera', 'Templer', 'Prashad', 'Gorum', 'Wheller', 'Spratling', 'Gutter', 'Eke', 'Rias', 'Belcourt', 'Bernards', 'Camburn', 'Cerqueira', 'Conkel', 'Deist', 'Derobertis', 'Desio', 'Eimer', 'Fayad', 'Frommelt', 'Guariglia', 'Laba', 'Labine', 'Lanius', 'Loconte', 'Nop', 'Omary', 'Penninger', 'Pentland', 'Pinkus', 'Richoux', 'Sturrock', 'Theil', 'Vanvranken', 'Bartoszek', 'Bruski', 'Engelken', 'Kranich', 'Mrazek', 'Muralles', 'Pienta', 'Salido', 'Sridhar', 'Turkington', 'Vellucci', 'Verhage', 'Derenzo', 'Lucker', 'Wands', 'Parrow', 'Branyon', 'Houff', 'Bossier', 'Reels', 'Rockmore', 'Altmeyer', 'Anacker', 'Antoniou', 'Berlinger', 'Busser', 'Caracci', 'Caseres', 'Corcino', 'Demint', 'Dhanani', 'Erekson', 'Farinacci', 'Ganesan', 'Gornick', 'Gresser', 'Kremers', 'Kreuter', 'Lesieur', 'Linarez', 'Mccrystal', 'Morang', 'Pucillo', 'Spicuzza', 'Tranchina', 'Tullar', 'Vantilburg', 'Yeck', 'Zandstra', 'Zeleny', 'Bearss', 'Burgner', 'Delich', 'Fetsch', 'Grom', 'Kreisel', 'Laprise', 'Legarreta', 'Musacchio', 'Rembold', 'Sjoblom', 'Skalicky', 'Sokolov', 'Tuminello', 'Vanskiver', 'Zidek', 'Severa', 'Stables', 'Guffy', 'Lebeck', 'Barradas', 'Chanley', 'Dayal', 'Villafranco', 'Droke', 'Popwell', 'Renier', 'Bolten', 'Mille', 'Swagerty', 'Grismore', 'Brantly', 'Divens', 'Ottey', 'Hagger', 'Advincula', 'Boschee', 'Buckbee', 'Carlan', 'Casciato', 'Cregar', 'Fehring', 'Ianniello', 'Interrante', 'Juedes', 'Kosier', 'Lizaola', 'Lorenzetti', 'Mccauslin', 'Older', 'Osuch', 'Ramstad', 'Sare', 'Stavinoha', 'Taborda', 'Warmoth', 'Weissmann', 'Winograd', 'Woeste', 'Zywicki', 'Blalack', 'Chavoya', 'Clickner', 'Daigrepont', 'Dissinger', 'Kovalik', 'Lemler', 'Shortall', 'Tucholski', 'Vanmetre', 'Zetino', 'Niezgoda', 'Recupero', 'Booms', 'Ramsburg', 'Berka', 'Mininger', 'Tamer', 'Baka', 'Jago', 'Bucks', 'Laude', 'Andrepont', 'Gair', 'Hayer', 'Kitching', 'Towson', 'Slappey', 'Syms', 'Derico', 'Badie', 'Kenon', 'Goffney', 'Amigon', 'Belsito', 'Bergamo', 'Caputi', 'Delpilar', 'Entsminger', 'Gehres', 'Geimer', 'Hada', 'Krolak', 'Kruer', 'Malaney', 'Mancias', 'Misiaszek', 'Pring', 'Salonga', 'Schaefers', 'Schmied', 'Schwertfeger', 'Scialabba', 'Stemmer', 'Stifter', 'Suon', 'Szczygiel', 'Weisse', 'Yackley', 'Decasas', 'Donado', 'Drenning', 'Eppich', 'Kertesz', 'Mihal', 'Mochizuki', 'Schiebel', 'Schlageter', 'Scruton', 'Weckerly', 'Wemhoff', 'Wernette', 'Zietz', 'Iwanicki', 'Ara', 'Barson', 'Resor', 'Rampy', 'Iskander', 'Oharra', 'Kope', 'Soli', 'Bodkins', 'Bussa', 'Maletta', 'Clemen', 'Vaneaton', 'Berkel', 'Salvage', 'Gilchrest', 'Whitter', 'Bruster', 'Mccowin', 'Gullatt', 'Cherubin', 'Flamer', 'Gueye', 'Angerer', 'Baray', 'Barreca', 'Bresson', 'Brougham', 'Buscaglia', 'Candee', 'Decelles', 'Durflinger', 'Dusenbery', 'Enomoto', 'Galliano', 'Klooster', 'Lowrimore', 'Manda', 'Morace', 'Raisanen', 'Ravenscraft', 'Rutman', 'Schmieg', 'Schorsch', 'Selim', 'Stanchfield', 'Stankowski', 'Tolosa', 'Uyeno', 'Vancleef', 'Kamdar', 'Kazlauskas', 'Kwasnik', 'Pivonka', 'Shrode', 'Sellinger', 'Deliz', 'Longerbeam', 'Schobert', 'Shader', 'Collister', 'Curtright', 'Franc', 'Wakely', 'Duree', 'Laban', 'Gascoigne', 'Noy', 'Hulon', 'Michele', 'Crowden', 'Dolton', 'Ryner', 'Gene', 'Tetterton', 'Laffitte', 'Laidler', 'Hoston', 'Akter', 'Biebel', 'Bohnenkamp', 'Bottger', 'Brecheisen', 'Bumbarger', 'Burgert', 'Burtnett', 'Coffing', 'Corigliano', 'Dault', 'Dettinger', 'Fenech', 'Golaszewski', 'Hernando', 'Hoppel', 'Kadrmas', 'Khim', 'Labrado', 'Leh', 'Michiels', 'Milkovich', 'Mosel', 'Nestle', 'Nunan', 'Palomarez', 'Peretz', 'Perno', 'Popowski', 'Pottebaum', 'Rallis', 'Rase', 'Rotramel', 'Sokolik', 'Sparlin', 'Zipf', 'Abruzzese', 'Branin', 'Cheslock', 'Chimenti', 'Czechowski', 'Diveley', 'Eisenbeis', 'Eisenhut', 'Friedt', 'Gehlhausen', 'Kamphaus', 'Mctiernan', 'Monnett', 'Schue', 'Steffensmeier', 'Gens', 'Schlotterbeck', 'Ask', 'Leser', 'Renville', 'Wisenbaker', 'Kellow', 'Mounsey', 'Dupin', 'Causer', 'Yapp', 'Stmary', 'Bowditch', 'Nickolson', 'Molla', 'Larke', 'Kamau', 'Cardinali', 'Deely', 'Deep', 'Dietel', 'Ferraris', 'Fons', 'Hahm', 'Huy', 'Imber', 'Leichliter', 'Longanecker', 'Lordi', 'Ludewig', 'Maiolo', 'Mckern', 'Meyering', 'Muhl', 'Nylen', 'Ohlendorf', 'Palmgren', 'Raffield', 'Reusser', 'Revette', 'Ridolfi', 'Rosemeyer', 'Seber', 'Silberberg', 'Sitzmann', 'Tayman', 'Tygart', 'Vertz', 'Volkmer', 'Bellemare', 'Benanti', 'Bialecki', 'Biber', 'Dipierro', 'Dornbush', 'Eichhorst', 'Messana', 'Neisen', 'Ottoson', 'Salmonson', 'Turcott', 'Vlachos', 'Wojdyla', 'Dagg', 'Hernan', 'Mannes', 'Fent', 'Tappen', 'Hyers', 'Gery', 'Deam', 'Channing', 'Gesner', 'Swaringen', 'Lakins', 'Cogbill', 'Allsbrook', 'Kennemore', 'Sumrell', 'Luma', 'Rookard', 'Shakoor', 'Philbert', 'Maragh', 'Wordlaw', 'Ofori', 'Arseneault', 'Arslanian', 'Aydin', 'Balthaser', 'Bensch', 'Boord', 'Botting', 'Brummet', 'Cassiday', 'Chubbuck', 'Crance', 'Dobis', 'Dymek', 'Kakar', 'Kipnis', 'Kooi', 'Kovack', 'Malzahn', 'Melendes', 'Micucci', 'Miklas', 'Molander', 'Nungesser', 'Razavi', 'Reppond', 'Reznick', 'Rosten', 'Schwegler', 'Sielaff', 'Sincavage', 'Soave', 'Socorro', 'Tausch', 'Tracz', 'Vey', 'Weltman', 'Wittich', 'Emswiler', 'Etzkorn', 'Kuchenbecker', 'Lampi', 'Pfahler', 'Thronson', 'Trefz', 'Pont', 'Hendrie', 'Russon', 'Coleson', 'Gregori', 'Herzfeld', 'Tamas', 'Oslin', 'Warrell', 'Basher', 'Elizabeth', 'Nickolas', 'Prigmore', 'Okray', 'Cannedy', 'Mercy', 'Daigre', 'Leggins', 'Savannah', 'Russaw', 'Opoku', 'Angier', 'Behrle', 'Budny', 'Cislo', 'Covalt', 'Dershem', 'Devincent', 'Dhar', 'Dombrosky', 'Dragovich', 'Drobny', 'Fess', 'Genthner', 'Gierhart', 'Hadzic', 'Hehir', 'Henle', 'Heyd', 'Hudlow', 'Janko', 'Kapral', 'Kietzman', 'Malburg', 'Maret', 'Mcever', 'Sann', 'Scheidel', 'Schultheiss', 'Sedita', 'Sigl', 'Starace', 'Stoklosa', 'Tainter', 'Tamburrino', 'Vankleeck', 'Vannucci', 'Wernecke', 'Widmayer', 'Agresti', 'Boshell', 'Dartt', 'Dobkin', 'Effertz', 'Gaydosh', 'Hocevar', 'Kluger', 'Mcguffee', 'Pekala', 'Tuchman', 'Keylon', 'Pletz', 'Germond', 'Keedy', 'Meir', 'Tromp', 'Solly', 'Baerga', 'Jawad', 'Chanda', 'Scobie', 'Snowball', 'Pricer', 'Graper', 'Bally', 'Mcfarlan', 'Duncombe', 'Mccory', 'Costen', 'Poplar', 'Denkins', 'Padmore', 'Waithe', 'Adduci', 'Aldaba', 'Berhow', 'Cocuzza', 'Dubroc', 'Earnheart', 'Eickholt', 'Gutzwiller', 'Heavin', 'Himebaugh', 'Jakubik', 'Kiang', 'Klusman', 'Knueppel', 'Neddo', 'Oakey', 'Rachlin', 'Spegal', 'Spizzirri', 'Stavola', 'Zika', 'Beverlin', 'Boehle', 'Caltagirone', 'Chernick', 'Ciaccia', 'Courchaine', 'Covault', 'Crihfield', 'Fojtik', 'Gronski', 'Huwe', 'Ostrovsky', 'Quraishi', 'Rauber', 'Scalici', 'Schuetze', 'Advani', 'Galer', 'Rog', 'Husson', 'Karpen', 'Ess', 'Henman', 'Slatten', 'Bango', 'Barkin', 'Vessell', 'Mayson', 'Kittles', 'Quince', 'Beardmore', 'Breceda', 'Carmony', 'Ciliberto', 'Cotroneo', 'Dimitroff', 'Granahan', 'Haacke', 'Huska', 'Jankiewicz', 'Klipp', 'Kostic', 'Langarica', 'Lanphier', 'Maran', 'Marmion', 'Mclinden', 'Mcpeake', 'Minkel', 'Nicolo', 'Quihuis', 'Siemsen', 'Somero', 'Spuhler', 'Spychalski', 'Stary', 'Stitzer', 'Stucke', 'Tango', 'Ticas', 'Vivero', 'Campen', 'Fei', 'Ganas', 'Klipfel', 'Vodicka', 'Zajdel', 'Ulin', 'Bodey', 'Moral', 'Fellenz', 'Charo', 'Cliver', 'Clasby', 'Neeson', 'Durell', 'Hew', 'Mcgray', 'Breaker', 'Haslem', 'Verser', 'Broner', 'Mannings', 'Darensbourg', 'Petithomme', 'Akbari', 'Amdahl', 'Boeger', 'Bougie', 'Buffo', 'Cisar', 'Deleonardis', 'Diffee', 'Dillen', 'Dingley', 'Dugo', 'Fedora', 'Habibi', 'Hartland', 'Hennelly', 'Kachmar', 'Louth', 'Mughal', 'Muska', 'Narang', 'Pontillo', 'Roel', 'Shehorn', 'Smick', 'Soliven', 'Starzyk', 'Swaminathan', 'Teagarden', 'Thune', 'Vokes', 'Volkov', 'Weckesser', 'Wigen', 'Donaghue', 'Ederer', 'Glaus', 'Gwozdz', 'Kimler', 'Kocak', 'Lagerquist', 'Pellecchia', 'Ruminski', 'Scholler', 'Steurer', 'Tlatelpa', 'Zegarra', 'Janssens', 'Jass', 'Ciriaco', 'Kessner', 'Georg', 'Harre', 'Brannam', 'Beel', 'Kaine', 'Roher', 'Evora', 'Rittman', 'Sion', 'Millon', 'Morre', 'Bouler', 'Seegars', 'Jenifer', 'Bernd', 'Chahine', 'Crisanto', 'Desautel', 'Dirosa', 'Fehringer', 'Fukui', 'Hetz', 'Hueber', 'Ivanova', 'Klecker', 'Kulzer', 'Machi', 'Menn', 'Mudry', 'Niro', 'Nyenhuis', 'Pressel', 'Prusinski', 'Roske', 'Shaefer', 'Stear', 'Stumpo', 'Teas', 'Tolsma', 'Troha', 'Vanveen', 'Waltermire', 'Zaretsky', 'Zingg', 'Arntson', 'Dizdarevic', 'Kassebaum', 'Natzke', 'Passanisi', 'Rodebaugh', 'Skonieczny', 'Vanhoozer', 'Wiechert', 'Golonka', 'Roycroft', 'Robl', 'Lisboa', 'Brandis', 'Symmes', 'Nou', 'Pawson', 'Comins', 'Ranker', 'Silman', 'Lonas', 'Goldthwaite', 'Aries', 'Leckey', 'Conolly', 'Ezelle', 'Degrasse', 'Tarte', 'Bonaventure', 'Rambeau', 'Alsobrooks', 'Blumenberg', 'Snape', 'Delane', 'Sarr', 'Rankine', 'Mcclarty', 'Skipwith', 'Mapps', 'Poke', 'Ahlman', 'Brunkow', 'Crissinger', 'Critcher', 'Cronce', 'Earney', 'Fischler', 'Franta', 'Haist', 'Hirschfield', 'Jacobe', 'Karraker', 'Kronenberger', 'Layland', 'Liscano', 'Lohrman', 'Luy', 'Macik', 'Makinen', 'Mis', 'Musarra', 'Orbe', 'Ortloff', 'Potempa', 'Presta', 'Rebollo', 'Rudden', 'Schab', 'Settlemyre', 'Shaban', 'Shiraishi', 'Shrake', 'Suba', 'Tornquist', 'Treglia', 'Vanschaick', 'Velten', 'Waln', 'Addeo', 'Dacquisto', 'Fenno', 'Gilberg', 'Halberstam', 'Holck', 'Landgrebe', 'Lipa', 'Luehrs', 'Mkrtchyan', 'Proscia', 'Schucker', 'Selner', 'Sinisi', 'Wandersee', 'Weigold', 'Winterrowd', 'Stoutenburg', 'Medinger', 'Bittman', 'Gerges', 'Langelier', 'Berdine', 'Hartshorne', 'Matters', 'Lavere', 'Delauter', 'Caillouet', 'Elford', 'Derrington', 'Mollison', 'Erskin', 'Doswell', 'Loadholt', 'Stepter', 'Contee', 'Adwell', 'Banez', 'Birchler', 'Bodman', 'Bransfield', 'Butzer', 'Cenci', 'Fabro', 'Fila', 'Follman', 'Geoffrion', 'Hardegree', 'Klindt', 'Kuzniar', 'Lapenta', 'Lasorsa', 'Lykens', 'Madariaga', 'Mcginnity', 'Mezger', 'Milleson', 'Nisly', 'Palau', 'Salz', 'Sholly', 'Spartz', 'Spevak', 'Svehla', 'Trafford', 'Treu', 'Winski', 'Zervas', 'Bautch', 'Dybas', 'Hillenburg', 'Krahl', 'Loretto', 'Mcanany', 'Meschke', 'Panuco', 'Pezzullo', 'Pokorski', 'Reinertson', 'Spoden', 'Steinbrenner', 'Wedig', 'Mom', 'Furner', 'Harpin', 'Carlston', 'Oo', 'Betten', 'Duro', 'Veronica', 'Klutz', 'Coven', 'Siles', 'Carby', 'Duvernay', 'Gory', 'Adamczak', 'Adee', 'Agius', 'Bachicha', 'Belka', 'Bridenstine', 'Cappella', 'Chiao', 'Georgiadis', 'Hansmann', 'Kettlewell', 'Klemann', 'Kracke', 'Legacy', 'Mateja', 'Mcgarrigle', 'Peitz', 'Pergande', 'Proia', 'Reicher', 'Rentfrow', 'Rudkin', 'Sahni', 'Santopietro', 'Sarin', 'Schear', 'Seckel', 'Sopp', 'Sorci', 'Terbush', 'Uplinger', 'Vantol', 'Zaro', 'Cuppett', 'Depetro', 'Hofferber', 'Kreifels', 'Kuznetsov', 'Matassa', 'Mazanec', 'Naegle', 'Sphar', 'Villaneda', 'Wachholz', 'Pastrano', 'Pilotte', 'Shedden', 'Molt', 'Dalia', 'Bishara', 'Dumoulin', 'Dehnert', 'Dilmore', 'Termine', 'Bracher', 'Laplace', 'Sherin', 'Morine', 'Garrott', 'Banford', 'Drumwright', 'Linnen', 'Belay', 'Juste', 'Moment', 'Adamec', 'Alessandrini', 'Bolda', 'Buonanno', 'Corrow', 'Couvillon', 'Dahnke', 'Durrani', 'Errett', 'Fingerhut', 'Ittner', 'Kandler', 'Khosla', 'Mascio', 'Mesch', 'Napolitan', 'Packman', 'Parady', 'Saline', 'Spatafore', 'Squiers', 'Stailey', 'Stolar', 'Strommen', 'Vahey', 'Vanbebber', 'Wimpee', 'Wolinsky', 'Yambao', 'Ciocca', 'Fornwalt', 'Giannattasio', 'Herbers', 'Korol', 'Lindenberger', 'Lysne', 'Piacentini', 'Vogeler', 'Cassetta', 'Hildebran', 'Masoud', 'Shiller', 'Fisler', 'Loll', 'Wattles', 'Carris', 'Hippe', 'Torregrossa', 'Thain', 'Enman', 'Kanno', 'Jeane', 'Clendenning', 'Halt', 'Dorin', 'Carnathan', 'Bisch', 'Simm', 'Goatley', 'July', 'Oke', 'Basley', 'Dillahunt', 'Times', 'Mcglown', 'Cohens', 'Jeanphilippe', 'Benshoof', 'Bensing', 'Bir', 'Birnie', 'Burklow', 'Capili', 'Cordts', 'Falanga', 'Farooqui', 'Furber', 'Godino', 'Gollnick', 'Harmening', 'Hilpert', 'Hrivnak', 'Iribe', 'Krienke', 'Kuntzman', 'Laslo', 'Loso', 'Omohundro', 'Rabadi', 'Reisenauer', 'Rohrich', 'Salak', 'Schuckman', 'Semmel', 'Sendelbach', 'Sidler', 'Stegmann', 'Sudbeck', 'Tara', 'Walcher', 'Walkenhorst', 'Wellbrock', 'Capaldo', 'Cotnoir', 'Durrence', 'Fralix', 'Leibfried', 'Schlarb', 'Whitenight', 'Grannan', 'Mugford', 'Filo', 'Soh', 'Deprez', 'Semidey', 'Vandivier', 'Shawl', 'Happy', 'Gartley', 'Jonathan', 'Bouquet', 'Warsaw', 'Verne', 'Furse', 'Holms', 'Bassette', 'Fishburne', 'Ambrosius', 'Amrein', 'Astorino', 'Bedonie', 'Bibee', 'Brearley', 'Chesher', 'Colasurdo', 'Deike', 'Dimarino', 'Felling', 'Freid', 'Gad', 'Gambale', 'Gieser', 'Greff', 'Halseth', 'Hamor', 'Hargens', 'Hohenberger', 'Hohler', 'Illes', 'Koscielniak', 'Kotara', 'Krygier', 'Lopinto', 'Mangas', 'Mantione', 'Mcendree', 'Musich', 'Nordling', 'Panagopoulos', 'Pollio', 'Score', 'Semaan', 'Tortorelli', 'Trabert', 'Troung', 'Vittorio', 'Barkdoll', 'Dombeck', 'Ferriter', 'Gancarz', 'Gubbels', 'Kertz', 'Langenderfer', 'Roppolo', 'Siglin', 'Trnka', 'Vanderkooi', 'Yaun', 'Witkin', 'Caryl', 'Boies', 'Carattini', 'Hannes', 'Harmison', 'Mctavish', 'Bille', 'Sullivant', 'Yeakey', 'Respess', 'Gooley', 'Maura', 'Jukes', 'Oguin', 'Demory', 'Morson', 'Hathorne', 'Anklam', 'Antaya', 'Bentler', 'Bettcher', 'Bresette', 'Broadrick', 'Degante', 'Demaray', 'Dipinto', 'Doberstein', 'Dorminey', 'Dorwart', 'Gugliuzza', 'Jesser', 'Kjar', 'Kujala', 'Lemarr', 'Lynds', 'Novitsky', 'Oropesa', 'Scarpulla', 'Schave', 'Siravo', 'Torma', 'Uva', 'Winkowski', 'Boscia', 'Buikema', 'Byland', 'Enneking', 'Enstrom', 'Gotsch', 'Kulakowski', 'Mattheis', 'Niemuth', 'Oberdorf', 'Rabuck', 'Shinners', 'Struebing', 'Dickes', 'Hettrick', 'Pille', 'Vilar', 'Blewitt', 'Gutt', 'Haseley', 'Pennel', 'Figuereo', 'Lassalle', 'Tannahill', 'Teats', 'Mumby', 'Cheves', 'Spark', 'Ale', 'Wally', 'Lowndes', 'Ballo', 'Couper', 'Alberta', 'Puller', 'Rochell', 'Bachar', 'Ballengee', 'Bellizzi', 'Boback', 'Cammarano', 'Dirr', 'Findling', 'Fruin', 'Ghattas', 'Kaliszewski', 'Kammeyer', 'Kwiecien', 'Lamora', 'Lehrke', 'Macewen', 'Nasta', 'Neibert', 'Ogaz', 'Olesky', 'Otano', 'Prescher', 'Romick', 'Scibetta', 'Slicker', 'Ungerer', 'Vanheel', 'Wadas', 'Weissert', 'Armiger', 'Brusca', 'Christeson', 'Crookshanks', 'Demarinis', 'Fahrney', 'Heiple', 'Howat', 'Knoedler', 'Kuske', 'Leifheit', 'Lukach', 'Nauert', 'Obremski', 'Seidenberg', 'Smigelski', 'Visscher', 'Wauneka', 'Whitmoyer', 'Wyand', 'Ilardi', 'Jackel', 'Rackham', 'Macgowan', 'Braid', 'Bringle', 'Dirk', 'Paci', 'Wears', 'Vanbergen', 'Sidle', 'Mellish', 'Paino', 'State', 'Cargle', 'Harcum', 'Beyene', 'Mwangi', 'Anderle', 'Cancienne', 'Compeau', 'Egle', 'Farone', 'Harke', 'Hollopeter', 'Jambor', 'Jermyn', 'Kadakia', 'Kerker', 'Langowski', 'Lechman', 'Nagengast', 'Narvaiz', 'Paola', 'Partch', 'Plucker', 'Rawe', 'Rohland', 'Rosebrook', 'Stanphill', 'Stoltman', 'Volkers', 'Balingit', 'Bausman', 'Besler', 'Dalto', 'Edgren', 'Hairfield', 'Janek', 'Kenoyer', 'Koska', 'Mihok', 'Monjaraz', 'Reisz', 'Snedegar', 'Vandezande', 'Viscomi', 'Kiene', 'Dib', 'Kuc', 'Magley', 'Swearingin', 'Culliton', 'Roome', 'Fendrick', 'Trindade', 'Whaling', 'Tarbutton', 'Sider', 'Swingler', 'Lover', 'Clarida', 'Jocelyn', 'Mervin', 'Blaize', 'Semper', 'Bagsby', 'Pree', 'Dieujuste', 'Anacleto', 'Annable', 'Bacci', 'Bottari', 'Cinco', 'Delzell', 'Dowless', 'Drilling', 'Egert', 'Fanton', 'Geerts', 'Ghaffari', 'Guggenheim', 'Hankes', 'Hediger', 'Hornig', 'Kauer', 'Kossman', 'Krasnow', 'Lauman', 'Lebsack', 'Liendo', 'Marhefka', 'Noguez', 'Oxman', 'Pa', 'Pella', 'Pongratz', 'Prisk', 'Rajagopalan', 'Rozo', 'Vanvorst', 'Wachob', 'Avolio', 'Banet', 'Boissonneault', 'Coglianese', 'Crudele', 'Dobratz', 'Gerdts', 'Koors', 'Mazzanti', 'Ozimek', 'Vanhove', 'Zern', 'Kalama', 'Mikelson', 'Renehan', 'Blecher', 'Meath', 'Bonus', 'Wesch', 'Kirkey', 'Goldbeck', 'Hun', 'Morgans', 'Strohman', 'Lanagan', 'Wyly', 'Syers', 'Berne', 'Tondreau', 'Witts', 'Budhu', 'Flott', 'Alsbrooks', 'Mabin', 'Kingsberry', 'Berend', 'Brandeberry', 'Carandang', 'Ciavarella', 'Foil', 'Galano', 'Garzia', 'Golembeski', 'Kossow', 'Kren', 'Lefave', 'Macmahon', 'Nilan', 'Peregrina', 'Pralle', 'Sahakian', 'Sarate', 'Scalzi', 'Soulliere', 'Srock', 'Stammen', 'Sterry', 'Tadych', 'Trembath', 'Watwood', 'Wolske', 'Woolson', 'Aversano', 'Chavana', 'Digiuseppe', 'Escano', 'Harkrider', 'Liebmann', 'Soldan', 'Swiatkowski', 'Tomala', 'Keay', 'Lindstedt', 'Maille', 'Thurner', 'Favia', 'Guedes', 'Simao', 'Rambow', 'Chriscoe', 'Hiss', 'Mcraney', 'Barke', 'Hobday', 'Buri', 'Sigle', 'Bawa', 'Lalande', 'Bordon', 'Friley', 'Feild', 'Arington', 'Jons', 'Funderburke', 'Mccommons', 'Troublefield', 'Mable', 'Hullum', 'Wrice', 'Cager', 'Barse', 'Braunschweig', 'Dasch', 'Fraioli', 'Giefer', 'Giovanniello', 'Glahn', 'Hatheway', 'Holtrop', 'Katsaros', 'Koetting', 'Malinoski', 'Markov', 'Mcclosky', 'Mccormac', 'Mertins', 'Milito', 'Mroczka', 'Overdorf', 'Palombi', 'Peninger', 'Provenza', 'Quinnell', 'Roady', 'Ruthven', 'Savitsky', 'Shenefield', 'Stapel', 'Venkataraman', 'Zachow', 'Aaberg', 'Bajorek', 'Bankowski', 'Barquero', 'Delcamp', 'Deshler', 'Halili', 'Hebenstreit', 'Hirota', 'Hladky', 'Kliethermes', 'Koestner', 'Kroes', 'Luepke', 'Mckeough', 'Mielcarek', 'Nobis', 'Olenik', 'Plessinger', 'Shillingburg', 'Spadaccini', 'Springborn', 'Werden', 'Willenbring', 'Zyskowski', 'Paucar', 'Werst', 'Wohlwend', 'Nauss', 'Alma', 'Tebeau', 'Paskett', 'Spindle', 'Twiddy', 'Alomar', 'Mi', 'Billard', 'Bails', 'Channer', 'Fripp', 'Abreo', 'Adamowicz', 'Bocian', 'Breden', 'Breitkreutz', 'Celona', 'Chizek', 'Chrestman', 'Ciaramella', 'Compher', 'Crannell', 'Dermer', 'Duryee', 'Feuerborn', 'Garrels', 'Gausman', 'Grippi', 'Guadamuz', 'Hatlestad', 'Heon', 'Hokenson', 'Kaden', 'Kluever', 'Lagares', 'Mamone', 'Mascola', 'Matich', 'Messimer', 'Mezera', 'Mongiello', 'Moradi', 'Nessler', 'Nijjar', 'Nin', 'Pasquarelli', 'Pawlowicz', 'Petitto', 'Petruccelli', 'Pullano', 'Rebar', 'Romack', 'Rosener', 'Soland', 'Solow', 'Vandervelden', 'Vazguez', 'Vonruden', 'Balmes', 'Berninger', 'Broecker', 'Clogston', 'Fontanella', 'Gubbins', 'Kampen', 'Levenhagen', 'Lyter', 'Nagamine', 'Regas', 'Riecke', 'Veltre', 'Wojahn', 'Angelino', 'Mccomber', 'Grisso', 'Saran', 'Pecore', 'Sorter', 'Encalada', 'Robart', 'Deerman', 'Lori', 'Mcnee', 'Dagher', 'Villars', 'Chaplain', 'Houtman', 'Dingwall', 'Akerson', 'Donaway', 'Dimmer', 'Mittman', 'Camm', 'Kenedy', 'Bilbro', 'Brocks', 'Mansaray', 'Acebo', 'Ahr', 'Alayon', 'Benyo', 'Blatnik', 'Degidio', 'Dumire', 'Elefante', 'Gase', 'Gilboy', 'Gradillas', 'Haverstock', 'Heberle', 'Hilmes', 'Hjort', 'Johnsey', 'Lambiase', 'Marland', 'Mcevilly', 'Mergenthaler', 'Mini', 'Noska', 'Patrie', 'Rohrback', 'Seelbach', 'Stopher', 'Trzaska', 'Vanessen', 'Veillette', 'Walizer', 'Zapalac', 'Andalon', 'Beukema', 'Cieslik', 'Dukart', 'Gerads', 'Gilhooly', 'Hinebaugh', 'Jumonville', 'Macchi', 'Oldenkamp', 'Plotz', 'Robideau', 'Streed', 'Trochez', 'Grames', 'Beltram', 'Fishbaugh', 'Lais', 'Ossa', 'Wilden', 'Erick', 'Dosier', 'Trust', 'Swaine', 'Darity', 'Mccroy', 'Yuille', 'Cantave', 'Barsanti', 'Carbonara', 'Cavanah', 'Chrismer', 'Cuestas', 'Czaplewski', 'Denes', 'Dorio', 'Geraldo', 'Giebler', 'Goewey', 'Gorniak', 'Grabe', 'Guidera', 'Hannig', 'Herin', 'Kadow', 'Klauer', 'Kleppinger', 'Lerro', 'Manoogian', 'Mentzel', 'Muramoto', 'Ollinger', 'Pacey', 'Pufahl', 'Quero', 'Revuelta', 'Rickles', 'Rudie', 'Ruggerio', 'Salberg', 'Schwoerer', 'Stephani', 'Stevick', 'Strada', 'Thorley', 'Thrun', 'Virts', 'Wingett', 'Balfe', 'Branaman', 'Brookshier', 'Carlsson', 'Chismar', 'Habben', 'Migdal', 'Ozga', 'Rivest', 'Russman', 'Schellhorn', 'Staup', 'Pietri', 'Welby', 'Cisney', 'Hijazi', 'Brines', 'Calderin', 'Mudrick', 'Domine', 'Parlow', 'Ervine', 'Banis', 'Mathenia', 'Carbin', 'Rashed', 'Mcgilvery', 'Prichett', 'Feimster', 'Smoots', 'Persley', 'Desire', 'Abadi', 'Bercaw', 'Bertz', 'Bibian', 'Brosious', 'Brunken', 'Calvano', 'Chenette', 'Chiusano', 'Dendinger', 'Diffley', 'Eichenberg', 'Gawne', 'Gelardi', 'Gottman', 'Gulyas', 'Hak', 'Haydock', 'Hettler', 'Hinsch', 'Kozlik', 'Krebbs', 'Krichbaum', 'Loges', 'Lyssy', 'Mitnick', 'Podolski', 'Priego', 'Radhakrishnan', 'Reineck', 'Ruggirello', 'Samborski', 'Schwalb', 'Sitek', 'Sprinkel', 'Tkachuk', 'Viscuso', 'Working', 'Zinner', 'Anspaugh', 'Anthes', 'Bratsch', 'Breining', 'Cejka', 'Delbuono', 'Hugill', 'Huyett', 'Irlbeck', 'Kilgus', 'Langwell', 'Margulis', 'Meara', 'Napierala', 'Stanaway', 'Worton', 'Gaucher', 'Bakeman', 'Pasos', 'Feazel', 'Evitt', 'Marrin', 'Baskette', 'Orne', 'Ivens', 'Burnstein', 'Rodell', 'Bowell', 'Maraj', 'Lango', 'Boudoin', 'Wider', 'Walkins', 'Raheem', 'Talford', 'Jeanmarie', 'Drumgoole', 'Arnot', 'Bennick', 'Buchinger', 'Cleven', 'Corsello', 'Delucchi', 'Dicocco', 'Eachus', 'Eilts', 'Fandino', 'Fyke', 'Giammarco', 'Gwartney', 'Hawken', 'Henkelman', 'Jaggi', 'Jurczyk', 'Kamman', 'Kattner', 'Keator', 'Klus', 'Leidner', 'Ligas', 'Martus', 'Maslow', 'Piccinini', 'Pysher', 'Riga', 'Siek', 'Sizelove', 'Vanostrand', 'Vastine', 'Viviani', 'Youngerman', 'Zahniser', 'Brigante', 'Burklund', 'Cajina', 'Coppolino', 'Goytia', 'Icenhower', 'Ihnen', 'Jablonsky', 'Koepsell', 'Mennenga', 'Redenius', 'Tengan', 'Weishaupt', 'Dorst', 'Kief', 'Busk', 'Luba', 'Quine', 'Deshotels', 'Roulston', 'Diniz', 'Chandley', 'Saleeby', 'Maro', 'Faidley', 'Burrous', 'Ilyas', 'Roster', 'Clovis', 'Bacot', 'Pembleton', 'Bellot', 'Entzminger', 'Ryce', 'Posley', 'Alvi', 'Audino', 'Bitters', 'Boomershine', 'Boyack', 'Branda', 'Bresnan', 'Brusco', 'Bunda', 'Catanzarite', 'Dohmen', 'Elbaum', 'Farago', 'Ferrentino', 'Gimpel', 'Grzeskowiak', 'Gutting', 'Henandez', 'Herbeck', 'Hoben', 'Hunnell', 'Ibbotson', 'Kida', 'Kirchman', 'Kubin', 'Laplume', 'Laskin', 'Lefferts', 'Leimer', 'Locatelli', 'Pitsenbarger', 'Reum', 'Rittgers', 'Scadden', 'Shammas', 'Tatge', 'Tiongson', 'Wengler', 'Wenrick', 'Wortley', 'Bretado', 'Detloff', 'Dlugosz', 'Eisemann', 'Embler', 'Graffius', 'Kienast', 'Kucher', 'Larew', 'Lemmerman', 'Maners', 'Peckinpaugh', 'Rupnow', 'Schubring', 'Staheli', 'Stege', 'Talwar', 'Truszkowski', 'Coda', 'Comunale', 'Holtry', 'Newfield', 'Blankley', 'Devino', 'Wahba', 'Cathell', 'Timson', 'Setzler', 'Shacklett', 'Nicols', 'Rocque', 'Nest', 'Freelove', 'Neat', 'Kina', 'Caslin', 'Creal', 'Wyre', 'Compere', 'Brisker', 'Givhan', 'Menifee', 'Hymon', 'Boakye', 'Aguillar', 'Alpern', 'Antico', 'Attridge', 'Bjorge', 'Bordwell', 'Brumbach', 'Castronova', 'Cowher', 'Fakhouri', 'Hanigan', 'Heidecker', 'Hosick', 'Lorang', 'Magadan', 'Marovich', 'Masur', 'Nienow', 'Passow', 'Priola', 'Prose', 'Radillo', 'Saracco', 'Schlender', 'Sellards', 'Stirn', 'Strathman', 'Supan', 'Taguchi', 'Tufte', 'Vanderleest', 'Vanderpoel', 'Vondra', 'Wayment', 'Wisinski', 'Brodowski', 'Cichowski', 'Delarocha', 'Demyan', 'Dobies', 'Hegner', 'Karapetian', 'Konieczka', 'Lazarz', 'Loughner', 'Portanova', 'Rosentreter', 'Rothlisberger', 'Schropp', 'Trenkamp', 'Flaharty', 'Murfin', 'Waner', 'Baiz', 'Dunegan', 'Gillson', 'Erne', 'Mahin', 'Hardgrave', 'Felps', 'Bevens', 'Abdou', 'Songy', 'Boule', 'Wisham', 'Devonshire', 'Havis', 'Relf', 'Pean', 'Manago', 'Brazzle', 'Mckelvin', 'Goulbourne', 'Pinkins', 'Yelder', 'Akina', 'Allerton', 'Aminov', 'Barsamian', 'Biondolillo', 'Bouchillon', 'Bustle', 'Dolney', 'Dunkerley', 'Farha', 'Floor', 'Gaustad', 'Gilberti', 'Helder', 'Kolber', 'Kuznia', 'Longhi', 'Mamaril', 'Milhorn', 'Mozo', 'Norbury', 'Okano', 'Perkovich', 'Rafanan', 'Rulo', 'Ruperto', 'Scow', 'Shadoan', 'Smisek', 'Steinfeldt', 'Thobe', 'Venturino', 'Widell', 'Broccoli', 'Helmig', 'Koegler', 'Lewandoski', 'Pequignot', 'Radermacher', 'Resetar', 'Rostro', 'Sebald', 'Walgren', 'Lottes', 'Capraro', 'Grine', 'Gordner', 'Crus', 'Easom', 'Bayle', 'Barts', 'Duguid', 'Estel', 'Peggs', 'Cheaney', 'Rossin', 'Mackel', 'Vassel', 'Fils', 'Senat', 'Alarie', 'Allar', 'Brownlie', 'Bumbaugh', 'Caissie', 'Cordone', 'Critser', 'Delconte', 'Falzon', 'Formosa', 'Frerking', 'Gadea', 'Ganem', 'Guzek', 'Hauch', 'Heese', 'Hemmen', 'Holzschuh', 'Impson', 'Jablon', 'Kiedrowski', 'Krob', 'Kuhnle', 'Laake', 'Larouche', 'Leaton', 'Leyland', 'Lorenson', 'Macduff', 'Maready', 'Newberger', 'Ohnstad', 'Pinela', 'Polino', 'Postema', 'Pyon', 'Radziewicz', 'Rathod', 'Salopek', 'Salvadore', 'Sawchuk', 'Trotto', 'Vereb', 'Auslander', 'Beninati', 'Blunck', 'Decandia', 'Deeney', 'Escatel', 'Foskett', 'Hagmann', 'Hussar', 'Jakubek', 'Kluender', 'Mcelhinny', 'Salatino', 'Sangalang', 'Schoenfeldt', 'Stogdill', 'Svitak', 'Taravella', 'Tezak', 'Wieseler', 'Komperda', 'Reinitz', 'Malis', 'Duce', 'Salib', 'Keelin', 'Labell', 'Symmonds', 'Gwynne', 'Byus', 'Burgy', 'Delfosse', 'Benskin', 'Hedgepath', 'Ursin', 'Kinnebrew', 'Tinnon', 'Callum', 'Allah', 'Arduini', 'Azucena', 'Birkel', 'Bowermaster', 'Caires', 'Chrobak', 'Cottier', 'Cropley', 'Crotteau', 'Dutan', 'Ezernack', 'Fabiani', 'Fauser', 'Feeny', 'Ferdig', 'Fliss', 'Gallus', 'Harlacher', 'Hasselbach', 'Honsinger', 'Landberg', 'Lohn', 'Losinski', 'Maung', 'Melikian', 'Nooney', 'Oyervides', 'Prum', 'Riepe', 'Seebach', 'Sendejas', 'Sprick', 'Torino', 'Weida', 'Geschke', 'Girgenti', 'Klever', 'Rathert', 'Roszell', 'Sarich', 'Shimmin', 'Trimpe', 'Turrubiates', 'Zelada', 'Danzig', 'Diamant', 'Hannen', 'Odland', 'Puzzo', 'Slyter', 'Smaldone', 'Ebey', 'Beg', 'Magel', 'Tebbs', 'Gali', 'Winney', 'Juba', 'Stargel', 'Waren', 'Stann', 'Ducasse', 'Vaugh', 'Lewers', 'Stjuste', 'Heckstall', 'Bokhari', 'Bonino', 'Brummond', 'Caterino', 'Deatrick', 'Decorte', 'Demara', 'Dubree', 'Dulski', 'Feck', 'Foglio', 'Heinzelman', 'Jory', 'Knoell', 'Kronick', 'Maclay', 'Mastrogiovanni', 'Reichling', 'Rueff', 'Sellitto', 'Sensing', 'Sheu', 'Soberanes', 'Stahlecker', 'Wholey', 'Yochim', 'Zeiss', 'Bojanowski', 'Bonawitz', 'Caporaso', 'Dalesio', 'Exposito', 'Giovinazzo', 'Palardy', 'Rastogi', 'Saenger', 'Sirek', 'Sonoda', 'Sovereign', 'Weimann', 'Wirtanen', 'Enerson', 'Olliff', 'Kallam', 'Leggitt', 'Goude', 'Rampey', 'Letsinger', 'Walles', 'Kater', 'Betsill', 'Creese', 'Lisbon', 'Abitz', 'Bednarik', 'Bendorf', 'Berkovich', 'Brevik', 'Cassatt', 'Ciarlo', 'Cookman', 'Cosma', 'Defee', 'Essner', 'Fallas', 'Holda', 'Kemler', 'Kovich', 'Krimmel', 'Landauer', 'Meharg', 'Moncus', 'Nabi', 'Redenbaugh', 'Ruwe', 'Scalisi', 'Shughart', 'Sloma', 'Sovine', 'Tomaso', 'Trueba', 'Urista', 'Vanyo', 'Wolanski', 'Zettle', 'Arvanitis', 'Baeten', 'Caponi', 'Carrazco', 'Galambos', 'Hartsook', 'Helseth', 'Kobylarz', 'Krugh', 'Meckel', 'Ohnemus', 'Voytek', 'Winegarden', 'Zuba', 'Piloto', 'Shames', 'Debella', 'Keddy', 'Perra', 'Winks', 'Hemrick', 'Snowdon', 'Cleere', 'Leavey', 'Courington', 'Herson', 'Nelon', 'Bloise', 'Mcphie', 'Catledge', 'Mcneary', 'Hoffler', 'Suell', 'Coard', 'Woolfork', 'Biros', 'Brouhard', 'Dinovo', 'Disano', 'Emami', 'Flegal', 'Hardebeck', 'Hobin', 'Huttner', 'Kloosterman', 'Knutzen', 'Kopinski', 'Mailman', 'Mankey', 'Mccamish', 'Mccorquodale', 'Minichiello', 'Miyasaki', 'Osher', 'Prutzman', 'Sagen', 'Shawgo', 'Sokolow', 'Southam', 'Sulik', 'Wiedel', 'Wollschlager', 'Cantalupo', 'Cruser', 'Denomme', 'Dinardi', 'Donahey', 'Havlin', 'Lasecki', 'Margraf', 'Mchaffie', 'Mihaly', 'Omlor', 'Roope', 'Schremp', 'Vanhecke', 'Washabaugh', 'Zaunbrecher', 'Joost', 'Pensinger', 'Kraner', 'Mikles', 'Delair', 'Bukhari', 'Earll', 'Sans', 'Gatliff', 'Casteneda', 'Shalom', 'Fidalgo', 'Leitao', 'Degrange', 'Fruits', 'Kercheval', 'Mew', 'Chopin', 'Seawood', 'Agro', 'Aliano', 'Badour', 'Betsch', 'Buchbinder', 'Cleavenger', 'Collazos', 'Cusmano', 'Dienes', 'Dittus', 'Eggenberger', 'Fierst', 'Gingell', 'Greever', 'Grisales', 'Hegstrom', 'Justen', 'Kalt', 'Kirkhart', 'Krage', 'Kyzar', 'Livolsi', 'Neyhart', 'Nunziata', 'Orlich', 'Parcel', 'Peshlakai', 'Schemm', 'Segner', 'Urieta', 'Wolfman', 'Coonradt', 'Disilvestro', 'Dobrowski', 'Gramza', 'Kotlyar', 'Micka', 'Miksch', 'Mione', 'Montone', 'Palmerton', 'Parrill', 'Passafiume', 'Rosoff', 'Spaziani', 'Venditto', 'Wisch', 'Fini', 'Horky', 'Perel', 'Arzuaga', 'Nasworthy', 'Carland', 'Elden', 'Moises', 'Maione', 'Glace', 'Laverdure', 'Sieh', 'Toulouse', 'Hannam', 'Cumber', 'Rendell', 'Hardey', 'Maddison', 'Brittle', 'Helen', 'Aina', 'Allwood', 'Fenty', 'Herard', 'Traore', 'Ator', 'Bedsaul', 'Bickert', 'Brendlinger', 'Camuso', 'Dutter', 'Eastlick', 'Fernholz', 'Guza', 'Heitzenrater', 'Huo', 'Isbill', 'Katzenstein', 'Keigley', 'Kelnhofer', 'Klarich', 'Mangat', 'Mathiason', 'Murzyn', 'Odenthal', 'Pascarelli', 'Passwaters', 'Rotunda', 'Schons', 'Sein', 'Sobon', 'Stayner', 'Tri', 'Uhlir', 'Viscusi', 'Winstanley', 'Xi', 'Yodice', 'Aerts', 'Antosh', 'Baldinger', 'Brislin', 'Christopoulos', 'Faurot', 'Fusselman', 'Hamsher', 'Henckel', 'Macht', 'Moellering', 'Oclair', 'Pavelko', 'Poehlman', 'Rajewski', 'Richcreek', 'Schmeichel', 'Venkatesh', 'Zemba', 'Zuelke', 'Dechellis', 'Reddig', 'Splain', 'Claw', 'Mottram', 'Crise', 'Villaflor', 'Allocca', 'Buttrum', 'Cocking', 'Mundie', 'Tavis', 'Saidi', 'Latter', 'Tuberville', 'Spease', 'Leatherberry', 'Peatross', 'Claridy', 'Duerson', 'Durley', 'Mekonnen', 'Thiam', 'Aderman', 'Al', 'Andreu', 'Beine', 'Bowron', 'Campi', 'Chura', 'Ciraulo', 'Daywalt', 'Fleek', 'Friant', 'Gahm', 'Gongaware', 'Grosh', 'Heaslip', 'Knape', 'Kravets', 'Kritikos', 'Kumagai', 'Kustra', 'Madani', 'Mich', 'Norlander', 'Paulhus', 'Rabanal', 'Saker', 'Stupak', 'Suchomel', 'Vandenberghe', 'Wehrenberg', 'Zaccardi', 'Davlin', 'Dykhouse', 'Grandfield', 'Hullender', 'Kallis', 'Livshits', 'Rihn', 'Criger', 'Michl', 'Tutino', 'Zulueta', 'Cristo', 'Meline', 'Fetch', 'Dung', 'Shami', 'Teale', 'Cocker', 'Eshbach', 'Phagan', 'Millea', 'Tayloe', 'Olivia', 'Houchen', 'Peddy', 'Ferryman', 'Boodram', 'Maduro', 'Fullman', 'Landingham', 'Pee', 'Argenbright', 'Aronowitz', 'Baldenegro', 'Barentine', 'Bernasconi', 'Bicking', 'Bohle', 'Camerer', 'Dufford', 'Ende', 'Gessel', 'Grauman', 'Jaqua', 'Kagawa', 'Kalinski', 'Kanz', 'Klasen', 'Koloski', 'Kriete', 'Litalien', 'Maish', 'Massar', 'Muraski', 'Pickelsimer', 'Sagraves', 'Servellon', 'Shellito', 'Shiveley', 'Stanislaw', 'Volland', 'Biehle', 'Cruey', 'Eagar', 'Ermis', 'Goracke', 'Mackert', 'Malloch', 'Merillat', 'Rylee', 'Schelin', 'Tibbals', 'Zandi', 'Golde', 'Steuart', 'Jamie', 'Lavis', 'Bromwell', 'Tregre', 'Alkhatib', 'Carvey', 'Essa', 'Wale', 'Mccarey', 'Brandley', 'Hermon', 'Stenhouse', 'Oguinn', 'Barclift', 'Sylvan', 'Smyre', 'Ellerby', 'Alemany', 'Beyl', 'Boven', 'Bultema', 'Buzan', 'Cappo', 'Cottongim', 'Detore', 'Dierolf', 'Dueck', 'Egelston', 'Emard', 'Eveleth', 'Ferrini', 'Fodera', 'Hidy', 'Kahley', 'Karasik', 'Klare', 'Koudelka', 'Lafleche', 'Minturn', 'Montemarano', 'Plock', 'Ratterman', 'Reingold', 'Rieber', 'Schnackenberg', 'Schrade', 'Steffek', 'Stehling', 'Sticha', 'Velaquez', 'Weissberg', 'Allnutt', 'Barkhurst', 'Bettendorf', 'Canonico', 'Deshmukh', 'Dobosz', 'Glab', 'Kirkeby', 'Menapace', 'Parizek', 'Pursifull', 'Ragucci', 'Raisch', 'Schronce', 'Tuason', 'Duross', 'Hainer', 'Kinnick', 'Rens', 'Williamsen', 'Hilke', 'Hark', 'Mellett', 'Decarvalho', 'Filyaw', 'Sian', 'Mccard', 'Symon', 'Grade', 'Giboney', 'Sadik', 'Caul', 'Gater', 'Sulton', 'Dungee', 'Adriance', 'Almas', 'Andler', 'Bellina', 'Belshe', 'Blouch', 'Bradeen', 'Brandwein', 'Buechele', 'Cristina', 'Davidov', 'Defiore', 'Defrain', 'Derasmo', 'Dober', 'Grosshans', 'Hoek', 'Hofstad', 'Ingman', 'Kille', 'Langill', 'Matic', 'Niederer', 'Novella', 'Oelkers', 'Percifield', 'Phariss', 'Pola', 'Pompei', 'Potthast', 'Raden', 'Radick', 'Rendina', 'Sicotte', 'Sleep', 'Wadhwa', 'Buccheri', 'Calogero', 'Catrett', 'Flemmer', 'Mancinas', 'Mcmichen', 'Measel', 'Pudlo', 'Ruether', 'Shusterman', 'Stabley', 'Teffeteller', 'Waisanen', 'Zappulla', 'Symanski', 'Mckenrick', 'Moger', 'Obispo', 'Armenteros', 'Roses', 'Makki', 'Faley', 'Rumford', 'Schonberg', 'Hizer', 'Blaydes', 'Coor', 'Mccalip', 'Stancill', 'Cal', 'Murat', 'Amie', 'Placide', 'Akpan', 'Bembenek', 'Bilyk', 'Bizzarro', 'Bugge', 'Cunnane', 'Degenhart', 'Doehring', 'Flammia', 'Fritcher', 'Godinho', 'Gouger', 'Heyboer', 'Humenik', 'Iannaccone', 'Lacivita', 'Lagunes', 'Leitzke', 'Luty', 'Maute', 'Micke', 'Midura', 'Nydam', 'Rasp', 'Rediker', 'Requejo', 'Roskos', 'Ruckert', 'Saldierna', 'Salemme', 'Tsuchiya', 'Vallas', 'Werder', 'Arenivas', 'Bartholomay', 'Brozowski', 'Dusza', 'Frevert', 'Giannopoulos', 'Kormos', 'Martos', 'Mollenhauer', 'Romanek', 'Solinger', 'Tomaro', 'Zangara', 'Buttrick', 'Pardy', 'Alvelo', 'Breth', 'Hemond', 'Kayes', 'Manne', 'Grandchamp', 'Gilbo', 'Calame', 'Clippard', 'Gieger', 'Penalver', 'Ecton', 'Totton', 'Poyser', 'Kettles', 'Hosang', 'Waker', 'Maryland', 'Girma', 'Baribeau', 'Boehnke', 'Brunick', 'Buhrow', 'Cerreta', 'Dascoli', 'Eroh', 'Fallert', 'Fotopoulos', 'Granholm', 'Hebdon', 'Hoelzer', 'Hyser', 'Lisanti', 'Mastrianni', 'Mewes', 'Mulanax', 'Nikolai', 'Odekirk', 'Ofallon', 'Onnen', 'Or', 'Osso', 'Ridpath', 'Schara', 'Schnipke', 'Slayter', 'Sodhi', 'Steffler', 'Stegemann', 'Weisensel', 'Bertling', 'Dueitt', 'Keehner', 'Khaimov', 'Kramlich', 'Salkeld', 'Ulbricht', 'Vultaggio', 'Dennin', 'Mondo', 'Kett', 'Dom', 'Kalan', 'Yaney', 'Nicley', 'Carabello', 'Ellegood', 'Mcglocklin', 'Figuero', 'Pillard', 'Wolfrey', 'Leys', 'Cobert', 'Wahid', 'Fede', 'Ausbrooks', 'Gums', 'Gillion', 'Mcgeachy', 'Parran', 'Likely', 'Marbley', 'Argote', 'Bhullar', 'Botros', 'Brethauer', 'Chell', 'Conradi', 'Covill', 'Crays', 'Crysler', 'Handke', 'Hanneken', 'Hidrogo', 'Hirayama', 'Huebert', 'Hurford', 'Iskra', 'Malczewski', 'Menees', 'Monforte', 'Murdick', 'Naclerio', 'Nohr', 'Pangallo', 'Payeur', 'Pozniak', 'Rammel', 'Schield', 'Schrick', 'Seifer', 'Sperduto', 'Stagliano', 'Staubs', 'Stromme', 'Tourigny', 'Traister', 'Vandecar', 'Wilhelms', 'Wilinski', 'Wittke', 'Clougherty', 'Crotwell', 'Hannula', 'Heavrin', 'Heidinger', 'Keehan', 'Ortwein', 'Palinkas', 'Seivert', 'Sloniker', 'Yielding', 'Lac', 'Shove', 'Venard', 'Violett', 'Foresta', 'Gapp', 'Dejongh', 'Ambrosia', 'Simkin', 'Sastre', 'Mcarthy', 'Bering', 'Sarah', 'Hickling', 'Sookdeo', 'Val', 'Colden', 'Feltus', 'Hailes', 'Canizalez', 'Cloke', 'Connole', 'Dancel', 'Demmon', 'Ehrler', 'Fruchey', 'Helinski', 'Hepfer', 'Katzen', 'Kressler', 'Lagrow', 'Nethercutt', 'Novitski', 'Papale', 'Pesola', 'Petrosian', 'Pies', 'Prazak', 'Preza', 'Reiche', 'Salle', 'Savic', 'Servello', 'Sherbondy', 'Solazzo', 'Stabenow', 'Walstad', 'Yaden', 'Zagal', 'Zani', 'Dimambro', 'Engquist', 'Fochtman', 'Frasch', 'Fuerstenberg', 'Galus', 'Gronowski', 'Grossenbacher', 'Hahs', 'Iavarone', 'Kerper', 'Kravchenko', 'Kwolek', 'Lusignan', 'Lybbert', 'Maertens', 'Mahany', 'Medico', 'Orrantia', 'Reitmeier', 'Sieve', 'Sterbenz', 'Tenpas', 'Wischmeyer', 'Zajkowski', 'Cregg', 'Shetley', 'Tisher', 'Coup', 'Murdy', 'Lysaght', 'Sesco', 'Koy', 'Wakley', 'Bertholf', 'Swaner', 'Stakes', 'Gren', 'Elahi', 'Torney', 'Gopaul', 'Egland', 'Gingles', 'Aurich', 'Biela', 'Binz', 'Blumenstock', 'Boardwine', 'Boehner', 'Boening', 'Crankshaw', 'Decarli', 'Fauble', 'Georgopoulos', 'Gieske', 'Hasselbring', 'Heeb', 'Janosik', 'Kalafut', 'Karpf', 'Kramm', 'Lanyon', 'Lewelling', 'Lilla', 'Marik', 'Moyano', 'Oppel', 'Panagos', 'Renovato', 'Rohlman', 'Rostron', 'Todhunter', 'Torello', 'Umfleet', 'Wien', 'Youker', 'Ytuarte', 'Zavada', 'Altvater', 'Arnzen', 'Blixt', 'Elek', 'Geiselman', 'Hiltunen', 'Jachim', 'Kolenovic', 'Kooyman', 'Muecke', 'Pierron', 'Preisler', 'Rogus', 'Schoeller', 'Solimine', 'Speagle', 'Courser', 'Mascarenhas', 'Dorer', 'Scotten', 'Goy', 'Avers', 'Blanca', 'Choung', 'Goleman', 'Nanna', 'Lave', 'Seley', 'Meggison', 'Ripoll', 'Mannan', 'Bihm', 'Tribbey', 'Ports', 'Asby', 'Philibert', 'Furby', 'Keal', 'Louallen', 'Idris', 'Artist', 'Branford', 'Sabree', 'Ainley', 'Amezola', 'Andreason', 'Athans', 'Batiz', 'Bostelman', 'Bozic', 'Butman', 'Coiro', 'Defina', 'Garbo', 'Gewirtz', 'Hathcoat', 'Heebner', 'Helbing', 'Kasler', 'Kastler', 'Kearby', 'Krus', 'Lezotte', 'Lithgow', 'Mealor', 'Moltz', 'Morcom', 'Norbeck', 'Novicki', 'Osmani', 'Posluszny', 'Quiroa', 'Rahal', 'Roddenberry', 'Rodino', 'Sallade', 'Saraceni', 'Schmaus', 'Stathopoulos', 'Swatek', 'Tupy', 'Vonseggern', 'Zens', 'Ahonen', 'Arrazola', 'Avedisian', 'Bachtell', 'Bastarache', 'Chavero', 'Darienzo', 'Giampa', 'Gillott', 'Hierholzer', 'Kruckeberg', 'Lafrenz', 'Milkowski', 'Missildine', 'Passaretti', 'Rogstad', 'Saadeh', 'Sielski', 'Slavick', 'Tieken', 'Wittenmyer', 'Yepiz', 'Zimdars', 'Rail', 'Kook', 'Jian', 'Piet', 'Sanjurjo', 'Shampine', 'Christel', 'Hechavarria', 'Blucher', 'Crimm', 'Lebreton', 'Charbonnet', 'Bolls', 'Stroder', 'Baise', 'Mcnease', 'Alen', 'Priestly', 'Mannie', 'Doleman', 'Areas', 'Atayde', 'Berent', 'Bodmer', 'Brodin', 'Buntrock', 'Eckrich', 'Emberson', 'Hilgert', 'Hirn', 'Holihan', 'Hoshino', 'Jeung', 'Leece', 'Leonardis', 'Macera', 'Mcferron', 'Muster', 'Naef', 'Pecka', 'Peloso', 'Pensyl', 'Reaney', 'Reidinger', 'Rockholt', 'Tabrizi', 'Trauth', 'Trulock', 'Tupou', 'Asbridge', 'Franzel', 'Gesualdi', 'Grimwood', 'Hardinger', 'Kondrat', 'Koskinen', 'Ludolph', 'Marchesi', 'Mehrtens', 'Racioppi', 'Sabey', 'Stroebel', 'Swendsen', 'Vandewalker', 'Korber', 'Messler', 'Mowat', 'Kor', 'Pua', 'Sarazin', 'Wayson', 'Oland', 'Bandi', 'Fabel', 'Frankl', 'Rane', 'Mozer', 'Weaber', 'Moustafa', 'Robe', 'Lindy', 'Medaris', 'Derden', 'Benthall', 'Ayler', 'Osias', 'Choyce', 'Scantlebury', 'Patmon', 'Ahlgrim', 'Boffa', 'Brideau', 'Bubeck', 'Bubel', 'Casio', 'Casique', 'Casten', 'Colebank', 'Demoura', 'Devincenzo', 'Elsesser', 'Fauci', 'Frentz', 'Hemler', 'Keitel', 'Luan', 'Luhn', 'Luquette', 'Mazurowski', 'Mendibles', 'Mickiewicz', 'Minelli', 'Mistler', 'Nemer', 'Nikolaus', 'Offill', 'Pezza', 'Ruzich', 'Skrzypek', 'Swimmer', 'Trucks', 'Vaccarella', 'Zeidman', 'Brattin', 'Deblock', 'Dufrane', 'Gural', 'Hufstedler', 'Kapuscinski', 'Lyerla', 'Musolino', 'Neubecker', 'Polus', 'Protzman', 'Retzloff', 'Sachdev', 'Sazama', 'Shrider', 'Tobolski', 'Mcbane', 'Clabo', 'Fredrich', 'Lace', 'Bertran', 'Kama', 'Simonet', 'Lippitt', 'Thomlinson', 'Vallot', 'Dede', 'Brimley', 'Parler', 'Standfield', 'Goodie', 'Isidore', 'Philogene', 'Abramczyk', 'Andert', 'Besancon', 'Bieda', 'Birkey', 'Boquet', 'Borak', 'Bottino', 'Breyfogle', 'Crill', 'Daffern', 'Derrig', 'Dimalanta', 'Dresch', 'Feulner', 'Friede', 'Furth', 'Gamet', 'Garramone', 'Gaunce', 'Gitto', 'Guandique', 'Hoxworth', 'Hubers', 'Ingwersen', 'Junio', 'Kassing', 'Magrath', 'Martelle', 'Mcweeney', 'Neris', 'Nesheiwat', 'Remlinger', 'Rentmeester', 'Schlein', 'Schoneman', 'Sterr', 'Streib', 'Szymanowski', 'Trompeter', 'Tullius', 'Cherico', 'Cremin', 'Dominey', 'Gotthardt', 'Kowalke', 'Onderdonk', 'Pirrello', 'Rumberger', 'Schreur', 'Westerhoff', 'Maroni', 'Dire', 'Menta', 'Hoeg', 'Meise', 'Standerfer', 'Roam', 'Tibbett', 'Beevers', 'Evrard', 'Locklair', 'Brester', 'Sirmon', 'Woodbeck', 'Wires', 'Durette', 'Raul', 'Stephanie', 'Mcwain', 'Skeeters', 'Wilbourne', 'Debroux', 'Keyton', 'Noris', 'Fanta', 'Goshen', 'Kithcart', 'Shepheard', 'Sherod', 'Buntyn', 'Gissendanner', 'Goodley', 'Mckissic', 'Bissinger', 'Biswell', 'Borruso', 'Danese', 'Eslava', 'Gehle', 'Gibeau', 'Gionet', 'Greth', 'Gul', 'Hambley', 'Harshfield', 'Helin', 'Henken', 'Hogland', 'Hoxha', 'Hurlbutt', 'Kaminer', 'Kien', 'Kliebert', 'Koivisto', 'Kooken', 'Laconte', 'Lovo', 'Manninen', 'Maxham', 'Mcleland', 'Mclerran', 'Milici', 'Negrette', 'Nicotera', 'Nissan', 'Philipson', 'Pimenta', 'Pinch', 'Rietveld', 'Seyller', 'Shollenberger', 'Sochacki', 'Telleria', 'Toda', 'Unrue', 'Vanbenschoten', 'Versace', 'Villada', 'Watry', 'Wirsing', 'Zeimet', 'Zynda', 'Angelillo', 'Fleissner', 'Freehling', 'Grewell', 'Heick', 'Kartes', 'Kishi', 'Kopke', 'Laubenstein', 'Leske', 'Lohmeier', 'Marotz', 'Moccio', 'Mullineaux', 'Muzyka', 'Ostermiller', 'Penuelas', 'Plagge', 'Stolarz', 'Wertenberger', 'Sella', 'Allinger', 'Betzler', 'Rosenkrantz', 'Trimarchi', 'Dionicio', 'Frohman', 'Landenberger', 'Shillings', 'Chill', 'Leather', 'Sonn', 'Connel', 'Fougere', 'Alia', 'Wisby', 'Haisley', 'Minion', 'Mccathern', 'Rozzell', 'Armbrister', 'Ryant', 'Almeyda', 'Bonjour', 'Bordas', 'Bozard', 'Buccola', 'Cihlar', 'Dargis', 'Faivre', 'Fejes', 'Grulke', 'Harken', 'Heimberger', 'Hochmuth', 'Keadle', 'Kedrowski', 'Kortman', 'Krahenbuhl', 'Krasniqi', 'Kundrat', 'Leistner', 'Loguidice', 'Mcauliff', 'Mchatton', 'Minella', 'Muccio', 'Normington', 'Nuttle', 'Orsino', 'Reker', 'Respicio', 'Shein', 'Teichert', 'Varisco', 'Accomando', 'Amelio', 'Burckhard', 'Fleischhacker', 'Hagglund', 'Kessenich', 'Langrehr', 'Lauderbaugh', 'Misquez', 'Muneton', 'Ourada', 'Rulon', 'Scholze', 'Stellmach', 'Sudano', 'Thelander', 'Yeckley', 'Corsino', 'Grage', 'Isla', 'Narramore', 'Coolman', 'Heatherington', 'Newey', 'Kunda', 'Motts', 'Tawfik', 'Tindel', 'Passon', 'Sypher', 'Conceicao', 'Haraway', 'Deamer', 'Nored', 'Mamo', 'Mcgilberry', 'Akerley', 'Andreatta', 'Aronhalt', 'Barz', 'Bebber', 'Brubacher', 'Cabriales', 'Dyckman', 'Ellers', 'Finerty', 'Hargan', 'Haselton', 'Hellmuth', 'Hoffmeier', 'Homrich', 'Hrabak', 'Intrieri', 'Lebeda', 'Lutzke', 'Malka', 'Mcglinn', 'Nicklin', 'Nusz', 'Pennings', 'Rebmann', 'Rodocker', 'Sacra', 'Saksa', 'Shehane', 'Siever', 'Snide', 'Sotero', 'Sponsel', 'Therien', 'Viti', 'Wubben', 'Zieske', 'Billingham', 'Bruschi', 'Cullipher', 'Eppolito', 'Greuel', 'Huq', 'Matott', 'Mohlman', 'Monterroza', 'Risberg', 'Shvartsman', 'Sigafoos', 'Zehring', 'Manuele', 'Asghar', 'Shelp', 'Grieder', 'Hippert', 'Dani', 'Beserra', 'Kennan', 'Scholfield', 'Joh', 'Swailes', 'Pear', 'Hell', 'Kittler', 'Pickeral', 'Somerset', 'Streat', 'Tinner', 'Landor', 'Pretlow', 'Tensley', 'Abela', 'Abramovich', 'Acocella', 'Avino', 'Bacchi', 'Bayliff', 'Beganovic', 'Belinsky', 'Bilicki', 'Borowiec', 'Bucknam', 'Calandro', 'Ciszek', 'Cooling', 'Cundari', 'Derk', 'Ekern', 'Engelson', 'Fennessey', 'Ferencz', 'Filipkowski', 'Frescas', 'Frisinger', 'Gegg', 'Hanken', 'Harbach', 'Jipson', 'Kasal', 'Kinstler', 'Langenbach', 'Leccese', 'Maalouf', 'Mcinerny', 'Mcpartlin', 'Meth', 'Mitzner', 'Riano', 'Saggese', 'Schroff', 'Skibicki', 'Textor', 'Vancampen', 'Vukelich', 'Wascom', 'Workinger', 'Xin', 'Bronkema', 'Gerstel', 'Geving', 'Gravlin', 'Hannay', 'Haughn', 'Lippi', 'Lonsway', 'Paradowski', 'Poust', 'Thinnes', 'Wassenaar', 'Hemm', 'Isip', 'Pastorino', 'Barkett', 'Montalban', 'Ballestero', 'Floren', 'Rossen', 'Chuba', 'Burrington', 'Derman', 'Wickland', 'Dunman', 'Beek', 'Petitjean', 'Michelin', 'Chapell', 'Pullam', 'Adamcik', 'Albarracin', 'Batrez', 'Berghuis', 'Birkland', 'Boulier', 'Broderson', 'Bruun', 'Cicio', 'Davidow', 'Denova', 'Dooner', 'Espeland', 'Fifita', 'Guidone', 'Hartnell', 'Havranek', 'Janca', 'Klepac', 'Langhorst', 'Lippmann', 'Merrihew', 'Mondelli', 'Monterosso', 'Moster', 'Noxon', 'Poznanski', 'Reents', 'Samaras', 'Silvius', 'Srour', 'Stasio', 'Steffe', 'Steimer', 'Stracke', 'Taney', 'Theodorou', 'Trickel', 'Tunks', 'Vavrek', 'Whitfill', 'Wohlfeil', 'Zirkelbach', 'Brissey', 'Busboom', 'Collignon', 'Emling', 'Fratzke', 'Genrich', 'Giglia', 'Hayakawa', 'Lupinski', 'Pulvermacher', 'Steinbrink', 'Xayavong', 'Yerkey', 'Arlotta', 'Calia', 'Pfiffner', 'Gostomski', 'Declerck', 'Demedeiros', 'Dirickson', 'Wo', 'Hosie', 'Chad', 'Herbison', 'Fleece', 'Connon', 'Dun', 'Gaffin', 'Plush', 'Gravette', 'Houseal', 'Seaward', 'Esson', 'Mayhorn', 'Surrell', 'Horsford', 'Mcduffey', 'Huger', 'Alexie', 'Apsey', 'Belke', 'Bourcier', 'Cardena', 'Daun', 'Dunworth', 'Ehrsam', 'Elizardo', 'Elkhatib', 'Emick', 'Fernau', 'Finnan', 'Hitzeman', 'Housand', 'Kallstrom', 'Katen', 'Kerstein', 'Kiracofe', 'Klammer', 'Largaespada', 'Limoges', 'Lodwick', 'Lozito', 'Madl', 'Mauthe', 'Mogel', 'Newstrom', 'Ninh', 'Obrochta', 'Opsal', 'Ordiway', 'Osentoski', 'Paxman', 'Plume', 'Rickenbach', 'Rinks', 'Saltmarsh', 'Scheuring', 'Schwegel', 'Skov', 'Woodrome', 'Zdanowicz', 'Zera', 'Basgall', 'Bornhorst', 'Clotfelter', 'Coulthard', 'Dresner', 'Fischl', 'Grahek', 'Grefe', 'Knightly', 'Kuenzel', 'Mccumbers', 'Millstein', 'Mulnix', 'Weiher', 'Yust', 'Metter', 'Polio', 'Ayad', 'Banke', 'Lawlis', 'Coba', 'Twyford', 'Burck', 'Barthold', 'Sames', 'Jacquot', 'Allsopp', 'Mcglaun', 'Hollinsworth', 'Gillings', 'Buchannon', 'Bas', 'Beaber', 'Berto', 'Bobrow', 'Bochicchio', 'Bohland', 'Burghart', 'Chaloux', 'Costella', 'Depace', 'Dils', 'Diviney', 'Ehly', 'Ermer', 'Fussner', 'Gunia', 'Guterrez', 'Holik', 'Holster', 'Kasperski', 'Koscinski', 'Lamoureaux', 'Marotti', 'Masullo', 'Mcconahy', 'Mehlhaff', 'Mocarski', 'Moosman', 'Pavlich', 'Pfisterer', 'Ruacho', 'Semrad', 'Slemmer', 'Stineman', 'Toelle', 'Vanderstelt', 'Wagy', 'Wuensch', 'Wykes', 'Zar', 'Bouchie', 'Friis', 'Gehrt', 'Hempfling', 'Henkes', 'Huggler', 'Kelbaugh', 'Petrenko', 'Pfost', 'Rubendall', 'Shimel', 'Stapf', 'Sweeton', 'Tsuda', 'Vitanza', 'Voytko', 'Bibbo', 'Hagee', 'Majer', 'Mangieri', 'Pala', 'Volle', 'Cabassa', 'Lipsett', 'Macdougal', 'Minar', 'Eline', 'Eskin', 'Angeletti', 'Lattner', 'Kimple', 'Marsan', 'Tornes', 'Moncur', 'Sanderfer', 'Crite', 'Levels', 'Valcin', 'Motton', 'Foggie', 'Battistoni', 'Bedient', 'Bendt', 'Bennison', 'Bonnin', 'Caridi', 'Cedotal', 'Choinski', 'Cossin', 'Devargas', 'Deveny', 'Dosher', 'Dredge', 'Fittro', 'Gorgone', 'Gourd', 'Herra', 'Holwerda', 'Iannello', 'Klintworth', 'Kubena', 'Leyvas', 'Magowan', 'Mendolia', 'Nehme', 'Pelikan', 'Pfalzgraf', 'Raith', 'Reichenberg', 'Reinertsen', 'Sens', 'Simer', 'Spektor', 'Sweda', 'Wordell', 'Blasing', 'Dinoto', 'Goblirsch', 'Helming', 'Hibshman', 'Lamountain', 'Latka', 'Licausi', 'Malerba', 'Mentink', 'Meskill', 'Moening', 'Montminy', 'Ryno', 'Sluka', 'Solarz', 'Swainston', 'Tagliaferri', 'Twichell', 'Vertucci', 'Voland', 'Wolgast', 'Bissen', 'Duray', 'Flaum', 'Taves', 'Caplin', 'Hayat', 'Pollett', 'Baris', 'Taher', 'Anes', 'Beza', 'Pere', 'Tipper', 'Farrey', 'Slott', 'Sinquefield', 'Bobbett', 'Calico', 'Eigner', 'Gambrill', 'Donigan', 'Daney', 'Natt', 'Gettis', 'Kincy', 'Dolberry', 'Curenton', 'Elzie', 'Beretta', 'Carbine', 'Carpenito', 'Clarin', 'Conrado', 'Conradt', 'Courteau', 'Daft', 'Debruler', 'Delahunty', 'Duerst', 'Dzik', 'Ellner', 'Faeth', 'Fournet', 'Galinski', 'Goldenstein', 'Hanauer', 'Higgason', 'Hoeper', 'Hollo', 'Ildefonso', 'Jocson', 'Kasprowicz', 'Kochanowski', 'Labrosse', 'Lazaroff', 'Leino', 'Levinsky', 'Lopezhernandez', 'Mckeague', 'Otremba', 'Paluzzi', 'Pevehouse', 'Polgar', 'Raneri', 'Rumler', 'Sanantonio', 'Schissel', 'Senteno', 'Sieling', 'Smee', 'Swiggum', 'Tarnow', 'Tavakoli', 'Tholl', 'Valdiviezo', 'Willadsen', 'Wilmouth', 'Dudziak', 'Eskenazi', 'Garity', 'Gravino', 'Impastato', 'Kuhner', 'Mcclaflin', 'Nein', 'Precourt', 'Rotenberry', 'Sciara', 'Arenson', 'Coupland', 'Sedler', 'Pizer', 'Him', 'Combee', 'Rhorer', 'Gelles', 'Baroody', 'Basten', 'Sprinkles', 'Vanier', 'Clementson', 'Robberson', 'Harten', 'Kade', 'Bhola', 'Bahar', 'Pellum', 'Isadore', 'Dixie', 'Axline', 'Backs', 'Berdahl', 'Billeter', 'Bily', 'Broerman', 'Declercq', 'Derleth', 'Fanucchi', 'Forkey', 'Gallinger', 'Gionfriddo', 'Gretzinger', 'Grima', 'Helgren', 'Hoelting', 'Hundertmark', 'Inscho', 'Jernberg', 'Kamiya', 'Lekas', 'Marchini', 'Markuson', 'Matsushima', 'Meineke', 'Mizrachi', 'Moglia', 'Nagele', 'Naro', 'Padillo', 'Palleschi', 'Palomba', 'Purgason', 'Qadri', 'Recalde', 'Rosiak', 'Rumney', 'Savitt', 'Shibuya', 'Szalkowski', 'Wagg', 'Wolsey', 'Zumpano', 'Benbrook', 'Blasdel', 'Carusone', 'Karalis', 'Koep', 'Kohles', 'Rumbo', 'Siggins', 'Unverzagt', 'Eatherly', 'Kapper', 'Salser', 'Wege', 'Zinsmeister', 'Alf', 'Wish', 'Falero', 'Bur', 'Imam', 'Biven', 'Merritts', 'Kaigler', 'Verdell', 'Feggins', 'Acerra', 'Antenucci', 'Benegas', 'Bisesi', 'Boshers', 'Chap', 'Clouatre', 'Doxtater', 'Dullea', 'Eischeid', 'Gundry', 'Hinger', 'Hodak', 'Iseminger', 'Juris', 'Kirchen', 'Knezevic', 'Kobrin', 'Krizek', 'Leza', 'Lusty', 'Luttrull', 'Mattke', 'Mossbarger', 'Narro', 'Osland', 'Ostwald', 'Pepperman', 'Pritzl', 'Reasner', 'Schimming', 'Schulenburg', 'Trefry', 'Vigorito', 'Bayardo', 'Bieser', 'Brinkmeier', 'Camposano', 'Cremeens', 'Delgrande', 'Demopoulos', 'Deyarmin', 'Grismer', 'Jubb', 'Kinker', 'Lauf', 'Mabile', 'Muehl', 'Orlick', 'Pillado', 'Pizzano', 'Poppleton', 'Quickel', 'Stoneberg', 'Szwed', 'Zadrozny', 'Ziemke', 'Zupko', 'Diesel', 'Hornbrook', 'Pillion', 'Holaway', 'Massad', 'Rossmiller', 'Parriott', 'Toya', 'Dross', 'Burwick', 'Kaman', 'Bruna', 'Milles', 'Acrey', 'Toogood', 'Austell', 'Chastang', 'Jasmine', 'Eckford', 'Stiggers', 'Saintvil', 'Adeyemi', 'Basto', 'Bolon', 'Brilliant', 'Brockhoff', 'Colao', 'Emens', 'Endler', 'Fabris', 'Falletta', 'Felver', 'Ferdon', 'Golinski', 'Gosdin', 'Gronlund', 'Guijosa', 'Hainley', 'Halama', 'Heinicke', 'Heldenbrand', 'Helmkamp', 'Hoctor', 'Hoeck', 'Kroboth', 'Lamagna', 'Lingg', 'Locurto', 'Marchewka', 'Micco', 'Mormino', 'Newmeyer', 'Ostrosky', 'Redel', 'Saccoccio', 'Stavely', 'Stidd', 'Tonne', 'Tonnesen', 'Umbach', 'Vardanyan', 'Wank', 'Wolven', 'Cilento', 'Delmonaco', 'Denigris', 'Gerbig', 'Gradilla', 'Grebner', 'Landini', 'Marohl', 'Muenchow', 'Niedermeier', 'Nussbaumer', 'Nycz', 'Pizzino', 'Schader', 'Schuneman', 'Takano', 'Ureta', 'Vanderloop', 'Windholz', 'Wombacher', 'Woulfe', 'Hamley', 'Schickel', 'Yuill', 'Batta', 'Galant', 'Mofield', 'Kint', 'Barnell', 'Ashmead', 'Crossin', 'Lasco', 'Chasen', 'Swire', 'Gleghorn', 'Bearfield', 'Goodgame', 'Daris', 'Plump', 'Derricott', 'Burno', 'Baylock', 'Vanterpool', 'Judon', 'Mells', 'Proby', 'Bagan', 'Batcheller', 'Bjelland', 'Boline', 'Boullion', 'Broomall', 'Carcia', 'Cassinelli', 'Cerro', 'Colantuono', 'Dembeck', 'Doto', 'Eckersley', 'Edell', 'Ewy', 'Goodness', 'Huhta', 'Kallen', 'Keimig', 'Kemppainen', 'Koopmann', 'Lacap', 'Lehtinen', 'Maciolek', 'Marchuk', 'Mcfate', 'Mentel', 'Minihan', 'Mohsin', 'Oppedisano', 'Patriarca', 'Raske', 'Schueneman', 'Shostak', 'Sibal', 'Spadafore', 'Suitor', 'Tavella', 'Vy', 'Wies', 'Beadnell', 'Bogusz', 'Cleverly', 'Dellorusso', 'Dudenhoeffer', 'Glendinning', 'Glomb', 'Heinkel', 'Jiwani', 'Lonigro', 'Machala', 'Marsicano', 'Neuenfeldt', 'Overlock', 'Popko', 'Russomanno', 'Saxer', 'Scicchitano', 'Spiegelberg', 'Spindel', 'Timpone', 'Vincelette', 'Waidelich', 'Wissink', 'Woolstenhulme', 'Danza', 'Sleasman', 'Frometa', 'Savinon', 'Higgerson', 'Helmich', 'Nahar', 'Campus', 'Hassey', 'Mccorkel', 'Tola', 'Ferrington', 'Nicolls', 'Markes', 'Edgley', 'Dupriest', 'Wah', 'Mclester', 'Scantling', 'Goffe', 'Battie', 'Battershell', 'Bearup', 'Bisig', 'Brouillet', 'Canby', 'Chaussee', 'Colandrea', 'Colocho', 'Daube', 'Dobransky', 'Dolbow', 'Dyk', 'Elfrink', 'Figel', 'Hauter', 'Henkels', 'Keillor', 'Kollasch', 'Krabill', 'Kubly', 'Kvasnicka', 'Leise', 'Martirosyan', 'Mihalic', 'Montecinos', 'Myren', 'Okerlund', 'Ozer', 'Rajput', 'Reihl', 'Rimando', 'Saffle', 'Schmelter', 'Tellado', 'Wachsmuth', 'Wussow', 'Zylka', 'Caiola', 'Certo', 'Disabatino', 'Ehrke', 'Lahmann', 'Lamartina', 'Manheim', 'Mckevitt', 'Nardozzi', 'Neuzil', 'Novotney', 'Oldfather', 'Sietsema', 'Stemmler', 'Stumm', 'Ueno', 'Weckwerth', 'Berrocal', 'Nolde', 'Alava', 'Revier', 'Sester', 'Saller', 'Tonga', 'Kala', 'Reveron', 'Homesley', 'Pagett', 'Blackie', 'Raimer', 'Fitt', 'Kimbley', 'Amory', 'Cabler', 'Juett', 'Crate', 'Burres', 'Siddle', 'Barnfield', 'Bordenave', 'Cubit', 'Elem', 'Hardmon', 'Augspurger', 'Barriger', 'Bau', 'Bloomingdale', 'Busta', 'Canoy', 'Carapia', 'Cavenaugh', 'Conkin', 'Coppernoll', 'Daloia', 'Debruyne', 'Egly', 'Esmail', 'Estorga', 'Gladu', 'Gladue', 'Harvath', 'Hirschmann', 'Juel', 'Kappus', 'Kopriva', 'Krul', 'Lavorgna', 'Maginn', 'Malphrus', 'Mcilhenny', 'Perazzo', 'Peredo', 'Pineo', 'Rigoni', 'Robleto', 'Schoene', 'Sevillano', 'Stears', 'Stoltzfoos', 'Sutley', 'Terracciano', 'Villacres', 'Yoak', 'Brensinger', 'Brodzinski', 'Cordial', 'Cornacchia', 'Corralejo', 'Demarchi', 'Dziuk', 'Hirzel', 'Keirns', 'Kocourek', 'Kupec', 'Nazaryan', 'Oftedahl', 'Pignatelli', 'Pundt', 'Repinski', 'Ryther', 'Sampedro', 'Shemanski', 'Siess', 'Trettel', 'Urquilla', 'Vantil', 'Vicens', 'Dunahoo', 'Safer', 'Romaniello', 'Tallo', 'Cavell', 'Cobern', 'Yarrow', 'Serge', 'Adel', 'Allum', 'Pruit', 'Wali', 'Forson', 'Bells', 'Blyden', 'Andreotti', 'Bagnato', 'Beauchaine', 'Biedrzycki', 'Brabo', 'Brodman', 'Bruyere', 'Canizares', 'Chio', 'Coudriet', 'Dara', 'Dhawan', 'Diclemente', 'Doro', 'Elvir', 'Fivecoat', 'Frate', 'Furuya', 'Greis', 'Halbleib', 'Heuerman', 'Hoener', 'Holberg', 'Hoogendoorn', 'Inclan', 'Jokinen', 'Kretchmer', 'Lafromboise', 'Mccomsey', 'Mckiddy', 'Pelky', 'Plaia', 'Ponti', 'Reichl', 'Schicker', 'Sotto', 'Staehle', 'Thau', 'Turchin', 'Zill', 'Aicher', 'Arrigoni', 'Bertagnolli', 'Binetti', 'Dahlheimer', 'Delashmit', 'Disque', 'Hemmerling', 'Hovater', 'Kachur', 'Massmann', 'Schlup', 'Turkovich', 'Underberg', 'Wambolt', 'Vassey', 'Larney', 'Brisky', 'Minas', 'Kata', 'Magar', 'Arlen', 'Corporan', 'Westland', 'Detherage', 'Reen', 'Morale', 'Hoes', 'Baynham', 'Norrington', 'Lartigue', 'Hakeem', 'Kendrix', 'Cazeau', 'Amadi', 'Mczeal', 'Alwin', 'Barcellos', 'Bastedo', 'Bintz', 'Brackenbury', 'Brockel', 'Bucek', 'Cecala', 'Dapper', 'Dettore', 'Dowdall', 'Dralle', 'Essenmacher', 'Evaristo', 'Fecher', 'Feldmeier', 'Fetherston', 'Futterman', 'Garlinghouse', 'Germani', 'Gotz', 'Hoen', 'Janikowski', 'Kiess', 'Lagerstrom', 'Lozinski', 'Magnone', 'Markow', 'Mayall', 'Mehdi', 'Mineau', 'Morgenroth', 'Nitzsche', 'Nordell', 'Pavlock', 'Peruzzi', 'Pettine', 'Pinos', 'Polidoro', 'Rahl', 'Rudis', 'Ryback', 'Santellan', 'Scharfenberg', 'Schnake', 'Schwake', 'Seeling', 'Senk', 'Siron', 'Speich', 'Summerhays', 'Torno', 'Vangieson', 'Wiacek', 'Begnoche', 'Carrejo', 'Chervenak', 'Edminster', 'Halonen', 'Macumber', 'Mazeika', 'Mikami', 'Minetti', 'Mosbrucker', 'Mundis', 'Onder', 'Prowant', 'Pyo', 'Sedlack', 'Stanbro', 'Woehl', 'Wrage', 'Carpentieri', 'Guedry', 'Hodde', 'Waggy', 'Weitman', 'Handal', 'Gosman', 'Mckeone', 'Oliveria', 'Soutar', 'Glance', 'Surprise', 'Milius', 'Crammer', 'Mclear', 'Borris', 'Malon', 'Mane', 'Arrick', 'Brazzel', 'Matthewson', 'Philemon', 'Selvy', 'Lites', 'Deadwyler', 'Marzette', 'Alipio', 'Arancibia', 'Arrona', 'Basista', 'Blethen', 'Brull', 'Colaianni', 'Dreese', 'Giammona', 'Giovanetti', 'Grandmaison', 'Grondahl', 'Gulli', 'Hellenbrand', 'Iturbe', 'Koesters', 'Kondracki', 'Konitzer', 'Kubic', 'Lauerman', 'Mcfadin', 'Musquiz', 'Papalia', 'Porrazzo', 'Prien', 'Reichley', 'Treichler', 'Ursua', 'Vanblaricom', 'Wich', 'Windler', 'Wos', 'Zampino', 'Alexopoulos', 'Bambrick', 'Beabout', 'Brechtel', 'Buroker', 'Dahler', 'Everding', 'Furno', 'Gikas', 'Gilkeson', 'Hubka', 'Konwinski', 'Krisko', 'Kuligowski', 'Maltbie', 'Molstad', 'Nonnemacher', 'Nowotny', 'Odisho', 'Remsburg', 'Rollyson', 'Siegmann', 'Slaubaugh', 'Wasco', 'Carlyon', 'Chanin', 'Cominsky', 'Karber', 'Aynes', 'Swamy', 'Kolden', 'Rochel', 'Julin', 'Demarcus', 'Malena', 'Morice', 'Burst', 'Sukhu', 'Mccravy', 'Rinehardt', 'Veazie', 'Isaiah', 'Bradby', 'Poellnitz', 'Agyemang', 'Agate', 'Aschoff', 'Beenken', 'Bogenschutz', 'Casamento', 'Correira', 'Ebers', 'Ellertson', 'Forcum', 'Gortney', 'Jarriel', 'Jasmer', 'Kennebeck', 'Kimpton', 'Lad', 'Lasek', 'Licavoli', 'Lipper', 'Luedecke', 'Maqueda', 'Matsen', 'Mest', 'Neang', 'Neault', 'Newlun', 'Oetken', 'Rodick', 'Rollinger', 'Sabins', 'Schalow', 'Sheils', 'Spilde', 'Virzi', 'Watz', 'Wehrly', 'Boscarino', 'Chavolla', 'Dasaro', 'Eisenbach', 'Ignatowski', 'Kievit', 'Kuzminski', 'Lickliter', 'Moravek', 'Pawling', 'Prause', 'Redler', 'Wunschel', 'Suchanek', 'Eyring', 'Loge', 'Tout', 'Fross', 'Swiss', 'Deforrest', 'Umphlett', 'Herran', 'Matton', 'Passe', 'Ode', 'Della', 'Caillier', 'Baten', 'Chesterfield', 'Odneal', 'Azeez', 'Salami', 'Ramson', 'Mcvea', 'Pittmon', 'Cheatom', 'Dorsainvil', 'Cheeseboro', 'Lavalais', 'Allegro', 'Bressi', 'Brocklehurst', 'Cassarino', 'Dario', 'Gazzola', 'Glinka', 'Goffredo', 'Halabi', 'Kroeze', 'Lenig', 'Marciel', 'Marcussen', 'Massoni', 'Mayernik', 'Nawrot', 'Palazzi', 'Pfefferkorn', 'Placeres', 'Polimeni', 'Recendiz', 'Sawdey', 'Seidell', 'Suchecki', 'Titzer', 'Virag', 'Vitulli', 'Wiltfong', 'Wolden', 'Woolworth', 'Yandow', 'Zeiter', 'Zogg', 'Brosh', 'Dunsmoor', 'Gucciardo', 'Gumz', 'Luginbill', 'Mathwig', 'Pannullo', 'Raitt', 'Reutzel', 'Sonnen', 'Bahri', 'Guiffre', 'Hons', 'Platner', 'Balaguer', 'Lapre', 'Rabbani', 'Talent', 'Hoster', 'Thal', 'Apo', 'Duggin', 'Kirley', 'Burnard', 'Lourie', 'Wilham', 'Craton', 'Griff', 'Falwell', 'Upperman', 'Laverne', 'Wi', 'Foucher', 'Sudberry', 'Oriol', 'Cowens', 'Marshell', 'Chargois', 'Bordley', 'Artale', 'Boeker', 'Cookston', 'Dattilio', 'Dewinter', 'Ditton', 'Droessler', 'Dusch', 'Eltringham', 'Feige', 'Giel', 'Grigas', 'Hannagan', 'Haubner', 'Henzler', 'Kippes', 'Kneebone', 'Lozeau', 'Mallek', 'Mandato', 'Mangiapane', 'Matusek', 'Newgard', 'Notte', 'Purdin', 'Ramaker', 'Reddoch', 'Rensing', 'Rohrman', 'Romm', 'Rudiger', 'Torti', 'Travaglini', 'Uno', 'Wojciak', 'Yannuzzi', 'Zeien', 'Arpino', 'Borgstrom', 'Burkemper', 'Cristino', 'Detjen', 'Gienger', 'Glockner', 'Grillot', 'Jentz', 'Kendzierski', 'Klebe', 'Knippenberg', 'Kusler', 'Olofson', 'Orlov', 'Rindt', 'Stallbaumer', 'Troost', 'Turri', 'Uzelac', 'Weichert', 'Sweazy', 'Alcivar', 'Canner', 'Lottman', 'Salame', 'Berkes', 'Pickren', 'Ganson', 'Odonell', 'Geron', 'Kasa', 'Banbury', 'Tinnel', 'Umble', 'Flow', 'Kirt', 'Rhule', 'Diles', 'Seeney', 'Givans', 'Mckethan', 'Crusoe', 'Darko', 'Mucker', 'Kizzee', 'Daniely', 'Nutall', 'Angove', 'Appelhans', 'Balder', 'Blatchley', 'Botkins', 'Brisk', 'Burandt', 'Clowdus', 'Debauche', 'Deily', 'Group', 'Hoecker', 'Holsonback', 'Humpert', 'Jacquin', 'Jurica', 'Karnik', 'Krontz', 'Lapiana', 'Lenzo', 'Luscombe', 'Madey', 'Mirabito', 'Neifert', 'Pennino', 'Piechota', 'Pizzimenti', 'Reeg', 'Roarty', 'Routzahn', 'Salsedo', 'Schuff', 'Silveri', 'Steckman', 'Supak', 'Swackhamer', 'Trusler', 'Vizzini', 'Wences', 'Whelton', 'Zachar', 'Albertsen', 'Bischel', 'Brigandi', 'Campoy', 'Castagnola', 'Doenges', 'Flessner', 'Garbers', 'Jezewski', 'Kozlov', 'Niedbalski', 'Schillo', 'Schoepke', 'Schranz', 'Trulson', 'Vanwyhe', 'Versluis', 'Zavadil', 'Brau', 'Rudell', 'Golen', 'Meter', 'Sherrin', 'Tolly', 'Mandala', 'Calcano', 'Lewing', 'Sedeno', 'Ramalho', 'Haggar', 'Borns', 'Matherson', 'Cobin', 'Turnley', 'Pone', 'Tuner', 'Crandle', 'Sturkey', 'Heggins', 'Tisby', 'Allbaugh', 'Baars', 'Bethard', 'Brenizer', 'Bussman', 'Casebier', 'Castanos', 'Climaco', 'Dux', 'Farrens', 'Frediani', 'Gaccione', 'Garciaperez', 'Hoppa', 'Juckett', 'Klinkner', 'Kooy', 'Krinke', 'Locy', 'Lovecchio', 'Lukin', 'Machia', 'Mand', 'Maslin', 'Mehrotra', 'Nicolet', 'Peyser', 'Reckart', 'Roanhorse', 'Rokicki', 'Sargis', 'Sciullo', 'Shevchuk', 'Sindoni', 'Slankard', 'Sobiech', 'Stoneberger', 'Stys', 'Tuzzolino', 'Waligora', 'Wiland', 'Clabough', 'Drawbaugh', 'Figurski', 'Gibeault', 'Gojcaj', 'Hartfiel', 'Inbody', 'Konarski', 'Kruszka', 'Letarte', 'Lillich', 'Mccandlish', 'Mollenkopf', 'Oltmann', 'Pfenninger', 'Ruediger', 'Schaben', 'Shauger', 'Wilczak', 'Wolanin', 'Ziehm', 'Bassinger', 'Brannick', 'Schlereth', 'Capri', 'Roscher', 'Pasqual', 'Lallo', 'Sweney', 'Rozario', 'Hamblet', 'Muckleroy', 'Frankson', 'Moure', 'Shrieves', 'Bosket', 'Strowbridge', 'Hawkin', 'Cooperwood', 'Agena', 'Barrowman', 'Belko', 'Blasdell', 'Brobeck', 'Chieffo', 'Cooperrider', 'Dickard', 'Erion', 'Fradkin', 'Hattery', 'Hefferon', 'Hofstra', 'Hoiland', 'Jirak', 'Klugman', 'Klundt', 'Knope', 'Lawniczak', 'Luckenbach', 'Manzione', 'Mccombie', 'Minden', 'Mousel', 'Ridling', 'Rightmire', 'Ritzel', 'Santori', 'Semmens', 'Snyders', 'Spargur', 'Staszewski', 'Swiech', 'Tasso', 'Veldhuizen', 'Vuolo', 'Wojnarowski', 'Yoe', 'Bachler', 'Cimo', 'Hippen', 'Klimaszewski', 'Kohlhepp', 'Kovacich', 'Kretsch', 'Lacoursiere', 'Lopezmartinez', 'Marsiglia', 'Metzker', 'Murchie', 'Paradee', 'Pfefferle', 'Rothert', 'Skellenger', 'Tourangeau', 'Beumer', 'Thunder', 'Uden', 'Broe', 'Moxon', 'Kassin', 'Murton', 'Hockley', 'Vinet', 'Suthers', 'Bayman', 'Cokeley', 'Ailey', 'Crossfield', 'Desha', 'Dowson', 'Acheampong', 'Boomsma', 'Buer', 'Caratachea', 'Dascenzo', 'Debes', 'Degroote', 'Dillie', 'Dorsi', 'Dorward', 'Eyestone', 'Geister', 'Gonia', 'Heiler', 'Hin', 'Hoheisel', 'Horger', 'Hulce', 'Kainer', 'Kerkman', 'Kloehn', 'Krempasky', 'Kuehnel', 'Leetch', 'Lio', 'Lohrey', 'Lucchetti', 'Machnik', 'Majeske', 'Martire', 'Mores', 'Oyen', 'Pappert', 'Platas', 'Podany', 'Prata', 'Radoncic', 'Sainato', 'Salada', 'Serota', 'Tatsch', 'Torbeck', 'Vilhauer', 'Waltner', 'Wauters', 'Welge', 'Yoss', 'Bigwood', 'Brunsman', 'Civitello', 'Compston', 'Cuccaro', 'Denholm', 'Emmick', 'Gadzinski', 'Goedken', 'Graumann', 'Hackert', 'Hardacre', 'Hehl', 'Magliocco', 'Marotto', 'Ozanich', 'Pidcock', 'Schlangen', 'Scoma', 'Sobecki', 'Spreng', 'Thalmann', 'Wolfrum', 'Groninger', 'Howatt', 'Kindy', 'Swor', 'Ledden', 'Voyer', 'Colli', 'Andrae', 'Duchemin', 'Boker', 'Malter', 'Snooks', 'Morss', 'Haylett', 'Mitter', 'Fairey', 'Kenerson', 'Albea', 'Ellerson', 'Alcindor', 'Gadison', 'Arabia', 'Bundren', 'Calica', 'Cartaya', 'Cielo', 'Ebbers', 'Entler', 'Friedly', 'Granja', 'Landt', 'Lorensen', 'Michelini', 'Oliveto', 'Piela', 'Reust', 'Roussos', 'Sanluis', 'Seier', 'Sobolik', 'Stader', 'Stetzer', 'Tetley', 'Zirbes', 'Bridenbaugh', 'Chinnici', 'Crabbs', 'Evilsizer', 'Favaloro', 'Haeberle', 'Hopfensperger', 'Kijowski', 'Kingbird', 'Leikam', 'Montavon', 'Petrossian', 'Quizhpi', 'Spoelstra', 'Testani', 'Plaut', 'Windt', 'Dubie', 'Kozinski', 'Sorell', 'Nish', 'Katon', 'Soy', 'Pelcher', 'Sayres', 'Waitman', 'Relph', 'Hearld', 'Farewell', 'Giordani', 'Canida', 'Martian', 'Suliman', 'Mckesson', 'Randon', 'Eastmond', 'Willaims', 'Collington', 'Hardge', 'Asevedo', 'Beauchene', 'Bebeau', 'Bobick', 'Bogacki', 'Bolich', 'Bonadonna', 'Butsch', 'Coltrin', 'Corbello', 'Dastrup', 'Dunshee', 'Firpo', 'Foister', 'Franssen', 'Fredriksen', 'Gfeller', 'Glassner', 'Johanns', 'Korson', 'Langsam', 'Linstrom', 'Longstaff', 'Lukic', 'Maler', 'Marteney', 'Milardo', 'Rhatigan', 'Ruetz', 'Semel', 'Senske', 'Shatswell', 'Simmering', 'Tasch', 'Vanskike', 'Verano', 'Viscardi', 'Weidmann', 'Doubet', 'Farraj', 'Fritter', 'Griesinger', 'Horkey', 'Hornik', 'Izatt', 'Klayman', 'Mantei', 'Notz', 'Oberholzer', 'Petko', 'Rueth', 'Rygiel', 'Tumolo', 'Unterreiner', 'Urgo', 'Weisbecker', 'Weniger', 'Zarro', 'Zunino', 'Goldmann', 'Verderber', 'Glennie', 'Shere', 'Lamos', 'Face', 'Sparger', 'Donnay', 'Kage', 'Leason', 'Mcgue', 'Brickle', 'Mae', 'Thomaston', 'Dunnell', 'Tillie', 'Miggins', 'Geffrard', 'Aubel', 'Backe', 'Beaumier', 'Bloor', 'Brackbill', 'Brandvold', 'Bylund', 'Carbary', 'Catrambone', 'Dapolito', 'Dillenburg', 'Elliff', 'Fehnel', 'Ferriss', 'Gellner', 'Graw', 'Guilbeault', 'Hautala', 'Hollenberg', 'Imparato', 'Kaner', 'Kley', 'Lanzer', 'Laterza', 'Legner', 'Lombardozzi', 'Mcerlean', 'Mcgilton', 'Mohring', 'Neeper', 'Pollinger', 'Pullara', 'Sagona', 'Scripter', 'Skillen', 'Streeper', 'Tritch', 'Vayda', 'Verbeek', 'Wenberg', 'Youngers', 'Bayus', 'Cobaugh', 'Dolak', 'Forys', 'Genther', 'Jankovich', 'Kneale', 'Komp', 'Kreher', 'Kuwahara', 'Mclouth', 'Melland', 'Molesky', 'Neustadt', 'Oesterling', 'Quirke', 'Roeper', 'Stantz', 'Vandenboom', 'Venhuizen', 'Westermeyer', 'Embury', 'Cozort', 'Crispo', 'Woollard', 'Thiery', 'Lecy', 'Terris', 'Stencil', 'Yero', 'Bollard', 'Chander', 'Shepp', 'Younkins', 'Jon', 'Anselm', 'Deveraux', 'Better', 'Birth', 'Hoskie', 'Kirtz', 'Encalade', 'Aprea', 'Bernick', 'Bialy', 'Bolenbaugh', 'Chinea', 'Cwiklinski', 'Dunavan', 'Dunckel', 'Essen', 'Ferner', 'Gallick', 'Gruba', 'Hauss', 'Intriago', 'Javaid', 'Kaney', 'Klemens', 'Kuriakose', 'Leyda', 'Losurdo', 'Mcelhone', 'Methot', 'Morioka', 'Mundorf', 'Nocito', 'Nordmann', 'Oommen', 'Pfahl', 'Piquette', 'Prinsen', 'Sacramento', 'Shenker', 'Skidgel', 'Sobalvarro', 'Soldo', 'Synan', 'Tostenson', 'Trotti', 'Vienneau', 'Vigneau', 'Waitkus', 'Wiess', 'Bartmess', 'Comparan', 'Dalonzo', 'Dutrow', 'Fleegle', 'Fronek', 'Handrich', 'Hazelip', 'Heinig', 'Macapagal', 'Masciarelli', 'Pitstick', 'Radakovich', 'Ripberger', 'Schwebel', 'Slomski', 'Stinchfield', 'Zegers', 'Zeiser', 'Kimmer', 'Rippon', 'Satz', 'Bosques', 'Mcnickle', 'Yarwood', 'Babar', 'Ghazi', 'Mcquary', 'Africa', 'Sofer', 'Marsland', 'Curby', 'Odor', 'Gillem', 'Selmer', 'Delmas', 'Lamison', 'Lanes', 'Shadd', 'Goard', 'Haylock', 'Sermon', 'Meachem', 'Vernet', 'Akiona', 'Avitabile', 'Berkson', 'Bisono', 'Busic', 'Caroselli', 'Corradi', 'Delval', 'Egley', 'Elkind', 'Everling', 'Ferrario', 'Frumkin', 'Gelder', 'Gironda', 'Glasheen', 'Goette', 'Gotts', 'Haub', 'Herro', 'Hudzik', 'Hula', 'Inboden', 'Isensee', 'Kiesewetter', 'Koetje', 'Laughridge', 'Lovewell', 'Meeuwsen', 'Mokry', 'Navarez', 'Plake', 'Quain', 'Reppucci', 'Sorn', 'Tallerico', 'Uselman', 'Verrastro', 'Wineberg', 'Blazina', 'Falardeau', 'Garavito', 'Gellerman', 'Havins', 'Kurdziel', 'Liedel', 'Lofstrom', 'Pakula', 'Presby', 'Ringstad', 'Rokosz', 'Schuchart', 'Seckler', 'Verderame', 'Veselka', 'Asfour', 'Delanoy', 'Fromer', 'Koba', 'Kostrzewa', 'Melle', 'Merkey', 'Scalese', 'Oritz', 'Kilgour', 'Piker', 'Janet', 'Huge', 'Hails', 'Dobey', 'Escoe', 'Rasool', 'Gilcrest', 'Codrington', 'Jeangilles', 'Outley', 'Bambach', 'Beaulac', 'Begue', 'Bobeck', 'Buccino', 'Carrigg', 'Cranney', 'Denninger', 'Dicioccio', 'Eapen', 'Fargnoli', 'Fatica', 'Fernicola', 'Forse', 'Freck', 'Gardipee', 'Gibas', 'Goeman', 'Guadian', 'Hlad', 'Jakab', 'Kishimoto', 'Krenn', 'Lagesse', 'Lhommedieu', 'Lusch', 'Mausolf', 'Mazzocchi', 'Mcdavitt', 'Noseworthy', 'Passante', 'Placzek', 'Quamme', 'Ringgenberg', 'Spiegelman', 'Vinluan', 'Wachsman', 'Bacigalupi', 'Baechle', 'Baetz', 'Barsch', 'Colbaugh', 'Devoto', 'Dimercurio', 'Dosanjh', 'Dukeman', 'Ferger', 'Garinger', 'Grelle', 'Guyett', 'Harpenau', 'Hundal', 'Kamerer', 'Klomp', 'Licklider', 'Martinec', 'Matzek', 'Nixdorf', 'Pankonin', 'Pogosyan', 'Schweickert', 'Smethurst', 'Stroope', 'Zwack', 'Tebbetts', 'Stains', 'Tosado', 'Carles', 'Rings', 'Hebard', 'Choplin', 'Townshend', 'Doorn', 'Aja', 'Picking', 'Oneall', 'Logie', 'Aro', 'Dua', 'Heney', 'Manard', 'Atchinson', 'Breech', 'Brashers', 'Addams', 'Nooner', 'Barsh', 'Orum', 'Dancey', 'Bamba', 'Kareem', 'Theard', 'Marseille', 'Molette', 'Getachew', 'Saintfleur', 'Frimpong', 'Anglada', 'Attardo', 'Barreira', 'Bleicher', 'Bonecutter', 'Bricco', 'Compian', 'Creppel', 'Cuadras', 'Cuccio', 'Cutsforth', 'Dinino', 'Eskelson', 'Freemyer', 'Friedhoff', 'Grandt', 'Holzmann', 'Hoverson', 'Hurteau', 'Iacona', 'Jergens', 'Kingham', 'Leiterman', 'Leugers', 'Leyh', 'Lotti', 'Majkowski', 'Mossberg', 'Nuffer', 'Oaxaca', 'Pagenkopf', 'Paille', 'Petzoldt', 'Rogalla', 'Siddens', 'Siddoway', 'Spatafora', 'Tufo', 'Weismann', 'Werntz', 'Wilz', 'Ammirati', 'Benninghoff', 'Escarsega', 'Fessel', 'Hurless', 'Jastrzebski', 'Klingerman', 'Kurilla', 'Kuzmin', 'Meserole', 'Politz', 'Pollino', 'Rettke', 'Sinay', 'Strebeck', 'Strycharz', 'Suhre', 'Thumm', 'Trybus', 'Uhrin', 'Weisberger', 'Zeger', 'Carringer', 'Sitts', 'Lungren', 'Iiams', 'Sudbury', 'Surrette', 'Chellis', 'Yore', 'Joice', 'Foot', 'Ausley', 'Scioneaux', 'Mcaffee', 'Pinn', 'Maina', 'Dorce', 'Agrusa', 'Albornoz', 'Arave', 'Bacallao', 'Bendavid', 'Bochner', 'Bortle', 'Carragher', 'Chalfin', 'Courtade', 'Dagle', 'Debuhr', 'Fowble', 'Galinsky', 'Hardigree', 'Haulk', 'Hendron', 'Herringshaw', 'Jayaraman', 'Koestler', 'Konicek', 'Kutscher', 'Lachowicz', 'Lafauci', 'Lansky', 'Lazarski', 'Lolli', 'Ludvigsen', 'Manternach', 'Martorelli', 'Mcquillin', 'Mikaelian', 'Northcraft', 'Nyborg', 'Palone', 'Peckman', 'Schwebach', 'Simbeck', 'Sittler', 'Udovich', 'Viesca', 'Yazell', 'Zimmers', 'Bielen', 'Cohron', 'Dearcos', 'Feezor', 'Hilgart', 'Karriker', 'Klingberg', 'Leisenring', 'Napora', 'Nedved', 'Okeson', 'Seratt', 'Trautner', 'Trimarco', 'Turkel', 'Bronder', 'Itani', 'Verona', 'Blackbird', 'Laque', 'Karpel', 'Louro', 'Hamson', 'Ashland', 'Gruel', 'Breer', 'Wesely', 'Bebo', 'Conery', 'Mccarry', 'Cradic', 'Aytes', 'Dikes', 'Soltau', 'Debois', 'Berko', 'Callins', 'Anastacio', 'Balbi', 'Bata', 'Bechel', 'Borsuk', 'Chihuahua', 'Cindric', 'Denapoli', 'Dotzler', 'Dusing', 'Dziekan', 'Eifler', 'Franchino', 'Garritano', 'Herrarte', 'Jaskot', 'Kettell', 'Kingsford', 'Marsters', 'Oshel', 'Overacker', 'Pagliarulo', 'Pannier', 'Pyun', 'Rardon', 'Reville', 'Rogozinski', 'Scatena', 'Schoeppner', 'Senkbeil', 'Silkey', 'Takhar', 'Whitebread', 'Wiech', 'Adelsberger', 'Aslinger', 'Bhattacharyya', 'Brege', 'Burright', 'Cafarella', 'Chlebowski', 'Decaprio', 'Dilello', 'Dresher', 'Finkbiner', 'Gerlich', 'Ignasiak', 'Kataoka', 'Kearl', 'Pingitore', 'Sellick', 'Sinning', 'Stojanovic', 'Vanasten', 'Vanluven', 'Westerfeld', 'Mahala', 'Biancardi', 'Velardo', 'Payes', 'Debello', 'Kyes', 'Reever', 'Joung', 'Coran', 'Perrow', 'Linzer', 'Birchett', 'Poles', 'Cajuste', 'Albergo', 'Andal', 'Belaire', 'Borell', 'Bruehl', 'Celani', 'Cerruti', 'Crellin', 'Delcarlo', 'Dubach', 'Elicker', 'Fialkowski', 'Ganim', 'Gladieux', 'Glendening', 'Glomski', 'Kalp', 'Kavan', 'Kawabata', 'Kever', 'Kisch', 'Maiorino', 'Masaki', 'Mcgeough', 'Miyoshi', 'Nand', 'Nitka', 'Novakovich', 'Penagos', 'Pierini', 'Rassi', 'Rorke', 'Rosenboom', 'Rossmann', 'Scarfone', 'Scarsella', 'Siedschlag', 'Sobotta', 'Studnicka', 'Teeling', 'Tegtmeyer', 'Woznick', 'Beske', 'Dersch', 'Deschepper', 'Duffner', 'Geroux', 'Lindvall', 'Linnemann', 'Roethler', 'Scanlin', 'Schaecher', 'Schmude', 'Schwertner', 'Shimamoto', 'Stratmann', 'Stufflebean', 'Ulatowski', 'Witkop', 'Landrus', 'Sahin', 'Araque', 'Massett', 'Meanor', 'Sebo', 'Delic', 'Bryand', 'Frederico', 'Portuondo', 'Verry', 'Browe', 'Winecoff', 'Gipp', 'Khamis', 'Ingrum', 'Gilliand', 'Poinsett', 'Hagley', 'Valliant', 'Henly', 'Bingley', 'Romulus', 'Moyd', 'Abascal', 'Adelstein', 'Arabian', 'Barcelos', 'Barot', 'Cabacungan', 'Darco', 'Dickmeyer', 'Gindi', 'Grone', 'Haberland', 'Hachem', 'Humbarger', 'Insco', 'Kravchuk', 'Mackowski', 'Madrazo', 'Malesky', 'Markowicz', 'Mcconnon', 'Meiring', 'Micalizzi', 'Moeser', 'Mortier', 'Muegge', 'Ollar', 'Pamperin', 'Pusch', 'Remache', 'Roginski', 'Rothbauer', 'Sellin', 'Stachurski', 'Stelmack', 'Suprenant', 'Totzke', 'Uemura', 'Vandercook', 'Yott', 'Zaher', 'Autio', 'Barnhard', 'Brys', 'Chisenhall', 'Deiters', 'Fetsko', 'Finzel', 'Gangwer', 'Grygiel', 'Heidelberger', 'Kommer', 'Latchford', 'Liszka', 'Mcconaha', 'Miazga', 'Nettesheim', 'Oelschlager', 'Rafuse', 'Reichow', 'Santosuosso', 'Sebastiani', 'Serratore', 'Spenner', 'Steffenson', 'Strehl', 'Tropeano', 'Vanstraten', 'Vegh', 'Virrueta', 'Wilhide', 'Prey', 'Ullmer', 'Ferraz', 'Mazor', 'Vinje', 'Mory', 'Rody', 'Dowen', 'Bord', 'Rajkumar', 'Qadir', 'Turbin', 'Rorex', 'Wilmott', 'Grandpre', 'Bucker', 'Reasonover', 'Holoman', 'Mustapha', 'Warsame', 'Laday', 'Whack', 'Blahut', 'Boxell', 'Britnell', 'Buehl', 'Burri', 'Cesaro', 'Degrand', 'Demetro', 'Fadeley', 'Fischel', 'Florer', 'Givler', 'Gockley', 'Iuliano', 'Koral', 'Kotlarz', 'Kraai', 'Kvamme', 'Latchaw', 'Lopeman', 'Manocchio', 'Martinezgarcia', 'Minehart', 'Narasimhan', 'Nier', 'Niziolek', 'Oliff', 'Piascik', 'Pitera', 'Pronovost', 'Roseboom', 'Rosevear', 'Runkles', 'Santmyer', 'Skillin', 'Stamas', 'Storbeck', 'Teicher', 'Titterington', 'Tomkinson', 'Tzeng', 'Vukovic', 'Wescoat', 'Algeo', 'Aronow', 'Balbach', 'Brockbank', 'Caloca', 'Caughlin', 'Devincenzi', 'Doetsch', 'Filby', 'Godar', 'Keeven', 'Marchetta', 'Quiram', 'Rudeen', 'Siemen', 'Suderman', 'Tacke', 'Walby', 'Fram', 'Maccarthy', 'Fana', 'Kimberley', 'Richens', 'Doser', 'Bigford', 'Brazie', 'Haroon', 'Mcginniss', 'Knipfer', 'Seltz', 'Laton', 'Balow', 'Cramp', 'Edger', 'Alonge', 'Beagles', 'Ken', 'Peary', 'Lifsey', 'Acy', 'Lightbourne', 'Antwi', 'Arntzen', 'Bracknell', 'Brewbaker', 'Carville', 'Cinquemani', 'Corales', 'Corgan', 'Craze', 'Dechristopher', 'Eltzroth', 'Fjelstad', 'Forinash', 'Gudenkauf', 'Hapeman', 'Hassing', 'Hurm', 'Jaurigue', 'Kneisel', 'Kulwicki', 'Lookingbill', 'Moist', 'Naderi', 'Nicoli', 'Nicoson', 'Olvey', 'Remaly', 'Stare', 'Steinruck', 'Switala', 'Tada', 'Toves', 'Traber', 'Tuohey', 'Venti', 'Vinal', 'Wahle', 'Yarosh', 'Balinski', 'Bauknecht', 'Bernauer', 'Bink', 'Chudzik', 'Coppess', 'Corrick', 'Gruener', 'Kutter', 'Malkiewicz', 'Marking', 'Mcgrain', 'Melberg', 'Ohmann', 'Pellicane', 'Regehr', 'Schmoldt', 'Schmuhl', 'Starmer', 'Stiens', 'Whilden', 'Yearick', 'Desmith', 'Habiger', 'Papay', 'Study', 'Toot', 'Franzoni', 'Neuhoff', 'Boreman', 'Sayas', 'Hinks', 'Dax', 'Sasnett', 'Hannis', 'Rotan', 'Haze', 'Jennifer', 'Barganier', 'Milson', 'Kinnie', 'Boyde', 'Dyce', 'Cuttino', 'Neals', 'Mccovery', 'Abaya', 'Balz', 'Bezold', 'Breighner', 'Buttacavoli', 'Cattani', 'Detzel', 'Douthat', 'Dunay', 'Eicholtz', 'Eirich', 'Felkner', 'Friedenberg', 'Haskew', 'Henes', 'Jamroz', 'Kelter', 'Kutzer', 'Laughner', 'Livoti', 'Magistro', 'Makinson', 'Manwell', 'Mckimmy', 'Mcwethy', 'Pacholski', 'Pankau', 'Poh', 'Purewal', 'Remedios', 'Ringuette', 'Rocchi', 'Rojero', 'Sabina', 'Schiffner', 'Sellen', 'Setaro', 'Soledad', 'Stoermer', 'Tal', 'Vanwyk', 'Waack', 'Xenos', 'Yoakam', 'Zweber', 'Apachito', 'Belluomini', 'Cancelliere', 'Cervini', 'Davidovich', 'Deguia', 'Doxtator', 'Errera', 'Eshbaugh', 'Mandt', 'Pautler', 'Raczynski', 'Roemmich', 'Rosamilia', 'Shelhamer', 'Vandevoorde', 'Vanengen', 'Vindiola', 'Weyman', 'Dufur', 'Reaver', 'Bugh', 'Starley', 'Macmullen', 'Mataya', 'Bucknell', 'Taitano', 'Coole', 'Huguet', 'Top', 'Rockford', 'Carrithers', 'Garrell', 'Toppins', 'Mayner', 'Dantes', 'Tones', 'Dauphine', 'Shillingford', 'Massiah', 'Angermeier', 'Arrizon', 'Azer', 'Badami', 'Beeck', 'Buddenhagen', 'Cheyney', 'Danielski', 'Delgiorno', 'Enslin', 'Erber', 'Fluegge', 'Fresco', 'Frishman', 'Geigle', 'Gervase', 'Giangregorio', 'Glauber', 'Hedding', 'Janota', 'Labore', 'Ladley', 'Levee', 'Lipuma', 'Lomanto', 'Magos', 'Mangen', 'Miltner', 'Mitschke', 'Pingley', 'Puertas', 'Schwed', 'Seminario', 'Sinsel', 'Sliney', 'Spielmann', 'Standage', 'Waas', 'Cooprider', 'Delguercio', 'Dockham', 'Dohse', 'Doubrava', 'Emerine', 'Frazzini', 'Godown', 'Heidbreder', 'Ladow', 'Lariccia', 'Molzahn', 'Opiela', 'Ordorica', 'Otterness', 'Owczarzak', 'Rafalski', 'Smigel', 'Urbas', 'Andon', 'Kota', 'Ruzzo', 'Pheasant', 'Proch', 'Sullinger', 'Ezra', 'Portes', 'Mynhier', 'Depree', 'Slight', 'Selley', 'Daughety', 'Shamel', 'Glasby', 'Casher', 'Brisby', 'Whittley', 'Brye', 'Mackins', 'Allam', 'Berwanger', 'Borgmeyer', 'Brumlow', 'Cashmore', 'Clementz', 'Coopman', 'Corti', 'Danzer', 'Deater', 'Delprado', 'Dibuono', 'Dwan', 'Edling', 'Ekins', 'Feighner', 'Galica', 'Gasparro', 'Geisert', 'Gilvin', 'Glotzbach', 'Goostree', 'Hollenkamp', 'Hronek', 'Kamins', 'Khun', 'Klimowicz', 'Langella', 'Letz', 'Lindh', 'Lycan', 'Magouirk', 'Mcbryar', 'Milonas', 'Patalano', 'Petrides', 'Plocher', 'Signer', 'Sinagra', 'Taibi', 'Thissen', 'Thueson', 'Tietje', 'Trebilcock', 'Zelek', 'Alavez', 'Beyersdorf', 'Ferraiolo', 'Flodin', 'Fulwiler', 'Gieselman', 'Heisinger', 'Hutmacher', 'Laraia', 'Lempke', 'Marchiano', 'Mendia', 'Milberger', 'Murri', 'Willhelm', 'Yannone', 'Diss', 'Golab', 'Meuth', 'Strebe', 'Berenguer', 'Cunard', 'Girvan', 'Pacer', 'Nate', 'Weare', 'Dile', 'Donate', 'Pamer', 'Charlet', 'Roades', 'Krah', 'Merton', 'Debrito', 'Montel', 'Guimont', 'Caire', 'Olley', 'Ausborn', 'Ramdass', 'Stores', 'Hush', 'Watler', 'Robotham', 'Stanislaus', 'Bellevue', 'Almeter', 'Bartold', 'Bathgate', 'Bollier', 'Boundy', 'Bushart', 'Buzek', 'Cauthon', 'Daudelin', 'Delguidice', 'Depaolis', 'Dysert', 'Forsee', 'Goglia', 'Gruenhagen', 'Guilfoil', 'Guldin', 'Gurnee', 'Henzel', 'Jurney', 'Kable', 'Korenek', 'Kussman', 'Liese', 'Mauss', 'Mexicano', 'Morini', 'Oathout', 'Paragas', 'Phommachanh', 'Pixton', 'Pucciarelli', 'Rabine', 'Ramlow', 'Ravert', 'Redhouse', 'Renault', 'Rybinski', 'Sahlin', 'Scherger', 'Schoeffler', 'Smolinsky', 'Stadnik', 'Stallsmith', 'Timoney', 'Whiteeagle', 'Woodsmall', 'Zinter', 'Bargmann', 'Basich', 'Bossio', 'Coutant', 'Curcuru', 'Duitsman', 'Hunkele', 'Kingry', 'Kotek', 'Mancusi', 'Orama', 'Paszek', 'Schrodt', 'Schuknecht', 'Torsiello', 'Troise', 'Wernimont', 'Wipperfurth', 'Wissner', 'Zahradnik', 'Deasis', 'Pac', 'Vowles', 'Montesi', 'Carie', 'Name', 'Broy', 'Hillson', 'Exton', 'Skerritt', 'Ude', 'Allston', 'Cliatt', 'Chevis', 'Poitier', 'Barrasso', 'Bartnicki', 'Broski', 'Cobleigh', 'Crickenberger', 'Cruces', 'Cumba', 'Diodato', 'Dipietrantonio', 'Eyerly', 'Fedler', 'Fetting', 'Francavilla', 'Frein', 'Gasparyan', 'Gingold', 'Gunnarson', 'Houy', 'Huelsmann', 'Jeppsen', 'Labreck', 'Lefton', 'Maenza', 'Mauritz', 'Mingione', 'Mullany', 'Mussell', 'Muston', 'Paraiso', 'Peelman', 'Penuel', 'Piccola', 'Punt', 'Ramella', 'Rauser', 'Reas', 'Reino', 'Schlack', 'Sebastiano', 'Sgambati', 'Shackett', 'Szpak', 'Thalacker', 'Theissen', 'Tutko', 'Astarita', 'Blazejewski', 'Dejaynes', 'Djordjevic', 'Eckenroth', 'Estala', 'Giacomo', 'Glaub', 'Golubski', 'Guerreiro', 'Housholder', 'Kashuba', 'Klute', 'Lennartz', 'Messamore', 'Rovito', 'Schreurs', 'Starcevich', 'Starkel', 'Szczerba', 'Thomassen', 'Varkey', 'Yorio', 'Guba', 'Unzicker', 'Howry', 'Bido', 'Farella', 'Frane', 'Werry', 'Cornia', 'Postal', 'Humphres', 'Ran', 'Macnair', 'Duston', 'Aveni', 'Mcconn', 'Sistare', 'Wadell', 'Naraine', 'Mubarak', 'Lonzo', 'Shyne', 'Tilmon', 'Symonette', 'Shinholster', 'Oree', 'Ogarro', 'Quashie', 'Almario', 'Antonsen', 'Armetta', 'Avetisyan', 'Bania', 'Barricklow', 'Bloemker', 'Cannavo', 'Dolliver', 'Espenshade', 'Falor', 'Fukuhara', 'Gemme', 'Goldfinger', 'Gonya', 'Hamamoto', 'Hindi', 'Hiraldo', 'Holquin', 'Janco', 'Janow', 'Lemming', 'Macchio', 'Mago', 'Mavity', 'Mcnamer', 'Mushrush', 'Niskanen', 'Ohms', 'Pawluk', 'Popple', 'Poser', 'Schiavi', 'Stram', 'Streight', 'Stueck', 'Vansandt', 'Vivona', 'Vongphakdy', 'Zalar', 'Zipper', 'Altic', 'Billmeyer', 'Boghosian', 'Bohlke', 'Cisewski', 'Gabrielsen', 'Gianotti', 'Heffler', 'Holian', 'Kannenberg', 'Lenius', 'Manuelito', 'Mugavero', 'Reinier', 'Rekowski', 'Sadlier', 'Scialdone', 'Stromquist', 'Vittetoe', 'Vorwald', 'Widrig', 'Audi', 'Peral', 'Devery', 'Gato', 'Sower', 'Vanes', 'Bonnes', 'Hense', 'Counsell', 'Frankie', 'Colford', 'Wanser', 'Mickels', 'Briddell', 'Washinton', 'Antilla', 'Baxendale', 'Beining', 'Belveal', 'Boedecker', 'Bottenfield', 'Bufano', 'Castellana', 'Chaikin', 'Cherne', 'Costilow', 'Dzialo', 'Goeken', 'Gombert', 'Hammerman', 'Hansman', 'Hartling', 'Kalani', 'Klich', 'Kolodziejski', 'Kramar', 'Lapinsky', 'Latterell', 'Lipsitz', 'Loma', 'Lukenbill', 'Marxen', 'Metallo', 'Molner', 'Niquette', 'Ostrand', 'Pelster', 'Previti', 'Rennaker', 'Roering', 'Roode', 'Saltos', 'Sangiovanni', 'Schiraldi', 'Schlafer', 'Schwering', 'Seedorf', 'Sklenar', 'Spinello', 'Steinhorst', 'Urueta', 'Vonstein', 'Bonczek', 'Casalino', 'Chiaro', 'Doffing', 'Downham', 'Gillotti', 'Hearl', 'Karges', 'Kunesh', 'Langeland', 'Maertz', 'Mattinson', 'Mignano', 'Pasquinelli', 'Petracca', 'Pherigo', 'Pikus', 'Reichmuth', 'Schwegman', 'Schwerdt', 'Seelman', 'Winquist', 'Wyka', 'Yahr', 'Bunkers', 'Delnegro', 'Norder', 'Manas', 'Polites', 'Grape', 'Jares', 'Surges', 'Asa', 'Copeman', 'Askar', 'Goman', 'Whitmyer', 'Cohran', 'Imbert', 'Beaner', 'Hugger', 'Petion', 'Lauture', 'Andringa', 'Athanas', 'Butrick', 'Caronna', 'Dedominicis', 'Eligio', 'Fasick', 'Hilinski', 'Hinely', 'Idler', 'Janosko', 'Kempner', 'Klosinski', 'Lapeyrouse', 'Lindroth', 'Marcon', 'Meding', 'Peppin', 'Quizon', 'Rectenwald', 'Roessner', 'Roets', 'Schonberger', 'Szostek', 'Wassink', 'Whan', 'Yeakle', 'Alguire', 'Bielenberg', 'Bisaillon', 'Bonenberger', 'Centola', 'Colaizzi', 'Deroos', 'Eberlin', 'Ehrig', 'Ferenc', 'Freiermuth', 'Fruchter', 'Garnto', 'Huxford', 'Knous', 'Luttman', 'Mulry', 'Schirm', 'Stankovic', 'Authier', 'Derise', 'Doo', 'Kessen', 'Maline', 'Porada', 'Vasconez', 'Haseman', 'Tonner', 'Woodroof', 'Bedrossian', 'Cranmore', 'Dodaro', 'Hommes', 'Harmony', 'Peno', 'Mccommon', 'Colver', 'Olinde', 'Oba', 'Colone', 'Warbington', 'Monie', 'Whitmill', 'Moxey', 'Canion', 'Mcclenney', 'Hallmon', 'Austill', 'Berni', 'Boehning', 'Bueso', 'Cefalo', 'Conneely', 'Demicco', 'Dieppa', 'Duris', 'Durnil', 'Erxleben', 'Hashimi', 'Hedquist', 'Koc', 'Lamattina', 'Lassman', 'Ligman', 'Lukins', 'Mackler', 'Manolis', 'Mou', 'Oblak', 'Omahoney', 'Paolo', 'Pollok', 'Priess', 'Reeh', 'Rempfer', 'Rickerd', 'Schoettle', 'Serritella', 'Steedman', 'Suss', 'Tanimoto', 'Thaden', 'Thelin', 'Vanwingerden', 'Wacha', 'Weldin', 'Youkhana', 'Bazzano', 'Behring', 'Caliri', 'Cocchi', 'Croissant', 'Dibbern', 'Figiel', 'Flygare', 'Grieshop', 'Iten', 'Kaupp', 'Linnane', 'Plybon', 'Rappleye', 'Romanik', 'Saefong', 'Schetter', 'Schryer', 'Siwik', 'Snitker', 'Tomasic', 'Wavra', 'Auen', 'Thone', 'Marso', 'Shadid', 'Cake', 'Louvier', 'Macia', 'Areola', 'Kardell', 'Strome', 'Coogle', 'Delis', 'Pistorius', 'Raybourn', 'Sula', 'Math', 'Sanda', 'Renaldo', 'Pat', 'Florance', 'Brank', 'Alice', 'Rosebrough', 'Quiett', 'Henigan', 'Mcclees', 'Dase', 'Bagot', 'Kings', 'Lanehart', 'Barbary', 'Stitts', 'Aurora', 'Baldoni', 'Barkalow', 'Bohnet', 'Bosshart', 'Decapua', 'Denbo', 'Deneault', 'Dinse', 'Dul', 'Estle', 'Filipski', 'Fishell', 'Fluckiger', 'Glassberg', 'Janick', 'Juda', 'Kibbee', 'Kreisler', 'Lawther', 'Levangie', 'Lichtenwalner', 'Lucking', 'Meiner', 'Mileham', 'Milz', 'Reposa', 'Rinehimer', 'Rupley', 'Sandez', 'Schinke', 'Sharpnack', 'Sineath', 'Tax', 'Thumma', 'Urda', 'Widdison', 'Bergdoll', 'Bruhl', 'Chesmore', 'Delfavero', 'Ferderer', 'Haueter', 'Hirshberg', 'Hollobaugh', 'Lalama', 'Mckeag', 'Mehlhoff', 'Mirchandani', 'Orwick', 'Puskarich', 'Schlotzhauer', 'Stoiber', 'Swetz', 'Basara', 'Magaw', 'Amble', 'Hawe', 'Toren', 'Parilla', 'Gowell', 'Selkirk', 'Edris', 'Ariel', 'Kihara', 'Dunkerson', 'Halk', 'Mooty', 'Tippen', 'Fullenwider', 'Herford', 'Salton', 'Feider', 'Buckhannon', 'Mckneely', 'Milon', 'Whiters', 'Barasch', 'Baria', 'Basques', 'Beavin', 'Borre', 'Branz', 'Broers', 'Conca', 'Cortopassi', 'Courchesne', 'Crisanti', 'Cumpian', 'Dagan', 'Dekay', 'Demartin', 'Dewaard', 'Dowland', 'Duffell', 'Ebersol', 'Faiola', 'Frontz', 'Fryling', 'Garczynski', 'Hanway', 'Huettner', 'Janovsky', 'Johndrow', 'Kahana', 'Kaniewski', 'Kulish', 'Lich', 'Lincks', 'Loppnow', 'Macnab', 'Mcconaughy', 'Melroy', 'Noviello', 'Orn', 'Pacas', 'Peppel', 'Polidori', 'Radi', 'Riesgo', 'Romanoski', 'Sagrero', 'Schirripa', 'Spack', 'Sternhagen', 'Tamburri', 'Traczyk', 'Uballe', 'Vandruff', 'Voght', 'Weant', 'Weinel', 'Angerman', 'Boultinghouse', 'Dolinar', 'Dripps', 'Dubow', 'Ehrhard', 'Janvrin', 'Lazear', 'Liddiard', 'Madayag', 'Mirkin', 'Monticello', 'Mulka', 'Oliger', 'Pierceall', 'Pittner', 'Polkowski', 'Prindiville', 'Rasnic', 'Tellefsen', 'Uffelman', 'Vandenbergh', 'Weisenbach', 'Wiedmeyer', 'Wintle', 'Wisz', 'Yorba', 'Holtmeyer', 'Tabet', 'Laham', 'Barsoum', 'Henner', 'Idle', 'Shaft', 'Rennels', 'Swarm', 'Forgie', 'Khaled', 'Avon', 'Hewey', 'Grober', 'Pipe', 'Macfadden', 'Keath', 'Fergason', 'Polland', 'Brownley', 'Haslip', 'Crocket', 'Tines', 'Juniel', 'Opara', 'Bethley', 'Ambuehl', 'Bagheri', 'Baquera', 'Bertoli', 'Bisek', 'Borroto', 'Botten', 'Bovenzi', 'Bruntz', 'Buehring', 'Canche', 'Cicco', 'Dambach', 'Delellis', 'Deniston', 'Dirico', 'Feagle', 'Frayne', 'Haagenson', 'Janicke', 'Kashyap', 'Kastel', 'Kruck', 'Langi', 'Lapka', 'Marschner', 'Megia', 'Nesta', 'Nevala', 'Oblinger', 'Picchi', 'Rodeffer', 'Salkin', 'Scavuzzo', 'Sladky', 'Soyars', 'Suchil', 'Thielbar', 'Timoteo', 'Vanhise', 'Varden', 'Waldoch', 'Watling', 'Werk', 'Becvar', 'Betteridge', 'Bolliger', 'Bonifield', 'Buchberger', 'Caprara', 'Castrogiovanni', 'Fallaw', 'Geeting', 'Hiegel', 'Hulgan', 'Kokesh', 'Lanting', 'Mcphetridge', 'Nuxoll', 'Soun', 'Strothman', 'Triska', 'Vensel', 'Wesolek', 'Wixted', 'Wolgemuth', 'Yedinak', 'Anthis', 'Manfred', 'Agans', 'Lafoe', 'Mcginnes', 'Folwell', 'Galvao', 'Carmo', 'Valin', 'Woon', 'Degregory', 'Evangelist', 'Coast', 'Strater', 'Decou', 'Pears', 'Nellums', 'Kynard', 'Boursiquot', 'Ruffins', 'Akhavan', 'Baloga', 'Barany', 'Buche', 'Davoli', 'Fennewald', 'Figler', 'Frede', 'Gannett', 'Ghannam', 'Handlon', 'Herridge', 'Jakel', 'Kamphuis', 'Kattan', 'Kemplin', 'Klecka', 'Korver', 'Kozakiewicz', 'Linenberger', 'Lofaso', 'Lorman', 'Lueder', 'Mcconahay', 'Mcternan', 'Mench', 'Norenberg', 'Oro', 'Ostenson', 'Pant', 'Peardon', 'Pertuit', 'Ritzert', 'Salvetti', 'Sandner', 'Sheek', 'Sniegowski', 'Sorbo', 'Sperbeck', 'Sump', 'Supinski', 'Sweetin', 'Toenjes', 'Velotta', 'Venier', 'Veracruz', 'Wender', 'Yamagata', 'Arostegui', 'Balestra', 'Blumstein', 'Carras', 'Grauberger', 'Howdeshell', 'Murayama', 'Nippert', 'Notch', 'Reisert', 'Sebren', 'Tetzloff', 'Venneman', 'Douds', 'Lineman', 'Powles', 'Huet', 'Matto', 'Roes', 'Dillin', 'Lagan', 'Bakes', 'Yann', 'Canterberry', 'Milum', 'Hinderman', 'Linzey', 'Ballen', 'Ventress', 'Prysock', 'Bangle', 'Blinder', 'Bugaj', 'Carlisi', 'Dimario', 'Dzikowski', 'Gaetz', 'Galves', 'Ghazal', 'Golebiewski', 'Hadsall', 'Hogberg', 'Krammer', 'Kreisher', 'Lamia', 'Luhmann', 'Lupa', 'Michelotti', 'Nesci', 'Paape', 'Posthumus', 'Reth', 'Sassman', 'Schlechter', 'Schlie', 'Schumacker', 'Seliger', 'Shanholtzer', 'Strojny', 'Taglieri', 'Tibbles', 'Tregoning', 'Valine', 'Zeiset', 'Antu', 'Bierwirth', 'Birenbaum', 'Boeder', 'Dobkins', 'Fenoglio', 'Jentsch', 'Marcinkiewicz', 'Mruk', 'Muhlbauer', 'Namba', 'Oettinger', 'Rigor', 'Rothweiler', 'Schmader', 'Schork', 'Vandevoort', 'Brenny', 'Neels', 'Fodge', 'Que', 'Dalpe', 'Guerard', 'Lammey', 'Alfredo', 'Corrin', 'Quarry', 'Reise', 'Derrow', 'Worrel', 'Tennent', 'Cassis', 'Winson', 'Cornet', 'Garlin', 'Saucer', 'Ursery', 'Saffo', 'Battee', 'Ackerley', 'Ackland', 'Allmendinger', 'Altamura', 'Anastas', 'Artola', 'Baldassari', 'Bayron', 'Bouwkamp', 'Buonopane', 'Chronis', 'Coffaro', 'Dech', 'Delfierro', 'Depaulo', 'Digges', 'Dowda', 'Drab', 'Feijoo', 'Formato', 'Friedli', 'Hanahan', 'Hegna', 'Igarashi', 'Kamai', 'Kory', 'Kuzel', 'Lewkowicz', 'Lumbra', 'Mccreadie', 'Meisch', 'Montoro', 'Pamintuan', 'Petrow', 'Pulcini', 'Shewell', 'Spitznagel', 'Swedlund', 'Terhorst', 'Wilberg', 'Willwerth', 'Affinito', 'Baune', 'Beichner', 'Boutell', 'Challender', 'Ellestad', 'Gomm', 'Hochstatter', 'Jasko', 'Kielar', 'Kimmerle', 'Kirshenbaum', 'Kotila', 'Lecker', 'Manross', 'Mcnevin', 'Neuburger', 'Verderosa', 'Wiltsey', 'Caminero', 'Gianfrancesco', 'Shiverdecker', 'Amman', 'Flavell', 'Oconor', 'Shure', 'Hanagan', 'Bokor', 'Mashaw', 'Ground', 'Brittenham', 'Pinera', 'Smaltz', 'Hold', 'Gallamore', 'Delon', 'Hearing', 'Rynes', 'Cocklin', 'Cassie', 'Calligan', 'Josue', 'Congo', 'Tennell', 'Blyther', 'Azarian', 'Bauernfeind', 'Beeghly', 'Berget', 'Brayfield', 'Cerasoli', 'Dedecker', 'Gloeckner', 'Herriges', 'Hoganson', 'Ivancic', 'Jakeway', 'Kayne', 'Kitko', 'Kohlbeck', 'Krabbenhoft', 'Kumari', 'Lauri', 'Leiber', 'Minke', 'Montecino', 'Moutray', 'Munshi', 'Ohlin', 'Portocarrero', 'Rados', 'Roedl', 'Rossing', 'Schake', 'Simonin', 'Staffa', 'Stroschein', 'Titman', 'Treder', 'Vonada', 'Xenakis', 'Aulds', 'Benedick', 'Boulais', 'Butikofer', 'Butorac', 'Contento', 'Goetting', 'Goldammer', 'Hopke', 'Koppes', 'Phetteplace', 'Roehrs', 'Schul', 'Slabach', 'Steinmiller', 'Sucharski', 'Vorwerk', 'Wahlert', 'Wheatcraft', 'Abellera', 'Jutte', 'Baumgarner', 'Tijerino', 'Awadallah', 'Horen', 'Lina', 'Stanbrough', 'College', 'Jarry', 'Keas', 'Mordan', 'Ramnauth', 'Rena', 'Wa', 'Petters', 'Ramnath', 'Hellams', 'Mamon', 'Cheese', 'Meggett', 'Anttila', 'Beilman', 'Binsfeld', 'Brining', 'Brubeck', 'Carcione', 'Chandran', 'Chaudhuri', 'Cogliano', 'Dimaano', 'Dols', 'Doughten', 'Ehrenfeld', 'Elena', 'Fausnaugh', 'Fetz', 'Fogelson', 'Fraleigh', 'Gaza', 'Giesey', 'Gockel', 'Gougeon', 'Granito', 'Grassia', 'Hauserman', 'Idrovo', 'Iwan', 'Janning', 'Kaffenberger', 'Kichline', 'Kimoto', 'Kolodny', 'Kortum', 'Lafevers', 'Lodi', 'Longton', 'Ludke', 'Manganelli', 'Mccuan', 'Merryfield', 'Mezquita', 'Morandi', 'Neibauer', 'Oran', 'Ozaeta', 'Pacha', 'Palese', 'Perala', 'Pisarcik', 'Pobanz', 'Pommer', 'Pontrelli', 'Prabhakar', 'Rehmann', 'Scheunemann', 'Severini', 'Skalla', 'Srinivas', 'Stadtmiller', 'Trentman', 'Trinka', 'Tutterow', 'Vari', 'Wence', 'Zeff', 'Anagnos', 'Arvayo', 'Bihl', 'Darbyshire', 'Deeg', 'Domagalski', 'Estenson', 'Finkenbinder', 'Gaboriault', 'Kastens', 'Lacek', 'Merkin', 'Mersman', 'Nicolaus', 'Offerdahl', 'Pallett', 'Platten', 'Quesnell', 'Skene', 'Sondag', 'Wolfrom', 'Mineer', 'Sor', 'Canard', 'Mcmeen', 'Tur', 'Giner', 'Mackrell', 'Alic', 'Sampath', 'Baby', 'Beales', 'Kadri', 'Minot', 'Bienvenue', 'Millirons', 'Woodstock', 'Landing', 'Limehouse', 'Andonian', 'Armentor', 'Asai', 'Cutaia', 'Darji', 'Delsanto', 'Deutch', 'Droge', 'Emme', 'Flenner', 'Gaida', 'Gladd', 'Guettler', 'Guggisberg', 'Guier', 'Habenicht', 'Heininger', 'Helfman', 'Hiscox', 'Holtorf', 'Hovious', 'Juul', 'Lacock', 'Lepisto', 'Malanowski', 'Marineau', 'Matza', 'Meffert', 'Nuon', 'Oneto', 'Padmanabhan', 'Pantuso', 'Pesci', 'Rosenbluth', 'Rubano', 'Sedlar', 'Sferrazza', 'Sifuentez', 'Simione', 'Torossian', 'Vaux', 'Weilbacher', 'Wiatrek', 'Brzoska', 'Caltabiano', 'Csaszar', 'Eyerman', 'Geissinger', 'Gioffre', 'Grilliot', 'Grotz', 'Harrower', 'Jaroszewski', 'Jokerst', 'Kamali', 'Kampmann', 'Klemz', 'Koike', 'Lista', 'Mcconkie', 'Mencia', 'Missler', 'Olshefski', 'Omdahl', 'Penunuri', 'Scheckel', 'Schreiter', 'Swackhammer', 'Taflinger', 'Tegethoff', 'Ummel', 'Wetsel', 'Wissmann', 'Porr', 'Ramser', 'Russett', 'Clucas', 'Matlin', 'Noblet', 'Boyan', 'Koman', 'Lope', 'Deman', 'Latendresse', 'Bound', 'Rijos', 'Bouillon', 'Crunkleton', 'Jayson', 'Anne', 'Staude', 'Sturn', 'Burdell', 'Arther', 'Yett', 'Woolcock', 'Clemon', 'Saintjean', 'Sainvil', 'Coverson', 'Barroga', 'Benedicto', 'Borin', 'Budrow', 'Cuddihy', 'Forness', 'Gohman', 'Hepker', 'Hilscher', 'Holien', 'Holstad', 'Hopfer', 'Hulburt', 'Kalter', 'Kuehnle', 'Lachica', 'Macioce', 'Massimo', 'Matsubara', 'Meaker', 'Mehmedovic', 'Minckler', 'Miralles', 'Mostek', 'Oshita', 'Parthasarathy', 'Roszak', 'Rottenberg', 'Rydman', 'Shankman', 'Sprong', 'Stenerson', 'Strubel', 'Tavano', 'Thornberg', 'Trumpower', 'Whittinghill', 'Altenhofen', 'Bartolucci', 'Debski', 'Dekoning', 'Dottavio', 'Emminger', 'Hodkinson', 'Hurtubise', 'Lauridsen', 'Leinberger', 'Luskin', 'Pask', 'Rehfeld', 'Spagna', 'Szumski', 'Szymborski', 'Teem', 'Tritschler', 'Tschantz', 'Tsutsui', 'Vanecek', 'Haddaway', 'Colombe', 'Mayol', 'Shivley', 'Maturin', 'Babe', 'Bovey', 'Bathe', 'Belliard', 'Loner', 'Arrow', 'Billa', 'Mcneish', 'Kinton', 'Scarber', 'Donson', 'Atherley', 'Abdulaziz', 'Age', 'Carreker', 'Tory', 'Leduff', 'Wattley', 'Altergott', 'Belitz', 'Bidinger', 'Blauch', 'Cariker', 'Condren', 'Curiale', 'Dronet', 'Elstad', 'Esquerra', 'Fread', 'Gilb', 'Goga', 'Gonyo', 'Grudzien', 'Hino', 'Ishler', 'Jacober', 'Kilty', 'Kuhrt', 'Lairmore', 'Lamba', 'Lorek', 'Lucich', 'Marcou', 'Mcgath', 'Menze', 'Mindel', 'Nabb', 'Ottosen', 'Pann', 'Ratkowski', 'Saurer', 'Sedore', 'Shonka', 'Soberano', 'Sossamon', 'Stdennis', 'Stillinger', 'Tager', 'Tersigni', 'Tissue', 'Trampe', 'Twite', 'Whitling', 'Wiebusch', 'Abundez', 'Bisping', 'Candella', 'Dahill', 'Groebner', 'Gulbrandsen', 'Hasenauer', 'Heesch', 'Hipwell', 'Kamrowski', 'Keyworth', 'Kleinschmit', 'Legorreta', 'Minium', 'Mixter', 'Neiswonger', 'Purk', 'Rinkenberger', 'Rosenkrans', 'Rozenberg', 'Simenson', 'Soltes', 'Storino', 'Viereck', 'Schaafsma', 'Craigie', 'Amorin', 'Latner', 'Bowmer', 'Nasby', 'Bada', 'Rami', 'Mcglashan', 'Reede', 'Police', 'Cobey', 'Dahir', 'Dirden', 'Destine', 'Akkerman', 'Azzopardi', 'Blankenhorn', 'Bolio', 'Brandhorst', 'Buchter', 'Canul', 'Cocozza', 'Collantes', 'Cronic', 'Cullifer', 'Delpizzo', 'Demoranville', 'Dolder', 'Dvorsky', 'Eggett', 'Elgersma', 'Episcopo', 'Esses', 'Fehlman', 'Gansen', 'Garciamartinez', 'Goldwater', 'Gushue', 'Hittner', 'Igel', 'Jupin', 'Kostoff', 'Kruschke', 'Kuechler', 'Labs', 'Lacerte', 'Lagle', 'Leischner', 'Linders', 'Marulanda', 'Meindl', 'Melman', 'Menden', 'Orbach', 'Patak', 'Patras', 'Petroni', 'Rabenold', 'Rapisarda', 'Rodenburg', 'Roelle', 'Schar', 'Scherbarth', 'Simar', 'Thoen', 'Trana', 'Tuch', 'Turko', 'Wamser', 'Weinfeld', 'Wirz', 'Zatorski', 'Zbinden', 'Aksamit', 'Asebedo', 'Biello', 'Bouchey', 'Callejo', 'Espanol', 'Flathers', 'Kunka', 'Liaw', 'Mckowen', 'Mitrano', 'Needler', 'Och', 'Paolella', 'Patricelli', 'Recine', 'Rengel', 'Spinler', 'Wagenaar', 'Winnicki', 'Eichert', 'Dabb', 'Imrie', 'Antoni', 'Lardner', 'Maund', 'Schou', 'Brittin', 'Anthon', 'Was', 'Nevis', 'Delamar', 'Mcnorton', 'Tankard', 'Boardley', 'Garcon', 'Wimes', 'Antell', 'Belmarez', 'Boff', 'Boughan', 'Cando', 'Carrender', 'Carrieri', 'Charnley', 'Cittadino', 'Cwynar', 'Deupree', 'Doepke', 'Fasone', 'Fauteux', 'Foody', 'Fornal', 'Fust', 'Gasner', 'Gloe', 'Gorter', 'Grumbine', 'Hancher', 'Hapke', 'Heckendorn', 'Heinlen', 'Hilgeman', 'Kahre', 'Kakos', 'Kops', 'Lahn', 'Leiferman', 'Lothamer', 'Mallis', 'Napierkowski', 'Orbin', 'Panno', 'Piacente', 'Posas', 'Ragasa', 'Sonora', 'Stupka', 'Tio', 'Valido', 'Weyrick', 'Argall', 'Arrighi', 'Bohlken', 'Desrocher', 'Distad', 'Erkkila', 'Gherardi', 'Goughnour', 'Koltz', 'Koperski', 'Lafalce', 'Lucken', 'Meleski', 'Mortellaro', 'Nagorski', 'Pedrotti', 'Pruyn', 'Revard', 'Saffran', 'Schnoebelen', 'Sermersheim', 'Skroch', 'Vandervliet', 'Alwood', 'Bosso', 'Hor', 'Licerio', 'Septer', 'Labo', 'Lessa', 'Ooley', 'Gorgas', 'Medal', 'Coull', 'Creely', 'Bolland', 'Ishaq', 'Legore', 'Alicia', 'Fillingame', 'Levers', 'Flight', 'Woodrick', 'Berrie', 'Buckels', 'Pigue', 'Crosse', 'Speakes', 'Wynes', 'Mussa', 'Highbaugh', 'Venning', 'Dupas', 'Mccastle', 'Andreoni', 'Bakula', 'Besemer', 'Blier', 'Braaksma', 'Brocco', 'Cajas', 'Campano', 'Crapser', 'Dentinger', 'Deziel', 'Dragos', 'Ekblad', 'Gargis', 'Gilberto', 'Guadron', 'Hollern', 'Leibensperger', 'Lindaman', 'Lumadue', 'Mault', 'Mieses', 'Nanninga', 'Nudd', 'Ouch', 'Ramin', 'Reggio', 'Ruttan', 'Saccomanno', 'Scheaffer', 'Sohm', 'Spaniol', 'Stenner', 'Strieter', 'Takashima', 'Vaid', 'Venzke', 'Wallwork', 'Zaffuto', 'Zaucha', 'Zemel', 'Zinni', 'Alltop', 'Ciolek', 'Empie', 'Flitton', 'Gullikson', 'Hassebrock', 'Kanitz', 'Kirschenmann', 'Krivanek', 'Loseke', 'Mckercher', 'Melching', 'Nham', 'Ormerod', 'Randlett', 'Reifel', 'Sawada', 'Sofranko', 'Stoia', 'Umeda', 'Eagon', 'Hucker', 'Kenniston', 'Salus', 'Ayyad', 'Camey', 'Dacy', 'Joa', 'Peerson', 'Rossy', 'Aure', 'Keetch', 'Sprigg', 'Southgate', 'Parden', 'Andris', 'Bossman', 'Blondell', 'Carmickle', 'Pelly', 'Mceachron', 'Marry', 'Burel', 'Shark', 'Flash', 'Rickenbacker', 'Foots', 'Sillah', 'Almgren', 'Awtrey', 'Berganza', 'Boehne', 'Bralley', 'Brosnahan', 'Caddick', 'Chandonnet', 'Cullimore', 'Darroch', 'Eimers', 'Flam', 'Howerter', 'Jerzak', 'Kabler', 'Kirkes', 'Kopper', 'Krakow', 'Linskey', 'Lizzi', 'Luria', 'Marcrum', 'Mathy', 'Matulich', 'Miskin', 'Moghadam', 'Nagarajan', 'Packham', 'Papania', 'Paup', 'Rippeon', 'Rolli', 'Rubey', 'Scherzinger', 'Scrima', 'Sharar', 'Shoberg', 'Stupar', 'Tendler', 'Tobiason', 'Vanvooren', 'Zisa', 'Bindel', 'Flasch', 'Graetz', 'Heintzman', 'Kosanke', 'Longden', 'Mahfouz', 'Mormile', 'Nannini', 'Olaes', 'Panik', 'Putzier', 'Radilla', 'Schaedler', 'Schoepf', 'Sianez', 'Taucher', 'Wiebelhaus', 'Banka', 'Console', 'Derego', 'Vile', 'Colgin', 'Drage', 'Josten', 'Luckadoo', 'Ryen', 'Bako', 'Ow', 'Patient', 'Elmes', 'Mossa', 'Colee', 'Comber', 'Tippy', 'Perrell', 'Axon', 'Rickson', 'Postlewaite', 'Lafargue', 'Guffin', 'Cains', 'Dewindt', 'Cathy', 'Tallie', 'Ausby', 'Alires', 'Baz', 'Bergeman', 'Bodensteiner', 'Borghi', 'Dematos', 'Denzler', 'Dorko', 'Duffett', 'Dykas', 'Emerton', 'Fenger', 'Fosberg', 'Gwinner', 'Kniess', 'Lerew', 'Lohner', 'Lun', 'Maita', 'Mandler', 'Marcoe', 'Nikolov', 'Paschen', 'Paver', 'Prosperi', 'Rackliff', 'Roever', 'Ruberg', 'Ruest', 'Schnick', 'Schuur', 'Sowash', 'Zanca', 'Brecheen', 'Brusky', 'Chauca', 'Debernardi', 'Froio', 'Gadway', 'Karoly', 'Kintzel', 'Kneisley', 'Kruser', 'Lindfors', 'Lwin', 'Oursler', 'Peruski', 'Petteys', 'Rottmann', 'Schroeck', 'Stenglein', 'Vigen', 'Wempe', 'Zehren', 'Wollen', 'Dismore', 'Santalucia', 'Laza', 'Pesnell', 'Litle', 'Markson', 'Piercefield', 'Jerrett', 'Virginia', 'Demonbreun', 'Tugman', 'Ramoutar', 'Bazin', 'Ola', 'Alamin', 'Adebayo', 'Berkland', 'Bernt', 'Briguglio', 'Bulnes', 'Burack', 'Cantoran', 'Giardini', 'Goetzke', 'Graziosi', 'Guberman', 'Kamaka', 'Karvonen', 'Kitz', 'Kopera', 'Krempa', 'Linkenhoker', 'Mascioli', 'Matlick', 'Mcmahill', 'Medaglia', 'Mirarchi', 'Mondry', 'Muhlestein', 'Murty', 'Orender', 'Pesantez', 'Postiglione', 'Reisen', 'Riff', 'Scarantino', 'Seelinger', 'Seher', 'Sharum', 'Sorice', 'Staebler', 'Tanney', 'Tech', 'Tramontano', 'Trude', 'Vasudevan', 'Wareing', 'Westerhold', 'Wohlfarth', 'Achorn', 'Boesel', 'Calabaza', 'Dunkleberger', 'Erck', 'Fanger', 'Felmlee', 'Friebel', 'Gabrys', 'Godsil', 'Goldhammer', 'Gourneau', 'Kaseman', 'Keysor', 'Mccargar', 'Mittag', 'Narum', 'Schoeneck', 'Stenquist', 'Sunderlin', 'Tarazon', 'Tietze', 'Wemmer', 'Witthuhn', 'Durango', 'Simerson', 'Beber', 'Bjorn', 'Neuville', 'Preas', 'Reitter', 'Senf', 'Mcclatchy', 'Sanor', 'Benney', 'Sarrazin', 'Woodliff', 'Bramlet', 'Cullin', 'Wessells', 'Higgens', 'Rout', 'Craigen', 'Ackers', 'Wickliff', 'Hofler', 'Pilgram', 'Mcfayden', 'Dillworth', 'Robenson', 'Mateen', 'Ambrogio', 'Aoun', 'Aranas', 'Balsiger', 'Bonzo', 'Busam', 'Casassa', 'Ciborowski', 'Cotterill', 'Cressler', 'Cristales', 'Crumpacker', 'Daloisio', 'Damasco', 'Depolo', 'Diguglielmo', 'Dominik', 'Esbenshade', 'Fineran', 'Formisano', 'Gandolfi', 'Geidel', 'Gerwitz', 'Grammatico', 'Idleman', 'Iwinski', 'Kerth', 'Lacouture', 'Lafoy', 'Lapid', 'Lardizabal', 'Lembcke', 'Maga', 'Mahrt', 'Maniatis', 'Martinezlopez', 'Martinovich', 'Milham', 'Muscatello', 'Perezperez', 'Quiocho', 'Rickner', 'Sackrider', 'Schwarm', 'Schwebke', 'Scollard', 'Seader', 'Shutters', 'Skare', 'Slothower', 'Steeber', 'Want', 'Cherubini', 'Coslett', 'Degener', 'Dulak', 'Faull', 'Freyman', 'Gatchel', 'Ginzburg', 'Gronberg', 'Landeck', 'Lehenbauer', 'Lubke', 'Mcconaughey', 'Mendonsa', 'Minnehan', 'Palaguachi', 'Peedin', 'Raithel', 'Rezabek', 'Rolfson', 'Schuitema', 'Sjodin', 'Underkoffler', 'Verrilli', 'Yogi', 'Zimpfer', 'Zingaro', 'Butrum', 'Ritson', 'Martinka', 'Cashatt', 'Kearn', 'Sawtell', 'Boyster', 'Broyhill', 'Cockerell', 'Thane', 'Resende', 'Pealer', 'Perrot', 'Everhardt', 'Breach', 'Bry', 'Juma', 'Mclaine', 'Paddy', 'Hennesy', 'Ledee', 'Web', 'Delone', 'Louison', 'Hamiel', 'Tutson', 'Bellingham', 'Brenn', 'Bussen', 'Charrette', 'Denenberg', 'Depascale', 'Derner', 'Dondlinger', 'Favro', 'Frana', 'Goeser', 'Guerrini', 'Hamideh', 'Hetu', 'Hnat', 'Hollerbach', 'Kenagy', 'Kregel', 'Lammi', 'Laubacher', 'Madarang', 'Mangine', 'Marut', 'Mcmahen', 'Memoli', 'Milko', 'Morash', 'Mulvehill', 'Nelles', 'Perfecto', 'Perkes', 'Pesantes', 'Peschke', 'Polyakov', 'Preheim', 'Prust', 'Reha', 'Richardt', 'Rockers', 'Sartwell', 'Schedler', 'Scheler', 'Skop', 'Stefko', 'Tatlock', 'Tiley', 'Waldecker', 'Weinbaum', 'Aguallo', 'Benassi', 'Bezio', 'Bockover', 'Dobesh', 'Encina', 'Eversman', 'Haverfield', 'Heigl', 'Holzhauser', 'Liebenow', 'Mesenbrink', 'Mittendorf', 'Normoyle', 'Pickart', 'Rosselot', 'Shigley', 'Skufca', 'Stroot', 'Walth', 'Wernert', 'Lahood', 'Ragain', 'Stumpe', 'Kolle', 'Minerd', 'Dickeson', 'Koone', 'Stoessel', 'Kington', 'Soe', 'Wailes', 'Monet', 'Mccullars', 'Huguenin', 'Warnell', 'Calip', 'Sandles', 'Fayson', 'Balik', 'Bauermeister', 'Bianculli', 'Bin', 'Bring', 'Busenbark', 'Canevari', 'Crile', 'Dyment', 'Egelhoff', 'Elbe', 'Estudillo', 'Feigel', 'Flammer', 'Folta', 'Ghuman', 'Hefferan', 'Hennick', 'Hosner', 'Kilner', 'Liuzzi', 'Maj', 'Massing', 'Nicolaisen', 'Ohlrich', 'Ozdemir', 'Piccininni', 'Prem', 'Primiano', 'Reek', 'Riling', 'Rohweder', 'Rosasco', 'Sandau', 'Santarsiero', 'Schuhmacher', 'Stenseth', 'Stilts', 'Strohmeier', 'Thorell', 'Torr', 'Vaswani', 'Yono', 'Amadon', 'Ballowe', 'Betke', 'Borgwardt', 'Decelle', 'Dibiasio', 'Fieldhouse', 'Hegyi', 'Heuberger', 'Kreiling', 'Montney', 'Sammut', 'Senseney', 'Takenaka', 'Tramonte', 'Zalesky', 'Zumstein', 'Bents', 'Vandersluis', 'Wieringa', 'Houlton', 'Lippens', 'Maino', 'Keeny', 'Bethards', 'Guillette', 'Lenn', 'Minge', 'Masley', 'Christley', 'Gabrielle', 'Bruington', 'Perren', 'Ander', 'Leeb', 'Callicott', 'Peaster', 'Hardister', 'Daughtridge', 'Mclauchlin', 'Culliver', 'Missouri', 'Aloisi', 'Barua', 'Bezek', 'Broshears', 'Busbin', 'Cajamarca', 'Dellarocco', 'Dezeeuw', 'Ferrelli', 'Fieber', 'Fredin', 'Giovannoni', 'Glasner', 'Grenda', 'Haberl', 'Heimsoth', 'Heinl', 'Hellickson', 'Hernandezlopez', 'Huckeby', 'Jungman', 'Langhans', 'Lingelbach', 'Manera', 'Maneri', 'Marzella', 'Mennen', 'Molesworth', 'Nagano', 'Narula', 'Niner', 'Nordhoff', 'Olazabal', 'Perfect', 'Plonka', 'Pund', 'Reincke', 'Schimek', 'Seegert', 'Summar', 'Tanori', 'Trethewey', 'Wehler', 'Wirthlin', 'Wolaver', 'Zuver', 'Bendure', 'Bither', 'Bungert', 'Chaviano', 'Derhammer', 'Disbro', 'Facchini', 'Hoefle', 'Hoepner', 'Kimmes', 'Korus', 'Manfredonia', 'Neuser', 'Samarin', 'Sanghera', 'Sherburn', 'Shiplett', 'Steckelberg', 'Faist', 'Cardy', 'Colan', 'Goodbar', 'Boro', 'Moden', 'Hardick', 'Esteve', 'Rawling', 'Benet', 'Nabers', 'Atkerson', 'Countess', 'Thwaites', 'Caroline', 'Whisonant', 'Alridge', 'Pamphile', 'Abdelnour', 'Allebach', 'Armenti', 'Baudendistel', 'Biers', 'Bockrath', 'Borgert', 'Bovino', 'Burgamy', 'Cadiente', 'Calabretta', 'Cariveau', 'Christoffel', 'Daigler', 'Dannels', 'Darnold', 'Decock', 'Dominski', 'Fest', 'Forren', 'Freise', 'Galperin', 'Hackbart', 'Holtzer', 'Idell', 'Kapala', 'Kohlenberg', 'Kolton', 'Lemburg', 'Lievanos', 'Maranan', 'Marchitto', 'Masini', 'Mayabb', 'Mccrossen', 'Metrick', 'Molinelli', 'Oehlert', 'Parlee', 'Pizzini', 'Polachek', 'Salmans', 'Selbe', 'Sickman', 'Stegmaier', 'Sulek', 'Thall', 'Tiznado', 'Tonini', 'Trostel', 'Warshawsky', 'Aument', 'Byrer', 'Dechaine', 'Fearnow', 'Gallicchio', 'Gertler', 'Greubel', 'Hironaka', 'Kashner', 'Kleffner', 'Korthals', 'Kundinger', 'Lenger', 'Lingafelter', 'Luczynski', 'Ostermeier', 'Petrasek', 'Righetti', 'Tvedt', 'Weindel', 'Wurtzel', 'Zumbro', 'Wikel', 'Burdi', 'Ozturk', 'Parmele', 'Oteri', 'Alexa', 'Erven', 'Keng', 'Fare', 'Sade', 'Saw', 'Jaquay', 'Pillay', 'Kearsley', 'Kirkby', 'Game', 'Herst', 'Vallie', 'Bayon', 'Whitler', 'Pe', 'Lockerman', 'Cogle', 'Rouzer', 'Curling', 'Mandley', 'Kleckley', 'Buckson', 'Risby', 'Averhart', 'Almendariz', 'Angelopoulos', 'Brallier', 'Decaire', 'Deloria', 'Derham', 'Drudge', 'Eckelberry', 'Ehling', 'Engebretsen', 'Ercole', 'Fiscal', 'Gabino', 'Gelvin', 'Giannetto', 'Godeaux', 'Goshert', 'Hedrich', 'Ioannou', 'Jungbluth', 'Kia', 'Krusemark', 'Lader', 'Lythgoe', 'Malinak', 'Mcinvale', 'Melis', 'Metsker', 'Minasyan', 'Nuhfer', 'Omana', 'Parco', 'Pha', 'Phanthavong', 'Proa', 'Sarli', 'Schirtzinger', 'Schlotter', 'Sharrar', 'Spielberg', 'Stelzner', 'Tschudy', 'Utke', 'Weipert', 'Yera', 'Berkemeier', 'Bothun', 'Dalporto', 'Deschler', 'Dragonetti', 'Hasz', 'Holtzinger', 'Kallal', 'Kesinger', 'Kilfoyle', 'Kobylinski', 'Kramme', 'Kreh', 'Lindseth', 'Plaugher', 'Rehfeldt', 'Repine', 'Roudabush', 'Swoveland', 'Teper', 'Tucek', 'Wadding', 'Wenzlick', 'Ghobrial', 'Golberg', 'Soyka', 'Matura', 'Moras', 'Natter', 'Apps', 'Imran', 'Rossel', 'Harne', 'Les', 'Silla', 'Deblanc', 'Rhinehardt', 'Delaware', 'Alkins', 'Laidley', 'Maree', 'Cassells', 'Abdulrahman', 'Cange', 'Devone', 'Eustache', 'Negash', 'Tanks', 'Sivels', 'Cabbagestalk', 'Ahlin', 'Akard', 'Barbaree', 'Bielat', 'Bressman', 'Capurro', 'Cortazar', 'Dauphinee', 'Dornak', 'Eckl', 'Eisenhuth', 'Fazzini', 'Fraim', 'Glaab', 'Glod', 'Guedea', 'Hearty', 'Hinostroza', 'Honold', 'Jostes', 'Korzeniewski', 'Lobell', 'Lopardo', 'Middlekauff', 'Monfils', 'Oshana', 'Schiappa', 'Schubach', 'Servantez', 'Shaler', 'Siverson', 'Slimp', 'Slovacek', 'Staat', 'Strassman', 'Waffle', 'Wuebker', 'Beigel', 'Berardo', 'Berkery', 'Bloyer', 'Cronkright', 'Cuautle', 'Devenny', 'Ghrist', 'Gipple', 'Gwilliam', 'Hunzeker', 'Ierardi', 'Kathol', 'Kienle', 'Krack', 'Loeper', 'Minchey', 'Pecht', 'Schaberg', 'Schollmeyer', 'Siniscalchi', 'Toback', 'Tramp', 'Vandaele', 'Witzig', 'Wivell', 'Moros', 'Saso', 'Gares', 'Heagle', 'Murrillo', 'Stankey', 'Shamon', 'Avram', 'Achor', 'Ovens', 'Rames', 'Perris', 'Kernes', 'Semmes', 'Thaw', 'Stevison', 'Clemetson', 'Belmar', 'Guster', 'Bascomb', 'Adrien', 'Jeanpaul', 'Alabi', 'Jallow', 'Atamian', 'Basque', 'Bubier', 'Casad', 'Czekaj', 'Dejoy', 'Dulworth', 'Fatula', 'Favale', 'Feutz', 'Freundlich', 'Frid', 'Gagan', 'Gaughran', 'Guderian', 'Hagemeister', 'Haser', 'Leibman', 'Meddings', 'Narlock', 'Offenberger', 'Pesa', 'Poupard', 'Raus', 'Repetti', 'Revello', 'Robarts', 'Rowin', 'Saltarelli', 'Sanghvi', 'Schleyer', 'Silba', 'Steuck', 'Stoffers', 'Tangredi', 'Taussig', 'Tiso', 'Wehmeier', 'Zwiefelhofer', 'Bartelson', 'Brabender', 'Cornfield', 'Davtyan', 'Delnero', 'Frontino', 'Gathman', 'Graessle', 'Hinchcliff', 'Houdeshell', 'Kapler', 'Karabin', 'Kerestes', 'Lemmen', 'Merkt', 'Mitro', 'Nahm', 'Nancarrow', 'Novakowski', 'Parraz', 'Revolorio', 'Schamel', 'Scowden', 'Steever', 'Suastegui', 'Villarin', 'Wuellner', 'Dooly', 'Erno', 'Arbelo', 'Groshek', 'Boliver', 'Gane', 'Bees', 'Dowds', 'Newmann', 'Kewley', 'Stile', 'Lobe', 'Skeet', 'Burgen', 'Mckamie', 'Hubanks', 'Suleman', 'Billey', 'Efferson', 'Mcleary', 'Housen', 'Shambley', 'Fanfan', 'Bacca', 'Battaglini', 'Bonfanti', 'Bongers', 'Butzin', 'Caira', 'Councilman', 'Crounse', 'Dadisman', 'Donais', 'Estabrooks', 'Fornoff', 'Froh', 'Gaige', 'Garofolo', 'Grivas', 'Jacuinde', 'Kalmus', 'Kientz', 'Kostenko', 'Kras', 'Lagoy', 'Larzelere', 'Lizer', 'Maric', 'Mayette', 'Mcfeeters', 'Meadowcroft', 'Newgent', 'Parpart', 'Pauwels', 'Perriello', 'Persichetti', 'Proietti', 'Siefring', 'Simones', 'Taliercio', 'Thilges', 'Thumann', 'Thun', 'Tuomi', 'Uhde', 'Umscheid', 'Uran', 'Velador', 'Veltkamp', 'Waddoups', 'Yeley', 'Bihn', 'Bladow', 'Boeh', 'Chadderdon', 'Ensing', 'Fasbender', 'Folkert', 'Goellner', 'Heitmeyer', 'Iovine', 'Klinke', 'Nessel', 'Perleberg', 'Rajagopal', 'Sackmann', 'Sapio', 'Schickling', 'Schliep', 'Siminski', 'Sirrine', 'Sporn', 'Stockburger', 'Tangonan', 'Tarkowski', 'Tartaglione', 'Traum', 'Vanoverbeke', 'Weirauch', 'Wellendorf', 'Wonnacott', 'Camplin', 'Leth', 'Meltz', 'Cavero', 'Florido', 'Tremont', 'Riviello', 'Piotter', 'Munce', 'Trescott', 'Eben', 'Vaillant', 'Furches', 'Bazen', 'Esse', 'Losier', 'Zahir', 'Lazier', 'Lightell', 'Christal', 'Behe', 'Blayney', 'Buchalter', 'Demarsh', 'Dhondt', 'Diefendorf', 'Dillavou', 'Dombkowski', 'Duchow', 'Fettes', 'Gallaga', 'Gallet', 'Haaf', 'Hartinger', 'Jech', 'Klas', 'Kostal', 'Kubler', 'Leisey', 'Leisinger', 'Marinas', 'Mcpeck', 'Miccio', 'Mikkola', 'Morath', 'Olthoff', 'Pacific', 'Penado', 'Petronio', 'Pirani', 'Pitones', 'Pociask', 'Ratay', 'Riesberg', 'Ruberto', 'Sabet', 'Sabic', 'Simonich', 'Skains', 'Skarzynski', 'Spreeman', 'Steig', 'Struckhoff', 'Trolinger', 'Uliano', 'Vaquerano', 'Zukas', 'Zwahlen', 'Amborn', 'Amspacher', 'Azzaro', 'Bartoletti', 'Berkstresser', 'Buboltz', 'Ekstein', 'Fohl', 'Heinzel', 'Hellmer', 'Kapfer', 'Kurka', 'Mccreless', 'Miyahira', 'Nebergall', 'Orlosky', 'Pajor', 'Quartararo', 'Rahilly', 'Rzasa', 'Sabas', 'Slutz', 'Speros', 'Stumpp', 'Tamburo', 'Tesler', 'Tonkovich', 'Urbieta', 'Vallandingham', 'Youngdahl', 'Juliana', 'Rienstra', 'Prideaux', 'Coval', 'Hausen', 'Seith', 'Ny', 'Bian', 'Gressman', 'Yanick', 'Mannina', 'Nater', 'Gurry', 'Vaile', 'Sortor', 'Woodington', 'Apollo', 'Mozley', 'Patience', 'Hearron', 'Milloy', 'Huntsberry', 'Polidore', 'Ridges', 'Bonton', 'Mercadel', 'Alikhan', 'Antis', 'Bartosiewicz', 'Brems', 'Clopper', 'Colato', 'Collver', 'Daino', 'Degrande', 'Dellis', 'Depner', 'Disantis', 'Dolecki', 'Dollens', 'Eliasen', 'Fasig', 'Favinger', 'Furuta', 'Gharibian', 'Gombar', 'Gordo', 'Gornik', 'Gulas', 'Khoshaba', 'Laurita', 'Liby', 'Linhardt', 'Lookabaugh', 'Lorincz', 'Mautner', 'Mcquigg', 'Meine', 'Melaragno', 'Meroney', 'Mikesh', 'Miu', 'Monasterio', 'Navarete', 'Orendain', 'Puricelli', 'Riede', 'Rubis', 'Sandness', 'Schellhase', 'Stehlin', 'Sunder', 'Teaney', 'Terman', 'Tith', 'Totino', 'Tudisco', 'Urwin', 'Vandrunen', 'Vasicek', 'Youtz', 'Berwald', 'Bilow', 'Bubolz', 'Cieslewicz', 'Denbleyker', 'Ensinger', 'Gantenbein', 'Gurnsey', 'Herceg', 'Kless', 'Kollias', 'Leppek', 'Naeve', 'Oncale', 'Pastran', 'Pinyan', 'Porrata', 'Pustejovsky', 'Renko', 'Scioli', 'Sinkhorn', 'Sporrer', 'Tomkiewicz', 'Weisbeck', 'Gautam', 'Gleed', 'Shave', 'Crotzer', 'Demarr', 'Reckard', 'Coyt', 'Norberto', 'Ury', 'Crispen', 'Parcells', 'Meiklejohn', 'Risden', 'Bracker', 'Askari', 'Hyneman', 'Auberry', 'Bruney', 'Weakly', 'Ysaguirre', 'Calender', 'Benison', 'Nazaire', 'Pondexter', 'Fryson', 'Aguino', 'Antonino', 'Babilonia', 'Banfill', 'Beger', 'Berardino', 'Bizub', 'Contractor', 'Convey', 'Cossairt', 'Cruzen', 'Dible', 'Dorning', 'Ellena', 'Fafard', 'Fano', 'Favaro', 'Feeler', 'Foulger', 'Gulbrandson', 'Heckaman', 'Heimerman', 'Herms', 'Hotchkin', 'Jinright', 'Kisler', 'Kontz', 'Kryder', 'Lopezperez', 'Lumm', 'Mcelravy', 'Meditz', 'Melucci', 'Meras', 'Miyahara', 'Musella', 'Nelis', 'Nhem', 'Olivan', 'Popson', 'Presgraves', 'Reindel', 'Riege', 'Rivenburgh', 'Sahl', 'Selberg', 'Tashiro', 'Todorov', 'Toutant', 'Turski', 'Vankuren', 'Westrup', 'Beeney', 'Bickhart', 'Borkenhagen', 'Bukoski', 'Citrin', 'Civello', 'Forstrom', 'Froning', 'Geiler', 'Hargadon', 'Hemric', 'Jeffus', 'Klingele', 'Kooiker', 'Lizalde', 'Nardiello', 'Pestka', 'Pignato', 'Pudwill', 'Rabelo', 'Remund', 'Skluzacek', 'Stegenga', 'Steidle', 'Stenz', 'Terlecki', 'Vanselow', 'Waskey', 'Azhar', 'Wroe', 'Tool', 'Leibert', 'Vary', 'Scovell', 'Derick', 'Arrey', 'Cavness', 'Garley', 'Sholtz', 'Legard', 'Heyliger', 'Thorns', 'Sowells', 'Alemu', 'Aragones', 'Ayllon', 'Baab', 'Blankenbeckler', 'Brengle', 'Burick', 'Deuser', 'Disabato', 'Doddridge', 'Dolinski', 'Economy', 'Ems', 'Hagenow', 'Iwen', 'Kiesler', 'Lehrmann', 'Loisel', 'Mallicoat', 'Mansouri', 'Marse', 'Mccartt', 'Menninger', 'Montee', 'Nappa', 'Ohanesian', 'Podgurski', 'Prosch', 'Puder', 'Ritthaler', 'Rodelo', 'Shipper', 'Shorkey', 'Sirna', 'Smedberg', 'Smink', 'Strahle', 'Troeger', 'Twaddell', 'Vandyk', 'Wandrey', 'Yaworski', 'Zagami', 'Duecker', 'Finlinson', 'Frysinger', 'Grush', 'Knackstedt', 'Morozov', 'Murgia', 'Naffziger', 'Ontko', 'Piltz', 'Roskelley', 'Sonderman', 'Garrand', 'Kopack', 'Theys', 'Sanseverino', 'Budai', 'Selwyn', 'Assante', 'Nary', 'Fildes', 'Tano', 'Hogen', 'Gennett', 'Melka', 'Thorner', 'Grandjean', 'Dury', 'Gerrald', 'Quilling', 'Mccallon', 'Preister', 'Kydd', 'Cranshaw', 'Folson', 'Roker', 'Dockett', 'Stfort', 'Haymer', 'Njie', 'Adamik', 'Aredondo', 'Bathrick', 'Beldin', 'Blackwater', 'Branscom', 'Cappucci', 'Cartelli', 'Carullo', 'Cunneen', 'Davee', 'Deboy', 'Defrates', 'Esham', 'Furio', 'Garverick', 'Gimlin', 'Gosline', 'Gromer', 'Halbig', 'Hasbrook', 'Holgerson', 'Hupfer', 'Jochem', 'Kihn', 'Klotzbach', 'Lantagne', 'Leichter', 'Lerette', 'Lupu', 'Machorro', 'Mieles', 'Mikulec', 'Mirante', 'Nasrallah', 'Piccini', 'Pinkhasov', 'Poplaski', 'Pottenger', 'Rahrig', 'Ranganathan', 'Ravan', 'Righi', 'Rogacki', 'Sadlon', 'Salafia', 'Schlitz', 'Slayback', 'Stetzel', 'Tamargo', 'Tenore', 'Verkuilen', 'Vuncannon', 'Waggle', 'Bacorn', 'Boerema', 'Cimorelli', 'Ciresi', 'Dethlefs', 'Dimarzo', 'Ficco', 'Floresca', 'Gnau', 'Hefel', 'Holbein', 'Klepacki', 'Konigsberg', 'Lienau', 'Malsam', 'Meidl', 'Nawabi', 'Netzley', 'Renbarger', 'Rumbold', 'Sarafian', 'Sonnenfeld', 'Tindol', 'Trettin', 'Tuckerman', 'Vanderweele', 'Weppler', 'Westbay', 'Zaveri', 'Boran', 'Deighan', 'Rothery', 'Yom', 'Gatley', 'Caldron', 'Lucado', 'Dromgoole', 'Novell', 'Sherriff', 'Gerrick', 'Balgobin', 'Danger', 'Sookram', 'Daron', 'Knibbs', 'Faggart', 'Beidleman', 'Russey', 'Lagrand', 'Bluett', 'Glaspy', 'Baldon', 'Trueheart', 'Cradle', 'Asfaw', 'Ballinas', 'Bogdon', 'Brizzi', 'Carrio', 'Cherny', 'Crogan', 'Depierro', 'Dhami', 'Dresden', 'Finnicum', 'Geltz', 'Granade', 'Granieri', 'Guia', 'Hashagen', 'Hollick', 'Jicha', 'Jollie', 'Kathan', 'Malara', 'Manabat', 'Mehall', 'Midcap', 'Mitre', 'Newburg', 'Parveen', 'Pianka', 'Plouff', 'Posillico', 'Ransier', 'Reano', 'Roskam', 'Rufer', 'Schnetzer', 'Scorsone', 'Sitterly', 'Skilton', 'Sohail', 'Starin', 'Stavish', 'Tufaro', 'Vano', 'Vinsant', 'Vlahakis', 'Vondrasek', 'Waldroop', 'Wamboldt', 'Achatz', 'Bomkamp', 'Fetzner', 'Gemmer', 'Haroutunian', 'Hurtig', 'Juncaj', 'Kleban', 'Knier', 'Kopischke', 'Kugelman', 'Lacoss', 'Meulemans', 'Neyens', 'Niccoli', 'Oberhaus', 'Penkala', 'Podoll', 'Roupp', 'Scozzari', 'Siverling', 'Uhls', 'Werber', 'Grealish', 'Montieth', 'Haik', 'Kuri', 'Kanaan', 'Prenatt', 'Dingledine', 'Mccamy', 'Balin', 'Droney', 'Clyatt', 'Ramone', 'Anglen', 'Mathus', 'Bagent', 'Lamarque', 'Arscott', 'Romes', 'Speigner', 'Latouche', 'Tripplett', 'Eversley', 'Aquirre', 'Bernales', 'Bouthillier', 'Cavendish', 'Detienne', 'Dewbre', 'Dimuro', 'Dosh', 'Dunklee', 'Duyck', 'Emilio', 'Ence', 'Garofano', 'Gellis', 'Haertel', 'Handyside', 'Hornburg', 'Jenniges', 'Kallhoff', 'Klontz', 'Langsdorf', 'Leabo', 'Lorette', 'Maracle', 'Merta', 'Muoio', 'Nierenberg', 'Oborn', 'Osorto', 'Ruscitti', 'Santaella', 'Spinnato', 'Stentz', 'Stocke', 'Sundt', 'Thorup', 'Tresch', 'Urdaneta', 'Uttech', 'Vosler', 'Wieand', 'Zacharia', 'Zeleznik', 'Zoucha', 'Zuch', 'Abrell', 'Atiyeh', 'Aydt', 'Cleeton', 'Crisan', 'Cwikla', 'Denz', 'Diesing', 'Emmi', 'Fringer', 'Gibbard', 'Graunke', 'Gschwind', 'Hafele', 'Hoogland', 'Howsare', 'Kesecker', 'Kilgallon', 'Kleyman', 'Kufahl', 'Laut', 'Malstrom', 'Michetti', 'Nosbisch', 'Rasner', 'Rosekrans', 'Schnebly', 'Staebell', 'Theilen', 'Tieszen', 'Mellone', 'Burcher', 'Feister', 'Hoage', 'Irmen', 'Derwin', 'Dien', 'Markins', 'Egnew', 'Dunlow', 'Brickel', 'Curt', 'Smyly', 'Whedbee', 'Larman', 'Boisselle', 'Jaquess', 'Bowns', 'Nile', 'Boyson', 'Phillipps', 'Weech', 'Pillars', 'Cauldwell', 'Wynns', 'Toca', 'Scorza', 'Ramsaran', 'Arkwright', 'Gurganious', 'Jubert', 'Beed', 'Kellem', 'Gervin', 'Yarn', 'Bookhart', 'Sullen', 'Moncrieffe', 'Eze', 'Agyeman', 'Aldea', 'Amodei', 'Attig', 'Bergthold', 'Blaskowski', 'Blitzer', 'Bowring', 'Brenning', 'Chappuis', 'Cordasco', 'Cosens', 'Denoble', 'Dochterman', 'Domek', 'Embleton', 'Georgiades', 'Gintz', 'Grooters', 'Hoell', 'Honse', 'Jagiello', 'Jaskulski', 'Kaluzny', 'Keske', 'Khiev', 'Koeneman', 'Majestic', 'Mandile', 'Marandola', 'Mcinroy', 'Nienhaus', 'Peckenpaugh', 'Raquel', 'Rossler', 'Rusconi', 'Schaffert', 'Schipani', 'Sittner', 'Sweezey', 'Swenor', 'Tagliaferro', 'Tubby', 'Ulep', 'Vallette', 'Westergren', 'Yaros', 'Yasui', 'Anway', 'Bannick', 'Biasi', 'Breitling', 'Catarino', 'Dunaj', 'Giovanelli', 'Hemmerich', 'Iott', 'Knotek', 'Kraeger', 'Laskaris', 'Lomboy', 'Oleski', 'Reibel', 'Rightmyer', 'Salmela', 'Salow', 'Siebels', 'Spielvogel', 'Streitmatter', 'Ucci', 'Windmiller', 'Wojtkiewicz', 'Zirkel', 'Markie', 'Nedeau', 'Froehle', 'Jesson', 'Regala', 'Boody', 'Hayen', 'Ose', 'Loewy', 'Radliff', 'Davia', 'Sky', 'Halker', 'Alu', 'Ey', 'Badawi', 'Yeargain', 'Jeanette', 'Doublin', 'Nolton', 'Streety', 'Blueford', 'Abeles', 'Aldava', 'Alsteen', 'Altadonna', 'Apa', 'Behlke', 'Bellisario', 'Bienstock', 'Brenan', 'Capley', 'Castoro', 'Demir', 'Evinger', 'Gartside', 'Gellatly', 'Goldinger', 'Grabel', 'Henkin', 'Herrle', 'Honegger', 'Kunin', 'Larmer', 'Lizano', 'Lorino', 'Malcomson', 'Matesic', 'Mathiasen', 'Mccolm', 'Meenach', 'Mullady', 'Neiderer', 'Ogier', 'Omura', 'Plog', 'Pomplun', 'Procida', 'Raisbeck', 'Rastetter', 'Reither', 'Rettberg', 'Roblee', 'Rossitto', 'Scahill', 'Schmoker', 'Segreto', 'Shelstad', 'Shwartz', 'Sondgeroth', 'Supnet', 'Swartzbaugh', 'Tkachenko', 'Urbani', 'Vanslooten', 'Varricchio', 'Villarino', 'Whiston', 'Wyffels', 'Yehle', 'Basinski', 'Belvedere', 'Bernabei', 'Bolotin', 'Bresett', 'Dabkowski', 'Dalsanto', 'Gotwalt', 'Hellberg', 'Hunke', 'Kroenke', 'Leppla', 'Luginbuhl', 'Mimnaugh', 'Mullenbach', 'Nearhood', 'Raser', 'Resendis', 'Seydel', 'Sozio', 'Stillions', 'Stormont', 'Strimple', 'Toruno', 'Trouten', 'Tryba', 'Vandalen', 'Wilhelmy', 'Orland', 'Loui', 'Morcos', 'Radell', 'Artus', 'Truxillo', 'Copelan', 'Bress', 'Unthank', 'Sudlow', 'Branden', 'Rowzee', 'Montreuil', 'Sollers', 'Umar', 'Coulibaly', 'Allegretto', 'Andreen', 'Bielicki', 'Bustard', 'Cardosi', 'Carkhuff', 'Cetina', 'Clouthier', 'Dolata', 'Fiola', 'Fjeld', 'Gawthrop', 'Glastetter', 'Hamlyn', 'Hanten', 'Huerter', 'Kreiss', 'Lestrange', 'Litzau', 'Luberto', 'Menconi', 'Milosevic', 'Munera', 'Nachtigal', 'Nethers', 'Nicolaou', 'Olund', 'Paddack', 'Pfiester', 'Pilley', 'Polendo', 'Porcayo', 'Preast', 'Runquist', 'Saccente', 'Santoli', 'Saragoza', 'Selway', 'Smestad', 'Stebner', 'Toben', 'Trapnell', 'Urschel', 'Verno', 'Vidovich', 'Walterscheid', 'Yoh', 'Zmijewski', 'Allwein', 'Bessire', 'Broering', 'Budzik', 'Denherder', 'Goerner', 'Goldbaum', 'Grussing', 'Huaracha', 'Ippoliti', 'Kanak', 'Kaucher', 'Kious', 'Kirkner', 'Kratzke', 'Kubisiak', 'Kueny', 'Mazzilli', 'Mazzo', 'Mcclenathan', 'Mehlberg', 'Miotke', 'Nihiser', 'Olheiser', 'Oravetz', 'Radwanski', 'Shinsato', 'Vandekamp', 'Zagata', 'Abert', 'Llera', 'Thommen', 'Wirkkala', 'Brasuell', 'Shawler', 'Mourey', 'Gavia', 'Morgano', 'Newill', 'Rathel', 'Wist', 'Braner', 'Soman', 'Koskey', 'Searson', 'Brocksmith', 'Peale', 'Couzens', 'Shall', 'Anis', 'Stanly', 'Cauthorn', 'Kinkle', 'Laughinghouse', 'Mellette', 'Rox', 'Demetrius', 'Cullars', 'Summons', 'Banwart', 'Bartl', 'Bebb', 'Bobier', 'Bogdanoff', 'Bollmann', 'Borrowman', 'Borseth', 'Buttitta', 'Canelo', 'Cassedy', 'Cata', 'Crivelli', 'Daane', 'Dhingra', 'Dipple', 'Dovidio', 'Duesler', 'Eissler', 'Ent', 'Falotico', 'Goodrick', 'Goupil', 'Huels', 'Keithly', 'Killilea', 'Klausing', 'Kludt', 'Licitra', 'Llerenas', 'Merolla', 'Oatley', 'Osmanovic', 'Poudrier', 'Raben', 'Realmuto', 'Reczek', 'Ricchio', 'Rossner', 'Rozak', 'Sandora', 'Schuenemann', 'Seres', 'Shoptaw', 'Splitt', 'Tonkinson', 'Willardson', 'Winterberg', 'Zayac', 'Bobzien', 'Buhman', 'Carotenuto', 'Chynoweth', 'Defenbaugh', 'Dipiero', 'Duve', 'Goonan', 'Gragert', 'Hangartner', 'Heemstra', 'Hensch', 'Hollatz', 'Jakubowicz', 'Kapaun', 'Kiener', 'Landesman', 'Lenzini', 'Longbottom', 'Parde', 'Pincock', 'Schlicker', 'Shankel', 'Vidas', 'Waisner', 'Zilberman', 'Allcock', 'Durban', 'Javid', 'Shoda', 'Edes', 'Boxwell', 'Dezern', 'Rubley', 'Angelica', 'Jeannette', 'Planer', 'Pata', 'Lothridge', 'Lucks', 'Bais', 'Sandra', 'Enwright', 'Maxton', 'Radway', 'Hoof', 'Morisset', 'Danzey', 'Ancar', 'Mcwright', 'Leggs', 'Monestime', 'Massaquoi', 'Barkow', 'Bastyr', 'Bautz', 'Behanna', 'Bewick', 'Bezdek', 'Bielby', 'Bretschneider', 'Bugher', 'Carchi', 'Chapp', 'Conser', 'Crete', 'Derflinger', 'Elsbernd', 'Freimark', 'Gerwin', 'Grunfeld', 'Harpham', 'Hoeschen', 'Holmlund', 'Horch', 'Hulsebus', 'Kassabian', 'Konczal', 'Korell', 'Lacuesta', 'Lantier', 'Larowe', 'Lietzke', 'Lunny', 'Masin', 'Massicotte', 'Michalsky', 'Notarianni', 'Pautsch', 'Poppy', 'Sukup', 'Suleski', 'Tafel', 'Wanninger', 'Zaffino', 'Zody', 'Arganbright', 'Bohmer', 'Cintora', 'Connatser', 'Dlugos', 'Fariello', 'Fedie', 'Felicetti', 'Garno', 'Gottsch', 'Gratzer', 'Gubser', 'Kappelman', 'Kuechle', 'Laningham', 'Latsch', 'Longie', 'Luscher', 'Lybeck', 'Rhude', 'Setterlund', 'Sobh', 'Sonneborn', 'Villamizar', 'Wolstenholme', 'Zacek', 'Leppanen', 'Casdorph', 'Pinsker', 'Reutov', 'Rede', 'Sheck', 'Bakley', 'Radde', 'Moher', 'Khader', 'Rossie', 'Scriver', 'Provine', 'Debarge', 'Darke', 'Griswell', 'Naji', 'Frere', 'Cheevers', 'Schnyder', 'Curb', 'Luten', 'Cashaw', 'Agerton', 'Barnier', 'Bluestone', 'Boward', 'Boyar', 'Briano', 'Bryngelson', 'Calef', 'Caraher', 'Castelluccio', 'Conk', 'Crewse', 'Demarzo', 'Deutschman', 'Eckrote', 'Edmister', 'Ferg', 'Ghan', 'Giampaolo', 'Goedecke', 'Gonet', 'Gradel', 'Gregston', 'Grzesiak', 'Guallpa', 'Hanline', 'Hardyman', 'Hogate', 'Houg', 'Justiss', 'Kaps', 'Klopf', 'Kniskern', 'Laneve', 'Lenhoff', 'Lojewski', 'Melott', 'Milillo', 'Passage', 'Pereyda', 'Plack', 'Poet', 'Prospero', 'Quadros', 'Revelo', 'Rogier', 'Sanabia', 'Tragesser', 'Vanarsdall', 'Vanausdal', 'Verbrugge', 'Wandler', 'Zoss', 'Balzarini', 'Brotz', 'Bulin', 'Bumann', 'Cancro', 'Centner', 'Deblasi', 'Duesing', 'Friedley', 'Frieling', 'Heinke', 'Holzheimer', 'Klinck', 'Knouff', 'Kuczek', 'Leible', 'Lerum', 'Liddicoat', 'Mikowski', 'Nonaka', 'Ohlman', 'Picaso', 'Plamann', 'Porretta', 'Prajapati', 'Rancour', 'Stepka', 'Studzinski', 'Vaysman', 'Wallenstein', 'Wunderlin', 'Pattinson', 'Siskind', 'Sitzer', 'Thuman', 'Barella', 'Brillon', 'Arnholt', 'Karge', 'Dohman', 'Morone', 'Macie', 'Aken', 'Lye', 'Student', 'Westen', 'Bonsell', 'Komara', 'Hafiz', 'Stickland', 'Morina', 'Creekmur', 'Hussien', 'Walrond', 'Louischarles', 'Alkema', 'Angert', 'Arcidiacono', 'Ashkar', 'Bookbinder', 'Bootz', 'Cilia', 'Devilla', 'Difatta', 'Enberg', 'Enderby', 'Forbess', 'Frutiger', 'Graefe', 'Guenette', 'Hauschildt', 'Keirsey', 'Kolka', 'Kopelman', 'Lewan', 'Mcluckie', 'Mia', 'Moebius', 'Oestreicher', 'Oprea', 'Ortolano', 'Padovani', 'Pensabene', 'Phimmasone', 'Pointon', 'Punches', 'Schertzer', 'Seoane', 'Skramstad', 'Sorlie', 'Syfert', 'Tasca', 'Townzen', 'Wernli', 'Wurzel', 'Yazdi', 'Devendorf', 'Featherly', 'Frush', 'Heringer', 'Iwai', 'Kallenberger', 'Kobashigawa', 'Langbehn', 'Livecchi', 'Middlesworth', 'Niess', 'Osterlund', 'Ruz', 'Seiwert', 'Vanwieren', 'Wernet', 'Grabbe', 'Gaugh', 'Mcclarren', 'Raudales', 'Urry', 'Clere', 'Lacer', 'Mathia', 'Mccrumb', 'Cotrell', 'Mannor', 'Medine', 'Tittsworth', 'Hughston', 'Buick', 'Limes', 'Hams', 'Thagard', 'Leavelle', ]; export const maxStringLength = 15; ================================================ FILE: drizzle-seed/src/datasets/loremIpsumSentences.ts ================================================ /** * Data was generated, using https://www.lipsum.com/ */ export default [ 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.', 'Nam porta quis ex a blandit.', 'Donec ullamcorper erat sed diam luctus, eu euismod nibh eleifend.', 'Curabitur sit amet tortor vehicula lacus mollis efficitur eu feugiat tortor.', 'Quisque in erat vitae nisl tristique blandit.', 'Vivamus in lectus tellus.', 'Donec quis neque sit amet diam elementum accumsan.', 'Sed vitae sollicitudin tellus, sed rhoncus magna.', 'Aliquam eu interdum purus, sed viverra lorem.', 'Etiam eget viverra dui.', 'Morbi vel risus dolor.', 'Donec laoreet, ipsum sed vestibulum venenatis, ligula leo fermentum enim, in pharetra lorem massa volutpat metus.', 'Aliquam egestas mi in urna blandit, quis viverra justo condimentum.', 'Maecenas pulvinar quam sapien, sed euismod enim rhoncus quis.', 'Maecenas at quam non elit varius rutrum.', 'Orci varius natoque penatibus et magnis dis parturient montes, nascetur ridiculus mus.', 'Quisque et malesuada erat.', 'Maecenas eleifend tellus eu luctus tempor.', 'Cras at scelerisque massa, quis dapibus urna.', 'Aliquam porttitor a risus quis luctus.', 'Aenean mollis ex tempor ligula cursus, interdum porttitor nibh fringilla.', 'Donec aliquet ac nulla nec scelerisque.', 'Curabitur neque diam, posuere nec tortor a, posuere pretium odio.', 'Nullam et vehicula ante.', 'Etiam mattis, odio quis sodales maximus, nisl lectus sagittis ligula, quis ornare urna nibh ac est.', 'Pellentesque eget finibus eros.', 'Maecenas gravida risus vitae vestibulum facilisis.', 'Sed rhoncus libero fringilla arcu viverra tempus.', 'Suspendisse non lacus vitae urna viverra vehicula.', 'Pellentesque eu elementum enim.', 'Morbi aliquet nisl eu accumsan rhoncus.', 'Ut fringilla dolor ut odio blandit, et dignissim lectus placerat.', 'Aliquam vulputate mauris elit, in semper purus accumsan tempor.', 'Sed at elit ut ligula bibendum tincidunt.', 'Maecenas ut tristique ipsum, ac sollicitudin quam.', 'Pellentesque ut ante quis tellus pellentesque tempus.', 'Nulla suscipit ex eget ex cursus accumsan.', 'Sed at purus sapien.', 'Fusce feugiat ante ac massa aliquam, maximus bibendum arcu convallis.', 'Interdum et malesuada fames ac ante ipsum primis in faucibus.', 'Cras vitae dignissim leo, ac pretium est.', 'Aliquam lectus lectus, varius in eros eget, tempus sollicitudin ex.', 'Nunc gravida mi lectus, tincidunt ultrices sapien lobortis cursus.', 'Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos.', 'Maecenas aliquam vulputate justo vel lacinia.', 'Nam facilisis augue vitae dolor mattis, sit amet maximus orci molestie.', 'Etiam et nibh id lorem viverra aliquet.', 'Quisque et mauris et odio finibus ullamcorper id eget odio.', 'Duis sit amet varius purus.', 'In congue posuere libero, nec tincidunt dui suscipit ac.', 'Vivamus suscipit risus vel massa commodo pulvinar vitae eu diam.', 'Mauris porta non orci at dapibus.', 'Sed ullamcorper, sem ac fringilla tristique, purus massa hendrerit turpis, at elementum massa nulla nec quam.', 'Praesent sed felis vitae felis vestibulum hendrerit vel at ipsum.', 'Nunc egestas, lectus feugiat consequat auctor, erat mauris pretium sapien, et consequat magna ex id purus.', 'Maecenas nibh ex, bibendum at augue eget, pulvinar cursus libero.', 'Quisque ultricies vestibulum neque, in sollicitudin felis euismod in.', 'Maecenas viverra mauris sit amet neque vulputate, sed suscipit sapien laoreet.', 'Sed vitae sapien maximus, faucibus enim a, placerat erat.', 'Cras maximus ipsum nec dui fermentum, eu facilisis augue fringilla.', 'Sed eget nibh ante.', 'Praesent pellentesque sodales tellus non consectetur.', 'Suspendisse pulvinar, massa id gravida facilisis, diam nulla molestie metus, et convallis purus elit quis sapien.', 'Mauris fermentum nec metus id consectetur.', 'Pellentesque habitant morbi tristique senectus et netus et malesuada fames ac turpis egestas.', 'Nulla vel maximus nunc.', 'Duis dolor orci, tempor nec odio at, gravida congue ex.', 'Pellentesque faucibus, est et eleifend commodo, ipsum nunc lobortis felis, at aliquam erat leo eu massa.', 'Morbi egestas vehicula lacus, in pharetra nulla dictum in.', 'Ut facilisis, erat eu suscipit mollis, ipsum ex sagittis augue, sit amet vehicula neque nunc ut leo.', 'Donec dapibus non odio non auctor.', 'Donec vitae ipsum eget risus vulputate vestibulum.', 'Cras vestibulum purus leo, in porttitor erat finibus quis.', 'Vivamus tincidunt justo diam, placerat maximus orci congue ac.', 'Curabitur pulvinar congue accumsan.', 'Vivamus eget velit dictum, sagittis diam eu, elementum diam.', 'Quisque pharetra pellentesque purus at tristique.', 'Cras dignissim arcu massa, eu pellentesque eros tristique id.', 'Proin efficitur turpis vel sem ultricies molestie.', 'Curabitur rhoncus viverra nibh ut sollicitudin.', 'Vestibulum ut magna dolor.', 'Suspendisse placerat eleifend lorem, at aliquet enim lacinia ut.', 'Integer at nisi eu ex viverra viverra.', 'Morbi finibus bibendum volutpat.', 'Donec facilisis sem id eros tempor vehicula.', 'Phasellus a dolor in dolor finibus iaculis et at quam.', 'Phasellus volutpat nulla eget mauris blandit pharetra ut sit amet augue.', 'Phasellus leo urna, ornare ut mauris ultrices, posuere imperdiet dui.', 'Morbi accumsan bibendum neque, sit amet eleifend nunc bibendum nec.', 'Praesent dapibus tristique tempor.', 'Duis dapibus nulla in lectus luctus, nec blandit sem tristique.', 'In odio dolor, consectetur eget sapien egestas, viverra pharetra urna.', 'Nam risus est, suscipit fermentum tincidunt id, vehicula vitae arcu.', 'Aenean venenatis pretium condimentum.', 'Mauris lobortis blandit dapibus.', 'Phasellus aliquet efficitur condimentum.', 'Nam pulvinar ullamcorper metus ac vehicula.', 'Donec eget auctor tellus.', 'Morbi quis diam ultrices, tristique lectus eu, vehicula dolor.', 'Aenean malesuada lorem sed vestibulum rutrum.', 'Praesent quis metus id quam facilisis blandit.', 'Proin venenatis eleifend augue nec gravida.', 'Nulla eget vehicula mauris, vel rutrum ligula.', 'Ut interdum aliquam fermentum.', 'Morbi elementum metus ut velit pellentesque lacinia.', 'Suspendisse malesuada est sed varius rhoncus.', 'Sed eu porta ex.', 'Nullam dignissim egestas dapibus.', 'Vestibulum a pharetra ipsum.', 'Donec in interdum diam.', 'Morbi viverra id sem quis mollis.', 'In eget porta lorem.', 'Aliquam tincidunt feugiat magna, vel finibus odio rutrum sit amet.', 'Morbi faucibus metus at vehicula efficitur.', 'Cras metus lectus, egestas lacinia leo vitae, lacinia dapibus quam.', 'Morbi tincidunt ut velit sed hendrerit.', 'Vivamus eleifend at leo porttitor blandit.', 'Morbi egestas diam augue, vel condimentum odio pulvinar id.', 'Morbi porta vulputate ante sed lacinia.', 'Fusce massa est, varius et lacinia sit amet, dictum at turpis.', 'Vestibulum viverra augue elit, eget tristique ipsum accumsan vitae.', 'Sed sit amet ex sapien.', 'Mauris dapibus tincidunt scelerisque.', 'Aliquam nunc libero, vestibulum id facilisis in, sollicitudin vitae nulla.', 'Aenean a nulla commodo, rutrum orci eget, pellentesque erat.', 'Aenean ut sem felis.', 'Donec sapien ante, ornare sit amet ornare id, mattis lobortis tellus.', 'Nam ut placerat metus.', 'Vivamus in cursus eros, sit amet scelerisque mauris.', 'Integer tempus, justo vel aliquet aliquam, mi libero iaculis leo, placerat sollicitudin mauris ipsum faucibus justo.', 'Cras at vehicula urna.', 'Phasellus id nunc eu enim ultricies hendrerit.', 'Nulla sodales sodales orci in placerat.', 'Donec placerat, justo in imperdiet euismod, nulla metus pharetra nibh, nec auctor tortor mauris ac augue.', 'Donec at elit non odio malesuada consequat non id velit.', 'Morbi pellentesque eleifend iaculis.', 'Aliquam ullamcorper lacinia vulputate.', 'Nulla commodo risus et efficitur mollis.', 'In venenatis consectetur metus, in iaculis ligula bibendum fermentum.', 'Nullam ac finibus nisl.', 'Aenean blandit sagittis justo, ut cursus tortor vehicula vel.', 'Integer at pulvinar eros, sed dictum ex.', 'Phasellus bibendum interdum porttitor.', 'Fusce blandit egestas nisl, quis mattis elit commodo in.', 'Donec in ex justo.', 'Aenean elementum tristique eros, vel mattis tellus malesuada nec.', 'Quisque euismod tincidunt erat.', 'Proin turpis orci, vehicula vitae ipsum et, auctor ornare ex.', 'Nunc efficitur nisl sit amet justo faucibus, eu bibendum diam pretium.', 'Nullam consectetur finibus dui at malesuada.', 'Ut elementum, ante vitae gravida feugiat, orci enim molestie libero, ut vehicula purus ipsum a eros.', 'Ut mi neque, vestibulum nec nibh eu, imperdiet elementum ipsum.', 'Donec cursus augue quis ex rutrum lacinia.', 'Mauris purus mi, pellentesque at leo in, auctor ultrices massa.', 'Maecenas finibus quam quis arcu mattis porttitor.', 'Suspendisse ac urna ac odio aliquet congue.', 'Integer suscipit, odio in ullamcorper ornare, diam nibh elementum eros, a aliquam lacus velit vel mauris.', 'Quisque ut bibendum risus.', 'Suspendisse bibendum augue pellentesque, dapibus leo ac, luctus purus.', 'Phasellus interdum ipsum sit amet elit rhoncus varius.', 'Pellentesque pharetra lorem et nibh aliquam, vel luctus elit sodales.', 'Maecenas ornare cursus metus in efficitur.', 'Phasellus laoreet ipsum nec erat mattis, vitae vulputate risus auctor.', 'Maecenas augue magna, mattis elementum dapibus sed, vulputate venenatis ante.', 'Fusce non lorem vitae velit molestie auctor.', 'Etiam sodales, orci sed consequat luctus, ante urna hendrerit ipsum, at ultrices mauris neque in velit.', 'Vestibulum a egestas ipsum.', 'Donec sit amet laoreet mi.', 'Quisque varius ligula dolor.', 'Morbi sodales volutpat nulla, et ullamcorper lacus bibendum at.', 'In mattis in dui quis facilisis.', 'Cras pulvinar, massa eu rhoncus rhoncus, mi mi ultricies turpis, eu iaculis elit nulla et metus.', 'Donec et neque suscipit, iaculis ipsum at, maximus eros.', 'Quisque eu accumsan risus.', 'Phasellus in consectetur nisl.', 'Nullam interdum porttitor enim fermentum bibendum.', 'Vestibulum consequat fermentum mollis.', 'Quisque id velit sit amet magna posuere aliquam vel in nunc.', 'Maecenas nisl lectus, sollicitudin eu auctor nec, cursus vel quam.', 'Proin elementum efficitur velit vel vestibulum.', 'Nunc non tincidunt ex.', 'Fusce nec nisl eget nunc fringilla dignissim vel nec ex.', 'Cras malesuada erat quis ligula lacinia consectetur.', 'Aliquam semper elit ante, sed accumsan lacus molestie in.', 'Vivamus porttitor enim eros, eu ultricies lectus pulvinar eget.', 'Nullam consequat tincidunt ligula, eu luctus nisi congue id.', 'Aenean lacinia lobortis ante, fermentum vulputate turpis eleifend faucibus.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Phasellus tempus libero non ipsum cursus rhoncus.', 'Sed euismod molestie augue vitae fringilla.', 'Pellentesque mi tortor, tempor quis condimentum quis, lobortis quis quam.', 'Sed in vestibulum purus, in vestibulum neque.', 'Etiam pellentesque ligula ligula, sit amet varius mi venenatis fringilla.', 'Morbi vitae est ac diam convallis sagittis in facilisis ligula.', 'Sed vel consequat diam.', 'Nunc ac tempor felis.', 'Aenean luctus tristique urna lacinia venenatis.', 'Suspendisse vehicula auctor accumsan.', 'Suspendisse ultrices rhoncus nisi a pellentesque.', 'Sed sollicitudin id orci ut laoreet.', 'Cras pulvinar lorem ut ipsum malesuada, sed euismod turpis placerat.', 'Ut vitae massa quis augue posuere ultricies.', 'In quis erat posuere, posuere dolor ac, tempus tortor.', 'Aliquam aliquet nisl eu tortor mollis, id dictum nisi congue.', 'Etiam pulvinar, ex a tincidunt bibendum, nisl elit venenatis lacus, nec dictum odio ligula non nulla.', 'Etiam sit amet nunc vestibulum, pharetra diam ac, lacinia felis.', 'Quisque volutpat laoreet lorem, sit amet porta justo ultrices aliquet.', 'Praesent aliquet nisi elit, ut facilisis orci accumsan vitae.', 'Quisque vehicula augue at leo varius, ac dictum tortor viverra.', 'Proin eu bibendum diam.', 'Aliquam blandit, erat et feugiat varius, erat mauris convallis ipsum, ut convallis massa erat vel neque.', 'Sed commodo nec ipsum in maximus.', 'Pellentesque ligula nisl, tincidunt volutpat convallis non, interdum quis felis.', 'Nunc ultrices neque ut diam congue, non tristique metus tempor.', 'Pellentesque sodales metus leo, at eleifend dui pretium at.', 'Suspendisse sit amet metus at est viverra fermentum.', 'Donec ac odio vitae urna blandit consectetur.', 'Vivamus tincidunt cursus nunc in mollis.', 'Nullam malesuada quis odio eu imperdiet.', 'Integer convallis sapien vitae semper varius.', 'Nullam malesuada tincidunt lacus elementum condimentum.', 'Nam eget neque vitae leo convallis aliquam id eu quam.', 'Quisque aliquet elementum lectus, vitae pharetra nisl facilisis at.', 'Fusce ut velit porttitor, porta erat ac, vehicula odio.', 'Sed tempor est at nulla mollis aliquet.', 'Quisque luctus dolor eu placerat ultrices.', 'Vivamus luctus ex non ante pretium venenatis.', 'Ut non arcu vitae velit pellentesque accumsan eget id risus.', 'Pellentesque accumsan elementum turpis, a aliquam dui sodales nec.', 'Donec quis semper tortor, scelerisque venenatis velit.', 'Morbi tempus lacus pretium risus rhoncus, tincidunt lacinia diam dapibus.', 'Donec libero neque, aliquet non aliquet et, mollis at est.', 'Fusce mauris tortor, molestie ut porttitor nec, euismod consequat metus.', 'Maecenas in nunc blandit, sagittis orci sed, fringilla risus.', 'Suspendisse vel vulputate velit.', 'Nulla aliquam facilisis velit.', 'Donec placerat porttitor sapien.', 'Quisque non pharetra mi.', 'Suspendisse mattis justo nec arcu efficitur, nec suscipit mi tempor.', 'Sed et dui vitae nisi accumsan faucibus nec vel odio.', 'Donec at lacus eget nisi ultricies efficitur.', 'Aenean ultricies elit eget mi consectetur imperdiet.', 'Ut lorem magna, ullamcorper sit amet dui quis, pulvinar cursus felis.', 'Morbi ligula nibh, fermentum nec pellentesque eget, sodales in sapien.', 'Sed eu vehicula mi.', 'Vestibulum et erat erat.', 'Maecenas eleifend ultricies erat eget vehicula.', 'Donec varius lectus ut metus finibus pellentesque.', 'Aliquam nec orci scelerisque, elementum odio non, aliquet ante.', 'Nulla eget nisi ac magna aliquet efficitur vitae sed felis.', 'Suspendisse purus erat, blandit eget leo quis, iaculis vestibulum sapien.', 'Vivamus rutrum, leo ac suscipit tincidunt, ipsum sem volutpat purus, quis sodales augue lacus id mi.', 'Aenean interdum ac turpis eu viverra.', 'Suspendisse rhoncus rutrum augue.', 'Ut dolor lectus, rutrum et metus et, volutpat sagittis urna.', 'Donec blandit tortor sed pellentesque maximus.', 'Phasellus molestie congue erat, ut euismod leo pulvinar nec.', 'Nulla elementum vestibulum libero vehicula aliquet.', 'Sed venenatis enim eu nisi laoreet, sit amet sagittis magna gravida.', 'Suspendisse semper molestie ligula sit amet lobortis.', 'Nulla urna eros, condimentum a odio id, aliquet scelerisque justo.', 'Suspendisse sit amet orci ante.', 'Sed congue sem sapien, ac ornare nibh porta efficitur.', 'Nullam suscipit, lectus ac gravida ultrices, lectus neque viverra sem, sit amet eleifend purus felis vulputate odio.', 'In velit lacus, facilisis quis nunc vitae, imperdiet bibendum mauris.', 'Duis iaculis sodales turpis, vestibulum rutrum eros efficitur ac.', 'Aenean interdum congue libero vel suscipit.', 'Quisque pharetra semper lorem ac posuere.', 'Mauris viverra neque pellentesque, semper augue id, placerat arcu.', 'Mauris sit amet bibendum nisi, a laoreet ipsum.', 'Proin tristique auctor massa convallis imperdiet.', 'Curabitur congue sed neque vel imperdiet.', 'Quisque egestas metus at diam feugiat, vestibulum ornare nulla venenatis.', 'Donec non hendrerit urna.', 'Curabitur id justo ex.', 'Sed consectetur urna a purus egestas, a fringilla leo scelerisque.', 'Morbi interdum massa sed ligula dapibus semper.', 'Nam sit amet condimentum erat.', 'Nam vel magna porta, ultrices nisl eu, lacinia lorem.', 'Pellentesque accumsan, felis sit amet elementum tincidunt, risus arcu bibendum eros, vitae commodo justo orci vitae ex.', 'Vestibulum eu fermentum lacus.', 'Nulla quis pulvinar metus.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Curabitur malesuada non erat vitae dapibus.', 'Donec sit amet facilisis ante, vitae tristique risus.', 'Vestibulum maximus vehicula purus sed tincidunt.', 'Fusce facilisis odio et fermentum dapibus.', 'In hac habitasse platea dictumst.', 'Etiam et viverra felis, in vulputate enim.', 'Donec vel pulvinar elit.', 'Mauris quis velit suscipit, accumsan libero eget, consectetur sapien.', 'Maecenas vel placerat justo, sit amet eleifend sem.', 'Pellentesque cursus felis enim, vitae convallis lectus finibus et.', 'Aliquam eu dolor eros.', 'Cras dictum, est quis porttitor semper, turpis lacus maximus eros, at luctus diam orci et elit.', 'Nulla id augue tincidunt, sollicitudin mi vel, pellentesque mi.', 'Vestibulum ultricies turpis a congue rutrum.', 'Praesent sed dictum nunc.', 'Donec finibus commodo ligula non tincidunt.', 'Aliquam eget tellus velit.', 'Proin tempor elit at nulla commodo molestie.', 'Morbi ultricies sit amet tortor eu porttitor.', 'Duis congue elit ac porttitor lobortis.', 'Sed fringilla mi pretium lacinia finibus.', 'Maecenas faucibus metus sed ipsum tristique, sed vulputate odio bibendum.', 'Nulla facilisi.', 'Nulla non dapibus nibh, id ultricies augue.', 'Vestibulum vitae lorem neque.', 'Maecenas non augue nibh.', 'Suspendisse laoreet dapibus auctor.', 'In sit amet lorem eget purus ultrices tincidunt ut at neque.', 'Fusce congue, nunc sit amet lobortis pellentesque, sapien ex rutrum elit, vel gravida nulla velit sed dui.', 'Cras nec hendrerit sapien, eu euismod elit.', 'Maecenas lobortis egestas interdum.', 'Nunc sagittis bibendum erat sit amet varius.', 'Mauris varius nunc at odio facilisis facilisis.', 'Sed maximus sit amet urna vitae aliquam.', 'Donec vel ipsum sed sapien aliquet pharetra id vitae leo.', 'Sed vitae diam elit.', 'Nam tempor, risus nec gravida mollis, velit neque efficitur leo, sit amet porta purus magna euismod ipsum.', 'Suspendisse potenti.', 'Donec interdum vulputate lorem, vitae pellentesque sem mollis at.', 'Vivamus vitae faucibus libero.', 'Etiam laoreet semper accumsan.', 'Morbi imperdiet, ex ut fringilla fermentum, sapien ipsum efficitur magna, a ultrices purus massa at nibh.', 'Sed a dolor euismod, facilisis ex eget, efficitur est.', 'Phasellus eleifend, felis quis convallis semper, dui magna accumsan leo, a mattis magna urna in ligula.', 'Donec porta sollicitudin vestibulum.', 'Nunc et nisl in ligula iaculis rutrum id consequat neque.', 'Nunc sed purus quis felis aliquet accumsan.', 'Aliquam congue placerat condimentum.', 'Proin ultrices condimentum facilisis.', 'Aenean in faucibus odio.', 'Pellentesque enim ex, mattis non risus ut, euismod imperdiet lorem.', 'Integer quis magna non risus luctus posuere.', 'Vestibulum pellentesque suscipit arcu, id dignissim leo rhoncus nec.', 'Praesent vitae sodales quam.', 'Morbi viverra nibh quam, quis dapibus nibh consequat sed.', 'Morbi sed risus sollicitudin, tincidunt augue vel, posuere orci.', 'Nunc nisi nisi, varius ac commodo ac, placerat hendrerit nisi.', 'Donec sapien magna, elementum eu mi vitae, laoreet euismod turpis.', 'Cras eget pretium eros.', 'Sed tincidunt ante id tortor porta, ac pellentesque erat suscipit.', 'Fusce consequat nisi dolor, eget tincidunt tellus imperdiet at.', 'Nullam scelerisque commodo eleifend.', 'Mauris et nisl bibendum, varius sem at, sollicitudin libero.', 'Quisque purus felis, tristique id ligula in, ullamcorper pellentesque felis.', 'Phasellus et tortor ut sem rhoncus suscipit ac eget elit.', 'Donec rhoncus ex finibus neque volutpat, ut placerat metus gravida.', 'Suspendisse at sem id diam efficitur dapibus.', 'Aliquam erat volutpat.', 'Nunc ac nibh eget augue sodales ornare.', 'Integer ultricies neque at felis aliquam, vel interdum felis mollis.', 'Nulla iaculis libero velit, a consequat eros hendrerit venenatis.', 'Etiam aliquet eros magna, ut ultricies metus feugiat vitae.', 'Maecenas est orci, accumsan eu eleifend vitae, sollicitudin vitae metus.', 'Sed aliquet, tellus sed euismod sodales, lectus leo imperdiet dui, eu luctus mauris turpis id turpis.', 'Sed eget accumsan felis, viverra euismod nulla.', 'Nullam convallis odio consectetur nisl tempus, sed dictum urna tempor.', 'Proin scelerisque elit tortor, a ultricies odio ullamcorper vel.', 'Etiam ultrices congue neque ac sollicitudin.', 'Ut placerat consectetur sapien ut rhoncus.', 'Ut aliquam quam nec ornare fermentum.', 'Vivamus aliquet facilisis magna.', 'Vestibulum dictum sed leo non cursus.', 'Morbi egestas et augue fringilla bibendum.', 'Etiam vel maximus tellus.', 'Praesent et turpis justo.', 'Morbi a hendrerit diam, cursus posuere lorem.', 'In sed sem id eros dignissim tincidunt.', 'Nullam porta varius risus at ullamcorper.', 'Nam varius sodales dolor, dapibus rutrum ligula vulputate at.', 'Nam ultricies sed ante eget convallis.', 'Duis ultrices est ac orci auctor, et malesuada neque sodales.', 'Aliquam venenatis sodales aliquam.', 'Phasellus ut lectus id sapien dictum luctus a vitae nibh.', 'Sed euismod varius malesuada.', 'Ut faucibus ultricies posuere.', 'Nunc vitae diam in mi pellentesque vehicula eu elementum lectus.', 'Vivamus gravida felis eget ipsum consectetur tincidunt.', 'Nullam nunc eros, blandit ut finibus sit amet, porta nec lectus.', 'Vestibulum non orci neque.', 'Praesent velit massa, pulvinar quis mauris sit amet, consequat tincidunt mauris.', 'Quisque id cursus magna.', 'Donec eros ante, placerat at efficitur in, placerat id turpis.', 'Morbi non dui tortor.', 'Quisque at turpis sodales, pharetra justo ut, accumsan est.', 'Sed molestie dolor mi, ac feugiat elit blandit et.', 'Nullam libero ex, rutrum ac ultrices vitae, tincidunt ut velit.', 'Proin pharetra placerat eros, eget mattis risus semper at.', 'In feugiat congue risus.', 'Curabitur non odio ligula.', 'Nulla sit amet ligula facilisis, venenatis ante eget, porttitor libero.', 'In eu sodales sem.', 'In iaculis ex eget nisi euismod, eget porta libero condimentum.', 'Vivamus tristique faucibus nunc.', 'Nam sit amet cursus erat.', 'Suspendisse ligula velit, molestie ac nisl quis, cursus sodales nunc.', 'Nunc vel semper odio, at scelerisque felis.', 'Fusce egestas purus id enim accumsan ultrices.', 'Curabitur porttitor justo urna, nec ultricies magna varius non.', 'Nullam euismod, nunc varius efficitur viverra, sem justo scelerisque elit, a pretium ante sem id mi.', 'Sed sagittis faucibus urna, eu sollicitudin magna.', 'Nam pretium velit quis lectus viverra sodales.', 'Cras lectus mi, accumsan non vulputate et, hendrerit ac libero.', 'Integer nec faucibus risus.', 'Vestibulum a finibus nulla.', 'Quisque consequat nisi varius, laoreet justo et, elementum dui.', 'Praesent cursus quam nec vestibulum porta.', 'Nunc fermentum semper molestie.', 'Cras fermentum, sem et lobortis iaculis, magna erat bibendum sapien, in sollicitudin erat metus in lectus.', 'Nullam ligula est, tincidunt vitae consectetur vel, rutrum at erat.', 'Etiam in rhoncus nisl, ut tempor ex.', 'Phasellus mollis tempus urna, vel hendrerit felis aliquam sit amet.', 'Aliquam eget mi tellus.', 'Maecenas consectetur enim diam, a fringilla nunc suscipit egestas.', 'Phasellus ac efficitur dolor.', 'Nullam efficitur metus a risus sodales, quis vestibulum urna lacinia.', 'Nam eu risus vulputate, commodo purus quis, ullamcorper nunc.', 'Maecenas in urna tortor.', 'Duis a purus volutpat ligula tristique suscipit.', 'Sed id libero accumsan, finibus ipsum et, sagittis justo.', 'Suspendisse malesuada lectus in ligula interdum condimentum.', 'Fusce viverra ipsum lacus, et condimentum nisi tincidunt vitae.', 'Ut pulvinar sodales nisl non dictum.', 'Proin et efficitur tortor.', 'Cras viverra lacinia dolor, a condimentum mauris rhoncus eu.', 'Quisque non nunc lobortis, iaculis neque et, venenatis eros.', 'Etiam posuere, risus quis feugiat gravida, velit nisi ornare enim, vitae dictum leo massa id orci.', 'Sed sit amet ligula nisi.', 'Cras et velit eget urna pulvinar dignissim.', 'Phasellus feugiat enim eu dolor molestie, vitae molestie dui consectetur.', 'Morbi scelerisque sapien et diam tincidunt volutpat.', 'Praesent viverra lobortis tristique.', 'Vestibulum placerat rutrum congue.', 'Sed vel leo eu odio feugiat bibendum.', 'Mauris lobortis ante tortor, ac mattis nunc consequat sit amet.', 'Aenean sollicitudin faucibus purus, ut facilisis neque convallis quis.', 'Aenean sit amet risus in libero eleifend pellentesque nec non lacus.', 'Maecenas iaculis at ligula eget rutrum.', 'Aliquam vitae tristique justo.', 'Aliquam enim nibh, porta accumsan tortor at, condimentum feugiat tellus.', 'Morbi dignissim egestas maximus.', 'Sed dui risus, vulputate ac accumsan vel, rhoncus vitae nunc.', 'Phasellus elementum ac enim a tincidunt.', 'Curabitur vulputate enim ut leo suscipit rhoncus.', 'Ut vitae dapibus dui.', 'Proin sed nulla purus.', 'Etiam a eros elementum, fringilla orci nec, cursus magna.', 'Curabitur egestas ultricies risus, vitae ultrices dolor.', 'Donec et tempus leo.', 'Phasellus justo tellus, lacinia ut lorem sed, pretium hendrerit dolor.', 'Mauris in mattis libero, sed pulvinar lorem.', 'Cras quis auctor velit.', 'Vestibulum vitae hendrerit tortor.', 'Cras dictum ligula eget arcu malesuada suscipit ac sed arcu.', 'Suspendisse vel enim sit amet metus eleifend venenatis.', 'Quisque eget purus in lorem vulputate congue.', 'Curabitur non enim vulputate, accumsan purus nec, suscipit lacus.', 'Donec eros est, pretium blandit semper eu, dignissim vitae leo.', 'Maecenas molestie erat ac magna finibus, quis sollicitudin dui euismod.', 'Curabitur rutrum dolor ut nibh suscipit luctus.', 'Fusce venenatis orci nulla, eget semper libero dictum sed.', 'Sed id justo id est eleifend tristique.', 'Phasellus eleifend eget lectus vitae luctus.', 'Fusce vitae dolor id dui aliquam gravida ac id ex.', 'Maecenas elementum, mi sed suscipit malesuada, magna ex laoreet lorem, lacinia pellentesque erat nisl quis augue.', 'Ut egestas tincidunt tincidunt.', 'Nullam consectetur magna id dictum varius.', 'Pellentesque mattis, velit ac volutpat euismod, ipsum magna volutpat tellus, ac pharetra dolor erat vel felis.', 'Nunc pretium, tortor blandit gravida pretium, neque nulla vehicula diam, eget aliquet turpis eros eu orci.', 'Integer non arcu eget odio eleifend tempor quis eget elit.', 'Ut molestie nulla ornare, congue nulla vel, eleifend tellus.', 'Pellentesque ultrices diam ut risus convallis viverra.', 'Integer ex erat, molestie in rhoncus ornare, rhoncus id ipsum.', 'Fusce non nulla id augue molestie malesuada.', 'Proin in ornare ligula.', 'Fusce sit amet augue eget orci imperdiet consequat.', 'Maecenas sodales est dui, vel feugiat orci aliquam a.', 'Aenean pulvinar quam in nunc fringilla, et convallis turpis congue.', 'Proin ex erat, vehicula tristique mi vehicula, finibus congue ex.', 'Nunc ornare fermentum convallis.', 'Quisque scelerisque orci non dignissim sodales.', 'Donec eget facilisis enim.', 'Maecenas at libero at urna vestibulum mattis.', 'Curabitur fringilla ex purus, quis egestas tortor lacinia nec.', 'Sed vel est consequat, sagittis lacus at, ullamcorper augue.', 'Sed eget dui ac nisi hendrerit auctor.', 'Nulla consectetur placerat magna, at mattis felis rutrum in.', 'Nullam vitae risus viverra, faucibus lacus a, eleifend eros.', 'Nullam tempus sit amet eros vitae semper.', 'Nullam vestibulum sem sed purus congue, hendrerit porttitor leo maximus.', 'Praesent fringilla aliquet efficitur.', 'Aliquam quis metus at ante posuere gravida.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Aliquam sapien dui, mollis sed hendrerit id, fermentum sed tortor.', 'Ut eget velit a urna interdum volutpat.', 'Maecenas rhoncus dui vitae tempus rhoncus.', 'Phasellus eu molestie sem.', 'Mauris quis aliquam lectus, nec vehicula dui.', 'Aenean eget imperdiet odio.', 'Vestibulum non ullamcorper lacus.', 'Etiam eu augue eget massa tempus rutrum eu sit amet leo.', 'Vestibulum nec nulla quis tellus lacinia scelerisque quis vitae felis.', 'Donec convallis, elit sit amet viverra fermentum, libero eros auctor elit, a tincidunt erat eros vitae ligula.', 'Cras ornare placerat ultrices.', 'Mauris blandit, nunc eu viverra interdum, est odio bibendum urna, eget mattis purus nisl sollicitudin mauris.', 'Cras ornare velit ac facilisis rhoncus.', 'Donec lacus lectus, consectetur in fringilla ac, aliquet et nisl.', 'Morbi sit amet nulla vitae arcu porttitor elementum ac vitae nulla.', 'Duis sed tristique velit, non rutrum tellus.', 'Cras et imperdiet nisl.', 'Fusce id ipsum a dui volutpat volutpat.', 'Nam eget augue et lectus placerat vehicula.', 'In egestas condimentum mi, id efficitur nunc mattis sit amet.', 'Nunc pulvinar tortor nec dolor vehicula, at porta elit porttitor.', 'Praesent ac auctor erat.', 'Donec eu faucibus eros, eu varius tortor.', 'Sed eget sapien at est lacinia molestie eu in ligula.', 'Duis mollis vehicula cursus.', 'Duis finibus auctor pellentesque.', 'Nullam sed urna diam.', 'Nulla quis ipsum lacinia, placerat nisl et, dignissim est.', 'Vivamus hendrerit, urna vel pulvinar maximus, elit turpis placerat dolor, quis sodales erat turpis sed lorem.', 'Nulla dapibus porta odio, at porta ligula convallis a.', 'Morbi rhoncus tempor libero, lobortis facilisis ligula convallis at.', 'Etiam dapibus lacinia massa a finibus.', 'Nam eget convallis mi.', 'Integer posuere consectetur nisl eu ullamcorper.', 'Cras et lorem sit amet dui venenatis pellentesque non laoreet quam.', 'Sed eget fermentum diam, eget suscipit nulla.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Aliquam luctus eros quis metus faucibus luctus.', 'Suspendisse rutrum libero magna, eu porttitor purus pretium vel.', 'Curabitur ultricies pellentesque dui sit amet volutpat.', 'Fusce tincidunt vulputate elit et efficitur.', 'Nunc tincidunt malesuada dignissim.', 'Nullam ornare venenatis purus semper porta.', 'Nullam nisl massa, porttitor non gravida id, bibendum at dui.', 'Curabitur rhoncus at massa ac ultricies.', 'Phasellus aliquet ex in quam placerat feugiat.', 'Nullam placerat in quam vitae venenatis.', 'Vivamus quis mi accumsan, egestas nunc ac, laoreet arcu.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Sed faucibus imperdiet tortor vitae dapibus.', 'Morbi neque sapien, aliquam eget leo in, maximus feugiat eros.', 'In interdum ipsum quis dictum posuere.', 'Vestibulum pellentesque ullamcorper rutrum.', 'Vivamus sed lacus imperdiet, vehicula augue id, aliquet magna.', 'Donec mollis auctor consectetur.', 'Proin ultrices orci tellus, in volutpat leo pellentesque id.', 'Donec sapien mauris, sagittis iaculis iaculis non, iaculis at nunc.', 'In dapibus leo ac elit gravida, in ornare tortor mattis.', 'Nam ac auctor arcu, id semper ex.', 'Sed vitae mauris sagittis, vestibulum sapien sed, consectetur tortor.', 'Sed dapibus metus vitae condimentum sagittis.', 'Integer purus leo, pretium id vulputate sit amet, tempus at quam.', 'Donec egestas laoreet efficitur.', 'Phasellus et ante vehicula, consectetur lacus ut, vehicula nunc.', 'Vestibulum ultricies, ligula et consequat fermentum, nisi mauris imperdiet turpis, tincidunt mollis felis tellus sed quam.', 'Praesent id arcu dui.', 'Suspendisse suscipit, velit vel tempus ultricies, turpis magna facilisis lectus, et varius enim sem vel enim.', 'Vivamus id elit turpis.', 'Duis porta, nibh sit amet pharetra dapibus, massa neque elementum turpis, et porta ante ex at libero.', 'In tempus, sem a pulvinar viverra, turpis nulla malesuada metus, vitae posuere augue odio at nunc.', 'Donec lectus sem, cursus ut vestibulum vitae, facilisis ac mi.', 'Maecenas bibendum nisl eu libero hendrerit, vel mollis lorem tristique.', 'Nulla cursus cursus mauris.', 'Vestibulum laoreet augue ac nunc pulvinar, vestibulum ullamcorper purus volutpat.', 'Vivamus iaculis euismod accumsan.', 'Vestibulum eu dui feugiat, consectetur libero id, consectetur arcu.', 'Nam lobortis ut turpis ac convallis.', 'Duis dapibus turpis nec aliquet porta.', 'Suspendisse id gravida nunc.', 'Nullam tristique risus et magna dictum bibendum.', 'Morbi semper tellus id arcu dictum, vitae accumsan purus sollicitudin.', 'Nunc id laoreet diam.', 'Praesent congue, elit ac molestie pharetra, nulla orci viverra turpis, quis tristique dolor sem quis magna.', 'Donec eu ligula mauris.', 'Etiam nec pretium arcu, et vehicula leo.', 'Pellentesque non libero eu justo aliquet dictum.', 'Mauris vel aliquet nulla.', 'Aliquam vitae pharetra purus.', 'Morbi aliquam aliquet malesuada.', 'Duis id luctus arcu.', 'In rutrum mattis leo.', 'Sed mattis augue sed nulla porta, sit amet efficitur libero aliquet.', 'Vivamus quis dignissim nibh.', 'Donec accumsan vitae nisl ac placerat.', 'Maecenas in neque nunc.', 'Donec placerat quis ex id egestas.', 'Quisque aliquam ex vel convallis eleifend.', 'Mauris sodales elementum risus, quis fermentum augue vulputate a.', 'Suspendisse sollicitudin finibus tellus, vitae sollicitudin eros tincidunt et.', 'Nulla ut ex in eros consectetur aliquam.', 'Ut maximus nulla tellus, eget tincidunt nulla auctor a.', 'Vivamus accumsan dictum felis, sit amet tempor sapien pulvinar at.', 'Morbi interdum, justo in scelerisque varius, purus enim vulputate nisi, consectetur interdum odio lorem vel justo.', 'Aenean vitae hendrerit tortor.', 'Suspendisse maximus imperdiet mi eu pellentesque.', 'Quisque ultrices ultrices tortor, eget tristique odio pretium nec.', 'Fusce metus dui, ultrices vel bibendum non, elementum sit amet magna.', 'Nullam euismod ligula non ligula pulvinar, eu elementum velit cursus.', 'Donec neque quam, feugiat nec vehicula non, ultricies non ex.', 'Vivamus felis dui, vulputate in sollicitudin id, blandit nec odio.', 'Pellentesque risus elit, mattis a purus vitae, varius auctor eros.', 'Donec non rhoncus magna.', 'Vestibulum volutpat orci enim, sed cursus massa vehicula ut.', 'Vestibulum in rhoncus mi.', 'Fusce vitae pulvinar nunc.', 'Nam sit amet urna et lacus auctor accumsan et malesuada mauris.', 'Curabitur ac eleifend urna.', 'Maecenas a justo sed augue consequat blandit nec sit amet lacus.', 'Phasellus vehicula est diam, in pharetra turpis porta ut.', 'Nunc arcu lorem, pretium vitae feugiat in, elementum ac purus.', 'Phasellus eget mollis diam, eu lacinia mi.', 'Aenean eget lectus nulla.', 'Integer sapien nibh, blandit quis libero mattis, ultricies consequat nisl.', 'Nunc ac sagittis ligula, vel varius ante.', 'Proin ut turpis a erat viverra lobortis eu quis quam.', 'Mauris eu augue vel nisl interdum accumsan.', 'Ut ut euismod leo.', 'Aliquam urna turpis, blandit nec suscipit non, convallis at ante.', 'Nulla quis molestie erat, ut venenatis nunc.', 'Fusce pellentesque sit amet felis eu mattis.', 'Mauris felis elit, gravida at dapibus ut, cursus at ante.', 'Praesent in lacus euismod, porta ipsum id, feugiat mauris.', 'Maecenas in eros nec arcu aliquet hendrerit.', 'Duis bibendum rutrum mi.', 'Donec elementum, felis eu fringilla placerat, mi lacus molestie tortor, et varius libero justo in tortor.', 'Integer rutrum at neque et luctus.', 'Nullam faucibus tempus metus, vitae dignissim leo viverra a.', 'Integer augue erat, aliquet id tortor a, convallis dignissim tellus.', 'Morbi elementum mollis tellus, id interdum arcu sodales sit amet.', 'Suspendisse a sapien quis nibh convallis laoreet eu eget sem.', 'Aliquam blandit odio vel nulla rhoncus dapibus.', 'Nullam mollis tristique ligula, in tristique mauris eleifend id.', 'Pellentesque eget consequat nunc.', 'Donec et orci hendrerit, lacinia nisl in, pretium ex.', 'Quisque sit amet eleifend tortor.', 'Proin viverra dui eget tortor faucibus hendrerit.', 'Etiam pretium fringilla justo, quis ornare risus posuere quis.', 'Sed commodo maximus mauris, eu vulputate metus consectetur eget.', 'Maecenas in arcu porttitor, condimentum arcu at, pretium ante.', 'Ut vel urna viverra diam gravida condimentum non at enim.', 'In ligula orci, malesuada convallis est a, egestas auctor elit.', 'Sed blandit sagittis ipsum, id tempus diam ornare at.', 'Mauris volutpat faucibus magna ac sodales.', 'Fusce eget arcu et nunc rhoncus egestas et vel metus.', 'Vestibulum egestas euismod bibendum.', 'Ut id ex cursus, congue nulla ac, finibus mi.', 'Suspendisse vitae tempor arcu.', 'Integer ultricies orci purus, non finibus nisi laoreet in.', 'Vestibulum metus purus, sodales rutrum tincidunt pharetra, eleifend vel dui.', 'Pellentesque convallis dolor sed consequat pretium.', 'Fusce suscipit ex ante, ac accumsan quam eleifend non.', 'Donec porta, neque eu cursus cursus, enim arcu consequat neque, cursus laoreet dolor nunc at sem.', 'Praesent sit amet arcu eros.', 'Donec congue tellus nec mi ullamcorper pretium.', 'Duis aliquet, orci eu facilisis dapibus, mauris erat congue neque, in commodo diam libero maximus urna.', 'Curabitur interdum et metus at ullamcorper.', 'Nulla a dui purus.', 'Proin sed dapibus risus.', 'Nunc consectetur posuere maximus.', 'In sodales sem nisl, ut varius est venenatis at.', 'Vestibulum non consectetur lorem.', 'Nunc id risus sapien.', 'Sed tristique condimentum tortor, tincidunt vulputate turpis.', 'Ut neque purus, molestie eu varius vitae, venenatis sit amet nulla.', 'Quisque dapibus libero libero, sed blandit lectus blandit sed.', 'Nulla congue urna quis metus molestie vehicula.', 'Vivamus sed aliquet felis, volutpat dignissim metus.', 'Duis id augue neque.', 'Nullam placerat mollis malesuada.', 'Aenean malesuada elit ac ante imperdiet posuere sed et nibh.', 'Vivamus id interdum ligula.', 'Nunc nisl tellus, aliquam nec tristique et, elementum non est.', 'Praesent fermentum gravida dolor, eu viverra justo posuere id.', 'Ut nec iaculis velit, quis pretium mi.', 'Vivamus scelerisque dolor sit amet erat dapibus consectetur.', 'Pellentesque dolor risus, maximus quis gravida non, viverra sit amet dui.', 'Phasellus placerat lectus velit, id ultrices augue posuere id.', 'In consequat aliquet justo, in ornare turpis tempus sed.', 'Fusce condimentum rhoncus condimentum.', 'Phasellus convallis, sem molestie scelerisque congue, est justo lobortis diam, id commodo ligula ipsum eu nibh.', 'Etiam id dolor vehicula, vehicula orci vitae, elementum dolor.', 'Proin sit amet massa et dui varius auctor at nec ex.', 'Donec ultrices sem vel nisi fermentum vulputate.', 'In viverra quam ante, sit amet efficitur est varius sed.', 'Sed vestibulum, tortor sed tincidunt congue, eros turpis mollis ipsum, id dictum nibh ipsum et metus.', 'Proin nec sem eros.', 'Donec vulputate lacus nisl, nec consectetur nulla blandit ac.', 'Nulla sapien ipsum, tristique a mi sed, iaculis accumsan sapien.', 'Etiam vitae est quis purus rhoncus blandit.', 'Proin suscipit nec tortor sit amet malesuada.', 'Aenean felis odio, facilisis sit amet enim in, condimentum sollicitudin ante.', 'Nullam porttitor vel nulla sit amet feugiat.', 'Phasellus mi libero, vulputate eu magna at, molestie aliquet erat.', 'Mauris eros lorem, malesuada nec ligula vitae, pellentesque consequat est.', 'Donec sodales pellentesque mi vitae pulvinar.', 'Morbi sapien nisi, commodo nec ultricies et, iaculis nec nisl.', 'Vestibulum maximus luctus elit malesuada tincidunt.', 'Duis eget varius orci, eget sagittis eros.', 'Quisque eu eros nisl.', 'Pellentesque eleifend aliquam metus, a finibus neque sodales eu.', 'Sed tempus ligula sapien, eu scelerisque nisl porttitor gravida.', 'Sed vitae lectus sit amet lectus ornare interdum.', 'Aenean arcu diam, porta sit amet lorem eget, sagittis iaculis erat.', 'Nulla pharetra, sem non auctor ultrices, ex metus dictum magna, at sollicitudin ex justo ut turpis.', 'Sed aliquam luctus semper.', 'Nullam vitae malesuada sapien, tempor scelerisque lorem.', 'Integer congue lorem ligula, ac volutpat lorem sagittis id.', 'Nam nec pretium elit.', 'Quisque volutpat ex dui, a sagittis augue consequat tincidunt.', 'Cras tristique felis arcu, at vestibulum enim scelerisque sollicitudin.', 'Nunc est metus, semper vel magna et, tincidunt scelerisque felis.', 'Morbi lacus nisl, porttitor ac justo non, ultricies fringilla tellus.', 'Ut eu metus metus.', 'Proin vehicula vestibulum sollicitudin.', 'Sed iaculis sem non ante ultricies, at semper libero porttitor.', 'Ut dapibus laoreet sem ac consectetur.', 'Etiam hendrerit, odio sit amet iaculis semper, ligula sapien rhoncus lorem, sit amet cursus eros ante sed nunc.', 'Etiam interdum pellentesque enim id dapibus.', 'Fusce luctus orci tortor.', 'Cras id bibendum risus.', 'Nam pretium felis nec ante tincidunt interdum.', 'Nam cursus nibh non justo pharetra tristique.', 'Nam facilisis dapibus lacus sit amet volutpat.', 'Morbi felis ex, semper sed tortor eu, finibus aliquet lacus.', 'Integer a orci augue.', 'Vivamus lobortis tellus sed est fringilla ornare.', 'Mauris tempus ante tortor, ac feugiat dui viverra et.', 'Vivamus risus leo, ultrices at eleifend vel, luctus non arcu.', 'Nullam dictum purus commodo turpis maximus, a mollis nibh efficitur.', 'Nullam elementum dapibus suscipit.', 'Nullam blandit sem lacus, vel blandit eros fermentum et.', 'Sed cursus fermentum augue sit amet rutrum.', 'Maecenas posuere, ante sed tristique ultrices, nisl orci molestie metus, sed vehicula mauris odio quis neque.', 'Aliquam quis elit enim.', 'Aenean cursus, turpis vel rhoncus imperdiet, dolor est consectetur metus, nec egestas elit orci ac ex.', 'Fusce convallis justo eget dui ultrices, nec venenatis felis venenatis.', 'Praesent eleifend, arcu eget auctor venenatis, nibh mi aliquet ligula, ac sagittis risus mi sed est.', 'Phasellus pharetra, leo sed elementum aliquam, nisl justo lacinia ante, eu vehicula justo diam a sem.', 'Praesent ac convallis neque, a dictum ipsum.', 'Quisque venenatis nulla porta nunc semper sollicitudin.', 'Duis a lectus velit.', 'Phasellus lobortis, arcu et feugiat imperdiet, magna ipsum vestibulum massa, eget ullamcorper libero eros sed justo.', 'Vivamus eget augue a ligula ultrices condimentum.', 'Maecenas at eros a eros dictum mollis.', 'Sed pellentesque tempor purus rhoncus pulvinar.', 'Vestibulum faucibus urna quis convallis rutrum.', 'Donec ultricies tempor nunc, a facilisis velit elementum non.', 'Sed in risus in ante eleifend congue sed non nisl.', 'Vestibulum in ante quis libero rhoncus sollicitudin id in quam.', 'Sed imperdiet dapibus purus.', 'Sed blandit, enim a cursus scelerisque, ante purus ornare massa, in dictum lacus neque id quam.', 'Suspendisse tincidunt consectetur eros ac imperdiet.', 'Sed ac justo feugiat, convallis ligula eget, interdum erat.', 'Integer fermentum id arcu quis blandit.', 'Curabitur iaculis, mauris at efficitur condimentum, justo lectus ornare quam, a euismod enim dolor gravida sem.', 'Vivamus eu diam et nisl tincidunt pretium.', 'Nullam sodales pulvinar urna in ultricies.', 'Phasellus in condimentum enim.', 'Vivamus eu mollis lacus.', 'Etiam id ante eget dolor vehicula porta.', 'Etiam purus urna, bibendum sollicitudin viverra quis, tristique id felis.', 'Morbi porttitor tortor eget lorem commodo, nec pharetra urna vestibulum.', 'Integer id faucibus massa.', 'Duis interdum, quam quis ornare vulputate, eros est sodales ligula, a tincidunt mauris turpis ac diam.', 'Mauris dictum nisl a vulputate cursus.', 'Nulla cursus accumsan nisi, sit amet vulputate sapien semper ac.', 'Vestibulum malesuada sodales condimentum.', 'Sed tincidunt iaculis interdum.', 'Maecenas quis nulla arcu.', 'Fusce lacus urna, vulputate eu fringilla ut, vulputate quis magna.', 'Vivamus a commodo neque.', 'Donec maximus erat libero, eget sagittis magna tincidunt ac.', 'Proin at dui id orci ornare vestibulum.', 'Ut porttitor eget urna non efficitur.', 'Nullam ut ligula est.', 'Vestibulum vulputate, nulla sit amet laoreet egestas, ex mauris mattis metus, ut bibendum elit diam nec felis.', 'Duis tempus finibus lorem, sit amet consequat dolor porta eu.', 'Phasellus vel fringilla orci.', 'Curabitur nec elit pulvinar mi vestibulum accumsan id quis augue.', 'Ut ante neque, malesuada sed lacinia id, faucibus eget dui.', 'In dui nibh, dignissim sed nisi at, feugiat vestibulum odio.', 'Nunc ac lacus eleifend, laoreet tellus sit amet, condimentum nulla.', 'Sed ac elit a lacus fermentum auctor in ac mi.', 'Donec a justo in tellus laoreet facilisis.', 'Pellentesque a enim dui.', 'Vivamus sit amet rhoncus neque.', 'Vivamus risus leo, aliquet ut sollicitudin vel, blandit in risus.', 'Pellentesque eget tincidunt urna.', 'Duis iaculis suscipit diam a tempus.', 'Etiam posuere eu est vel congue.', 'Nunc vel accumsan justo.', 'Sed pharetra arcu vitae mauris eleifend, eu fermentum elit fermentum.', 'In suscipit quam neque, ut dapibus urna venenatis in.', 'Vivamus ultrices consequat risus, ac vestibulum orci porta finibus.', 'Fusce sem orci, egestas ut feugiat sit amet, molestie quis arcu.', 'Fusce vel ex nec justo ullamcorper viverra sit amet sed justo.', 'Nunc sit amet leo consequat, commodo mauris quis, euismod mi.', 'Ut finibus sapien ut dictum maximus.', 'Suspendisse sed dui urna.', 'Ut mattis et ex sit amet sagittis.', 'Nulla vitae condimentum metus.', 'Fusce sodales nulla metus, lacinia tincidunt sapien lobortis tincidunt.', 'Vestibulum dapibus urna diam, nec ultricies neque feugiat et.', 'Vestibulum eget quam ac lectus fermentum sagittis eu sit amet massa.', 'Suspendisse quis diam eget felis faucibus tempus.', 'Nullam nec consectetur urna.', 'Aenean pharetra ullamcorper nibh, in maximus dui molestie non.', 'Mauris laoreet, ex id volutpat bibendum, magna purus sodales lacus, eu fringilla ante ex ac metus.', 'Suspendisse ac sapien massa.', 'Donec pellentesque, ipsum in mollis accumsan, nisi risus sodales nisl, sed malesuada enim nibh quis mi.', 'Duis varius viverra lacus non ultricies.', 'Vestibulum venenatis id odio eget gravida.', 'Suspendisse at quam et justo sollicitudin ornare eget ut felis.', 'Etiam rhoncus ornare nisl ac tincidunt.', 'Etiam vitae maximus tellus.', 'In nibh leo, mattis vel tellus id, facilisis imperdiet lacus.', 'Mauris sagittis ut erat eget porta.', 'Nam a lectus laoreet, consequat elit ac, porta magna.', 'Aenean commodo suscipit lorem, quis iaculis risus interdum ut.', 'In cursus ullamcorper quam vel imperdiet.', 'Nullam id mattis lectus.', 'Cras sagittis massa urna, vitae mollis elit tincidunt a.', 'Aliquam commodo urna quis nunc elementum ultricies at interdum massa.', 'Sed pharetra urna eros, eget aliquam sem interdum id.', 'Integer vestibulum dolor eget urna suscipit ultricies.', 'Nam facilisis velit non mi pulvinar, eu mattis massa interdum.', 'Sed faucibus, velit id tempor maximus, magna odio sodales ligula, vitae efficitur massa nunc non odio.', 'Integer mattis lorem vitae turpis molestie faucibus.', 'Praesent sagittis, quam quis placerat viverra, orci eros interdum mauris, ac iaculis erat lectus vel augue.', 'Donec dapibus leo leo, accumsan tincidunt augue aliquet et.', 'Ut non erat sed odio gravida blandit.', 'Praesent placerat ante nulla, a iaculis magna lobortis in.', 'Nulla erat nisi, pharetra at luctus vitae, cursus eget neque.', 'Vivamus diam ante, pulvinar elementum vestibulum ut, fringilla quis lacus.', 'Pellentesque ultrices odio placerat sollicitudin tincidunt.', 'Nunc accumsan nisl nunc, tempor egestas odio elementum a.', 'Vestibulum ut leo euismod, hendrerit tortor sed, eleifend purus.', 'Aenean venenatis viverra elementum.', 'Vivamus eu mattis quam, sit amet iaculis mi.', 'Proin ultricies, arcu et tempus blandit, neque tortor vehicula felis, quis mattis magna metus non velit.', 'Mauris imperdiet eu quam nec efficitur.', 'Nam sodales sem at nulla laoreet, a luctus odio viverra.', 'Morbi non lectus semper, interdum lectus ac, elementum purus.', 'Vestibulum efficitur faucibus volutpat.', 'Aliquam pellentesque, ex id laoreet gravida, sapien orci sodales dolor, vitae pretium turpis dolor a metus.', 'Nulla metus tellus, porttitor in placerat non, laoreet eget nunc.', 'Maecenas euismod massa et viverra consectetur.', 'Curabitur est sapien, commodo vel urna lacinia, accumsan viverra libero.', 'Maecenas fringilla, odio vitae congue malesuada, elit mi rhoncus erat, at hendrerit metus magna et eros.', 'Suspendisse efficitur cursus purus quis commodo.', 'Duis egestas sem urna.', 'Suspendisse consectetur posuere purus id dignissim.', 'Aenean ut congue velit.', 'Etiam suscipit, tellus non laoreet maximus, orci dolor faucibus velit, quis vulputate leo dui in lacus.', 'Ut iaculis metus ante, sed sollicitudin sem consequat ut.', 'Fusce semper mattis turpis, vel elementum leo ornare eget.', 'Donec magna nulla, vulputate quis lacinia sit amet, tempor ac mauris.', 'Aenean interdum purus ligula, ut porttitor arcu aliquam vitae.', 'Morbi venenatis sem in velit venenatis rutrum.', 'Nullam porta leo convallis, molestie massa non, sagittis metus.', 'Nunc posuere pretium augue, eu condimentum augue sollicitudin sit amet.', 'Pellentesque elementum ipsum nec tincidunt aliquam.', 'Pellentesque massa enim, vehicula quis euismod non, lobortis eget magna.', 'Mauris posuere risus non velit vestibulum pretium non non ipsum.', 'Cras vel ornare turpis, vel feugiat purus.', 'Quisque odio eros, porttitor nec vulputate vitae, sollicitudin pretium purus.', 'Maecenas imperdiet lacus a urna finibus fringilla sit amet et felis.', 'Aenean quis ipsum tempus, pellentesque nunc mattis, tristique diam.', 'Vestibulum vitae nunc hendrerit, gravida sem eu, tempus risus.', 'Donec condimentum bibendum ipsum, a hendrerit neque posuere nec.', 'Donec a dolor a massa maximus efficitur.', 'Praesent velit massa, tempus ac semper quis, scelerisque vitae ante.', 'Aliquam purus urna, hendrerit vitae sagittis at, porta vel justo.', 'Curabitur pellentesque consectetur lobortis.', 'Vivamus scelerisque hendrerit venenatis.', 'Integer tincidunt ut diam sed congue.', 'Sed ut aliquam nisi.', 'Nullam nec eros id nunc semper luctus.', 'Aliquam maximus eleifend dui, nec blandit massa bibendum eget.', 'Donec interdum placerat tincidunt.', 'Quisque non nulla sapien.', 'Etiam tincidunt eros eget elit bibendum gravida.', 'Quisque fringilla facilisis tortor quis ullamcorper.', 'Integer gravida justo in iaculis posuere.', 'Praesent sed tincidunt sapien.', 'Sed euismod vitae ex vel scelerisque.', 'Aenean nisi felis, ornare et feugiat eget, sodales vitae odio.', 'Aenean libero sapien, lacinia ac sapien ac, laoreet dignissim dui.', 'Nunc nibh massa, convallis in augue et, efficitur mattis elit.', 'Suspendisse id nisl luctus, sollicitudin justo non, luctus arcu.', 'Aliquam a est massa.', 'Pellentesque dignissim mattis arcu.', 'Aliquam efficitur ante metus, ut pellentesque felis suscipit eu.', 'Ut facilisis vestibulum arcu elementum dignissim.', 'Cras non sapien mauris.', 'Maecenas interdum libero eu libero luctus, sit amet efficitur leo porttitor.', 'Duis ut mollis ex.', 'Quisque mi eros, suscipit vitae tempus ut, condimentum sit amet est.', 'Ut non posuere erat.', 'Curabitur bibendum magna turpis, non tincidunt risus dictum at.', 'Etiam tempus magna at odio auctor, ac euismod nibh pulvinar.', 'Morbi dapibus et sapien in elementum.', 'Ut finibus est odio, eu convallis nunc viverra id.', 'Quisque rhoncus mollis est sit amet semper.', 'Ut dapibus urna sed diam ornare, eu efficitur leo feugiat.', 'Vestibulum vel felis et erat molestie volutpat.', 'Maecenas molestie lorem eget quam porta, a venenatis felis accumsan.', 'In id auctor risus.', 'Phasellus at diam sed orci porttitor tempus.', 'Nunc dapibus, massa id ornare condimentum, mi sem ullamcorper nunc, et auctor felis felis id sem.', 'Suspendisse nec vehicula augue.', 'Aliquam imperdiet sagittis justo at iaculis.', 'Praesent vulputate tellus ornare malesuada faucibus.', 'Sed sed tristique ante.', 'Curabitur ac augue et leo fermentum commodo eu in nulla.', 'Aliquam pellentesque risus velit, a consequat dolor vestibulum ac.', 'Fusce in dolor porttitor arcu viverra gravida.', 'Maecenas ex orci, pretium vitae dui eget, scelerisque dapibus arcu.', 'Praesent efficitur efficitur imperdiet.', 'Curabitur eget tortor finibus, elementum orci quis, viverra arcu.', 'Aenean ac tincidunt lacus.', 'Donec vel ultricies est.', 'Mauris mollis nisi a efficitur scelerisque.', 'Nam dictum lacinia odio at pharetra.', 'Quisque ultricies arcu in venenatis rutrum.', 'In odio ipsum, euismod in posuere eget, placerat et felis.', 'Curabitur mi est, placerat quis egestas non, mollis sed urna.', 'Praesent malesuada, dolor in fermentum faucibus, tellus velit accumsan ipsum, tincidunt luctus turpis nulla sed enim.', 'Integer rhoncus, turpis id tincidunt pulvinar, metus orci cursus mi, nec feugiat lorem elit ut enim.', 'Aliquam et felis vel elit porta cursus in sit amet diam.', 'In a vehicula eros, eu ullamcorper turpis.', 'Vivamus eleifend libero non nulla accumsan porttitor.', 'Suspendisse vel neque ultrices, scelerisque lacus in, ornare massa.', 'Duis dolor leo, ullamcorper vel lacinia eget, aliquam rhoncus risus.', 'Vivamus dapibus ac elit sed imperdiet.', 'Vestibulum eget auctor dui, at tempus dolor.', 'Sed consequat placerat libero, et sodales sapien porttitor non.', 'Aenean arcu diam, imperdiet sit amet purus a, ornare sodales metus.', 'Integer accumsan ante sem, at facilisis ipsum egestas et.', 'Nulla non orci dolor.', 'Sed rhoncus facilisis condimentum.', 'Nulla vitae maximus nisl.', 'Praesent a rhoncus ante, a pharetra ante.', 'Fusce volutpat eu risus nec eleifend.', 'Suspendisse nibh leo, semper in egestas eget, placerat vel nulla.', 'Ut malesuada condimentum eros, id dignissim nunc imperdiet ac.', 'Praesent posuere tortor vitae augue convallis malesuada.', 'Donec congue sem eu leo dignissim, at blandit felis blandit.', 'In auctor, sapien quis hendrerit auctor, arcu tellus aliquam ante, quis vulputate purus metus eget mauris.', 'Proin eget purus purus.', 'Vestibulum pretium pharetra egestas.', 'Proin vulputate augue non odio commodo, eu varius sem porta.', 'Quisque porta massa quis finibus dignissim.', 'Sed sit amet lectus sit amet elit porta rutrum.', 'Nunc ornare vulputate tellus, eu rutrum turpis sagittis rutrum.', 'Nam elit justo, laoreet a tortor et, tempus dapibus sapien.', 'Sed velit augue, maximus et dignissim sed, mollis id mi.', 'Integer eget libero consequat, placerat massa maximus, efficitur dui.', 'Praesent quis ipsum a ex ultricies euismod a sit amet mi.', 'Curabitur at accumsan urna.', 'Cras pulvinar leo sit amet ligula suscipit mattis.', 'Morbi dapibus facilisis euismod.', 'Quisque efficitur venenatis eros ac elementum.', 'Suspendisse imperdiet nunc non libero consectetur, eget blandit libero mattis.', 'Vivamus tempor ullamcorper sapien vitae aliquam.', 'In eget tempus ante.', 'Vestibulum accumsan enim sed est eleifend, non commodo orci tristique.', 'Nam pellentesque nisi a laoreet lobortis.', 'Sed faucibus eros nec urna elementum, sed tincidunt est elementum.', 'Suspendisse bibendum, velit nec tempus ullamcorper, urna elit auctor mauris, et tempus dui purus ut sapien.', 'Aliquam posuere pulvinar lorem, ac vulputate neque congue eget.', 'Vivamus sit amet elit id ante pellentesque rhoncus.', 'Maecenas sagittis dui justo, id bibendum nulla egestas ac.', 'Mauris placerat sapien urna, eget porta nisi molestie eu.', 'Pellentesque metus arcu, gravida eget erat et, condimentum sodales quam.', 'Ut finibus tortor ac dui vestibulum, et interdum elit fringilla.', 'Aliquam a fringilla augue.', 'Aenean placerat, enim nec dapibus viverra, arcu leo pharetra nibh, id pretium nisl purus et orci.', 'Praesent felis odio, bibendum ut rutrum non, vehicula mollis tortor.', 'Nam id vulputate magna.', 'Vivamus ultrices purus vitae risus vehicula, et varius magna fringilla.', 'Integer mollis, arcu in aliquet eleifend, augue felis suscipit augue, vitae consectetur sem elit non neque.', 'Suspendisse dictum, lectus et sagittis dictum, turpis lorem ultrices odio, vitae tincidunt ex dolor vitae leo.', 'Nulla aliquet risus ut augue finibus, ac egestas ante varius.', 'In eget est a urna pulvinar bibendum quis eget dui.', 'In sodales auctor imperdiet.', 'Aenean et ipsum commodo, gravida erat vel, molestie nunc.', 'Etiam maximus nibh finibus ex aliquet aliquam.', 'Aliquam ultricies tellus lectus, ac suscipit tellus congue ac.', 'Ut id ullamcorper urna.', 'Aliquam ut faucibus nunc.', 'Morbi vel elit dapibus, faucibus sem quis, feugiat lacus.', 'Sed euismod diam mi, ac lacinia diam ornare sed.', 'Duis dictum sodales turpis at feugiat.', 'Donec id orci maximus, venenatis metus quis, tincidunt sapien.', 'Etiam et justo non orci elementum bibendum ut sed elit.', 'Donec imperdiet porta augue eget suscipit.', 'Donec posuere dui eget quam faucibus hendrerit.', 'Nunc non nibh mi.', 'Nullam augue ex, tincidunt nec turpis pretium, porttitor tempor neque.', 'Integer vel neque commodo, consectetur nulla a, blandit risus.', 'Quisque maximus est condimentum hendrerit placerat.', 'Morbi sagittis posuere feugiat.', 'Mauris nec lacinia dolor, eu accumsan ante.', 'Sed eu lorem auctor, gravida ligula vitae, auctor arcu.', 'Curabitur porttitor et lacus nec tempor.', 'Nulla sed posuere lorem, at facilisis quam.', 'Sed ornare leo vitae ipsum condimentum, in ullamcorper magna rhoncus.', 'Nullam quis augue tristique, scelerisque turpis sit amet, placerat tortor.', 'Suspendisse aliquam magna a suscipit egestas.', 'Fusce dictum consectetur mattis.', 'Etiam commodo iaculis neque quis scelerisque.', 'Nulla eu condimentum lectus, vitae tincidunt nisi.', 'Vestibulum lacinia ac ligula quis fringilla.', 'Pellentesque aliquam posuere nunc sed malesuada.', 'Fusce a sem lobortis, egestas tortor eu, mollis sapien.', 'Praesent malesuada consequat ante in hendrerit.', 'Pellentesque lorem ligula, sodales quis suscipit at, venenatis in risus.', 'Phasellus sapien nibh, tincidunt ut suscipit non, hendrerit id turpis.', 'Maecenas ut lorem non risus efficitur tristique ac at arcu.', 'Phasellus consequat urna ligula, a luctus justo cursus ac.', 'Praesent et augue augue.', 'Morbi accumsan neque id nisl malesuada, quis cursus sapien blandit.', 'Phasellus ultrices dignissim neque a posuere.', 'Mauris eu vehicula nunc.', 'Sed egestas nisi dui, at lobortis dui condimentum vitae.', 'Quisque sit amet dui eu massa molestie malesuada iaculis in nulla.', 'Phasellus et molestie lacus.', 'Pellentesque egestas iaculis tortor, ac tempus ante commodo non.', 'Maecenas ullamcorper dictum tortor ut luctus.', 'Curabitur id dui quis felis pharetra elementum vitae nec elit.', 'Nunc dictum malesuada ante, in elementum nulla ornare ut.', 'Vivamus luctus lacus id venenatis eleifend.', 'Suspendisse a venenatis turpis.', 'Sed suscipit feugiat massa sed molestie.', 'Suspendisse vitae ornare quam.', 'Vivamus tincidunt metus sed aliquet sodales.', 'Ut quis massa a magna vulputate dictum in vitae ex.', 'Phasellus nulla elit, volutpat eu tincidunt eu, fringilla interdum metus.', 'Praesent dignissim felis nec est molestie, in commodo lectus tempor.', 'Donec quis mi venenatis, suscipit nibh eget, euismod dui.', 'Morbi enim elit, tempor at euismod quis, lacinia sit amet risus.', 'Vestibulum vitae pharetra magna, non luctus mauris.', 'Nullam vel luctus arcu, condimentum posuere libero.', 'Ut ullamcorper, dolor ac interdum auctor, massa leo vehicula urna, sed interdum lacus magna in quam.', 'Integer pretium pulvinar sem, eget vehicula sem egestas vel.', 'Aenean malesuada odio eget fermentum facilisis.', 'Suspendisse finibus, tortor sit amet porta ultricies, sapien dolor sodales metus, a commodo risus nulla vel justo.', 'Integer nec justo at neque aliquam ultrices a quis libero.', 'Ut sem ligula, facilisis ut turpis quis, convallis porta ex.', 'Nam sed ullamcorper ipsum, ut tincidunt risus.', 'Curabitur nisi nisl, finibus et maximus id, bibendum hendrerit risus.', 'Suspendisse molestie laoreet quam, in lobortis ipsum sodales id.', 'Integer et suscipit nisi.', 'Mauris mattis porta malesuada.', 'Vivamus interdum blandit dolor, vitae egestas turpis rutrum eget.', 'In a est sit amet dolor tristique vehicula.', 'Sed et vehicula magna, eget laoreet dui.', 'In convallis sem in maximus luctus.', 'Vestibulum vestibulum convallis diam, nec tristique nisl cursus vel.', 'Suspendisse nec lorem eget ligula venenatis placerat et et massa.', 'Vestibulum efficitur ac libero vel fermentum.', 'Cras condimentum diam turpis, sit amet egestas neque hendrerit ac.', 'Pellentesque mattis mollis dignissim.', 'Donec eleifend magna ac pulvinar sagittis.', 'Vestibulum accumsan vulputate odio et molestie.', 'Vestibulum accumsan volutpat interdum.', 'Fusce vestibulum feugiat odio bibendum bibendum.', 'Mauris non blandit nibh, id mattis dolor.', 'Nunc interdum ipsum quis sem scelerisque sagittis.', 'Integer sit amet dapibus velit, sit amet facilisis sem.', 'In hendrerit sapien vel nulla rutrum, at lobortis quam gravida.', 'Etiam euismod est quis leo convallis, eu interdum risus viverra.', 'Nam ullamcorper imperdiet erat, ut efficitur nunc molestie a.', 'Sed eget tellus cursus, ullamcorper odio sit amet, semper est.', 'Aliquam suscipit urna ex, quis sodales nisi dapibus non.', 'Nulla quis leo non tellus sollicitudin suscipit.', 'Fusce in aliquet nulla.', 'Donec risus tellus, imperdiet sed vulputate ut, pulvinar malesuada quam.', 'Morbi id ligula leo.', 'Fusce varius mauris dui, vel placerat magna efficitur eget.', 'Morbi vulputate et lectus a porta.', 'Quisque porta tortor sapien, quis rhoncus libero maximus volutpat.', 'Cras sodales ex nec tortor finibus, aliquet scelerisque sem pellentesque.', 'Aliquam ornare sodales quam.', 'Donec eleifend ornare velit, in mollis elit.', 'Nunc bibendum venenatis dui, sit amet scelerisque ex blandit eget.', 'In non lacus iaculis, dictum urna id, placerat lorem.', 'Vestibulum quis sem imperdiet, pellentesque neque ac, varius justo.', 'Phasellus porta, augue at mattis dignissim, erat tortor porttitor leo, eu pretium purus lectus quis diam.', 'Nam scelerisque, turpis eget pharetra sollicitudin, erat lacus tincidunt odio, at condimentum augue eros nec lectus.', 'Proin vestibulum, tortor non maximus sodales, quam nibh gravida risus, vitae porta ex nisi eget velit.', 'Cras at orci eu tortor vulputate facilisis nec in ex.', 'Donec a turpis pulvinar, gravida nisl ut, suscipit justo.', 'Vestibulum pharetra, lacus eu sodales vestibulum, eros lectus ullamcorper odio, in vulputate dui leo a enim.', 'Aenean at consectetur quam, in elementum ipsum.', 'Vestibulum maximus aliquam leo, vitae accumsan felis hendrerit varius.', 'Sed bibendum vestibulum nibh, scelerisque dictum ex feugiat et.', 'Suspendisse placerat dolor quis aliquam maximus.', 'Sed sed enim convallis, sodales nulla id, molestie nisi.', 'Aliquam at iaculis ante.', 'Cras blandit hendrerit accumsan.', 'Vestibulum convallis nisi vel dui luctus, sit amet malesuada mi tincidunt.', 'Nunc tempor eget massa porta dignissim.', 'Proin ut congue neque, sit amet maximus felis.', 'Mauris ultrices eleifend nunc.', 'Maecenas maximus mauris ac sagittis volutpat.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Cras ac pulvinar diam.', 'Donec mollis mi eu arcu convallis mattis.', 'Donec ipsum lectus, placerat sed consectetur id, ultricies nec tortor.', 'Cras tellus augue, faucibus et felis ut, vehicula pretium sem.', 'Fusce sit amet cursus leo, nec cursus dui.', 'Curabitur massa leo, varius ut consectetur in, sodales sit amet nisi.', 'In faucibus nibh id massa porttitor, vitae sollicitudin metus pretium.', 'Phasellus ultrices erat enim, vitae mollis justo tincidunt at.', 'Donec accumsan commodo quam non iaculis.', 'Pellentesque viverra et magna eget sollicitudin.', 'Suspendisse at dui eu diam mattis congue sagittis in magna.', 'Fusce fermentum commodo arcu sed consectetur.', 'Fusce nec orci lacus.', 'Aliquam eu mauris accumsan, ullamcorper massa eu, facilisis augue.', 'Curabitur vel tincidunt felis, vitae faucibus nibh.', 'Cras mattis dignissim viverra.', 'Phasellus sed erat congue, maximus quam id, blandit lectus.', 'Pellentesque a volutpat magna.', 'Aenean tempus, tortor sit amet porttitor consectetur, ante libero pulvinar urna, at euismod purus erat a turpis.', 'Vestibulum congue interdum laoreet.', 'Morbi auctor sollicitudin lacus nec feugiat.', 'Etiam et justo eget elit egestas bibendum eget varius nibh.', 'Phasellus tempor ullamcorper tellus, fermentum lobortis velit luctus vel.', 'In commodo ac ligula sit amet maximus.', 'Duis consectetur nibh velit, vitae tristique urna mattis at.', 'Cras vitae risus at metus finibus vestibulum.', 'Ut sit amet suscipit mauris.', 'Fusce euismod dolor non nibh consequat viverra.', 'Duis viverra orci magna, vel volutpat turpis pretium vel.', 'Integer ultricies tempus augue ut ultrices.', 'Ut gravida ante venenatis commodo dapibus.', 'Fusce tincidunt id nisl nec tincidunt.', 'Phasellus sed diam bibendum, tincidunt felis ac, malesuada augue.', 'In convallis mauris non turpis convallis auctor.', 'Vivamus dolor tortor, suscipit at vulputate vitae, ullamcorper vitae ipsum.', 'Sed dictum eros neque, sit amet cursus felis condimentum ac.', 'Sed laoreet diam eu euismod tempus.', 'Phasellus ultricies suscipit lacus, at faucibus est varius ac.', 'Praesent aliquam tristique interdum.', 'Quisque quis porttitor ipsum, sed pellentesque arcu.', 'Ut convallis eros sed tellus euismod posuere.', 'Proin eget dictum lacus.', 'Mauris scelerisque ex ac faucibus maximus.', 'Donec at leo sed libero iaculis gravida sed ac enim.', 'Proin a consequat ligula.', 'In euismod tempus velit vel condimentum.', 'Proin viverra convallis ipsum sit amet accumsan.', 'Ut mi nisl, consequat a neque eu, aliquam placerat quam.', 'Duis tempus ullamcorper risus ut finibus.', 'Duis ac enim eros.', 'In blandit malesuada tellus, in bibendum massa condimentum in.', 'Vivamus quis orci libero.', 'Cras eu condimentum ipsum.', 'Maecenas lacinia lobortis euismod.', 'Nulla at imperdiet nibh, eget lacinia augue.', 'Curabitur sit amet pulvinar ipsum.', 'Praesent tincidunt velit in nunc congue dictum.', 'Cras arcu arcu, elementum sit amet molestie non, pellentesque id orci.', 'Vestibulum eleifend faucibus magna pellentesque imperdiet.', 'Curabitur convallis non nunc nec consectetur.', 'Integer at maximus tellus, ac pellentesque velit.', 'Etiam nec elementum turpis, et blandit sapien.', 'Mauris iaculis pulvinar ipsum, ac vulputate lacus maximus sit amet.', 'Mauris quis turpis in orci ornare posuere at quis lorem.', 'Etiam egestas aliquam rhoncus.', 'Maecenas metus ex, lobortis malesuada rutrum viverra, pellentesque quis ligula.', 'Pellentesque a nunc orci.', 'Integer vitae elit sodales nisl aliquet luctus nec eu augue.', 'Pellentesque feugiat eget urna eu molestie.', 'Quisque dolor sem, gravida id nisl nec, sodales hendrerit sapien.', 'Vivamus vehicula neque lacus.', 'Duis non justo et nunc consequat sagittis non in eros.', 'Morbi nulla diam, interdum et massa at, eleifend lobortis nibh.', 'Nunc sollicitudin pharetra tincidunt.', 'Pellentesque nulla diam, bibendum ac dictum a, facilisis gravida est.', 'Fusce tempus turpis fringilla pellentesque pretium.', 'Aenean ultricies sapien dolor, ullamcorper auctor libero interdum eu.', 'Duis quis velit in urna laoreet imperdiet id ut sem.', 'Phasellus fermentum odio at tempor scelerisque.', 'Donec semper viverra ex, ut hendrerit ante tristique vel.', 'Cras vel tempor massa.', 'Sed lacinia viverra vestibulum.', 'Suspendisse libero elit, porta at enim eu, iaculis consectetur lectus.', 'Pellentesque aliquet lorem vehicula sapien sagittis, vel tristique augue venenatis.', 'Nunc felis diam, iaculis vitae tortor at, rutrum efficitur orci.', 'Pellentesque mi metus, luctus a tellus eget, eleifend elementum tortor.', 'Praesent aliquet quam efficitur urna blandit lacinia.', 'Pellentesque euismod sodales ultrices.', 'Quisque non arcu ut arcu molestie dapibus.', 'Aenean euismod lacus mi.', 'Morbi sodales massa sed nisl luctus, eget posuere tortor vehicula.', 'Etiam interdum convallis enim eu sagittis.', 'Mauris sollicitudin nisi eget diam placerat, ac malesuada ligula vestibulum.', 'Phasellus in vulputate elit.', 'Phasellus porta consequat scelerisque.', 'Ut tincidunt eget quam et faucibus.', 'Integer mi elit, blandit at vehicula non, porta ut odio.', 'Donec sollicitudin varius finibus.', 'Nam a venenatis massa.', 'Vivamus sit amet porta arcu.', 'Maecenas dui nunc, venenatis ac sem ac, elementum molestie tellus.', 'Aliquam scelerisque, velit ac venenatis vestibulum, nisi ante semper risus, in eleifend diam lorem ac ligula.', 'Aliquam pulvinar dui porttitor magna sagittis volutpat.', 'Sed posuere tortor a tellus tincidunt semper.', 'Nulla rhoncus id nunc vitae condimentum.', 'Donec efficitur faucibus ex eget varius.', 'Cras fermentum vestibulum tellus eget iaculis.', 'Vestibulum vestibulum nec purus eget semper.', 'Sed commodo purus arcu, a consequat felis sollicitudin a.', 'Suspendisse mollis lectus sed nulla dapibus gravida.', 'Sed fermentum sem et nunc venenatis luctus.', 'Phasellus dapibus est non magna iaculis, vel venenatis ex ornare.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Donec malesuada commodo nibh, id vulputate enim viverra at.', 'Sed venenatis viverra mattis.', 'Aliquam tincidunt dignissim sem, et sodales purus mollis id.', 'Nam vitae pellentesque augue, a congue magna.', 'Quisque vitae blandit nisi, sed vehicula magna.', 'Aliquam id diam ac purus sagittis pharetra mattis eget ipsum.', 'Praesent pulvinar gravida mi, a ornare velit cursus ac.', 'Aliquam bibendum vulputate consequat.', 'Nulla elit ante, ultrices quis eleifend eu, egestas quis ipsum.', 'Aliquam non auctor nulla.', 'Curabitur in condimentum ex.', 'Mauris vestibulum ligula sit amet tortor porttitor iaculis.', 'Ut sollicitudin sed nunc eu iaculis.', 'Integer mattis egestas tellus, et volutpat ligula placerat non.', 'Aliquam id sem interdum, scelerisque arcu in, tempus elit.', 'Etiam tristique, mi et euismod dictum, leo augue posuere tellus, vitae malesuada libero dolor sed urna.', 'Donec interdum dolor metus, ac euismod magna pulvinar in.', 'Cras at nunc at nibh sollicitudin tristique.', 'Nullam nec quam porta, rhoncus diam et, porta erat.', 'Vestibulum ut vestibulum ante.', 'Aliquam tincidunt blandit metus ac varius.', 'Vivamus sollicitudin, quam a suscipit volutpat, leo eros commodo ex, sed sodales urna diam sed nulla.', 'Sed a nibh ac felis efficitur luctus sit amet mollis leo.', 'Vestibulum tempor urna quis erat accumsan hendrerit.', 'Etiam euismod mauris non est iaculis dignissim.', 'Nunc in dui ultrices, placerat tellus ut, sollicitudin magna.', 'Aliquam efficitur nunc ac elit viverra laoreet.', 'Pellentesque dui nisl, viverra vitae venenatis eu, ultricies vel risus.', 'Integer volutpat quam non erat condimentum placerat.', 'Maecenas molestie odio vel ultrices porta.', 'Integer ullamcorper mollis elementum.', 'Suspendisse mi sapien, mattis ut posuere vitae, vulputate a nisi.', 'Nulla et ullamcorper odio.', 'Mauris nec arcu massa.', 'Aliquam quis eleifend ante.', 'Nulla dignissim pulvinar hendrerit.', 'Aenean lobortis tempus condimentum.', 'Sed rhoncus metus quis mi ullamcorper tincidunt.', 'Phasellus augue nisi, auctor quis posuere sed, mattis eu libero.', 'Mauris pharetra ac libero at sodales.', 'Cras efficitur enim ut tempor convallis.', 'Donec lectus lorem, consectetur quis felis sed, vehicula suscipit massa.', 'Vestibulum posuere viverra ultrices.', 'Vivamus mollis cursus nibh sed fermentum.', 'Vestibulum in varius ligula.', 'Donec sit amet est scelerisque, sodales odio a, dictum ligula.', 'Suspendisse consequat laoreet est, sit amet pulvinar elit sodales vitae.', 'Nunc fermentum sodales eros, at vestibulum arcu vulputate ut.', 'Integer faucibus aliquet eros.', 'Suspendisse metus quam, placerat nec rutrum a, feugiat vel sapien.', 'Donec malesuada, eros id blandit scelerisque, tellus libero ultricies leo, ac accumsan arcu metus eu nisl.', 'Integer vitae arcu turpis.', 'Sed eget congue orci, vel porta tellus.', 'Integer ligula nisl, finibus eu sollicitudin ac, malesuada vitae nisi.', 'Donec sit amet vulputate metus.', 'Phasellus enim sem, varius ac vulputate ut, dapibus id tellus.', 'Nam non malesuada metus.', 'Nulla scelerisque magna ut est imperdiet, ac luctus sem sodales.', 'In auctor neque enim, eu hendrerit eros fringilla nec.', 'Duis quis purus rhoncus, malesuada enim vitae, bibendum nisl.', 'Donec ut libero lacinia, tempus enim sed, volutpat metus.', 'In commodo posuere nisi vitae faucibus.', 'Maecenas felis odio, vehicula ac ullamcorper non, maximus id elit.', 'Curabitur congue urna in mi venenatis euismod.', 'Suspendisse hendrerit lacus ac risus tempor, non tristique urna venenatis.', 'Suspendisse cursus urna ornare, varius risus sit amet, lobortis eros.', 'Fusce consequat porttitor tortor in dignissim.', 'Ut nulla magna, semper posuere ex quis, pharetra tempor quam.', 'Morbi enim ligula, tincidunt id ligula ac, imperdiet pretium arcu.', 'Maecenas quis risus malesuada nisl efficitur pretium.', 'Integer at ante congue, luctus neque sed, dictum sem.', 'Phasellus luctus diam nec risus porttitor posuere.', 'Vestibulum ultrices tristique ex.', 'Nulla id lacus erat.', 'Vestibulum eu orci turpis.', 'Ut feugiat auctor interdum.', 'Donec at convallis dui.', 'Phasellus placerat vitae dui eu tincidunt.', 'Vestibulum metus lacus, fermentum id dignissim a, sagittis a nulla.', 'In fermentum turpis in dui dignissim iaculis.', 'Suspendisse vehicula ex vel imperdiet vestibulum.', 'Suspendisse lobortis felis non augue lacinia ornare.', 'Cras porta neque tellus, sed aliquet purus vulputate tincidunt.', 'Nullam non purus tellus.', 'Vivamus ex diam, condimentum ut ornare sit amet, venenatis pharetra turpis.', 'Aliquam dui tortor, volutpat ut molestie eu, efficitur id nibh.', 'Morbi vel ante et tellus rhoncus mattis id at sem.', 'In vel tellus sapien.', 'Aenean nec est finibus, iaculis orci a, molestie turpis.', 'Proin ornare eget odio luctus sodales.', 'Etiam hendrerit a nisl at pellentesque.', 'Nunc blandit blandit mauris, eget blandit nisi luctus eget.', 'Ut justo justo, imperdiet nec magna maximus, venenatis vestibulum leo.', 'Cras a consequat quam.', 'Vestibulum ac lectus ullamcorper, efficitur lacus quis, pulvinar est.', 'Aenean nec odio elit.', 'Nulla quis viverra odio, nec cursus erat.', 'Vestibulum lobortis est nec sem dignissim, id luctus tortor congue.', 'Morbi sollicitudin massa et justo aliquam volutpat.', 'Aliquam bibendum tristique lacus sit amet mattis.', 'Sed hendrerit lorem feugiat est feugiat ultricies.', 'Cras dictum, turpis id imperdiet volutpat, nisl orci facilisis mi, in tincidunt arcu diam et dui.', 'Mauris congue neque libero, vitae sagittis elit consectetur in.', 'Cras commodo at sem et fermentum.', 'Nullam consequat ligula in est pellentesque malesuada.', 'In sit amet elit ac erat varius posuere scelerisque in lectus.', 'Nunc ac odio vitae orci placerat mollis at in velit.', 'Vivamus interdum nunc quis velit viverra consectetur.', 'Nam sit amet semper arcu.', 'Donec pellentesque feugiat lorem nec elementum.', 'Quisque tincidunt maximus vehicula.', 'Curabitur fermentum, ligula in sollicitudin tempus, odio libero euismod augue, vulputate pellentesque lorem nisi vehicula nulla.', 'Aenean quis mauris et neque venenatis laoreet.', 'Fusce id porta augue.', 'Integer ac nunc vel enim ultrices consequat.', 'Mauris magna purus, congue ut sapien in, molestie luctus nulla.', 'Donec consequat, augue in tristique scelerisque, nisi sem tincidunt lorem, varius tempus est nunc in ipsum.', 'Duis dictum ut est ac viverra.', 'Aliquam id tincidunt ligula.', 'Aliquam vitae tortor ut massa eleifend imperdiet sed et ipsum.', 'Maecenas aliquet tellus ac nisl molestie, vel ullamcorper lacus vulputate.', 'Mauris nec lacus sapien.', 'Integer sagittis dolor sit amet velit mollis, quis sodales odio sollicitudin.', 'Vivamus varius, libero a cursus imperdiet, lorem diam vestibulum risus, eget dictum lectus dolor eget eros.', 'Nullam vitae orci varius, suscipit magna vitae, efficitur tellus.', 'Praesent sagittis mi nec rutrum tempus.', 'Aenean eu eros in erat gravida consectetur.', 'Nulla lacinia vitae urna commodo ultricies.', 'In interdum fermentum malesuada.', 'Nullam placerat vel velit vel blandit.', 'Praesent blandit tortor nec nisl viverra rutrum.', 'Fusce ac euismod dui.', 'Curabitur auctor quam dui, quis tristique ligula ornare et.', 'Vivamus tellus diam, fringilla a congue quis, porta sit amet diam.', 'Proin consectetur pulvinar malesuada.', 'Duis tempor maximus libero non fringilla.', 'Maecenas et elit leo.', 'Fusce porttitor ex tortor, iaculis mollis sapien scelerisque eu.', 'Etiam in dignissim tellus, nec egestas mauris.', 'Aenean lacinia nec sapien quis suscipit.', 'Ut id libero nec ligula laoreet rutrum.', 'In id hendrerit nisl, sed luctus tortor.', 'Morbi interdum augue justo, tincidunt suscipit sem lacinia vel.', 'Aliquam a massa tortor.', 'Sed eget libero id est pharetra laoreet.', 'Morbi molestie, ex eu aliquet maximus, ante felis efficitur ligula, ac commodo justo augue quis velit.', 'Integer lorem nulla, rhoncus a magna placerat, semper faucibus quam.', 'Curabitur imperdiet aliquet diam nec scelerisque.', 'Sed sed pellentesque risus.', 'Sed tortor odio, vestibulum et augue a, maximus congue turpis.', 'Donec pulvinar mi a enim rutrum, eu blandit neque molestie.', 'Nullam tristique pulvinar sapien, ut consectetur velit mollis eu.', 'Suspendisse vestibulum nisi leo, et ornare mi congue id.', 'Integer urna nulla, molestie vitae arcu in, finibus aliquet felis.', 'Maecenas laoreet venenatis felis, eget finibus urna pharetra sed.', 'Pellentesque in turpis a nulla rutrum egestas vel in augue.', 'Curabitur a lectus ac nulla porta vehicula.', 'Cras quis euismod massa.', 'Sed consequat arcu vitae gravida pellentesque.', 'Donec rhoncus rhoncus imperdiet.', 'Praesent pulvinar risus sed orci dignissim, sed tincidunt leo viverra.', 'Aenean euismod maximus posuere.', 'Quisque odio dolor, suscipit ut semper sed, molestie vel nulla.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Quisque auctor, leo at vulputate tempus, est nisi imperdiet arcu, nec dignissim nunc mauris eget enim.', 'Aliquam at posuere mauris, ac dignissim nibh.', 'Donec lacinia lobortis tempus.', 'Sed blandit varius enim et sodales.', 'Morbi pretium vulputate ipsum et congue.', 'Quisque scelerisque justo eget ullamcorper vestibulum.', 'Cras quis erat ut turpis dictum aliquam rutrum vel ligula.', 'Sed ipsum elit, congue quis mi sit amet, semper porttitor magna.', 'Nulla vel tellus condimentum, accumsan ex ac, gravida mauris.', 'Donec cursus, nisi in vestibulum suscipit, magna tellus interdum tortor, a tincidunt mi turpis vitae libero.', 'Etiam consectetur, odio ut iaculis convallis, ex nisl rhoncus enim, vitae placerat sem dui eu elit.', 'Curabitur libero tellus, posuere nec fringilla sed, lobortis nec quam.', 'Curabitur a augue ligula.', 'Mauris pharetra eu ante non porta.', 'Cras efficitur velit sem.', 'Proin at dapibus arcu.', 'Integer nec purus vitae lorem aliquam placerat.', 'Integer sem risus, ultricies sit amet magna eget, viverra vehicula augue.', 'Morbi blandit ligula vitae interdum euismod.', 'Vestibulum egestas eleifend pulvinar.', 'Ut euismod ex rutrum, viverra augue non, molestie libero.', 'Proin fringilla, urna eu condimentum pharetra, dolor quam sollicitudin mi, vitae consequat sapien purus id magna.', 'Phasellus pulvinar vel massa eu ullamcorper.', 'Etiam nunc leo, vestibulum a tortor quis, laoreet gravida odio.', 'Nullam tristique consequat lacus nec aliquam.', 'Pellentesque aliquet augue facilisis felis pretium faucibus.', 'Integer vitae ultrices diam, id lobortis ipsum.', 'In at augue nibh.', 'In non gravida ante.', 'Aenean euismod eros augue.', 'Nunc ac metus eget mauris bibendum cursus.', 'Etiam et massa eu lacus molestie dignissim.', 'Proin dictum ante non urna sollicitudin eleifend.', 'Mauris convallis ultricies neque sit amet semper.', 'Morbi venenatis euismod quam sed gravida.', 'Nulla elementum orci at justo scelerisque, vel dapibus eros scelerisque.', 'Integer accumsan augue posuere, suscipit odio vitae, tincidunt nisi.', 'Quisque in orci malesuada, lobortis neque sit amet, pretium odio.', 'Integer tempus eget sapien non sodales.', 'Nunc ut dapibus justo.', 'Quisque id nulla elit.', 'Nam quis justo eget velit convallis facilisis.', 'Quisque iaculis ex lectus, sit amet dictum neque pellentesque volutpat.', 'Aenean quis tellus lacinia, gravida elit non, interdum quam.', 'Donec interdum orci ut mauris molestie lacinia.', 'Donec risus elit, mattis et viverra placerat, viverra et turpis.', 'Morbi in sodales urna.', 'Nam lorem ligula, tempus vitae sapien eget, vulputate fermentum purus.', 'Aenean vulputate, nulla id euismod aliquet, leo ligula ornare tortor, sed tincidunt ante metus non felis.', 'Cras volutpat, dolor consectetur luctus volutpat, mauris eros laoreet augue, at vehicula purus enim in tortor.', 'Cras mi est, tincidunt in sapien nec, tempor cursus purus.', 'Phasellus rutrum nibh a sagittis feugiat.', 'Aenean sit amet tincidunt enim, at tincidunt justo.', 'Pellentesque maximus nisi vitae nibh porttitor gravida.', 'Aliquam ac sem mollis, pulvinar ligula id, rhoncus turpis.', 'Ut placerat turpis ac finibus fringilla.', 'Sed sit amet est eu tortor efficitur dignissim.', 'Mauris iaculis, dolor et lobortis congue, augue massa scelerisque libero, in lobortis augue dolor et libero.', 'Mauris pharetra finibus turpis, non maximus quam pellentesque sit amet.', 'Praesent feugiat, ex eu aliquam tempor, sem nibh tempus est, eget tincidunt velit quam eget est.', 'Donec imperdiet enim tellus, vel tincidunt felis accumsan rutrum.', 'Ut ac sem sit amet sem posuere luctus at in enim.', 'Nulla euismod, libero vel finibus lobortis, nulla libero porttitor tellus, a interdum odio quam nec nunc.', 'Phasellus quis efficitur risus, ullamcorper condimentum nisl.', 'Aliquam id neque a nunc vehicula euismod.', 'In nec ultrices eros.', 'Nunc sit amet purus neque.', 'Pellentesque pharetra nisl augue, sed sollicitudin eros faucibus eget.', 'Morbi sit amet lorem eget enim cursus vehicula.', 'Etiam euismod ante in venenatis bibendum.', 'Curabitur eu efficitur tortor.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Nulla interdum nibh in vehicula tincidunt.', 'Sed fringilla, magna non malesuada dignissim, quam enim mollis tellus, non faucibus purus tellus a ipsum.', 'Ut placerat varius diam in vulputate.', 'Mauris aliquet lectus id lacus iaculis, nec viverra ante porttitor.', 'Maecenas finibus posuere risus.', 'Suspendisse sit amet lectus quis nunc tincidunt bibendum eget in felis.', 'In mattis dolor purus, id tempus nibh rutrum ut.', 'Phasellus ornare diam ac elit laoreet, ac egestas ante maximus.', 'In et lacus leo.', 'Nam volutpat id nulla non dictum.', 'Nunc blandit eu libero at consequat.', 'In pretium lacus ac leo malesuada condimentum.', 'Maecenas congue dictum ultricies.', 'Sed id tincidunt ex.', 'Praesent gravida lectus id ante facilisis, nec pharetra justo rutrum.', 'Praesent cursus lobortis accumsan.', 'Aenean sed lacus ac arcu blandit placerat.', 'Quisque vitae ligula vitae elit congue dictum.', 'Nulla condimentum fermentum nulla ac porta.', 'Cras ornare diam vitae augue maximus, quis faucibus dui fermentum.', 'Maecenas porttitor porttitor felis, sit amet facilisis diam sodales vitae.', 'Aliquam dictum arcu tortor, et fringilla leo euismod vel.', 'Phasellus diam mauris, feugiat aliquet fermentum in, porttitor et tortor.', 'Maecenas ex justo, sagittis sed magna sed, efficitur venenatis felis.', 'In placerat et nunc et malesuada.', 'Aenean vehicula neque odio, vitae molestie mauris aliquet quis.', 'Vestibulum gravida dolor vel velit semper rhoncus.', 'Ut egestas sodales nulla quis rutrum.', 'Integer quis nisi nec enim vestibulum convallis.', 'Fusce eget nisi rutrum justo porttitor gravida nec nec ipsum.', 'Nam blandit nec leo ut porttitor.', 'Suspendisse a nunc sed ante fringilla fermentum consectetur vitae orci.', 'Nunc bibendum arcu erat, eu ullamcorper est placerat ac.', 'Sed facilisis, enim a tempor ullamcorper, metus nunc interdum lacus, id pellentesque mauris magna ut augue.', 'Curabitur eu dignissim velit.', 'Sed ut quam erat.', 'Sed faucibus sapien felis, ac malesuada nunc cursus et.', 'Sed vestibulum lacus nec sapien ultrices, at euismod tellus dignissim.', 'Integer sagittis vulputate lectus, sed scelerisque tortor pulvinar id.', 'Integer finibus venenatis massa, eget fringilla arcu placerat et.', 'Proin blandit neque a quam blandit mollis.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Suspendisse potenti.', 'Ut nec risus nisl.', 'Vestibulum rhoncus pellentesque augue fringilla molestie.', 'Praesent sodales eget sapien eu tristique.', 'In nec purus leo.', 'Vivamus vitae sem sed massa bibendum tempor eu id lectus.', 'Ut id pulvinar nunc.', 'Nam tempus dignissim lectus, ac pellentesque neque porttitor non.', 'Suspendisse lobortis rhoncus dui, ac ullamcorper dolor dignissim sed.', 'Vestibulum sollicitudin faucibus nisl a laoreet.', 'Aenean consequat purus et lorem suscipit, sed efficitur magna facilisis.', 'Mauris fermentum malesuada tortor et tempus.', 'Maecenas interdum rutrum pretium.', 'Morbi non neque eget lorem suscipit congue id eget quam.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Suspendisse malesuada purus nibh, volutpat facilisis lacus iaculis a.', 'Aenean eu pharetra ligula.', 'Nulla nec leo porta, sollicitudin nunc et, lacinia quam.', 'Nunc placerat tristique pellentesque.', 'Ut enim nisi, condimentum quis felis ut, vestibulum cursus mi.', 'Vestibulum ut ipsum eros.', 'Proin vehicula bibendum enim, vitae laoreet ante auctor vitae.', 'Donec imperdiet vestibulum congue.', 'Sed velit mi, dictum in commodo at, semper a neque.', 'Aliquam lectus turpis, vulputate at nunc et, laoreet porttitor turpis.', 'Nullam eu mauris eget augue tincidunt ornare.', 'Pellentesque sit amet leo eu nibh placerat sagittis.', 'Curabitur et finibus odio.', 'Fusce tristique non tellus sed egestas.', 'Donec metus nunc, consequat non lorem sit amet, vehicula venenatis nisl.', 'Aenean sodales molestie posuere.', 'Mauris ut sollicitudin orci.', 'Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia curae; Nunc sit amet condimentum mauris, eu fermentum risus.', 'Cras quis ante felis.', 'Pellentesque id interdum neque.', 'Phasellus erat tellus, tempus quis pulvinar volutpat, efficitur in odio.', 'Maecenas a venenatis ex, at tempor libero.', 'Pellentesque suscipit ipsum eget tincidunt faucibus.', 'Aenean sollicitudin est odio, vitae vulputate ligula dapibus sit amet.', 'Nam finibus arcu at dui blandit ornare.', 'Integer id dapibus est, ut fringilla lorem.', 'Aenean interdum quis dui nec pharetra.', 'Maecenas non diam non elit fringilla tempor id sollicitudin purus.', 'Nullam odio metus, aliquam eu dolor non, rutrum sodales mauris.', 'Maecenas nibh sem, faucibus at turpis vitae, tristique vestibulum eros.', 'Duis ac dolor eget augue luctus dignissim.', 'Fusce eget libero hendrerit, placerat purus id, volutpat tellus.', 'Fusce viverra, arcu ut molestie ultricies, dui tortor lacinia risus, eu auctor elit mauris eu orci.', 'Praesent semper mi eu mi dignissim, imperdiet dictum urna tincidunt.', 'Nulla in lorem id sapien rhoncus ultrices.', 'Quisque imperdiet, tortor ac viverra blandit, massa sapien auctor tortor, at fermentum ex tortor sit amet sem.', 'Fusce nisl libero, sollicitudin vel feugiat iaculis, consectetur ac diam.', 'Aliquam velit arcu, faucibus id ornare a, pharetra et leo.', 'Nullam eu fringilla felis.', 'Donec dignissim risus facilisis bibendum tincidunt.', 'Donec sed orci nunc.', 'Fusce at metus lobortis, iaculis ante laoreet, aliquam nisi.', 'Integer imperdiet ornare turpis, sed viverra felis mattis non.', 'Nulla tincidunt purus sed fermentum volutpat.', 'Phasellus eu dignissim massa, non consectetur lorem.', 'Praesent venenatis, nulla nec ornare vulputate, velit ipsum lobortis nisl, sed consequat nibh turpis vel diam.', 'Pellentesque pharetra odio lobortis, congue enim eu, sagittis velit.', 'Mauris sed molestie tortor.', 'Suspendisse consectetur nunc vel dolor aliquet, vel venenatis eros laoreet.', 'Nullam malesuada nulla libero, dapibus pretium mi iaculis a.', 'Donec placerat nibh leo, id tristique enim tincidunt sed.', 'Nunc elementum, arcu nec ultrices convallis, urna turpis finibus velit, eu euismod nibh orci ac eros.', 'Proin cursus pellentesque turpis, vel tempor lorem feugiat et.', 'Duis finibus aliquet lacus, at accumsan tortor posuere nec.', 'Aliquam id sem justo.', 'Suspendisse eget dignissim eros.', 'Suspendisse feugiat scelerisque ante id fermentum.', 'Quisque pellentesque libero et sem pellentesque ultrices.', 'Nunc dignissim magna interdum, feugiat odio sit amet, pharetra lacus.', 'Duis in sapien aliquam, volutpat est sit amet, semper risus.', 'Proin blandit enim at fermentum mattis.', 'Proin pharetra, orci a semper porttitor, purus felis placerat lorem, nec mollis orci tellus sit amet elit.', 'Duis sed convallis turpis, a pellentesque nibh.', 'Proin et est vel nisl dictum aliquam et et sem.', 'Donec sed augue laoreet, suscipit sem quis, consectetur est.', 'Sed hendrerit arcu quis porttitor vestibulum.', 'Pellentesque interdum lorem urna, et vestibulum enim pharetra et.', 'Cras sed neque sit amet erat aliquam semper.', 'Fusce sit amet eros lacus.', 'Cras commodo, nibh et sodales vehicula, eros magna pellentesque orci, et laoreet lacus dolor nec enim.', 'Donec et quam ultrices, porta justo tempor, auctor odio.', 'Morbi eget iaculis nisi, ut finibus orci.', 'Sed tellus lectus, consequat nec dapibus ac, interdum vitae lectus.', 'Praesent accumsan vehicula lacus ut efficitur.', 'Integer sit amet nisl non odio tincidunt sodales eu ut mauris.', 'Duis euismod risus quis iaculis sodales.', 'Pellentesque eleifend vel lorem sed iaculis.', 'Duis a mollis ex.', 'Nunc ullamcorper id felis non aliquam.', 'Etiam ut posuere lacus.', 'Duis quis elementum ligula.', 'Aenean pulvinar elit a eleifend luctus.', 'Vivamus accumsan tincidunt dui in pellentesque.', 'Cras vulputate metus at suscipit vestibulum.', 'Phasellus ultrices consequat lectus, ac tristique nunc mattis sit amet.', 'Donec arcu diam, mollis ut euismod sit amet, tincidunt vel ipsum.', 'Maecenas aliquet orci ac rutrum laoreet.', 'Quisque maximus nisl sed sapien posuere pulvinar.', 'Sed interdum, ante et laoreet iaculis, arcu ligula pretium mi, eget bibendum lorem urna sit amet odio.', 'Praesent consequat nisl quis tellus mollis eleifend.', 'Suspendisse facilisis sem vitae sapien rhoncus, ac dignissim nunc pharetra.', 'Quisque id lacus ut neque vehicula pellentesque.', 'Quisque scelerisque, risus sit amet condimentum consectetur, quam erat pretium tellus, ut tempor metus lectus suscipit risus.', 'Vestibulum eu lectus eget nunc aliquet laoreet.', 'Phasellus ornare neque auctor pulvinar pellentesque.', 'Aliquam vitae dignissim mi, ac mattis nibh.', 'Duis vitae porta velit, ac efficitur mauris.', 'Sed id nisi nisl.', 'Donec malesuada odio in posuere eleifend.', 'Cras ut mi quis tortor mollis iaculis sed a dolor.', 'Nunc a nisi vitae orci consectetur semper.', 'Mauris tristique pharetra mattis.', 'Sed iaculis fermentum mauris a tempus.', 'Quisque cursus, mauris vel tristique molestie, massa libero suscipit orci, ut sodales lacus massa vel odio.', 'Sed eu mi sapien.', 'Sed tincidunt ullamcorper mauris nec lobortis.', 'Suspendisse at est in diam elementum commodo.', 'Ut felis lectus, hendrerit sit amet vulputate sit amet, cursus sit amet quam.', 'Quisque porta arcu ac purus scelerisque, ac aliquam mi ornare.', 'Sed ultricies vitae purus et convallis.', 'Nunc ligula nisl, malesuada ut scelerisque ac, maximus quis odio.', 'Duis nisi arcu, commodo ac purus ac, dignissim convallis arcu.', 'Suspendisse eget scelerisque libero.', 'Morbi dolor purus, vehicula id sem sed, condimentum viverra eros.', 'Etiam bibendum commodo enim, ac vulputate ex consequat aliquet.', 'Pellentesque porta nisl sit amet tortor dignissim, quis aliquet arcu consectetur.', 'Integer et pretium lectus.', 'Fusce efficitur posuere ipsum, ut convallis nisl sollicitudin ultrices.', 'Sed suscipit elementum est, vitae laoreet tellus dignissim ultricies.', 'Praesent viverra nisl ut odio posuere, tristique finibus nibh rhoncus.', 'Aliquam consequat ornare orci ac ultrices.', 'Vivamus vel lacinia velit.', 'Vestibulum sit amet sapien sit amet velit tincidunt suscipit.', 'Etiam ornare molestie aliquet.', 'Sed gravida enim quis nunc interdum imperdiet.', 'Proin cursus odio ac dolor blandit, quis sollicitudin ante rutrum.', ]; export const maxStringLength = 190; ================================================ FILE: drizzle-seed/src/datasets/phonesInfo.ts ================================================ /** * The original source for the phones info data was taken from https://www.kaggle.com/datasets/leighplt/country-code?select=mobile_telephone_prefixes_by_country.csv * * Data format is: ["country prefix, operator prefix, number length including operator prefix and excluding country prefix"] */ export default [ '93,70,9', '93,71,9', '93,72,9', '93,73,9', '93,74,9', '93,75,9', '93,76,9', '93,77,9', '93,78,9', '93,79,9', '355,66,9', '355,67,9', '355,68,9', '355,69,9', '213,5,9', '213,6,9', '213,7,9', '1,684,10', '1,264 772,10', '1,268 7,10', '374,55,6', '374,95,6', '374,41,6', '374,44,6', '374,77,6', '374,93,6', '374,94,6', '374,98,6', '374,91,6', '374,99,6', '374,43,6', '374,97,6', '61,4,9', '672,1,9', '43,650,10', '43,660,10', '43,664,10', '43,676,10', '43,680,10', '43,677,11', '43,681,11', '43,688,11', '43,699,11', '994,41,9', '994,50,9', '994,51,9', '994,55,9', '994,70,9', '994,77,9', '994,99,9', '1,242 35,10', '1,242 45,10', '1,242 55,10', '973,31,8', '973,322,8', '973,33,8', '973,340,8', '973,341,8', '973,343,8', '973,344,8', '973,345,8', '973,353,8', '973,355,8', '973,36,8', '973,377,8', '973,383,8', '973,384,8', '973,388,8', '973,39,8', '973,663,8', '973,666,8', '973,669,8', '880,13,10', '880,14,10', '880,15,10', '880,16,10', '880,17,10', '880,18,10', '880,19,10', '1,246,10', '32,456,9', '32,47,9', '32,48,9', '32,49,9', '501,6,7', '1,441,10', '387,60,8', '387,69,8', '387,62,8', '387,63,8', '387,64,8', '387,65,8', '387,66,8', '246,387,7', '1,284,10', '359,87,9', '359,88,9', '359,89,9', '359,988,9', '226,70,8', '226,71,8', '226,72,8', '226,74,8', '226,75,8', '226,77,8', '226,78,8', '226,79,8', '855,92,9', '855,12,9', '855,11,9', '855,77,9', '855,99,9', '1,345,10', '235,66,8', '235,63,8', '235,65,8', '235,99,8', '235,95,8', '235,93,8', '235,90,8', '235,77,8', '56,9,9', '86,13,11', '86,15,11', '86,18,11', '86,19,11', '57,30,10', '57,310,10', '57,311,10', '57,312,10', '57,313,10', '57,314,10', '57,315,10', '57,316,10', '57,317,10', '57,318,10', '57,319,10', '57,32,10', '682,5,5', '682,7,5', '506,6,8', '506,7,8', '506,8,8', '385,91,9', '385,92,9', '385,95,9', '385,97,9', '385,98,9', '385,99,9', '357,94,8', '357,95,8', '357,96,8', '357,97,8', '357,99,8', '420,601,9', '420,602,9', '420,603,9', '420,604,9', '420,605,9', '420,606,9', '420,607,9', '420,608,9', '420,702,9', '420,72,9', '420,73,9', '420,77,9', '420,790,9', '45,2,8', '45,30,8', '45,31,8', '45,40,8', '45,41,8', '45,42,8', '45,50,8', '45,51,8', '45,52,8', '45,53,8', '45,60,8', '45,61,8', '45,71,8', '45,81,8', '1,767 2,10', '1,809,10', '1,829,10', '1,849,10', '670,77,8', '670,78,8', '593,9,9', '20,10,10', '20,11,10', '20,12,10', '503,6,8', '503,7,8', '268,7,8', '500,5,5', '500,6,5', '298,21,5', '298,22,5', '298,23,5', '298,24,5', '298,25,5', '298,26,5', '298,27,5', '298,28,5', '298,29,5', '298,5,5', '298,71,5', '298,72,5', '298,73,5', '298,74,5', '298,75,5', '298,76,5', '298,77,5', '298,78,5', '298,79,5', '298,91,5', '298,92,5', '298,93,5', '298,94,5', '298,95,5', '298,96,5', '298,97,5', '298,98,5', '298,99,5', '691,92,7', '691,93,7', '691,95,7', '691,97,7', '358,457,10', '33,6,9', '33,700,9', '33,73,9', '33,74,9', '33,75,9', '33,76,9', '33,77,9', '33,78,9', '594,694,9', '241,2,7', '241,3,7', '241,4,7', '241,5,7', '241,6,7', '241,7,7', '995,544,9', '995,514,9', '995,551,9', '995,555,9', '995,557,9', '995,558,9', '995,568,9', '995,570,9', '995,571,9', '995,574,9', '995,577,9', '995,578,9', '995,579,9', '995,591,9', '995,592,9', '995,593,9', '995,595,9', '995,596,9', '995,597,9', '995,598,9', '995,599,9', '49,151,10', '49,152,10', '49,155,10', '49,157,10', '49,159,10', '49,162,10', '49,163,10', '49,170,10', '49,171,10', '49,172,10', '49,173,10', '49,174,10', '49,175,10', '49,176,10', '49,177,10', '49,178,10', '49,179,10', '233,20,9', '233,50,9', '233,23,9', '233,24,9', '233,54,9', '233,55,9', '233,59,9', '233,26,9', '233,56,9', '233,27,9', '233,57,9', '233,28,9', '30,690,10', '30,693,10', '30,694,10', '30,695,10', '30,697,10', '30,698,10', '30,699,10', '1,473 41,10', '1,671,10', '502,231,8', '502,2324,8', '502,2326,8', '502,2327,8', '502,2328,8', '502,2329,8', '502,2428,8', '502,2429,8', '502,30,8', '502,310,8', '502,311,8', '502,448,8', '502,449,8', '502,45,8', '502,46,8', '502,478,8', '502,479,8', '502,480,8', '502,481,8', '502,49,8', '502,5,8', '1,808,10', '504,3,8', '504,7,8', '504,8,8', '504,9,8', '36,20,9', '36,30,9', '36,31,9', '36,38,9', '36,50,9', '36,60,9', '36,70,9', '91,7,10', '91,8,10', '91,90,10', '91,91,10', '91,92,10', '91,93,10', '91,94,10', '91,95,10', '91,96,10', '91,97,10', '91,98,10', '91,99,10', '62,811,9', '62,813,11', '62,814,11', '62,815,10', '62,818,9', '62,819,10', '62,838,10', '62,852,11', '62,853,11', '62,855,10', '62,858,11', '62,859,11', '62,878,11', '62,896,10', '62,897,10', '62,898,10', '62,899,10', '98,91,10', '98,990,10', '353,8,9', '353,83,9', '353,85,9', '353,86,9', '353,87,9', '353,89,9', '972,50,9', '972,52,9', '972,53,9', '972,54,9', '972,556,9', '972,558,9', '972,559,9', '972,58,9', '39,310,10', '39,31100,10', '39,31101,10', '39,31105,10', '39,313,10', '39,319,10', '39,320,10', '39,324,10', '39,327,10', '39,328,10', '39,329,10', '39,331,10', '39,333,10', '39,334,10', '39,338,10', '39,339,10', '39,340,10', '39,342,10', '39,344,10', '39,345,10', '39,346,10', '39,347,10', '39,348,10', '39,349,10', '39,3505,10', '39,3510,10', '39,3512,10', '39,366,10', '39,370,10', '39,3710,10', '39,3711,10', '39,373,10', '39,377,10', '39,380,10', '39,385,10', '39,388,10', '39,389,10', '39,391,10', '39,392,10', '39,393,10', '1,876,10', '81,060,11', '81,070,11', '81,080,11', '81,090,11', '254,10,10', '254,11,10', '254,70,10', '254,71,10', '254,72,10', '254,73,10', '254,74,10', '254,75,10', '254,763,10', '254,77,10', '254,78,10', '686,63,8', '686,7,8', '383,44,8', '383,45,8', '383,49,8', '965,5,8', '965,6,8', '965,9,8', '371,2,8', '231,46,7', '231,47,7', '231,5,7', '231,64,7', '231,65,7', '231,7,8', '218,91,10', '218,92,10', '218,94,10', '370,6,8', '352,621,9', '352,628,9', '352,661,9', '352,668,9', '352,691,9', '352,698,9', '60,11,7', '60,12,7', '60,13,7', '60,14,7', '60,16,7', '60,17,7', '60,18,7', '60,19,7', '960,7,7', '960,9,7', '223,6,8', '223,7,8', '596,696,9', '230,57,8', '230,58,8', '230,59,8', '230,54,8', '52,1,10', '373,60,8', '373,65,8', '373,67,8', '373,68,8', '373,69,8', '373,78,8', '373,79,8', '976,70,8', '976,88,8', '976,89,8', '976,91,8', '976,93,8', '976,94,8', '976,95,8', '976,96,8', '976,98,8', '976,99,8', '382,60,8', '382,63,8', '382,66,8', '382,67,8', '382,68,8', '382,69,8', '1,664,10', '95,92,8', '95,925,10', '95,926,10', '95,943,9', '95,94,10', '95,944,9', '95,95,8', '95,96,8', '95,973,9', '95,991,9', '95,93,9', '95,996,10', '95,997,10', '95,977,10', '95,978,10', '95,979,10', '977,98,10', '31,6,9', '687,7,6', '687,8,6', '687,9,6', '64,22,9', '64,27,9', '505,8,8', '227,9,8', '234,804,8', '234,805,8', '234,803,8', '234,802,8', '234,809,8', '683,1,4', '683,3,4', '683,4,4', '672,38,6', '389,70,8', '389,71,8', '389,72,8', '389,74,8', '389,75,8', '389,76,8', '389,77,8', '389,78,8', '389,79,8', '90,533,7', '1,670,10', '47,4,8', '47,59,8', '47,9,8', '968,91,8', '92,30,10', '92,31,10', '92,32,10', '92,33,10', '92,34,10', '507,6,8', '595,9,9', '51,9,9', '63,973,10', '63,974,10', '63,905,10', '63,906,10', '63,977,10', '63,915,10', '63,916,10', '63,926,10', '63,927,10', '63,935,10', '63,936,10', '63,937,10', '63,996,10', '63,997,10', '63,917,10', '63,979,10', '63,920,10', '63,930,10', '63,938,10', '63,939,10', '63,907,10', '63,908,10', '63,909,10', '63,910,10', '63,912,10', '63,919,10', '63,921,10', '63,928,10', '63,929,10', '63,947,10', '63,948,10', '63,949,10', '63,989,10', '63,918,10', '63,999,10', '63,922,10', '63,923,10', '63,932,10', '63,933,10', '63,942,10', '63,943,10', '48,50,9', '48,45,9', '48,51,9', '48,53,9', '48,57,9', '48,60,9', '48,66,9', '48,69,9', '48,72,9', '48,73,9', '48,78,9', '48,79,9', '48,88,9', '351,91,9', '351,921,9', '351,922,9', '351,924,9', '351,925,9', '351,926,9', '351,927,9', '351,9290,9', '351,9291,9', '351,9292,9', '351,9293,9', '351,9294,9', '351,93,9', '351,96,9', '1,787,10', '1,939,10', '974,33,8', '974,55,8', '974,66,8', '974,77,8', '1,869,10', '1,758,10', '1,784,10', '685,77,5', '966,50,9', '966,51,9', '966,53,9', '966,54,9', '966,55,9', '966,56,9', '966,57,9', '966,58,9', '966,59,9', '381,60,9', '381,61,9', '381,62,9', '381,63,9', '381,64,9', '381,65,9', '381,66,9', '381,677,9', '381,68,9', '381,69,8', '65,8,8', '65,9,8', '1,721,10', '421,901,9', '421,902,9', '421,903,9', '421,904,9', '421,905,9', '421,906,9', '421,907,9', '421,908,9', '421,910,9', '421,911,9', '421,912,9', '421,914,9', '421,915,9', '421,916,9', '421,917,9', '421,918,9', '421,940,9', '421,944,9', '421,948,9', '421,949,9', '421,950,9', '421,951,9', '386,20,8', '386,21,8', '386,30,8', '386,31,8', '386,40,8', '386,41,8', '386,49,8', '386,50,8', '386,51,8', '386,60,8', '386,61,8', '386,64,8', '386,70,8', '386,71,8', '677,74,7', '677,75,7', '27,60,9', '27,710,9', '27,711,9', '27,712,9', '27,713,9', '27,714,9', '27,715,9', '27,716,9', '27,717,9', '27,718,9', '27,719,9', '27,72,9', '27,73,9', '27,74,9', '27,741,9', '27,76,9', '27,78,9', '27,79,9', '27,811,9', '27,812,9', '27,813,9', '27,814,9', '27,82,9', '27,83,9', '27,84,9', '34,6,9', '34,7,9', '94,70,7', '94,71,7', '94,72,7', '94,75,7', '94,76,7', '94,77,7', '94,78,7', '46,70,7', '46,71 0,10', '46,73 00,7', '46,73 01,7', '46,73 10,7', '46,73 11,7', '46,73 12,7', '46,73 13,7', '46,73 16,7', '46,73 170,7', '46,73 18,7', '46,73 19,7', '46,73 20,7', '46,73 23,7', '46,73 27,7', '46,73 28,7', '46,73 29,7', '46,73 3,7', '46,73 455,7', '46,73 456,7', '46,73 6,7', '46,73 85,7', '46,73 86,7', '46,73 87,7', '46,73 88,7', '46,73 89,7', '46,73 9,7', '41,74,9', '41,75,9', '41,76,9', '41,77,9', '41,78,9', '41,79,9', '963,93,9', '963,98,9', '963,99,9', '963,94,9', '963,95,9', '963,96,9', '886,9,9', '66,6,9', '66,8,9', '66,9,9', '228,90,8', '228,91,8', '228,92,8', '228,97,8', '228,98,8', '228,99,8', '1,868,10', '216,2,8', '216,3,8', '216,4,8', '216,5,8', '216,9,8', '90,50,11', '90,53,11', '90,54,11', '90,55,11', '1,649,10', '380,39,9', '380,50,9', '380,63,9', '380,66,9', '380,67,9', '380,68,9', '380,91,9', '380,92,9', '380,93,9', '380,94,9', '380,95,9', '380,96,9', '380,97,9', '380,98,9', '380,99,9', '971,50,9', '971,52,9', '971,54,9', '971,55,9', '971,56,9', '971,58,9', '44,71,10', '44,72,10', '44,73,10', '44,74,10', '44,75,10', '44,7624,10', '44,77,10', '44,78,10', '44,79,10', '598,91,8', '598,93,8', '598,94,8', '598,95,8', '598,96,8', '598,97,8', '598,98,8', '598,99,8', '39,06 698,10', '58,4,7', '58,412,7', '58,414,7', '58,416,7', '58,424,7', '58,426,7', '1,340,10', '967,7,9', '967,70,9', '967,71,9', '967,73,9', '967,77,9', ]; ================================================ FILE: drizzle-seed/src/datasets/states.ts ================================================ export default [ 'Alabama', 'Alaska', 'Arizona', 'Arkansas', 'California', 'Colorado', 'Connecticut', 'Delaware', 'Florida', 'Georgia', 'Hawaii', 'Idaho', 'Illinois', 'Indiana', 'Iowa', 'Kansas', 'Kentucky', 'Louisiana', 'Maine', 'Maryland', 'Massachusetts', 'Michigan', 'Minnesota', 'Mississippi', 'Missouri', 'Montana', 'Nebraska', 'Nevada', 'New Hampshire', 'New Jersey', 'New Mexico', 'New York', 'North Carolina', 'North Dakota', 'Ohio', 'Oklahoma', 'Oregon', 'Pennsylvania', 'Rhode Island', 'South Carolina', 'South Dakota', 'Tennessee', 'Texas', 'Utah', 'Vermont', 'Virginia', 'Washington', 'West Virginia', 'Wisconsin', 'Wyoming', ]; export const maxStringLength = 14; ================================================ FILE: drizzle-seed/src/datasets/streetSuffix.ts ================================================ /** * The original data was taken from the сopycat library: https://github.com/supabase-community/copycat/blob/main/src/locales/en/address/street_suffix.ts */ export default [ 'Alley', 'Avenue', 'Branch', 'Bridge', 'Brook', 'Brooks', 'Burg', 'Burgs', 'Bypass', 'Camp', 'Canyon', 'Cape', 'Causeway', 'Center', 'Centers', 'Circle', 'Circles', 'Cliff', 'Cliffs', 'Club', 'Common', 'Corner', 'Corners', 'Course', 'Court', 'Courts', 'Cove', 'Coves', 'Creek', 'Crescent', 'Crest', 'Crossing', 'Crossroad', 'Curve', 'Dale', 'Dam', 'Divide', 'Drive', 'Drives', 'Estate', 'Estates', 'Expressway', 'Extension', 'Extensions', 'Fall', 'Falls', 'Ferry', 'Field', 'Fields', 'Flat', 'Flats', 'Ford', 'Fords', 'Forest', 'Forge', 'Forges', 'Fork', 'Forks', 'Fort', 'Freeway', 'Garden', 'Gardens', 'Gateway', 'Glen', 'Glens', 'Green', 'Greens', 'Grove', 'Groves', 'Harbor', 'Harbors', 'Haven', 'Heights', 'Highway', 'Hill', 'Hills', 'Hollow', 'Inlet', 'Island', 'Islands', 'Isle', 'Junction', 'Junctions', 'Key', 'Keys', 'Knoll', 'Knolls', 'Lake', 'Lakes', 'Land', 'Landing', 'Lane', 'Light', 'Lights', 'Loaf', 'Lock', 'Locks', 'Lodge', 'Loop', 'Mall', 'Manor', 'Manors', 'Meadow', 'Meadows', 'Mews', 'Mill', 'Mills', 'Mission', 'Motorway', 'Mount', 'Mountain', 'Mountains', 'Neck', 'Orchard', 'Oval', 'Overpass', 'Park', 'Parks', 'Parkway', 'Parkways', 'Pass', 'Passage', 'Path', 'Pike', 'Pine', 'Pines', 'Place', 'Plain', 'Plains', 'Plaza', 'Point', 'Points', 'Port', 'Ports', 'Prairie', 'Radial', 'Ramp', 'Ranch', 'Rapid', 'Rapids', 'Rest', 'Ridge', 'Ridges', 'River', 'Road', 'Roads', 'Route', 'Row', 'Rue', 'Run', 'Shoal', 'Shoals', 'Shore', 'Shores', 'Skyway', 'Spring', 'Springs', 'Spur', 'Spurs', 'Square', 'Squares', 'Station', 'Stravenue', 'Stream', 'Street', 'Streets', 'Summit', 'Terrace', 'Throughway', 'Trace', 'Track', 'Trafficway', 'Trail', 'Tunnel', 'Turnpike', 'Underpass', 'Union', 'Unions', 'Valley', 'Valleys', 'Via', 'Viaduct', 'View', 'Views', 'Village', 'Villages', 'Ville', 'Vista', 'Walk', 'Walks', 'Wall', 'Way', 'Ways', 'Well', 'Wells', ]; export const maxStringLength = 10; ================================================ FILE: drizzle-seed/src/index.ts ================================================ /* eslint-disable drizzle-internal/require-entity-kind */ import { createTableRelationsHelpers, extractTablesRelationalConfig, getTableName, is, One, Relations, sql, } from 'drizzle-orm'; import type { MySqlColumn, MySqlSchema } from 'drizzle-orm/mysql-core'; import { getTableConfig as getMysqlTableConfig, MySqlDatabase, MySqlTable } from 'drizzle-orm/mysql-core'; import type { PgArray, PgColumn, PgSchema } from 'drizzle-orm/pg-core'; import { getTableConfig as getPgTableConfig, PgDatabase, PgTable } from 'drizzle-orm/pg-core'; import type { SQLiteColumn } from 'drizzle-orm/sqlite-core'; import { BaseSQLiteDatabase, getTableConfig as getSqliteTableConfig, SQLiteTable } from 'drizzle-orm/sqlite-core'; import { generatorsFuncs, generatorsFuncsV2 } from './services/GeneratorFuncs.ts'; import type { AbstractGenerator } from './services/Generators.ts'; import { SeedService } from './services/SeedService.ts'; import type { DrizzleStudioObjectType, DrizzleStudioRelationType } from './types/drizzleStudio.ts'; import type { RefinementsType } from './types/seedService.ts'; import type { Column, Relation, RelationWithReferences, Table } from './types/tables.ts'; type InferCallbackType< DB extends | PgDatabase | MySqlDatabase | BaseSQLiteDatabase, SCHEMA extends { [key: string]: PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations; }, > = DB extends PgDatabase ? SCHEMA extends { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations; } ? { // iterates through schema fields. example -> schema: {"tableName": PgTable} [ table in keyof SCHEMA as SCHEMA[table] extends PgTable ? table : never ]?: { count?: number; columns?: { // iterates through table fields. example -> table: {"columnName": PgColumn} [ column in keyof SCHEMA[table] as SCHEMA[table][column] extends PgColumn ? column : never ]?: AbstractGenerator; }; with?: { [ refTable in keyof SCHEMA as SCHEMA[refTable] extends PgTable ? refTable : never ]?: | number | { weight: number; count: number | number[] }[]; }; }; } : {} : DB extends MySqlDatabase ? SCHEMA extends { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations; } ? { // iterates through schema fields. example -> schema: {"tableName": MySqlTable} [ table in keyof SCHEMA as SCHEMA[table] extends MySqlTable ? table : never ]?: { count?: number; columns?: { // iterates through table fields. example -> table: {"columnName": MySqlColumn} [ column in keyof SCHEMA[table] as SCHEMA[table][column] extends MySqlColumn ? column : never ]?: AbstractGenerator; }; with?: { [ refTable in keyof SCHEMA as SCHEMA[refTable] extends MySqlTable ? refTable : never ]?: | number | { weight: number; count: number | number[] }[]; }; }; } : {} : DB extends BaseSQLiteDatabase ? SCHEMA extends { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations; } ? { // iterates through schema fields. example -> schema: {"tableName": SQLiteTable} [ table in keyof SCHEMA as SCHEMA[table] extends SQLiteTable ? table : never ]?: { count?: number; columns?: { // iterates through table fields. example -> table: {"columnName": SQLiteColumn} [ column in keyof SCHEMA[table] as SCHEMA[table][column] extends SQLiteColumn ? column : never ]?: AbstractGenerator; }; with?: { [ refTable in keyof SCHEMA as SCHEMA[refTable] extends SQLiteTable ? refTable : never ]?: | number | { weight: number; count: number | number[] }[]; }; }; } : {} : {}; class SeedPromise< DB extends | PgDatabase | MySqlDatabase | BaseSQLiteDatabase, SCHEMA extends { [key: string]: PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations; }, VERSION extends string | undefined, > implements Promise { static readonly entityKind: string = 'SeedPromise'; [Symbol.toStringTag] = 'SeedPromise'; constructor( private db: DB, private schema: SCHEMA, private options?: { count?: number; seed?: number; version?: VERSION }, ) {} then( onfulfilled?: | ((value: void) => TResult1 | PromiseLike) | null | undefined, onrejected?: | ((reason: any) => TResult2 | PromiseLike) | null | undefined, ): Promise { return seedFunc(this.db, this.schema, this.options).then( onfulfilled, onrejected, ); } catch( onrejected?: | ((reason: any) => TResult | PromiseLike) | null | undefined, ): Promise { return this.then(undefined, onrejected); } finally(onfinally?: (() => void) | null | undefined): Promise { return this.then( (value) => { onfinally?.(); return value; }, (reason) => { onfinally?.(); throw reason; }, ); } async refine( callback: ( funcs: FunctionsVersioning, ) => InferCallbackType, ): Promise { const refinements = this.options?.version === undefined || this.options.version === '2' ? callback(generatorsFuncsV2 as FunctionsVersioning) as RefinementsType : callback(generatorsFuncs as FunctionsVersioning) as RefinementsType; await seedFunc(this.db, this.schema, this.options, refinements); } } type FunctionsVersioning = VERSION extends `1` ? typeof generatorsFuncs : VERSION extends `2` ? typeof generatorsFuncsV2 : typeof generatorsFuncsV2; export function getGeneratorsFunctions() { return generatorsFuncs; } export async function seedForDrizzleStudio( { sqlDialect, drizzleStudioObject, drizzleStudioRelations, schemasRefinements, options }: { sqlDialect: 'postgresql' | 'mysql' | 'sqlite'; drizzleStudioObject: DrizzleStudioObjectType; drizzleStudioRelations: DrizzleStudioRelationType[]; schemasRefinements?: { [schemaName: string]: RefinementsType }; options?: { count?: number; seed?: number }; }, ) { const generatedSchemas: { [schemaName: string]: { tables: { tableName: string; rows: { [columnName: string]: string | number | boolean | undefined; }[]; }[]; }; } = {}; let tables: Table[], relations: Relation[], refinements: RefinementsType | undefined; drizzleStudioRelations = drizzleStudioRelations.filter((rel) => rel.type === 'one'); for (const [schemaName, { tables: drizzleStudioTables }] of Object.entries(drizzleStudioObject)) { tables = []; for (const [tableName, table] of Object.entries(drizzleStudioTables)) { const drizzleStudioColumns = Object.values(table.columns); const columns = drizzleStudioColumns.map((col) => ({ name: col.name, dataType: 'string', columnType: col.type, // TODO: revise later typeParams: {}, default: col.default, hasDefault: col.default === undefined ? false : true, isUnique: col.isUnique === undefined ? false : col.isUnique, notNull: col.notNull, primary: col.primaryKey, })); tables.push( { name: tableName, columns, primaryKeys: drizzleStudioColumns.filter((col) => col.primaryKey === true).map((col) => col.name), }, ); } relations = drizzleStudioRelations.filter((rel) => rel.schema === schemaName && rel.refSchema === schemaName); const isCyclicRelations = relations.map( (reli) => { if (relations.some((relj) => reli.table === relj.refTable && reli.refTable === relj.table)) { return { ...reli, isCyclic: true }; } return { ...reli, isCyclic: false }; }, ); refinements = schemasRefinements !== undefined && schemasRefinements[schemaName] !== undefined ? schemasRefinements[schemaName] : undefined; const seedService = new SeedService(); const generatedTablesGenerators = seedService.generatePossibleGenerators( sqlDialect, tables, isCyclicRelations, refinements, options, ); const generatedTables = await seedService.generateTablesValues( isCyclicRelations, generatedTablesGenerators, undefined, undefined, { ...options, preserveData: true, insertDataInDb: false }, ); generatedSchemas[schemaName] = { tables: generatedTables }; } return generatedSchemas; } /** * @param db - database you would like to seed. * @param schema - object that contains all your database tables you would like to seed. * @param options - object that contains properties `count` and `seed`: * * `count` - number of rows you want to generate. * * `seed` - a number that controls the state of generated data. (if the `seed` number is the same and nothing is changed in the seeding script, generated data will remain the same each time you seed database) * * @returns SeedPromise - a class object that has a refine method that is used to change generators for columns. * * @example * ```ts * // base seeding * await seed(db, schema); * * // seeding with count specified * await seed(db, schema, { count: 100000 }); * * // seeding with count and seed specified * await seed(db, schema, { count: 100000, seed: 1 }); * * //seeding using refine * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * name: funcs.firstName({ isUnique: true }), * email: funcs.email(), * phone: funcs.phoneNumber({ template: "+380 99 ###-##-##" }), * password: funcs.string({ isUnique: true }), * }, * count: 100000, * }, * posts: { * columns: { * title: funcs.valuesFromArray({ * values: ["Title1", "Title2", "Title3", "Title4", "Title5"], * }), * content: funcs.loremIpsum({ sentencesCount: 3 }), * }, * }, * })); * * ``` */ export function seed< DB extends | PgDatabase | MySqlDatabase | BaseSQLiteDatabase, SCHEMA extends { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations | any; }, VERSION extends '2' | '1' | undefined, >(db: DB, schema: SCHEMA, options?: { count?: number; seed?: number; version?: VERSION }) { return new SeedPromise(db, schema, options); } const seedFunc = async ( db: PgDatabase | MySqlDatabase | BaseSQLiteDatabase, schema: { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations | any; }, options: { count?: number; seed?: number; version?: string } = {}, refinements?: RefinementsType, ) => { let version: number | undefined; if (options?.version !== undefined) { version = Number(options?.version); } if (is(db, PgDatabase)) { await seedPostgres(db, schema, { ...options, version }, refinements); } else if (is(db, MySqlDatabase)) { await seedMySql(db, schema, { ...options, version }, refinements); } else if (is(db, BaseSQLiteDatabase)) { await seedSqlite(db, schema, { ...options, version }, refinements); } else { throw new Error( 'The drizzle-seed package currently supports only PostgreSQL, MySQL, and SQLite databases. Please ensure your database is one of these supported types', ); } return; }; /** * deletes all data from specified tables * * @param db - database you would like to reset. * @param schema - object that contains all your database tables you would like to delete data from. * * `If db is a PgDatabase object`, we will execute sql query and delete data from your tables the following way: * ```sql * truncate tableName1, tableName2, ... cascade; * ``` * * `If db is a MySqlDatabase object`, we will execute sql queries and delete data from your tables the following way: * ```sql * SET FOREIGN_KEY_CHECKS = 0; * truncate tableName1; * truncate tableName2; * . * . * . * * SET FOREIGN_KEY_CHECKS = 1; * ``` * * `If db is a BaseSQLiteDatabase object`, we will execute sql queries and delete data from your tables the following way: * ```sql * PRAGMA foreign_keys = OFF; * delete from tableName1; * delete from tableName2; * . * . * . * * PRAGMA foreign_keys = ON; * ``` * * @example * ```ts * await reset(db, schema); * ``` */ export async function reset< DB extends | PgDatabase | MySqlDatabase | BaseSQLiteDatabase, SCHEMA extends { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | any; }, >(db: DB, schema: SCHEMA) { if (is(db, PgDatabase)) { const { pgTables } = filterPgSchema(schema); if (Object.entries(pgTables).length > 0) { await resetPostgres(db, pgTables); } } else if (is(db, MySqlDatabase)) { const { mysqlTables } = filterMysqlTables(schema); if (Object.entries(mysqlTables).length > 0) { await resetMySql(db, mysqlTables); } } else if (is(db, BaseSQLiteDatabase)) { const { sqliteTables } = filterSqliteTables(schema); if (Object.entries(sqliteTables).length > 0) { await resetSqlite(db, sqliteTables); } } else { throw new Error( 'The drizzle-seed package currently supports only PostgreSQL, MySQL, and SQLite databases. Please ensure your database is one of these supported types', ); } } // Postgres----------------------------------------------------------------------------------------------------------- const resetPostgres = async ( db: PgDatabase, pgTables: { [key: string]: PgTable }, ) => { const tablesToTruncate = Object.entries(pgTables).map(([_, table]) => { const config = getPgTableConfig(table); config.schema = config.schema === undefined ? 'public' : config.schema; return `"${config.schema}"."${config.name}"`; }); await db.execute(sql.raw(`truncate ${tablesToTruncate.join(',')} cascade;`)); }; const filterPgSchema = (schema: { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations | any; }) => { const pgSchema = Object.fromEntries( Object.entries(schema).filter((keyValue): keyValue is [string, PgTable | Relations] => is(keyValue[1], PgTable) || is(keyValue[1], Relations) ), ); const pgTables = Object.fromEntries( Object.entries(schema).filter((keyValue): keyValue is [string, PgTable] => is(keyValue[1], PgTable)), ); return { pgSchema, pgTables }; }; const seedPostgres = async ( db: PgDatabase, schema: { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations | any; }, options: { count?: number; seed?: number; version?: number } = {}, refinements?: RefinementsType, ) => { const seedService = new SeedService(); const { pgSchema, pgTables } = filterPgSchema(schema); const { tables, relations } = getPostgresInfo(pgSchema, pgTables); const generatedTablesGenerators = seedService.generatePossibleGenerators( 'postgresql', tables, relations, refinements, options, ); const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); const tablesValues = await seedService.generateTablesValues( relations, generatedTablesGenerators, db, pgTables, { ...options, preserveCyclicTablesData }, ); const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( generatedTablesGenerators, ); const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; await seedService.generateTablesValues( relations, filteredTablesGenerators, db, pgTables, { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, ); }; const getPostgresInfo = ( pgSchema: { [key: string]: PgTable | Relations }, pgTables: { [key: string]: PgTable }, ) => { let tableConfig: ReturnType; let dbToTsColumnNamesMap: { [key: string]: string }; const dbToTsTableNamesMap: { [key: string]: string } = Object.fromEntries( Object.entries(pgTables).map(([key, value]) => [getTableName(value), key]), ); const tables: Table[] = []; const relations: RelationWithReferences[] = []; const dbToTsColumnNamesMapGlobal: { [tableName: string]: { [dbColumnName: string]: string }; } = {}; const tableRelations: { [tableName: string]: RelationWithReferences[] } = {}; const getDbToTsColumnNamesMap = (table: PgTable) => { let dbToTsColumnNamesMap: { [dbColName: string]: string } = {}; const tableName = getTableName(table); if (Object.hasOwn(dbToTsColumnNamesMapGlobal, tableName)) { dbToTsColumnNamesMap = dbToTsColumnNamesMapGlobal[tableName]!; return dbToTsColumnNamesMap; } const tableConfig = getPgTableConfig(table); for (const [tsCol, col] of Object.entries(tableConfig.columns[0]!.table)) { dbToTsColumnNamesMap[col.name] = tsCol; } dbToTsColumnNamesMapGlobal[tableName] = dbToTsColumnNamesMap; return dbToTsColumnNamesMap; }; const transformFromDrizzleRelation = ( schema: Record, getDbToTsColumnNamesMap: (table: PgTable) => { [dbColName: string]: string; }, tableRelations: { [tableName: string]: RelationWithReferences[]; }, ) => { const schemaConfig = extractTablesRelationalConfig(schema, createTableRelationsHelpers); const relations: RelationWithReferences[] = []; for (const table of Object.values(schemaConfig.tables)) { if (table.relations === undefined) continue; for (const drizzleRel of Object.values(table.relations)) { if (!is(drizzleRel, One)) continue; const tableConfig = getPgTableConfig(drizzleRel.sourceTable as PgTable); const tableDbSchema = tableConfig.schema ?? 'public'; const tableDbName = tableConfig.name; const tableTsName = schemaConfig.tableNamesMap[`${tableDbSchema}.${tableDbName}`] ?? tableDbName; const dbToTsColumnNamesMap = getDbToTsColumnNamesMap(drizzleRel.sourceTable); const columns = drizzleRel.config?.fields.map((field) => dbToTsColumnNamesMap[field.name] as string) ?? []; const refTableConfig = getPgTableConfig(drizzleRel.referencedTable as PgTable); const refTableDbSchema = refTableConfig.schema ?? 'public'; const refTableDbName = refTableConfig.name; const refTableTsName = schemaConfig.tableNamesMap[`${refTableDbSchema}.${refTableDbName}`] ?? refTableDbName; const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap(drizzleRel.referencedTable); const refColumns = drizzleRel.config?.references.map((ref) => dbToTsColumnNamesMapForRefTable[ref.name] as string ) ?? []; if (tableRelations[refTableTsName] === undefined) { tableRelations[refTableTsName] = []; } const relation: RelationWithReferences = { table: tableTsName, columns, refTable: refTableTsName, refColumns, refTableRels: tableRelations[refTableTsName], type: 'one', }; // do not add duplicate relation if ( tableRelations[tableTsName]?.some((rel) => rel.table === relation.table && rel.refTable === relation.refTable ) ) { console.warn( `You are providing a one-to-many relation between the '${relation.refTable}' and '${relation.table}' tables,\n` + `while the '${relation.table}' table object already has foreign key constraint in the schema referencing '${relation.refTable}' table.\n` + `In this case, the foreign key constraint will be used.\n`, ); continue; } relations.push(relation); tableRelations[tableTsName]!.push(relation); } } return relations; }; for (const table of Object.values(pgTables)) { tableConfig = getPgTableConfig(table); dbToTsColumnNamesMap = {}; for (const [tsCol, col] of Object.entries(tableConfig.columns[0]!.table)) { dbToTsColumnNamesMap[col.name] = tsCol; } // might be empty list const newRelations = tableConfig.foreignKeys.map((fk) => { const table = dbToTsTableNamesMap[tableConfig.name] as string; const refTable = dbToTsTableNamesMap[getTableName(fk.reference().foreignTable)] as string; const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap( fk.reference().foreignTable, ); if (tableRelations[refTable] === undefined) { tableRelations[refTable] = []; } return { table, columns: fk .reference() .columns.map((col) => dbToTsColumnNamesMap[col.name] as string), refTable, refColumns: fk .reference() .foreignColumns.map( (fCol) => dbToTsColumnNamesMapForRefTable[fCol.name] as string, ), refTableRels: tableRelations[refTable], }; }); relations.push( ...newRelations, ); if (tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] === undefined) { tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] = []; } tableRelations[dbToTsTableNamesMap[tableConfig.name] as string]!.push(...newRelations); const getAllBaseColumns = ( baseColumn: PgArray['baseColumn'] & { baseColumn?: PgArray['baseColumn'] }, ): Column['baseColumn'] => { const baseColumnResult: Column['baseColumn'] = { name: baseColumn.name, columnType: baseColumn.getSQLType(), typeParams: getTypeParams(baseColumn.getSQLType()), dataType: baseColumn.dataType, size: (baseColumn as PgArray).size, hasDefault: baseColumn.hasDefault, enumValues: baseColumn.enumValues, default: baseColumn.default, isUnique: baseColumn.isUnique, notNull: baseColumn.notNull, primary: baseColumn.primary, baseColumn: baseColumn.baseColumn === undefined ? undefined : getAllBaseColumns(baseColumn.baseColumn), }; return baseColumnResult; }; const getTypeParams = (sqlType: string) => { // get type params const typeParams: Column['typeParams'] = {}; // handle dimensions if (sqlType.includes('[')) { const match = sqlType.match(/\[\w*]/g); if (match) { typeParams['dimensions'] = match.length; } } if ( sqlType.startsWith('numeric') || sqlType.startsWith('decimal') || sqlType.startsWith('double precision') || sqlType.startsWith('real') ) { const match = sqlType.match(/\((\d+), *(\d+)\)/); if (match) { typeParams['precision'] = Number(match[1]); typeParams['scale'] = Number(match[2]); } } else if ( sqlType.startsWith('varchar') || sqlType.startsWith('bpchar') || sqlType.startsWith('char') || sqlType.startsWith('bit') || sqlType.startsWith('time') || sqlType.startsWith('timestamp') || sqlType.startsWith('interval') ) { const match = sqlType.match(/\((\d+)\)/); if (match) { typeParams['length'] = Number(match[1]); } } return typeParams; }; // console.log(tableConfig.columns); tables.push({ name: dbToTsTableNamesMap[tableConfig.name] as string, columns: tableConfig.columns.map((column) => ({ name: dbToTsColumnNamesMap[column.name] as string, columnType: column.getSQLType(), typeParams: getTypeParams(column.getSQLType()), dataType: column.dataType, size: (column as PgArray).size, hasDefault: column.hasDefault, default: column.default, enumValues: column.enumValues, isUnique: column.isUnique, notNull: column.notNull, primary: column.primary, generatedIdentityType: column.generatedIdentity?.type, baseColumn: ((column as PgArray).baseColumn === undefined) ? undefined : getAllBaseColumns((column as PgArray).baseColumn), })), primaryKeys: tableConfig.columns .filter((column) => column.primary) .map((column) => dbToTsColumnNamesMap[column.name] as string), }); } const transformedDrizzleRelations = transformFromDrizzleRelation(pgSchema, getDbToTsColumnNamesMap, tableRelations); relations.push( ...transformedDrizzleRelations, ); const isCyclicRelations = relations.map( (relI) => { // if (relations.some((relj) => relI.table === relj.refTable && relI.refTable === relj.table)) { const tableRel = tableRelations[relI.table]!.find((relJ) => relJ.refTable === relI.refTable)!; if (isRelationCyclic(relI)) { tableRel['isCyclic'] = true; return { ...relI, isCyclic: true }; } tableRel['isCyclic'] = false; return { ...relI, isCyclic: false }; }, ); return { tables, relations: isCyclicRelations, tableRelations }; }; const isRelationCyclic = ( startRel: RelationWithReferences, ) => { // self relation if (startRel.table === startRel.refTable) return false; // DFS const targetTable = startRel.table; const queue = [startRel]; let path: string[] = []; while (queue.length !== 0) { const currRel = queue.shift(); if (path.includes(currRel!.table)) { const idx = path.indexOf(currRel!.table); path = path.slice(0, idx); } path.push(currRel!.table); for (const rel of currRel!.refTableRels) { // self relation if (rel.table === rel.refTable) continue; if (rel.refTable === targetTable) return true; // found cycle, but not the one we are looking for if (path.includes(rel.refTable)) continue; queue.unshift(rel); } } return false; }; // MySql----------------------------------------------------------------------------------------------------- const resetMySql = async ( db: MySqlDatabase, schema: { [key: string]: MySqlTable }, ) => { const tablesToTruncate = Object.entries(schema).map(([_tsTableName, table]) => { const dbTableName = getTableName(table); return dbTableName; }); await db.execute(sql.raw('SET FOREIGN_KEY_CHECKS = 0;')); for (const tableName of tablesToTruncate) { const sqlQuery = `truncate \`${tableName}\`;`; await db.execute(sql.raw(sqlQuery)); } await db.execute(sql.raw('SET FOREIGN_KEY_CHECKS = 1;')); }; const filterMysqlTables = (schema: { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | any; }) => { const mysqlSchema = Object.fromEntries( Object.entries(schema).filter( (keyValue): keyValue is [string, MySqlTable | Relations] => is(keyValue[1], MySqlTable) || is(keyValue[1], Relations), ), ); const mysqlTables = Object.fromEntries( Object.entries(schema).filter( (keyValue): keyValue is [string, MySqlTable] => is(keyValue[1], MySqlTable), ), ); return { mysqlSchema, mysqlTables }; }; const seedMySql = async ( db: MySqlDatabase, schema: { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations | any; }, options: { count?: number; seed?: number; version?: number } = {}, refinements?: RefinementsType, ) => { const { mysqlSchema, mysqlTables } = filterMysqlTables(schema); const { tables, relations } = getMySqlInfo(mysqlSchema, mysqlTables); const seedService = new SeedService(); const generatedTablesGenerators = seedService.generatePossibleGenerators( 'mysql', tables, relations, refinements, options, ); const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); const tablesValues = await seedService.generateTablesValues( relations, generatedTablesGenerators, db, mysqlTables, { ...options, preserveCyclicTablesData }, ); const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( generatedTablesGenerators, ); const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; await seedService.generateTablesValues( relations, filteredTablesGenerators, db, mysqlTables, { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, ); }; const getMySqlInfo = ( mysqlSchema: { [key: string]: MySqlTable | Relations }, mysqlTables: { [key: string]: MySqlTable }, ) => { let tableConfig: ReturnType; let dbToTsColumnNamesMap: { [key: string]: string }; const dbToTsTableNamesMap: { [key: string]: string } = Object.fromEntries( Object.entries(mysqlTables).map(([key, value]) => [getTableName(value), key]), ); const tables: Table[] = []; const relations: RelationWithReferences[] = []; const dbToTsColumnNamesMapGlobal: { [tableName: string]: { [dbColumnName: string]: string }; } = {}; const tableRelations: { [tableName: string]: RelationWithReferences[] } = {}; const getDbToTsColumnNamesMap = (table: MySqlTable) => { let dbToTsColumnNamesMap: { [dbColName: string]: string } = {}; const tableName = getTableName(table); if (Object.hasOwn(dbToTsColumnNamesMapGlobal, tableName)) { dbToTsColumnNamesMap = dbToTsColumnNamesMapGlobal[tableName]!; return dbToTsColumnNamesMap; } const tableConfig = getMysqlTableConfig(table); for (const [tsCol, col] of Object.entries(tableConfig.columns[0]!.table)) { dbToTsColumnNamesMap[col.name] = tsCol; } dbToTsColumnNamesMapGlobal[tableName] = dbToTsColumnNamesMap; return dbToTsColumnNamesMap; }; const transformFromDrizzleRelation = ( schema: Record, getDbToTsColumnNamesMap: (table: MySqlTable) => { [dbColName: string]: string; }, tableRelations: { [tableName: string]: RelationWithReferences[]; }, ) => { const schemaConfig = extractTablesRelationalConfig(schema, createTableRelationsHelpers); const relations: RelationWithReferences[] = []; for (const table of Object.values(schemaConfig.tables)) { if (table.relations === undefined) continue; for (const drizzleRel of Object.values(table.relations)) { if (!is(drizzleRel, One)) continue; const tableConfig = getMysqlTableConfig(drizzleRel.sourceTable as MySqlTable); const tableDbSchema = tableConfig.schema ?? 'public'; const tableDbName = tableConfig.name; const tableTsName = schemaConfig.tableNamesMap[`${tableDbSchema}.${tableDbName}`] ?? tableDbName; const dbToTsColumnNamesMap = getDbToTsColumnNamesMap(drizzleRel.sourceTable as MySqlTable); const columns = drizzleRel.config?.fields.map((field) => dbToTsColumnNamesMap[field.name] as string) ?? []; const refTableConfig = getMysqlTableConfig(drizzleRel.referencedTable as MySqlTable); const refTableDbSchema = refTableConfig.schema ?? 'public'; const refTableDbName = refTableConfig.name; const refTableTsName = schemaConfig.tableNamesMap[`${refTableDbSchema}.${refTableDbName}`] ?? refTableDbName; const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap(drizzleRel.referencedTable as MySqlTable); const refColumns = drizzleRel.config?.references.map((ref) => dbToTsColumnNamesMapForRefTable[ref.name] as string ) ?? []; if (tableRelations[refTableTsName] === undefined) { tableRelations[refTableTsName] = []; } const relation: RelationWithReferences = { table: tableTsName, columns, refTable: refTableTsName, refColumns, refTableRels: tableRelations[refTableTsName], type: 'one', }; // do not add duplicate relation if ( tableRelations[tableTsName]?.some((rel) => rel.table === relation.table && rel.refTable === relation.refTable ) ) { console.warn( `You are providing a one-to-many relation between the '${relation.refTable}' and '${relation.table}' tables,\n` + `while the '${relation.table}' table object already has foreign key constraint in the schema referencing '${relation.refTable}' table.\n` + `In this case, the foreign key constraint will be used.\n`, ); continue; } relations.push(relation); tableRelations[tableTsName]!.push(relation); } } return relations; }; for (const table of Object.values(mysqlTables)) { tableConfig = getMysqlTableConfig(table); dbToTsColumnNamesMap = {}; for (const [tsCol, col] of Object.entries(tableConfig.columns[0]!.table)) { dbToTsColumnNamesMap[col.name] = tsCol; } const newRelations = tableConfig.foreignKeys.map((fk) => { const table = dbToTsTableNamesMap[tableConfig.name] as string; const refTable = dbToTsTableNamesMap[getTableName(fk.reference().foreignTable)] as string; const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap( fk.reference().foreignTable, ); if (tableRelations[refTable] === undefined) { tableRelations[refTable] = []; } return { table, columns: fk .reference() .columns.map((col) => dbToTsColumnNamesMap[col.name] as string), refTable, refColumns: fk .reference() .foreignColumns.map( (fCol) => dbToTsColumnNamesMapForRefTable[fCol.name] as string, ), refTableRels: tableRelations[refTable], }; }); relations.push( ...newRelations, ); if (tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] === undefined) { tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] = []; } tableRelations[dbToTsTableNamesMap[tableConfig.name] as string]!.push(...newRelations); const getTypeParams = (sqlType: string) => { // get type params and set only type const typeParams: Column['typeParams'] = {}; if ( sqlType.startsWith('decimal') || sqlType.startsWith('real') || sqlType.startsWith('double') || sqlType.startsWith('float') ) { const match = sqlType.match(/\((\d+), *(\d+)\)/); if (match) { typeParams['precision'] = Number(match[1]); typeParams['scale'] = Number(match[2]); } } else if ( sqlType.startsWith('char') || sqlType.startsWith('varchar') || sqlType.startsWith('binary') || sqlType.startsWith('varbinary') ) { const match = sqlType.match(/\((\d+)\)/); if (match) { typeParams['length'] = Number(match[1]); } } return typeParams; }; tables.push({ name: dbToTsTableNamesMap[tableConfig.name] as string, columns: tableConfig.columns.map((column) => ({ name: dbToTsColumnNamesMap[column.name] as string, columnType: column.getSQLType(), typeParams: getTypeParams(column.getSQLType()), dataType: column.dataType, hasDefault: column.hasDefault, default: column.default, enumValues: column.enumValues, isUnique: column.isUnique, notNull: column.notNull, primary: column.primary, })), primaryKeys: tableConfig.columns .filter((column) => column.primary) .map((column) => dbToTsColumnNamesMap[column.name] as string), }); } const transformedDrizzleRelations = transformFromDrizzleRelation( mysqlSchema, getDbToTsColumnNamesMap, tableRelations, ); relations.push( ...transformedDrizzleRelations, ); const isCyclicRelations = relations.map( (relI) => { const tableRel = tableRelations[relI.table]!.find((relJ) => relJ.refTable === relI.refTable)!; if (isRelationCyclic(relI)) { tableRel['isCyclic'] = true; return { ...relI, isCyclic: true }; } tableRel['isCyclic'] = false; return { ...relI, isCyclic: false }; }, ); return { tables, relations: isCyclicRelations, tableRelations }; }; // Sqlite------------------------------------------------------------------------------------------------------------------------ const resetSqlite = async ( db: BaseSQLiteDatabase, schema: { [key: string]: SQLiteTable }, ) => { const tablesToTruncate = Object.entries(schema).map(([_tsTableName, table]) => { const dbTableName = getTableName(table); return dbTableName; }); await db.run(sql.raw('PRAGMA foreign_keys = OFF')); for (const tableName of tablesToTruncate) { const sqlQuery = `delete from \`${tableName}\`;`; await db.run(sql.raw(sqlQuery)); } await db.run(sql.raw('PRAGMA foreign_keys = ON')); }; const filterSqliteTables = (schema: { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | any; }) => { const sqliteSchema = Object.fromEntries( Object.entries(schema).filter( (keyValue): keyValue is [string, SQLiteTable | Relations] => is(keyValue[1], SQLiteTable) || is(keyValue[1], Relations), ), ); const sqliteTables = Object.fromEntries( Object.entries(schema).filter( (keyValue): keyValue is [string, SQLiteTable] => is(keyValue[1], SQLiteTable), ), ); return { sqliteSchema, sqliteTables }; }; const seedSqlite = async ( db: BaseSQLiteDatabase, schema: { [key: string]: | PgTable | PgSchema | MySqlTable | MySqlSchema | SQLiteTable | Relations | any; }, options: { count?: number; seed?: number; version?: number } = {}, refinements?: RefinementsType, ) => { const { sqliteSchema, sqliteTables } = filterSqliteTables(schema); const { tables, relations } = getSqliteInfo(sqliteSchema, sqliteTables); const seedService = new SeedService(); const generatedTablesGenerators = seedService.generatePossibleGenerators( 'sqlite', tables, relations, refinements, options, ); const preserveCyclicTablesData = relations.some((rel) => rel.isCyclic === true); const tablesValues = await seedService.generateTablesValues( relations, generatedTablesGenerators, db, sqliteTables, { ...options, preserveCyclicTablesData }, ); const { filteredTablesGenerators, tablesUniqueNotNullColumn } = seedService.filterCyclicTables( generatedTablesGenerators, ); const updateDataInDb = filteredTablesGenerators.length === 0 ? false : true; await seedService.generateTablesValues( relations, filteredTablesGenerators, db, sqliteTables, { ...options, tablesValues, updateDataInDb, tablesUniqueNotNullColumn }, ); }; const getSqliteInfo = ( sqliteSchema: { [key: string]: SQLiteTable | Relations }, sqliteTables: { [key: string]: SQLiteTable }, ) => { let tableConfig: ReturnType; let dbToTsColumnNamesMap: { [key: string]: string }; const dbToTsTableNamesMap: { [key: string]: string } = Object.fromEntries( Object.entries(sqliteTables).map(([key, value]) => [getTableName(value), key]), ); const tables: Table[] = []; const relations: RelationWithReferences[] = []; const dbToTsColumnNamesMapGlobal: { [tableName: string]: { [dbColumnName: string]: string }; } = {}; const tableRelations: { [tableName: string]: RelationWithReferences[] } = {}; const getDbToTsColumnNamesMap = (table: SQLiteTable) => { let dbToTsColumnNamesMap: { [dbColName: string]: string } = {}; const tableName = getTableName(table); if (Object.hasOwn(dbToTsColumnNamesMapGlobal, tableName)) { dbToTsColumnNamesMap = dbToTsColumnNamesMapGlobal[tableName]!; return dbToTsColumnNamesMap; } const tableConfig = getSqliteTableConfig(table); for (const [tsCol, col] of Object.entries(tableConfig.columns[0]!.table)) { dbToTsColumnNamesMap[col.name] = tsCol; } dbToTsColumnNamesMapGlobal[tableName] = dbToTsColumnNamesMap; return dbToTsColumnNamesMap; }; const transformFromDrizzleRelation = ( schema: Record, getDbToTsColumnNamesMap: (table: SQLiteTable) => { [dbColName: string]: string; }, tableRelations: { [tableName: string]: RelationWithReferences[]; }, ) => { const schemaConfig = extractTablesRelationalConfig(schema, createTableRelationsHelpers); const relations: RelationWithReferences[] = []; for (const table of Object.values(schemaConfig.tables)) { if (table.relations === undefined) continue; for (const drizzleRel of Object.values(table.relations)) { if (!is(drizzleRel, One)) continue; const tableConfig = getSqliteTableConfig(drizzleRel.sourceTable as SQLiteTable); const tableDbName = tableConfig.name; // TODO: tableNamesMap: have {public.customer: 'customer'} structure in sqlite const tableTsName = schemaConfig.tableNamesMap[`public.${tableDbName}`] ?? tableDbName; const dbToTsColumnNamesMap = getDbToTsColumnNamesMap(drizzleRel.sourceTable as SQLiteTable); const columns = drizzleRel.config?.fields.map((field) => dbToTsColumnNamesMap[field.name] as string) ?? []; const refTableConfig = getSqliteTableConfig(drizzleRel.referencedTable as SQLiteTable); const refTableDbName = refTableConfig.name; const refTableTsName = schemaConfig.tableNamesMap[`public.${refTableDbName}`] ?? refTableDbName; const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap(drizzleRel.referencedTable as SQLiteTable); const refColumns = drizzleRel.config?.references.map((ref) => dbToTsColumnNamesMapForRefTable[ref.name] as string ) ?? []; if (tableRelations[refTableTsName] === undefined) { tableRelations[refTableTsName] = []; } const relation: RelationWithReferences = { table: tableTsName, columns, refTable: refTableTsName, refColumns, refTableRels: tableRelations[refTableTsName], type: 'one', }; // do not add duplicate relation if ( tableRelations[tableTsName]?.some((rel) => rel.table === relation.table && rel.refTable === relation.refTable ) ) { console.warn( `You are providing a one-to-many relation between the '${relation.refTable}' and '${relation.table}' tables,\n` + `while the '${relation.table}' table object already has foreign key constraint in the schema referencing '${relation.refTable}' table.\n` + `In this case, the foreign key constraint will be used.\n`, ); continue; } relations.push(relation); tableRelations[tableTsName]!.push(relation); } } return relations; }; for (const table of Object.values(sqliteTables)) { tableConfig = getSqliteTableConfig(table); dbToTsColumnNamesMap = {}; for (const [tsCol, col] of Object.entries(tableConfig.columns[0]!.table)) { dbToTsColumnNamesMap[col.name] = tsCol; } const newRelations = tableConfig.foreignKeys.map((fk) => { const table = dbToTsTableNamesMap[tableConfig.name] as string; const refTable = dbToTsTableNamesMap[getTableName(fk.reference().foreignTable)] as string; const dbToTsColumnNamesMapForRefTable = getDbToTsColumnNamesMap( fk.reference().foreignTable, ); if (tableRelations[refTable] === undefined) { tableRelations[refTable] = []; } return { table, columns: fk .reference() .columns.map((col) => dbToTsColumnNamesMap[col.name] as string), refTable, refColumns: fk .reference() .foreignColumns.map( (fCol) => dbToTsColumnNamesMapForRefTable[fCol.name] as string, ), refTableRels: tableRelations[refTable], }; }); relations.push( ...newRelations, ); if (tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] === undefined) { tableRelations[dbToTsTableNamesMap[tableConfig.name] as string] = []; } tableRelations[dbToTsTableNamesMap[tableConfig.name] as string]!.push(...newRelations); const getTypeParams = (sqlType: string) => { // get type params and set only type const typeParams: Column['typeParams'] = {}; if ( sqlType.startsWith('decimal') ) { const match = sqlType.match(/\((\d+), *(\d+)\)/); if (match) { typeParams['precision'] = Number(match[1]); typeParams['scale'] = Number(match[2]); } } else if ( sqlType.startsWith('char') || sqlType.startsWith('varchar') || sqlType.startsWith('text') ) { const match = sqlType.match(/\((\d+)\)/); if (match) { typeParams['length'] = Number(match[1]); } } return typeParams; }; tables.push({ name: dbToTsTableNamesMap[tableConfig.name] as string, columns: tableConfig.columns.map((column) => ({ name: dbToTsColumnNamesMap[column.name] as string, columnType: column.getSQLType(), typeParams: getTypeParams(column.getSQLType()), dataType: column.dataType, hasDefault: column.hasDefault, default: column.default, enumValues: column.enumValues, isUnique: column.isUnique, notNull: column.notNull, primary: column.primary, })), primaryKeys: tableConfig.columns .filter((column) => column.primary) .map((column) => dbToTsColumnNamesMap[column.name] as string), }); } const transformedDrizzleRelations = transformFromDrizzleRelation( sqliteSchema, getDbToTsColumnNamesMap, tableRelations, ); relations.push( ...transformedDrizzleRelations, ); const isCyclicRelations = relations.map( (relI) => { const tableRel = tableRelations[relI.table]!.find((relJ) => relJ.refTable === relI.refTable)!; if (isRelationCyclic(relI)) { tableRel['isCyclic'] = true; return { ...relI, isCyclic: true }; } tableRel['isCyclic'] = false; return { ...relI, isCyclic: false }; }, ); return { tables, relations: isCyclicRelations, tableRelations }; }; export { default as cities } from './datasets/cityNames.ts'; export { default as countries } from './datasets/countries.ts'; export { default as firstNames } from './datasets/firstNames.ts'; export { default as lastNames } from './datasets/lastNames.ts'; export { SeedService } from './services/SeedService.ts'; ================================================ FILE: drizzle-seed/src/services/GeneratorFuncs.ts ================================================ import type { AbstractGenerator } from './Generators.ts'; import { GenerateArray, GenerateBoolean, GenerateCity, GenerateCompanyName, GenerateCountry, GenerateDate, GenerateDatetime, GenerateDefault, GenerateEmail, GenerateEnum, GenerateFirstName, GenerateFullName, GenerateInt, GenerateInterval, GenerateIntPrimaryKey, GenerateJobTitle, GenerateJson, GenerateLastName, GenerateLine, GenerateLoremIpsum, GenerateNumber, GeneratePhoneNumber, GeneratePoint, GeneratePostcode, GenerateSelfRelationsValuesFromArray, GenerateState, GenerateStreetAddress, GenerateString, GenerateTime, GenerateTimestamp, GenerateUniqueCity, GenerateUniqueCompanyName, GenerateUniqueCountry, GenerateUniqueFirstName, GenerateUniqueFullName, GenerateUniqueInt, GenerateUniqueInterval, GenerateUniqueLastName, GenerateUniqueLine, GenerateUniqueNumber, GenerateUniquePoint, GenerateUniquePostcode, GenerateUniqueStreetAddress, GenerateUniqueString, GenerateUUID, GenerateValuesFromArray, GenerateWeightedCount, GenerateYear, HollowGenerator, WeightedRandomGenerator, } from './Generators.ts'; import { GenerateStringV2, GenerateUniqueIntervalV2, GenerateUniqueStringV2 } from './versioning/v2.ts'; function createGenerator, T>( generatorConstructor: new(params?: T) => GeneratorType, ) { return ( ...args: GeneratorType extends GenerateValuesFromArray | GenerateDefault | WeightedRandomGenerator ? [T] : ([] | [T]) ): GeneratorType => { let params = args[0]; if (params === undefined) params = {} as T; return new generatorConstructor(params); }; } export const generatorsFuncs = { /** * generates same given value each time the generator is called. * @param defaultValue - value you want to generate * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * posts: { * columns: { * content: funcs.default({ defaultValue: "post content" }), * }, * }, * })); * ``` */ default: createGenerator(GenerateDefault), /** * generates values from given array * @param values - array of values you want to generate. can be array of weighted values. * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * posts: { * columns: { * title: funcs.valuesFromArray({ * values: ["Title1", "Title2", "Title3", "Title4", "Title5"], * isUnique: true * }), * }, * }, * })); * * ``` * weighted values example * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * posts: { * columns: { * title: funcs.valuesFromArray({ * values: [ * { weight: 0.35, values: ["Title1", "Title2"] }, * { weight: 0.5, values: ["Title3", "Title4"] }, * { weight: 0.15, values: ["Title5"] }, * ], * isUnique: false * }), * }, * }, * })); * * ``` */ valuesFromArray: createGenerator(GenerateValuesFromArray), /** * generates sequential integers starting with 1. * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * posts: { * columns: { * id: funcs.intPrimaryKey(), * }, * }, * })); * * ``` */ intPrimaryKey: createGenerator(GenerateIntPrimaryKey), /** * generates numbers with floating point in given range. * @param minValue - lower border of range. * @param maxValue - upper border of range. * @param precision - precision of generated number: * precision equals 10 means that values will be accurate to one tenth (1.2, 34.6); * precision equals 100 means that values will be accurate to one hundredth (1.23, 34.67). * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * products: { * columns: { * unitPrice: funcs.number({ minValue: 10, maxValue: 120, precision: 100, isUnique: false }), * }, * }, * })); * * ``` */ number: createGenerator(GenerateNumber), // uniqueNumber: createGenerator(GenerateUniqueNumber), /** * generates integers within given range. * @param minValue - lower border of range. * @param maxValue - upper border of range. * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * products: { * columns: { * unitsInStock: funcs.number({ minValue: 0, maxValue: 100, isUnique: false }), * }, * }, * })); * * ``` */ int: createGenerator(GenerateInt), // uniqueInt: createGenerator(GenerateUniqueInt), /** * generates boolean values(true or false) * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * isAvailable: funcs.boolean() * }, * }, * })); * * ``` */ boolean: createGenerator(GenerateBoolean), /** * generates date within given range. * @param minDate - lower border of range. * @param maxDate - upper border of range. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * birthDate: funcs.date({ minDate: "1990-01-01", maxDate: "2010-12-31" }) * }, * }, * })); * * ``` */ date: createGenerator(GenerateDate), /** * generates time in 24 hours style. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * birthTime: funcs.time() * }, * }, * })); * * ``` */ time: createGenerator(GenerateTime), /** * generates timestamps. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * orders: { * columns: { * shippedDate: funcs.timestamp() * }, * }, * })); * * ``` */ timestamp: createGenerator(GenerateTimestamp), /** * generates datetime objects. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * orders: { * columns: { * shippedDate: funcs.datetime() * }, * }, * })); * * ``` */ datetime: createGenerator(GenerateDatetime), /** * generates years. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * birthYear: funcs.year() * }, * }, * })); * * ``` */ year: createGenerator(GenerateYear), /** * generates json objects with fixed structure. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * json structure can equal this: * ``` * { * email, * name, * isGraduated, * hasJob, * salary, * startedWorking, * visitedCountries, * } * ``` * or this * ``` * { * email, * name, * isGraduated, * hasJob, * visitedCountries, * } * ``` * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * metadata: funcs.json() * }, * }, * })); * ``` */ json: createGenerator(GenerateJson), // jsonb: createGenerator(GenerateJsonb), /** * generates time intervals. * * interval example: "1 years 12 days 5 minutes" * * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * @param fields - range of values you want to see in your intervals. * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * timeSpentOnWebsite: funcs.interval() * }, * }, * })); * ``` */ interval: createGenerator(GenerateInterval), // uniqueInterval: createGenerator(GenerateUniqueInterval), /** * generates random strings. * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * hashedPassword: funcs.string({isUnique: false}) * }, * }, * })); * ``` */ string: createGenerator(GenerateString), // uniqueString: createGenerator(GenerateUniqueString), /** * generates v4 UUID strings if arraySize is not specified, or v4 UUID 1D arrays if it is. * * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * uuid: funcs.uuid({ * arraySize: 4 * }) * }, * }, * })); * ``` */ uuid: createGenerator(GenerateUUID), /** * generates person's first names. * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * firstName: funcs.firstName({isUnique: true}) * }, * }, * })); * ``` */ firstName: createGenerator(GenerateFirstName), // uniqueFirstName: createGenerator(GenerateUniqueName), /** * generates person's last names. * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * lastName: funcs.lastName({isUnique: false}) * }, * }, * })); * ``` */ lastName: createGenerator(GenerateLastName), // uniqueLastName: createGenerator(GenerateUniqueSurname), /** * generates person's full names. * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * fullName: funcs.fullName({isUnique: true}) * }, * }, * })); * ``` */ fullName: createGenerator(GenerateFullName), // uniqueFullName: createGenerator(GenerateUniqueFullName), /** * generates unique emails. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * email: funcs.email() * }, * }, * })); * ``` */ email: createGenerator(GenerateEmail), /** * generates unique phone numbers. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @param template - phone number template, where all '#' symbols will be substituted with generated digits. * @param prefixes - array of any string you want to be your phone number prefixes.(not compatible with template property) * @param generatedDigitsNumbers - number of digits that will be added at the end of prefixes.(not compatible with template property) * @example * ```ts * //generate phone number using template property * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * phoneNumber: funcs.phoneNumber({template: "+(380) ###-####"}) * }, * }, * })); * * //generate phone number using prefixes and generatedDigitsNumbers properties * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * phoneNumber: funcs.phoneNumber({prefixes: [ "+380 99", "+380 67" ], generatedDigitsNumbers: 7}) * }, * }, * })); * * //generate phone number using prefixes and generatedDigitsNumbers properties but with different generatedDigitsNumbers for prefixes * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * phoneNumber: funcs.phoneNumber({prefixes: [ "+380 99", "+380 67", "+1" ], generatedDigitsNumbers: [7, 7, 10]}) * }, * }, * })); * * ``` */ phoneNumber: createGenerator(GeneratePhoneNumber), /** * generates country's names. * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * country: funcs.country({isUnique: false}) * }, * }, * })); * ``` */ country: createGenerator(GenerateCountry), // uniqueCountry: createGenerator(GenerateUniqueCountry), /** * generates city's names. * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * city: funcs.city({isUnique: false}) * }, * }, * })); * ``` */ city: createGenerator(GenerateCity), // uniqueCity: createGenerator(GenerateUniqueCityName), /** * generates street address. * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * streetAddress: funcs.streetAddress({isUnique: true}) * }, * }, * })); * ``` */ streetAddress: createGenerator(GenerateStreetAddress), // uniqueStreetAddress: createGenerator(GenerateUniqueStreetAddress), /** * generates job titles. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * jobTitle: funcs.jobTitle() * }, * }, * })); * ``` */ jobTitle: createGenerator(GenerateJobTitle), /** * generates postal codes. * * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * postcode: funcs.postcode({isUnique: true}) * }, * }, * })); * ``` */ postcode: createGenerator(GeneratePostcode), // uniquePostcoe: createGenerator(GenerateUniquePostcode), /** * generates states of America. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * state: funcs.state() * }, * }, * })); * ``` */ state: createGenerator(GenerateState), /** * generates company's names. * * @param isUnique - property that controls if generated values gonna be unique or not. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * users: { * columns: { * company: funcs.companyName({isUnique: true}) * }, * }, * })); * ``` */ companyName: createGenerator(GenerateCompanyName), // uniqueCompanyName: createGenerator(GenerateUniqueCompanyName), /** * generates 'lorem ipsum' text sentences. * * @param sentencesCount - number of sentences you want to generate as one generated value(string). * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * posts: { * columns: { * content: funcs.loremIpsum({sentencesCount: 2}) * }, * }, * })); * ``` */ loremIpsum: createGenerator(GenerateLoremIpsum), /** * generates 2D points within specified ranges for x and y coordinates. * * @param isUnique - property that controls if generated values gonna be unique or not. * @param minXValue - lower bound of range for x coordinate. * @param maxXValue - upper bound of range for x coordinate. * @param minYValue - lower bound of range for y coordinate. * @param maxYValue - upper bound of range for y coordinate. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * triangles: { * columns: { * pointCoords: funcs.point({ * isUnique: true, * minXValue: -5, maxXValue:20, * minYValue: 0, maxYValue: 30 * }) * }, * }, * })); * ``` */ point: createGenerator(GeneratePoint), // uniquePoint: createGenerator(GenerateUniquePoint), /** * generates 2D lines within specified ranges for a, b and c parameters of line. * * ``` * line equation: a*x + b*y + c = 0 * ``` * * @param isUnique - property that controls if generated values gonna be unique or not. * @param minAValue - lower bound of range for a parameter. * @param maxAValue - upper bound of range for x parameter. * @param minBValue - lower bound of range for y parameter. * @param maxBValue - upper bound of range for y parameter. * @param minCValue - lower bound of range for y parameter. * @param maxCValue - upper bound of range for y parameter. * @param arraySize - number of elements in each one-dimensional array. (If specified, arrays will be generated.) * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * lines: { * columns: { * lineParams: funcs.point({ * isUnique: true, * minAValue: -5, maxAValue:20, * minBValue: 0, maxBValue: 30, * minCValue: 0, maxCValue: 10 * }) * }, * }, * })); * ``` */ line: createGenerator(GenerateLine), // uniqueLine: createGenerator(GenerateUniqueLine), /** * gives you the opportunity to call different generators with different probabilities to generate values for one column. * @param params - array of generators with probabilities you would like to call them to generate values. * * @example * ```ts * await seed(db, schema, { count: 1000 }).refine((funcs) => ({ * posts: { * columns: { * content: funcs.weightedRandom([ * { * weight: 0.6, * value: funcs.loremIpsum({ sentencesCount: 3 }), * }, * { * weight: 0.4, * value: funcs.default({ defaultValue: "TODO" }), * }, * ]), * }, * }, * })); * ``` */ weightedRandom: createGenerator(WeightedRandomGenerator), }; // so far, version changes don’t affect generator parameters. export const generatorsFuncsV2 = { ...generatorsFuncs, }; export const generatorsMap = { HollowGenerator: [ HollowGenerator, ], GenerateDefault: [ GenerateDefault, ], GenerateValuesFromArray: [ GenerateValuesFromArray, ], GenerateSelfRelationsValuesFromArray: [ GenerateSelfRelationsValuesFromArray, ], GenerateIntPrimaryKey: [ GenerateIntPrimaryKey, ], GenerateNumber: [ GenerateNumber, ], GenerateUniqueNumber: [ GenerateUniqueNumber, ], GenerateInt: [ GenerateInt, ], GenerateUniqueInt: [ GenerateUniqueInt, ], GenerateBoolean: [ GenerateBoolean, ], GenerateDate: [ GenerateDate, ], GenerateTime: [ GenerateTime, ], GenerateTimestamp: [ GenerateTimestamp, ], GenerateDatetime: [ GenerateDatetime, ], GenerateYear: [ GenerateYear, ], GenerateJson: [ GenerateJson, ], GenerateEnum: [ GenerateEnum, ], GenerateInterval: [ GenerateInterval, ], GenerateUniqueInterval: [ GenerateUniqueInterval, GenerateUniqueIntervalV2, ], GenerateString: [ GenerateString, GenerateStringV2, ], GenerateUniqueString: [ GenerateUniqueString, GenerateUniqueStringV2, ], GenerateUUID: [ GenerateUUID, ], GenerateFirstName: [ GenerateFirstName, ], GenerateUniqueFirstName: [ GenerateUniqueFirstName, ], GenerateLastName: [ GenerateLastName, ], GenerateUniqueLastName: [ GenerateUniqueLastName, ], GenerateFullName: [ GenerateFullName, ], GenerateUniqueFullName: [ GenerateUniqueFullName, ], GenerateEmail: [ GenerateEmail, ], GeneratePhoneNumber: [ GeneratePhoneNumber, ], GenerateCountry: [ GenerateCountry, ], GenerateUniqueCountry: [ GenerateUniqueCountry, ], GenerateCity: [ GenerateCity, ], GenerateUniqueCity: [ GenerateUniqueCity, ], GenerateStreetAddress: [ GenerateStreetAddress, ], GenerateUniqueStreetAddress: [ GenerateUniqueStreetAddress, ], GenerateJobTitle: [ GenerateJobTitle, ], GeneratePostcode: [ GeneratePostcode, ], GenerateUniquePostcode: [ GenerateUniquePostcode, ], GenerateState: [ GenerateState, ], GenerateCompanyName: [ GenerateCompanyName, ], GenerateUniqueCompanyName: [ GenerateUniqueCompanyName, ], GenerateLoremIpsum: [ GenerateLoremIpsum, ], GeneratePoint: [ GeneratePoint, ], GenerateUniquePoint: [ GenerateUniquePoint, ], GenerateLine: [ GenerateLine, ], GenerateUniqueLine: [ GenerateUniqueLine, ], WeightedRandomGenerator: [ WeightedRandomGenerator, ], GenerateArray: [ GenerateArray, ], GenerateWeightedCount: [ GenerateWeightedCount, ], } as const; ================================================ FILE: drizzle-seed/src/services/Generators.ts ================================================ /* eslint-disable drizzle-internal/require-entity-kind */ import prand from 'pure-rand'; import adjectives, { maxStringLength as maxAdjectiveLength } from '../datasets/adjectives.ts'; import cityNames, { maxStringLength as maxCityNameLength } from '../datasets/cityNames.ts'; import companyNameSuffixes, { maxStringLength as maxCompanyNameSuffixLength } from '../datasets/companyNameSuffixes.ts'; import countries, { maxStringLength as maxCountryLength } from '../datasets/countries.ts'; import emailDomains, { maxStringLength as maxEmailDomainLength } from '../datasets/emailDomains.ts'; import firstNames, { maxStringLength as maxFirstNameLength } from '../datasets/firstNames.ts'; import jobsTitles, { maxStringLength as maxJobTitleLength } from '../datasets/jobsTitles.ts'; import lastNames, { maxStringLength as maxLastNameLength } from '../datasets/lastNames.ts'; import loremIpsumSentences, { maxStringLength as maxLoremIpsumLength } from '../datasets/loremIpsumSentences.ts'; import phonesInfo from '../datasets/phonesInfo.ts'; import states, { maxStringLength as maxStateLength } from '../datasets/states.ts'; import streetSuffix, { maxStringLength as maxStreetSuffixLength } from '../datasets/streetSuffix.ts'; import { fastCartesianProduct, fillTemplate, getWeightedIndices, isObject } from './utils.ts'; export abstract class AbstractGenerator { static readonly entityKind: string = 'AbstractGenerator'; static readonly version: number = 1; public isUnique = false; public notNull = false; // param for generators which have a unique version of themselves public uniqueVersionOfGen?: new(params: T) => AbstractGenerator; public dataType?: string; public timeSpent?: number; // public arraySize?: number; public baseColumnDataType?: string; // param for text-like generators public stringLength?: number; // params for GenerateValuesFromArray public weightedCountSeed?: number | undefined; public maxRepeatedValuesCount?: number | { weight: number; count: number | number[] }[] | undefined; public params: T; constructor(params?: T) { this.params = params === undefined ? {} as T : params as T; } init(params: { count: number | { weight: number; count: number | number[] }[]; seed: number }): void; init() { this.updateParams(); } updateParams() { if ((this.params as any).arraySize !== undefined) { this.arraySize = (this.params as any).arraySize; } if ((this.params as any).isUnique !== undefined) { if ((this.params as any).isUnique === false && this.isUnique === true) { throw new Error('specifying non unique generator to unique column.'); } this.isUnique = (this.params as any).isUnique; } } abstract generate(params: { i: number }): number | string | boolean | unknown | undefined | void; getEntityKind(): string { const constructor = this.constructor as typeof AbstractGenerator; return constructor.entityKind; } replaceIfUnique() { this.updateParams(); if ( this.uniqueVersionOfGen !== undefined && this.isUnique === true ) { const uniqueGen = new this.uniqueVersionOfGen({ ...this.params, }); uniqueGen.isUnique = this.isUnique; uniqueGen.dataType = this.dataType; return uniqueGen; } return; } replaceIfArray() { this.updateParams(); if (!(this.getEntityKind() === 'GenerateArray') && this.arraySize !== undefined) { const uniqueGen = this.replaceIfUnique(); const baseColumnGen = uniqueGen === undefined ? this : uniqueGen; baseColumnGen.dataType = this.baseColumnDataType; const arrayGen = new GenerateArray( { baseColumnGen, size: this.arraySize, }, ); return arrayGen; } return; } } // Generators Classes ----------------------------------------------------------------------------------------------------------------------- export class GenerateArray extends AbstractGenerator<{ baseColumnGen: AbstractGenerator; size?: number }> { static override readonly entityKind: string = 'GenerateArray'; public override arraySize = 10; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); this.arraySize = this.params.size === undefined ? this.arraySize : this.params.size; this.params.baseColumnGen.init({ count: count * this.arraySize, seed }); } generate() { const array = []; for (let i = 0; i < this.arraySize; i++) { array.push(this.params.baseColumnGen.generate({ i })); } return array; } } export class GenerateWeightedCount extends AbstractGenerator<{}> { static override readonly entityKind: string = 'GenerateWeightedCount'; private state: { rng: prand.RandomGenerator; weightedIndices: number[]; weightedCount: { weight: number; count: number | number[] }[]; } | undefined; override init({ seed, count }: { count: { weight: number; count: number | number[] }[]; seed: number }) { const rng = prand.xoroshiro128plus(seed); const weightedIndices = getWeightedIndices(count.map((val) => val.weight)); this.state = { rng, weightedIndices, weightedCount: count }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } // logic for this generator let idx: number; const weightedCount = this.state.weightedCount; [idx, this.state.rng] = prand.uniformIntDistribution(0, this.state.weightedIndices.length - 1, this.state.rng); const objIdx = this.state.weightedIndices[idx] as number; if (typeof weightedCount[objIdx]!.count === 'number') { return weightedCount[objIdx]!.count as number; } else { // typeof weightedCount[objIdx]!.count === 'object' // number[] const possCounts = weightedCount[objIdx]!.count as number[]; [idx, this.state.rng] = prand.uniformIntDistribution(0, possCounts.length - 1, this.state.rng); return possCounts[idx]!; } } } export class HollowGenerator extends AbstractGenerator<{}> { static override readonly entityKind: string = 'HollowGenerator'; override init() {} generate() {} } export class GenerateDefault extends AbstractGenerator<{ defaultValue: unknown; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateDefault'; generate() { return this.params.defaultValue; } } export class GenerateValuesFromArray extends AbstractGenerator< { values: | (number | string | boolean | undefined)[] | { weight: number; values: (number | string | boolean | undefined)[] }[]; isUnique?: boolean; arraySize?: number; } > { static override readonly entityKind: string = 'GenerateValuesFromArray'; private state: { rng: prand.RandomGenerator; values: | (number | string | boolean | undefined)[] | { weight: number; values: (number | string | boolean | undefined)[] }[]; genIndicesObj: GenerateUniqueInt | undefined; genIndicesObjList: GenerateUniqueInt[] | undefined; valuesWeightedIndices: number[] | undefined; genMaxRepeatedValuesCount: GenerateDefault | GenerateWeightedCount | undefined; } | undefined; public override timeSpent: number = 0; checks({ count }: { count: number }) { const { values } = this.params; const { maxRepeatedValuesCount, notNull, isUnique } = this; if (values.length === 0) { throw new Error('Values length equals zero.'); } if ( isObject(values[0]) && !(values as { weight: number; values: any[] }[]).every((val) => val.values.length !== 0) ) { throw new Error('One of weighted values length equals zero.'); } if ( maxRepeatedValuesCount !== undefined && ( (typeof maxRepeatedValuesCount === 'number' && maxRepeatedValuesCount <= 0) || (typeof maxRepeatedValuesCount === 'object' && !maxRepeatedValuesCount .every((obj) => (typeof obj.count) === 'number' ? (obj.count as number) > 0 : (obj.count as number[]).every((count) => count > 0) )) ) ) { throw new Error('maxRepeatedValuesCount should be greater than zero.'); } let allValuesCount = values.length; if (isObject(values[0])) { allValuesCount = (values as { values: any[] }[]).reduce((acc, currVal) => acc + currVal.values.length, 0); } if ( notNull === true && maxRepeatedValuesCount !== undefined && ( (!isObject(values[0]) && typeof maxRepeatedValuesCount === 'number' && maxRepeatedValuesCount * values.length < count) || (isObject(values[0]) && typeof maxRepeatedValuesCount === 'number' && maxRepeatedValuesCount * allValuesCount < count) ) ) { throw new Error("Can't fill notNull column with null values."); } if ( isUnique === true && maxRepeatedValuesCount !== undefined && ( (typeof maxRepeatedValuesCount === 'number' && maxRepeatedValuesCount > 1) || (typeof maxRepeatedValuesCount === 'object' && !maxRepeatedValuesCount .every((obj) => (typeof obj.count) === 'number' ? obj.count === 1 : (obj.count as number[]).every((count) => count === 1) )) ) ) { throw new Error("Can't be greater than 1 if column is unique."); } if ( isUnique === true && notNull === true && ( (!isObject(values[0]) && values.length < count) || (isObject(values[0]) && allValuesCount < count) ) ) { // console.log(maxRepeatedValuesCount, values.length, allValuesCount, count) throw new Error('There are no enough values to fill unique column.'); } } override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); this.checks({ count }); let { maxRepeatedValuesCount } = this; const { params, isUnique, notNull, weightedCountSeed } = this; const values = params.values; let valuesWeightedIndices; if (isObject(values[0])) { valuesWeightedIndices = getWeightedIndices((values as { weight: number }[]).map((val) => val.weight)); if (isUnique === true && notNull === true) { let idx: number, valueIdx: number, rng = prand.xoroshiro128plus(seed); const indicesCounter: { [key: number]: number } = {}; for (let i = 0; i < count; i++) { [idx, rng] = prand.uniformIntDistribution(0, valuesWeightedIndices.length - 1, rng); valueIdx = valuesWeightedIndices[idx]!; if (!Object.hasOwn(indicesCounter, valueIdx)) indicesCounter[valueIdx] = 0; indicesCounter[valueIdx]! += 1; } for (const [idx, value] of values.entries()) { if ((value as { values: (number | string | boolean | undefined)[] }).values.length < indicesCounter[idx]!) { throw new Error( 'weighted values arrays is too small to generate values with specified probability for unique not null column.' + `it's planned to generate: ${ Object.entries(indicesCounter).map(([idx, count]) => { return `${count} values with probability ${(values as { weight: number }[])[Number(idx)]?.weight}`; }).join(',') }`, ); } } } } if (isUnique === true && maxRepeatedValuesCount === undefined) { maxRepeatedValuesCount = 1; } let genMaxRepeatedValuesCount: GenerateDefault | GenerateWeightedCount | undefined; if (typeof maxRepeatedValuesCount === 'number') { genMaxRepeatedValuesCount = new GenerateDefault({ defaultValue: maxRepeatedValuesCount }); } else if (typeof maxRepeatedValuesCount === 'object') { genMaxRepeatedValuesCount = new GenerateWeightedCount({}); (genMaxRepeatedValuesCount as GenerateWeightedCount).init( { count: maxRepeatedValuesCount, seed: weightedCountSeed === undefined ? seed : weightedCountSeed, }, ); } let genIndicesObj: GenerateUniqueInt | undefined; let genIndicesObjList: GenerateUniqueInt[] | undefined; if (maxRepeatedValuesCount !== undefined) { if (!isObject(values[0])) { genIndicesObj = new GenerateUniqueInt({ minValue: 0, maxValue: values.length - 1 }); genIndicesObj.genMaxRepeatedValuesCount = genMaxRepeatedValuesCount; genIndicesObj.skipCheck = true; genIndicesObj.init({ count, seed }); } else if (isObject(values[0])) { genIndicesObjList = []; for (const obj of values as { weight: number; values: (number | string | boolean | undefined)[] }[]) { const genIndicesObj = new GenerateUniqueInt({ minValue: 0, maxValue: obj.values.length - 1 }); genIndicesObj.genMaxRepeatedValuesCount = genMaxRepeatedValuesCount; genIndicesObj.skipCheck = true; genIndicesObj.init({ count, seed }); genIndicesObjList.push(genIndicesObj); } } } const rng = prand.xoroshiro128plus(seed); this.state = { rng, values, valuesWeightedIndices, genMaxRepeatedValuesCount, genIndicesObj, genIndicesObjList }; } generate() { const t0 = new Date(); if (this.state === undefined) { throw new Error('state is not defined.'); } let idx: number, value: string | number | boolean | undefined; let valueIdx: number; if (this.state.valuesWeightedIndices === undefined) { if (this.state.genIndicesObj === undefined) { [idx, this.state.rng] = prand.uniformIntDistribution(0, this.state.values.length - 1, this.state.rng); } else { idx = this.state.genIndicesObj.generate() as number; } value = (this.state.values as (number | string | boolean | undefined)[])[idx]; } else { // weighted values [idx, this.state.rng] = prand.uniformIntDistribution( 0, this.state.valuesWeightedIndices.length - 1, this.state.rng, ); valueIdx = this.state.valuesWeightedIndices[idx] as number; const currValues = (this.state.values![valueIdx] as { weight: number; values: (number | string | boolean | undefined)[] }).values; if (this.state.genIndicesObjList === undefined) { // isUnique !== true [idx, this.state.rng] = prand.uniformIntDistribution(0, currValues.length - 1, this.state.rng); } else { // isUnique === true idx = this.state.genIndicesObjList[valueIdx]!.generate() as number; } value = currValues[idx]; } this.timeSpent += (Date.now() - t0.getTime()) / 1000; return value; } } export class GenerateSelfRelationsValuesFromArray extends AbstractGenerator<{ values: (number | string | boolean)[] }> { static override readonly entityKind: string = 'GenerateSelfRelationsValuesFromArray'; private state: { rng: prand.RandomGenerator; firstValuesCount: number; firstValues: (string | number | boolean)[]; } | undefined; override init({ count, seed }: { count: number; seed: number }) { let rng = prand.xoroshiro128plus(seed); // generate 15-40 % values with the same value as reference column let percent = 30; [percent, rng] = prand.uniformIntDistribution(20, 40, rng); const firstValuesCount = Math.floor((percent / 100) * count), firstValues: (string | number | boolean)[] = []; this.state = { rng, firstValuesCount, firstValues }; } generate({ i }: { i: number }) { if (this.state === undefined) { throw new Error('state is not defined.'); } const { values } = this.params; let idx: number; if (i < this.state.firstValuesCount) { this.state.firstValues.push(values[i]!); return values[i]; } else { [idx, this.state.rng] = prand.uniformIntDistribution(0, this.state.firstValues.length - 1, this.state.rng); return this.state.firstValues[idx]; } } } export class GenerateIntPrimaryKey extends AbstractGenerator<{}> { static override readonly entityKind: string = 'GenerateIntPrimaryKey'; public maxValue?: number | bigint; override init({ count }: { count: number; seed: number }) { if (this.maxValue !== undefined && count > this.maxValue) { throw new Error('count exceeds max number for this column type.'); } } generate({ i }: { i: number }) { if (this.dataType === 'bigint') { return BigInt(i + 1); } return i + 1; } } export class GenerateNumber extends AbstractGenerator< { minValue?: number; maxValue?: number; precision?: number; isUnique?: boolean; arraySize?: number; } > { static override readonly entityKind: string = 'GenerateNumber'; private state: { rng: prand.RandomGenerator; minValue: number; maxValue: number; precision: number; } | undefined; override uniqueVersionOfGen = GenerateUniqueNumber; override init({ count, seed }: { seed: number; count: number }) { super.init({ count, seed }); let { minValue, maxValue, precision } = this.params; if (precision === undefined) { precision = 100; } if (maxValue === undefined) { maxValue = precision * 1000; } else { maxValue *= precision; } if (minValue === undefined) { minValue = -maxValue; } else { minValue *= precision; } const rng = prand.xoroshiro128plus(seed); this.state = { rng, minValue, maxValue, precision }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let value: number; [value, this.state.rng] = prand.uniformIntDistribution(this.state.minValue, this.state.maxValue, this.state.rng); return value / this.state.precision; } } export class GenerateUniqueNumber extends AbstractGenerator< { minValue?: number; maxValue?: number; precision?: number; isUnique?: boolean; } > { static override readonly entityKind: string = 'GenerateUniqueNumber'; private state: { genUniqueIntObj: GenerateUniqueInt; minValue: number; maxValue: number; precision: number; } | undefined; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { let { minValue, maxValue, precision } = this.params; if (precision === undefined) { precision = 100; } if (maxValue === undefined) { maxValue = count * precision; } else { maxValue *= precision; } if (minValue === undefined) { minValue = -maxValue; } else { minValue *= precision; } const genUniqueIntObj = new GenerateUniqueInt({ minValue, maxValue }); genUniqueIntObj.init({ count, seed }); this.state = { genUniqueIntObj, minValue, maxValue, precision }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const value = this.state.genUniqueIntObj.generate() as number / this.state.precision; return value; } } export class GenerateInt extends AbstractGenerator<{ minValue?: number | bigint; maxValue?: number | bigint; isUnique?: boolean; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateInt'; private state: { rng: prand.RandomGenerator; minValue: number | bigint; maxValue: number | bigint; } | undefined; override uniqueVersionOfGen = GenerateUniqueInt; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); let { minValue, maxValue } = this.params; if (maxValue === undefined) { maxValue = 1000; } if (minValue === undefined) { minValue = -maxValue; } if (typeof minValue === 'number' && typeof maxValue === 'number') { minValue = minValue >= 0 ? Math.ceil(minValue) : Math.floor(minValue); maxValue = maxValue >= 0 ? Math.floor(maxValue) : Math.ceil(maxValue); } const rng = prand.xoroshiro128plus(seed); this.state = { rng, minValue, maxValue }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let value: number | bigint; if (typeof this.state.minValue === 'bigint' && typeof this.state.maxValue === 'bigint') { [value, this.state.rng] = prand.uniformBigIntDistribution( this.state.minValue, this.state.maxValue, this.state.rng, ); } else { [value, this.state.rng] = prand.uniformIntDistribution( this.state.minValue as number, this.state.maxValue as number, this.state.rng, ); } if (this.dataType === 'string') { return String(value); } if (this.dataType === 'bigint') { value = BigInt(value); } return value; } } export class GenerateUniqueInt extends AbstractGenerator<{ minValue?: number | bigint; maxValue?: number | bigint; isUnique?: boolean; }> { static override readonly entityKind: string = 'GenerateUniqueInt'; public genMaxRepeatedValuesCount: GenerateDefault | GenerateWeightedCount | undefined; public skipCheck?: boolean = false; public state: { rng: prand.RandomGenerator; minValue: number | bigint; maxValue: number | bigint; intervals: (number | bigint)[][]; integersCount: Map; } | undefined; public override isUnique = true; public override timeSpent = 0; override init({ count, seed }: { count: number; seed: number }) { const rng = prand.xoroshiro128plus(seed); let { minValue, maxValue } = this.params; if (maxValue === undefined) { maxValue = count * 10; } if (minValue === undefined) { minValue = -maxValue; } const intervals = [[minValue, maxValue]]; const integersCount = new Map(); if (typeof minValue === 'bigint' && typeof maxValue === 'bigint') { if (this.skipCheck === false && maxValue - minValue + BigInt(1) < count) { throw new Error( 'count exceeds max number of unique integers in given range(min, max), try to make range wider.', ); } } else if (typeof minValue === 'number' && typeof maxValue === 'number') { minValue = minValue >= 0 ? Math.ceil(minValue) : Math.floor(minValue); maxValue = maxValue >= 0 ? Math.floor(maxValue) : Math.ceil(maxValue); if (this.skipCheck === false && maxValue - minValue + 1 < count) { throw new Error( 'count exceeds max number of unique integers in given range(min, max), try to make range wider.', ); } } else { throw new Error( 'minValue and maxValue should be the same type.', ); } this.state = { rng, minValue, maxValue, intervals, integersCount }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let intervalIdx: number, numb: number | bigint | undefined; const intervalsToAdd: (number | bigint)[][] = []; if (this.state.intervals.length === 0) { if (this.skipCheck === false) { throw new RangeError( 'generateUniqueInt: count exceeds max number of unique integers in given range(min, max), try to increase range.', ); } else { return; } } [intervalIdx, this.state.rng] = prand.uniformIntDistribution( 0, this.state.intervals.length - 1, this.state.rng, ); const interval = this.state.intervals[intervalIdx] as (number | bigint)[]; const [currMinNumb, currMaxNumb] = [interval[0] as number | bigint, interval[1] as number | bigint]; if (typeof currMinNumb === 'number' && typeof currMaxNumb === 'number') { numb = this.generateNumber(currMinNumb, currMaxNumb, intervalsToAdd as number[][], intervalIdx); } else if (typeof currMinNumb === 'bigint' && typeof currMaxNumb === 'bigint') { numb = this.generateBigint( currMinNumb as bigint, currMaxNumb as bigint, intervalsToAdd as bigint[][], intervalIdx, ); } if (this.dataType === 'string') { return String(numb); } if (this.dataType === 'bigint' && numb !== undefined) { numb = BigInt(numb); } return numb; } generateNumber(currMinNumb: number, currMaxNumb: number, intervalsToAdd: number[][], intervalIdx: number) { let numb: number; [numb, this.state!.rng] = prand.uniformIntDistribution(currMinNumb, currMaxNumb, this.state!.rng); if (this.genMaxRepeatedValuesCount !== undefined) { if (this.state!.integersCount.get(numb) === undefined) { this.state!.integersCount.set(numb, this.genMaxRepeatedValuesCount.generate() as number); } this.state!.integersCount.set(numb, this.state!.integersCount.get(numb)! - 1); } if (this.state!.integersCount.get(numb) === undefined || this.state!.integersCount.get(numb) === 0) { if (numb === currMinNumb) { intervalsToAdd = numb + 1 <= currMaxNumb ? [[numb + 1, currMaxNumb]] : []; } else if (numb === currMaxNumb) { intervalsToAdd = [[currMinNumb, numb - 1]]; } else { intervalsToAdd = [ [currMinNumb, numb - 1], [numb + 1, currMaxNumb], ]; } const t0 = new Date(); this.state!.intervals[intervalIdx] = this.state!.intervals[this.state!.intervals.length - 1]!; this.state?.intervals.pop(); this.timeSpent += (Date.now() - t0.getTime()) / 1000; this.state!.intervals.push(...intervalsToAdd); } return numb; } generateBigint(currMinNumb: bigint, currMaxNumb: bigint, intervalsToAdd: bigint[][], intervalIdx: number) { let numb: bigint; [numb, this.state!.rng] = prand.uniformBigIntDistribution(currMinNumb, currMaxNumb, this.state!.rng); if (this.genMaxRepeatedValuesCount !== undefined) { if (this.state!.integersCount.get(numb) === undefined) { this.state!.integersCount.set(numb, this.genMaxRepeatedValuesCount.generate() as number); } this.state!.integersCount.set(numb, this.state!.integersCount.get(numb)! - 1); } if (this.state!.integersCount.get(numb) === undefined || this.state!.integersCount.get(numb) === 0) { if (numb === currMinNumb) { intervalsToAdd = numb + BigInt(1) <= currMaxNumb ? [[numb + BigInt(1), currMaxNumb]] : []; } else if (numb === currMaxNumb) { intervalsToAdd = [[currMinNumb, numb - BigInt(1)]]; } else { intervalsToAdd = [ [currMinNumb, numb - BigInt(1)], [numb + BigInt(1), currMaxNumb], ]; } this.state!.intervals[intervalIdx] = this.state!.intervals[this.state!.intervals.length - 1]!; this.state?.intervals.pop(); this.state!.intervals.push(...intervalsToAdd); } return numb; } } export class GenerateBoolean extends AbstractGenerator<{ arraySize?: number }> { static override readonly entityKind: string = 'GenerateBoolean'; private state: { rng: prand.RandomGenerator; } | undefined; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let value: number; [value, this.state.rng] = prand.uniformIntDistribution(0, 1, this.state.rng); return value === 1; } } export class GenerateDate extends AbstractGenerator<{ minDate?: string | Date; maxDate?: string | Date; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateDate'; private state: { rng: prand.RandomGenerator; minDate: Date; maxDate: Date; } | undefined; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); let { minDate, maxDate } = this.params; const anchorDate = new Date('2024-05-08'); const deltaMilliseconds = 4 * 31536000000; if (typeof minDate === 'string') { minDate = new Date(minDate); } if (typeof maxDate === 'string') { maxDate = new Date(maxDate); } if (minDate === undefined) { if (maxDate === undefined) { minDate = new Date(anchorDate.getTime() - deltaMilliseconds); maxDate = new Date(anchorDate.getTime() + deltaMilliseconds); } else { minDate = new Date(maxDate.getTime() - (2 * deltaMilliseconds)); } } if (maxDate === undefined) { maxDate = new Date(minDate.getTime() + (2 * deltaMilliseconds)); } this.state = { rng, minDate, maxDate }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let milliseconds: number; [milliseconds, this.state.rng] = prand.uniformIntDistribution( this.state.minDate.getTime(), this.state.maxDate.getTime(), this.state.rng, ); const date = new Date(milliseconds); if (this.dataType === 'string') { return date.toISOString().replace(/T.+/, ''); } return date; } } export class GenerateTime extends AbstractGenerator<{ arraySize?: number }> { static override readonly entityKind: string = 'GenerateTime'; private state: { rng: prand.RandomGenerator; } | undefined; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const anchorDateTime = new Date('2024-05-08T12:00:00.000Z'); const oneDayInMilliseconds = 86400000; let date = new Date(); let milliseconds: number; [milliseconds, this.state.rng] = prand.uniformIntDistribution( -oneDayInMilliseconds, oneDayInMilliseconds, this.state.rng, ); date = new Date(date.setTime(anchorDateTime.getTime() + milliseconds)); return date.toISOString().replace(/(\d{4}-\d{2}-\d{2}T)|(\.\d{3}Z)/g, ''); } } export class GenerateTimestampInt extends AbstractGenerator<{ unitOfTime?: 'seconds' | 'milliseconds' }> { static override readonly entityKind: string = 'GenerateTimestampInt'; private state: { generateTimestampObj: GenerateTimestamp; } | undefined; override init({ count, seed }: { count: number; seed: number }) { const generateTimestampObj = new GenerateTimestamp({}); generateTimestampObj.dataType = 'date'; generateTimestampObj.init({ count, seed }); this.state = { generateTimestampObj }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const date = this.state.generateTimestampObj.generate() as Date; if (this.params.unitOfTime === 'seconds') { return Math.floor(date.getTime() / 1000); } else if (this.params.unitOfTime === 'milliseconds') { return date.getTime(); } else { // this.params.unitOfTime === undefined return Math.floor(date.getTime() / 1000); } } } export class GenerateTimestamp extends AbstractGenerator<{ arraySize?: number }> { static override readonly entityKind: string = 'GenerateTimestamp'; private state: { rng: prand.RandomGenerator; } | undefined; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const anchorTimestamp = new Date('2024-05-08'); const twoYearsInMilliseconds = 2 * 31536000000; let date = new Date(); let milliseconds: number; [milliseconds, this.state.rng] = prand.uniformIntDistribution( -twoYearsInMilliseconds, twoYearsInMilliseconds, this.state.rng, ); date = new Date(date.setTime(anchorTimestamp.getTime() + milliseconds)); if (this.dataType === 'string') { return date .toISOString() .replace('T', ' ') .replace(/\.\d{3}Z/, ''); } return date; } } export class GenerateDatetime extends AbstractGenerator<{ arraySize?: number }> { static override readonly entityKind: string = 'GenerateDatetime'; private state: { rng: prand.RandomGenerator; } | undefined; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const anchorDate = new Date('2024-05-08'); const twoYearsInMilliseconds = 2 * 31536000000; let date = new Date(); let milliseconds: number; [milliseconds, this.state.rng] = prand.uniformIntDistribution( -twoYearsInMilliseconds, twoYearsInMilliseconds, this.state.rng, ); date = new Date(date.setTime(anchorDate.getTime() + milliseconds)); if (this.dataType === 'string') { return date .toISOString() .replace('T', ' ') .replace(/\.\d{3}Z/, ''); } return date; } } export class GenerateYear extends AbstractGenerator<{ arraySize?: number }> { static override readonly entityKind: string = 'GenerateYear'; private state: { rng: prand.RandomGenerator; } | undefined; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const anchorDate = new Date('2024-05-08'); const tenYears = 10; let date = new Date(); let years: number; [years, this.state.rng] = prand.uniformIntDistribution(-tenYears, tenYears, this.state.rng); date = new Date(date.setFullYear(anchorDate.getFullYear() + years)); return date .toISOString() .replace(/(-\d{2}-\d{2}T)|(\d{2}:\d{2}:\d{2}\.\d{3}Z)/g, ''); } } export class GenerateJson extends AbstractGenerator<{ arraySize?: number }> { static override readonly entityKind: string = 'GenerateJson'; private state: { emailGeneratorObj: GenerateEmail; nameGeneratorObj: GenerateFirstName; booleanGeneratorObj: GenerateBoolean; salaryGeneratorObj: GenerateInt; dateGeneratorObj: GenerateDate; visitedCountriesNumberGeneratorObj: GenerateInt; seed: number; } | undefined; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const emailGeneratorObj = new GenerateEmail({}); emailGeneratorObj.init({ count, seed }); const nameGeneratorObj = new GenerateFirstName({}); nameGeneratorObj.init({ count, seed }); const booleanGeneratorObj = new GenerateBoolean({}); booleanGeneratorObj.init({ count, seed, }); const salaryGeneratorObj = new GenerateInt({ minValue: 200, maxValue: 4000 }); salaryGeneratorObj.init({ count, seed, ...salaryGeneratorObj.params, }); const dateGeneratorObj = new GenerateDate({}); dateGeneratorObj.dataType = 'string'; dateGeneratorObj.init({ count, seed }); const visitedCountriesNumberGeneratorObj = new GenerateInt({ minValue: 0, maxValue: 4 }); visitedCountriesNumberGeneratorObj.init( { count, seed, ...visitedCountriesNumberGeneratorObj.params }, ); this.state = { emailGeneratorObj, nameGeneratorObj, booleanGeneratorObj, salaryGeneratorObj, dateGeneratorObj, visitedCountriesNumberGeneratorObj, seed, }; } generate({ i }: { i: number }) { if (this.state === undefined) { throw new Error('state is not defined.'); } const visitedCountries: string[] = []; const email = this.state.emailGeneratorObj.generate(); const name = this.state.nameGeneratorObj.generate(); const isGraduated = this.state.booleanGeneratorObj.generate(); const hasJob = this.state.booleanGeneratorObj.generate(); const salary = this.state.salaryGeneratorObj.generate() as number; const startedWorking = this.state.dateGeneratorObj.generate() as string; const visitedCountriesNumber = this.state.visitedCountriesNumberGeneratorObj.generate() as number; const uniqueCountriesGeneratorObj = new GenerateUniqueCountry({}); uniqueCountriesGeneratorObj.init({ count: visitedCountriesNumber, seed: this.state.seed + i, }); for (let j = 0; j < visitedCountriesNumber; j++) { visitedCountries.push(uniqueCountriesGeneratorObj.generate()); } const returnJson = hasJob ? { email, name, isGraduated, hasJob, salary, startedWorking, visitedCountries, } : { email, name, isGraduated, hasJob, visitedCountries, }; if (this.dataType === 'string') { return JSON.stringify(returnJson); } return returnJson; } } export class GenerateEnum extends AbstractGenerator<{ enumValues: (string | number | boolean)[] }> { static override readonly entityKind: string = 'GenerateEnum'; private state: { enumValuesGenerator: GenerateValuesFromArray; } | undefined; override init({ count, seed }: { count: number; seed: number }) { const { enumValues } = this.params; const enumValuesGenerator = new GenerateValuesFromArray({ values: enumValues }); enumValuesGenerator.init({ count, seed }); this.state = { enumValuesGenerator }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } // logic for this generator return this.state.enumValuesGenerator.generate(); } } export class GenerateInterval extends AbstractGenerator<{ fields?: | 'year' | 'month' | 'day' | 'hour' | 'minute' | 'second' | 'year to month' | 'day to hour' | 'day to minute' | 'day to second' | 'hour to minute' | 'hour to second' | 'minute to second'; isUnique?: boolean; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateInterval'; private state: { rng: prand.RandomGenerator; fieldsToGenerate: string[]; } | undefined; override uniqueVersionOfGen: new(params: any) => AbstractGenerator = GenerateUniqueInterval; private config: { [key: string]: { from: number; to: number } } = { year: { from: 0, to: 5, }, month: { from: 0, to: 12, }, day: { from: 1, to: 29, }, hour: { from: 0, to: 24, }, minute: { from: 0, to: 60, }, second: { from: 0, to: 60, }, }; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const allFields = ['year', 'month', 'day', 'hour', 'minute', 'second']; let fieldsToGenerate: string[] = allFields; if (this.params.fields !== undefined && this.params.fields?.includes(' to ')) { const tokens = this.params.fields.split(' to '); const endIdx = allFields.indexOf(tokens[1]!); fieldsToGenerate = allFields.slice(0, endIdx + 1); } else if (this.params.fields !== undefined) { const endIdx = allFields.indexOf(this.params.fields); fieldsToGenerate = allFields.slice(0, endIdx + 1); } const rng = prand.xoroshiro128plus(seed); this.state = { rng, fieldsToGenerate }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let interval = '', numb: number; for (const field of this.state.fieldsToGenerate) { const from = this.config[field]!.from, to = this.config[field]!.to; [numb, this.state.rng] = prand.uniformIntDistribution(from, to, this.state.rng); interval += `${numb} ${field} `; } return interval; } } // has a newer version export class GenerateUniqueInterval extends AbstractGenerator<{ fields?: | 'year' | 'month' | 'day' | 'hour' | 'minute' | 'second' | 'year to month' | 'day to hour' | 'day to minute' | 'day to second' | 'hour to minute' | 'hour to second' | 'minute to second'; isUnique?: boolean; }> { static override readonly 'entityKind': string = 'GenerateUniqueInterval'; private state: { rng: prand.RandomGenerator; fieldsToGenerate: string[]; intervalSet: Set; } | undefined; public override isUnique = true; private config: { [key: string]: { from: number; to: number } } = { year: { from: 0, to: 5, }, month: { from: 0, to: 12, }, day: { from: 1, to: 29, }, hour: { from: 0, to: 24, }, minute: { from: 0, to: 60, }, second: { from: 0, to: 60, }, }; override init({ count, seed }: { count: number; seed: number }) { const allFields = ['year', 'month', 'day', 'hour', 'minute', 'second']; let fieldsToGenerate: string[] = allFields; if (this.params.fields !== undefined && this.params.fields?.includes(' to ')) { const tokens = this.params.fields.split(' to '); const endIdx = allFields.indexOf(tokens[1]!); fieldsToGenerate = allFields.slice(0, endIdx + 1); } else if (this.params.fields !== undefined) { const endIdx = allFields.indexOf(this.params.fields); fieldsToGenerate = allFields.slice(0, endIdx + 1); } let maxUniqueIntervalsNumber = 1; for (const field of fieldsToGenerate) { const from = this.config[field]!.from, to = this.config[field]!.to; maxUniqueIntervalsNumber *= from - to + 1; } if (count > maxUniqueIntervalsNumber) { throw new RangeError(`count exceeds max number of unique intervals(${maxUniqueIntervalsNumber})`); } const rng = prand.xoroshiro128plus(seed); const intervalSet = new Set(); this.state = { rng, fieldsToGenerate, intervalSet }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let interval, numb: number; for (;;) { interval = ''; for (const field of this.state.fieldsToGenerate) { const from = this.config[field]!.from, to = this.config[field]!.to; [numb, this.state.rng] = prand.uniformIntDistribution(from, to, this.state.rng); interval += `${numb} ${field} `; } if (!this.state.intervalSet.has(interval)) { this.state.intervalSet.add(interval); break; } } return interval; } } // has a newer version export class GenerateString extends AbstractGenerator<{ isUnique?: boolean; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateString'; private state: { rng: prand.RandomGenerator } | undefined; override uniqueVersionOfGen = GenerateUniqueString; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const minStringLength = 7; const maxStringLength = 20; const stringChars = '1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'; let idx: number, strLength: number, currStr: string; currStr = ''; [strLength, this.state.rng] = prand.uniformIntDistribution( minStringLength, maxStringLength, this.state.rng, ); for (let j = 0; j < strLength; j++) { [idx, this.state.rng] = prand.uniformIntDistribution( 0, stringChars.length - 1, this.state.rng, ); currStr += stringChars[idx]; } return currStr; } } // has a newer version export class GenerateUniqueString extends AbstractGenerator<{ isUnique?: boolean }> { static override readonly entityKind: string = 'GenerateUniqueString'; private state: { rng: prand.RandomGenerator } | undefined; public override isUnique = true; override init({ seed }: { seed: number }) { const rng = prand.xoroshiro128plus(seed); this.state = { rng }; } generate({ i }: { i: number }) { if (this.state === undefined) { throw new Error('state is not defined.'); } const minStringLength = 7; const maxStringLength = 20; const stringChars = '1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'; let idx: number, strLength: number; let currStr: string; currStr = ''; const uniqueStr = i.toString(16); [strLength, this.state.rng] = prand.uniformIntDistribution( minStringLength, maxStringLength - uniqueStr.length, this.state.rng, ); for (let j = 0; j < strLength - uniqueStr.length; j++) { [idx, this.state.rng] = prand.uniformIntDistribution( 0, stringChars.length - 1, this.state.rng, ); currStr += stringChars[idx]; } return currStr.slice(0, 4) + uniqueStr + currStr.slice(4); } } export class GenerateUUID extends AbstractGenerator<{ arraySize?: number; }> { static override readonly entityKind: string = 'GenerateUUID'; public override isUnique = true; private state: { rng: prand.RandomGenerator } | undefined; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } // TODO generate uuid using string generator const stringChars = '1234567890abcdef'; let idx: number, currStr: string; const strLength = 36; // uuid v4 const uuidTemplate = '########-####-4###-####-############'; currStr = ''; for (let i = 0; i < strLength; i++) { [idx, this.state.rng] = prand.uniformIntDistribution( 0, stringChars.length - 1, this.state.rng, ); if (uuidTemplate[i] === '#') { currStr += stringChars[idx]; continue; } currStr += uuidTemplate[i]; } return currStr; } } export class GenerateFirstName extends AbstractGenerator<{ isUnique?: boolean; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateFirstName'; override timeSpent: number = 0; private state: { rng: prand.RandomGenerator; } | undefined; override uniqueVersionOfGen = GenerateUniqueFirstName; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); if (this.stringLength !== undefined && this.stringLength < maxFirstNameLength) { throw new Error( `You can't use first name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxFirstNameLength}.`, ); } this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } // logic for this generator // names dataset contains about 30000 unique names. let idx: number; [idx, this.state.rng] = prand.uniformIntDistribution(0, firstNames.length - 1, this.state.rng); return firstNames[idx] as string; } } export class GenerateUniqueFirstName extends AbstractGenerator<{ isUnique?: boolean; }> { static override readonly entityKind: string = 'GenerateUniqueFirstName'; private state: { genIndicesObj: GenerateUniqueInt; } | undefined; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { if (count > firstNames.length) { throw new Error('count exceeds max number of unique first names.'); } if (this.stringLength !== undefined && this.stringLength < maxFirstNameLength) { throw new Error( `You can't use first name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxFirstNameLength}.`, ); } const genIndicesObj = new GenerateUniqueInt({ minValue: 0, maxValue: firstNames.length - 1 }); genIndicesObj.init({ count, seed }); this.state = { genIndicesObj }; } generate() { // names dataset contains about 30000 unique names. if (this.state === undefined) { throw new Error('state is not defined.'); } const nameIdx = this.state.genIndicesObj.generate() as number; const name = firstNames[nameIdx] as string; return name; } } export class GenerateLastName extends AbstractGenerator<{ isUnique?: boolean; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateLastName'; private state: { rng: prand.RandomGenerator; } | undefined; override uniqueVersionOfGen = GenerateUniqueLastName; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); if (this.stringLength !== undefined && this.stringLength < maxLastNameLength) { throw new Error( `You can't use last name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxLastNameLength}.`, ); } this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let idx: number; [idx, this.state.rng] = prand.uniformIntDistribution(0, lastNames.length - 1, this.state.rng); return lastNames[idx]; } } export class GenerateUniqueLastName extends AbstractGenerator<{ isUnique?: boolean }> { static override readonly entityKind: string = 'GenerateUniqueLastName'; private state: { genIndicesObj: GenerateUniqueInt; } | undefined; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { if (count > lastNames.length) { throw new Error('count exceeds max number of unique last names.'); } if (this.stringLength !== undefined && this.stringLength < maxLastNameLength) { throw new Error( `You can't use last name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxLastNameLength}.`, ); } const genIndicesObj = new GenerateUniqueInt({ minValue: 0, maxValue: lastNames.length - 1 }); genIndicesObj.init({ count, seed }); this.state = { genIndicesObj }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const surnameIdx = this.state.genIndicesObj.generate() as number; const surname = lastNames[surnameIdx] as string; return surname; } } export class GenerateFullName extends AbstractGenerator<{ isUnique?: boolean; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateFullName'; private state: { rng: prand.RandomGenerator; } | undefined; override uniqueVersionOfGen = GenerateUniqueFullName; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); if (this.stringLength !== undefined && this.stringLength < (maxFirstNameLength + maxLastNameLength + 1)) { throw new Error( `You can't use full name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${ maxFirstNameLength + maxLastNameLength + 1 }.`, ); } this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let idx: number; [idx, this.state.rng] = prand.uniformIntDistribution(0, firstNames.length - 1, this.state.rng); const name = firstNames[idx] as string; [idx, this.state.rng] = prand.uniformIntDistribution(0, lastNames.length - 1, this.state.rng); const surname = lastNames[idx] as string; const fullName = `${name} ${surname}`; return fullName; } } export class GenerateUniqueFullName extends AbstractGenerator<{ isUnique?: boolean; }> { static override readonly entityKind: string = 'GenerateUniqueFullName'; private state: { fullnameSet: Set; rng: prand.RandomGenerator; } | undefined; public override isUnique = true; public override timeSpent = 0; override init({ count, seed }: { count: number; seed: number }) { const t0 = new Date(); const maxUniqueFullNamesNumber = firstNames.length * lastNames.length; if (count > maxUniqueFullNamesNumber) { throw new RangeError( `count exceeds max number of unique full names(${maxUniqueFullNamesNumber}).`, ); } if (this.stringLength !== undefined && this.stringLength < (maxFirstNameLength + maxLastNameLength + 1)) { throw new Error( `You can't use full name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${ maxFirstNameLength + maxLastNameLength + 1 }.`, ); } const rng = prand.xoroshiro128plus(seed); const fullnameSet = new Set(); this.state = { rng, fullnameSet }; this.timeSpent += (Date.now() - t0.getTime()) / 1000; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let fullname: string, name: string, surname: string, idx: number; const t0 = new Date(); for (;;) { [idx, this.state.rng] = prand.uniformIntDistribution(0, firstNames.length - 1, this.state.rng); name = firstNames[idx] as string; [idx, this.state.rng] = prand.uniformIntDistribution(0, lastNames.length - 1, this.state.rng); surname = lastNames[idx] as string; fullname = `${name} ${surname}`; if (!this.state.fullnameSet.has(fullname)) { this.state.fullnameSet.add(fullname); break; } } this.timeSpent += (Date.now() - t0.getTime()) / 1000; return fullname; } } export class GenerateEmail extends AbstractGenerator<{ arraySize?: number; }> { static override readonly entityKind: string = 'GenerateEmail'; private state: { genIndicesObj: GenerateUniqueInt; arraysToGenerateFrom: string[][]; } | undefined; public override timeSpent: number = 0; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const domainsArray = emailDomains; const adjectivesArray = adjectives; const namesArray = firstNames; const maxUniqueEmailsNumber = adjectivesArray.length * namesArray.length * domainsArray.length; if (count > maxUniqueEmailsNumber) { throw new RangeError( `count exceeds max number of unique emails(${maxUniqueEmailsNumber}).`, ); } const maxEmailLength = maxAdjectiveLength + maxFirstNameLength + maxEmailDomainLength + 2; if (this.stringLength !== undefined && this.stringLength < maxEmailLength) { throw new Error( `You can't use email generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxEmailLength}.`, ); } const arraysToGenerateFrom = [adjectivesArray, namesArray, domainsArray]; const genIndicesObj = new GenerateUniqueInt({ minValue: 0, maxValue: maxUniqueEmailsNumber - 1, }); genIndicesObj.init({ count, seed }); this.state = { genIndicesObj, arraysToGenerateFrom }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const t0 = new Date(); const emailIndex = this.state.genIndicesObj.generate() as number; this.timeSpent += (Date.now() - t0.getTime()) / 1000; const tokens = fastCartesianProduct( this.state.arraysToGenerateFrom, emailIndex, ) as string[]; const [adjective, name, domain] = [tokens[0] as string, tokens[1] as string, tokens[2] as string]; const email = `${adjective}_${name.toLowerCase()}@${domain}`; return email; } } export class GeneratePhoneNumber extends AbstractGenerator<{ template?: string; prefixes?: string[]; generatedDigitsNumbers?: number | number[]; arraySize?: number; }> { static override readonly entityKind: string = 'GeneratePhoneNumber'; private state: { rng: prand.RandomGenerator; placeholdersCount?: number; prefixesArray: string[]; generatedDigitsNumbers: number[]; generatorsMap: Map; phoneNumbersSet: Set; } | undefined; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); let { generatedDigitsNumbers } = this.params; const { prefixes, template } = this.params; const rng = prand.xoroshiro128plus(seed); if (template !== undefined) { if (this.stringLength !== undefined && this.stringLength < template.length) { throw new Error( `Length of phone number template is shorter than db column length restriction: ${this.stringLength}. Set the maximum string length to at least ${template.length}.`, ); } const iterArray = [...template.matchAll(/#/g)]; const placeholdersCount = iterArray.length; const maxUniquePhoneNumbersCount = Math.pow(10, placeholdersCount); if (maxUniquePhoneNumbersCount < count) { throw new RangeError( `count exceeds max number of unique phone numbers(${maxUniquePhoneNumbersCount}).`, ); } const generatorsMap = new Map(); const genObj = new GenerateUniqueInt({ minValue: 0, maxValue: maxUniquePhoneNumbersCount - 1 }); genObj.init({ count, seed, }); generatorsMap.set( template, genObj, ); const prefixesArray: string[] = []; const generatedDigitsNumbers: number[] = []; const phoneNumbersSet = new Set(); this.state = { rng, placeholdersCount, generatorsMap, prefixesArray, generatedDigitsNumbers, phoneNumbersSet }; return; } let prefixesArray: string[]; if (prefixes === undefined || prefixes.length === 0) { prefixesArray = phonesInfo.map((phoneInfo) => phoneInfo.split(',').slice(0, -1).join(' ')); generatedDigitsNumbers = phonesInfo.map((phoneInfo) => { // tokens = ["380","99","9"] = // = ["country prefix", "operator prefix", "number length including operator prefix and excluding country prefix"] const tokens = phoneInfo.split(','); const operatorPrefixLength = tokens[1]!.replaceAll(' ', '').length; return Number(tokens[2]) - operatorPrefixLength; }); } else { prefixesArray = prefixes; if (typeof generatedDigitsNumbers === 'number') { generatedDigitsNumbers = Array.from({ length: prefixes.length }).fill( generatedDigitsNumbers, ); } else if ( generatedDigitsNumbers === undefined || generatedDigitsNumbers.length === 0 ) { generatedDigitsNumbers = Array.from({ length: prefixes.length }).fill(7); } } const maxPrefixLength = Math.max(...prefixesArray.map((prefix) => prefix.length)); const maxGeneratedDigits = Math.max(...generatedDigitsNumbers); if (this.stringLength !== undefined && this.stringLength < (maxPrefixLength + maxGeneratedDigits)) { throw new Error( `You can't use phone number generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${ maxPrefixLength + maxGeneratedDigits }.`, ); } if (new Set(prefixesArray).size !== prefixesArray.length) { throw new Error('prefixes are not unique.'); } const maxUniquePhoneNumbersCount = generatedDigitsNumbers.reduce( (a, b) => a + Math.pow(10, b), 0, ); if (maxUniquePhoneNumbersCount < count) { throw new RangeError( `count exceeds max number of unique phone numbers(${maxUniquePhoneNumbersCount}).`, ); } const generatorsMap = new Map(); let maxValue: number, prefix: string, generatedDigitsNumber: number; for (const [i, element] of prefixesArray.entries()) { prefix = element as string; generatedDigitsNumber = generatedDigitsNumbers[i] as number; maxValue = Math.pow(10, generatedDigitsNumber) - 1; if (!generatorsMap.has(prefix)) { const genObj = new GenerateUniqueInt({ minValue: 0, maxValue }); genObj.init({ count: Math.min(count, maxValue + 1), seed, }); genObj.skipCheck = true; generatorsMap.set( prefix, genObj, ); } } const phoneNumbersSet = new Set(); this.state = { rng, prefixesArray, generatedDigitsNumbers, generatorsMap, phoneNumbersSet }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let prefix: string, generatedDigitsNumber: number, numberBody: string, phoneNumber: string, idx: number; if (this.params.template === undefined) { for (;;) { [idx, this.state.rng] = prand.uniformIntDistribution( 0, this.state.prefixesArray.length - 1, this.state.rng, ); prefix = this.state.prefixesArray[idx] as string; generatedDigitsNumber = this.state.generatedDigitsNumbers[idx] as number; numberBody = String(this.state.generatorsMap.get(prefix)?.generate()); if (numberBody === 'undefined') { this.state.prefixesArray!.splice(idx, 1); this.state.generatedDigitsNumbers.splice(idx, 1); this.state.generatorsMap.delete(prefix); continue; } if (this.state.phoneNumbersSet.has(numberBody)) { continue; } this.state.phoneNumbersSet.add(numberBody); break; } const digitsNumberDiff = generatedDigitsNumber - numberBody.length; if (digitsNumberDiff > 0) { numberBody = '0'.repeat(digitsNumberDiff) + numberBody; } phoneNumber = prefix + '' + numberBody; return phoneNumber; } else { numberBody = String(this.state.generatorsMap.get(this.params.template)?.generate()); phoneNumber = fillTemplate({ template: this.params.template, values: [...numberBody], defaultValue: '0', placeholdersCount: this.state.placeholdersCount, }); return phoneNumber; } } } export class GenerateCountry extends AbstractGenerator<{ isUnique?: boolean; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateCountry'; private state: { rng: prand.RandomGenerator; } | undefined; override uniqueVersionOfGen = GenerateUniqueCountry; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); if (this.stringLength !== undefined && this.stringLength < maxCountryLength) { throw new Error( `You can't use country generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCountryLength}.`, ); } this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let idx: number; [idx, this.state.rng] = prand.uniformIntDistribution(0, countries.length - 1, this.state.rng); const country = countries[idx] as string; return country; } } export class GenerateUniqueCountry extends AbstractGenerator<{ isUnique?: boolean }> { static override readonly entityKind: string = 'GenerateUniqueCountry'; private state: { genIndicesObj: GenerateUniqueInt; } | undefined; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { if (count > countries.length) { throw new Error('count exceeds max number of unique countries.'); } if (this.stringLength !== undefined && this.stringLength < maxCountryLength) { throw new Error( `You can't use country generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCountryLength}.`, ); } const genIndicesObj = new GenerateUniqueInt({ minValue: 0, maxValue: countries.length - 1 }); genIndicesObj.init({ count, seed }); this.state = { genIndicesObj }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const countryIdx = this.state.genIndicesObj.generate() as number; const country = countries[countryIdx] as string; return country; } } export class GenerateJobTitle extends AbstractGenerator<{ arraySize?: number; }> { static override readonly entityKind: string = 'GenerateJobTitle'; private state: { rng: prand.RandomGenerator; } | undefined; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); if (this.stringLength !== undefined && this.stringLength < maxJobTitleLength) { throw new Error( `You can't use job title generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxJobTitleLength}.`, ); } this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let idx; [idx, this.state.rng] = prand.uniformIntDistribution(0, jobsTitles.length - 1, this.state.rng); return jobsTitles[idx]; } } export class GenerateStreetAddress extends AbstractGenerator<{ isUnique?: boolean; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateStreetAddress'; private state: { rng: prand.RandomGenerator; possStreetNames: string[][]; } | undefined; override uniqueVersionOfGen = GenerateUniqueStreetAddress; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); const possStreetNames = [firstNames, lastNames]; const maxStreetAddressLength = 4 + Math.max(maxFirstNameLength, maxLastNameLength) + 1 + maxStreetSuffixLength; if (this.stringLength !== undefined && this.stringLength < maxStreetAddressLength) { throw new Error( `You can't use street address generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxStreetAddressLength}.`, ); } this.state = { rng, possStreetNames }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let idx, streetBaseNameIdx, streetSuffixIdx, streetNumber; [idx, this.state.rng] = prand.uniformIntDistribution(0, this.state.possStreetNames.length - 1, this.state.rng); [streetBaseNameIdx, this.state.rng] = prand.uniformIntDistribution( 0, this.state.possStreetNames[idx]!.length - 1, this.state.rng, ); [streetSuffixIdx, this.state.rng] = prand.uniformIntDistribution(0, streetSuffix.length - 1, this.state.rng); const streetName = `${this.state.possStreetNames[idx]![streetBaseNameIdx]} ${streetSuffix[streetSuffixIdx]}`; [streetNumber, this.state.rng] = prand.uniformIntDistribution(1, 999, this.state.rng); return `${streetNumber} ${streetName}`; } } export class GenerateUniqueStreetAddress extends AbstractGenerator<{ isUnique?: boolean }> { static override readonly entityKind: string = 'GenerateUniqueStreetAddress'; private state: { rng: prand.RandomGenerator; possStreetNameObjs: { indicesGen: GenerateUniqueInt; maxUniqueStreetNamesNumber: number; count: number; arraysToChooseFrom: string[][]; }[]; } | undefined; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { const streetNumberStrs = Array.from({ length: 999 }, (_, i) => String(i + 1)); const maxUniqueStreetnamesNumber = streetNumberStrs.length * firstNames.length * streetSuffix.length + streetNumberStrs.length * firstNames.length * streetSuffix.length; if (count > maxUniqueStreetnamesNumber) { throw new RangeError( `count exceeds max number of unique street names(${maxUniqueStreetnamesNumber}).`, ); } const maxStreetAddressLength = 4 + Math.max(maxFirstNameLength, maxLastNameLength) + 1 + maxStreetSuffixLength; if (this.stringLength !== undefined && this.stringLength < maxStreetAddressLength) { throw new Error( `You can't use street address generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxStreetAddressLength}.`, ); } const rng = prand.xoroshiro128plus(seed); // ["1", "2", ..., "999"] const possStreetNameObjs = [ { indicesGen: new GenerateUniqueInt({ minValue: 0, maxValue: streetNumberStrs.length * firstNames.length * streetSuffix.length - 1, }), maxUniqueStreetNamesNumber: streetNumberStrs.length * firstNames.length * streetSuffix.length, count: 0, arraysToChooseFrom: [streetNumberStrs, firstNames, streetSuffix], }, { indicesGen: new GenerateUniqueInt({ minValue: 0, maxValue: streetNumberStrs.length * lastNames.length * streetSuffix.length - 1, }), maxUniqueStreetNamesNumber: streetNumberStrs.length * firstNames.length * streetSuffix.length, count: 0, arraysToChooseFrom: [streetNumberStrs, lastNames, streetSuffix], }, ]; for (const possStreetNameObj of possStreetNameObjs) { possStreetNameObj.indicesGen.skipCheck = true; possStreetNameObj.indicesGen.init({ count, seed }); } this.state = { rng, possStreetNameObjs }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let streetNameObjIdx; [streetNameObjIdx, this.state.rng] = prand.uniformIntDistribution( 0, this.state.possStreetNameObjs.length - 1, this.state.rng, ); const streetNameObj = this.state.possStreetNameObjs[streetNameObjIdx]!; const idx = streetNameObj.indicesGen.generate() as number; const values = fastCartesianProduct(streetNameObj.arraysToChooseFrom, idx) as string[]; streetNameObj.count += 1; if (streetNameObj.count === streetNameObj.maxUniqueStreetNamesNumber) { this.state.possStreetNameObjs[streetNameObjIdx] = this.state .possStreetNameObjs.at(-1)!; this.state.possStreetNameObjs.pop(); } const streetName = fillTemplate({ template: '# # #', values, placeholdersCount: 3 }); return streetName; } } export class GenerateCity extends AbstractGenerator<{ isUnique?: boolean; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateCity'; private state: { rng: prand.RandomGenerator; } | undefined; override uniqueVersionOfGen = GenerateUniqueCity; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); if (this.stringLength !== undefined && this.stringLength < maxCityNameLength) { throw new Error( `You can't use city generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCityNameLength}.`, ); } this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let idx; [idx, this.state.rng] = prand.uniformIntDistribution(0, cityNames.length - 1, this.state.rng); return cityNames[idx]; } } export class GenerateUniqueCity extends AbstractGenerator<{ isUnique?: boolean }> { static override readonly entityKind: string = 'GenerateUniqueCity'; private state: { genIndicesObj: GenerateUniqueInt; } | undefined; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { if (count > cityNames.length) { throw new Error('count exceeds max number of unique cities.'); } if (this.stringLength !== undefined && this.stringLength < maxCityNameLength) { throw new Error( `You can't use city generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCityNameLength}.`, ); } const genIndicesObj = new GenerateUniqueInt({ minValue: 0, maxValue: cityNames.length - 1 }); genIndicesObj.init({ count, seed }); this.state = { genIndicesObj }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const cityIdx = this.state.genIndicesObj.generate() as number; const city = cityNames[cityIdx] as string; return city; } } export class GeneratePostcode extends AbstractGenerator<{ isUnique?: boolean; arraySize?: number; }> { static override readonly entityKind: string = 'GeneratePostcode'; private state: { rng: prand.RandomGenerator; templates: string[]; } | undefined; override uniqueVersionOfGen = GenerateUniquePostcode; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); const templates = ['#####', '#####-####']; const maxPostcodeLength = Math.max(...templates.map((template) => template.length)); if (this.stringLength !== undefined && this.stringLength < maxPostcodeLength) { throw new Error( `You can't use postcode generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxPostcodeLength}.`, ); } this.state = { rng, templates }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let idx: number, postcodeNumber: number; [idx, this.state.rng] = prand.uniformIntDistribution(0, this.state.templates.length - 1, this.state.rng); const template = this.state.templates[idx]!; const iterArray = [...template.matchAll(/#/g)]; const placeholdersCount = iterArray.length; [postcodeNumber, this.state.rng] = prand.uniformIntDistribution( 0, Math.pow(10, placeholdersCount) - 1, this.state.rng, ); const postcode = fillTemplate({ template, placeholdersCount, values: [...String(postcodeNumber)], defaultValue: '0', }); return postcode; } } export class GenerateUniquePostcode extends AbstractGenerator<{ isUnique?: boolean }> { static override readonly entityKind: string = 'GenerateUniquePostcode'; private state: { rng: prand.RandomGenerator; templates: { template: string; indicesGen: GenerateUniqueInt; placeholdersCount: number; count: number; maxUniquePostcodeNumber: number; }[]; } | undefined; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { const maxUniquePostcodeNumber = Math.pow(10, 5) + Math.pow(10, 9); if (count > maxUniquePostcodeNumber) { throw new RangeError( `count exceeds max number of unique postcodes(${maxUniquePostcodeNumber}).`, ); } const rng = prand.xoroshiro128plus(seed); const templates = [ { template: '#####', indicesGen: new GenerateUniqueInt({ minValue: 0, maxValue: Math.pow(10, 5) - 1 }), placeholdersCount: 5, count: 0, maxUniquePostcodeNumber: Math.pow(10, 5), }, { template: '#####-####', indicesGen: new GenerateUniqueInt({ minValue: 0, maxValue: Math.pow(10, 9) - 1 }), placeholdersCount: 9, count: 0, maxUniquePostcodeNumber: Math.pow(10, 9), }, ]; const maxPostcodeLength = Math.max(...templates.map((template) => template.template.length)); if (this.stringLength !== undefined && this.stringLength < maxPostcodeLength) { throw new Error( `You can't use postcode generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxPostcodeLength}.`, ); } for (const templateObj of templates) { templateObj.indicesGen.skipCheck = true; templateObj.indicesGen.init({ count, seed }); } this.state = { rng, templates }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let idx: number; [idx, this.state.rng] = prand.uniformIntDistribution(0, this.state.templates.length - 1, this.state.rng); const templateObj = this.state.templates[idx]!; const postcodeNumber = templateObj.indicesGen.generate() as number; templateObj.count += 1; if (templateObj.count === templateObj.maxUniquePostcodeNumber) { this.state.templates[idx] = this.state.templates.at(-1)!; this.state.templates.pop(); } const postcode = fillTemplate({ template: templateObj.template, placeholdersCount: templateObj.placeholdersCount, values: [...String(postcodeNumber)], defaultValue: '0', }); return postcode; } } export class GenerateState extends AbstractGenerator<{ arraySize?: number; }> { static override readonly entityKind: string = 'GenerateState'; private state: { rng: prand.RandomGenerator; } | undefined; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); if (this.stringLength !== undefined && this.stringLength < maxStateLength) { throw new Error( `You can't use state generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxStateLength}.`, ); } this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let idx; [idx, this.state.rng] = prand.uniformIntDistribution(0, states.length - 1, this.state.rng); return states[idx]; } } export class GenerateCompanyName extends AbstractGenerator<{ isUnique?: boolean; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateCompanyName'; private state: { rng: prand.RandomGenerator; templates: { template: string; placeholdersCount: number }[]; } | undefined; override uniqueVersionOfGen = GenerateUniqueCompanyName; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); const templates = [ { template: '#', placeholdersCount: 1 }, { template: '# - #', placeholdersCount: 2 }, { template: '# and #', placeholdersCount: 2 }, { template: '#, # and #', placeholdersCount: 3 }, ]; // max( { template: '#', placeholdersCount: 1 }, { template: '#, # and #', placeholdersCount: 3 } ) const maxCompanyNameLength = Math.max( maxLastNameLength + maxCompanyNameSuffixLength + 1, 3 * maxLastNameLength + 7, ); if (this.stringLength !== undefined && this.stringLength < maxCompanyNameLength) { throw new Error( `You can't use company name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCompanyNameLength}.`, ); } this.state = { rng, templates }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let templateIdx, idx, lastName, companyNameSuffix, companyName; [templateIdx, this.state.rng] = prand.uniformIntDistribution(0, this.state.templates.length - 1, this.state.rng); const templateObj = this.state.templates[templateIdx]!; if (templateObj.template === '#') { [idx, this.state.rng] = prand.uniformIntDistribution(0, lastNames.length - 1, this.state.rng); lastName = lastNames[idx]; [idx, this.state.rng] = prand.uniformIntDistribution(0, companyNameSuffixes.length - 1, this.state.rng); companyNameSuffix = companyNameSuffixes[idx]; companyName = `${lastName} ${companyNameSuffix}`; return companyName; } const values = []; for (let i = 0; i < templateObj.placeholdersCount; i++) { [idx, this.state.rng] = prand.uniformIntDistribution(0, lastNames.length - 1, this.state.rng); values.push(lastNames[idx]!); } companyName = fillTemplate({ template: templateObj.template, values, placeholdersCount: templateObj.placeholdersCount, }); return companyName; } } export class GenerateUniqueCompanyName extends AbstractGenerator<{ isUnique?: boolean }> { static override readonly entityKind: string = 'GenerateUniqueCompanyName'; private state: { rng: prand.RandomGenerator; templates: { template: string; placeholdersCount: number; indicesGen: GenerateUniqueInt; maxUniqueCompanyNameNumber: number; count: number; arraysToChooseFrom: string[][]; }[]; } | undefined; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { const maxUniqueCompanyNameNumber = lastNames.length * companyNameSuffixes.length + Math.pow(lastNames.length, 2) + Math.pow(lastNames.length, 2) + Math.pow(lastNames.length, 3); if (count > maxUniqueCompanyNameNumber) { throw new RangeError( `count exceeds max number of unique company names(${maxUniqueCompanyNameNumber}).`, ); } // max( { template: '#', placeholdersCount: 1 }, { template: '#, # and #', placeholdersCount: 3 } ) const maxCompanyNameLength = Math.max( maxLastNameLength + maxCompanyNameSuffixLength + 1, 3 * maxLastNameLength + 7, ); if (this.stringLength !== undefined && this.stringLength < maxCompanyNameLength) { throw new Error( `You can't use company name generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxCompanyNameLength}.`, ); } const rng = prand.xoroshiro128plus(seed); // when count reach maxUniqueCompanyNameNumber template will be deleted from array const templates = [ { template: '# - #', placeholdersCount: 1, indicesGen: new GenerateUniqueInt({ minValue: 0, maxValue: lastNames.length * companyNameSuffixes.length - 1 }), maxUniqueCompanyNameNumber: lastNames.length * companyNameSuffixes.length, count: 0, arraysToChooseFrom: [lastNames, companyNameSuffixes], }, { template: '# - #', placeholdersCount: 2, indicesGen: new GenerateUniqueInt({ minValue: 0, maxValue: Math.pow(lastNames.length, 2) - 1 }), maxUniqueCompanyNameNumber: Math.pow(lastNames.length, 2), count: 0, arraysToChooseFrom: [lastNames, lastNames], }, { template: '# and #', placeholdersCount: 2, indicesGen: new GenerateUniqueInt({ minValue: 0, maxValue: Math.pow(lastNames.length, 2) - 1 }), maxUniqueCompanyNameNumber: Math.pow(lastNames.length, 2), count: 0, arraysToChooseFrom: [lastNames, lastNames], }, { template: '#, # and #', placeholdersCount: 3, indicesGen: new GenerateUniqueInt({ minValue: 0, maxValue: Math.pow(lastNames.length, 3) - 1 }), maxUniqueCompanyNameNumber: Math.pow(lastNames.length, 3), count: 0, arraysToChooseFrom: [lastNames, lastNames, lastNames], }, ]; for (const templateObj of templates) { templateObj.indicesGen.skipCheck = true; templateObj.indicesGen.init({ count, seed }); } this.state = { rng, templates }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let templateIdx; [templateIdx, this.state.rng] = prand.uniformIntDistribution(0, this.state.templates.length - 1, this.state.rng); const templateObj = this.state.templates[templateIdx]!; const idx = templateObj.indicesGen.generate() as number; const values = fastCartesianProduct(templateObj.arraysToChooseFrom, idx) as string[]; templateObj.count += 1; if (templateObj.count === templateObj.maxUniqueCompanyNameNumber) { this.state.templates[templateIdx] = this.state.templates.at(-1)!; this.state.templates.pop(); } const companyName = fillTemplate({ template: templateObj.template, values, placeholdersCount: templateObj.placeholdersCount, }); return companyName; } } export class GenerateLoremIpsum extends AbstractGenerator<{ sentencesCount?: number; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateLoremIpsum'; private state: { rng: prand.RandomGenerator; } | undefined; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const rng = prand.xoroshiro128plus(seed); if (this.params.sentencesCount === undefined) this.params.sentencesCount = 1; const maxLoremIpsumSentencesLength = maxLoremIpsumLength * this.params.sentencesCount + this.params.sentencesCount - 1; if (this.stringLength !== undefined && this.stringLength < maxLoremIpsumSentencesLength) { throw new Error( `You can't use lorem ipsum generator with a db column length restriction of ${this.stringLength}. Set the maximum string length to at least ${maxLoremIpsumSentencesLength}.`, ); } this.state = { rng }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let idx, resultText: string = ''; for (let i = 0; i < this.params.sentencesCount!; i++) { [idx, this.state.rng] = prand.uniformIntDistribution(0, loremIpsumSentences.length - 1, this.state.rng); resultText += loremIpsumSentences[idx] + ' '; } return resultText; } } export class WeightedRandomGenerator extends AbstractGenerator<{ weight: number; value: AbstractGenerator }[]> { static override readonly entityKind: string = 'WeightedRandomGenerator'; private state: { rng: prand.RandomGenerator; weightedIndices: number[]; } | undefined; override init({ count, seed }: { count: number; seed: number }) { const weights = this.params.map((weightedGen) => weightedGen.weight); const weightedIndices = getWeightedIndices(weights); let idx: number, valueIdx: number, tempRng = prand.xoroshiro128plus(seed); const indicesCounter: { [key: number]: number } = {}; for (let i = 0; i < count; i++) { [idx, tempRng] = prand.uniformIntDistribution(0, weightedIndices.length - 1, tempRng); valueIdx = weightedIndices[idx]!; if (!Object.hasOwn(indicesCounter, valueIdx)) indicesCounter[valueIdx] = 0; indicesCounter[valueIdx]! += 1; } for (const [idx, weightedGen] of this.params.entries()) { weightedGen.value.isUnique = this.isUnique; weightedGen.value.dataType = this.dataType; weightedGen.value.init({ count: indicesCounter[idx]!, seed }); if ( weightedGen.value.uniqueVersionOfGen !== undefined && weightedGen.value.isUnique === true ) { const uniqueGen = new weightedGen.value.uniqueVersionOfGen({ ...weightedGen.value.params, }); uniqueGen.init({ count: indicesCounter[idx]!, seed, }); uniqueGen.isUnique = weightedGen.value.isUnique; uniqueGen.dataType = weightedGen.value.dataType; weightedGen.value = uniqueGen; } } const rng = prand.xoroshiro128plus(seed); this.state = { weightedIndices, rng }; } generate({ i }: { i: number }) { if (this.state === undefined) { throw new Error('state is not defined.'); } let idx: number; [idx, this.state.rng] = prand.uniformIntDistribution(0, this.state.weightedIndices.length - 1, this.state.rng); const generatorIdx = this.state.weightedIndices[idx] as number; const value = this.params[generatorIdx]!.value.generate({ i }); return value; } } export class GeneratePoint extends AbstractGenerator<{ isUnique?: boolean; minXValue?: number; maxXValue?: number; minYValue?: number; maxYValue?: number; arraySize?: number; }> { static override readonly entityKind: string = 'GeneratePoint'; private state: { xCoordinateGen: GenerateNumber; yCoordinateGen: GenerateNumber; } | undefined; override uniqueVersionOfGen = GenerateUniquePoint; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const xCoordinateGen = new GenerateNumber({ minValue: this.params.minXValue, maxValue: this.params.maxXValue, precision: 10, }); xCoordinateGen.init({ count, seed }); const yCoordinateGen = new GenerateNumber({ minValue: this.params.minYValue, maxValue: this.params.maxYValue, precision: 10, }); yCoordinateGen.init({ count, seed }); this.state = { xCoordinateGen, yCoordinateGen }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const x = this.state.xCoordinateGen.generate(); const y = this.state.yCoordinateGen.generate(); if (this.dataType === 'json') { return { x, y }; } else if (this.dataType === 'string') { return `[${x}, ${y}]`; } else { // if (this.dataType === "array") return [x, y]; } } } export class GenerateUniquePoint extends AbstractGenerator<{ minXValue?: number; maxXValue?: number; minYValue?: number; maxYValue?: number; isUnique?: boolean; }> { static override readonly entityKind: string = 'GenerateUniquePoint'; private state: { xCoordinateGen: GenerateUniqueNumber; yCoordinateGen: GenerateUniqueNumber; } | undefined; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { const xCoordinateGen = new GenerateUniqueNumber({ minValue: this.params.minXValue, maxValue: this.params.maxXValue, precision: 10, }); xCoordinateGen.init({ count, seed }); const yCoordinateGen = new GenerateUniqueNumber({ minValue: this.params.minYValue, maxValue: this.params.maxYValue, precision: 10, }); yCoordinateGen.init({ count, seed }); this.state = { xCoordinateGen, yCoordinateGen }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const x = this.state.xCoordinateGen.generate(); const y = this.state.yCoordinateGen.generate(); if (this.dataType === 'json') { return { x, y }; } else if (this.dataType === 'string') { return `[${x}, ${y}]`; } else { // if (this.dataType === "array") return [x, y]; } } } export class GenerateLine extends AbstractGenerator<{ isUnique?: boolean; minAValue?: number; maxAValue?: number; minBValue?: number; maxBValue?: number; minCValue?: number; maxCValue?: number; arraySize?: number; }> { static override readonly entityKind: string = 'GenerateLine'; private state: { aCoefficientGen: GenerateNumber; bCoefficientGen: GenerateNumber; cCoefficientGen: GenerateNumber; } | undefined; override uniqueVersionOfGen = GenerateUniqueLine; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); const aCoefficientGen = new GenerateNumber({ minValue: this.params.minAValue, maxValue: this.params.maxAValue, precision: 10, }); aCoefficientGen.init({ count, seed }); const bCoefficientGen = new GenerateNumber({ minValue: this.params.minBValue, maxValue: this.params.maxBValue, precision: 10, }); bCoefficientGen.init({ count, seed }); const cCoefficientGen = new GenerateNumber({ minValue: this.params.minCValue, maxValue: this.params.maxCValue, precision: 10, }); cCoefficientGen.init({ count, seed }); this.state = { aCoefficientGen, bCoefficientGen, cCoefficientGen }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let b: number; const a = this.state.aCoefficientGen.generate(); b = this.state.bCoefficientGen.generate(); while (a === 0 && b === 0) { b = this.state.bCoefficientGen.generate(); } const c = this.state.cCoefficientGen.generate(); if (this.dataType === 'json') { return { a, b, c }; } else if (this.dataType === 'string') { return `[${a}, ${b}, ${c}]`; } else { // if (this.dataType === "array") return [a, b, c]; } } } export class GenerateUniqueLine extends AbstractGenerator<{ minAValue?: number; maxAValue?: number; minBValue?: number; maxBValue?: number; minCValue?: number; maxCValue?: number; isUnique?: boolean; }> { static override readonly entityKind: string = 'GenerateUniqueLine'; private state: { aCoefficientGen: GenerateUniqueNumber; bCoefficientGen: GenerateUniqueNumber; cCoefficientGen: GenerateUniqueNumber; } | undefined; public override isUnique = true; override init({ count, seed }: { count: number; seed: number }) { const aCoefficientGen = new GenerateUniqueNumber({ minValue: this.params.minAValue, maxValue: this.params.maxAValue, precision: 10, }); aCoefficientGen.init({ count, seed }); const bCoefficientGen = new GenerateUniqueNumber({ minValue: this.params.minBValue, maxValue: this.params.maxBValue, precision: 10, }); bCoefficientGen.init({ count, seed }); const cCoefficientGen = new GenerateUniqueNumber({ minValue: this.params.minCValue, maxValue: this.params.maxCValue, precision: 10, }); cCoefficientGen.init({ count, seed }); this.state = { aCoefficientGen, bCoefficientGen, cCoefficientGen }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let b: number; const a = this.state.aCoefficientGen.generate(); b = this.state.bCoefficientGen.generate(); while (a === 0 && b === 0) { b = this.state.bCoefficientGen.generate(); } const c = this.state.cCoefficientGen.generate(); if (this.dataType === 'json') { return { a, b, c }; } else if (this.dataType === 'string') { return `[${a}, ${b}, ${c}]`; } else { // if (this.dataType === "array") return [a, b, c]; } } } ================================================ FILE: drizzle-seed/src/services/SeedService.ts ================================================ /* eslint-disable drizzle-internal/require-entity-kind */ import { entityKind, eq, is } from 'drizzle-orm'; import type { MySqlTable, MySqlTableWithColumns } from 'drizzle-orm/mysql-core'; import { MySqlDatabase } from 'drizzle-orm/mysql-core'; import type { PgTable, PgTableWithColumns } from 'drizzle-orm/pg-core'; import { PgDatabase } from 'drizzle-orm/pg-core'; import type { SQLiteTable, SQLiteTableWithColumns } from 'drizzle-orm/sqlite-core'; import { BaseSQLiteDatabase } from 'drizzle-orm/sqlite-core'; import type { GeneratePossibleGeneratorsColumnType, GeneratePossibleGeneratorsTableType, RefinementsType, TableGeneratorsType, } from '../types/seedService.ts'; import type { Column, Prettify, Relation, Table } from '../types/tables.ts'; import { generatorsMap } from './GeneratorFuncs.ts'; import type { AbstractGenerator, GenerateArray, GenerateInterval, GenerateWeightedCount } from './Generators.ts'; import { latestVersion } from './apiVersion.ts'; import { equalSets, generateHashFromString } from './utils.ts'; export class SeedService { static readonly entityKind: string = 'SeedService'; private defaultCountForTable = 10; private postgresPgLiteMaxParametersNumber = 32740; private postgresMaxParametersNumber = 65535; // there is no max parameters number in mysql, so you can increase mysqlMaxParametersNumber if it's needed. private mysqlMaxParametersNumber = 100000; // SQLITE_MAX_VARIABLE_NUMBER, which by default equals to 999 for SQLite versions prior to 3.32.0 (2020-05-22) or 32766 for SQLite versions after 3.32.0. private sqliteMaxParametersNumber = 32766; private version?: number; generatePossibleGenerators = ( connectionType: 'postgresql' | 'mysql' | 'sqlite', tables: Table[], relations: (Relation & { isCyclic: boolean })[], refinements?: RefinementsType, options?: { count?: number; seed?: number; version?: number }, ) => { let columnPossibleGenerator: Prettify; let tablePossibleGenerators: Prettify; const customSeed = options?.seed === undefined ? 0 : options.seed; this.version = options?.version === undefined ? latestVersion : options.version; if (Number.isNaN(this.version) || this.version < 1 || this.version > latestVersion) { throw new Error(`Version should be in range [1, ${latestVersion}].`); } // sorting table in order which they will be filled up (tables with foreign keys case) const { tablesInOutRelations } = this.getInfoFromRelations(relations); const orderedTablesNames = this.getOrderedTablesList(tablesInOutRelations); tables = tables.sort((table1, table2) => { const rel = relations.find((rel) => rel.table === table1.name && rel.refTable === table2.name); if (rel?.isCyclic === true) { const reverseRel = relations.find((rel) => rel.table === table2.name && rel.refTable === table1.name); return this.cyclicTablesCompare(table1, table2, rel, reverseRel); } const table1Order = orderedTablesNames.indexOf( table1.name, ), table2Order = orderedTablesNames.indexOf( table2.name, ); return table1Order - table2Order; }); const tableNamesSet = new Set(tables.map((table) => table.name)); const tablesPossibleGenerators: Prettify< (typeof tablePossibleGenerators)[] > = tables.map((table) => ({ tableName: table.name, columnsPossibleGenerators: [], withFromTable: {}, })); for (const [i, table] of tables.entries()) { // get foreignKey columns relations const foreignKeyColumns: { [columnName: string]: { table: string; column: string }; } = {}; for ( const rel of relations .filter((rel) => rel.table === table.name) ) { for (const [idx, col] of rel.columns.entries()) { foreignKeyColumns[col] = { table: rel.refTable, column: rel.refColumns[idx] as string, }; } } if (refinements !== undefined && refinements[table.name] !== undefined) { if (refinements[table.name]!.count !== undefined) { tablesPossibleGenerators[i]!.count = refinements[table.name]!.count; } if (refinements[table.name]!.with !== undefined) { tablesPossibleGenerators[i]!.count = refinements[table.name]!.count || options?.count || this.defaultCountForTable; let idx: number; for ( const fkTableName of Object.keys( refinements[table.name]!.with as {}, ) ) { if (!tablesInOutRelations[table.name]?.dependantTableNames.has(fkTableName)) { const reason = tablesInOutRelations[table.name]?.selfRelation === true ? `"${table.name}" table has self reference` : `"${fkTableName}" table doesn't have a reference to "${table.name}" table or` + `\nyou didn't include your one-to-many relation in the seed function schema`; throw new Error( `${reason}.` + `\nYou can't specify "${fkTableName}" as parameter in ${table.name}.with object.` + `\n\nFor more details, check this: https://orm.drizzle.team/docs/guides/seeding-using-with-option`, ); } idx = tablesPossibleGenerators.findIndex( (table) => table.tableName === fkTableName, ); if (idx !== -1) { let newTableWithCount: number, weightedCountSeed: number | undefined; if ( typeof refinements![table.name]!.with![fkTableName] === 'number' ) { newTableWithCount = (tablesPossibleGenerators[i]!.withCount || tablesPossibleGenerators[i]!.count)! * (refinements[table.name]!.with![fkTableName] as number); } else { const weightedRepeatedValuesCount = refinements[table.name]! .with![fkTableName] as { weight: number; count: number | number[]; }[]; weightedCountSeed = customSeed + generateHashFromString(`${table.name}.${fkTableName}`); newTableWithCount = this.getWeightedWithCount( weightedRepeatedValuesCount, (tablesPossibleGenerators[i]!.withCount || tablesPossibleGenerators[i]!.count)!, weightedCountSeed, ); } if ( tablesPossibleGenerators[idx]!.withCount === undefined || newTableWithCount > tablesPossibleGenerators[idx]!.withCount! ) { tablesPossibleGenerators[idx]!.withCount = newTableWithCount; } tablesPossibleGenerators[idx]!.withFromTable[table.name] = { repeatedValuesCount: refinements[table.name]!.with![fkTableName]!, weightedCountSeed, }; } } } } tablePossibleGenerators = tablesPossibleGenerators[i]!; for (const col of table.columns) { // col.myType = typeMap[col._type as keyof typeof typeMap]; columnPossibleGenerator = { columnName: col.name, isUnique: col.isUnique, notNull: col.notNull, primary: col.primary, generatedIdentityType: col.generatedIdentityType, generator: undefined, isCyclic: false, wasDefinedBefore: false, wasRefined: false, }; if ( refinements !== undefined && refinements[table.name] !== undefined && refinements[table.name]!.columns !== undefined && refinements[table.name]!.columns[col.name] !== undefined ) { const genObj = refinements[table.name]!.columns[col.name]!; if (col.columnType.match(/\[\w*]/g) !== null) { if ( (col.baseColumn?.dataType === 'array' && col.baseColumn.columnType.match(/\[\w*]/g) !== null) // studio case || (col.typeParams.dimensions !== undefined && col.typeParams.dimensions > 1) ) { throw new Error("for now you can't specify generators for columns of dimension greater than 1."); } genObj.baseColumnDataType = col.baseColumn?.dataType; } columnPossibleGenerator.generator = genObj; columnPossibleGenerator.wasRefined = true; } else if (Object.hasOwn(foreignKeyColumns, col.name)) { // TODO: I might need to assign repeatedValuesCount to column there instead of doing so in generateTablesValues const cyclicRelation = relations.find((rel) => rel.table === table.name && rel.isCyclic === true && rel.columns.includes(col.name) ); if (cyclicRelation !== undefined) { columnPossibleGenerator.isCyclic = true; } if ( (foreignKeyColumns[col.name]?.table === undefined || !tableNamesSet.has(foreignKeyColumns[col.name]!.table)) && col.notNull === true ) { throw new Error( `Column '${col.name}' has not null contraint,` + `\nand you didn't specify a table for foreign key on column '${col.name}' in '${table.name}' table.` + `\n\nFor more details, check this: https://orm.drizzle.team/docs/guides/seeding-with-partially-exposed-tables#example-1`, ); } const predicate = ( cyclicRelation !== undefined || ( foreignKeyColumns[col.name]?.table === undefined || !tableNamesSet.has(foreignKeyColumns[col.name]!.table) ) ) && col.notNull === false; if (predicate === true) { if ( (foreignKeyColumns[col.name]?.table === undefined || !tableNamesSet.has(foreignKeyColumns[col.name]!.table)) && col.notNull === false ) { console.warn( `Column '${col.name}' in '${table.name}' table will be filled with Null values` + `\nbecause you specified neither a table for foreign key on column '${col.name}'` + `\nnor a function for '${col.name}' column in refinements.` + `\n\nFor more details, check this: https://orm.drizzle.team/docs/guides/seeding-with-partially-exposed-tables#example-2`, ); } columnPossibleGenerator.generator = new generatorsMap.GenerateDefault[0]({ defaultValue: null }); columnPossibleGenerator.wasDefinedBefore = true; } else { columnPossibleGenerator.generator = new generatorsMap.HollowGenerator[0](); } } // TODO: rewrite pickGeneratorFor... using new col properties: isUnique and notNull else if (connectionType === 'postgresql') { columnPossibleGenerator.generator = this.selectGeneratorForPostgresColumn( table, col, ); } else if (connectionType === 'mysql') { columnPossibleGenerator.generator = this.selectGeneratorForMysqlColumn( table, col, ); } else if (connectionType === 'sqlite') { columnPossibleGenerator.generator = this.selectGeneratorForSqlite( table, col, ); } if (columnPossibleGenerator.generator === undefined) { throw new Error( `column with type ${col.columnType} is not supported for now.`, ); } const arrayGen = columnPossibleGenerator.generator.replaceIfArray(); if (arrayGen !== undefined) { columnPossibleGenerator.generator = arrayGen; } columnPossibleGenerator.generator.isUnique = col.isUnique; const uniqueGen = columnPossibleGenerator.generator.replaceIfUnique(); if (uniqueGen !== undefined) { columnPossibleGenerator.generator = uniqueGen; } // selecting version of generator columnPossibleGenerator.generator = this.selectVersionOfGenerator(columnPossibleGenerator.generator); // TODO: for now only GenerateValuesFromArray support notNull property columnPossibleGenerator.generator.notNull = col.notNull; columnPossibleGenerator.generator.dataType = col.dataType; columnPossibleGenerator.generator.stringLength = col.typeParams.length; tablePossibleGenerators.columnsPossibleGenerators.push( columnPossibleGenerator, ); } } return tablesPossibleGenerators; }; selectVersionOfGenerator = (generator: AbstractGenerator) => { const entityKind = generator.getEntityKind(); if (entityKind === 'GenerateArray') { const oldBaseColumnGen = (generator as GenerateArray).params.baseColumnGen; const newBaseColumnGen = this.selectVersionOfGenerator(oldBaseColumnGen); // newGenerator.baseColumnDataType = oldGenerator.baseColumnDataType; (generator as GenerateArray).params.baseColumnGen = newBaseColumnGen; } const possibleGeneratorConstructors = generatorsMap[entityKind as keyof typeof generatorsMap]; const possibleGeneratorConstructorsFiltered = possibleGeneratorConstructors?.filter((possGenCon) => possGenCon.version <= this.version! // sorting in ascending order by version ).sort((a, b) => a.version - b.version); const generatorConstructor = possibleGeneratorConstructorsFiltered?.at(-1) as | (new(params: any) => AbstractGenerator) | undefined; if (generatorConstructor === undefined) { throw new Error(`Can't select ${entityKind} generator for ${this.version} version.`); } const newGenerator = new generatorConstructor(generator.params); newGenerator.baseColumnDataType = generator.baseColumnDataType; newGenerator.isUnique = generator.isUnique; // TODO: for now only GenerateValuesFromArray support notNull property newGenerator.notNull = generator.notNull; newGenerator.dataType = generator.dataType; newGenerator.stringLength = generator.stringLength; return newGenerator; }; cyclicTablesCompare = ( table1: Table, table2: Table, relation: Relation & { isCyclic: boolean }, reverseRelation: Relation & { isCyclic: boolean } | undefined, ) => { // TODO: revise const hasTable1NotNullColumns = relation.columns.some((colIName) => table1.columns.find((colJ) => colJ.name === colIName)?.notNull === true ); if (reverseRelation !== undefined) { const hasTable2NotNullColumns = reverseRelation.columns.some((colIName) => table2.columns.find((colJ) => colJ.name === colIName)?.notNull === true ); if (hasTable1NotNullColumns && hasTable2NotNullColumns) { throw new Error( `The '${table1.name}' and '${table2.name}' tables have not null foreign keys. You can't seed cyclic tables with not null foreign key columns.`, ); } if (hasTable1NotNullColumns) return 1; else if (hasTable2NotNullColumns) return -1; return 0; } if (hasTable1NotNullColumns) { return 1; } return 0; // if (hasTable1NotNullColumns) return 1; // else if (hasTable2NotNullColumns) return -1; }; getOrderedTablesList = ( tablesInOutRelations: ReturnType['tablesInOutRelations'], ): string[] => { const leafTablesNames = Object.entries(tablesInOutRelations) .filter( (tableRel) => tableRel[1].out === 0 || (tableRel[1].out !== 0 && tableRel[1].selfRelCount === tableRel[1].out), ) .map((tableRel) => tableRel[0]); const orderedTablesNames: string[] = []; let parent: string, children: string[]; for (let i = 0; leafTablesNames.length !== 0; i++) { parent = leafTablesNames.shift() as string; if (orderedTablesNames.includes(parent)) { continue; } if (tablesInOutRelations[parent] === undefined) { orderedTablesNames.push(parent); continue; } for (const orderedTableName of orderedTablesNames) { tablesInOutRelations[parent]!.requiredTableNames.delete(orderedTableName); } if ( tablesInOutRelations[parent]!.requiredTableNames.size === 0 || equalSets( tablesInOutRelations[parent]!.requiredTableNames, tablesInOutRelations[parent]!.dependantTableNames, ) ) { orderedTablesNames.push(parent); } else { leafTablesNames.push(...tablesInOutRelations[parent]!.requiredTableNames, parent); continue; } children = [...tablesInOutRelations[parent]!.dependantTableNames]; leafTablesNames.push(...children); } return orderedTablesNames; }; getInfoFromRelations = (relations: (Relation & { isCyclic: boolean })[]) => { const tablesInOutRelations: { [tableName: string]: { out: number; in: number; selfRelation: boolean; selfRelCount: number; requiredTableNames: Set; dependantTableNames: Set; }; } = {}; // const cyclicRelations: { [cyclicTableName: string]: Relation & { isCyclic: boolean } } = {}; for (const rel of relations) { // if (rel.isCyclic) { // cyclicRelations[rel.table] = rel; // } if (tablesInOutRelations[rel.table] === undefined) { tablesInOutRelations[rel.table] = { out: 0, in: 0, selfRelation: false, selfRelCount: 0, requiredTableNames: new Set(), dependantTableNames: new Set(), }; } if ( rel.refTable !== undefined && tablesInOutRelations[rel.refTable] === undefined ) { tablesInOutRelations[rel.refTable] = { out: 0, in: 0, selfRelation: false, selfRelCount: 0, requiredTableNames: new Set(), dependantTableNames: new Set(), }; } if (rel.refTable !== undefined) { tablesInOutRelations[rel.table]!.out += 1; tablesInOutRelations[rel.refTable]!.in += 1; } if (rel.refTable === rel.table) { tablesInOutRelations[rel.table]!.selfRelation = true; tablesInOutRelations[rel.table]!.selfRelCount = rel.columns.length; } else if (rel.refTable !== undefined) { tablesInOutRelations[rel.table]!.requiredTableNames.add(rel.refTable); tablesInOutRelations[rel.refTable]!.dependantTableNames.add(rel.table); } } return { tablesInOutRelations }; }; getWeightedWithCount = ( weightedCount: { weight: number; count: number | number[] }[], count: number, seed: number, ) => { let gen = new generatorsMap.GenerateWeightedCount[0](); gen = this.selectVersionOfGenerator(gen) as GenerateWeightedCount; // const gen = new GenerateWeightedCount({}); gen.init({ count: weightedCount, seed }); let weightedWithCount = 0; for (let i = 0; i < count; i++) { weightedWithCount += gen.generate(); } return weightedWithCount; }; // TODO: revise serial part generators selectGeneratorForPostgresColumn = ( table: Table, col: Column, ) => { const pickGenerator = (table: Table, col: Column) => { // ARRAY if (col.columnType.match(/\[\w*]/g) !== null && col.baseColumn !== undefined) { const baseColumnGen = this.selectGeneratorForPostgresColumn( table, col.baseColumn!, ) as AbstractGenerator; if (baseColumnGen === undefined) { throw new Error(`column with type ${col.baseColumn!.columnType} is not supported for now.`); } // const getBaseColumnDataType = (baseColumn: Column) => { // if (baseColumn.baseColumn !== undefined) { // return getBaseColumnDataType(baseColumn.baseColumn); // } // return baseColumn.dataType; // }; // const baseColumnDataType = getBaseColumnDataType(col.baseColumn); const generator = new generatorsMap.GenerateArray[0]({ baseColumnGen, size: col.size }); // generator.baseColumnDataType = baseColumnDataType; return generator; } // ARRAY for studio if (col.columnType.match(/\[\w*]/g) !== null) { // remove dimensions from type const baseColumnType = col.columnType.replace(/\[\w*]/g, ''); const baseColumn: Column = { ...col, }; baseColumn.columnType = baseColumnType; const baseColumnGen = this.selectGeneratorForPostgresColumn(table, baseColumn) as AbstractGenerator; if (baseColumnGen === undefined) { throw new Error(`column with type ${col.baseColumn!.columnType} is not supported for now.`); } let generator = new generatorsMap.GenerateArray[0]({ baseColumnGen }); for (let i = 0; i < col.typeParams.dimensions! - 1; i++) { generator = new generatorsMap.GenerateArray[0]({ baseColumnGen: generator }); } return generator; } // INT ------------------------------------------------------------------------------------------------------------ if ( (col.columnType.includes('serial') || col.columnType === 'integer' || col.columnType === 'smallint' || col.columnType.includes('bigint')) && table.primaryKeys.includes(col.name) ) { const generator = new generatorsMap.GenerateIntPrimaryKey[0](); return generator; } let minValue: number | bigint | undefined; let maxValue: number | bigint | undefined; if (col.columnType.includes('serial')) { minValue = 1; if (col.columnType === 'smallserial') { // 2^16 / 2 - 1, 2 bytes maxValue = 32767; } else if (col.columnType === 'serial') { // 2^32 / 2 - 1, 4 bytes maxValue = 2147483647; } else if (col.columnType === 'bigserial') { // 2^64 / 2 - 1, 8 bytes minValue = BigInt(1); maxValue = BigInt('9223372036854775807'); } } else if (col.columnType.includes('int')) { if (col.columnType === 'smallint') { // 2^16 / 2 - 1, 2 bytes minValue = -32768; maxValue = 32767; } else if (col.columnType === 'integer') { // 2^32 / 2 - 1, 4 bytes minValue = -2147483648; maxValue = 2147483647; } else if (col.columnType.includes('bigint')) { if (col.dataType === 'bigint') { // 2^64 / 2 - 1, 8 bytes minValue = BigInt('-9223372036854775808'); maxValue = BigInt('9223372036854775807'); } else { // if (col.dataType === 'number') // if you’re expecting values above 2^31 but below 2^53 minValue = -9007199254740991; maxValue = 9007199254740991; } } } if ( col.columnType.includes('int') && !col.columnType.includes('interval') && !col.columnType.includes('point') ) { const generator = new generatorsMap.GenerateInt[0]({ minValue, maxValue, }); return generator; } if (col.columnType.includes('serial')) { const generator = new generatorsMap.GenerateIntPrimaryKey[0](); generator.maxValue = maxValue; return generator; } // NUMBER(real, double, decimal, numeric) if ( col.columnType.startsWith('real') || col.columnType.startsWith('double precision') || col.columnType.startsWith('decimal') || col.columnType.startsWith('numeric') ) { if (col.typeParams.precision !== undefined) { const precision = col.typeParams.precision; const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); const generator = new generatorsMap.GenerateNumber[0]({ minValue: -maxAbsoluteValue, maxValue: maxAbsoluteValue, precision: Math.pow(10, scale), }); return generator; } const generator = new generatorsMap.GenerateNumber[0](); return generator; } // STRING if ( (col.columnType === 'text' || col.columnType.startsWith('varchar') || col.columnType.startsWith('char')) && table.primaryKeys.includes(col.name) ) { const generator = new generatorsMap.GenerateUniqueString[0](); return generator; } if ( (col.columnType === 'text' || col.columnType.startsWith('varchar') || col.columnType.startsWith('char')) && col.name.toLowerCase().includes('name') ) { const generator = new generatorsMap.GenerateFirstName[0](); return generator; } if ( (col.columnType === 'text' || col.columnType.startsWith('varchar') || col.columnType.startsWith('char')) && col.name.toLowerCase().includes('email') ) { const generator = new generatorsMap.GenerateEmail[0](); return generator; } if ( col.columnType === 'text' || col.columnType.startsWith('varchar') || col.columnType.startsWith('char') ) { const generator = new generatorsMap.GenerateString[0](); return generator; } // UUID if (col.columnType === 'uuid') { const generator = new generatorsMap.GenerateUUID[0](); return generator; } // BOOLEAN if (col.columnType === 'boolean') { const generator = new generatorsMap.GenerateBoolean[0](); return generator; } // DATE, TIME, TIMESTAMP if (col.columnType.includes('date')) { const generator = new generatorsMap.GenerateDate[0](); return generator; } if (col.columnType === 'time') { const generator = new generatorsMap.GenerateTime[0](); return generator; } if (col.columnType.includes('timestamp')) { const generator = new generatorsMap.GenerateTimestamp[0](); return generator; } // JSON, JSONB if (col.columnType === 'json' || col.columnType === 'jsonb') { const generator = new generatorsMap.GenerateJson[0](); return generator; } // if (col.columnType === "jsonb") { // const generator = new GenerateJsonb({}); // return generator; // } // ENUM if (col.enumValues !== undefined) { const generator = new generatorsMap.GenerateEnum[0]({ enumValues: col.enumValues, }); return generator; } // INTERVAL if (col.columnType.startsWith('interval')) { if (col.columnType === 'interval') { const generator = new generatorsMap.GenerateInterval[0](); return generator; } const fields = col.columnType.replace('interval ', '') as GenerateInterval['params']['fields']; const generator = new generatorsMap.GenerateInterval[0]({ fields }); return generator; } // POINT, LINE if (col.columnType.includes('point')) { const generator = new generatorsMap.GeneratePoint[0](); return generator; } if (col.columnType.includes('line')) { const generator = new generatorsMap.GenerateLine[0](); return generator; } if (col.hasDefault && col.default !== undefined) { const generator = new generatorsMap.GenerateDefault[0]({ defaultValue: col.default, }); return generator; } return; }; const generator = pickGenerator(table, col); if (generator !== undefined) { generator.isUnique = col.isUnique; generator.dataType = col.dataType; generator.stringLength = col.typeParams.length; } return generator; }; selectGeneratorForMysqlColumn = ( table: Table, col: Column, ) => { const pickGenerator = (table: Table, col: Column) => { // INT ------------------------------------------------------------------------------------------------------------ if ( (col.columnType.includes('serial') || col.columnType.includes('int')) && table.primaryKeys.includes(col.name) ) { const generator = new generatorsMap.GenerateIntPrimaryKey[0](); return generator; } let minValue: number | bigint | undefined; let maxValue: number | bigint | undefined; if (col.columnType === 'serial') { // 2^64 % 2 - 1, 8 bytes minValue = BigInt(0); maxValue = BigInt('9223372036854775807'); } else if (col.columnType.includes('int')) { if (col.columnType === 'tinyint') { // 2^8 / 2 - 1, 1 bytes minValue = -128; maxValue = 127; } else if (col.columnType === 'smallint') { // 2^16 / 2 - 1, 2 bytes minValue = -32768; maxValue = 32767; } else if (col.columnType === 'mediumint') { // 2^16 / 2 - 1, 2 bytes minValue = -8388608; maxValue = 8388607; } else if (col.columnType === 'int') { // 2^32 / 2 - 1, 4 bytes minValue = -2147483648; maxValue = 2147483647; } else if (col.columnType === 'bigint') { // 2^64 / 2 - 1, 8 bytes minValue = BigInt('-9223372036854775808'); maxValue = BigInt('9223372036854775807'); } } if (col.columnType.includes('int')) { const generator = new generatorsMap.GenerateInt[0]({ minValue, maxValue, }); return generator; } if (col.columnType.includes('serial')) { const generator = new generatorsMap.GenerateIntPrimaryKey[0](); generator.maxValue = maxValue; return generator; } // NUMBER(real, double, decimal, float) if ( col.columnType.startsWith('real') || col.columnType.startsWith('double') || col.columnType.startsWith('decimal') || col.columnType.startsWith('float') || col.columnType.startsWith('numeric') ) { if (col.typeParams.precision !== undefined) { const precision = col.typeParams.precision; const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); const generator = new generatorsMap.GenerateNumber[0]({ minValue: -maxAbsoluteValue, maxValue: maxAbsoluteValue, precision: Math.pow(10, scale), }); return generator; } const generator = new generatorsMap.GenerateNumber[0](); return generator; } // STRING if ( (col.columnType === 'text' || col.columnType === 'blob' || col.columnType.startsWith('char') || col.columnType.startsWith('varchar') || col.columnType.startsWith('binary') || col.columnType.startsWith('varbinary')) && table.primaryKeys.includes(col.name) ) { const generator = new generatorsMap.GenerateUniqueString[0](); return generator; } if ( (col.columnType === 'text' || col.columnType === 'blob' || col.columnType.startsWith('char') || col.columnType.startsWith('varchar') || col.columnType.startsWith('binary') || col.columnType.startsWith('varbinary')) && col.name.toLowerCase().includes('name') ) { const generator = new generatorsMap.GenerateFirstName[0](); return generator; } if ( (col.columnType === 'text' || col.columnType === 'blob' || col.columnType.startsWith('char') || col.columnType.startsWith('varchar') || col.columnType.startsWith('binary') || col.columnType.startsWith('varbinary')) && col.name.toLowerCase().includes('email') ) { const generator = new generatorsMap.GenerateEmail[0](); return generator; } if ( col.columnType === 'text' || col.columnType === 'blob' || col.columnType.startsWith('char') || col.columnType.startsWith('varchar') || col.columnType.startsWith('binary') || col.columnType.startsWith('varbinary') ) { const generator = new generatorsMap.GenerateString[0](); return generator; } // BOOLEAN if (col.columnType === 'boolean') { const generator = new generatorsMap.GenerateBoolean[0](); return generator; } // DATE, TIME, TIMESTAMP, DATETIME, YEAR if (col.columnType.includes('datetime')) { const generator = new generatorsMap.GenerateDatetime[0](); return generator; } if (col.columnType.includes('date')) { const generator = new generatorsMap.GenerateDate[0](); return generator; } if (col.columnType === 'time') { const generator = new generatorsMap.GenerateTime[0](); return generator; } if (col.columnType.includes('timestamp')) { const generator = new generatorsMap.GenerateTimestamp[0](); return generator; } if (col.columnType === 'year') { const generator = new generatorsMap.GenerateYear[0](); return generator; } // JSON if (col.columnType === 'json') { const generator = new generatorsMap.GenerateJson[0](); return generator; } // ENUM if (col.enumValues !== undefined) { const generator = new generatorsMap.GenerateEnum[0]({ enumValues: col.enumValues, }); return generator; } if (col.hasDefault && col.default !== undefined) { const generator = new generatorsMap.GenerateDefault[0]({ defaultValue: col.default, }); return generator; } return; }; const generator = pickGenerator(table, col); return generator; }; selectGeneratorForSqlite = ( table: Table, col: Column, ) => { const pickGenerator = (table: Table, col: Column) => { // int section --------------------------------------------------------------------------------------- if ( (col.columnType === 'integer' || col.columnType === 'numeric') && table.primaryKeys.includes(col.name) ) { const generator = new generatorsMap.GenerateIntPrimaryKey[0](); return generator; } if (col.columnType === 'integer' && col.dataType === 'boolean') { const generator = new generatorsMap.GenerateBoolean[0](); return generator; } if ((col.columnType === 'integer' && col.dataType === 'date')) { const generator = new generatorsMap.GenerateTimestamp[0](); return generator; } if ( col.columnType === 'integer' || (col.dataType === 'bigint' && col.columnType === 'blob') ) { const generator = new generatorsMap.GenerateInt[0](); return generator; } // number section ------------------------------------------------------------------------------------ if (col.columnType.startsWith('real') || col.columnType.startsWith('numeric')) { if (col.typeParams.precision !== undefined) { const precision = col.typeParams.precision; const scale = col.typeParams.scale === undefined ? 0 : col.typeParams.scale; const maxAbsoluteValue = Math.pow(10, precision - scale) - Math.pow(10, -scale); const generator = new generatorsMap.GenerateNumber[0]({ minValue: -maxAbsoluteValue, maxValue: maxAbsoluteValue, precision: Math.pow(10, scale), }); return generator; } const generator = new generatorsMap.GenerateNumber[0](); return generator; } // string section ------------------------------------------------------------------------------------ if ( (col.columnType.startsWith('text') || col.columnType.startsWith('numeric') || col.columnType.startsWith('blob')) && table.primaryKeys.includes(col.name) ) { const generator = new generatorsMap.GenerateUniqueString[0](); return generator; } if ( (col.columnType.startsWith('text') || col.columnType.startsWith('numeric') || col.columnType.startsWith('blob')) && col.name.toLowerCase().includes('name') ) { const generator = new generatorsMap.GenerateFirstName[0](); return generator; } if ( (col.columnType.startsWith('text') || col.columnType.startsWith('numeric') || col.columnType.startsWith('blob')) && col.name.toLowerCase().includes('email') ) { const generator = new generatorsMap.GenerateEmail[0](); return generator; } if ( col.columnType.startsWith('text') || col.columnType.startsWith('numeric') || col.columnType.startsWith('blob') || col.columnType.startsWith('blobbuffer') ) { const generator = new generatorsMap.GenerateString[0](); return generator; } if ( (col.columnType.startsWith('text') && col.dataType === 'json') || (col.columnType.startsWith('blob') && col.dataType === 'json') ) { const generator = new generatorsMap.GenerateJson[0](); return generator; } if (col.hasDefault && col.default !== undefined) { const generator = new generatorsMap.GenerateDefault[0]({ defaultValue: col.default, }); return generator; } return; }; const generator = pickGenerator(table, col); return generator; }; filterCyclicTables = (tablesGenerators: ReturnType) => { const filteredTablesGenerators = tablesGenerators.filter((tableGen) => tableGen.columnsPossibleGenerators.some((columnGen) => columnGen.isCyclic === true && columnGen.wasDefinedBefore === true ) ); const tablesUniqueNotNullColumn: { [tableName: string]: { uniqueNotNullColName: string } } = {}; for (const [idx, tableGen] of filteredTablesGenerators.entries()) { const uniqueNotNullColName = filteredTablesGenerators[idx]!.columnsPossibleGenerators.find((colGen) => colGen.primary === true || (colGen.isUnique === true && colGen.notNull === true) )?.columnName; if (uniqueNotNullColName === undefined) { throw new Error( `Table '${tableGen.tableName}' does not have primary or (unique and notNull) column. Can't seed table with cyclic relation.`, ); } tablesUniqueNotNullColumn[tableGen.tableName] = { uniqueNotNullColName }; filteredTablesGenerators[idx]!.columnsPossibleGenerators = tableGen.columnsPossibleGenerators.filter((colGen) => (colGen.isCyclic === true && colGen.wasDefinedBefore === true) || colGen.columnName === uniqueNotNullColName ).map((colGen) => { const newColGen = { ...colGen }; newColGen.wasDefinedBefore = false; return newColGen; }); } return { filteredTablesGenerators, tablesUniqueNotNullColumn }; }; generateTablesValues = async ( relations: (Relation & { isCyclic: boolean })[], tablesGenerators: ReturnType, db?: | PgDatabase | MySqlDatabase | BaseSQLiteDatabase, schema?: { [key: string]: PgTable | MySqlTable | SQLiteTable }, options?: { count?: number; seed?: number; preserveData?: boolean; preserveCyclicTablesData?: boolean; insertDataInDb?: boolean; updateDataInDb?: boolean; tablesValues?: { tableName: string; rows: { [columnName: string]: string | number | boolean | undefined; }[]; }[]; tablesUniqueNotNullColumn?: { [tableName: string]: { uniqueNotNullColName: string } }; }, ) => { const customSeed = options?.seed === undefined ? 0 : options.seed; let tableCount: number | undefined; let columnsGenerators: Prettify[]; let tableGenerators: Prettify; let tableValues: { [columnName: string]: string | number | boolean | undefined; }[]; let tablesValues: { tableName: string; rows: typeof tableValues; }[] = options?.tablesValues === undefined ? [] : options.tablesValues; let pRNGSeed: number; let filteredRelations: typeof relations; let preserveData: boolean, insertDataInDb: boolean = true, updateDataInDb: boolean = false; if (options?.preserveData !== undefined) preserveData = options.preserveData; if (options?.insertDataInDb !== undefined) insertDataInDb = options.insertDataInDb; if (options?.updateDataInDb !== undefined) updateDataInDb = options.updateDataInDb; if (updateDataInDb === true) insertDataInDb = false; // TODO: now I'm generating tablesInOutRelations twice, first time in generatePossibleGenerators and second time here. maybe should generate it once instead. const { tablesInOutRelations } = this.getInfoFromRelations(relations); for (const table of tablesGenerators) { tableCount = table.count === undefined ? options?.count || this.defaultCountForTable : table.count; tableGenerators = {}; columnsGenerators = table.columnsPossibleGenerators; filteredRelations = relations.filter( (rel) => rel.table === table.tableName, ); // adding pRNG seed to column for (const col of columnsGenerators) { const columnRelations = filteredRelations.filter((rel) => rel.columns.includes(col.columnName)); pRNGSeed = (columnRelations.length !== 0 && columnRelations[0]!.columns.length >= 2) ? (customSeed + generateHashFromString( `${columnRelations[0]!.table}.${columnRelations[0]!.columns.join('_')}`, )) : (customSeed + generateHashFromString(`${table.tableName}.${col.columnName}`)); tableGenerators[col.columnName] = { pRNGSeed, ...col, }; } // get values to generate columns with foreign key // if table posts contains foreign key to table users, then rel.table === 'posts' and rel.refTable === 'users', because table posts has reference to table users. if (filteredRelations.length !== 0) { for (const rel of filteredRelations) { if ( table.withFromTable[rel.refTable] !== undefined && table.withCount !== undefined ) { tableCount = table.withCount; } for (let colIdx = 0; colIdx < rel.columns.length; colIdx++) { let refColumnValues: (string | number | boolean)[]; let hasSelfRelation: boolean = false; let repeatedValuesCount: | number | { weight: number; count: number | number[] }[] | undefined, weightedCountSeed: number | undefined; let genObj: AbstractGenerator | undefined; if ( rel.table === rel.refTable && tableGenerators[rel.columns[colIdx]!]?.wasRefined === false ) { const refColName = rel.refColumns[colIdx] as string; pRNGSeed = generateHashFromString( `${table.tableName}.${refColName}`, ); const refColumnGenerator: typeof tableGenerators = {}; refColumnGenerator[refColName] = { ...tableGenerators[refColName]!, pRNGSeed, }; refColumnValues = (await this.generateColumnsValuesByGenerators({ tableGenerators: refColumnGenerator, count: tableCount, preserveData: true, insertDataInDb: false, }))!.map((rows) => rows[refColName]) as (string | number | boolean)[]; hasSelfRelation = true; genObj = new generatorsMap.GenerateSelfRelationsValuesFromArray[0]({ values: refColumnValues, }); genObj = this.selectVersionOfGenerator(genObj); // genObj = new GenerateSelfRelationsValuesFromArray({ // values: refColumnValues, // }); } else if ( tableGenerators[rel.columns[colIdx]!]?.wasDefinedBefore === false && tableGenerators[rel.columns[colIdx]!]?.wasRefined === false ) { refColumnValues = tablesValues .find((val) => val.tableName === rel.refTable)! .rows!.map((row) => row[rel.refColumns[colIdx]!]!); if ( table.withFromTable[rel.refTable] !== undefined && table.withFromTable[rel.refTable]!.repeatedValuesCount !== undefined ) { repeatedValuesCount = table.withFromTable[rel.refTable]!.repeatedValuesCount; weightedCountSeed = table.withFromTable[rel.refTable]!.weightedCountSeed; } // TODO: revise maybe need to select version of generator here too genObj = new generatorsMap.GenerateValuesFromArray[0]({ values: refColumnValues }); genObj.notNull = tableGenerators[rel.columns[colIdx]!]!.notNull; genObj.weightedCountSeed = weightedCountSeed; genObj.maxRepeatedValuesCount = repeatedValuesCount; } if (genObj !== undefined) { tableGenerators[rel.columns[colIdx]!]!.generator = genObj; } tableGenerators[rel.columns[colIdx]!] = { ...tableGenerators[rel.columns[colIdx]!]!, hasSelfRelation, hasRelation: true, }; } } } preserveData = ( options?.preserveData === undefined && tablesInOutRelations[table.tableName]?.in === 0 ) ? false : true; preserveData = preserveData || (options?.preserveCyclicTablesData === true && table.columnsPossibleGenerators.some((colGen) => colGen.isCyclic === true)); tableValues = await this.generateColumnsValuesByGenerators({ tableGenerators, db, schema, tableName: table.tableName, count: tableCount, preserveData, insertDataInDb, updateDataInDb, uniqueNotNullColName: options?.tablesUniqueNotNullColumn === undefined ? undefined : options?.tablesUniqueNotNullColumn[table.tableName]?.uniqueNotNullColName, }); if (preserveData === true) { tablesValues.push({ tableName: table.tableName, rows: tableValues, }); } // removing "link" from table that was required to generate current table if (tablesInOutRelations[table.tableName] !== undefined) { for (const tableName of tablesInOutRelations[table.tableName]!.requiredTableNames) { tablesInOutRelations[tableName]!.in -= 1; } } if (preserveData === false) { tablesValues = tablesValues.filter( (table) => tablesInOutRelations[table.tableName] !== undefined && tablesInOutRelations[table.tableName]!.in > 0, ); } } return tablesValues; }; generateColumnsValuesByGenerators = async ({ tableGenerators, db, schema, tableName, count, preserveData = true, insertDataInDb = true, updateDataInDb = false, uniqueNotNullColName, batchSize = 10000, }: { tableGenerators: Prettify; db?: | PgDatabase | MySqlDatabase | BaseSQLiteDatabase; schema?: { [key: string]: PgTable | MySqlTable | SQLiteTable }; tableName?: string; count?: number; preserveData?: boolean; insertDataInDb?: boolean; updateDataInDb?: boolean; uniqueNotNullColName?: string; batchSize?: number; }) => { if (count === undefined) { count = this.defaultCountForTable; } if (updateDataInDb === true) { batchSize = 1; } let columnGenerator: (typeof tableGenerators)[string]; const columnsGenerators: { [columnName: string]: AbstractGenerator; } = {}; let generatedValues: { [columnName: string]: number | string | boolean | undefined }[] = []; let columnsNumber = 0; let override = false; for (const columnName of Object.keys(tableGenerators)) { columnsNumber += 1; columnGenerator = tableGenerators[columnName]!; override = tableGenerators[columnName]?.generatedIdentityType === 'always' ? true : override; columnsGenerators[columnName] = columnGenerator.generator!; columnsGenerators[columnName]!.init({ count, seed: columnGenerator.pRNGSeed, }); // const arrayGen = columnsGenerators[columnName]!.replaceIfArray({ count, seed: columnGenerator.pRNGSeed }); // if (arrayGen !== undefined) { // columnsGenerators[columnName] = arrayGen; // } // const uniqueGen = columnsGenerators[columnName]!.replaceIfUnique({ count, seed: columnGenerator.pRNGSeed }); // if (uniqueGen !== undefined) { // columnsGenerators[columnName] = uniqueGen; // } } let maxParametersNumber: number; if (is(db, PgDatabase)) { // @ts-ignore maxParametersNumber = db.constructor[entityKind] === 'PgliteDatabase' ? this.postgresPgLiteMaxParametersNumber : this.postgresMaxParametersNumber; } else if (is(db, MySqlDatabase)) { maxParametersNumber = this.mysqlMaxParametersNumber; } else { // is(db, BaseSQLiteDatabase) maxParametersNumber = this.sqliteMaxParametersNumber; } const maxBatchSize = Math.floor(maxParametersNumber / columnsNumber); batchSize = batchSize > maxBatchSize ? maxBatchSize : batchSize; if ( (insertDataInDb === true || updateDataInDb === true) && (db === undefined || schema === undefined || tableName === undefined) ) { throw new Error('db or schema or tableName is undefined.'); } let row: { [columnName: string]: string | number | boolean }, generatedValue, i: number; for (i = 0; i < count; i++) { row = {}; generatedValues.push(row); for (const columnName of Object.keys(columnsGenerators)) { // generatedValue = columnsGenerators[columnName].next().value as // | string // | number // | boolean; generatedValue = columnsGenerators[columnName]!.generate({ i }) as | string | number | boolean; row[columnName as keyof typeof row] = generatedValue; } if ( (insertDataInDb === true || updateDataInDb === true) && ((i + 1) % batchSize === 0 || i === count - 1) ) { if (preserveData === false) { if (insertDataInDb === true) { await this.insertInDb({ generatedValues, db: db as | PgDatabase | MySqlDatabase | BaseSQLiteDatabase, schema: schema as { [key: string]: PgTable | MySqlTable | SQLiteTable; }, tableName: tableName as string, override, }); } else if (updateDataInDb === true) { await this.updateDb({ generatedValues, db: db as | PgDatabase | MySqlDatabase | BaseSQLiteDatabase, schema: schema as { [key: string]: PgTable | MySqlTable | SQLiteTable; }, tableName: tableName as string, uniqueNotNullColName: uniqueNotNullColName as string, }); } generatedValues = []; } else { const batchCount = Math.floor(i / batchSize); if (insertDataInDb === true) { await this.insertInDb({ generatedValues: generatedValues.slice( batchSize * batchCount, batchSize * (batchCount + 1), ), db: db as | PgDatabase | MySqlDatabase | BaseSQLiteDatabase, schema: schema as { [key: string]: PgTable | MySqlTable | SQLiteTable; }, tableName: tableName as string, override, }); } else if (updateDataInDb === true) { await this.updateDb({ generatedValues: generatedValues.slice( batchSize * batchCount, batchSize * (batchCount + 1), ), db: db as | PgDatabase | MySqlDatabase | BaseSQLiteDatabase, schema: schema as { [key: string]: PgTable | MySqlTable | SQLiteTable; }, tableName: tableName as string, uniqueNotNullColName: uniqueNotNullColName as string, }); } } } } return preserveData === true ? generatedValues : []; }; insertInDb = async ({ generatedValues, db, schema, tableName, override, }: { generatedValues: { [columnName: string]: number | string | boolean | undefined; }[]; db: | PgDatabase | MySqlDatabase | BaseSQLiteDatabase; schema: { [key: string]: PgTable | MySqlTable | SQLiteTable; }; tableName: string; override: boolean; }) => { if (is(db, PgDatabase)) { const query = db.insert((schema as { [key: string]: PgTable })[tableName]!); if (override === true) { return await query.overridingSystemValue().values(generatedValues); } await query.values(generatedValues); } else if (is(db, MySqlDatabase)) { await db .insert((schema as { [key: string]: MySqlTable })[tableName]!) .values(generatedValues); } else if (is(db, BaseSQLiteDatabase)) { await db .insert((schema as { [key: string]: SQLiteTable })[tableName]!) .values(generatedValues); } }; updateDb = async ({ generatedValues, db, schema, tableName, uniqueNotNullColName, }: { generatedValues: { [columnName: string]: number | string | boolean | undefined; }[]; db: | PgDatabase | MySqlDatabase | BaseSQLiteDatabase; schema: { [key: string]: PgTable | MySqlTable | SQLiteTable; }; tableName: string; uniqueNotNullColName: string; }) => { if (is(db, PgDatabase)) { const table = (schema as { [key: string]: PgTableWithColumns })[tableName]!; const uniqueNotNullCol = table[uniqueNotNullColName]; await db.update(table).set(generatedValues[0]!).where( eq(uniqueNotNullCol, generatedValues[0]![uniqueNotNullColName]), ); } else if (is(db, MySqlDatabase)) { const table = (schema as { [key: string]: MySqlTableWithColumns })[tableName]!; await db.update(table).set(generatedValues[0]!).where( eq(table[uniqueNotNullColName], generatedValues[0]![uniqueNotNullColName]), ); } else if (is(db, BaseSQLiteDatabase)) { const table = (schema as { [key: string]: SQLiteTableWithColumns })[tableName]!; await db.update(table).set(generatedValues[0]!).where( eq(table[uniqueNotNullColName], generatedValues[0]![uniqueNotNullColName]), ); } }; } ================================================ FILE: drizzle-seed/src/services/apiVersion.ts ================================================ export const latestVersion = 2; ================================================ FILE: drizzle-seed/src/services/utils.ts ================================================ export const fastCartesianProduct = (sets: (number | string | boolean | object)[][], index: number) => { const resultList = []; let currSet: (typeof sets)[number]; let element: (typeof sets)[number][number]; for (let i = sets.length - 1; i >= 0; i--) { currSet = sets[i]!; element = currSet[index % currSet.length]!; resultList.unshift(element); index = Math.floor(index / currSet.length); } return resultList; }; const sumArray = (weights: number[]) => { const scale = 1e10; const scaledSum = weights.reduce((acc, currVal) => acc + Math.round(currVal * scale), 0); return scaledSum / scale; }; /** * @param weights positive number in range [0, 1], that represents probabilities to choose index of array. Example: weights = [0.2, 0.8] * @param [accuracy=100] approximate number of elements in returning array * @returns Example: with weights = [0.2, 0.8] and accuracy = 10 returning array of indices gonna equal this: [0, 0, 1, 1, 1, 1, 1, 1, 1, 1] */ export const getWeightedIndices = (weights: number[], accuracy = 100) => { const weightsSum = sumArray(weights); if (weightsSum !== 1) { throw new Error( `The weights for the Weighted Random feature must add up to exactly 1. Please review your weights to ensure they total 1 before proceeding`, ); } // const accuracy = 100; const weightedIndices: number[] = []; for (const [index, weight] of weights.entries()) { const ticketsNumb = Math.floor(weight * accuracy); weightedIndices.push(...Array.from({ length: ticketsNumb }).fill(index)); } return weightedIndices; }; export const generateHashFromString = (s: string) => { let hash = 0; // p and m are prime numbers const p = 53; const m = 28871271685163; for (let i = 0; i < s.length; i++) { hash += ((s.codePointAt(i) || 0) * Math.pow(p, i)) % m; } return hash; }; /** * @param param0.template example: "#####" or "#####-####" * @param param0.values example: ["3", "2", "h"] * @param param0.defaultValue example: "0" * @returns */ export const fillTemplate = ({ template, placeholdersCount, values, defaultValue = ' ' }: { template: string; placeholdersCount?: number; values: string[]; defaultValue?: string; }) => { if (placeholdersCount === undefined) { const iterArray = [...template.matchAll(/#/g)]; placeholdersCount = iterArray.length; } const diff = placeholdersCount - values.length; if (diff > 0) { values.unshift(...Array.from({ length: diff }).fill(defaultValue)); } let resultStr = '', valueIdx = 0; for (const si of template) { if (si === '#') { resultStr += values[valueIdx]; valueIdx += 1; continue; } resultStr += si; } return resultStr; }; // is variable is object-like. // Example: // isObject({f: 4}) === true; // isObject([1,2,3]) === false; // isObject(new Set()) === false; export const isObject = (value: any) => { if (value !== null && value !== undefined && value.constructor === Object) return true; return false; }; export const equalSets = (set1: Set, set2: Set) => { return set1.size === set2.size && [...set1].every((si) => set2.has(si)); }; ================================================ FILE: drizzle-seed/src/services/versioning/v2.ts ================================================ /* eslint-disable drizzle-internal/require-entity-kind */ import prand from 'pure-rand'; import { AbstractGenerator } from '../Generators.ts'; export class GenerateUniqueIntervalV2 extends AbstractGenerator<{ fields?: | 'year' | 'month' | 'day' | 'hour' | 'minute' | 'second' | 'year to month' | 'day to hour' | 'day to minute' | 'day to second' | 'hour to minute' | 'hour to second' | 'minute to second'; isUnique?: boolean; }> { static override readonly 'entityKind': string = 'GenerateUniqueInterval'; static override readonly version: number = 2; private state: { rng: prand.RandomGenerator; fieldsToGenerate: string[]; intervalSet: Set; } | undefined; public override isUnique = true; private config: { [key: string]: { from: number; to: number } } = { year: { from: 0, to: 5, }, month: { from: 0, to: 11, }, day: { from: 0, to: 29, }, hour: { from: 0, to: 23, }, minute: { from: 0, to: 59, }, second: { from: 0, to: 59, }, }; override init({ count, seed }: { count: number; seed: number }) { const allFields = ['year', 'month', 'day', 'hour', 'minute', 'second']; let fieldsToGenerate: string[] = allFields; if (this.params.fields !== undefined && this.params.fields?.includes(' to ')) { const tokens = this.params.fields.split(' to '); const endIdx = allFields.indexOf(tokens[1]!); fieldsToGenerate = allFields.slice(0, endIdx + 1); } else if (this.params.fields !== undefined) { const endIdx = allFields.indexOf(this.params.fields); fieldsToGenerate = allFields.slice(0, endIdx + 1); } let maxUniqueIntervalsNumber = 1; for (const field of fieldsToGenerate) { const from = this.config[field]!.from, to = this.config[field]!.to; maxUniqueIntervalsNumber *= from - to + 1; } if (count > maxUniqueIntervalsNumber) { throw new RangeError(`count exceeds max number of unique intervals(${maxUniqueIntervalsNumber})`); } const rng = prand.xoroshiro128plus(seed); const intervalSet = new Set(); this.state = { rng, fieldsToGenerate, intervalSet }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } let interval, numb: number; for (;;) { interval = ''; for (const field of this.state.fieldsToGenerate) { const from = this.config[field]!.from, to = this.config[field]!.to; [numb, this.state.rng] = prand.uniformIntDistribution(from, to, this.state.rng); interval += `${numb} ${field} `; } if (!this.state.intervalSet.has(interval)) { this.state.intervalSet.add(interval); break; } } return interval; } } export class GenerateStringV2 extends AbstractGenerator<{ isUnique?: boolean; arraySize?: number; }> { static override readonly 'entityKind': string = 'GenerateString'; static override readonly version: number = 2; private state: { rng: prand.RandomGenerator; minStringLength: number; maxStringLength: number; } | undefined; override uniqueVersionOfGen = GenerateUniqueStringV2; override init({ count, seed }: { count: number; seed: number }) { super.init({ count, seed }); let minStringLength = 7; let maxStringLength = 20; if (this.stringLength !== undefined) { maxStringLength = this.stringLength; if (maxStringLength === 1) minStringLength = maxStringLength; if (maxStringLength < minStringLength) minStringLength = 1; } const rng = prand.xoroshiro128plus(seed); this.state = { rng, minStringLength, maxStringLength }; } generate() { if (this.state === undefined) { throw new Error('state is not defined.'); } const minStringLength = this.state.minStringLength, maxStringLength = this.state.maxStringLength; const stringChars = '1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'; let idx: number, strLength: number, currStr: string; currStr = ''; [strLength, this.state.rng] = prand.uniformIntDistribution( minStringLength, maxStringLength, this.state.rng, ); for (let j = 0; j < strLength; j++) { [idx, this.state.rng] = prand.uniformIntDistribution( 0, stringChars.length - 1, this.state.rng, ); currStr += stringChars[idx]; } return currStr; } } export class GenerateUniqueStringV2 extends AbstractGenerator<{ isUnique?: boolean }> { static override readonly 'entityKind': string = 'GenerateUniqueString'; static override readonly version: number = 2; private state: { rng: prand.RandomGenerator; minStringLength: number; maxStringLength: number; } | undefined; public override isUnique = true; override init({ seed, count }: { seed: number; count: number }) { const rng = prand.xoroshiro128plus(seed); let minStringLength = 7; let maxStringLength = 20; // TODO: revise later if (this.stringLength !== undefined) { maxStringLength = this.stringLength; if (maxStringLength === 1 || maxStringLength < minStringLength) minStringLength = maxStringLength; } if (maxStringLength < count.toString(16).length) { throw new Error( `You can't generate ${count} unique strings, with a maximum string length of ${maxStringLength}.`, ); } this.state = { rng, minStringLength, maxStringLength }; } generate({ i }: { i: number }) { if (this.state === undefined) { throw new Error('state is not defined.'); } const minStringLength = this.state.minStringLength, maxStringLength = this.state.maxStringLength; const stringChars = '1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'; let idx: number, strLength: number; let currStr: string; currStr = ''; const uniqueStr = i.toString(16); [strLength, this.state.rng] = prand.uniformIntDistribution( minStringLength, maxStringLength - uniqueStr.length, this.state.rng, ); for (let j = 0; j < strLength - uniqueStr.length; j++) { [idx, this.state.rng] = prand.uniformIntDistribution( 0, stringChars.length - 1, this.state.rng, ); currStr += stringChars[idx]; } return uniqueStr + currStr; } } ================================================ FILE: drizzle-seed/src/types/drizzleStudio.ts ================================================ export type DbPrimaryKey = { name: string; columns: string[]; }; export type DbForeignKey = { name: string; tableFrom: string; columnsFrom: string[]; schemaFrom: string; schemaTo: string; tableTo: string; columnsTo: string[]; onUpdate?: string; onDelete?: string; }; export type DbColumn = { name: string; type: string; primaryKey: boolean; notNull: boolean; default?: any; isUnique?: any; autoIncrement?: boolean; uniqueName?: string; nullsNotDistinct?: boolean; onUpdate?: boolean; }; export type DbTable = { name: string; type: 'table'; database?: string; schema: string; columns: Record; indexes: Record; foreignKeys: Record; compositePrimaryKeys: Record; uniqueConstraints: Record; }; export type DbView = Omit & { type: 'view' | 'mat_view'; }; export type DbSchema = { database?: string; tables: Record; views: Record; enums: Record; }; export type DrizzleStudioObjectType = { [schemaName: string]: DbSchema }; export type DrizzleStudioRelationType = { name: string; type: 'one' | 'many'; table: string; schema: string; columns: string[]; refTable: string; refSchema: string; refColumns: string[]; }; ================================================ FILE: drizzle-seed/src/types/seedService.ts ================================================ import type { AbstractGenerator } from '../services/Generators.ts'; import type { Prettify } from './tables.ts'; export type TableGeneratorsType = { [columnName: string]: Prettify< { hasSelfRelation?: boolean | undefined; hasRelation?: boolean | undefined; pRNGSeed: number; } & GeneratePossibleGeneratorsColumnType >; }; export type GeneratePossibleGeneratorsColumnType = { columnName: string; generator: AbstractGenerator | undefined; isUnique: boolean; notNull: boolean; primary: boolean; generatedIdentityType?: 'always' | 'byDefault' | undefined; wasRefined: boolean; wasDefinedBefore: boolean; isCyclic: boolean; }; export type GeneratePossibleGeneratorsTableType = Prettify<{ tableName: string; count?: number; withCount?: number; withFromTable: { [withFromTableName: string]: { repeatedValuesCount: | number | { weight: number; count: number | number[] }[]; weightedCountSeed?: number; }; }; // repeatedValuesCount?: number, // withFromTableName?: string, columnsPossibleGenerators: GeneratePossibleGeneratorsColumnType[]; }>; export type RefinementsType = Prettify<{ [tableName: string]: { count?: number; columns: { [columnName: string]: AbstractGenerator<{}> }; with?: { [tableName: string]: number | { weight: number; count: number | number[] }[] }; }; }>; ================================================ FILE: drizzle-seed/src/types/tables.ts ================================================ /* eslint-disable @typescript-eslint/no-explicit-any */ export type Column = { name: string; dataType: string; columnType: string; typeParams: { precision?: number; scale?: number; length?: number; dimensions?: number; }; size?: number; default?: any; hasDefault: boolean; enumValues?: string[]; isUnique: boolean; notNull: boolean; primary: boolean; generatedIdentityType?: 'always' | 'byDefault' | undefined; baseColumn?: Omit; }; export type Table = { name: string; columns: Column[]; primaryKeys: string[]; }; export type Relation = { // name: string; type?: 'one' | 'many'; table: string; // schema: string; columns: string[]; refTable: string; // refSchema: string; refColumns: string[]; }; export type RelationWithReferences = Relation & { isCyclic?: boolean; refTableRels: RelationWithReferences[] }; export type Prettify = & { [K in keyof T]: T[K]; } & {}; ================================================ FILE: drizzle-seed/tests/benchmarks/generatorsBenchmark.ts ================================================ import lastNames from '../../src/datasets/lastNames.ts'; import { GenerateBoolean, GenerateCity, GenerateCompanyName, GenerateCountry, GenerateDate, GenerateDatetime, GenerateDefault, GenerateEmail, GenerateFirstName, GenerateFullName, GenerateInt, GenerateInterval, GenerateIntPrimaryKey, GenerateJobTitle, GenerateJson, GenerateLastName, GenerateLine, GenerateLoremIpsum, GenerateNumber, GeneratePhoneNumber, GeneratePoint, GeneratePostcode, GenerateState, GenerateStreetAddress, GenerateString, GenerateTime, GenerateTimestamp, GenerateUniqueCompanyName, GenerateUniqueFullName, GenerateUniqueInt, GenerateUniqueInterval, GenerateUniqueLine, GenerateUniqueNumber, GenerateUniquePoint, GenerateUniquePostcode, GenerateUniqueStreetAddress, GenerateUniqueString, GenerateValuesFromArray, GenerateYear, WeightedRandomGenerator, } from '../../src/services/Generators.ts'; const benchmark = ({ generatorName, generator, count = 100000, seed = 1 }: { generatorName: string; generator: (typeof generatorsFuncs)[keyof typeof generatorsFuncs]; count?: number; seed?: number; }) => { generator.init({ count, seed }); let timeSpentToInit = 0, timeSpent = 0; const t0 = new Date(); generator.init({ count, seed }); timeSpentToInit += (Date.now() - t0.getTime()) / 1000; for (let i = 0; i < count; i++) { const val = generator.generate({ i }); if (val === undefined) { console.log(val, `in ${generatorName} generator.`); } } timeSpent += (Date.now() - t0.getTime()) / 1000; console.log(`${generatorName} spent ${timeSpentToInit} to init and spent ${timeSpent} to generate ${count} rows.`); console.log( 'time spent in particular code part:', generator.timeSpent, ';', generator.timeSpent === undefined ? generator.timeSpent : (generator.timeSpent / timeSpent), 'percent of all time', ); console.log('\n'); }; const generatorsFuncs = { default: new GenerateDefault({ defaultValue: 'defaultValue' }), valuesFromArray: new GenerateValuesFromArray({ values: lastNames }), intPrimaryKey: new GenerateIntPrimaryKey({}), number: new GenerateNumber({}), uniqueNumber: new GenerateUniqueNumber({}), int: new GenerateInt({}), uniqueInt: new GenerateUniqueInt({}), boolean: new GenerateBoolean({}), date: new GenerateDate({}), time: new GenerateTime({}), timestamp: new GenerateTimestamp({}), datetime: new GenerateDatetime({}), year: new GenerateYear({}), json: new GenerateJson({}), jsonb: new GenerateJson({}), interval: new GenerateInterval({}), uniqueInterval: new GenerateUniqueInterval({}), string: new GenerateString({}), uniqueString: new GenerateUniqueString({}), firstName: new GenerateFirstName({}), // uniqueFirstName: new GenerateUniqueName({}), lastName: new GenerateLastName({}), // uniqueLastName: new GenerateUniqueSurname({}), fullName: new GenerateFullName({}), uniqueFullName: new GenerateUniqueFullName({}), email: new GenerateEmail({}), phoneNumber: new GeneratePhoneNumber({ template: '+380 ## ## ### ##' }), country: new GenerateCountry({}), // uniqueCountry: new GenerateUniqueCountry({}), city: new GenerateCity({}), // uniqueCity: new GenerateUniqueCity({}), streetAddress: new GenerateStreetAddress({}), uniqueStreetAddress: new GenerateUniqueStreetAddress({}), jobTitle: new GenerateJobTitle({}), postcode: new GeneratePostcode({}), uniquePostcode: new GenerateUniquePostcode({}), state: new GenerateState({}), companyName: new GenerateCompanyName({}), uniqueCompanyName: new GenerateUniqueCompanyName({}), loremIpsum: new GenerateLoremIpsum({}), point: new GeneratePoint({}), uniquePoint: new GenerateUniquePoint({}), line: new GenerateLine({}), uniqueLine: new GenerateUniqueLine({}), weightedRandom: new WeightedRandomGenerator([ { weight: 0.8, value: new GenerateUniqueInt({ minValue: 0, maxValue: 90000 }) }, { weight: 0.2, value: new GenerateDefault({ defaultValue: Number.NaN }) }, ]), }; for (const [generatorName, generator] of Object.entries(generatorsFuncs)) { benchmark({ generatorName, generator, count: 100000, seed: 1 }); } ================================================ FILE: drizzle-seed/tests/mysql/allDataTypesTest/mysqlSchema.ts ================================================ import { bigint, binary, boolean, char, date, datetime, decimal, double, float, int, json, mediumint, mysqlEnum, mysqlTable, real, serial, smallint, text, time, timestamp, tinyint, varbinary, varchar, year, } from 'drizzle-orm/mysql-core'; export const allDataTypes = mysqlTable('all_data_types', { int: int('integer'), tinyint: tinyint('tinyint'), smallint: smallint('smallint'), mediumint: mediumint('mediumint'), biginteger: bigint('bigint', { mode: 'bigint' }), bigintNumber: bigint('bigint_number', { mode: 'number' }), real: real('real'), decimal: decimal('decimal'), double: double('double'), float: float('float'), serial: serial('serial'), binary: binary('binary', { length: 255 }), varbinary: varbinary('varbinary', { length: 256 }), char: char('char', { length: 255 }), varchar: varchar('varchar', { length: 256 }), text: text('text'), boolean: boolean('boolean'), dateString: date('date_string', { mode: 'string' }), date: date('date', { mode: 'date' }), datetime: datetime('datetime', { mode: 'date' }), datetimeString: datetime('datetimeString', { mode: 'string' }), time: time('time'), year: year('year'), timestampDate: timestamp('timestamp_date', { mode: 'date' }), timestampString: timestamp('timestamp_string', { mode: 'string' }), json: json('json'), mysqlEnum: mysqlEnum('popularity', ['unknown', 'known', 'popular']), }); ================================================ FILE: drizzle-seed/tests/mysql/allDataTypesTest/mysql_all_data_types.test.ts ================================================ import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import getPort from 'get-port'; import type { Connection } from 'mysql2/promise'; import { createConnection } from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, expect, test } from 'vitest'; import { seed } from '../../../src/index.ts'; import * as schema from './mysqlSchema.ts'; let mysqlContainer: Docker.Container; let client: Connection | undefined; let db: MySql2Database; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => // eslint-disable-next-line @typescript-eslint/no-unsafe-argument docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } beforeAll(async () => { const connectionString = await createDockerDB(); const sleep = 1000; let timeLeft = 40000; let connected = false; let lastError: unknown | undefined; do { try { const client = await createConnection(connectionString); await client.connect(); db = drizzle(client); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); throw lastError; } await db.execute( sql` CREATE TABLE \`all_data_types\` ( \`integer\` int, \`tinyint\` tinyint, \`smallint\` smallint, \`mediumint\` mediumint, \`bigint\` bigint, \`bigint_number\` bigint, \`real\` real, \`decimal\` decimal, \`double\` double, \`float\` float, \`serial\` serial AUTO_INCREMENT, \`binary\` binary(255), \`varbinary\` varbinary(256), \`char\` char(255), \`varchar\` varchar(256), \`text\` text, \`boolean\` boolean, \`date_string\` date, \`date\` date, \`datetime\` datetime, \`datetimeString\` datetime, \`time\` time, \`year\` year, \`timestamp_date\` timestamp, \`timestamp_string\` timestamp, \`json\` json, \`popularity\` enum('unknown','known','popular') ); `, ); }); afterAll(async () => { await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); }); test('basic seed test', async () => { await seed(db, schema, { count: 10000 }); const allDataTypes = await db.select().from(schema.allDataTypes); // every value in each 10 rows does not equal undefined. const predicate = allDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: drizzle-seed/tests/mysql/cyclicTables/cyclicTables.test.ts ================================================ import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import getPort from 'get-port'; import type { Connection } from 'mysql2/promise'; import { createConnection } from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; import { reset, seed } from '../../../src/index.ts'; import * as schema from './mysqlSchema.ts'; let mysqlContainer: Docker.Container; let client: Connection; let db: MySql2Database; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => // eslint-disable-next-line @typescript-eslint/no-unsafe-argument docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } beforeAll(async () => { const connectionString = await createDockerDB(); const sleep = 1000; let timeLeft = 40000; let connected = false; let lastError: unknown | undefined; do { try { client = await createConnection(connectionString); await client.connect(); db = drizzle(client); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); throw lastError; } await db.execute( sql` create table model ( id int not null primary key, name varchar(256) not null, defaultImageId int null ); `, ); await db.execute( sql` create table model_image ( id int not null primary key, url varchar(256) not null, caption varchar(256) null, modelId int not null, constraint model_image_modelId_model_id_fk foreign key (modelId) references model (id) ); `, ); await db.execute( sql` alter table model add constraint model_defaultImageId_model_image_id_fk foreign key (defaultImageId) references model_image (id); `, ); // 3 tables case await db.execute( sql` create table model1 ( id int not null primary key, name varchar(256) not null, userId int null, defaultImageId int null ); `, ); await db.execute( sql` create table model_image1 ( id int not null primary key, url varchar(256) not null, caption varchar(256) null, modelId int not null, constraint model_image1_modelId_model1_id_fk foreign key (modelId) references model1 (id) ); `, ); await db.execute( sql` create table user ( id int not null primary key, name text null, invitedBy int null, imageId int not null, constraint user_imageId_model_image1_id_fk foreign key (imageId) references model_image1 (id), constraint user_invitedBy_user_id_fk foreign key (invitedBy) references user (id) ); `, ); await db.execute( sql` alter table model1 add constraint model1_userId_user_id_fk foreign key (userId) references user (id); `, ); }); afterAll(async () => { await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); }); afterEach(async () => { await reset(db, schema); }); test('2 cyclic tables test', async () => { await seed(db, { modelTable: schema.modelTable, modelImageTable: schema.modelImageTable, }); const modelTable = await db.select().from(schema.modelTable); const modelImageTable = await db.select().from(schema.modelImageTable); expect(modelTable.length).toBe(10); let predicate = modelTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(modelImageTable.length).toBe(10); predicate = modelImageTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('3 cyclic tables test', async () => { await seed(db, { modelTable1: schema.modelTable1, modelImageTable1: schema.modelImageTable1, user: schema.user, }); const modelTable1 = await db.select().from(schema.modelTable1); const modelImageTable1 = await db.select().from(schema.modelImageTable1); const user = await db.select().from(schema.user); expect(modelTable1.length).toBe(10); let predicate = modelTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(modelImageTable1.length).toBe(10); predicate = modelImageTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(user.length).toBe(10); predicate = user.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: drizzle-seed/tests/mysql/cyclicTables/mysqlSchema.ts ================================================ import { relations } from 'drizzle-orm'; import type { AnyMySqlColumn } from 'drizzle-orm/mysql-core'; import { int, mysqlTable, serial, text, varchar } from 'drizzle-orm/mysql-core'; // MODEL export const modelTable = mysqlTable( 'model', { id: serial().primaryKey(), name: varchar({ length: 256 }).notNull(), defaultImageId: int().references(() => modelImageTable.id), }, ); export const modelRelations = relations(modelTable, ({ one, many }) => ({ images: many(modelImageTable), defaultImage: one(modelImageTable, { fields: [modelTable.defaultImageId], references: [modelImageTable.id], }), })); // MODEL IMAGE export const modelImageTable = mysqlTable( 'model_image', { id: serial().primaryKey(), url: varchar({ length: 256 }).notNull(), caption: varchar({ length: 256 }), modelId: int() .notNull() .references((): AnyMySqlColumn => modelTable.id), }, ); export const modelImageRelations = relations(modelImageTable, ({ one }) => ({ model: one(modelTable, { fields: [modelImageTable.modelId], references: [modelTable.id], }), })); // 3 tables case export const modelTable1 = mysqlTable( 'model1', { id: serial().primaryKey(), name: varchar({ length: 256 }).notNull(), userId: int() .references(() => user.id), defaultImageId: int(), }, ); export const modelImageTable1 = mysqlTable( 'model_image1', { id: serial().primaryKey(), url: varchar({ length: 256 }).notNull(), caption: varchar({ length: 256 }), modelId: int().notNull() .references((): AnyMySqlColumn => modelTable1.id), }, ); export const user = mysqlTable( 'user', { id: serial().primaryKey(), name: text(), invitedBy: int().references((): AnyMySqlColumn => user.id), imageId: int() .notNull() .references((): AnyMySqlColumn => modelImageTable1.id), }, ); ================================================ FILE: drizzle-seed/tests/mysql/generatorsTest/generators.test.ts ================================================ import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import getPort from 'get-port'; import type { Connection } from 'mysql2/promise'; import { createConnection } from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, expect, test } from 'vitest'; import { seed } from '../../../src/index.ts'; import * as schema from './mysqlSchema.ts'; let mysqlContainer: Docker.Container; let client: Connection; let db: MySql2Database; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => // eslint-disable-next-line @typescript-eslint/no-unsafe-argument docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } beforeAll(async () => { const connectionString = await createDockerDB(); const sleep = 1000; let timeLeft = 40000; let connected = false; let lastError: unknown | undefined; do { try { client = await createConnection(connectionString); await client.connect(); db = drizzle(client); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); throw lastError; } await db.execute( sql` CREATE TABLE \`datetime_table\` ( \`datetime\` datetime ); `, ); await db.execute( sql` CREATE TABLE \`year_table\` ( \`year\` year ); `, ); }); afterAll(async () => { await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); }); const count = 10000; test('datetime generator test', async () => { await seed(db, { datetimeTable: schema.datetimeTable }).refine((funcs) => ({ datetimeTable: { count, columns: { datetime: funcs.datetime(), }, }, })); const data = await db.select().from(schema.datetimeTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('year generator test', async () => { await seed(db, { yearTable: schema.yearTable }).refine((funcs) => ({ yearTable: { count, columns: { year: funcs.year(), }, }, })); const data = await db.select().from(schema.yearTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: drizzle-seed/tests/mysql/generatorsTest/mysqlSchema.ts ================================================ import { datetime, mysqlTable, year } from 'drizzle-orm/mysql-core'; export const datetimeTable = mysqlTable('datetime_table', { datetime: datetime('datetime'), }); export const yearTable = mysqlTable('year_table', { year: year('year'), }); ================================================ FILE: drizzle-seed/tests/mysql/mysql.test.ts ================================================ import Docker from 'dockerode'; import { relations, sql } from 'drizzle-orm'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import getPort from 'get-port'; import type { Connection } from 'mysql2/promise'; import { createConnection } from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, afterEach, beforeAll, expect, test, vi } from 'vitest'; import { reset, seed } from '../../src/index.ts'; import * as schema from './mysqlSchema.ts'; let mysqlContainer: Docker.Container; let client: Connection; let db: MySql2Database; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => // eslint-disable-next-line @typescript-eslint/no-unsafe-argument docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } beforeAll(async () => { const connectionString = await createDockerDB(); const sleep = 1000; let timeLeft = 40000; let connected = false; let lastError: unknown | undefined; do { try { client = await createConnection(connectionString); await client.connect(); db = drizzle(client); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); throw lastError; } await db.execute( sql` CREATE TABLE \`customer\` ( \`id\` varchar(256) NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text, \`region\` text, \`country\` text NOT NULL, \`phone\` text NOT NULL, \`fax\` text, CONSTRAINT \`customer_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`order_detail\` ( \`unit_price\` float NOT NULL, \`quantity\` int NOT NULL, \`discount\` float NOT NULL, \`order_id\` int NOT NULL, \`product_id\` int NOT NULL ); `, ); await db.execute( sql` CREATE TABLE \`employee\` ( \`id\` int NOT NULL, \`last_name\` text NOT NULL, \`first_name\` text, \`title\` text NOT NULL, \`title_of_courtesy\` text NOT NULL, \`birth_date\` timestamp NOT NULL, \`hire_date\` timestamp NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`home_phone\` text NOT NULL, \`extension\` int NOT NULL, \`notes\` text NOT NULL, \`reports_to\` int, \`photo_path\` text, CONSTRAINT \`employee_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`order\` ( \`id\` int NOT NULL, \`order_date\` timestamp NOT NULL, \`required_date\` timestamp NOT NULL, \`shipped_date\` timestamp, \`ship_via\` int NOT NULL, \`freight\` float NOT NULL, \`ship_name\` text NOT NULL, \`ship_city\` text NOT NULL, \`ship_region\` text, \`ship_postal_code\` text, \`ship_country\` text NOT NULL, \`customer_id\` varchar(256) NOT NULL, \`employee_id\` int NOT NULL, CONSTRAINT \`order_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`product\` ( \`id\` int NOT NULL, \`name\` text NOT NULL, \`quantity_per_unit\` text NOT NULL, \`unit_price\` float NOT NULL, \`units_in_stock\` int NOT NULL, \`units_on_order\` int NOT NULL, \`reorder_level\` int NOT NULL, \`discontinued\` int NOT NULL, \`supplier_id\` int NOT NULL, CONSTRAINT \`product_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`supplier\` ( \`id\` int NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`region\` text, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`phone\` text NOT NULL, CONSTRAINT \`supplier_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`users\` ( \`id\` int, \`name\` text, \`invitedBy\` int, CONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`posts\` ( \`id\` int, \`name\` text, \`content\` text, \`userId\` int, CONSTRAINT \`posts_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` ALTER TABLE \`order_detail\` ADD CONSTRAINT \`order_detail_order_id_order_id_fk\` FOREIGN KEY (\`order_id\`) REFERENCES \`order\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`order_detail\` ADD CONSTRAINT \`order_detail_product_id_product_id_fk\` FOREIGN KEY (\`product_id\`) REFERENCES \`product\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`employee\` ADD CONSTRAINT \`employee_reports_to_employee_id_fk\` FOREIGN KEY (\`reports_to\`) REFERENCES \`employee\`(\`id\`) ON DELETE no action ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`order\` ADD CONSTRAINT \`order_customer_id_customer_id_fk\` FOREIGN KEY (\`customer_id\`) REFERENCES \`customer\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`order\` ADD CONSTRAINT \`order_employee_id_employee_id_fk\` FOREIGN KEY (\`employee_id\`) REFERENCES \`employee\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`product\` ADD CONSTRAINT \`product_supplier_id_supplier_id_fk\` FOREIGN KEY (\`supplier_id\`) REFERENCES \`supplier\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`users\` ADD CONSTRAINT \`users_invitedBy_users_id_fk\` FOREIGN KEY (\`invitedBy\`) REFERENCES \`users\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`posts\` ADD CONSTRAINT \`posts_userId_users_id_fk\` FOREIGN KEY (\`userId\`) REFERENCES \`users\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); }); afterAll(async () => { await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); }); afterEach(async () => { await reset(db, schema); }); test('basic seed test', async () => { await seed(db, schema); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(10); expect(details.length).toBe(10); expect(employees.length).toBe(10); expect(orders.length).toBe(10); expect(products.length).toBe(10); expect(suppliers.length).toBe(10); }); test('seed with options.count:11 test', async () => { await seed(db, schema, { count: 11 }); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(11); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) customers count', async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) all tables count', async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, }, details: { count: 13, }, employees: { count: 14, }, orders: { count: 15, }, products: { count: 16, }, suppliers: { count: 17, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(13); expect(employees.length).toBe(14); expect(orders.length).toBe(15); expect(products.length).toBe(16); expect(suppliers.length).toBe(17); }); test("redefine(refine) orders count using 'with' in customers", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 13, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test("sequential using of 'with'", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 12, with: { details: 3, }, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(24); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('overlapping a foreign key constraint with a one-to-many relation', async () => { const postsRelation = relations(schema.posts, ({ one }) => ({ user: one(schema.users, { fields: [schema.posts.userId], references: [schema.users.id] }), })); const consoleMock = vi.spyOn(console, 'warn').mockImplementation(() => {}); await reset(db, { users: schema.users, posts: schema.posts, postsRelation }); await seed(db, { users: schema.users, posts: schema.posts, postsRelation }); // expecting to get a warning expect(consoleMock).toBeCalled(); expect(consoleMock).toBeCalledWith(expect.stringMatching(/^You are providing a one-to-many relation.+/)); const users = await db.select().from(schema.users); const posts = await db.select().from(schema.posts); expect(users.length).toBe(10); let predicate = users.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(posts.length).toBe(10); predicate = posts.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: drizzle-seed/tests/mysql/mysqlSchema.ts ================================================ import type { AnyMySqlColumn } from 'drizzle-orm/mysql-core'; import { float, int, mysqlTable, text, timestamp, varchar } from 'drizzle-orm/mysql-core'; export const customers = mysqlTable('customer', { id: varchar('id', { length: 256 }).primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = mysqlTable( 'employee', { id: int('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: timestamp('birth_date').notNull(), hireDate: timestamp('hire_date').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: int('extension').notNull(), notes: text('notes').notNull(), reportsTo: int('reports_to').references((): AnyMySqlColumn => employees.id), photoPath: text('photo_path'), }, ); export const orders = mysqlTable('order', { id: int('id').primaryKey(), orderDate: timestamp('order_date').notNull(), requiredDate: timestamp('required_date').notNull(), shippedDate: timestamp('shipped_date'), shipVia: int('ship_via').notNull(), freight: float('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: varchar('customer_id', { length: 256 }) .notNull() .references(() => customers.id, { onDelete: 'cascade' }), employeeId: int('employee_id') .notNull() .references(() => employees.id, { onDelete: 'cascade' }), }); export const suppliers = mysqlTable('supplier', { id: int('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }); export const products = mysqlTable('product', { id: int('id').primaryKey(), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: float('unit_price').notNull(), unitsInStock: int('units_in_stock').notNull(), unitsOnOrder: int('units_on_order').notNull(), reorderLevel: int('reorder_level').notNull(), discontinued: int('discontinued').notNull(), supplierId: int('supplier_id') .notNull() .references(() => suppliers.id, { onDelete: 'cascade' }), }); export const details = mysqlTable('order_detail', { unitPrice: float('unit_price').notNull(), quantity: int('quantity').notNull(), discount: float('discount').notNull(), orderId: int('order_id') .notNull() .references(() => orders.id, { onDelete: 'cascade' }), productId: int('product_id') .notNull() .references(() => products.id, { onDelete: 'cascade' }), }); export const users = mysqlTable( 'users', { id: int().primaryKey(), name: text(), invitedBy: int().references((): AnyMySqlColumn => users.id), }, ); export const posts = mysqlTable( 'posts', { id: int().primaryKey(), name: text(), content: text(), userId: int().references(() => users.id), }, ); ================================================ FILE: drizzle-seed/tests/mysql/softRelationsTest/mysqlSchema.ts ================================================ import { relations } from 'drizzle-orm'; import { float, int, mysqlTable, text, timestamp, varchar } from 'drizzle-orm/mysql-core'; export const customers = mysqlTable('customer', { id: varchar('id', { length: 256 }).primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = mysqlTable( 'employee', { id: int('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: timestamp('birth_date').notNull(), hireDate: timestamp('hire_date').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: int('extension').notNull(), notes: text('notes').notNull(), reportsTo: int('reports_to'), photoPath: text('photo_path'), }, ); export const employeesRelations = relations(employees, ({ one }) => ({ employee: one(employees, { fields: [employees.reportsTo], references: [employees.id], }), })); export const orders = mysqlTable('order', { id: int('id').primaryKey(), orderDate: timestamp('order_date').notNull(), requiredDate: timestamp('required_date').notNull(), shippedDate: timestamp('shipped_date'), shipVia: int('ship_via').notNull(), freight: float('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: varchar('customer_id', { length: 256 }).notNull(), employeeId: int('employee_id').notNull(), }); export const ordersRelations = relations(orders, ({ one }) => ({ customer: one(customers, { fields: [orders.customerId], references: [customers.id], }), employee: one(employees, { fields: [orders.employeeId], references: [employees.id], }), })); export const suppliers = mysqlTable('supplier', { id: int('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }); export const products = mysqlTable('product', { id: int('id').primaryKey(), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: float('unit_price').notNull(), unitsInStock: int('units_in_stock').notNull(), unitsOnOrder: int('units_on_order').notNull(), reorderLevel: int('reorder_level').notNull(), discontinued: int('discontinued').notNull(), supplierId: int('supplier_id').notNull(), }); export const productsRelations = relations(products, ({ one }) => ({ supplier: one(suppliers, { fields: [products.supplierId], references: [suppliers.id], }), })); export const details = mysqlTable('order_detail', { unitPrice: float('unit_price').notNull(), quantity: int('quantity').notNull(), discount: float('discount').notNull(), orderId: int('order_id').notNull(), productId: int('product_id').notNull(), }); export const detailsRelations = relations(details, ({ one }) => ({ order: one(orders, { fields: [details.orderId], references: [orders.id], }), product: one(products, { fields: [details.productId], references: [products.id], }), })); ================================================ FILE: drizzle-seed/tests/mysql/softRelationsTest/softRelations.test.ts ================================================ import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import getPort from 'get-port'; import type { Connection } from 'mysql2/promise'; import { createConnection } from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; import { reset, seed } from '../../../src/index.ts'; import * as schema from './mysqlSchema.ts'; let mysqlContainer: Docker.Container; let client: Connection; let db: MySql2Database; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => // eslint-disable-next-line @typescript-eslint/no-unsafe-argument docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } beforeAll(async () => { const connectionString = await createDockerDB(); const sleep = 1000; let timeLeft = 40000; let connected = false; let lastError: unknown | undefined; do { try { client = await createConnection(connectionString); await client.connect(); db = drizzle(client); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); throw lastError; } await db.execute( sql` CREATE TABLE \`customer\` ( \`id\` varchar(256) NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text, \`region\` text, \`country\` text NOT NULL, \`phone\` text NOT NULL, \`fax\` text, CONSTRAINT \`customer_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`order_detail\` ( \`unit_price\` float NOT NULL, \`quantity\` int NOT NULL, \`discount\` float NOT NULL, \`order_id\` int NOT NULL, \`product_id\` int NOT NULL ); `, ); await db.execute( sql` CREATE TABLE \`employee\` ( \`id\` int NOT NULL, \`last_name\` text NOT NULL, \`first_name\` text, \`title\` text NOT NULL, \`title_of_courtesy\` text NOT NULL, \`birth_date\` timestamp NOT NULL, \`hire_date\` timestamp NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`home_phone\` text NOT NULL, \`extension\` int NOT NULL, \`notes\` text NOT NULL, \`reports_to\` int, \`photo_path\` text, CONSTRAINT \`employee_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`order\` ( \`id\` int NOT NULL, \`order_date\` timestamp NOT NULL, \`required_date\` timestamp NOT NULL, \`shipped_date\` timestamp, \`ship_via\` int NOT NULL, \`freight\` float NOT NULL, \`ship_name\` text NOT NULL, \`ship_city\` text NOT NULL, \`ship_region\` text, \`ship_postal_code\` text, \`ship_country\` text NOT NULL, \`customer_id\` varchar(256) NOT NULL, \`employee_id\` int NOT NULL, CONSTRAINT \`order_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`product\` ( \`id\` int NOT NULL, \`name\` text NOT NULL, \`quantity_per_unit\` text NOT NULL, \`unit_price\` float NOT NULL, \`units_in_stock\` int NOT NULL, \`units_on_order\` int NOT NULL, \`reorder_level\` int NOT NULL, \`discontinued\` int NOT NULL, \`supplier_id\` int NOT NULL, CONSTRAINT \`product_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`supplier\` ( \`id\` int NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`region\` text, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`phone\` text NOT NULL, CONSTRAINT \`supplier_id\` PRIMARY KEY(\`id\`) ); `, ); }); afterAll(async () => { await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); }); afterEach(async () => { await reset(db, schema); }); const checkSoftRelations = ( customers: (typeof schema.customers.$inferSelect)[], details: (typeof schema.details.$inferSelect)[], employees: (typeof schema.employees.$inferSelect)[], orders: (typeof schema.orders.$inferSelect)[], products: (typeof schema.products.$inferSelect)[], suppliers: (typeof schema.suppliers.$inferSelect)[], ) => { // employees soft relations check const employeeIds = new Set(employees.map((employee) => employee.id)); const employeesPredicate = employees.every((employee) => employee.reportsTo !== null && employeeIds.has(employee.reportsTo) ); expect(employeesPredicate).toBe(true); // orders soft relations check const customerIds = new Set(customers.map((customer) => customer.id)); const ordersPredicate1 = orders.every((order) => order.customerId !== null && customerIds.has(order.customerId)); expect(ordersPredicate1).toBe(true); const ordersPredicate2 = orders.every((order) => order.employeeId !== null && employeeIds.has(order.employeeId)); expect(ordersPredicate2).toBe(true); // product soft relations check const supplierIds = new Set(suppliers.map((supplier) => supplier.id)); const productsPredicate = products.every((product) => product.supplierId !== null && supplierIds.has(product.supplierId) ); expect(productsPredicate).toBe(true); // details soft relations check const orderIds = new Set(orders.map((order) => order.id)); const detailsPredicate1 = details.every((detail) => detail.orderId !== null && orderIds.has(detail.orderId)); expect(detailsPredicate1).toBe(true); const productIds = new Set(products.map((product) => product.id)); const detailsPredicate2 = details.every((detail) => detail.productId !== null && productIds.has(detail.productId)); expect(detailsPredicate2).toBe(true); }; test('basic seed, soft relations test', async () => { await seed(db, schema); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(10); expect(details.length).toBe(10); expect(employees.length).toBe(10); expect(orders.length).toBe(10); expect(products.length).toBe(10); expect(suppliers.length).toBe(10); checkSoftRelations(customers, details, employees, orders, products, suppliers); }); test("redefine(refine) orders count using 'with' in customers, soft relations test", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 13, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); checkSoftRelations(customers, details, employees, orders, products, suppliers); }); test("sequential using of 'with', soft relations test", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 12, with: { details: 3, }, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(24); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); checkSoftRelations(customers, details, employees, orders, products, suppliers); }); ================================================ FILE: drizzle-seed/tests/northwind/mysqlSchema.ts ================================================ import type { AnyMySqlColumn } from 'drizzle-orm/mysql-core'; import { float, int, mysqlTable, text, timestamp, varchar } from 'drizzle-orm/mysql-core'; export const customers = mysqlTable('customer', { id: varchar('id', { length: 256 }).primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = mysqlTable( 'employee', { id: int('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: timestamp('birth_date').notNull(), hireDate: timestamp('hire_date').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: int('extension').notNull(), notes: text('notes').notNull(), reportsTo: int('reports_to').references((): AnyMySqlColumn => employees.id), photoPath: text('photo_path'), }, ); export const orders = mysqlTable('order', { id: int('id').primaryKey(), orderDate: timestamp('order_date').notNull(), requiredDate: timestamp('required_date').notNull(), shippedDate: timestamp('shipped_date'), shipVia: int('ship_via').notNull(), freight: float('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: varchar('customer_id', { length: 256 }) .notNull() .references(() => customers.id, { onDelete: 'cascade' }), employeeId: int('employee_id') .notNull() .references(() => employees.id, { onDelete: 'cascade' }), }); export const suppliers = mysqlTable('supplier', { id: int('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }); export const products = mysqlTable('product', { id: int('id').primaryKey(), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: float('unit_price').notNull(), unitsInStock: int('units_in_stock').notNull(), unitsOnOrder: int('units_on_order').notNull(), reorderLevel: int('reorder_level').notNull(), discontinued: int('discontinued').notNull(), supplierId: int('supplier_id') .notNull() .references(() => suppliers.id, { onDelete: 'cascade' }), }); export const details = mysqlTable('order_detail', { unitPrice: float('unit_price').notNull(), quantity: int('quantity').notNull(), discount: float('discount').notNull(), orderId: int('order_id') .notNull() .references(() => orders.id, { onDelete: 'cascade' }), productId: int('product_id') .notNull() .references(() => products.id, { onDelete: 'cascade' }), }); ================================================ FILE: drizzle-seed/tests/northwind/mysqlTest.ts ================================================ import 'dotenv/config'; import path from 'path'; import { drizzle } from 'drizzle-orm/mysql2'; import { migrate } from 'drizzle-orm/mysql2/migrator'; import mysql from 'mysql2/promise'; import * as schema from './mysqlSchema.ts'; import { seed } from '../../src/index.ts'; const { Mysql_HOST, Mysql_PORT, Mysql_DATABASE, Mysql_USER, Mysql_PASSWORD } = process.env; const mysqlPool = mysql.createPool({ host: Mysql_HOST, port: Number(Mysql_PORT) || 3306, database: Mysql_DATABASE, user: Mysql_USER, password: Mysql_PASSWORD, // ssl: { rejectUnauthorized: false } }); const db = drizzle(mysqlPool); console.log('database connection was established successfully.'); (async () => { await migrate(db, { migrationsFolder: path.join(__dirname, '../../../mysqlMigrations') }); console.log('database was migrated.'); // await seed(db, schema, { count: 100000, seed: 1 }); const titlesOfCourtesy = ['Ms.', 'Mrs.', 'Dr.']; const unitsOnOrders = [0, 10, 20, 30, 50, 60, 70, 80, 100]; const reorderLevels = [0, 5, 10, 15, 20, 25, 30]; const quantityPerUnit = [ '100 - 100 g pieces', '100 - 250 g bags', '10 - 200 g glasses', '10 - 4 oz boxes', '10 - 500 g pkgs.', '10 - 500 g pkgs.', '10 boxes x 12 pieces', '10 boxes x 20 bags', '10 boxes x 8 pieces', '10 kg pkg.', '10 pkgs.', '12 - 100 g bars', '12 - 100 g pkgs', '12 - 12 oz cans', '12 - 1 lb pkgs.', '12 - 200 ml jars', '12 - 250 g pkgs.', '12 - 355 ml cans', '12 - 500 g pkgs.', '750 cc per bottle', '5 kg pkg.', '50 bags x 30 sausgs.', '500 ml', '500 g', '48 pieces', '48 - 6 oz jars', '4 - 450 g glasses', '36 boxes', '32 - 8 oz bottles', '32 - 500 g boxes', ]; const discounts = [0.05, 0.15, 0.2, 0.25]; await seed(db, schema).refine((funcs) => ({ customers: { count: 10000, columns: { companyName: funcs.companyName({}), contactName: funcs.fullName({}), contactTitle: funcs.jobTitle({}), address: funcs.streetAddress({}), city: funcs.city({}), postalCode: funcs.postcode({}), region: funcs.state({}), country: funcs.country({}), phone: funcs.phoneNumber({ template: '(###) ###-####' }), fax: funcs.phoneNumber({ template: '(###) ###-####' }), }, }, employees: { count: 200, columns: { firstName: funcs.firstName({}), lastName: funcs.lastName({}), title: funcs.jobTitle({}), titleOfCourtesy: funcs.valuesFromArray({ values: titlesOfCourtesy }), birthDate: funcs.date({ minDate: '1990-01-01', maxDate: '2010-12-31' }), hireDate: funcs.date({ minDate: '2010-12-31', maxDate: '2024-08-26' }), address: funcs.streetAddress({}), city: funcs.city({}), postalCode: funcs.postcode({}), country: funcs.country({}), homePhone: funcs.phoneNumber({ template: '(###) ###-####' }), extension: funcs.int({ minValue: 428, maxValue: 5467 }), notes: funcs.loremIpsum({}), }, }, orders: { count: 50000, columns: { shipVia: funcs.int({ minValue: 1, maxValue: 3 }), freight: funcs.number({ minValue: 0, maxValue: 1000, precision: 100 }), shipName: funcs.streetAddress({}), shipCity: funcs.city({}), shipRegion: funcs.state({}), shipPostalCode: funcs.postcode({}), shipCountry: funcs.country({}), }, with: { details: [ { weight: 0.6, count: [1, 2, 3, 4] }, { weight: 0.2, count: [5, 6, 7, 8, 9, 10] }, { weight: 0.15, count: [11, 12, 13, 14, 15, 16, 17] }, { weight: 0.05, count: [18, 19, 20, 21, 22, 23, 24, 25] }, ], }, }, suppliers: { count: 1000, columns: { companyName: funcs.companyName({}), contactName: funcs.fullName({}), contactTitle: funcs.jobTitle({}), address: funcs.streetAddress({}), city: funcs.city({}), postalCode: funcs.postcode({}), region: funcs.state({}), country: funcs.country({}), phone: funcs.phoneNumber({ template: '(###) ###-####' }), }, }, products: { count: 5000, columns: { name: funcs.companyName({}), quantityPerUnit: funcs.valuesFromArray({ values: quantityPerUnit }), unitPrice: funcs.weightedRandom( [ { weight: 0.5, value: funcs.int({ minValue: 3, maxValue: 300 }), }, { weight: 0.5, value: funcs.number({ minValue: 3, maxValue: 300, precision: 100 }), }, ], ), unitsInStock: funcs.int({ minValue: 0, maxValue: 125 }), unitsOnOrder: funcs.valuesFromArray({ values: unitsOnOrders }), reorderLevel: funcs.valuesFromArray({ values: reorderLevels }), discontinued: funcs.int({ minValue: 0, maxValue: 1 }), }, }, details: { columns: { unitPrice: funcs.number({ minValue: 10, maxValue: 130 }), quantity: funcs.int({ minValue: 1, maxValue: 130 }), discount: funcs.weightedRandom( [ { weight: 0.5, value: funcs.valuesFromArray({ values: discounts }) }, { weight: 0.5, value: funcs.default({ defaultValue: 0 }) }, ], ), }, }, })); await mysqlPool.end(); })().then(); ================================================ FILE: drizzle-seed/tests/northwind/pgSchema.ts ================================================ import type { AnyPgColumn } from 'drizzle-orm/pg-core'; import { integer, numeric, pgSchema, text, timestamp, varchar } from 'drizzle-orm/pg-core'; export const schema = pgSchema('seeder_lib_pg'); export const customers = schema.table('customer', { id: varchar('id', { length: 256 }).primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = schema.table('employee', { id: integer('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: timestamp('birth_date').notNull(), hireDate: timestamp('hire_date').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: integer('extension').notNull(), notes: text('notes').notNull(), reportsTo: integer('reports_to').references((): AnyPgColumn => employees.id), photoPath: text('photo_path'), }); export const orders = schema.table('order', { id: integer('id').primaryKey(), orderDate: timestamp('order_date').notNull(), requiredDate: timestamp('required_date').notNull(), shippedDate: timestamp('shipped_date'), shipVia: integer('ship_via').notNull(), freight: numeric('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: text('customer_id') .notNull() .references(() => customers.id, { onDelete: 'cascade' }), employeeId: integer('employee_id') .notNull() .references(() => employees.id, { onDelete: 'cascade' }), }); export const suppliers = schema.table( 'supplier', { id: integer('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }, ); export const products = schema.table('product', { id: integer('id').primaryKey(), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: numeric('unit_price').notNull(), unitsInStock: integer('units_in_stock').notNull(), unitsOnOrder: integer('units_on_order').notNull(), reorderLevel: integer('reorder_level').notNull(), discontinued: integer('discontinued').notNull(), supplierId: integer('supplier_id') .notNull() .references(() => suppliers.id, { onDelete: 'cascade' }), }); export const details = schema.table('order_detail', { unitPrice: numeric('unit_price').notNull(), quantity: integer('quantity').notNull(), discount: numeric('discount').notNull(), orderId: integer('order_id') .notNull() .references(() => orders.id, { onDelete: 'cascade' }), productId: integer('product_id') .notNull() .references(() => products.id, { onDelete: 'cascade' }), }); ================================================ FILE: drizzle-seed/tests/northwind/pgTest.ts ================================================ import 'dotenv/config'; import path from 'path'; import { drizzle } from 'drizzle-orm/node-postgres'; import { migrate } from 'drizzle-orm/node-postgres/migrator'; import { Pool as PgPool } from 'pg'; import { seed } from '../../src/index.ts'; import * as schema from './pgSchema.ts'; const { PG_HOST, PG_PORT, PG_DATABASE, PG_USER, PG_PASSWORD } = process.env; const pgPool = new PgPool({ host: PG_HOST, port: Number(PG_PORT) || 5432, database: PG_DATABASE, user: PG_USER, password: PG_PASSWORD, // ssl: true }); const db = drizzle(pgPool); console.log('database connection was established successfully.'); (async () => { await migrate(db, { migrationsFolder: path.join(__dirname, '../../../pgMigrations') }); console.log('database was migrated.'); // await seed(db, schema, { count: 100000, seed: 1 }); const titlesOfCourtesy = ['Ms.', 'Mrs.', 'Dr.']; const unitsOnOrders = [0, 10, 20, 30, 50, 60, 70, 80, 100]; const reorderLevels = [0, 5, 10, 15, 20, 25, 30]; const quantityPerUnit = [ '100 - 100 g pieces', '100 - 250 g bags', '10 - 200 g glasses', '10 - 4 oz boxes', '10 - 500 g pkgs.', '10 - 500 g pkgs.', '10 boxes x 12 pieces', '10 boxes x 20 bags', '10 boxes x 8 pieces', '10 kg pkg.', '10 pkgs.', '12 - 100 g bars', '12 - 100 g pkgs', '12 - 12 oz cans', '12 - 1 lb pkgs.', '12 - 200 ml jars', '12 - 250 g pkgs.', '12 - 355 ml cans', '12 - 500 g pkgs.', '750 cc per bottle', '5 kg pkg.', '50 bags x 30 sausgs.', '500 ml', '500 g', '48 pieces', '48 - 6 oz jars', '4 - 450 g glasses', '36 boxes', '32 - 8 oz bottles', '32 - 500 g boxes', ]; const discounts = [0.05, 0.15, 0.2, 0.25]; await seed(db, schema).refine((funcs) => ({ customers: { count: 10000, columns: { companyName: funcs.companyName({}), contactName: funcs.fullName({}), contactTitle: funcs.jobTitle({}), address: funcs.streetAddress({}), city: funcs.city({}), postalCode: funcs.postcode({}), region: funcs.state({}), country: funcs.country({}), phone: funcs.phoneNumber({ template: '(###) ###-####' }), fax: funcs.phoneNumber({ template: '(###) ###-####' }), }, }, employees: { count: 200, columns: { firstName: funcs.firstName({}), lastName: funcs.lastName({}), title: funcs.jobTitle({}), titleOfCourtesy: funcs.valuesFromArray({ values: titlesOfCourtesy }), birthDate: funcs.date({ minDate: '1990-01-01', maxDate: '2010-12-31' }), hireDate: funcs.date({ minDate: '2010-12-31', maxDate: '2024-08-26' }), address: funcs.streetAddress({}), city: funcs.city({}), postalCode: funcs.postcode({}), country: funcs.country({}), homePhone: funcs.phoneNumber({ template: '(###) ###-####' }), extension: funcs.int({ minValue: 428, maxValue: 5467 }), notes: funcs.loremIpsum({}), }, }, orders: { count: 50000, columns: { shipVia: funcs.int({ minValue: 1, maxValue: 3 }), freight: funcs.number({ minValue: 0, maxValue: 1000, precision: 100 }), shipName: funcs.streetAddress({}), shipCity: funcs.city({}), shipRegion: funcs.state({}), shipPostalCode: funcs.postcode({}), shipCountry: funcs.country({}), }, with: { details: [ { weight: 0.6, count: [1, 2, 3, 4] }, { weight: 0.2, count: [5, 6, 7, 8, 9, 10] }, { weight: 0.15, count: [11, 12, 13, 14, 15, 16, 17] }, { weight: 0.05, count: [18, 19, 20, 21, 22, 23, 24, 25] }, ], }, }, suppliers: { count: 1000, columns: { companyName: funcs.companyName({}), contactName: funcs.fullName({}), contactTitle: funcs.jobTitle({}), address: funcs.streetAddress({}), city: funcs.city({}), postalCode: funcs.postcode({}), region: funcs.state({}), country: funcs.country({}), phone: funcs.phoneNumber({ template: '(###) ###-####' }), }, }, products: { count: 5000, columns: { name: funcs.companyName({}), quantityPerUnit: funcs.valuesFromArray({ values: quantityPerUnit }), unitPrice: funcs.weightedRandom( [ { weight: 0.5, value: funcs.int({ minValue: 3, maxValue: 300 }), }, { weight: 0.5, value: funcs.number({ minValue: 3, maxValue: 300, precision: 100 }), }, ], ), unitsInStock: funcs.int({ minValue: 0, maxValue: 125 }), unitsOnOrder: funcs.valuesFromArray({ values: unitsOnOrders }), reorderLevel: funcs.valuesFromArray({ values: reorderLevels }), discontinued: funcs.int({ minValue: 0, maxValue: 1 }), }, }, details: { columns: { unitPrice: funcs.number({ minValue: 10, maxValue: 130 }), quantity: funcs.int({ minValue: 1, maxValue: 130 }), discount: funcs.weightedRandom( [ { weight: 0.5, value: funcs.valuesFromArray({ values: discounts }) }, { weight: 0.5, value: funcs.default({ defaultValue: 0 }) }, ], ), }, }, })); await pgPool.end(); })().then(); ================================================ FILE: drizzle-seed/tests/northwind/sqliteSchema.ts ================================================ import { foreignKey, integer, numeric, sqliteTable, text } from 'drizzle-orm/sqlite-core'; export const customers = sqliteTable('customer', { id: text('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = sqliteTable( 'employee', { id: integer('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: integer('birth_date', { mode: 'timestamp' }).notNull(), hireDate: integer('hire_date', { mode: 'timestamp' }).notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: integer('extension').notNull(), notes: text('notes').notNull(), reportsTo: integer('reports_to'), photoPath: text('photo_path'), }, (table) => ({ reportsToFk: foreignKey(() => ({ columns: [table.reportsTo], foreignColumns: [table.id], })), }), ); export const orders = sqliteTable('order', { id: integer('id').primaryKey(), orderDate: integer('order_date', { mode: 'timestamp' }).notNull(), requiredDate: integer('required_date', { mode: 'timestamp' }).notNull(), shippedDate: integer('shipped_date', { mode: 'timestamp' }), shipVia: integer('ship_via').notNull(), freight: numeric('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: text('customer_id') .notNull() .references(() => customers.id, { onDelete: 'cascade' }), employeeId: integer('employee_id') .notNull() .references(() => employees.id, { onDelete: 'cascade' }), }); export const suppliers = sqliteTable('supplier', { id: integer('id').primaryKey({ autoIncrement: true }), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }); export const products = sqliteTable('product', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: numeric('unit_price').notNull(), unitsInStock: integer('units_in_stock').notNull(), unitsOnOrder: integer('units_on_order').notNull(), reorderLevel: integer('reorder_level').notNull(), discontinued: integer('discontinued').notNull(), supplierId: integer('supplier_id') .notNull() .references(() => suppliers.id, { onDelete: 'cascade' }), }); export const details = sqliteTable('order_detail', { unitPrice: numeric('unit_price').notNull(), quantity: integer('quantity').notNull(), discount: numeric('discount').notNull(), orderId: integer('order_id') .notNull() .references(() => orders.id, { onDelete: 'cascade' }), productId: integer('product_id') .notNull() .references(() => products.id, { onDelete: 'cascade' }), }); ================================================ FILE: drizzle-seed/tests/northwind/sqliteTest.ts ================================================ import 'dotenv/config'; import path from 'path'; import betterSqlite3 from 'better-sqlite3'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import { migrate } from 'drizzle-orm/better-sqlite3/migrator'; import { seed } from '../../src/index.ts'; import * as schema from './sqliteSchema.ts'; const { Sqlite_PATH } = process.env; const sqliteDb = betterSqlite3(Sqlite_PATH); const db = drizzle(sqliteDb); console.log('database connection was established successfully.'); (async () => { migrate(db, { migrationsFolder: path.join(__dirname, '../../../sqliteMigrations') }); console.log('database was migrated.'); const titlesOfCourtesy = ['Ms.', 'Mrs.', 'Dr.']; const unitsOnOrders = [0, 10, 20, 30, 50, 60, 70, 80, 100]; const reorderLevels = [0, 5, 10, 15, 20, 25, 30]; const quantityPerUnit = [ '100 - 100 g pieces', '100 - 250 g bags', '10 - 200 g glasses', '10 - 4 oz boxes', '10 - 500 g pkgs.', '10 - 500 g pkgs.', '10 boxes x 12 pieces', '10 boxes x 20 bags', '10 boxes x 8 pieces', '10 kg pkg.', '10 pkgs.', '12 - 100 g bars', '12 - 100 g pkgs', '12 - 12 oz cans', '12 - 1 lb pkgs.', '12 - 200 ml jars', '12 - 250 g pkgs.', '12 - 355 ml cans', '12 - 500 g pkgs.', '750 cc per bottle', '5 kg pkg.', '50 bags x 30 sausgs.', '500 ml', '500 g', '48 pieces', '48 - 6 oz jars', '4 - 450 g glasses', '36 boxes', '32 - 8 oz bottles', '32 - 500 g boxes', ]; const discounts = [0.05, 0.15, 0.2, 0.25]; await seed(db, schema).refine((funcs) => ({ customers: { count: 10000, columns: { companyName: funcs.companyName({}), contactName: funcs.fullName({}), contactTitle: funcs.jobTitle({}), address: funcs.streetAddress({}), city: funcs.city({}), postalCode: funcs.postcode({}), region: funcs.state({}), country: funcs.country({}), phone: funcs.phoneNumber({ template: '(###) ###-####' }), fax: funcs.phoneNumber({ template: '(###) ###-####' }), }, }, employees: { count: 200, columns: { firstName: funcs.firstName({}), lastName: funcs.lastName({}), title: funcs.jobTitle({}), titleOfCourtesy: funcs.valuesFromArray({ values: titlesOfCourtesy }), birthDate: funcs.date({ minDate: '1990-01-01', maxDate: '2010-12-31' }), hireDate: funcs.date({ minDate: '2010-12-31', maxDate: '2024-08-26' }), address: funcs.streetAddress({}), city: funcs.city({}), postalCode: funcs.postcode({}), country: funcs.country({}), homePhone: funcs.phoneNumber({ template: '(###) ###-####' }), extension: funcs.int({ minValue: 428, maxValue: 5467 }), notes: funcs.loremIpsum({}), }, }, orders: { count: 50000, columns: { shipVia: funcs.int({ minValue: 1, maxValue: 3 }), freight: funcs.number({ minValue: 0, maxValue: 1000, precision: 100 }), shipName: funcs.streetAddress({}), shipCity: funcs.city({}), shipRegion: funcs.state({}), shipPostalCode: funcs.postcode({}), shipCountry: funcs.country({}), }, with: { details: [ { weight: 0.6, count: [1, 2, 3, 4] }, { weight: 0.2, count: [5, 6, 7, 8, 9, 10] }, { weight: 0.15, count: [11, 12, 13, 14, 15, 16, 17] }, { weight: 0.05, count: [18, 19, 20, 21, 22, 23, 24, 25] }, ], }, }, suppliers: { count: 1000, columns: { companyName: funcs.companyName({}), contactName: funcs.fullName({}), contactTitle: funcs.jobTitle({}), address: funcs.streetAddress({}), city: funcs.city({}), postalCode: funcs.postcode({}), region: funcs.state({}), country: funcs.country({}), phone: funcs.phoneNumber({ template: '(###) ###-####' }), }, }, products: { count: 5000, columns: { name: funcs.companyName({}), quantityPerUnit: funcs.valuesFromArray({ values: quantityPerUnit }), unitPrice: funcs.weightedRandom( [ { weight: 0.5, value: funcs.int({ minValue: 3, maxValue: 300 }), }, { weight: 0.5, value: funcs.number({ minValue: 3, maxValue: 300, precision: 100 }), }, ], ), unitsInStock: funcs.int({ minValue: 0, maxValue: 125 }), unitsOnOrder: funcs.valuesFromArray({ values: unitsOnOrders }), reorderLevel: funcs.valuesFromArray({ values: reorderLevels }), discontinued: funcs.int({ minValue: 0, maxValue: 1 }), }, }, details: { columns: { unitPrice: funcs.number({ minValue: 10, maxValue: 130 }), quantity: funcs.int({ minValue: 1, maxValue: 130 }), discount: funcs.weightedRandom( [ { weight: 0.5, value: funcs.valuesFromArray({ values: discounts }) }, { weight: 0.5, value: funcs.default({ defaultValue: 0 }) }, ], ), }, }, })); })().then(); ================================================ FILE: drizzle-seed/tests/pg/allDataTypesTest/pgSchema.ts ================================================ import { bigint, bigserial, boolean, char, date, decimal, doublePrecision, integer, interval, json, jsonb, line, numeric, pgEnum, pgSchema, point, real, serial, smallint, smallserial, text, time, timestamp, uuid, varchar, } from 'drizzle-orm/pg-core'; export const schema = pgSchema('seeder_lib_pg'); export const moodEnum = pgEnum('mood_enum', ['sad', 'ok', 'happy']); export const allDataTypes = schema.table('all_data_types', { integer: integer('integer'), smallint: smallint('smallint'), biginteger: bigint('bigint', { mode: 'bigint' }), bigintNumber: bigint('bigint_number', { mode: 'number' }), serial: serial('serial'), smallserial: smallserial('smallserial'), bigserial: bigserial('bigserial', { mode: 'bigint' }), bigserialNumber: bigserial('bigserial_number', { mode: 'number' }), boolean: boolean('boolean'), text: text('text'), varchar: varchar('varchar', { length: 256 }), char: char('char', { length: 256 }), numeric: numeric('numeric'), decimal: decimal('decimal'), real: real('real'), doublePrecision: doublePrecision('double_precision'), json: json('json'), jsonb: jsonb('jsonb'), time: time('time'), timestampDate: timestamp('timestamp_date', { mode: 'date' }), timestampString: timestamp('timestamp_string', { mode: 'string' }), dateString: date('date_string', { mode: 'string' }), date: date('date', { mode: 'date' }), interval: interval('interval'), point: point('point', { mode: 'xy' }), pointTuple: point('point_tuple', { mode: 'tuple' }), line: line('line', { mode: 'abc' }), lineTuple: line('line_tuple', { mode: 'tuple' }), moodEnum: moodEnum('mood_enum'), uuid: uuid('uuid'), }); export const allArrayDataTypes = schema.table('all_array_data_types', { integerArray: integer('integer_array').array(), smallintArray: smallint('smallint_array').array(), bigintegerArray: bigint('bigint_array', { mode: 'bigint' }).array(), bigintNumberArray: bigint('bigint_number_array', { mode: 'number' }).array(), booleanArray: boolean('boolean_array').array(), textArray: text('text_array').array(), varcharArray: varchar('varchar_array', { length: 256 }).array(), charArray: char('char_array', { length: 256 }).array(), numericArray: numeric('numeric_array').array(), decimalArray: decimal('decimal_array').array(), realArray: real('real_array').array(), doublePrecisionArray: doublePrecision('double_precision_array').array(), jsonArray: json('json_array').array(), jsonbArray: jsonb('jsonb_array').array(), timeArray: time('time_array').array(), timestampDateArray: timestamp('timestamp_date_array', { mode: 'date' }).array(), timestampStringArray: timestamp('timestamp_string_array', { mode: 'string' }).array(), dateStringArray: date('date_string_array', { mode: 'string' }).array(), dateArray: date('date_array', { mode: 'date' }).array(), intervalArray: interval('interval_array').array(), pointArray: point('point_array', { mode: 'xy' }).array(), pointTupleArray: point('point_tuple_array', { mode: 'tuple' }).array(), lineArray: line('line_array', { mode: 'abc' }).array(), lineTupleArray: line('line_tuple_array', { mode: 'tuple' }).array(), moodEnumArray: moodEnum('mood_enum_array').array(), }); export const ndArrays = schema.table('nd_arrays', { integer1DArray: integer('integer_1d_array').array(3), integer2DArray: integer('integer_2d_array').array(3).array(4), integer3DArray: integer('integer_3d_array').array(3).array(4).array(5), integer4DArray: integer('integer_4d_array').array(3).array(4).array(5).array(6), }); export const intervals = schema.table('intervals', { intervalYear: interval({ fields: 'year' }), intervalYearToMonth: interval({ fields: 'year to month' }), intervalMonth: interval({ fields: 'month' }), intervalDay: interval({ fields: 'day' }), intervalDayToHour: interval({ fields: 'day to hour' }), intervalDayToMinute: interval({ fields: 'day to minute' }), intervalDayToSecond: interval({ fields: 'day to second' }), intervalHour: interval({ fields: 'hour' }), intervalHourToMinute: interval({ fields: 'hour to minute' }), intervalHourToSecond: interval({ fields: 'hour to second' }), intervalMinute: interval({ fields: 'minute' }), intervalMinuteToSecond: interval({ fields: 'minute to second' }), intervalSecond: interval({ fields: 'second' }), }); ================================================ FILE: drizzle-seed/tests/pg/allDataTypesTest/pg_all_data_types.test.ts ================================================ import { PGlite } from '@electric-sql/pglite'; import { sql } from 'drizzle-orm'; import type { PgliteDatabase } from 'drizzle-orm/pglite'; import { drizzle } from 'drizzle-orm/pglite'; import { afterAll, beforeAll, expect, test } from 'vitest'; import { seed } from '../../../src/index.ts'; import * as schema from './pgSchema.ts'; let client: PGlite; let db: PgliteDatabase; beforeAll(async () => { client = new PGlite(); db = drizzle(client); await db.execute(sql`CREATE SCHEMA if not exists "seeder_lib_pg";`); await db.execute( sql` DO $$ BEGIN CREATE TYPE "seeder_lib_pg"."mood_enum" AS ENUM('sad', 'ok', 'happy'); EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."all_data_types" ( "integer" integer, "smallint" smallint, "bigint" bigint, "bigint_number" bigint, "serial" serial, "smallserial" smallserial, "bigserial" bigserial, "bigserial_number" bigserial, "boolean" boolean, "text" text, "varchar" varchar(256), "char" char(256), "numeric" numeric, "decimal" numeric, "real" real, "double_precision" double precision, "json" json, "jsonb" jsonb, "time" time, "timestamp_date" timestamp, "timestamp_string" timestamp, "date_string" date, "date" date, "interval" interval, "point" "point", "point_tuple" "point", "line" "line", "line_tuple" "line", "mood_enum" "seeder_lib_pg"."mood_enum", "uuid" "uuid" ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."all_array_data_types" ( "integer_array" integer[], "smallint_array" smallint[], "bigint_array" bigint[], "bigint_number_array" bigint[], "boolean_array" boolean[], "text_array" text[], "varchar_array" varchar(256)[], "char_array" char(256)[], "numeric_array" numeric[], "decimal_array" numeric[], "real_array" real[], "double_precision_array" double precision[], "json_array" json[], "jsonb_array" jsonb[], "time_array" time[], "timestamp_date_array" timestamp[], "timestamp_string_array" timestamp[], "date_string_array" date[], "date_array" date[], "interval_array" interval[], "point_array" "point"[], "point_tuple_array" "point"[], "line_array" "line"[], "line_tuple_array" "line"[], "mood_enum_array" "seeder_lib_pg"."mood_enum"[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."nd_arrays" ( "integer_1d_array" integer[3], "integer_2d_array" integer[3][4], "integer_3d_array" integer[3][4][5], "integer_4d_array" integer[3][4][5][6] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."intervals" ( "intervalYear" interval year, "intervalYearToMonth" interval year to month, "intervalMonth" interval month, "intervalDay" interval day, "intervalDayToHour" interval day to hour, "intervalDayToMinute" interval day to minute, "intervalDayToSecond" interval day to second, "intervalHour" interval hour, "intervalHourToMinute" interval hour to minute, "intervalHourToSecond" interval hour to second, "intervalMinute" interval minute, "intervalMinuteToSecond" interval minute to second, "intervalSecond" interval second ); `, ); }); afterAll(async () => { await client.close(); }); test('all data types test', async () => { await seed(db, { allDataTypes: schema.allDataTypes }, { count: 10000 }); const allDataTypes = await db.select().from(schema.allDataTypes); // every value in each rows does not equal undefined. const predicate = allDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('all array data types test', async () => { await seed(db, { allArrayDataTypes: schema.allArrayDataTypes }, { count: 1000 }); const allArrayDataTypes = await db.select().from(schema.allArrayDataTypes); // every value in each rows does not equal undefined. const predicate = allArrayDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 10) ); expect(predicate).toBe(true); }); test('nd arrays', async () => { await seed(db, { ndArrays: schema.ndArrays }, { count: 1000 }); const ndArrays = await db.select().from(schema.ndArrays); // every value in each rows does not equal undefined. const predicate0 = ndArrays.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length !== 0) ); let predicate1 = true, predicate2 = true, predicate3 = true, predicate4 = true; for (const row of ndArrays) { predicate1 = predicate1 && (row.integer1DArray?.length === 3); predicate2 = predicate2 && (row.integer2DArray?.length === 4) && (row.integer2DArray[0]?.length === 3); predicate3 = predicate3 && (row.integer3DArray?.length === 5) && (row.integer3DArray[0]?.length === 4) && (row.integer3DArray[0][0]?.length === 3); predicate4 = predicate4 && (row.integer4DArray?.length === 6) && (row.integer4DArray[0]?.length === 5) && (row.integer4DArray[0][0]?.length === 4) && (row.integer4DArray[0][0][0]?.length === 3); } expect(predicate0 && predicate1 && predicate2 && predicate3 && predicate4).toBe(true); }); test('intervals test', async () => { await seed(db, { intervals: schema.intervals }, { count: 1000 }); const intervals = await db.select().from(schema.intervals); // every value in each rows does not equal undefined. const predicate = intervals.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: drizzle-seed/tests/pg/cyclicTables/cyclicTables.test.ts ================================================ import { PGlite } from '@electric-sql/pglite'; import { sql } from 'drizzle-orm'; import type { PgliteDatabase } from 'drizzle-orm/pglite'; import { drizzle } from 'drizzle-orm/pglite'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; import { reset, seed } from '../../../src/index.ts'; import * as schema from './pgSchema.ts'; let client: PGlite; let db: PgliteDatabase; beforeAll(async () => { client = new PGlite(); db = drizzle(client); await db.execute( sql` create table model_image ( id serial primary key, url varchar not null, caption varchar, "modelId" integer not null ); `, ); await db.execute( sql` create table model ( id serial primary key, name varchar not null, "defaultImageId" integer constraint "model_defaultImageId_model_image_id_fk" references model_image ); `, ); await db.execute( sql` alter table model_image add constraint "model_image_modelId_model_id_fk" foreign key ("modelId") references model; `, ); // 3 tables case await db.execute( sql` create table model_image1 ( id serial primary key, url varchar not null, caption varchar, "modelId" integer not null ); `, ); await db.execute( sql` create table "user" ( id serial primary key, name text, "invitedBy" integer constraint "user_invitedBy_user_id_fk" references "user", "imageId" integer not null constraint "user_imageId_model_image1_id_fk" references model_image1 ); `, ); await db.execute( sql` create table model1 ( id serial primary key, name varchar not null, "userId" integer constraint "model1_userId_user_id_fk" references "user", "defaultImageId" integer constraint "model1_defaultImageId_model_image1_id_fk" references model_image1 ); `, ); await db.execute( sql` alter table model_image1 add constraint "model_image1_modelId_model1_id_fk" foreign key ("modelId") references model1; `, ); }); afterEach(async () => { await reset(db, schema); }); afterAll(async () => { await client.close(); }); test('2 cyclic tables test', async () => { await seed(db, { modelTable: schema.modelTable, modelImageTable: schema.modelImageTable, }); const modelTable = await db.select().from(schema.modelTable); const modelImageTable = await db.select().from(schema.modelImageTable); expect(modelTable.length).toBe(10); let predicate = modelTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(modelImageTable.length).toBe(10); predicate = modelImageTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('3 cyclic tables test', async () => { await seed(db, { modelTable1: schema.modelTable1, modelImageTable1: schema.modelImageTable1, user: schema.user, }); const modelTable1 = await db.select().from(schema.modelTable1); const modelImageTable1 = await db.select().from(schema.modelImageTable1); const user = await db.select().from(schema.user); expect(modelTable1.length).toBe(10); let predicate = modelTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(modelImageTable1.length).toBe(10); predicate = modelImageTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(user.length).toBe(10); predicate = user.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: drizzle-seed/tests/pg/cyclicTables/pgSchema.ts ================================================ import { relations } from 'drizzle-orm'; import type { AnyPgColumn } from 'drizzle-orm/pg-core'; import { foreignKey, integer, pgTable, serial, text, varchar } from 'drizzle-orm/pg-core'; // MODEL export const modelTable = pgTable( 'model', { id: serial().primaryKey(), name: varchar().notNull(), defaultImageId: integer(), }, (t) => [ foreignKey({ columns: [t.defaultImageId], foreignColumns: [modelImageTable.id], }), ], ); export const modelRelations = relations(modelTable, ({ one, many }) => ({ images: many(modelImageTable), defaultImage: one(modelImageTable, { fields: [modelTable.defaultImageId], references: [modelImageTable.id], }), })); // MODEL IMAGE export const modelImageTable = pgTable( 'model_image', { id: serial().primaryKey(), url: varchar().notNull(), caption: varchar(), modelId: integer() .notNull() .references((): AnyPgColumn => modelTable.id), }, ); export const modelImageRelations = relations(modelImageTable, ({ one }) => ({ model: one(modelTable, { fields: [modelImageTable.modelId], references: [modelTable.id], }), })); // 3 tables case export const modelTable1 = pgTable( 'model1', { id: serial().primaryKey(), name: varchar().notNull(), userId: integer() .references(() => user.id), defaultImageId: integer(), }, (t) => [ foreignKey({ columns: [t.defaultImageId], foreignColumns: [modelImageTable1.id], }), ], ); export const modelImageTable1 = pgTable( 'model_image1', { id: serial().primaryKey(), url: varchar().notNull(), caption: varchar(), modelId: integer().notNull() .references((): AnyPgColumn => modelTable1.id), }, ); export const user = pgTable( 'user', { id: serial().primaryKey(), name: text(), invitedBy: integer().references((): AnyPgColumn => user.id), imageId: integer() .notNull() .references((): AnyPgColumn => modelImageTable1.id), }, ); ================================================ FILE: drizzle-seed/tests/pg/generatorsTest/generators.test.ts ================================================ import { afterAll, beforeAll, expect, test } from 'vitest'; import { PGlite } from '@electric-sql/pglite'; import type { PgliteDatabase } from 'drizzle-orm/pglite'; import { drizzle } from 'drizzle-orm/pglite'; import { reset, seed } from '../../../src/index.ts'; import * as schema from './pgSchema.ts'; import { sql } from 'drizzle-orm'; import cities from '../../../src/datasets/cityNames.ts'; import countries from '../../../src/datasets/countries.ts'; import firstNames from '../../../src/datasets/firstNames.ts'; import lastNames from '../../../src/datasets/lastNames.ts'; let client: PGlite; let db: PgliteDatabase; beforeAll(async () => { client = new PGlite(); db = drizzle(client); await db.execute(sql`CREATE SCHEMA "seeder_lib_pg";`); await db.execute( sql` DO $$ BEGIN CREATE TYPE "seeder_lib_pg"."enum" AS ENUM('sad', 'ok', 'happy'); EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."default_table" ( "default_string" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."default_array_table" ( "default_string" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."boolean_table" ( "boolean" boolean ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."boolean_array_table" ( "boolean" boolean[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."city_table" ( "city" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."city_unique_table" ( "city_unique" varchar(256), CONSTRAINT "city_unique_table_city_unique_unique" UNIQUE("city_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."city_array_table" ( "city" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."company_name_table" ( "company_name" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."company_name_unique_table" ( "company_name_unique" varchar(256), CONSTRAINT "company_name_unique_table_company_name_unique_unique" UNIQUE("company_name_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."company_name_array_table" ( "company_name" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."country_table" ( "country" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."country_unique_table" ( "country_unique" varchar(256), CONSTRAINT "country_unique_table_country_unique_unique" UNIQUE("country_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."country_array_table" ( "country" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."date_table" ( "date" date ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."date_array_table" ( "date" date[], "date_string" date[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."email_table" ( "email" varchar(256), CONSTRAINT "email_table_email_unique" UNIQUE("email") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."email_array_table" ( "email" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."enum_table" ( "mood_enum" "seeder_lib_pg"."enum" ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."first_name_table" ( "first_name" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."first_name_unique_table" ( "first_name_unique" varchar(256), CONSTRAINT "first_name_unique_table_first_name_unique_unique" UNIQUE("first_name_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."first_name_array_table" ( "first_name" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."full_name__table" ( "full_name_" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."full_name_unique_table" ( "full_name_unique" varchar(256), CONSTRAINT "full_name_unique_table_full_name_unique_unique" UNIQUE("full_name_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."full_name_array_table" ( "full_name" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."int_primary_key_table" ( "int_primary_key" integer, CONSTRAINT "int_primary_key_table_int_primary_key_unique" UNIQUE("int_primary_key") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."int_table" ( "int" integer ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."int_unique_table" ( "int_unique" integer, CONSTRAINT "int_unique_table_int_unique_unique" UNIQUE("int_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."int_array_table" ( "int" integer[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."interval_table" ( "interval" interval ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."interval_unique_table" ( "interval_unique" interval, CONSTRAINT "interval_unique_table_interval_unique_unique" UNIQUE("interval_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."interval_array_table" ( "interval" interval[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."job_Title_table" ( "job_title" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."job_title_array_table" ( "job_title" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."json_table" ( "json" json ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."json_array_table" ( "json" json[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."last_name_table" ( "last_name" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."last_name_unique_table" ( "last_name_unique" varchar(256), CONSTRAINT "last_name_unique_table_last_name_unique_unique" UNIQUE("last_name_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."last_name_array_table" ( "last_name" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."line_table" ( "line" "line" ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."line_array_table" ( "line" "line"[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."lorem_ipsum_table" ( "lorem_ipsum" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."lorem_ipsum_array_table" ( "lorem_ipsum" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."number_table" ( "number" real ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."number_unique_table" ( "number_unique" real, CONSTRAINT "number_unique_table_number_unique_unique" UNIQUE("number_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."number_array_table" ( "number" real[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."phone_number_table" ( "phoneNumber" varchar(256), "phone_number_template" varchar(256), "phone_number_prefixes" varchar(256), CONSTRAINT "phone_number_table_phoneNumber_unique" UNIQUE("phoneNumber"), CONSTRAINT "phone_number_table_phone_number_template_unique" UNIQUE("phone_number_template"), CONSTRAINT "phone_number_table_phone_number_prefixes_unique" UNIQUE("phone_number_prefixes") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."phone_number_array_table" ( "phoneNumber" varchar(256)[], "phone_number_template" varchar(256)[], "phone_number_prefixes" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."point_table" ( "point" "point" ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."point_array_table" ( "point" "point"[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."postcode_table" ( "postcode" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."postcode_unique_table" ( "postcode_unique" varchar(256), CONSTRAINT "postcode_unique_table_postcode_unique_unique" UNIQUE("postcode_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."postcode_array_table" ( "postcode" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."state_table" ( "state" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."state_array_table" ( "state" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."street_address_table" ( "street_address" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."street_address_unique_table" ( "street_address_unique" varchar(256), CONSTRAINT "street_address_unique_table_street_address_unique_unique" UNIQUE("street_address_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."street_address_array_table" ( "street_address" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."string_table" ( "string" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."string_unique_table" ( "string_unique" varchar(256), CONSTRAINT "string_unique_table_string_unique_unique" UNIQUE("string_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."string_array_table" ( "string" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."time_table" ( "time" time ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."time_array_table" ( "time" time[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."timestamp_table" ( "timestamp" timestamp ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."timestamp_array_table" ( "timestamp" timestamp[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."values_from_array_table" ( "values_from_array_not_null" varchar(256) NOT NULL, "values_from_array_weighted_not_null" varchar(256) NOT NULL ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."values_from_array_unique_table" ( "values_from_array" varchar(256), "values_from_array_not_null" varchar(256) NOT NULL, "values_from_array_weighted" varchar(256), "values_from_array_weighted_not_null" varchar(256) NOT NULL, CONSTRAINT "values_from_array_unique_table_values_from_array_unique" UNIQUE("values_from_array"), CONSTRAINT "values_from_array_unique_table_values_from_array_not_null_unique" UNIQUE("values_from_array_not_null"), CONSTRAINT "values_from_array_unique_table_values_from_array_weighted_unique" UNIQUE("values_from_array_weighted"), CONSTRAINT "values_from_array_unique_table_values_from_array_weighted_not_null_unique" UNIQUE("values_from_array_weighted_not_null") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."values_from_array_array_table" ( "values_from_array" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."weighted_random_table" ( "weighted_random" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."weighted_random_with_unique_gens_table" ( "weighted_random_with_unique_gens" varchar(256), CONSTRAINT "weighted_random_with_unique_gens_table_weighted_random_with_unique_gens_unique" UNIQUE("weighted_random_with_unique_gens") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."uuid_table" ( "uuid" uuid ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."uuid_array_table" ( "uuid" uuid[] ); `, ); }); afterAll(async () => { await client.close(); }); const count = 1000; test('enum generator test', async () => { await seed(db, { enumTable: schema.enumTable }).refine(() => ({ enumTable: { count, }, })); const data = await db.select().from(schema.enumTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('default generator test', async () => { await seed(db, { defaultTable: schema.defaultTable }).refine((funcs) => ({ defaultTable: { count, columns: { defaultString: funcs.default({ defaultValue: 'default string' }), }, }, })); const data = await db.select().from(schema.defaultTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('default array generator test', async () => { await seed(db, { defaultTable: schema.defaultArrayTable }).refine((funcs) => ({ defaultTable: { count, columns: { defaultString: funcs.default({ defaultValue: 'default string', arraySize: 3 }), }, }, })); const data = await db.select().from(schema.defaultArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('valuesFromArray generator test', async () => { await seed(db, { valuesFromArrayTable: schema.valuesFromArrayTable }).refine((funcs) => ({ valuesFromArrayTable: { count, columns: { valuesFromArrayNotNull: funcs.valuesFromArray({ values: lastNames }), valuesFromArrayWeightedNotNull: funcs.valuesFromArray({ values: [ { values: lastNames, weight: 0.3 }, { values: firstNames, weight: 0.7 }, ], }), }, }, })); const data = await db.select().from(schema.valuesFromArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('valuesFromArray unique generator test', async () => { // valuesFromArrayUniqueTable----------------------------------------------------------------------------------- await seed(db, { valuesFromArrayUniqueTable: schema.valuesFromArrayUniqueTable }, { seed: 1 }).refine((funcs) => ({ valuesFromArrayUniqueTable: { count: 49998, columns: { valuesFromArray: funcs.valuesFromArray({ values: lastNames.slice(0, 20), isUnique: true }), valuesFromArrayNotNull: funcs.valuesFromArray({ values: lastNames, isUnique: true }), valuesFromArrayWeighted: funcs.valuesFromArray({ values: [ { values: lastNames.slice(0, 20000), weight: 0.3 }, { values: lastNames.slice(20000), weight: 0.7 }, ], isUnique: true, }), valuesFromArrayWeightedNotNull: funcs.valuesFromArray({ values: [ { values: lastNames.slice(0, 14920), weight: 0.3 }, { values: lastNames.slice(14920), weight: 0.7 }, ], isUnique: true, }), }, }, })); const data = await db.select().from(schema.valuesFromArrayUniqueTable); // console.log(valuesFromArrayUniqueTableData); const predicate = data.length !== 0 && data.every((row) => row['valuesFromArrayWeightedNotNull'] !== null && row['valuesFromArrayNotNull'] !== null ); expect(predicate).toBe(true); await expect( seed(db, { valuesFromArrayUniqueTable: schema.valuesFromArrayUniqueTable }).refine((funcs) => ({ valuesFromArrayUniqueTable: { count: 49998, columns: { valuesFromArrayWeightedNotNull: funcs.valuesFromArray({ values: [ { values: lastNames.slice(0, 20000), weight: 0.3 }, { values: lastNames.slice(20000), weight: 0.7 }, ], isUnique: true, }), }, }, })), ).rejects.toThrow( /^weighted values arrays is too small to generate values with specified probability for unique not null column\..+/, ); await expect( seed(db, { valuesFromArrayUniqueTable: schema.valuesFromArrayUniqueTable }).refine((funcs) => ({ valuesFromArrayUniqueTable: { count: 49998, columns: { valuesFromArrayNotNull: funcs.valuesFromArray({ values: lastNames.slice(20), isUnique: true, }), }, }, })), ).rejects.toThrow('There are no enough values to fill unique column.'); await expect( seed(db, { valuesFromArrayUniqueTable: schema.valuesFromArrayUniqueTable }, { seed: 1 }).refine((funcs) => ({ valuesFromArrayUniqueTable: { count: 49999, columns: { valuesFromArrayNotNull: funcs.valuesFromArray({ values: lastNames, isUnique: true, }), valuesFromArrayWeightedNotNull: funcs.valuesFromArray({ values: [ { values: lastNames.slice(0, 14854), weight: 0.3 }, { values: lastNames.slice(14854), weight: 0.7 }, ], isUnique: true, }), }, }, })), ).rejects.toThrow('There are no enough values to fill unique column.'); }); test('valuesFromArray array generator test', async () => { await seed(db, { valuesFromArrayTable: schema.valuesFromArrayArrayTable }).refine((funcs) => ({ valuesFromArrayTable: { count, columns: { valuesFromArray: funcs.valuesFromArray({ values: lastNames, arraySize: 3 }), }, }, })); const data = await db.select().from(schema.valuesFromArrayArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('intPrimaryKey generator test', async () => { await seed(db, { intPrimaryKeyTable: schema.intPrimaryKeyTable }).refine((funcs) => ({ intPrimaryKeyTable: { count, columns: { intPrimaryKey: funcs.intPrimaryKey(), }, }, })); const data = await db.select().from(schema.intPrimaryKeyTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('number generator test', async () => { await seed(db, { numberTable: schema.numberTable }).refine((funcs) => ({ numberTable: { count, columns: { number: funcs.number(), }, }, })); const data = await db.select().from(schema.numberTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('number unique generator test', async () => { // numberUniqueTable----------------------------------------------------------------------------------- await seed(db, { numberUniqueTable: schema.numberUniqueTable }).refine((funcs) => ({ numberUniqueTable: { count: 20070, columns: { numberUnique: funcs.number({ isUnique: true, minValue: -100.23, maxValue: 100.46 }), }, }, })); const data = await db.select().from(schema.numberUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val >= -100.23 && val <= 100.46) ); expect(predicate).toBe(true); await expect( seed(db, { numberUniqueTable: schema.numberUniqueTable }).refine((funcs) => ({ numberUniqueTable: { count: 20071, columns: { numberUnique: funcs.number({ isUnique: true, minValue: -100.23, maxValue: 100.46 }), }, }, })), ).rejects.toThrow('count exceeds max number of unique integers in given range(min, max), try to make range wider.'); }); test('number array generator test', async () => { await seed(db, { numberTable: schema.numberArrayTable }).refine((funcs) => ({ numberTable: { count, columns: { number: funcs.number({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.numberArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('int generator test', async () => { await seed(db, { intTable: schema.intTable }).refine((funcs) => ({ intTable: { count, columns: { int: funcs.int(), }, }, })); const data = await db.select().from(schema.intTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('int unique generator test', async () => { // intUniqueTable----------------------------------------------------------------------------------- await seed(db, { intUniqueTable: schema.intUniqueTable }).refine((funcs) => ({ intUniqueTable: { count: 201, columns: { intUnique: funcs.int({ isUnique: true, minValue: -100, maxValue: 100 }), }, }, })); const data = await db.select().from(schema.intUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { intUniqueTable: schema.intUniqueTable }).refine((funcs) => ({ intUniqueTable: { count: 202, columns: { intUnique: funcs.int({ isUnique: true, minValue: -100, maxValue: 100 }), }, }, })), ).rejects.toThrow('count exceeds max number of unique integers in given range(min, max), try to make range wider.'); }); test('int array generator test', async () => { await seed(db, { intTable: schema.intArrayTable }).refine((funcs) => ({ intTable: { count, columns: { int: funcs.int({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.intArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('boolean generator test', async () => { await seed(db, { booleanTable: schema.booleanTable }).refine((funcs) => ({ booleanTable: { count, columns: { boolean: funcs.boolean(), }, }, })); const data = await db.select().from(schema.booleanTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('boolean array generator test', async () => { await seed(db, { booleanTable: schema.booleanArrayTable }).refine((funcs) => ({ booleanTable: { count, columns: { boolean: funcs.boolean({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.booleanArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('date generator test', async () => { await seed(db, { dateTable: schema.dateTable }).refine((funcs) => ({ dateTable: { count, columns: { date: funcs.date(), }, }, })); const data = await db.select().from(schema.dateTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('date array generator test', async () => { await seed(db, { dateTable: schema.dateArrayTable }).refine((funcs) => ({ dateTable: { count, columns: { date: funcs.date({ arraySize: 3 }), dateString: funcs.date({ arraySize: 4 }), }, }, })); const data = await db.select().from(schema.dateArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && [3, 4].includes(val.length)) ); expect(predicate).toBe(true); }); test('time generator test', async () => { await seed(db, { timeTable: schema.timeTable }).refine((funcs) => ({ timeTable: { count, columns: { time: funcs.time(), }, }, })); const data = await db.select().from(schema.timeTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('time array generator test', async () => { await seed(db, { timeTable: schema.timeArrayTable }).refine((funcs) => ({ timeTable: { count, columns: { time: funcs.time({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.timeArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('timestamp generator test', async () => { await seed(db, { timestampTable: schema.timestampTable }).refine((funcs) => ({ timestampTable: { count, columns: { timestamp: funcs.timestamp(), }, }, })); const data = await db.select().from(schema.timestampTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('timestamp array generator test', async () => { await seed(db, { timestampTable: schema.timestampArrayTable }).refine((funcs) => ({ timestampTable: { count, columns: { timestamp: funcs.timestamp({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.timestampArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('json generator test', async () => { await seed(db, { jsonTable: schema.jsonTable }).refine((funcs) => ({ jsonTable: { count, columns: { json: funcs.json(), }, }, })); const data = await db.select().from(schema.jsonTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('json array generator test', async () => { await seed(db, { jsonTable: schema.jsonArrayTable }).refine((funcs) => ({ jsonTable: { count, columns: { json: funcs.json({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.jsonArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('interval generator test', async () => { await seed(db, { intervalTable: schema.intervalTable }).refine((funcs) => ({ intervalTable: { count, columns: { interval: funcs.interval(), }, }, })); const data = await db.select().from(schema.intervalTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('interval unique generator test', async () => { // intervalUniqueTable----------------------------------------------------------------------------------- await seed(db, { intervalUniqueTable: schema.intervalUniqueTable }).refine((funcs) => ({ intervalUniqueTable: { count, columns: { intervalUnique: funcs.interval({ isUnique: true }), }, }, })); const data = await db.select().from(schema.intervalUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('interval array generator test', async () => { await seed(db, { intervalTable: schema.intervalArrayTable }).refine((funcs) => ({ intervalTable: { count, columns: { interval: funcs.interval({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.intervalArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('string generator test', async () => { await seed(db, { stringTable: schema.stringTable }).refine((funcs) => ({ stringTable: { count, columns: { string: funcs.string(), }, }, })); const data = await db.select().from(schema.stringTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('string unique generator test', async () => { await seed(db, { stringUniqueTable: schema.stringUniqueTable }).refine((funcs) => ({ stringUniqueTable: { count, columns: { stringUnique: funcs.string({ isUnique: true }), }, }, })); const data = await db.select().from(schema.stringUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('string array generator test', async () => { await seed(db, { stringTable: schema.stringArrayTable }).refine((funcs) => ({ stringTable: { count, columns: { string: funcs.string({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.stringArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('email generator test', async () => { await seed(db, { emailTable: schema.emailTable }).refine((funcs) => ({ emailTable: { count, columns: { email: funcs.email(), }, }, })); const data = await db.select().from(schema.emailTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('email array generator test', async () => { await seed(db, { emailTable: schema.emailArrayTable }).refine((funcs) => ({ emailTable: { count, columns: { email: funcs.email({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.emailArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('firstName generator test', async () => { await seed(db, { firstNameTable: schema.firstNameTable }).refine((funcs) => ({ firstNameTable: { count, columns: { firstName: funcs.firstName(), }, }, })); const data = await db.select().from(schema.firstNameTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('firstName unique generator test', async () => { // firstNameUniqueTable----------------------------------------------------------------------------------- await seed(db, { firstNameUniqueTable: schema.firstNameUniqueTable }).refine((funcs) => ({ firstNameUniqueTable: { count: 30274, columns: { firstNameUnique: funcs.firstName({ isUnique: true }), }, }, })); const data = await db.select().from(schema.firstNameUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { firstNameUniqueTable: schema.firstNameUniqueTable }, { count: 30275 }).refine((funcs) => ({ firstNameUniqueTable: { count: 30275, columns: { firstNameUnique: funcs.firstName({ isUnique: true }), }, }, })), ).rejects.toThrow('count exceeds max number of unique first names.'); }); test('firstName array generator test', async () => { await seed(db, { firstNameTable: schema.firstNameArrayTable }).refine((funcs) => ({ firstNameTable: { count, columns: { firstName: funcs.firstName({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.firstNameArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('lastName generator test', async () => { await seed(db, { lastNameTable: schema.lastNameTable }).refine((funcs) => ({ lastNameTable: { count, columns: { lastName: funcs.lastName(), }, }, })); const data = await db.select().from(schema.lastNameTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('lastName unique generator test', async () => { // lastNameUniqueTable----------------------------------------------------------------------------------- await seed(db, { lastNameUniqueTable: schema.lastNameUniqueTable }).refine((funcs) => ({ lastNameUniqueTable: { count: 49998, columns: { lastNameUnique: funcs.lastName({ isUnique: true }), }, }, })); const data = await db.select().from(schema.lastNameUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { lastNameUniqueTable: schema.lastNameUniqueTable }).refine((funcs) => ({ lastNameUniqueTable: { count: 49999, columns: { lastNameUnique: funcs.lastName({ isUnique: true }), }, }, })), ).rejects.toThrow('count exceeds max number of unique last names.'); }); test('lastName array generator test', async () => { await seed(db, { lastNameTable: schema.lastNameArrayTable }).refine((funcs) => ({ lastNameTable: { count, columns: { lastName: funcs.lastName({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.lastNameArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('fullName generator test', async () => { await seed(db, { fullNameTable: schema.fullNameTable }).refine((funcs) => ({ fullNameTable: { count, columns: { fullName: funcs.fullName(), }, }, })); const data = await db.select().from(schema.fullNameTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('fullName unique generator test', async () => { // fullNameUniqueTable----------------------------------------------------------------------------------- await seed(db, { fullNameUniqueTable: schema.fullNameUniqueTable }).refine((funcs) => ({ fullNameUniqueTable: { count, columns: { fullNameUnique: funcs.fullName({ isUnique: true }), }, }, })); const data = await db.select().from(schema.fullNameUniqueTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('fullName array generator test', async () => { await seed(db, { fullNameTable: schema.fullNameArrayTable }).refine((funcs) => ({ fullNameTable: { count, columns: { fullName: funcs.fullName({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.fullNameArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('country generator test', async () => { await seed(db, { countryTable: schema.countryTable }).refine((funcs) => ({ countryTable: { count, columns: { country: funcs.country(), }, }, })); const data = await db.select().from(schema.countryTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('country unique generator test', async () => { // countryUniqueTable----------------------------------------------------------------------------------- await seed(db, { countryUniqueTable: schema.countryUniqueTable }).refine((funcs) => ({ countryUniqueTable: { count: countries.length, columns: { countryUnique: funcs.country({ isUnique: true }), }, }, })); const data = await db.select().from(schema.countryUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { countryUniqueTable: schema.countryUniqueTable }).refine((funcs) => ({ countryUniqueTable: { count: countries.length + 1, columns: { countryUnique: funcs.country({ isUnique: true }), }, }, })), ).rejects.toThrow('count exceeds max number of unique countries.'); }); test('country array generator test', async () => { await seed(db, { countryTable: schema.countryArrayTable }).refine((funcs) => ({ countryTable: { count, columns: { country: funcs.country({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.countryArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('city generator test', async () => { await seed(db, { cityTable: schema.cityTable }).refine((funcs) => ({ cityTable: { count, columns: { city: funcs.city(), }, }, })); const data = await db.select().from(schema.cityTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('city unique generator test', async () => { // cityUniqueTable----------------------------------------------------------------------------------- await reset(db, { cityUniqueTable: schema.cityUniqueTable }); await seed(db, { cityUniqueTable: schema.cityUniqueTable }).refine((funcs) => ({ cityUniqueTable: { count: cities.length, columns: { cityUnique: funcs.city({ isUnique: true }), }, }, })); const data = await db.select().from(schema.cityUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { cityUniqueTable: schema.cityUniqueTable }).refine((funcs) => ({ cityUniqueTable: { count: cities.length + 1, columns: { cityUnique: funcs.city({ isUnique: true }), }, }, })), ).rejects.toThrow('count exceeds max number of unique cities.'); }); test('city array generator test', async () => { await seed(db, { cityTable: schema.cityArrayTable }).refine((funcs) => ({ cityTable: { count, columns: { city: funcs.city({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.cityArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('streetAddress generator test', async () => { await seed(db, { streetAddressTable: schema.streetAddressTable }).refine((funcs) => ({ streetAddressTable: { count, columns: { streetAddress: funcs.streetAddress(), }, }, })); const data = await db.select().from(schema.streetAddressTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('streetAddress unique generator test', async () => { await seed(db, { streetAddressUniqueTable: schema.streetAddressUniqueTable }).refine((funcs) => ({ streetAddressUniqueTable: { count, columns: { streetAddressUnique: funcs.streetAddress({ isUnique: true }), }, }, })); const data = await db.select().from(schema.streetAddressUniqueTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('streetAddress array generator test', async () => { await seed(db, { streetAddressTable: schema.streetAddressArrayTable }).refine((funcs) => ({ streetAddressTable: { count, columns: { streetAddress: funcs.streetAddress({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.streetAddressArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('jobTitle generator test', async () => { await seed(db, { jobTitleTable: schema.jobTitleTable }).refine((funcs) => ({ jobTitleTable: { count, columns: { jobTitle: funcs.jobTitle(), }, }, })); const data = await db.select().from(schema.jobTitleTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('jobTitle array generator test', async () => { await seed(db, { jobTitleTable: schema.jobTitleArrayTable }).refine((funcs) => ({ jobTitleTable: { count, columns: { jobTitle: funcs.jobTitle({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.jobTitleArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('postcode generator test', async () => { await seed(db, { postcodeTable: schema.postcodeTable }).refine((funcs) => ({ postcodeTable: { count, columns: { postcode: funcs.postcode(), }, }, })); const data = await db.select().from(schema.postcodeTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('postcode unique generator test', async () => { await seed(db, { postcodeUniqueTable: schema.postcodeUniqueTable }).refine((funcs) => ({ postcodeUniqueTable: { count, columns: { postcodeUnique: funcs.postcode({ isUnique: true }), }, }, })); const data = await db.select().from(schema.postcodeUniqueTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('postcode array generator test', async () => { await seed(db, { postcodeTable: schema.postcodeArrayTable }).refine((funcs) => ({ postcodeTable: { count, columns: { postcode: funcs.postcode({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.postcodeArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('state generator test', async () => { await seed(db, { stateTable: schema.stateTable }).refine((funcs) => ({ stateTable: { count, columns: { state: funcs.state(), }, }, })); const data = await db.select().from(schema.stateTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('state array generator test', async () => { await seed(db, { stateTable: schema.stateArrayTable }).refine((funcs) => ({ stateTable: { count, columns: { state: funcs.state({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.stateArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('companyName generator test', async () => { await seed(db, { companyNameTable: schema.companyNameTable }).refine((funcs) => ({ companyNameTable: { count, columns: { companyName: funcs.companyName(), }, }, })); const data = await db.select().from(schema.companyNameTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('companyName unique generator test', async () => { await seed(db, { companyNameUniqueTable: schema.companyNameUniqueTable }).refine((funcs) => ({ companyNameUniqueTable: { count, columns: { companyNameUnique: funcs.companyName({ isUnique: true }), }, }, })); const data = await db.select().from(schema.companyNameUniqueTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('companyName array generator test', async () => { await seed(db, { companyNameTable: schema.companyNameArrayTable }).refine((funcs) => ({ companyNameTable: { count, columns: { companyName: funcs.companyName({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.companyNameArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('loremIpsum generator test', async () => { await seed(db, { loremIpsumTable: schema.loremIpsumTable }).refine((funcs) => ({ loremIpsumTable: { count, columns: { loremIpsum: funcs.loremIpsum(), }, }, })); const data = await db.select().from(schema.loremIpsumTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('loremIpsum array generator test', async () => { await seed(db, { loremIpsumTable: schema.loremIpsumArrayTable }).refine((funcs) => ({ loremIpsumTable: { count, columns: { loremIpsum: funcs.loremIpsum({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.loremIpsumArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('point generator test', async () => { await seed(db, { pointTable: schema.pointTable }).refine((funcs) => ({ pointTable: { count, columns: { point: funcs.point(), }, }, })); const data = await db.select().from(schema.pointTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('point unique generator test', async () => { await reset(db, { pointTable: schema.pointTable }); await seed(db, { pointTable: schema.pointTable }).refine((funcs) => ({ pointTable: { count, columns: { point: funcs.point({ isUnique: true }), }, }, })); const data = await db.select().from(schema.pointTable); // every value in each row does not equal undefined. let predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); // using Set because PGlite does not support unique point const pointStrsSet = new Set(data.map((row) => row.point!.map(String).join(','))); predicate = pointStrsSet.size === data.length; expect(predicate).toBe(true); }); test('point array generator test', async () => { await seed(db, { pointTable: schema.pointArrayTable }).refine((funcs) => ({ pointTable: { count, columns: { point: funcs.point({ arraySize: 2 }), }, }, })); const data = await db.select().from(schema.pointArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 2)); expect(predicate).toBe(true); }); test('line generator test', async () => { await seed(db, { lineTable: schema.lineTable }).refine((funcs) => ({ lineTable: { count, columns: { line: funcs.line(), }, }, })); const data = await db.select().from(schema.lineTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('line unique generator test', async () => { await reset(db, { lineTable: schema.lineTable }); await seed(db, { lineTable: schema.lineTable }).refine((funcs) => ({ lineTable: { count, columns: { line: funcs.line({ isUnique: true }), }, }, })); const data = await db.select().from(schema.lineTable); // every value in each row does not equal undefined. let predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); // using Set because PGlite does not support unique point const lineStrsSet = new Set(data.map((row) => row.line!.map(String).join(','))); predicate = lineStrsSet.size === data.length; expect(predicate).toBe(true); }); test('line array generator test', async () => { await seed(db, { lineTable: schema.lineArrayTable }).refine((funcs) => ({ lineTable: { count, columns: { line: funcs.line({ arraySize: 2 }), }, }, })); const data = await db.select().from(schema.lineArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 2)); expect(predicate).toBe(true); }); test('phoneNumber generator test', async () => { await seed(db, { phoneNumberTable: schema.phoneNumberTable }).refine((funcs) => ({ phoneNumberTable: { count, columns: { phoneNumber: funcs.phoneNumber(), phoneNumberPrefixes: funcs.phoneNumber({ prefixes: ['+380 99', '+380 67', '+1'], generatedDigitsNumbers: [7, 7, 10], }), phoneNumberTemplate: funcs.phoneNumber({ template: '+380 ## ## ### ##' }), }, }, })); const data = await db.select().from(schema.phoneNumberTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('phoneNumber array generator test', async () => { await seed(db, { phoneNumberTable: schema.phoneNumberArrayTable }).refine((funcs) => ({ phoneNumberTable: { count, columns: { phoneNumber: funcs.phoneNumber({ arraySize: 3 }), phoneNumberPrefixes: funcs.phoneNumber({ prefixes: ['+380 99', '+380 67', '+1'], generatedDigitsNumbers: [7, 7, 10], arraySize: 4, }), phoneNumberTemplate: funcs.phoneNumber({ template: '+380 ## ## ### ##', arraySize: 5, }), }, }, })); const data = await db.select().from(schema.phoneNumberArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && [3, 4, 5].includes(val.length)) ); expect(predicate).toBe(true); }); test('weightedRandom generator test', async () => { await seed(db, { weightedRandomTable: schema.weightedRandomTable }).refine((funcs) => ({ weightedRandomTable: { count, columns: { weightedRandom: funcs.weightedRandom([ { value: funcs.default({ defaultValue: 'default value' }), weight: 0.3 }, { value: funcs.loremIpsum(), weight: 0.7 }, ]), }, }, })); const data = await db.select().from(schema.weightedRandomTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('weightedRandom with unique gens generator test', async () => { await seed(db, { weightedRandomWithUniqueGensTable: schema.weightedRandomWithUniqueGensTable }).refine((funcs) => ({ weightedRandomWithUniqueGensTable: { count: 10000, columns: { weightedRandomWithUniqueGens: funcs.weightedRandom([ { weight: 0.3, value: funcs.email() }, { weight: 0.7, value: funcs.firstName({ isUnique: true }) }, ]), }, }, })); const data = await db.select().from(schema.weightedRandomWithUniqueGensTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { weightedRandomWithUniqueGensTable: schema.weightedRandomWithUniqueGensTable }).refine((funcs) => ({ weightedRandomWithUniqueGensTable: { count: 40000, columns: { weightedRandomWithUniqueGens: funcs.weightedRandom([ { weight: 0.1, value: funcs.email() }, { weight: 0.9, value: funcs.firstName({ isUnique: true }) }, ]), }, }, })), ).rejects.toThrow('count exceeds max number of unique first names.'); await expect( seed(db, { weightedRandomWithUniqueGensTable: schema.weightedRandomWithUniqueGensTable }).refine((funcs) => ({ weightedRandomWithUniqueGensTable: { count: 10000, columns: { weightedRandomWithUniqueGens: funcs.weightedRandom([ { weight: 0.2, value: funcs.email() }, { weight: 0.9, value: funcs.firstName({ isUnique: true }) }, ]), }, }, })), ).rejects.toThrow( 'The weights for the Weighted Random feature must add up to exactly 1. Please review your weights to ensure they total 1 before proceeding', ); }); test('uuid generator test', async () => { await reset(db, { uuidTable: schema.uuidTable }); await seed(db, { uuidTable: schema.uuidTable }).refine((funcs) => ({ uuidTable: { count, columns: { uuid: funcs.uuid(), }, }, })); const data = await db.select().from(schema.uuidTable); // every value in each row does not equal undefined. let predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); const uuidStrsSet = new Set(data.map((row) => row.uuid!)); predicate = uuidStrsSet.size === data.length; expect(predicate).toBe(true); }); test('uuid array generator test', async () => { await reset(db, { uuidArrayTable: schema.uuidArrayTable }); await seed(db, { uuidArrayTable: schema.uuidArrayTable }).refine((funcs) => ({ uuidArrayTable: { count, columns: { uuid: funcs.uuid({ arraySize: 4 }), }, }, })); const data = await db.select().from(schema.uuidArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: drizzle-seed/tests/pg/generatorsTest/pgSchema.ts ================================================ import { boolean, date, integer, interval, json, line, pgSchema, point, real, text, time, timestamp, uuid, varchar, } from 'drizzle-orm/pg-core'; export const schema = pgSchema('seeder_lib_pg'); export const moodEnum = schema.enum('enum', ['sad', 'ok', 'happy']); export const enumTable = schema.table('enum_table', { mood: moodEnum('mood_enum'), }); export const defaultTable = schema.table('default_table', { defaultString: text('default_string'), }); export const defaultArrayTable = schema.table('default_array_table', { defaultString: text('default_string').array(), }); export const valuesFromArrayTable = schema.table('values_from_array_table', { valuesFromArrayNotNull: varchar('values_from_array_not_null', { length: 256 }).notNull(), valuesFromArrayWeightedNotNull: varchar('values_from_array_weighted_not_null', { length: 256 }).notNull(), }); export const valuesFromArrayUniqueTable = schema.table('values_from_array_unique_table', { valuesFromArray: varchar('values_from_array', { length: 256 }).unique(), valuesFromArrayNotNull: varchar('values_from_array_not_null', { length: 256 }).unique().notNull(), valuesFromArrayWeighted: varchar('values_from_array_weighted', { length: 256 }).unique(), valuesFromArrayWeightedNotNull: varchar('values_from_array_weighted_not_null', { length: 256 }).unique().notNull(), }); export const valuesFromArrayArrayTable = schema.table('values_from_array_array_table', { valuesFromArray: varchar('values_from_array', { length: 256 }).array(), }); export const intPrimaryKeyTable = schema.table('int_primary_key_table', { intPrimaryKey: integer('int_primary_key').unique(), }); export const numberTable = schema.table('number_table', { number: real('number'), }); export const numberUniqueTable = schema.table('number_unique_table', { numberUnique: real('number_unique').unique(), }); export const numberArrayTable = schema.table('number_array_table', { number: real('number').array(), }); export const intTable = schema.table('int_table', { int: integer('int'), }); export const intUniqueTable = schema.table('int_unique_table', { intUnique: integer('int_unique').unique(), }); export const intArrayTable = schema.table('int_array_table', { int: integer('int').array(), }); export const booleanTable = schema.table('boolean_table', { boolean: boolean('boolean'), }); export const booleanArrayTable = schema.table('boolean_array_table', { boolean: boolean('boolean').array(), }); export const dateTable = schema.table('date_table', { date: date('date'), }); // TODO: add tests for data type with different modes export const dateArrayTable = schema.table('date_array_table', { date: date('date', { mode: 'date' }).array(), dateString: date('date_string', { mode: 'string' }).array(), }); export const timeTable = schema.table('time_table', { time: time('time'), }); export const timeArrayTable = schema.table('time_array_table', { time: time('time').array(), }); export const timestampTable = schema.table('timestamp_table', { timestamp: timestamp('timestamp'), }); export const timestampArrayTable = schema.table('timestamp_array_table', { timestamp: timestamp('timestamp').array(), }); export const jsonTable = schema.table('json_table', { json: json('json'), }); export const jsonArrayTable = schema.table('json_array_table', { json: json('json').array(), }); export const intervalTable = schema.table('interval_table', { interval: interval('interval'), }); export const intervalUniqueTable = schema.table('interval_unique_table', { intervalUnique: interval('interval_unique').unique(), }); export const intervalArrayTable = schema.table('interval_array_table', { interval: interval('interval').array(), }); export const stringTable = schema.table('string_table', { string: text('string'), }); export const stringUniqueTable = schema.table('string_unique_table', { stringUnique: varchar('string_unique', { length: 256 }).unique(), }); export const stringArrayTable = schema.table('string_array_table', { string: text('string').array(), }); export const emailTable = schema.table('email_table', { email: varchar('email', { length: 256 }).unique(), }); export const emailArrayTable = schema.table('email_array_table', { email: varchar('email', { length: 256 }).array(), }); export const firstNameTable = schema.table('first_name_table', { firstName: varchar('first_name', { length: 256 }), }); export const firstNameUniqueTable = schema.table('first_name_unique_table', { firstNameUnique: varchar('first_name_unique', { length: 256 }).unique(), }); export const firstNameArrayTable = schema.table('first_name_array_table', { firstName: varchar('first_name', { length: 256 }).array(), }); export const lastNameTable = schema.table('last_name_table', { lastName: varchar('last_name', { length: 256 }), }); export const lastNameUniqueTable = schema.table('last_name_unique_table', { lastNameUnique: varchar('last_name_unique', { length: 256 }).unique(), }); export const lastNameArrayTable = schema.table('last_name_array_table', { lastName: varchar('last_name', { length: 256 }).array(), }); export const fullNameTable = schema.table('full_name__table', { fullName: varchar('full_name_', { length: 256 }), }); export const fullNameUniqueTable = schema.table('full_name_unique_table', { fullNameUnique: varchar('full_name_unique', { length: 256 }).unique(), }); export const fullNameArrayTable = schema.table('full_name_array_table', { fullName: varchar('full_name', { length: 256 }).array(), }); export const countryTable = schema.table('country_table', { country: varchar('country', { length: 256 }), }); export const countryUniqueTable = schema.table('country_unique_table', { countryUnique: varchar('country_unique', { length: 256 }).unique(), }); export const countryArrayTable = schema.table('country_array_table', { country: varchar('country', { length: 256 }).array(), }); export const cityTable = schema.table('city_table', { city: varchar('city', { length: 256 }), }); export const cityUniqueTable = schema.table('city_unique_table', { cityUnique: varchar('city_unique', { length: 256 }).unique(), }); export const cityArrayTable = schema.table('city_array_table', { city: varchar('city', { length: 256 }).array(), }); export const streetAddressTable = schema.table('street_address_table', { streetAddress: varchar('street_address', { length: 256 }), }); export const streetAddressUniqueTable = schema.table('street_address_unique_table', { streetAddressUnique: varchar('street_address_unique', { length: 256 }).unique(), }); export const streetAddressArrayTable = schema.table('street_address_array_table', { streetAddress: varchar('street_address', { length: 256 }).array(), }); export const jobTitleTable = schema.table('job_Title_table', { jobTitle: text('job_title'), }); export const jobTitleArrayTable = schema.table('job_title_array_table', { jobTitle: text('job_title').array(), }); export const postcodeTable = schema.table('postcode_table', { postcode: varchar('postcode', { length: 256 }), }); export const postcodeUniqueTable = schema.table('postcode_unique_table', { postcodeUnique: varchar('postcode_unique', { length: 256 }).unique(), }); export const postcodeArrayTable = schema.table('postcode_array_table', { postcode: varchar('postcode', { length: 256 }).array(), }); export const stateTable = schema.table('state_table', { state: text('state'), }); export const stateArrayTable = schema.table('state_array_table', { state: text('state').array(), }); export const companyNameTable = schema.table('company_name_table', { companyName: text('company_name'), }); export const companyNameUniqueTable = schema.table('company_name_unique_table', { companyNameUnique: varchar('company_name_unique', { length: 256 }).unique(), }); export const companyNameArrayTable = schema.table('company_name_array_table', { companyName: text('company_name').array(), }); export const loremIpsumTable = schema.table('lorem_ipsum_table', { loremIpsum: text('lorem_ipsum'), }); export const loremIpsumArrayTable = schema.table('lorem_ipsum_array_table', { loremIpsum: text('lorem_ipsum').array(), }); export const pointTable = schema.table('point_table', { point: point('point'), }); export const pointArrayTable = schema.table('point_array_table', { point: point('point').array(), }); export const lineTable = schema.table('line_table', { line: line('line'), }); export const lineArrayTable = schema.table('line_array_table', { line: line('line').array(), }); // export const pointUniqueTable = schema.table("point_unique_table", { // pointUnique: point("point_unique").unique(), // }); // export const lineUniqueTable = schema.table("line_unique_table", { // lineUnique: line("line_unique").unique(), // }); export const phoneNumberTable = schema.table('phone_number_table', { phoneNumber: varchar('phoneNumber', { length: 256 }).unique(), phoneNumberTemplate: varchar('phone_number_template', { length: 256 }).unique(), phoneNumberPrefixes: varchar('phone_number_prefixes', { length: 256 }).unique(), }); export const phoneNumberArrayTable = schema.table('phone_number_array_table', { phoneNumber: varchar('phoneNumber', { length: 256 }).array(), phoneNumberTemplate: varchar('phone_number_template', { length: 256 }).array(), phoneNumberPrefixes: varchar('phone_number_prefixes', { length: 256 }).array(), }); export const weightedRandomTable = schema.table('weighted_random_table', { weightedRandom: varchar('weighted_random', { length: 256 }), }); export const weightedRandomWithUniqueGensTable = schema.table('weighted_random_with_unique_gens_table', { weightedRandomWithUniqueGens: varchar('weighted_random_with_unique_gens', { length: 256 }).unique(), }); export const uuidTable = schema.table('uuid_table', { uuid: uuid('uuid'), }); export const uuidArrayTable = schema.table('uuid_array_table', { uuid: uuid('uuid').array(), }); ================================================ FILE: drizzle-seed/tests/pg/pg.test.ts ================================================ import { PGlite } from '@electric-sql/pglite'; import { relations, sql } from 'drizzle-orm'; import type { PgliteDatabase } from 'drizzle-orm/pglite'; import { drizzle } from 'drizzle-orm/pglite'; import { afterAll, afterEach, beforeAll, expect, test, vi } from 'vitest'; import { reset, seed } from '../../src/index.ts'; import * as schema from './pgSchema.ts'; let client: PGlite; let db: PgliteDatabase; beforeAll(async () => { client = new PGlite(); db = drizzle(client); await db.execute(sql`CREATE SCHEMA "seeder_lib_pg";`); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."customer" ( "id" varchar(256) PRIMARY KEY NOT NULL, "company_name" text NOT NULL, "contact_name" text NOT NULL, "contact_title" text NOT NULL, "address" text NOT NULL, "city" text NOT NULL, "postal_code" text, "region" text, "country" text NOT NULL, "phone" text NOT NULL, "fax" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."order_detail" ( "unit_price" numeric NOT NULL, "quantity" integer NOT NULL, "discount" numeric NOT NULL, "order_id" integer NOT NULL, "product_id" integer NOT NULL ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."employee" ( "id" integer PRIMARY KEY NOT NULL, "last_name" text NOT NULL, "first_name" text, "title" text NOT NULL, "title_of_courtesy" text NOT NULL, "birth_date" timestamp NOT NULL, "hire_date" timestamp NOT NULL, "address" text NOT NULL, "city" text NOT NULL, "postal_code" text NOT NULL, "country" text NOT NULL, "home_phone" text NOT NULL, "extension" integer NOT NULL, "notes" text NOT NULL, "reports_to" integer, "photo_path" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."order" ( "id" integer PRIMARY KEY NOT NULL, "order_date" timestamp NOT NULL, "required_date" timestamp NOT NULL, "shipped_date" timestamp, "ship_via" integer NOT NULL, "freight" numeric NOT NULL, "ship_name" text NOT NULL, "ship_city" text NOT NULL, "ship_region" text, "ship_postal_code" text, "ship_country" text NOT NULL, "customer_id" text NOT NULL, "employee_id" integer NOT NULL ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."product" ( "id" integer PRIMARY KEY NOT NULL, "name" text NOT NULL, "quantity_per_unit" text NOT NULL, "unit_price" numeric NOT NULL, "units_in_stock" integer NOT NULL, "units_on_order" integer NOT NULL, "reorder_level" integer NOT NULL, "discontinued" integer NOT NULL, "supplier_id" integer NOT NULL ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."supplier" ( "id" integer PRIMARY KEY NOT NULL, "company_name" text NOT NULL, "contact_name" text NOT NULL, "contact_title" text NOT NULL, "address" text NOT NULL, "city" text NOT NULL, "region" text, "postal_code" text NOT NULL, "country" text NOT NULL, "phone" text NOT NULL ); `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."order_detail" ADD CONSTRAINT "order_detail_order_id_order_id_fk" FOREIGN KEY ("order_id") REFERENCES "seeder_lib_pg"."order"("id") ON DELETE cascade ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."order_detail" ADD CONSTRAINT "order_detail_product_id_product_id_fk" FOREIGN KEY ("product_id") REFERENCES "seeder_lib_pg"."product"("id") ON DELETE cascade ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."employee" ADD CONSTRAINT "employee_reports_to_employee_id_fk" FOREIGN KEY ("reports_to") REFERENCES "seeder_lib_pg"."employee"("id") ON DELETE no action ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."order" ADD CONSTRAINT "order_customer_id_customer_id_fk" FOREIGN KEY ("customer_id") REFERENCES "seeder_lib_pg"."customer"("id") ON DELETE cascade ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."order" ADD CONSTRAINT "order_employee_id_employee_id_fk" FOREIGN KEY ("employee_id") REFERENCES "seeder_lib_pg"."employee"("id") ON DELETE cascade ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."product" ADD CONSTRAINT "product_supplier_id_supplier_id_fk" FOREIGN KEY ("supplier_id") REFERENCES "seeder_lib_pg"."supplier"("id") ON DELETE cascade ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."identity_columns_table" ( "id" integer generated always as identity, "id1" integer generated by default as identity, "name" text ); `, ); await db.execute( sql` create table "seeder_lib_pg"."users" ( id serial primary key, name text, "invitedBy" integer constraint "users_invitedBy_user_id_fk" references "seeder_lib_pg"."users" ); `, ); await db.execute( sql` create table "seeder_lib_pg"."posts" ( id serial primary key, name text, content text, "userId" integer constraint "users_userId_user_id_fk" references "seeder_lib_pg"."users" ); `, ); }); afterEach(async () => { await reset(db, schema); }); afterAll(async () => { await client.close(); }); test('basic seed test', async () => { await seed(db, schema); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(10); expect(details.length).toBe(10); expect(employees.length).toBe(10); expect(orders.length).toBe(10); expect(products.length).toBe(10); expect(suppliers.length).toBe(10); }); test('seed with options.count:11 test', async () => { await seed(db, schema, { count: 11 }); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(11); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) customers count', async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) all tables count', async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, }, details: { count: 13, }, employees: { count: 14, }, orders: { count: 15, }, products: { count: 16, }, suppliers: { count: 17, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(13); expect(employees.length).toBe(14); expect(orders.length).toBe(15); expect(products.length).toBe(16); expect(suppliers.length).toBe(17); }); test("redefine(refine) orders count using 'with' in customers", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 13, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test("sequential using of 'with'", async () => { const currSchema = { customers: schema.customers, details: schema.details, employees: schema.employees, orders: schema.orders, products: schema.products, suppliers: schema.suppliers, }; await seed(db, currSchema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 12, with: { details: 3, }, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(24); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('seeding with identity columns', async () => { await seed(db, { identityColumnsTable: schema.identityColumnsTable }); const result = await db.select().from(schema.identityColumnsTable); expect(result.length).toBe(10); }); test('seeding with self relation', async () => { await seed(db, { users: schema.users }); const result = await db.select().from(schema.users); expect(result.length).toBe(10); const predicate = result.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('overlapping a foreign key constraint with a one-to-many relation', async () => { const postsRelation = relations(schema.posts, ({ one }) => ({ user: one(schema.users, { fields: [schema.posts.userId], references: [schema.users.id] }), })); const consoleMock = vi.spyOn(console, 'warn').mockImplementation(() => {}); await reset(db, { users: schema.users, posts: schema.posts, postsRelation }); await seed(db, { users: schema.users, posts: schema.posts, postsRelation }); // expecting to get a warning expect(consoleMock).toBeCalled(); expect(consoleMock).toBeCalledWith(expect.stringMatching(/^You are providing a one-to-many relation.+/)); const users = await db.select().from(schema.users); const posts = await db.select().from(schema.posts); expect(users.length).toBe(10); let predicate = users.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(posts.length).toBe(10); predicate = posts.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: drizzle-seed/tests/pg/pgSchema.ts ================================================ import type { AnyPgColumn } from 'drizzle-orm/pg-core'; import { integer, numeric, pgSchema, serial, text, timestamp, varchar } from 'drizzle-orm/pg-core'; export const schema = pgSchema('seeder_lib_pg'); export const customers = schema.table('customer', { id: varchar('id', { length: 256 }).primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = schema.table( 'employee', { id: integer('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: timestamp('birth_date').notNull(), hireDate: timestamp('hire_date').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: integer('extension').notNull(), notes: text('notes').notNull(), reportsTo: integer('reports_to').references((): AnyPgColumn => employees.id), photoPath: text('photo_path'), }, ); export const orders = schema.table('order', { id: integer('id').primaryKey(), orderDate: timestamp('order_date').notNull(), requiredDate: timestamp('required_date').notNull(), shippedDate: timestamp('shipped_date'), shipVia: integer('ship_via').notNull(), freight: numeric('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: text('customer_id') .notNull() .references(() => customers.id, { onDelete: 'cascade' }), employeeId: integer('employee_id') .notNull() .references(() => employees.id, { onDelete: 'cascade' }), }); export const suppliers = schema.table('supplier', { id: integer('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }); export const products = schema.table('product', { id: integer('id').primaryKey(), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: numeric('unit_price').notNull(), unitsInStock: integer('units_in_stock').notNull(), unitsOnOrder: integer('units_on_order').notNull(), reorderLevel: integer('reorder_level').notNull(), discontinued: integer('discontinued').notNull(), supplierId: integer('supplier_id') .notNull() .references(() => suppliers.id, { onDelete: 'cascade' }), }); export const details = schema.table('order_detail', { unitPrice: numeric('unit_price').notNull(), quantity: integer('quantity').notNull(), discount: numeric('discount').notNull(), orderId: integer('order_id') .notNull() .references(() => orders.id, { onDelete: 'cascade' }), productId: integer('product_id') .notNull() .references(() => products.id, { onDelete: 'cascade' }), }); export const identityColumnsTable = schema.table('identity_columns_table', { id: integer().generatedAlwaysAsIdentity(), id1: integer().generatedByDefaultAsIdentity(), name: text(), }); export const users = schema.table( 'users', { id: serial().primaryKey(), name: text(), invitedBy: integer().references((): AnyPgColumn => users.id), }, ); export const posts = schema.table( 'posts', { id: serial().primaryKey(), name: text(), content: text(), userId: integer().references(() => users.id), }, ); ================================================ FILE: drizzle-seed/tests/pg/softRelationsTest/pgSchema.ts ================================================ import { relations } from 'drizzle-orm'; import { integer, numeric, pgSchema, text, timestamp, varchar } from 'drizzle-orm/pg-core'; export const schema = pgSchema('seeder_lib_pg'); export const customers = schema.table('customer', { id: varchar('id', { length: 256 }).primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = schema.table( 'employee', { id: integer('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: timestamp('birth_date').notNull(), hireDate: timestamp('hire_date').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: integer('extension').notNull(), notes: text('notes').notNull(), reportsTo: integer('reports_to'), photoPath: text('photo_path'), }, ); export const employeesRelations = relations(employees, ({ one }) => ({ employee: one(employees, { fields: [employees.reportsTo], references: [employees.id], }), })); export const orders = schema.table('order', { id: integer('id').primaryKey(), orderDate: timestamp('order_date').notNull(), requiredDate: timestamp('required_date').notNull(), shippedDate: timestamp('shipped_date'), shipVia: integer('ship_via').notNull(), freight: numeric('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: text('customer_id').notNull(), employeeId: integer('employee_id').notNull(), }); export const ordersRelations = relations(orders, ({ one }) => ({ customer: one(customers, { fields: [orders.customerId], references: [customers.id], }), employee: one(employees, { fields: [orders.employeeId], references: [employees.id], }), })); export const suppliers = schema.table('supplier', { id: integer('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }); export const products = schema.table('product', { id: integer('id').primaryKey(), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: numeric('unit_price').notNull(), unitsInStock: integer('units_in_stock').notNull(), unitsOnOrder: integer('units_on_order').notNull(), reorderLevel: integer('reorder_level').notNull(), discontinued: integer('discontinued').notNull(), supplierId: integer('supplier_id').notNull(), }); export const productsRelations = relations(products, ({ one }) => ({ supplier: one(suppliers, { fields: [products.supplierId], references: [suppliers.id], }), })); export const details = schema.table('order_detail', { unitPrice: numeric('unit_price').notNull(), quantity: integer('quantity').notNull(), discount: numeric('discount').notNull(), orderId: integer('order_id').notNull(), productId: integer('product_id').notNull(), }); export const detailsRelations = relations(details, ({ one }) => ({ order: one(orders, { fields: [details.orderId], references: [orders.id], }), product: one(products, { fields: [details.productId], references: [products.id], }), })); ================================================ FILE: drizzle-seed/tests/pg/softRelationsTest/softRelations.test.ts ================================================ import { PGlite } from '@electric-sql/pglite'; import { sql } from 'drizzle-orm'; import type { PgliteDatabase } from 'drizzle-orm/pglite'; import { drizzle } from 'drizzle-orm/pglite'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; import { reset, seed } from '../../../src/index.ts'; import * as schema from './pgSchema.ts'; let client: PGlite; let db: PgliteDatabase; beforeAll(async () => { client = new PGlite(); db = drizzle(client); await db.execute(sql`CREATE SCHEMA "seeder_lib_pg";`); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."customer" ( "id" varchar(256) PRIMARY KEY NOT NULL, "company_name" text NOT NULL, "contact_name" text NOT NULL, "contact_title" text NOT NULL, "address" text NOT NULL, "city" text NOT NULL, "postal_code" text, "region" text, "country" text NOT NULL, "phone" text NOT NULL, "fax" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."order_detail" ( "unit_price" numeric NOT NULL, "quantity" integer NOT NULL, "discount" numeric NOT NULL, "order_id" integer NOT NULL, "product_id" integer NOT NULL ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."employee" ( "id" integer PRIMARY KEY NOT NULL, "last_name" text NOT NULL, "first_name" text, "title" text NOT NULL, "title_of_courtesy" text NOT NULL, "birth_date" timestamp NOT NULL, "hire_date" timestamp NOT NULL, "address" text NOT NULL, "city" text NOT NULL, "postal_code" text NOT NULL, "country" text NOT NULL, "home_phone" text NOT NULL, "extension" integer NOT NULL, "notes" text NOT NULL, "reports_to" integer, "photo_path" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."order" ( "id" integer PRIMARY KEY NOT NULL, "order_date" timestamp NOT NULL, "required_date" timestamp NOT NULL, "shipped_date" timestamp, "ship_via" integer NOT NULL, "freight" numeric NOT NULL, "ship_name" text NOT NULL, "ship_city" text NOT NULL, "ship_region" text, "ship_postal_code" text, "ship_country" text NOT NULL, "customer_id" text NOT NULL, "employee_id" integer NOT NULL ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."product" ( "id" integer PRIMARY KEY NOT NULL, "name" text NOT NULL, "quantity_per_unit" text NOT NULL, "unit_price" numeric NOT NULL, "units_in_stock" integer NOT NULL, "units_on_order" integer NOT NULL, "reorder_level" integer NOT NULL, "discontinued" integer NOT NULL, "supplier_id" integer NOT NULL ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."supplier" ( "id" integer PRIMARY KEY NOT NULL, "company_name" text NOT NULL, "contact_name" text NOT NULL, "contact_title" text NOT NULL, "address" text NOT NULL, "city" text NOT NULL, "region" text, "postal_code" text NOT NULL, "country" text NOT NULL, "phone" text NOT NULL ); `, ); }); afterEach(async () => { await reset(db, schema); }); afterAll(async () => { await client.close(); }); const checkSoftRelations = ( customers: (typeof schema.customers.$inferSelect)[], details: (typeof schema.details.$inferSelect)[], employees: (typeof schema.employees.$inferSelect)[], orders: (typeof schema.orders.$inferSelect)[], products: (typeof schema.products.$inferSelect)[], suppliers: (typeof schema.suppliers.$inferSelect)[], ) => { // employees soft relations check const employeeIds = new Set(employees.map((employee) => employee.id)); const employeesPredicate = employees.every((employee) => employee.reportsTo !== null && employeeIds.has(employee.reportsTo) ); expect(employeesPredicate).toBe(true); // orders soft relations check const customerIds = new Set(customers.map((customer) => customer.id)); const ordersPredicate1 = orders.every((order) => order.customerId !== null && customerIds.has(order.customerId)); expect(ordersPredicate1).toBe(true); const ordersPredicate2 = orders.every((order) => order.employeeId !== null && employeeIds.has(order.employeeId)); expect(ordersPredicate2).toBe(true); // product soft relations check const supplierIds = new Set(suppliers.map((supplier) => supplier.id)); const productsPredicate = products.every((product) => product.supplierId !== null && supplierIds.has(product.supplierId) ); expect(productsPredicate).toBe(true); // details soft relations check const orderIds = new Set(orders.map((order) => order.id)); const detailsPredicate1 = details.every((detail) => detail.orderId !== null && orderIds.has(detail.orderId)); expect(detailsPredicate1).toBe(true); const productIds = new Set(products.map((product) => product.id)); const detailsPredicate2 = details.every((detail) => detail.productId !== null && productIds.has(detail.productId)); expect(detailsPredicate2).toBe(true); }; test('basic seed, soft relations test', async () => { await seed(db, schema); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(10); expect(details.length).toBe(10); expect(employees.length).toBe(10); expect(orders.length).toBe(10); expect(products.length).toBe(10); expect(suppliers.length).toBe(10); checkSoftRelations(customers, details, employees, orders, products, suppliers); }); test("redefine(refine) orders count using 'with' in customers, soft relations test", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 13, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); checkSoftRelations(customers, details, employees, orders, products, suppliers); }); test("sequential using of 'with', soft relations test", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 12, with: { details: 3, }, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(24); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); checkSoftRelations(customers, details, employees, orders, products, suppliers); }); ================================================ FILE: drizzle-seed/tests/sqlite/allDataTypesTest/sqliteSchema.ts ================================================ import { blob, integer, numeric, real, sqliteTable, text } from 'drizzle-orm/sqlite-core'; export const allDataTypes = sqliteTable('all_data_types', { integerNumber: integer('integer_number', { mode: 'number' }), integerBoolean: integer('integer_boolean', { mode: 'boolean' }), integerTimestamp: integer('integer_timestamp', { mode: 'timestamp' }), integerTimestampms: integer('integer_timestampms', { mode: 'timestamp_ms' }), real: real('real'), text: text('text', { mode: 'text' }), textJson: text('text_json', { mode: 'json' }), blobBigint: blob('blob_bigint', { mode: 'bigint' }), blobBuffer: blob('blob_buffer', { mode: 'buffer' }), blobJson: blob('blob_json', { mode: 'json' }), numeric: numeric('numeric'), }); ================================================ FILE: drizzle-seed/tests/sqlite/allDataTypesTest/sqlite_all_data_types.test.ts ================================================ import BetterSqlite3 from 'better-sqlite3'; import { sql } from 'drizzle-orm'; import type { BetterSQLite3Database } from 'drizzle-orm/better-sqlite3'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import { afterAll, beforeAll, expect, test } from 'vitest'; import { seed } from '../../../src/index.ts'; import * as schema from './sqliteSchema.ts'; let client: BetterSqlite3.Database; let db: BetterSQLite3Database; beforeAll(async () => { client = new BetterSqlite3(':memory:'); db = drizzle(client); db.run( sql.raw(` CREATE TABLE \`all_data_types\` ( \`integer_number\` integer, \`integer_boolean\` integer, \`integer_timestamp\` integer, \`integer_timestampms\` integer, \`real\` real, \`text\` text, \`text_json\` text, \`blob_bigint\` blob, \`blob_buffer\` blob, \`blob_json\` blob, \`numeric\` numeric ); `), ); }); afterAll(async () => { client.close(); }); test('basic seed test', async () => { // migrate(db, { migrationsFolder: path.join(__dirname, "sqliteMigrations") }); await seed(db, schema, { count: 10000 }); const allDataTypes = await db.select().from(schema.allDataTypes); // every value in each 10 rows does not equal undefined. const predicate = allDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); client.close(); }); ================================================ FILE: drizzle-seed/tests/sqlite/cyclicTables/cyclicTables.test.ts ================================================ import BetterSqlite3 from 'better-sqlite3'; import { sql } from 'drizzle-orm'; import type { BetterSQLite3Database } from 'drizzle-orm/better-sqlite3'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; import { reset, seed } from '../../../src/index.ts'; import * as schema from './sqliteSchema.ts'; let client: BetterSqlite3.Database; let db: BetterSQLite3Database; beforeAll(async () => { client = new BetterSqlite3(':memory:'); db = drizzle(client); db.run( sql` create table model ( id integer not null primary key, name text not null, defaultImageId integer, foreign key (defaultImageId) references model_image ); `, ); db.run( sql` create table model_image ( id integer not null primary key, url text not null, caption text, modelId integer not null references model ); `, ); // 3 tables case db.run( sql` create table model1 ( id integer not null primary key, name text not null, userId integer, defaultImageId integer, foreign key (defaultImageId) references model_image1, foreign key (userId) references user ); `, ); db.run( sql` create table model_image1 ( id integer not null primary key, url text not null, caption text, modelId integer not null references model1 ); `, ); db.run( sql` create table user ( id integer not null primary key, name text, invitedBy integer references user, imageId integer not null references model_image1 ); `, ); }); afterEach(async () => { await reset(db, schema); }); afterAll(async () => { client.close(); }); test('2 cyclic tables test', async () => { await seed(db, { modelTable: schema.modelTable, modelImageTable: schema.modelImageTable, }); const modelTable = await db.select().from(schema.modelTable); const modelImageTable = await db.select().from(schema.modelImageTable); expect(modelTable.length).toBe(10); let predicate = modelTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(modelImageTable.length).toBe(10); predicate = modelImageTable.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('3 cyclic tables test', async () => { await seed(db, { modelTable1: schema.modelTable1, modelImageTable1: schema.modelImageTable1, user: schema.user, }); const modelTable1 = await db.select().from(schema.modelTable1); const modelImageTable1 = await db.select().from(schema.modelImageTable1); const user = await db.select().from(schema.user); expect(modelTable1.length).toBe(10); let predicate = modelTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(modelImageTable1.length).toBe(10); predicate = modelImageTable1.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(user.length).toBe(10); predicate = user.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: drizzle-seed/tests/sqlite/cyclicTables/sqliteSchema.ts ================================================ import { relations } from 'drizzle-orm'; import { integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import type { AnySQLiteColumn } from 'drizzle-orm/sqlite-core'; // MODEL export const modelTable = sqliteTable( 'model', { id: integer().primaryKey(), name: text().notNull(), defaultImageId: integer().references(() => modelImageTable.id), }, ); export const modelRelations = relations(modelTable, ({ one, many }) => ({ images: many(modelImageTable), defaultImage: one(modelImageTable, { fields: [modelTable.defaultImageId], references: [modelImageTable.id], }), })); // MODEL IMAGE export const modelImageTable = sqliteTable( 'model_image', { id: integer().primaryKey(), url: text().notNull(), caption: text(), modelId: integer() .notNull() .references((): AnySQLiteColumn => modelTable.id), }, ); export const modelImageRelations = relations(modelImageTable, ({ one }) => ({ model: one(modelTable, { fields: [modelImageTable.modelId], references: [modelTable.id], }), })); // 3 tables case export const modelTable1 = sqliteTable( 'model1', { id: integer().primaryKey(), name: text().notNull(), userId: integer() .references(() => user.id), defaultImageId: integer().references(() => modelImageTable1.id), }, ); export const modelImageTable1 = sqliteTable( 'model_image1', { id: integer().primaryKey(), url: text().notNull(), caption: text(), modelId: integer().notNull() .references((): AnySQLiteColumn => modelTable1.id), }, ); export const user = sqliteTable( 'user', { id: integer().primaryKey(), name: text(), invitedBy: integer().references((): AnySQLiteColumn => user.id), imageId: integer() .notNull() .references((): AnySQLiteColumn => modelImageTable1.id), }, ); ================================================ FILE: drizzle-seed/tests/sqlite/softRelationsTest/softRelations.test.ts ================================================ import BetterSqlite3 from 'better-sqlite3'; import { sql } from 'drizzle-orm'; import type { BetterSQLite3Database } from 'drizzle-orm/better-sqlite3'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; import { reset, seed } from '../../../src/index.ts'; import * as schema from './sqliteSchema.ts'; let client: BetterSqlite3.Database; let db: BetterSQLite3Database; beforeAll(async () => { client = new BetterSqlite3(':memory:'); db = drizzle(client); db.run( sql.raw(` CREATE TABLE \`customer\` ( \`id\` text PRIMARY KEY NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text, \`region\` text, \`country\` text NOT NULL, \`phone\` text NOT NULL, \`fax\` text ); `), ); db.run( sql.raw(` CREATE TABLE \`order_detail\` ( \`unit_price\` numeric NOT NULL, \`quantity\` integer NOT NULL, \`discount\` numeric NOT NULL, \`order_id\` integer NOT NULL, \`product_id\` integer NOT NULL ); `), ); db.run( sql.raw(` CREATE TABLE \`employee\` ( \`id\` integer PRIMARY KEY NOT NULL, \`last_name\` text NOT NULL, \`first_name\` text, \`title\` text NOT NULL, \`title_of_courtesy\` text NOT NULL, \`birth_date\` integer NOT NULL, \`hire_date\` integer NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`home_phone\` text NOT NULL, \`extension\` integer NOT NULL, \`notes\` text NOT NULL, \`reports_to\` integer, \`photo_path\` text ); `), ); db.run( sql.raw(` CREATE TABLE \`order\` ( \`id\` integer PRIMARY KEY NOT NULL, \`order_date\` integer NOT NULL, \`required_date\` integer NOT NULL, \`shipped_date\` integer, \`ship_via\` integer NOT NULL, \`freight\` numeric NOT NULL, \`ship_name\` text NOT NULL, \`ship_city\` text NOT NULL, \`ship_region\` text, \`ship_postal_code\` text, \`ship_country\` text NOT NULL, \`customer_id\` text NOT NULL, \`employee_id\` integer NOT NULL ); `), ); db.run( sql.raw(` CREATE TABLE \`product\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`quantity_per_unit\` text NOT NULL, \`unit_price\` numeric NOT NULL, \`units_in_stock\` integer NOT NULL, \`units_on_order\` integer NOT NULL, \`reorder_level\` integer NOT NULL, \`discontinued\` integer NOT NULL, \`supplier_id\` integer NOT NULL ); `), ); db.run( sql.raw(` CREATE TABLE \`supplier\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`region\` text, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`phone\` text NOT NULL ); `), ); }); afterAll(async () => { client.close(); }); afterEach(async () => { await reset(db, schema); }); const checkSoftRelations = ( customers: (typeof schema.customers.$inferSelect)[], details: (typeof schema.details.$inferSelect)[], employees: (typeof schema.employees.$inferSelect)[], orders: (typeof schema.orders.$inferSelect)[], products: (typeof schema.products.$inferSelect)[], suppliers: (typeof schema.suppliers.$inferSelect)[], ) => { // employees soft relations check const employeeIds = new Set(employees.map((employee) => employee.id)); const employeesPredicate = employees.every((employee) => employee.reportsTo !== null && employeeIds.has(employee.reportsTo) ); expect(employeesPredicate).toBe(true); // orders soft relations check const customerIds = new Set(customers.map((customer) => customer.id)); const ordersPredicate1 = orders.every((order) => order.customerId !== null && customerIds.has(order.customerId)); expect(ordersPredicate1).toBe(true); const ordersPredicate2 = orders.every((order) => order.employeeId !== null && employeeIds.has(order.employeeId)); expect(ordersPredicate2).toBe(true); // product soft relations check const supplierIds = new Set(suppliers.map((supplier) => supplier.id)); const productsPredicate = products.every((product) => product.supplierId !== null && supplierIds.has(product.supplierId) ); expect(productsPredicate).toBe(true); // details soft relations check const orderIds = new Set(orders.map((order) => order.id)); const detailsPredicate1 = details.every((detail) => detail.orderId !== null && orderIds.has(detail.orderId)); expect(detailsPredicate1).toBe(true); const productIds = new Set(products.map((product) => product.id)); const detailsPredicate2 = details.every((detail) => detail.productId !== null && productIds.has(detail.productId)); expect(detailsPredicate2).toBe(true); }; test('basic seed, soft relations test', async () => { await seed(db, schema); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(10); expect(details.length).toBe(10); expect(employees.length).toBe(10); expect(orders.length).toBe(10); expect(products.length).toBe(10); expect(suppliers.length).toBe(10); checkSoftRelations(customers, details, employees, orders, products, suppliers); }); test("redefine(refine) orders count using 'with' in customers, soft relations test", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 13, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); checkSoftRelations(customers, details, employees, orders, products, suppliers); }); test("sequential using of 'with', soft relations test", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 12, with: { details: 3, }, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(24); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); checkSoftRelations(customers, details, employees, orders, products, suppliers); }); ================================================ FILE: drizzle-seed/tests/sqlite/softRelationsTest/sqliteSchema.ts ================================================ import { relations } from 'drizzle-orm'; import { integer, numeric, sqliteTable, text } from 'drizzle-orm/sqlite-core'; export const customers = sqliteTable('customer', { id: text('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = sqliteTable( 'employee', { id: integer('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: integer('birth_date', { mode: 'timestamp' }).notNull(), hireDate: integer('hire_date', { mode: 'timestamp' }).notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: integer('extension').notNull(), notes: text('notes').notNull(), reportsTo: integer('reports_to'), photoPath: text('photo_path'), }, ); export const employeesRelations = relations(employees, ({ one }) => ({ employee: one(employees, { fields: [employees.reportsTo], references: [employees.id], }), })); export const orders = sqliteTable('order', { id: integer('id').primaryKey(), orderDate: integer('order_date', { mode: 'timestamp' }).notNull(), requiredDate: integer('required_date', { mode: 'timestamp' }).notNull(), shippedDate: integer('shipped_date', { mode: 'timestamp' }), shipVia: integer('ship_via').notNull(), freight: numeric('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: text('customer_id').notNull(), employeeId: integer('employee_id').notNull(), }); export const ordersRelations = relations(orders, ({ one }) => ({ customer: one(customers, { fields: [orders.customerId], references: [customers.id], }), employee: one(employees, { fields: [orders.employeeId], references: [employees.id], }), })); export const suppliers = sqliteTable('supplier', { id: integer('id').primaryKey({ autoIncrement: true }), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }); export const products = sqliteTable('product', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: numeric('unit_price').notNull(), unitsInStock: integer('units_in_stock').notNull(), unitsOnOrder: integer('units_on_order').notNull(), reorderLevel: integer('reorder_level').notNull(), discontinued: integer('discontinued').notNull(), supplierId: integer('supplier_id').notNull(), }); export const productsRelations = relations(products, ({ one }) => ({ supplier: one(suppliers, { fields: [products.supplierId], references: [suppliers.id], }), })); export const details = sqliteTable('order_detail', { unitPrice: numeric('unit_price').notNull(), quantity: integer('quantity').notNull(), discount: numeric('discount').notNull(), orderId: integer('order_id').notNull(), productId: integer('product_id').notNull(), }); export const detailsRelations = relations(details, ({ one }) => ({ order: one(orders, { fields: [details.orderId], references: [orders.id], }), product: one(products, { fields: [details.productId], references: [products.id], }), })); ================================================ FILE: drizzle-seed/tests/sqlite/sqlite.test.ts ================================================ import BetterSqlite3 from 'better-sqlite3'; import { relations, sql } from 'drizzle-orm'; import type { BetterSQLite3Database } from 'drizzle-orm/better-sqlite3'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import { afterAll, afterEach, beforeAll, expect, test, vi } from 'vitest'; import { reset, seed } from '../../src/index.ts'; import * as schema from './sqliteSchema.ts'; let client: BetterSqlite3.Database; let db: BetterSQLite3Database; beforeAll(async () => { client = new BetterSqlite3(':memory:'); db = drizzle(client); db.run( sql.raw(` CREATE TABLE \`customer\` ( \`id\` text PRIMARY KEY NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text, \`region\` text, \`country\` text NOT NULL, \`phone\` text NOT NULL, \`fax\` text ); `), ); db.run( sql.raw(` CREATE TABLE \`order_detail\` ( \`unit_price\` numeric NOT NULL, \`quantity\` integer NOT NULL, \`discount\` numeric NOT NULL, \`order_id\` integer NOT NULL, \`product_id\` integer NOT NULL, FOREIGN KEY (\`order_id\`) REFERENCES \`order\`(\`id\`) ON UPDATE no action ON DELETE cascade, FOREIGN KEY (\`product_id\`) REFERENCES \`product\`(\`id\`) ON UPDATE no action ON DELETE cascade ); `), ); db.run( sql.raw(` CREATE TABLE \`employee\` ( \`id\` integer PRIMARY KEY NOT NULL, \`last_name\` text NOT NULL, \`first_name\` text, \`title\` text NOT NULL, \`title_of_courtesy\` text NOT NULL, \`birth_date\` integer NOT NULL, \`hire_date\` integer NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`home_phone\` text NOT NULL, \`extension\` integer NOT NULL, \`notes\` text NOT NULL, \`reports_to\` integer, \`photo_path\` text, FOREIGN KEY (\`reports_to\`) REFERENCES \`employee\`(\`id\`) ON UPDATE no action ON DELETE no action ); `), ); db.run( sql.raw(` CREATE TABLE \`order\` ( \`id\` integer PRIMARY KEY NOT NULL, \`order_date\` integer NOT NULL, \`required_date\` integer NOT NULL, \`shipped_date\` integer, \`ship_via\` integer NOT NULL, \`freight\` numeric NOT NULL, \`ship_name\` text NOT NULL, \`ship_city\` text NOT NULL, \`ship_region\` text, \`ship_postal_code\` text, \`ship_country\` text NOT NULL, \`customer_id\` text NOT NULL, \`employee_id\` integer NOT NULL, FOREIGN KEY (\`customer_id\`) REFERENCES \`customer\`(\`id\`) ON UPDATE no action ON DELETE cascade, FOREIGN KEY (\`employee_id\`) REFERENCES \`employee\`(\`id\`) ON UPDATE no action ON DELETE cascade ); `), ); db.run( sql.raw(` CREATE TABLE \`product\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`quantity_per_unit\` text NOT NULL, \`unit_price\` numeric NOT NULL, \`units_in_stock\` integer NOT NULL, \`units_on_order\` integer NOT NULL, \`reorder_level\` integer NOT NULL, \`discontinued\` integer NOT NULL, \`supplier_id\` integer NOT NULL, FOREIGN KEY (\`supplier_id\`) REFERENCES \`supplier\`(\`id\`) ON UPDATE no action ON DELETE cascade ); `), ); db.run( sql.raw(` CREATE TABLE \`supplier\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`region\` text, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`phone\` text NOT NULL ); `), ); db.run( sql.raw(` CREATE TABLE \`users\` ( \`id\` integer PRIMARY KEY, \`name\` text, \`invitedBy\` integer, FOREIGN KEY (\`invitedBy\`) REFERENCES \`users\`(\`id\`) ON UPDATE no action ON DELETE cascade ); `), ); db.run( sql.raw(` CREATE TABLE \`posts\` ( \`id\` integer PRIMARY KEY, \`name\` text, \`content\` text, \`userId\` integer, FOREIGN KEY (\`userId\`) REFERENCES \`users\`(\`id\`) ON UPDATE no action ON DELETE cascade ); `), ); }); afterAll(async () => { client.close(); }); afterEach(async () => { await reset(db, schema); }); test('basic seed test', async () => { await seed(db, schema); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(10); expect(details.length).toBe(10); expect(employees.length).toBe(10); expect(orders.length).toBe(10); expect(products.length).toBe(10); expect(suppliers.length).toBe(10); }); test('seed with options.count:11 test', async () => { await seed(db, schema, { count: 11 }); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(11); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) customers count', async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) all tables count', async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, }, details: { count: 13, }, employees: { count: 14, }, orders: { count: 15, }, products: { count: 16, }, suppliers: { count: 17, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(13); expect(employees.length).toBe(14); expect(orders.length).toBe(15); expect(products.length).toBe(16); expect(suppliers.length).toBe(17); }); test("redefine(refine) orders count using 'with' in customers", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 13, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test("sequential using of 'with'", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 12, with: { details: 3, }, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(24); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('overlapping a foreign key constraint with a one-to-many relation', async () => { const postsRelation = relations(schema.posts, ({ one }) => ({ user: one(schema.users, { fields: [schema.posts.userId], references: [schema.users.id] }), })); const consoleMock = vi.spyOn(console, 'warn').mockImplementation(() => {}); await reset(db, { users: schema.users, posts: schema.posts, postsRelation }); await seed(db, { users: schema.users, posts: schema.posts, postsRelation }); // expecting to get a warning expect(consoleMock).toBeCalled(); expect(consoleMock).toBeCalledWith(expect.stringMatching(/^You are providing a one-to-many relation.+/)); const users = await db.select().from(schema.users); const posts = await db.select().from(schema.posts); expect(users.length).toBe(10); let predicate = users.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); expect(posts.length).toBe(10); predicate = posts.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: drizzle-seed/tests/sqlite/sqliteSchema.ts ================================================ import { foreignKey, integer, numeric, sqliteTable, text } from 'drizzle-orm/sqlite-core'; export const customers = sqliteTable('customer', { id: text('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = sqliteTable( 'employee', { id: integer('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: integer('birth_date', { mode: 'timestamp' }).notNull(), hireDate: integer('hire_date', { mode: 'timestamp' }).notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: integer('extension').notNull(), notes: text('notes').notNull(), reportsTo: integer('reports_to'), photoPath: text('photo_path'), }, (table) => ({ reportsToFk: foreignKey(() => ({ columns: [table.reportsTo], foreignColumns: [table.id], })), }), ); export const orders = sqliteTable('order', { id: integer('id').primaryKey(), orderDate: integer('order_date', { mode: 'timestamp' }).notNull(), requiredDate: integer('required_date', { mode: 'timestamp' }).notNull(), shippedDate: integer('shipped_date', { mode: 'timestamp' }), shipVia: integer('ship_via').notNull(), freight: numeric('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: text('customer_id') .notNull() .references(() => customers.id, { onDelete: 'cascade' }), employeeId: integer('employee_id') .notNull() .references(() => employees.id, { onDelete: 'cascade' }), }); export const suppliers = sqliteTable('supplier', { id: integer('id').primaryKey({ autoIncrement: true }), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }); export const products = sqliteTable('product', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: numeric('unit_price').notNull(), unitsInStock: integer('units_in_stock').notNull(), unitsOnOrder: integer('units_on_order').notNull(), reorderLevel: integer('reorder_level').notNull(), discontinued: integer('discontinued').notNull(), supplierId: integer('supplier_id') .notNull() .references(() => suppliers.id, { onDelete: 'cascade' }), }); export const details = sqliteTable('order_detail', { unitPrice: numeric('unit_price').notNull(), quantity: integer('quantity').notNull(), discount: numeric('discount').notNull(), orderId: integer('order_id') .notNull() .references(() => orders.id, { onDelete: 'cascade' }), productId: integer('product_id') .notNull() .references(() => products.id, { onDelete: 'cascade' }), }); export const users = sqliteTable( 'users', { id: integer().primaryKey(), name: text(), invitedBy: integer(), }, (table) => ({ reportsToFk: foreignKey(() => ({ columns: [table.invitedBy], foreignColumns: [table.id], })), }), ); export const posts = sqliteTable( 'posts', { id: integer().primaryKey(), name: text(), content: text(), userId: integer().references(() => users.id), }, ); ================================================ FILE: drizzle-seed/tsconfig.build.json ================================================ { "extends": "./tsconfig.json", "compilerOptions": { "rootDir": "src" }, "include": ["src"] } ================================================ FILE: drizzle-seed/tsconfig.json ================================================ { "compilerOptions": { "isolatedModules": true, "composite": false, "target": "esnext", "module": "esnext", "moduleResolution": "bundler", "lib": ["es2020", "es2018", "es2017", "es7", "es6", "es5", "es2022"], "declarationMap": false, "sourceMap": true, "allowJs": true, "incremental": false, "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ "strict": true, /* Enable all strict type-checking options. */ "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ "exactOptionalPropertyTypes": false, /* Interpret optional property types as written, rather than adding 'undefined'. */ "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ "allowUnusedLabels": false, /* Disable error reporting for unused labels. */ "allowUnreachableCode": false, /* Disable error reporting for unreachable code. */ "skipLibCheck": true, /* Skip type checking all .d.ts files. */ "noErrorTruncation": true, /* Disable truncating types in error messages. */ "checkJs": true, "noEmit": true, "allowImportingTsExtensions": true, "outDir": "dist", "baseUrl": ".", "declaration": true, "paths": { "~/*": ["src/*"] } }, "exclude": ["**/dist", "src/dev"], "include": ["src", "*.ts", "tests"] } ================================================ FILE: drizzle-seed/type-tests/mysql.ts ================================================ import type { MySqlColumn } from 'drizzle-orm/mysql-core'; import { int, mysqlTable, text } from 'drizzle-orm/mysql-core'; import { drizzle as mysql2Drizzle } from 'drizzle-orm/mysql2'; import { reset, seed } from '../src/index.ts'; const mysqlUsers = mysqlTable('users', { id: int().primaryKey().autoincrement(), name: text(), inviteId: int('invite_id').references((): MySqlColumn => mysqlUsers.id), }); { const db = mysql2Drizzle(''); await seed(db, { users: mysqlUsers }); await reset(db, { users: mysqlUsers }); } ================================================ FILE: drizzle-seed/type-tests/pg.ts ================================================ import { drizzle as nodePostgresDrizzle } from 'drizzle-orm/node-postgres'; import type { PgColumn } from 'drizzle-orm/pg-core'; import { integer, pgTable, text } from 'drizzle-orm/pg-core'; import { drizzle as pgliteDrizzle } from 'drizzle-orm/pglite'; import { drizzle as postgresJsDrizzle } from 'drizzle-orm/postgres-js'; import { reset, seed } from '../src/index.ts'; const pgUsers = pgTable('users', { id: integer().primaryKey().generatedAlwaysAsIdentity(), name: text(), inviteId: integer('invite_id').references((): PgColumn => pgUsers.id), }); { const db0 = nodePostgresDrizzle('', { schema: { users: pgUsers } }); await seed(db0, { users: pgUsers }); await reset(db0, { users: pgUsers }); const db1 = nodePostgresDrizzle(''); await seed(db1, { users: pgUsers }); await reset(db1, { users: pgUsers }); } { const db0 = pgliteDrizzle('', { schema: { users: pgUsers } }); await seed(db0, { users: pgUsers }); await reset(db0, { users: pgUsers }); const db1 = pgliteDrizzle(''); await seed(db1, { users: pgUsers }); await reset(db1, { users: pgUsers }); } { const db0 = postgresJsDrizzle('', { schema: { users: pgUsers } }); await seed(db0, { users: pgUsers }); await reset(db0, { users: pgUsers }); const db1 = postgresJsDrizzle(''); await seed(db1, { users: pgUsers }); await reset(db1, { users: pgUsers }); } ================================================ FILE: drizzle-seed/type-tests/sqlite.ts ================================================ import { drizzle as betterSqlite3Drizzle } from 'drizzle-orm/better-sqlite3'; import type { SQLiteColumn } from 'drizzle-orm/sqlite-core'; import { int, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { reset, seed } from '../src/index.ts'; const mysqlUsers = sqliteTable('users', { id: int().primaryKey(), name: text(), inviteId: int('invite_id').references((): SQLiteColumn => mysqlUsers.id), }); { const db = betterSqlite3Drizzle(''); await seed(db, { users: mysqlUsers }); await reset(db, { users: mysqlUsers }); } ================================================ FILE: drizzle-seed/type-tests/tsconfig.json ================================================ { "extends": "../tsconfig.build.json", "compilerOptions": { "composite": false, "noEmit": true, "rootDir": "..", "outDir": "./.cache" }, "include": [".", "../src"], "exclude": ["**/playground"] } ================================================ FILE: drizzle-seed/vitest.config.ts ================================================ import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { include: [ './tests/pg/**/*.test.ts', './tests/mysql/**/*.test.ts', './tests/sqlite/**/*.test.ts', ], exclude: [], typecheck: { tsconfig: 'tsconfig.json', }, testTimeout: 100000, hookTimeout: 100000, isolate: true, poolOptions: { threads: { singleThread: true, }, }, maxWorkers: 1, fileParallelism: false, }, }); ================================================ FILE: drizzle-typebox/README.md ================================================ `drizzle-typebox` is a plugin for [Drizzle ORM](https://github.com/drizzle-team/drizzle-orm) that allows you to generate [@sinclair/typebox](https://github.com/sinclairzx81/typebox) schemas from Drizzle ORM schemas. **Features** - Create a select schema for tables, views and enums. - Create insert and update schemas for tables. - Supports all dialects: PostgreSQL, MySQL and SQLite. # Usage ```ts import { pgEnum, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core'; import { createInsertSchema, createSelectSchema } from 'drizzle-typebox'; import { Type } from '@sinclair/typebox'; import { Value } from '@sinclair/typebox/value'; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), role: text('role', { enum: ['admin', 'user'] }).notNull(), createdAt: timestamp('created_at').notNull().defaultNow(), }); // Schema for inserting a user - can be used to validate API requests const insertUserSchema = createInsertSchema(users); // Schema for updating a user - can be used to validate API requests const updateUserSchema = createUpdateSchema(users); // Schema for selecting a user - can be used to validate API responses const selectUserSchema = createSelectSchema(users); // Overriding the fields const insertUserSchema = createInsertSchema(users, { role: Type.String(), }); // Refining the fields - useful if you want to change the fields before they become nullable/optional in the final schema const insertUserSchema = createInsertSchema(users, { id: (schema) => Type.Number({ ...schema, minimum: 0 }), role: Type.String(), }); // Usage const isUserValid: boolean = Value.Check(insertUserSchema, { name: 'John Doe', email: 'johndoe@test.com', role: 'admin', }); ``` ================================================ FILE: drizzle-typebox/package.json ================================================ { "name": "drizzle-typebox", "version": "0.3.3", "description": "Generate Typebox schemas from Drizzle ORM schemas", "type": "module", "scripts": { "build": "tsx scripts/build.ts", "b": "pnpm build", "test:types": "cd tests && tsc", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", "publish": "npm publish package.tgz", "test": "vitest run" }, "exports": { ".": { "import": { "types": "./index.d.mts", "default": "./index.mjs" }, "require": { "types": "./index.d.cjs", "default": "./index.cjs" }, "types": "./index.d.ts", "default": "./index.mjs" } }, "main": "./index.cjs", "module": "./index.mjs", "types": "./index.d.ts", "publishConfig": { "provenance": true }, "repository": { "type": "git", "url": "git+https://github.com/drizzle-team/drizzle-orm.git" }, "keywords": [ "typebox", "validate", "validation", "schema", "drizzle", "orm", "pg", "mysql", "postgresql", "postgres", "sqlite", "database", "sql", "typescript", "ts" ], "author": "Drizzle Team", "license": "Apache-2.0", "peerDependencies": { "@sinclair/typebox": ">=0.34.8", "drizzle-orm": ">=0.36.0" }, "devDependencies": { "@rollup/plugin-typescript": "^11.1.0", "@sinclair/typebox": "^0.34.8", "@types/node": "^18.15.10", "cpy": "^10.1.0", "drizzle-orm": "link:../drizzle-orm/dist", "json-rules-engine": "^7.3.1", "rimraf": "^5.0.0", "rollup": "^3.29.5", "vite-tsconfig-paths": "^4.3.2", "vitest": "^3.1.3", "zx": "^7.2.2" } } ================================================ FILE: drizzle-typebox/rollup.config.ts ================================================ import typescript from '@rollup/plugin-typescript'; import { defineConfig } from 'rollup'; export default defineConfig([ { input: 'src/index.ts', output: [ { format: 'esm', dir: 'dist', entryFileNames: '[name].mjs', chunkFileNames: '[name]-[hash].mjs', sourcemap: true, }, { format: 'cjs', dir: 'dist', entryFileNames: '[name].cjs', chunkFileNames: '[name]-[hash].cjs', sourcemap: true, }, ], external: [ /^drizzle-orm\/?/, '@sinclair/typebox', ], plugins: [ typescript({ tsconfig: 'tsconfig.build.json', }), ], }, ]); ================================================ FILE: drizzle-typebox/scripts/build.ts ================================================ #!/usr/bin/env -S pnpm tsx import 'zx/globals'; import cpy from 'cpy'; await fs.remove('dist'); await $`rollup --config rollup.config.ts --configPlugin typescript`; await $`resolve-tspaths`; await fs.copy('README.md', 'dist/README.md'); await cpy('dist/**/*.d.ts', 'dist', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.mts'), }); await cpy('dist/**/*.d.ts', 'dist', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.cts'), }); await fs.copy('package.json', 'dist/package.json'); await $`scripts/fix-imports.ts`; ================================================ FILE: drizzle-typebox/scripts/fix-imports.ts ================================================ #!/usr/bin/env -S pnpm tsx import 'zx/globals'; import path from 'node:path'; import { parse, print, visit } from 'recast'; import parser from 'recast/parsers/typescript'; function resolvePathAlias(importPath: string, file: string) { if (importPath.startsWith('~/')) { const relativePath = path.relative(path.dirname(file), path.resolve('dist.new', importPath.slice(2))); importPath = relativePath.startsWith('.') ? relativePath : './' + relativePath; } return importPath; } function fixImportPath(importPath: string, file: string, ext: string) { importPath = resolvePathAlias(importPath, file); if (!/\..*\.(js|ts)$/.test(importPath)) { return importPath; } return importPath.replace(/\.(js|ts)$/, ext); } const cjsFiles = await glob('dist/**/*.{cjs,d.cts}'); await Promise.all(cjsFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); } this.traverse(path); }, visitCallExpression(path) { if (path.value.callee.type === 'Identifier' && path.value.callee.name === 'require') { path.value.arguments[0].value = fixImportPath(path.value.arguments[0].value, file, '.cjs'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = resolvePathAlias(path.value.argument.value, file); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.cjs'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); let esmFiles = await glob('dist/**/*.{js,d.ts}'); await Promise.all(esmFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = fixImportPath(path.value.argument.value, file, '.js'); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.js'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); esmFiles = await glob('dist/**/*.{mjs,d.mts}'); await Promise.all(esmFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = fixImportPath(path.value.argument.value, file, '.mjs'); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.mjs'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); ================================================ FILE: drizzle-typebox/src/column.ts ================================================ import { Kind, Type as t, TypeRegistry } from '@sinclair/typebox'; import type { StringOptions, TSchema, Type as typebox } from '@sinclair/typebox'; import type { Column, ColumnBaseConfig } from 'drizzle-orm'; import type { MySqlBigInt53, MySqlChar, MySqlDouble, MySqlFloat, MySqlInt, MySqlMediumInt, MySqlReal, MySqlSerial, MySqlSmallInt, MySqlText, MySqlTinyInt, MySqlVarChar, MySqlYear, } from 'drizzle-orm/mysql-core'; import type { PgArray, PgBigInt53, PgBigSerial53, PgBinaryVector, PgChar, PgDoublePrecision, PgGeometry, PgGeometryObject, PgHalfVector, PgInteger, PgLineABC, PgLineTuple, PgPointObject, PgPointTuple, PgReal, PgSerial, PgSmallInt, PgSmallSerial, PgUUID, PgVarchar, PgVector, } from 'drizzle-orm/pg-core'; import type { SingleStoreBigInt53, SingleStoreChar, SingleStoreDouble, SingleStoreFloat, SingleStoreInt, SingleStoreMediumInt, SingleStoreReal, SingleStoreSerial, SingleStoreSmallInt, SingleStoreText, SingleStoreTinyInt, SingleStoreVarChar, SingleStoreYear, } from 'drizzle-orm/singlestore-core'; import type { SQLiteInteger, SQLiteReal, SQLiteText } from 'drizzle-orm/sqlite-core'; import { CONSTANTS } from './constants.ts'; import { isColumnType, isWithEnum } from './utils.ts'; import type { BufferSchema, JsonSchema } from './utils.ts'; export const literalSchema = t.Union([t.String(), t.Number(), t.Boolean(), t.Null()]); export const jsonSchema: JsonSchema = t.Union([literalSchema, t.Array(t.Any()), t.Record(t.String(), t.Any())]) as any; TypeRegistry.Set('Buffer', (_, value) => value instanceof Buffer); // eslint-disable-line no-instanceof/no-instanceof export const bufferSchema: BufferSchema = { [Kind]: 'Buffer', type: 'buffer' } as any; export function mapEnumValues(values: string[]) { return Object.fromEntries(values.map((value) => [value, value])); } export function columnToSchema(column: Column, t: typeof typebox): TSchema { let schema!: TSchema; if (isWithEnum(column)) { schema = column.enumValues.length ? t.Enum(mapEnumValues(column.enumValues)) : t.String(); } if (!schema) { // Handle specific types if (isColumnType | PgPointTuple>(column, ['PgGeometry', 'PgPointTuple'])) { schema = t.Tuple([t.Number(), t.Number()]); } else if ( isColumnType | PgGeometryObject>(column, ['PgGeometryObject', 'PgPointObject']) ) { schema = t.Object({ x: t.Number(), y: t.Number() }); } else if (isColumnType | PgVector>(column, ['PgHalfVector', 'PgVector'])) { schema = t.Array( t.Number(), column.dimensions ? { minItems: column.dimensions, maxItems: column.dimensions, } : undefined, ); } else if (isColumnType>(column, ['PgLine'])) { schema = t.Tuple([t.Number(), t.Number(), t.Number()]); } else if (isColumnType>(column, ['PgLineABC'])) { schema = t.Object({ a: t.Number(), b: t.Number(), c: t.Number(), }); } // Handle other types else if (isColumnType>(column, ['PgArray'])) { schema = t.Array( columnToSchema(column.baseColumn, t), column.size ? { minItems: column.size, maxItems: column.size, } : undefined, ); } else if (column.dataType === 'array') { schema = t.Array(t.Any()); } else if (column.dataType === 'number') { schema = numberColumnToSchema(column, t); } else if (column.dataType === 'bigint') { schema = bigintColumnToSchema(column, t); } else if (column.dataType === 'boolean') { schema = t.Boolean(); } else if (column.dataType === 'date') { schema = t.Date(); } else if (column.dataType === 'string') { schema = stringColumnToSchema(column, t); } else if (column.dataType === 'json') { schema = jsonSchema; } else if (column.dataType === 'custom') { schema = t.Any(); } else if (column.dataType === 'buffer') { schema = bufferSchema; } } if (!schema) { schema = t.Any(); } return schema; } function numberColumnToSchema(column: Column, t: typeof typebox): TSchema { let unsigned = column.getSQLType().includes('unsigned'); let min!: number; let max!: number; let integer = false; if (isColumnType | SingleStoreTinyInt>(column, ['MySqlTinyInt', 'SingleStoreTinyInt'])) { min = unsigned ? 0 : CONSTANTS.INT8_MIN; max = unsigned ? CONSTANTS.INT8_UNSIGNED_MAX : CONSTANTS.INT8_MAX; integer = true; } else if ( isColumnType | PgSmallSerial | MySqlSmallInt | SingleStoreSmallInt>(column, [ 'PgSmallInt', 'PgSmallSerial', 'MySqlSmallInt', 'SingleStoreSmallInt', ]) ) { min = unsigned ? 0 : CONSTANTS.INT16_MIN; max = unsigned ? CONSTANTS.INT16_UNSIGNED_MAX : CONSTANTS.INT16_MAX; integer = true; } else if ( isColumnType< PgReal | MySqlFloat | MySqlMediumInt | SingleStoreFloat | SingleStoreMediumInt >(column, [ 'PgReal', 'MySqlFloat', 'MySqlMediumInt', 'SingleStoreFloat', 'SingleStoreMediumInt', ]) ) { min = unsigned ? 0 : CONSTANTS.INT24_MIN; max = unsigned ? CONSTANTS.INT24_UNSIGNED_MAX : CONSTANTS.INT24_MAX; integer = isColumnType(column, ['MySqlMediumInt', 'SingleStoreMediumInt']); } else if ( isColumnType | PgSerial | MySqlInt | SingleStoreInt>(column, [ 'PgInteger', 'PgSerial', 'MySqlInt', 'SingleStoreInt', ]) ) { min = unsigned ? 0 : CONSTANTS.INT32_MIN; max = unsigned ? CONSTANTS.INT32_UNSIGNED_MAX : CONSTANTS.INT32_MAX; integer = true; } else if ( isColumnType< | PgDoublePrecision | MySqlReal | MySqlDouble | SingleStoreReal | SingleStoreDouble | SQLiteReal >(column, [ 'PgDoublePrecision', 'MySqlReal', 'MySqlDouble', 'SingleStoreReal', 'SingleStoreDouble', 'SQLiteReal', ]) ) { min = unsigned ? 0 : CONSTANTS.INT48_MIN; max = unsigned ? CONSTANTS.INT48_UNSIGNED_MAX : CONSTANTS.INT48_MAX; } else if ( isColumnType< | PgBigInt53 | PgBigSerial53 | MySqlBigInt53 | MySqlSerial | SingleStoreBigInt53 | SingleStoreSerial | SQLiteInteger >( column, [ 'PgBigInt53', 'PgBigSerial53', 'MySqlBigInt53', 'MySqlSerial', 'SingleStoreBigInt53', 'SingleStoreSerial', 'SQLiteInteger', ], ) ) { unsigned = unsigned || isColumnType(column, ['MySqlSerial', 'SingleStoreSerial']); min = unsigned ? 0 : Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; integer = true; } else if (isColumnType | SingleStoreYear>(column, ['MySqlYear', 'SingleStoreYear'])) { min = 1901; max = 2155; integer = true; } else { min = Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; } const key = integer ? 'Integer' : 'Number'; return t[key]({ minimum: min, maximum: max, }); } function bigintColumnToSchema(column: Column, t: typeof typebox): TSchema { const unsigned = column.getSQLType().includes('unsigned'); const min = unsigned ? 0n : CONSTANTS.INT64_MIN; const max = unsigned ? CONSTANTS.INT64_UNSIGNED_MAX : CONSTANTS.INT64_MAX; return t.BigInt({ minimum: min, maximum: max, }); } function stringColumnToSchema(column: Column, t: typeof typebox): TSchema { if (isColumnType>>(column, ['PgUUID'])) { return t.String({ format: 'uuid' }); } else if ( isColumnType & { dimensions: number }>>(column, [ 'PgBinaryVector', ]) ) { return t.RegExp(/^[01]+$/, column.dimensions ? { maxLength: column.dimensions } : undefined); } let max: number | undefined; let fixed = false; if (isColumnType | SQLiteText>(column, ['PgVarchar', 'SQLiteText'])) { max = column.length; } else if ( isColumnType | SingleStoreVarChar>(column, ['MySqlVarChar', 'SingleStoreVarChar']) ) { max = column.length ?? CONSTANTS.INT16_UNSIGNED_MAX; } else if (isColumnType | SingleStoreText>(column, ['MySqlText', 'SingleStoreText'])) { if (column.textType === 'longtext') { max = CONSTANTS.INT32_UNSIGNED_MAX; } else if (column.textType === 'mediumtext') { max = CONSTANTS.INT24_UNSIGNED_MAX; } else if (column.textType === 'text') { max = CONSTANTS.INT16_UNSIGNED_MAX; } else { max = CONSTANTS.INT8_UNSIGNED_MAX; } } if ( isColumnType | MySqlChar | SingleStoreChar>(column, [ 'PgChar', 'MySqlChar', 'SingleStoreChar', ]) ) { max = column.length; fixed = true; } const options: Partial = {}; if (max !== undefined && fixed) { options.minLength = max; options.maxLength = max; } else if (max !== undefined) { options.maxLength = max; } return t.String(Object.keys(options).length > 0 ? options : undefined); } ================================================ FILE: drizzle-typebox/src/column.types.ts ================================================ import type * as t from '@sinclair/typebox'; import type { Assume, Column } from 'drizzle-orm'; import type { BufferSchema, IsEnumDefined, IsNever, JsonSchema } from './utils.ts'; type HasBaseColumn = TColumn extends { _: { baseColumn: Column | undefined } } ? IsNever extends false ? true : false : false; export type EnumValuesToEnum = { [K in TEnumValues[number]]: K }; export interface GenericSchema extends t.TSchema { static: T; } export type GetTypeboxType< TColumn extends Column, > = TColumn['_']['columnType'] extends | 'MySqlTinyInt' | 'SingleStoreTinyInt' | 'PgSmallInt' | 'PgSmallSerial' | 'MySqlSmallInt' | 'MySqlMediumInt' | 'SingleStoreSmallInt' | 'SingleStoreMediumInt' | 'PgInteger' | 'PgSerial' | 'MySqlInt' | 'SingleStoreInt' | 'PgBigInt53' | 'PgBigSerial53' | 'MySqlBigInt53' | 'MySqlSerial' | 'SingleStoreBigInt53' | 'SingleStoreSerial' | 'SQLiteInteger' | 'MySqlYear' | 'SingleStoreYear' ? t.TInteger : TColumn['_']['columnType'] extends 'PgBinaryVector' ? t.TRegExp : HasBaseColumn extends true ? t.TArray< GetTypeboxType> > : IsEnumDefined extends true ? t.TEnum<{ [K in Assume[number]]: K }> : TColumn['_']['columnType'] extends 'PgGeometry' | 'PgPointTuple' ? t.TTuple<[t.TNumber, t.TNumber]> : TColumn['_']['columnType'] extends 'PgLine' ? t.TTuple<[t.TNumber, t.TNumber, t.TNumber]> : TColumn['_']['data'] extends Date ? t.TDate : TColumn['_']['data'] extends Buffer ? BufferSchema : TColumn['_']['dataType'] extends 'array' ? t.TArray[number]>> : TColumn['_']['data'] extends Record ? TColumn['_']['columnType'] extends 'PgJson' | 'PgJsonb' | 'MySqlJson' | 'SingleStoreJson' | 'SQLiteTextJson' | 'SQLiteBlobJson' ? GenericSchema : t.TObject<{ [K in keyof TColumn['_']['data']]: GetTypeboxPrimitiveType }> : TColumn['_']['dataType'] extends 'json' ? JsonSchema : GetTypeboxPrimitiveType; type GetTypeboxPrimitiveType = TData extends number ? t.TNumber : TData extends bigint ? t.TBigInt : TData extends boolean ? t.TBoolean : TData extends string ? t.TString : t.TAny; type HandleSelectColumn< TSchema extends t.TSchema, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? TSchema : t.Union<[TSchema, t.TNull]>; type HandleInsertColumn< TSchema extends t.TSchema, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? TColumn['_']['hasDefault'] extends true ? t.TOptional : TSchema : t.TOptional>; type HandleUpdateColumn< TSchema extends t.TSchema, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? t.TOptional : t.TOptional>; export type HandleColumn< TType extends 'select' | 'insert' | 'update', TColumn extends Column, > = TType extends 'select' ? HandleSelectColumn, TColumn> : TType extends 'insert' ? HandleInsertColumn, TColumn> : TType extends 'update' ? HandleUpdateColumn, TColumn> : GetTypeboxType; ================================================ FILE: drizzle-typebox/src/constants.ts ================================================ export const CONSTANTS = { INT8_MIN: -128, INT8_MAX: 127, INT8_UNSIGNED_MAX: 255, INT16_MIN: -32768, INT16_MAX: 32767, INT16_UNSIGNED_MAX: 65535, INT24_MIN: -8388608, INT24_MAX: 8388607, INT24_UNSIGNED_MAX: 16777215, INT32_MIN: -2147483648, INT32_MAX: 2147483647, INT32_UNSIGNED_MAX: 4294967295, INT48_MIN: -140737488355328, INT48_MAX: 140737488355327, INT48_UNSIGNED_MAX: 281474976710655, INT64_MIN: -9223372036854775808n, INT64_MAX: 9223372036854775807n, INT64_UNSIGNED_MAX: 18446744073709551615n, }; ================================================ FILE: drizzle-typebox/src/index.ts ================================================ export { bufferSchema, jsonSchema, literalSchema } from './column.ts'; export * from './column.types.ts'; export * from './schema.ts'; export * from './schema.types.internal.ts'; export * from './schema.types.ts'; export * from './utils.ts'; ================================================ FILE: drizzle-typebox/src/schema.ts ================================================ import { Type as t } from '@sinclair/typebox'; import type { TSchema } from '@sinclair/typebox'; import { Column, getTableColumns, getViewSelectedFields, is, isTable, isView, SQL } from 'drizzle-orm'; import type { Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import { columnToSchema, mapEnumValues } from './column.ts'; import type { Conditions } from './schema.types.internal.ts'; import type { CreateInsertSchema, CreateSchemaFactoryOptions, CreateSelectSchema, CreateUpdateSchema, } from './schema.types.ts'; import { isPgEnum } from './utils.ts'; export function getColumns(tableLike: Table | View) { return isTable(tableLike) ? getTableColumns(tableLike) : getViewSelectedFields(tableLike); } export function handleColumns( columns: Record, refinements: Record, conditions: Conditions, factory?: CreateSchemaFactoryOptions, ): TSchema { const columnSchemas: Record = {}; for (const [key, selected] of Object.entries(columns)) { if (!is(selected, Column) && !is(selected, SQL) && !is(selected, SQL.Aliased) && typeof selected === 'object') { const columns = isTable(selected) || isView(selected) ? getColumns(selected) : selected; columnSchemas[key] = handleColumns(columns, refinements[key] ?? {}, conditions, factory); continue; } const refinement = refinements[key]; if (refinement !== undefined && typeof refinement !== 'function') { columnSchemas[key] = refinement; continue; } const column = is(selected, Column) ? selected : undefined; const schema = column ? columnToSchema(column, factory?.typeboxInstance ?? t) : t.Any(); const refined = typeof refinement === 'function' ? refinement(schema) : schema; if (conditions.never(column)) { continue; } else { columnSchemas[key] = refined; } if (column) { if (conditions.nullable(column)) { columnSchemas[key] = t.Union([columnSchemas[key]!, t.Null()]); } if (conditions.optional(column)) { columnSchemas[key] = t.Optional(columnSchemas[key]!); } } } return t.Object(columnSchemas) as any; } export function handleEnum(enum_: PgEnum, factory?: CreateSchemaFactoryOptions) { const typebox: typeof t = factory?.typeboxInstance ?? t; return typebox.Enum(mapEnumValues(enum_.enumValues)); } const selectConditions: Conditions = { never: () => false, optional: () => false, nullable: (column) => !column.notNull, }; const insertConditions: Conditions = { never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', optional: (column) => !column.notNull || (column.notNull && column.hasDefault), nullable: (column) => !column.notNull, }; const updateConditions: Conditions = { never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', optional: () => true, nullable: (column) => !column.notNull, }; export const createSelectSchema: CreateSelectSchema = ( entity: Table | View | PgEnum<[string, ...string[]]>, refine?: Record, ) => { if (isPgEnum(entity)) { return handleEnum(entity); } const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, selectConditions) as any; }; export const createInsertSchema: CreateInsertSchema = ( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, insertConditions) as any; }; export const createUpdateSchema: CreateUpdateSchema = ( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, updateConditions) as any; }; export function createSchemaFactory(options?: CreateSchemaFactoryOptions) { const createSelectSchema: CreateSelectSchema = ( entity: Table | View | PgEnum<[string, ...string[]]>, refine?: Record, ) => { if (isPgEnum(entity)) { return handleEnum(entity, options); } const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, selectConditions, options) as any; }; const createInsertSchema: CreateInsertSchema = ( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, insertConditions, options) as any; }; const createUpdateSchema: CreateUpdateSchema = ( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, updateConditions, options) as any; }; return { createSelectSchema, createInsertSchema, createUpdateSchema }; } ================================================ FILE: drizzle-typebox/src/schema.types.internal.ts ================================================ import type * as t from '@sinclair/typebox'; import type { Assume, Column, DrizzleTypeError, SelectedFieldsFlat, Simplify, Table, View } from 'drizzle-orm'; import type { GetTypeboxType, HandleColumn } from './column.types.ts'; import type { ColumnIsGeneratedAlwaysAs, GetSelection } from './utils.ts'; export interface Conditions { never: (column?: Column) => boolean; optional: (column: Column) => boolean; nullable: (column: Column) => boolean; } type BuildRefineField = T extends t.TSchema ? ((schema: T) => t.TSchema) | t.TSchema : never; export type BuildRefine< TColumns extends Record, > = { [K in keyof TColumns as TColumns[K] extends Column | SelectedFieldsFlat | Table | View ? K : never]?: TColumns[K] extends Column ? BuildRefineField> : BuildRefine>; }; type HandleRefinement< TType extends 'select' | 'insert' | 'update', TRefinement, TColumn extends Column, > = TRefinement extends (schema: any) => t.TSchema ? (TColumn['_']['notNull'] extends true ? ReturnType : t.TUnion<[ReturnType, t.TNull]>) extends infer TSchema ? TType extends 'update' ? t.TOptional> : TSchema : t.TSchema : TRefinement; type IsRefinementDefined< TRefinements extends Record | undefined, TKey extends string | symbol | number, > = TRefinements extends object ? TRefinements[TKey] extends t.TSchema | ((schema: any) => any) ? true : false : false; export type BuildSchema< TType extends 'select' | 'insert' | 'update', TColumns extends Record, TRefinements extends Record | undefined, > = t.TObject< Simplify< { [K in keyof TColumns as ColumnIsGeneratedAlwaysAs extends true ? never : K]: TColumns[K] extends infer TColumn extends Column ? IsRefinementDefined extends true ? Assume, t.TSchema> : HandleColumn : TColumns[K] extends infer TObject extends SelectedFieldsFlat | Table | View ? BuildSchema< TType, GetSelection, TRefinements extends object ? TRefinements[K & keyof TRefinements] : undefined > : t.TAny; } > >; export type NoUnknownKeys< TRefinement extends Record, TCompare extends Record, > = { [K in keyof TRefinement]: K extends keyof TCompare ? TRefinement[K] extends t.TSchema ? TRefinement[K] : TRefinement[K] extends Record ? NoUnknownKeys : TRefinement[K] : DrizzleTypeError<`Found unknown key in refinement: "${K & string}"`>; }; ================================================ FILE: drizzle-typebox/src/schema.types.ts ================================================ import type * as t from '@sinclair/typebox'; import type { Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type { EnumValuesToEnum } from './column.types.ts'; import type { BuildRefine, BuildSchema, NoUnknownKeys } from './schema.types.internal.ts'; export interface CreateSelectSchema { (table: TTable): BuildSchema<'select', TTable['_']['columns'], undefined>; < TTable extends Table, TRefine extends BuildRefine, >( table: TTable, refine?: NoUnknownKeys, ): BuildSchema<'select', TTable['_']['columns'], TRefine>; (view: TView): BuildSchema<'select', TView['_']['selectedFields'], undefined>; < TView extends View, TRefine extends BuildRefine, >( view: TView, refine: NoUnknownKeys, ): BuildSchema<'select', TView['_']['selectedFields'], TRefine>; >(enum_: TEnum): t.TEnum>; } export interface CreateInsertSchema { (table: TTable): BuildSchema<'insert', TTable['_']['columns'], undefined>; < TTable extends Table, TRefine extends BuildRefine>, >( table: TTable, refine?: NoUnknownKeys, ): BuildSchema<'insert', TTable['_']['columns'], TRefine>; } export interface CreateUpdateSchema { (table: TTable): BuildSchema<'update', TTable['_']['columns'], undefined>; < TTable extends Table, TRefine extends BuildRefine>, >( table: TTable, refine?: TRefine, ): BuildSchema<'update', TTable['_']['columns'], TRefine>; } export interface CreateSchemaFactoryOptions { typeboxInstance?: any; } ================================================ FILE: drizzle-typebox/src/utils.ts ================================================ import type { Kind, Static, TSchema } from '@sinclair/typebox'; import type { Column, SelectedFieldsFlat, Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type { literalSchema } from './column.ts'; export function isColumnType(column: Column, columnTypes: string[]): column is T { return columnTypes.includes(column.columnType); } export function isWithEnum(column: Column): column is typeof column & { enumValues: [string, ...string[]] } { return 'enumValues' in column && Array.isArray(column.enumValues) && column.enumValues.length > 0; } export const isPgEnum: (entity: any) => entity is PgEnum<[string, ...string[]]> = isWithEnum as any; type Literal = Static; export type Json = Literal | { [key: string]: any } | any[]; export interface JsonSchema extends TSchema { [Kind]: 'Union'; static: Json; anyOf: Json; } export interface BufferSchema extends TSchema { [Kind]: 'Buffer'; static: Buffer; type: 'buffer'; } export type IsNever = [T] extends [never] ? true : false; export type IsEnumDefined = [string, ...string[]] extends TEnum ? false : undefined extends TEnum ? false : true; export type ColumnIsGeneratedAlwaysAs = TColumn extends Column ? TColumn['_']['identity'] extends 'always' ? true : TColumn['_']['generated'] extends { type: 'byDefault' } | undefined ? false : true : false; export type GetSelection | Table | View> = T extends Table ? T['_']['columns'] : T extends View ? T['_']['selectedFields'] : T; ================================================ FILE: drizzle-typebox/tests/mysql.test.ts ================================================ import { type Static, Type as t } from '@sinclair/typebox'; import { type Equal, sql } from 'drizzle-orm'; import { customType, int, json, mysqlSchema, mysqlTable, mysqlView, serial, text } from 'drizzle-orm/mysql-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema, type GenericSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX, }); const serialNumberModeSchema = t.Integer({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER, }); const textSchema = t.String({ maxLength: CONSTANTS.INT16_UNSIGNED_MAX }); test('table - select', (tc) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = mysqlSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (tc) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = t.Object({ id: t.Optional(serialNumberModeSchema), name: textSchema, age: t.Optional(t.Union([intSchema, t.Null()])), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - update', (tc) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = t.Object({ id: t.Optional(serialNumberModeSchema), name: t.Optional(textSchema), age: t.Optional(t.Union([intSchema, t.Null()])), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('view qb - select', (tc) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = t.Object({ id: serialNumberModeSchema, age: t.Any() }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('view columns - select', (tc) => { const view = mysqlView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('view with nested fields - select', (tc) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = mysqlView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = t.Object({ id: serialNumberModeSchema, nested: t.Object({ name: textSchema, age: t.Any() }), table: t.Object({ id: serialNumberModeSchema, name: textSchema }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('nullability - select', (tc) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = t.Object({ c1: t.Union([intSchema, t.Null()]), c2: intSchema, c3: t.Union([intSchema, t.Null()]), c4: intSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('nullability - insert', (tc) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: intSchema, c3: t.Optional(t.Union([intSchema, t.Null()])), c4: t.Optional(intSchema), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('nullability - update', (tc) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: t.Optional(intSchema), c3: t.Optional(t.Union([intSchema, t.Null()])), c4: t.Optional(intSchema), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - select', (tc) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Union([intSchema, t.Null()]), c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (tc) => { const customText = customType({ dataType: () => 'text' }); const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = t.String({ minLength: 1, maxLength: 100 }); const result = createSelectSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: customTextSchema, }); const expected = t.Object({ c1: t.Union([intSchema, t.Null()]), c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: customTextSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - insert', (tc) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - update', (tc) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: t.Optional(t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 })), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine view - select', (tc) => { const table = mysqlTable('test', { c1: int(), c2: int(), c3: int(), c4: int(), c5: int(), c6: int(), }); const view = mysqlView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), nested: { c5: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c6: t.Integer({ minimum: 1, maximum: 10 }), }, table: { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }, }); const expected = t.Object({ c1: t.Union([intSchema, t.Null()]), c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), c3: t.Integer({ minimum: 1, maximum: 10 }), nested: t.Object({ c4: t.Union([intSchema, t.Null()]), c5: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), c6: t.Integer({ minimum: 1, maximum: 10 }), }), table: t.Object({ c1: t.Union([intSchema, t.Null()]), c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: t.Union([intSchema, t.Null()]), c5: t.Union([intSchema, t.Null()]), c6: t.Union([intSchema, t.Null()]), }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('all data types', (tc) => { const table = mysqlTable('test', ({ bigint, binary, boolean, char, date, datetime, decimal, double, float, int, json, mediumint, mysqlEnum, real, serial, smallint, text, time, timestamp, tinyint, varchar, varbinary, year, longtext, mediumtext, tinytext, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigint3: bigint({ unsigned: true, mode: 'number' }).notNull(), bigint4: bigint({ unsigned: true, mode: 'bigint' }).notNull(), binary: binary({ length: 10 }).notNull(), boolean: boolean().notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), decimal1: decimal().notNull(), decimal2: decimal({ unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), float2: float({ unsigned: true }).notNull(), int1: int().notNull(), int2: int({ unsigned: true }).notNull(), json: json().notNull(), mediumint1: mediumint().notNull(), mediumint2: mediumint({ unsigned: true }).notNull(), enum: mysqlEnum('enum', ['a', 'b', 'c']).notNull(), real: real().notNull(), serial: serial().notNull(), smallint1: smallint().notNull(), smallint2: smallint({ unsigned: true }).notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), tinyint1: tinyint().notNull(), tinyint2: tinyint({ unsigned: true }).notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), varbinary: varbinary({ length: 10 }).notNull(), year: year().notNull(), longtext1: longtext().notNull(), longtext2: longtext({ enum: ['a', 'b', 'c'] }).notNull(), mediumtext1: mediumtext().notNull(), mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), })); const result = createSelectSchema(table); const expected = t.Object({ bigint1: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), bigint2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), bigint3: t.Integer({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER }), bigint4: t.BigInt({ minimum: 0n, maximum: CONSTANTS.INT64_UNSIGNED_MAX }), binary: t.String(), boolean: t.Boolean(), char1: t.String({ minLength: 10, maxLength: 10 }), char2: t.Enum({ a: 'a', b: 'b', c: 'c' }), date1: t.Date(), date2: t.String(), datetime1: t.Date(), datetime2: t.String(), decimal1: t.String(), decimal2: t.String(), double1: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), double2: t.Number({ minimum: 0, maximum: CONSTANTS.INT48_UNSIGNED_MAX }), float1: t.Number({ minimum: CONSTANTS.INT24_MIN, maximum: CONSTANTS.INT24_MAX }), float2: t.Number({ minimum: 0, maximum: CONSTANTS.INT24_UNSIGNED_MAX }), int1: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX }), int2: t.Integer({ minimum: 0, maximum: CONSTANTS.INT32_UNSIGNED_MAX }), json: jsonSchema, mediumint1: t.Integer({ minimum: CONSTANTS.INT24_MIN, maximum: CONSTANTS.INT24_MAX }), mediumint2: t.Integer({ minimum: 0, maximum: CONSTANTS.INT24_UNSIGNED_MAX }), enum: t.Enum({ a: 'a', b: 'b', c: 'c' }), real: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), serial: t.Integer({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER }), smallint1: t.Integer({ minimum: CONSTANTS.INT16_MIN, maximum: CONSTANTS.INT16_MAX }), smallint2: t.Integer({ minimum: 0, maximum: CONSTANTS.INT16_UNSIGNED_MAX }), text1: t.String({ maxLength: CONSTANTS.INT16_UNSIGNED_MAX }), text2: t.Enum({ a: 'a', b: 'b', c: 'c' }), time: t.String(), timestamp1: t.Date(), timestamp2: t.String(), tinyint1: t.Integer({ minimum: CONSTANTS.INT8_MIN, maximum: CONSTANTS.INT8_MAX }), tinyint2: t.Integer({ minimum: 0, maximum: CONSTANTS.INT8_UNSIGNED_MAX }), varchar1: t.String({ maxLength: 10 }), varchar2: t.Enum({ a: 'a', b: 'b', c: 'c' }), varbinary: t.String(), year: t.Integer({ minimum: 1901, maximum: 2155 }), longtext1: t.String({ maxLength: CONSTANTS.INT32_UNSIGNED_MAX }), longtext2: t.Enum({ a: 'a', b: 'b', c: 'c' }), mediumtext1: t.String({ maxLength: CONSTANTS.INT24_UNSIGNED_MAX }), mediumtext2: t.Enum({ a: 'a', b: 'b', c: 'c' }), tinytext1: t.String({ maxLength: CONSTANTS.INT8_UNSIGNED_MAX }), tinytext2: t.Enum({ a: 'a', b: 'b', c: 'c' }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: GenericSchema = t.Any() as any; const table = mysqlTable('test', { json: json().$type(), }); const result = createSelectSchema(table); const expected = t.Object({ json: t.Union([TopLevelCondition, t.Null()]), }); Expect, Static>>(); } /* Disallow unknown keys in table refinement - select */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: t.String() }); } /* Disallow unknown keys in table refinement - insert */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: t.String() }); } /* Disallow unknown keys in table refinement - update */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: t.String() }); } /* Disallow unknown keys in view qb - select */ { const table = mysqlTable('test', { id: int() }); const view = mysqlView('test').as((qb) => qb.select().from(table)); const nestedSelect = mysqlView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: t.String() }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: t.String() } }); } /* Disallow unknown keys in view columns - select */ { const view = mysqlView('test', { id: int() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: t.String() }); } ================================================ FILE: drizzle-typebox/tests/pg.test.ts ================================================ import { type Static, Type as t } from '@sinclair/typebox'; import { type Equal, sql } from 'drizzle-orm'; import { customType, integer, json, jsonb, pgEnum, pgMaterializedView, pgSchema, pgTable, pgView, serial, text, } from 'drizzle-orm/pg-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema, type GenericSchema } from '../src'; import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; const integerSchema = t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX }); const textSchema = t.String(); test('table - select', (tc) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = t.Object({ id: integerSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = pgSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = t.Object({ id: integerSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (tc) => { const table = pgTable('test', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer(), }); const result = createInsertSchema(table); const expected = t.Object({ name: textSchema, age: t.Optional(t.Union([integerSchema, t.Null()])) }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - update', (tc) => { const table = pgTable('test', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer(), }); const result = createUpdateSchema(table); const expected = t.Object({ name: t.Optional(textSchema), age: t.Optional(t.Union([integerSchema, t.Null()])), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('view qb - select', (tc) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = t.Object({ id: integerSchema, age: t.Any() }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('view columns - select', (tc) => { const view = pgView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = t.Object({ id: integerSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('materialized view qb - select', (tc) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = t.Object({ id: integerSchema, age: t.Any() }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('materialized view columns - select', (tc) => { const view = pgView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = t.Object({ id: integerSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('view with nested fields - select', (tc) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = t.Object({ id: integerSchema, nested: t.Object({ name: textSchema, age: t.Any() }), table: t.Object({ id: integerSchema, name: textSchema }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('enum - select', (tc) => { const enum_ = pgEnum('test', ['a', 'b', 'c']); const result = createSelectSchema(enum_); const expected = t.Enum({ a: 'a', b: 'b', c: 'c' }); expectEnumValues(tc, expected).from(result); Expect>(); }); test('nullability - select', (tc) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), }); const result = createSelectSchema(table); const expected = t.Object({ c1: t.Union([integerSchema, t.Null()]), c2: integerSchema, c3: t.Union([integerSchema, t.Null()]), c4: integerSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('nullability - insert', (tc) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), c5: integer().generatedAlwaysAs(1), c6: integer().generatedAlwaysAsIdentity(), c7: integer().generatedByDefaultAsIdentity(), }); const result = createInsertSchema(table); const expected = t.Object({ c1: t.Optional(t.Union([integerSchema, t.Null()])), c2: integerSchema, c3: t.Optional(t.Union([integerSchema, t.Null()])), c4: t.Optional(integerSchema), c7: t.Optional(integerSchema), }); expectSchemaShape(tc, expected).from(result); }); test('nullability - update', (tc) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), c5: integer().generatedAlwaysAs(1), c6: integer().generatedAlwaysAsIdentity(), c7: integer().generatedByDefaultAsIdentity(), }); const result = createUpdateSchema(table); const expected = t.Object({ c1: t.Optional(t.Union([integerSchema, t.Null()])), c2: t.Optional(integerSchema), c3: t.Optional(t.Union([integerSchema, t.Null()])), c4: t.Optional(integerSchema), c7: t.Optional(integerSchema), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - select', (tc) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Union([integerSchema, t.Null()]), c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (tc) => { const customText = customType({ dataType: () => 'text' }); const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: customText(), }); const customTextSchema = t.String({ minLength: 1, maxLength: 100 }); const result = createSelectSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: customTextSchema, }); const expected = t.Object({ c1: t.Union([integerSchema, t.Null()]), c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: customTextSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - insert', (tc) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: integer().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Optional(t.Union([integerSchema, t.Null()])), c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - update', (tc) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: integer().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Optional(t.Union([integerSchema, t.Null()])), c2: t.Optional(t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 })), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine view - select', (tc) => { const table = pgTable('test', { c1: integer(), c2: integer(), c3: integer(), c4: integer(), c5: integer(), c6: integer(), }); const view = pgView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), nested: { c5: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c6: t.Integer({ minimum: 1, maximum: 10 }), }, table: { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }, }); const expected = t.Object({ c1: t.Union([integerSchema, t.Null()]), c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), c3: t.Integer({ minimum: 1, maximum: 10 }), nested: t.Object({ c4: t.Union([integerSchema, t.Null()]), c5: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), c6: t.Integer({ minimum: 1, maximum: 10 }), }), table: t.Object({ c1: t.Union([integerSchema, t.Null()]), c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: t.Union([integerSchema, t.Null()]), c5: t.Union([integerSchema, t.Null()]), c6: t.Union([integerSchema, t.Null()]), }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('all data types', (tc) => { const table = pgTable('test', ({ bigint, bigserial, bit, boolean, date, char, cidr, doublePrecision, geometry, halfvec, inet, integer, interval, json, jsonb, line, macaddr, macaddr8, numeric, point, real, serial, smallint, smallserial, text, sparsevec, time, timestamp, uuid, varchar, vector, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigserial1: bigserial({ mode: 'number' }).notNull(), bigserial2: bigserial({ mode: 'bigint' }).notNull(), bit: bit({ dimensions: 5 }).notNull(), boolean: boolean().notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), cidr: cidr().notNull(), doublePrecision: doublePrecision().notNull(), geometry1: geometry({ type: 'point', mode: 'tuple' }).notNull(), geometry2: geometry({ type: 'point', mode: 'xy' }).notNull(), halfvec: halfvec({ dimensions: 3 }).notNull(), inet: inet().notNull(), integer: integer().notNull(), interval: interval().notNull(), json: json().notNull(), jsonb: jsonb().notNull(), line1: line({ mode: 'abc' }).notNull(), line2: line({ mode: 'tuple' }).notNull(), macaddr: macaddr().notNull(), macaddr8: macaddr8().notNull(), numeric: numeric().notNull(), point1: point({ mode: 'xy' }).notNull(), point2: point({ mode: 'tuple' }).notNull(), real: real().notNull(), serial: serial().notNull(), smallint: smallint().notNull(), smallserial: smallserial().notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), sparsevec: sparsevec({ dimensions: 3 }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), uuid: uuid().notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), vector: vector({ dimensions: 3 }).notNull(), array1: integer().array().notNull(), array2: integer().array().array(2).notNull(), array3: varchar({ length: 10 }).array().array(2).notNull(), })); const result = createSelectSchema(table); const expected = t.Object({ bigint1: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), bigint2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), bigserial1: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), bigserial2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), bit: t.RegExp(/^[01]+$/, { maxLength: 5 }), boolean: t.Boolean(), date1: t.Date(), date2: t.String(), char1: t.String({ minLength: 10, maxLength: 10 }), char2: t.Enum({ a: 'a', b: 'b', c: 'c' }), cidr: t.String(), doublePrecision: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), geometry1: t.Tuple([t.Number(), t.Number()]), geometry2: t.Object({ x: t.Number(), y: t.Number() }), halfvec: t.Array(t.Number(), { minItems: 3, maxItems: 3 }), inet: t.String(), integer: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX }), interval: t.String(), json: jsonSchema, jsonb: jsonSchema, line1: t.Object({ a: t.Number(), b: t.Number(), c: t.Number() }), line2: t.Tuple([t.Number(), t.Number(), t.Number()]), macaddr: t.String(), macaddr8: t.String(), numeric: t.String(), point1: t.Object({ x: t.Number(), y: t.Number() }), point2: t.Tuple([t.Number(), t.Number()]), real: t.Number({ minimum: CONSTANTS.INT24_MIN, maximum: CONSTANTS.INT24_MAX }), serial: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX }), smallint: t.Integer({ minimum: CONSTANTS.INT16_MIN, maximum: CONSTANTS.INT16_MAX }), smallserial: t.Integer({ minimum: CONSTANTS.INT16_MIN, maximum: CONSTANTS.INT16_MAX }), text1: t.String(), text2: t.Enum({ a: 'a', b: 'b', c: 'c' }), sparsevec: t.String(), time: t.String(), timestamp1: t.Date(), timestamp2: t.String(), uuid: t.String({ format: 'uuid' }), varchar1: t.String({ maxLength: 10 }), varchar2: t.Enum({ a: 'a', b: 'b', c: 'c' }), vector: t.Array(t.Number(), { minItems: 3, maxItems: 3 }), array1: t.Array(integerSchema), array2: t.Array(t.Array(integerSchema), { minItems: 2, maxItems: 2 }), array3: t.Array(t.Array(t.String({ maxLength: 10 })), { minItems: 2, maxItems: 2 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: GenericSchema = t.Any() as any; const table = pgTable('test', { json: json().$type().notNull(), jsonb: jsonb().$type(), }); const result = createSelectSchema(table); const expected = t.Object({ json: TopLevelCondition, jsonb: t.Union([TopLevelCondition, t.Null()]), }); Expect, Static>>(); } /* Disallow unknown keys in table refinement - select */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createSelectSchema(table, { unknown: t.String() }); } /* Disallow unknown keys in table refinement - insert */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createInsertSchema(table, { unknown: t.String() }); } /* Disallow unknown keys in table refinement - update */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createUpdateSchema(table, { unknown: t.String() }); } /* Disallow unknown keys in view qb - select */ { const table = pgTable('test', { id: integer() }); const view = pgView('test').as((qb) => qb.select().from(table)); const mView = pgMaterializedView('test').as((qb) => qb.select().from(table)); const nestedSelect = pgView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: t.String() }); // @ts-expect-error createSelectSchema(mView, { unknown: t.String() }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: t.String() } }); } /* Disallow unknown keys in view columns - select */ { const view = pgView('test', { id: integer() }).as(sql``); const mView = pgView('test', { id: integer() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: t.String() }); // @ts-expect-error createSelectSchema(mView, { unknown: t.String() }); } ================================================ FILE: drizzle-typebox/tests/singlestore.test.ts ================================================ import { type Static, Type as t } from '@sinclair/typebox'; import { type Equal } from 'drizzle-orm'; import { customType, int, json, serial, singlestoreSchema, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema, type GenericSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX, }); const serialNumberModeSchema = t.Integer({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER, }); const textSchema = t.String({ maxLength: CONSTANTS.INT16_UNSIGNED_MAX }); test('table - select', (tc) => { const table = singlestoreTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = singlestoreSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (tc) => { const table = singlestoreTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = t.Object({ id: t.Optional(serialNumberModeSchema), name: textSchema, age: t.Optional(t.Union([intSchema, t.Null()])), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - update', (tc) => { const table = singlestoreTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = t.Object({ id: t.Optional(serialNumberModeSchema), name: t.Optional(textSchema), age: t.Optional(t.Union([intSchema, t.Null()])), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); // TODO: SingleStore doesn't support views yet. Add these tests when they're added // test('view qb - select', (tc) => { // const table = singlestoreTable('test', { // id: serial().primaryKey(), // name: text().notNull(), // }); // const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); // const result = createSelectSchema(view); // const expected = t.Object({ id: serialNumberModeSchema, age: t.Any() }); // expectSchemaShape(tc, expected).from(result); // Expect>(); // }); // test('view columns - select', (tc) => { // const view = mysqlView('test', { // id: serial().primaryKey(), // name: text().notNull(), // }).as(sql``); // const result = createSelectSchema(view); // const expected = t.Object({ id: serialNumberModeSchema, name: textSchema }); // expectSchemaShape(tc, expected).from(result); // Expect>(); // }); // test('view with nested fields - select', (tc) => { // const table = singlestoreTable('test', { // id: serial().primaryKey(), // name: text().notNull(), // }); // const view = mysqlView('test').as((qb) => // qb.select({ // id: table.id, // nested: { // name: table.name, // age: sql``.as('age'), // }, // table, // }).from(table) // ); // const result = createSelectSchema(view); // const expected = t.Object({ // id: serialNumberModeSchema, // nested: t.Object({ name: textSchema, age: t.Any() }), // table: t.Object({ id: serialNumberModeSchema, name: textSchema }), // }); // expectSchemaShape(tc, expected).from(result); // Expect>(); // }); test('nullability - select', (tc) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = t.Object({ c1: t.Union([intSchema, t.Null()]), c2: intSchema, c3: t.Union([intSchema, t.Null()]), c4: intSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('nullability - insert', (tc) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: intSchema, c3: t.Optional(t.Union([intSchema, t.Null()])), c4: t.Optional(intSchema), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('nullability - update', (tc) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: t.Optional(intSchema), c3: t.Optional(t.Union([intSchema, t.Null()])), c4: t.Optional(intSchema), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - select', (tc) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Union([intSchema, t.Null()]), c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (tc) => { const customText = customType({ dataType: () => 'text' }); const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = t.String({ minLength: 1, maxLength: 100 }); const result = createSelectSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: customTextSchema, }); const expected = t.Object({ c1: t.Union([intSchema, t.Null()]), c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: customTextSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - insert', (tc) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - update', (tc) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: t.Optional(t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 })), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); // test('refine view - select', (tc) => { // const table = singlestoreTable('test', { // c1: int(), // c2: int(), // c3: int(), // c4: int(), // c5: int(), // c6: int(), // }); // const view = mysqlView('test').as((qb) => // qb.select({ // c1: table.c1, // c2: table.c2, // c3: table.c3, // nested: { // c4: table.c4, // c5: table.c5, // c6: table.c6, // }, // table, // }).from(table) // ); // const result = createSelectSchema(view, { // c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), // c3: t.Integer({ minimum: 1, maximum: 10 }), // nested: { // c5: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), // c6: t.Integer({ minimum: 1, maximum: 10 }), // }, // table: { // c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), // c3: t.Integer({ minimum: 1, maximum: 10 }), // }, // }); // const expected = t.Object({ // c1: t.Union([intSchema, t.Null()]), // c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), // c3: t.Integer({ minimum: 1, maximum: 10 }), // nested: t.Object({ // c4: t.Union([intSchema, t.Null()]), // c5: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), // c6: t.Integer({ minimum: 1, maximum: 10 }), // }), // table: t.Object({ // c1: t.Union([intSchema, t.Null()]), // c2: t.Union([t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: 1000 }), t.Null()]), // c3: t.Integer({ minimum: 1, maximum: 10 }), // c4: t.Union([intSchema, t.Null()]), // c5: t.Union([intSchema, t.Null()]), // c6: t.Union([intSchema, t.Null()]), // }), // }); // expectSchemaShape(tc, expected).from(result); // Expect>(); // }); test('all data types', (tc) => { const table = singlestoreTable('test', ({ bigint, binary, boolean, char, date, datetime, decimal, double, float, int, json, mediumint, singlestoreEnum, real, serial, smallint, text, time, timestamp, tinyint, varchar, varbinary, year, longtext, mediumtext, tinytext, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigint3: bigint({ unsigned: true, mode: 'number' }).notNull(), bigint4: bigint({ unsigned: true, mode: 'bigint' }).notNull(), binary: binary({ length: 10 }).notNull(), boolean: boolean().notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), decimal1: decimal().notNull(), decimal2: decimal({ unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), float2: float({ unsigned: true }).notNull(), int1: int().notNull(), int2: int({ unsigned: true }).notNull(), json: json().notNull(), mediumint1: mediumint().notNull(), mediumint2: mediumint({ unsigned: true }).notNull(), enum: singlestoreEnum('enum', ['a', 'b', 'c']).notNull(), real: real().notNull(), serial: serial().notNull(), smallint1: smallint().notNull(), smallint2: smallint({ unsigned: true }).notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), tinyint1: tinyint().notNull(), tinyint2: tinyint({ unsigned: true }).notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), varbinary: varbinary({ length: 10 }).notNull(), year: year().notNull(), longtext1: longtext().notNull(), longtext2: longtext({ enum: ['a', 'b', 'c'] }).notNull(), mediumtext1: mediumtext().notNull(), mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), })); const result = createSelectSchema(table); const expected = t.Object({ bigint1: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), bigint2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), bigint3: t.Integer({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER }), bigint4: t.BigInt({ minimum: 0n, maximum: CONSTANTS.INT64_UNSIGNED_MAX }), binary: t.String(), boolean: t.Boolean(), char1: t.String({ minLength: 10, maxLength: 10 }), char2: t.Enum({ a: 'a', b: 'b', c: 'c' }), date1: t.Date(), date2: t.String(), datetime1: t.Date(), datetime2: t.String(), decimal1: t.String(), decimal2: t.String(), double1: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), double2: t.Number({ minimum: 0, maximum: CONSTANTS.INT48_UNSIGNED_MAX }), float1: t.Number({ minimum: CONSTANTS.INT24_MIN, maximum: CONSTANTS.INT24_MAX }), float2: t.Number({ minimum: 0, maximum: CONSTANTS.INT24_UNSIGNED_MAX }), int1: t.Integer({ minimum: CONSTANTS.INT32_MIN, maximum: CONSTANTS.INT32_MAX }), int2: t.Integer({ minimum: 0, maximum: CONSTANTS.INT32_UNSIGNED_MAX }), json: jsonSchema, mediumint1: t.Integer({ minimum: CONSTANTS.INT24_MIN, maximum: CONSTANTS.INT24_MAX }), mediumint2: t.Integer({ minimum: 0, maximum: CONSTANTS.INT24_UNSIGNED_MAX }), enum: t.Enum({ a: 'a', b: 'b', c: 'c' }), real: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), serial: t.Integer({ minimum: 0, maximum: Number.MAX_SAFE_INTEGER }), smallint1: t.Integer({ minimum: CONSTANTS.INT16_MIN, maximum: CONSTANTS.INT16_MAX }), smallint2: t.Integer({ minimum: 0, maximum: CONSTANTS.INT16_UNSIGNED_MAX }), text1: t.String({ maxLength: CONSTANTS.INT16_UNSIGNED_MAX }), text2: t.Enum({ a: 'a', b: 'b', c: 'c' }), time: t.String(), timestamp1: t.Date(), timestamp2: t.String(), tinyint1: t.Integer({ minimum: CONSTANTS.INT8_MIN, maximum: CONSTANTS.INT8_MAX }), tinyint2: t.Integer({ minimum: 0, maximum: CONSTANTS.INT8_UNSIGNED_MAX }), varchar1: t.String({ maxLength: 10 }), varchar2: t.Enum({ a: 'a', b: 'b', c: 'c' }), varbinary: t.String(), year: t.Integer({ minimum: 1901, maximum: 2155 }), longtext1: t.String({ maxLength: CONSTANTS.INT32_UNSIGNED_MAX }), longtext2: t.Enum({ a: 'a', b: 'b', c: 'c' }), mediumtext1: t.String({ maxLength: CONSTANTS.INT24_UNSIGNED_MAX }), mediumtext2: t.Enum({ a: 'a', b: 'b', c: 'c' }), tinytext1: t.String({ maxLength: CONSTANTS.INT8_UNSIGNED_MAX }), tinytext2: t.Enum({ a: 'a', b: 'b', c: 'c' }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: GenericSchema = t.Any() as any; const table = singlestoreTable('test', { json: json().$type(), }); const result = createSelectSchema(table); const expected = t.Object({ json: t.Union([TopLevelCondition, t.Null()]), }); Expect, Static>>(); } /* Disallow unknown keys in table refinement - select */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: t.String() }); } /* Disallow unknown keys in table refinement - insert */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: t.String() }); } /* Disallow unknown keys in table refinement - update */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: t.String() }); } // /* Disallow unknown keys in view qb - select */ { // const table = singlestoreTable('test', { id: int() }); // const view = mysqlView('test').as((qb) => qb.select().from(table)); // const nestedSelect = mysqlView('test').as((qb) => qb.select({ table }).from(table)); // // @ts-expect-error // createSelectSchema(view, { unknown: t.String() }); // // @ts-expect-error // createSelectSchema(nestedSelect, { table: { unknown: t.String() } }); // } // /* Disallow unknown keys in view columns - select */ { // const view = mysqlView('test', { id: int() }).as(sql``); // // @ts-expect-error // createSelectSchema(view, { unknown: t.String() }); // } ================================================ FILE: drizzle-typebox/tests/sqlite.test.ts ================================================ import { type Static, Type as t } from '@sinclair/typebox'; import { type Equal, sql } from 'drizzle-orm'; import { blob, customType, int, sqliteTable, sqliteView, text } from 'drizzle-orm/sqlite-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { bufferSchema, jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema, type GenericSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }); const textSchema = t.String(); test('table - select', (tc) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }); const result = createSelectSchema(table); const expected = t.Object({ id: intSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (tc) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = t.Object({ id: t.Optional(intSchema), name: textSchema, age: t.Optional(t.Union([intSchema, t.Null()])), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - update', (tc) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = t.Object({ id: t.Optional(intSchema), name: t.Optional(textSchema), age: t.Optional(t.Union([intSchema, t.Null()])), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('view qb - select', (tc) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }); const view = sqliteView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = t.Object({ id: intSchema, age: t.Any() }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('view columns - select', (tc) => { const view = sqliteView('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = t.Object({ id: intSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('view with nested fields - select', (tc) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }); const view = sqliteView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = t.Object({ id: intSchema, nested: t.Object({ name: textSchema, age: t.Any() }), table: t.Object({ id: intSchema, name: textSchema }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('nullability - select', (tc) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = t.Object({ c1: t.Union([intSchema, t.Null()]), c2: intSchema, c3: t.Union([intSchema, t.Null()]), c4: intSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('nullability - insert', (tc) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: intSchema, c3: t.Optional(t.Union([intSchema, t.Null()])), c4: t.Optional(intSchema), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('nullability - update', (tc) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: t.Optional(intSchema), c3: t.Optional(t.Union([intSchema, t.Null()])), c4: t.Optional(intSchema), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - select', (tc) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Union([intSchema, t.Null()]), c2: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (tc) => { const customText = customType({ dataType: () => 'text' }); const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = t.String({ minLength: 1, maxLength: 100 }); const result = createSelectSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: customTextSchema, }); const expected = t.Object({ c1: t.Union([intSchema, t.Null()]), c2: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: customTextSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - insert', (tc) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine table - update', (tc) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }); const expected = t.Object({ c1: t.Optional(t.Union([intSchema, t.Null()])), c2: t.Optional(t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 })), c3: t.Integer({ minimum: 1, maximum: 10 }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('refine view - select', (tc) => { const table = sqliteTable('test', { c1: int(), c2: int(), c3: int(), c4: int(), c5: int(), c6: int(), }); const view = sqliteView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), nested: { c5: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c6: t.Integer({ minimum: 1, maximum: 10 }), }, table: { c2: (schema) => t.Integer({ minimum: schema.minimum, maximum: 1000 }), c3: t.Integer({ minimum: 1, maximum: 10 }), }, }); const expected = t.Object({ c1: t.Union([intSchema, t.Null()]), c2: t.Union([t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), t.Null()]), c3: t.Integer({ minimum: 1, maximum: 10 }), nested: t.Object({ c4: t.Union([intSchema, t.Null()]), c5: t.Union([t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), t.Null()]), c6: t.Integer({ minimum: 1, maximum: 10 }), }), table: t.Object({ c1: t.Union([intSchema, t.Null()]), c2: t.Union([t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: 1000 }), t.Null()]), c3: t.Integer({ minimum: 1, maximum: 10 }), c4: t.Union([intSchema, t.Null()]), c5: t.Union([intSchema, t.Null()]), c6: t.Union([intSchema, t.Null()]), }), }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('all data types', (tc) => { const table = sqliteTable('test', ({ blob, integer, numeric, real, text, }) => ({ blob1: blob({ mode: 'buffer' }).notNull(), blob2: blob({ mode: 'bigint' }).notNull(), blob3: blob({ mode: 'json' }).notNull(), integer1: integer({ mode: 'number' }).notNull(), integer2: integer({ mode: 'boolean' }).notNull(), integer3: integer({ mode: 'timestamp' }).notNull(), integer4: integer({ mode: 'timestamp_ms' }).notNull(), numeric: numeric().notNull(), real: real().notNull(), text1: text({ mode: 'text' }).notNull(), text2: text({ mode: 'text', length: 10 }).notNull(), text3: text({ mode: 'text', enum: ['a', 'b', 'c'] }).notNull(), text4: text({ mode: 'json' }).notNull(), })); const result = createSelectSchema(table); const expected = t.Object({ blob1: bufferSchema, blob2: t.BigInt({ minimum: CONSTANTS.INT64_MIN, maximum: CONSTANTS.INT64_MAX }), blob3: jsonSchema, integer1: t.Integer({ minimum: Number.MIN_SAFE_INTEGER, maximum: Number.MAX_SAFE_INTEGER }), integer2: t.Boolean(), integer3: t.Date(), integer4: t.Date(), numeric: t.String(), real: t.Number({ minimum: CONSTANTS.INT48_MIN, maximum: CONSTANTS.INT48_MAX }), text1: t.String(), text2: t.String({ maxLength: 10 }), text3: t.Enum({ a: 'a', b: 'b', c: 'c' }), text4: jsonSchema, }); expectSchemaShape(tc, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: GenericSchema = t.Any() as any; const table = sqliteTable('test', { json1: text({ mode: 'json' }).$type().notNull(), json2: blob({ mode: 'json' }).$type(), }); const result = createSelectSchema(table); const expected = t.Object({ json1: TopLevelCondition, json2: t.Union([TopLevelCondition, t.Null()]), }); Expect, Static>>(); } /* Disallow unknown keys in table refinement - select */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: t.String() }); } /* Disallow unknown keys in table refinement - insert */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: t.String() }); } /* Disallow unknown keys in table refinement - update */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: t.String() }); } /* Disallow unknown keys in view qb - select */ { const table = sqliteTable('test', { id: int() }); const view = sqliteView('test').as((qb) => qb.select().from(table)); const nestedSelect = sqliteView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: t.String() }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: t.String() } }); } /* Disallow unknown keys in view columns - select */ { const view = sqliteView('test', { id: int() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: t.String() }); } ================================================ FILE: drizzle-typebox/tests/tsconfig.json ================================================ { "extends": "../tsconfig.json", "compilerOptions": { "module": "esnext", "target": "esnext", "noEmit": true, "rootDir": "..", "outDir": "./.cache" }, "include": [".", "../src"] } ================================================ FILE: drizzle-typebox/tests/utils.ts ================================================ import type * as t from '@sinclair/typebox'; import { expect, type TaskContext } from 'vitest'; function removeKeysFromObject(obj: Record, keys: string[]) { for (const key of keys) { delete obj[key]; } return obj; } export function expectSchemaShape(t: TaskContext, expected: T) { return { from(actual: T) { expect(Object.keys(actual.properties)).toStrictEqual(Object.keys(expected.properties)); const keys = ['$id', '$schema', 'title', 'description', 'default', 'examples', 'readOnly', 'writeOnly']; for (const key of Object.keys(actual.properties)) { expect(removeKeysFromObject(actual.properties[key]!, keys)).toStrictEqual( removeKeysFromObject(expected.properties[key]!, keys), ); } }, }; } export function expectEnumValues>(t: TaskContext, expected: T) { return { from(actual: T) { expect(actual.anyOf).toStrictEqual(expected.anyOf); }, }; } export function Expect<_ extends true>() {} ================================================ FILE: drizzle-typebox/tsconfig.build.json ================================================ { "extends": "./tsconfig.json", "compilerOptions": { "rootDir": "src" }, "include": ["src"] } ================================================ FILE: drizzle-typebox/tsconfig.json ================================================ { "extends": "../tsconfig.json", "compilerOptions": { "outDir": "dist", "baseUrl": ".", "declaration": true, "noEmit": true, "paths": { "~/*": ["src/*"] } }, "include": ["src", "*.ts"] } ================================================ FILE: drizzle-typebox/vitest.config.ts ================================================ import tsconfigPaths from 'vite-tsconfig-paths'; import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { include: [ 'tests/**/*.test.ts', ], exclude: [ 'tests/bun/**/*', ], typecheck: { tsconfig: 'tsconfig.json', }, testTimeout: 100000, hookTimeout: 100000, isolate: false, poolOptions: { threads: { singleThread: true, }, }, }, plugins: [tsconfigPaths()], }); ================================================ FILE: drizzle-valibot/README.md ================================================ `drizzle-valibot` is a plugin for [Drizzle ORM](https://github.com/drizzle-team/drizzle-orm) that allows you to generate [valibot](https://valibot.dev/) schemas from Drizzle ORM schemas. **Features** - Create a select schema for tables, views and enums. - Create insert and update schemas for tables. - Supports all dialects: PostgreSQL, MySQL and SQLite. # Usage ```ts import { pgEnum, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core'; import { createInsertSchema, createSelectSchema } from 'drizzle-valibot'; import { string, parse, number, pipe } from 'valibot'; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), role: text('role', { enum: ['admin', 'user'] }).notNull(), createdAt: timestamp('created_at').notNull().defaultNow(), }); // Schema for inserting a user - can be used to validate API requests const insertUserSchema = createInsertSchema(users); // Schema for updating a user - can be used to validate API requests const updateUserSchema = createUpdateSchema(users); // Schema for selecting a user - can be used to validate API responses const selectUserSchema = createSelectSchema(users); // Overriding the fields const insertUserSchema = createInsertSchema(users, { role: string(), }); // Refining the fields - useful if you want to change the fields before they become nullable/optional in the final schema const insertUserSchema = createInsertSchema(users, { id: (schema) => pipe([schema, minValue(0)]), role: string(), }); // Usage const isUserValid = parse(insertUserSchema, { name: 'John Doe', email: 'johndoe@test.com', role: 'admin', }); ``` ================================================ FILE: drizzle-valibot/package.json ================================================ { "name": "drizzle-valibot", "version": "0.4.2", "description": "Generate valibot schemas from Drizzle ORM schemas", "type": "module", "scripts": { "build": "tsx scripts/build.ts", "b": "pnpm build", "test:types": "cd tests && tsc", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", "publish": "npm publish package.tgz", "test": "vitest run" }, "exports": { ".": { "import": { "types": "./index.d.mts", "default": "./index.mjs" }, "require": { "types": "./index.d.cjs", "default": "./index.cjs" }, "types": "./index.d.ts", "default": "./index.mjs" } }, "main": "./index.cjs", "module": "./index.mjs", "types": "./index.d.ts", "publishConfig": { "provenance": true }, "repository": { "type": "git", "url": "git+https://github.com/drizzle-team/drizzle-orm.git" }, "keywords": [ "valibot", "validate", "validation", "schema", "drizzle", "orm", "pg", "mysql", "postgresql", "postgres", "sqlite", "database", "sql", "typescript", "ts" ], "author": "Drizzle Team", "license": "Apache-2.0", "peerDependencies": { "drizzle-orm": ">=0.36.0", "valibot": ">=1.0.0-beta.7" }, "devDependencies": { "@rollup/plugin-typescript": "^11.1.0", "@types/node": "^18.15.10", "cpy": "^10.1.0", "drizzle-orm": "link:../drizzle-orm/dist", "json-rules-engine": "^7.3.1", "rimraf": "^5.0.0", "rollup": "^3.29.5", "valibot": "1.0.0-beta.7", "vite-tsconfig-paths": "^4.3.2", "vitest": "^3.1.3", "zx": "^7.2.2" } } ================================================ FILE: drizzle-valibot/rollup.config.ts ================================================ import typescript from '@rollup/plugin-typescript'; import { defineConfig } from 'rollup'; export default defineConfig([ { input: 'src/index.ts', output: [ { format: 'esm', dir: 'dist', entryFileNames: '[name].mjs', chunkFileNames: '[name]-[hash].mjs', sourcemap: true, }, { format: 'cjs', dir: 'dist', entryFileNames: '[name].cjs', chunkFileNames: '[name]-[hash].cjs', sourcemap: true, }, ], external: [ /^drizzle-orm\/?/, 'valibot', ], plugins: [ typescript({ tsconfig: 'tsconfig.build.json', }), ], }, ]); ================================================ FILE: drizzle-valibot/scripts/build.ts ================================================ #!/usr/bin/env -S pnpm tsx import 'zx/globals'; import cpy from 'cpy'; await fs.remove('dist'); await $`rollup --config rollup.config.ts --configPlugin typescript`; await $`resolve-tspaths`; await fs.copy('README.md', 'dist/README.md'); await cpy('dist/**/*.d.ts', 'dist', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.mts'), }); await cpy('dist/**/*.d.ts', 'dist', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.cts'), }); await fs.copy('package.json', 'dist/package.json'); await $`scripts/fix-imports.ts`; ================================================ FILE: drizzle-valibot/scripts/fix-imports.ts ================================================ #!/usr/bin/env -S pnpm tsx import 'zx/globals'; import path from 'node:path'; import { parse, print, visit } from 'recast'; import parser from 'recast/parsers/typescript'; function resolvePathAlias(importPath: string, file: string) { if (importPath.startsWith('~/')) { const relativePath = path.relative(path.dirname(file), path.resolve('dist.new', importPath.slice(2))); importPath = relativePath.startsWith('.') ? relativePath : './' + relativePath; } return importPath; } function fixImportPath(importPath: string, file: string, ext: string) { importPath = resolvePathAlias(importPath, file); if (!/\..*\.(js|ts)$/.test(importPath)) { return importPath; } return importPath.replace(/\.(js|ts)$/, ext); } const cjsFiles = await glob('dist/**/*.{cjs,d.cts}'); await Promise.all(cjsFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); } this.traverse(path); }, visitCallExpression(path) { if (path.value.callee.type === 'Identifier' && path.value.callee.name === 'require') { path.value.arguments[0].value = fixImportPath(path.value.arguments[0].value, file, '.cjs'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = resolvePathAlias(path.value.argument.value, file); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.cjs'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); let esmFiles = await glob('dist/**/*.{js,d.ts}'); await Promise.all(esmFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = fixImportPath(path.value.argument.value, file, '.js'); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.js'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); esmFiles = await glob('dist/**/*.{mjs,d.mts}'); await Promise.all(esmFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = fixImportPath(path.value.argument.value, file, '.mjs'); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.mjs'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); ================================================ FILE: drizzle-valibot/src/column.ts ================================================ import type { Column, ColumnBaseConfig } from 'drizzle-orm'; import type { MySqlBigInt53, MySqlChar, MySqlDouble, MySqlFloat, MySqlInt, MySqlMediumInt, MySqlReal, MySqlSerial, MySqlSmallInt, MySqlText, MySqlTinyInt, MySqlVarChar, MySqlYear, } from 'drizzle-orm/mysql-core'; import type { PgArray, PgBigInt53, PgBigSerial53, PgBinaryVector, PgChar, PgDoublePrecision, PgGeometry, PgGeometryObject, PgHalfVector, PgInteger, PgLineABC, PgLineTuple, PgPointObject, PgPointTuple, PgReal, PgSerial, PgSmallInt, PgSmallSerial, PgUUID, PgVarchar, PgVector, } from 'drizzle-orm/pg-core'; import type { SingleStoreBigInt53, SingleStoreChar, SingleStoreDouble, SingleStoreFloat, SingleStoreInt, SingleStoreMediumInt, SingleStoreReal, SingleStoreSerial, SingleStoreSmallInt, SingleStoreText, SingleStoreTinyInt, SingleStoreVarChar, SingleStoreYear, } from 'drizzle-orm/singlestore-core'; import type { SQLiteInteger, SQLiteReal, SQLiteText } from 'drizzle-orm/sqlite-core'; import * as v from 'valibot'; import { CONSTANTS } from './constants.ts'; import { isColumnType, isWithEnum } from './utils.ts'; import type { Json } from './utils.ts'; export const literalSchema = v.union([v.string(), v.number(), v.boolean(), v.null()]); export const jsonSchema: v.GenericSchema = v.union([ literalSchema, v.array(v.any()), v.record(v.string(), v.any()), ]); export const bufferSchema: v.GenericSchema = v.custom((v) => v instanceof Buffer); // eslint-disable-line no-instanceof/no-instanceof export function mapEnumValues(values: string[]) { return Object.fromEntries(values.map((value) => [value, value])); } export function columnToSchema(column: Column): v.GenericSchema { let schema!: v.GenericSchema; if (isWithEnum(column)) { schema = column.enumValues.length ? v.enum(mapEnumValues(column.enumValues)) : v.string(); } if (!schema) { // Handle specific types if (isColumnType | PgPointTuple>(column, ['PgGeometry', 'PgPointTuple'])) { schema = v.tuple([v.number(), v.number()]); } else if ( isColumnType | PgGeometryObject>(column, ['PgGeometryObject', 'PgPointObject']) ) { schema = v.object({ x: v.number(), y: v.number() }); } else if (isColumnType | PgVector>(column, ['PgHalfVector', 'PgVector'])) { schema = v.array(v.number()); schema = column.dimensions ? v.pipe(schema as v.ArraySchema, v.length(column.dimensions)) : schema; } else if (isColumnType>(column, ['PgLine'])) { schema = v.tuple([v.number(), v.number(), v.number()]); v.array(v.array(v.number())); } else if (isColumnType>(column, ['PgLineABC'])) { schema = v.object({ a: v.number(), b: v.number(), c: v.number() }); } // Handle other types else if (isColumnType>(column, ['PgArray'])) { schema = v.array(columnToSchema(column.baseColumn)); schema = column.size ? v.pipe(schema as v.ArraySchema, v.length(column.size)) : schema; } else if (column.dataType === 'array') { schema = v.array(v.any()); } else if (column.dataType === 'number') { schema = numberColumnToSchema(column); } else if (column.dataType === 'bigint') { schema = bigintColumnToSchema(column); } else if (column.dataType === 'boolean') { schema = v.boolean(); } else if (column.dataType === 'date') { schema = v.date(); } else if (column.dataType === 'string') { schema = stringColumnToSchema(column); } else if (column.dataType === 'json') { schema = jsonSchema; } else if (column.dataType === 'custom') { schema = v.any(); } else if (column.dataType === 'buffer') { schema = bufferSchema; } } if (!schema) { schema = v.any(); } return schema; } function numberColumnToSchema(column: Column): v.GenericSchema { let unsigned = column.getSQLType().includes('unsigned'); let min!: number; let max!: number; let integer = false; if (isColumnType | SingleStoreTinyInt>(column, ['MySqlTinyInt', 'SingleStoreTinyInt'])) { min = unsigned ? 0 : CONSTANTS.INT8_MIN; max = unsigned ? CONSTANTS.INT8_UNSIGNED_MAX : CONSTANTS.INT8_MAX; integer = true; } else if ( isColumnType | PgSmallSerial | MySqlSmallInt | SingleStoreSmallInt>(column, [ 'PgSmallInt', 'PgSmallSerial', 'MySqlSmallInt', 'SingleStoreSmallInt', ]) ) { min = unsigned ? 0 : CONSTANTS.INT16_MIN; max = unsigned ? CONSTANTS.INT16_UNSIGNED_MAX : CONSTANTS.INT16_MAX; integer = true; } else if ( isColumnType< PgReal | MySqlFloat | MySqlMediumInt | SingleStoreFloat | SingleStoreMediumInt >(column, [ 'PgReal', 'MySqlFloat', 'MySqlMediumInt', 'SingleStoreFloat', 'SingleStoreMediumInt', ]) ) { min = unsigned ? 0 : CONSTANTS.INT24_MIN; max = unsigned ? CONSTANTS.INT24_UNSIGNED_MAX : CONSTANTS.INT24_MAX; integer = isColumnType(column, ['MySqlMediumInt', 'SingleStoreMediumInt']); } else if ( isColumnType | PgSerial | MySqlInt | SingleStoreInt>(column, [ 'PgInteger', 'PgSerial', 'MySqlInt', 'SingleStoreInt', ]) ) { min = unsigned ? 0 : CONSTANTS.INT32_MIN; max = unsigned ? CONSTANTS.INT32_UNSIGNED_MAX : CONSTANTS.INT32_MAX; integer = true; } else if ( isColumnType< | PgDoublePrecision | MySqlReal | MySqlDouble | SingleStoreReal | SingleStoreDouble | SQLiteReal >(column, [ 'PgDoublePrecision', 'MySqlReal', 'MySqlDouble', 'SingleStoreReal', 'SingleStoreDouble', 'SQLiteReal', ]) ) { min = unsigned ? 0 : CONSTANTS.INT48_MIN; max = unsigned ? CONSTANTS.INT48_UNSIGNED_MAX : CONSTANTS.INT48_MAX; } else if ( isColumnType< | PgBigInt53 | PgBigSerial53 | MySqlBigInt53 | MySqlSerial | SingleStoreBigInt53 | SingleStoreSerial | SQLiteInteger >( column, [ 'PgBigInt53', 'PgBigSerial53', 'MySqlBigInt53', 'MySqlSerial', 'SingleStoreBigInt53', 'SingleStoreSerial', 'SQLiteInteger', ], ) ) { unsigned = unsigned || isColumnType(column, ['MySqlSerial', 'SingleStoreSerial']); min = unsigned ? 0 : Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; integer = true; } else if (isColumnType | SingleStoreYear>(column, ['MySqlYear', 'SingleStoreYear'])) { min = 1901; max = 2155; integer = true; } else { min = Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; } const actions: any[] = [v.minValue(min), v.maxValue(max)]; if (integer) { actions.push(v.integer()); } return v.pipe(v.number(), ...actions); } function bigintColumnToSchema(column: Column): v.GenericSchema { const unsigned = column.getSQLType().includes('unsigned'); const min = unsigned ? 0n : CONSTANTS.INT64_MIN; const max = unsigned ? CONSTANTS.INT64_UNSIGNED_MAX : CONSTANTS.INT64_MAX; return v.pipe(v.bigint(), v.minValue(min), v.maxValue(max)); } function stringColumnToSchema(column: Column): v.GenericSchema { if (isColumnType>>(column, ['PgUUID'])) { return v.pipe(v.string(), v.uuid()); } let max: number | undefined; let regex: RegExp | undefined; let fixed = false; if (isColumnType | SQLiteText>(column, ['PgVarchar', 'SQLiteText'])) { max = column.length; } else if ( isColumnType | SingleStoreVarChar>(column, ['MySqlVarChar', 'SingleStoreVarChar']) ) { max = column.length ?? CONSTANTS.INT16_UNSIGNED_MAX; } else if (isColumnType | SingleStoreText>(column, ['MySqlText', 'SingleStoreText'])) { if (column.textType === 'longtext') { max = CONSTANTS.INT32_UNSIGNED_MAX; } else if (column.textType === 'mediumtext') { max = CONSTANTS.INT24_UNSIGNED_MAX; } else if (column.textType === 'text') { max = CONSTANTS.INT16_UNSIGNED_MAX; } else { max = CONSTANTS.INT8_UNSIGNED_MAX; } } if ( isColumnType | MySqlChar | SingleStoreChar>(column, [ 'PgChar', 'MySqlChar', 'SingleStoreChar', ]) ) { max = column.length; fixed = true; } if (isColumnType>(column, ['PgBinaryVector'])) { regex = /^[01]+$/; max = column.dimensions; } const actions: any[] = []; if (regex) { actions.push(v.regex(regex)); } if (max && fixed) { actions.push(v.length(max)); } else if (max) { actions.push(v.maxLength(max)); } return actions.length > 0 ? v.pipe(v.string(), ...actions) : v.string(); } ================================================ FILE: drizzle-valibot/src/column.types.ts ================================================ import type { Assume, Column } from 'drizzle-orm'; import type * as v from 'valibot'; import type { ColumnIsGeneratedAlwaysAs, IsEnumDefined, IsNever, Json, RemoveNeverElements } from './utils.ts'; export type HasBaseColumn = TColumn extends { _: { baseColumn: Column | undefined } } ? IsNever extends false ? true : false : false; export type EnumValuesToEnum = { readonly [K in TEnumValues[number]]: K }; export type ExtractAdditionalProperties = { max: TColumn['_']['columnType'] extends 'PgVarchar' | 'SQLiteText' | 'PgChar' | 'MySqlChar' | 'SingleStoreChar' ? Assume['length'] : TColumn['_']['columnType'] extends 'MySqlText' | 'MySqlVarChar' | 'SingleStoreText' | 'SingleStoreVarChar' ? number : TColumn['_']['columnType'] extends 'PgBinaryVector' | 'PgHalfVector' | 'PgVector' ? Assume['dimensions'] : TColumn['_']['columnType'] extends 'PgArray' ? Assume['size'] : undefined; fixedLength: TColumn['_']['columnType'] extends 'PgChar' | 'PgHalfVector' | 'PgVector' | 'PgArray' | 'MySqlChar' | 'SingleStoreChar' ? true : false; }; type GetLengthAction, TType extends string | ArrayLike> = T['fixedLength'] extends true ? v.LengthAction : v.MaxLengthAction; type GetArraySchema = v.ArraySchema< GetValibotType< T['_']['data'], T['_']['dataType'], T['_']['columnType'], T['_']['enumValues'], HasBaseColumn extends true ? Assume : undefined, ExtractAdditionalProperties >, undefined >; export type GetValibotType< TData, TDataType extends string, TColumnType extends string, TEnumValues extends string[] | undefined, TBaseColumn extends Column | undefined, TAdditionalProperties extends Record, > = TColumnType extends 'PgHalfVector' | 'PgVector' ? TAdditionalProperties['max'] extends number ? v.SchemaWithPipe< [v.ArraySchema, undefined>, GetLengthAction] > : v.ArraySchema, undefined> : TColumnType extends 'PgUUID' ? v.SchemaWithPipe<[v.StringSchema, v.UuidAction]> : TColumnType extends 'PgBinaryVector' ? v.SchemaWithPipe< RemoveNeverElements<[ v.StringSchema, v.RegexAction, TAdditionalProperties['max'] extends number ? GetLengthAction : never, ]> > : TBaseColumn extends Column ? TAdditionalProperties['max'] extends number ? v.SchemaWithPipe< [ GetArraySchema>, GetLengthAction['_']['data'][]>, ] > : GetArraySchema> : IsEnumDefined extends true ? v.EnumSchema<{ readonly [K in Assume[number]]: K }, undefined> : TColumnType extends 'PgGeometry' | 'PgPointTuple' ? v.TupleSchema<[v.NumberSchema, v.NumberSchema], undefined> : TColumnType extends 'PgLine' ? v.TupleSchema<[v.NumberSchema, v.NumberSchema, v.NumberSchema], undefined> : TData extends Date ? v.DateSchema : TData extends Buffer ? v.GenericSchema : TDataType extends 'array' ? v.ArraySchema< GetValibotPrimitiveType[number], '', { noPipe: true }>, undefined > : TData extends Record ? TColumnType extends 'PgJson' | 'PgJsonb' | 'MySqlJson' | 'SingleStoreJson' | 'SQLiteTextJson' | 'SQLiteBlobJson' ? v.GenericSchema : v.ObjectSchema< { readonly [K in keyof TData]: GetValibotPrimitiveType }, undefined > : TDataType extends 'json' ? v.GenericSchema : GetValibotPrimitiveType; type GetValibotPrimitiveType> = TData extends number ? TAdditionalProperties['noPipe'] extends true ? v.NumberSchema : v.SchemaWithPipe< RemoveNeverElements<[ v.NumberSchema, v.MinValueAction, v.MaxValueAction, TColumnType extends | 'MySqlTinyInt' | 'SingleStoreTinyInt' | 'PgSmallInt' | 'PgSmallSerial' | 'MySqlSmallInt' | 'MySqlMediumInt' | 'SingleStoreSmallInt' | 'SingleStoreMediumInt' | 'PgInteger' | 'PgSerial' | 'MySqlInt' | 'SingleStoreInt' | 'PgBigInt53' | 'PgBigSerial53' | 'MySqlBigInt53' | 'MySqlSerial' | 'SingleStoreBigInt53' | 'SingleStoreSerial' | 'SQLiteInteger' | 'MySqlYear' | 'SingleStoreYear' ? v.IntegerAction : never, ]> > : TData extends bigint ? TAdditionalProperties['noPipe'] extends true ? v.BigintSchema : v.SchemaWithPipe<[ v.BigintSchema, v.MinValueAction, v.MaxValueAction, ]> : TData extends boolean ? v.BooleanSchema : TData extends string ? TAdditionalProperties['max'] extends number ? v.SchemaWithPipe<[v.StringSchema, GetLengthAction]> : v.StringSchema : v.AnySchema; type HandleSelectColumn< TSchema extends v.GenericSchema, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? TSchema : v.NullableSchema; type HandleInsertColumn< TSchema extends v.GenericSchema, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? TColumn['_']['hasDefault'] extends true ? v.OptionalSchema : TSchema : v.OptionalSchema, undefined>; type HandleUpdateColumn< TSchema extends v.GenericSchema, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? v.OptionalSchema : v.OptionalSchema, undefined>; export type HandleColumn< TType extends 'select' | 'insert' | 'update', TColumn extends Column, > = GetValibotType< TColumn['_']['data'], TColumn['_']['dataType'], TColumn['_']['columnType'], TColumn['_']['enumValues'], HasBaseColumn extends true ? Assume : undefined, ExtractAdditionalProperties > extends infer TSchema extends v.GenericSchema ? TSchema extends v.AnySchema ? v.AnySchema : TType extends 'select' ? HandleSelectColumn : TType extends 'insert' ? HandleInsertColumn : TType extends 'update' ? HandleUpdateColumn : TSchema : v.AnySchema; ================================================ FILE: drizzle-valibot/src/constants.ts ================================================ export const CONSTANTS = { INT8_MIN: -128, INT8_MAX: 127, INT8_UNSIGNED_MAX: 255, INT16_MIN: -32768, INT16_MAX: 32767, INT16_UNSIGNED_MAX: 65535, INT24_MIN: -8388608, INT24_MAX: 8388607, INT24_UNSIGNED_MAX: 16777215, INT32_MIN: -2147483648, INT32_MAX: 2147483647, INT32_UNSIGNED_MAX: 4294967295, INT48_MIN: -140737488355328, INT48_MAX: 140737488355327, INT48_UNSIGNED_MAX: 281474976710655, INT64_MIN: -9223372036854775808n, INT64_MAX: 9223372036854775807n, INT64_UNSIGNED_MAX: 18446744073709551615n, }; ================================================ FILE: drizzle-valibot/src/index.ts ================================================ export { bufferSchema, jsonSchema, literalSchema } from './column.ts'; export * from './column.types.ts'; export * from './schema.ts'; export * from './schema.types.internal.ts'; export * from './schema.types.ts'; export * from './utils.ts'; ================================================ FILE: drizzle-valibot/src/schema.ts ================================================ import { Column, getTableColumns, getViewSelectedFields, is, isTable, isView, SQL } from 'drizzle-orm'; import type { Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import * as v from 'valibot'; import { columnToSchema, mapEnumValues } from './column.ts'; import type { Conditions } from './schema.types.internal.ts'; import type { CreateInsertSchema, CreateSelectSchema, CreateUpdateSchema } from './schema.types.ts'; import { isPgEnum } from './utils.ts'; function getColumns(tableLike: Table | View) { return isTable(tableLike) ? getTableColumns(tableLike) : getViewSelectedFields(tableLike); } function handleColumns( columns: Record, refinements: Record, conditions: Conditions, ): v.GenericSchema { const columnSchemas: Record = {}; for (const [key, selected] of Object.entries(columns)) { if (!is(selected, Column) && !is(selected, SQL) && !is(selected, SQL.Aliased) && typeof selected === 'object') { const columns = isTable(selected) || isView(selected) ? getColumns(selected) : selected; columnSchemas[key] = handleColumns(columns, refinements[key] ?? {}, conditions); continue; } const refinement = refinements[key]; if (refinement !== undefined && typeof refinement !== 'function') { columnSchemas[key] = refinement; continue; } const column = is(selected, Column) ? selected : undefined; const schema = column ? columnToSchema(column) : v.any(); const refined = typeof refinement === 'function' ? refinement(schema) : schema; if (conditions.never(column)) { continue; } else { columnSchemas[key] = refined; } if (column) { if (conditions.nullable(column)) { columnSchemas[key] = v.nullable(columnSchemas[key]!); } if (conditions.optional(column)) { columnSchemas[key] = v.optional(columnSchemas[key]!); } } } return v.object(columnSchemas) as any; } export const createSelectSchema: CreateSelectSchema = ( entity: Table | View | PgEnum<[string, ...string[]]>, refine?: Record, ) => { if (isPgEnum(entity)) { return v.enum(mapEnumValues(entity.enumValues)); } const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, { never: () => false, optional: () => false, nullable: (column) => !column.notNull, }) as any; }; export const createInsertSchema: CreateInsertSchema = ( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, { never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', optional: (column) => !column.notNull || (column.notNull && column.hasDefault), nullable: (column) => !column.notNull, }) as any; }; export const createUpdateSchema: CreateUpdateSchema = ( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, { never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', optional: () => true, nullable: (column) => !column.notNull, }) as any; }; ================================================ FILE: drizzle-valibot/src/schema.types.internal.ts ================================================ import type { Assume, Column, DrizzleTypeError, SelectedFieldsFlat, Simplify, Table, View } from 'drizzle-orm'; import type * as v from 'valibot'; import type { ExtractAdditionalProperties, GetValibotType, HandleColumn, HasBaseColumn } from './column.types.ts'; import type { ColumnIsGeneratedAlwaysAs, GetSelection } from './utils.ts'; export interface Conditions { never: (column?: Column) => boolean; optional: (column: Column) => boolean; nullable: (column: Column) => boolean; } type BuildRefineField = T extends v.GenericSchema ? ((schema: T) => v.GenericSchema) | v.GenericSchema : never; export type BuildRefine< TColumns extends Record, > = { [K in keyof TColumns as TColumns[K] extends Column | SelectedFieldsFlat | Table | View ? K : never]?: TColumns[K] extends Column ? BuildRefineField< GetValibotType< TColumns[K]['_']['data'], TColumns[K]['_']['dataType'], TColumns[K]['_']['columnType'], TColumns[K]['_']['enumValues'], HasBaseColumn extends true ? Assume : undefined, ExtractAdditionalProperties > > : BuildRefine>; }; type HandleRefinement< TType extends 'select' | 'insert' | 'update', TRefinement, TColumn extends Column, > = TRefinement extends (schema: any) => v.GenericSchema ? ( TColumn['_']['notNull'] extends true ? ReturnType : v.NullableSchema, undefined> ) extends infer TSchema ? TType extends 'update' ? v.OptionalSchema, undefined> : TSchema : v.AnySchema : TRefinement; type IsRefinementDefined< TRefinements extends Record | undefined, TKey extends string | symbol | number, > = TRefinements extends object ? TRefinements[TKey] extends v.GenericSchema | ((schema: any) => any) ? true : false : false; export type BuildSchema< TType extends 'select' | 'insert' | 'update', TColumns extends Record, TRefinements extends Record | undefined, > = v.ObjectSchema< Simplify< { readonly [K in keyof TColumns as ColumnIsGeneratedAlwaysAs extends true ? never : K]: TColumns[K] extends infer TColumn extends Column ? IsRefinementDefined> extends true ? Assume, v.GenericSchema> : HandleColumn : TColumns[K] extends infer TObject extends SelectedFieldsFlat | Table | View ? BuildSchema< TType, GetSelection, TRefinements extends object ? TRefinements[K & keyof TRefinements] : undefined > : v.AnySchema; } >, undefined >; export type NoUnknownKeys< TRefinement extends Record, TCompare extends Record, > = { [K in keyof TRefinement]: K extends keyof TCompare ? TRefinement[K] extends Record ? NoUnknownKeys : TRefinement[K] : DrizzleTypeError<`Found unknown key in refinement: "${K & string}"`>; }; ================================================ FILE: drizzle-valibot/src/schema.types.ts ================================================ import type { Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type * as v from 'valibot'; import type { EnumValuesToEnum } from './column.types.ts'; import type { BuildRefine, BuildSchema, NoUnknownKeys } from './schema.types.internal.ts'; export interface CreateSelectSchema { (table: TTable): BuildSchema<'select', TTable['_']['columns'], undefined>; < TTable extends Table, TRefine extends BuildRefine, >( table: TTable, refine?: NoUnknownKeys, ): BuildSchema<'select', TTable['_']['columns'], TRefine>; (view: TView): BuildSchema<'select', TView['_']['selectedFields'], undefined>; < TView extends View, TRefine extends BuildRefine, >( view: TView, refine: NoUnknownKeys, ): BuildSchema<'select', TView['_']['selectedFields'], TRefine>; >(enum_: TEnum): v.EnumSchema, undefined>; } export interface CreateInsertSchema { (table: TTable): BuildSchema<'insert', TTable['_']['columns'], undefined>; < TTable extends Table, TRefine extends BuildRefine>, >( table: TTable, refine?: NoUnknownKeys, ): BuildSchema<'insert', TTable['_']['columns'], TRefine>; } export interface CreateUpdateSchema { (table: TTable): BuildSchema<'update', TTable['_']['columns'], undefined>; < TTable extends Table, TRefine extends BuildRefine>, >( table: TTable, refine?: TRefine, ): BuildSchema<'update', TTable['_']['columns'], TRefine>; } ================================================ FILE: drizzle-valibot/src/utils.ts ================================================ import type { Column, SelectedFieldsFlat, Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type * as v from 'valibot'; import type { literalSchema } from './column.ts'; export function isColumnType(column: Column, columnTypes: string[]): column is T { return columnTypes.includes(column.columnType); } export function isWithEnum(column: Column): column is typeof column & { enumValues: [string, ...string[]] } { return 'enumValues' in column && Array.isArray(column.enumValues) && column.enumValues.length > 0; } export const isPgEnum: (entity: any) => entity is PgEnum<[string, ...string[]]> = isWithEnum as any; type Literal = v.InferOutput; export type Json = Literal | { [key: string]: any } | any[]; export type IsNever = [T] extends [never] ? true : false; export type IsEnumDefined = [string, ...string[]] extends TEnum ? false : undefined extends TEnum ? false : true; export type ColumnIsGeneratedAlwaysAs = TColumn extends Column ? TColumn['_']['identity'] extends 'always' ? true : TColumn['_']['generated'] extends { type: 'byDefault' } | undefined ? false : true : false; export type RemoveNever = { [K in keyof T as T[K] extends never ? never : K]: T[K]; }; export type RemoveNeverElements = T extends [infer First, ...infer Rest] ? IsNever extends true ? RemoveNeverElements : [First, ...RemoveNeverElements] : []; export type GetSelection | Table | View> = T extends Table ? T['_']['columns'] : T extends View ? T['_']['selectedFields'] : T; ================================================ FILE: drizzle-valibot/tests/mysql.test.ts ================================================ import { type Equal, sql } from 'drizzle-orm'; import { customType, int, json, mysqlSchema, mysqlTable, mysqlView, serial, text } from 'drizzle-orm/mysql-core'; import type { TopLevelCondition } from 'json-rules-engine'; import * as v from 'valibot'; import { test } from 'vitest'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = v.pipe( v.number(), v.minValue(CONSTANTS.INT32_MIN as number), v.maxValue(CONSTANTS.INT32_MAX as number), v.integer(), ); const serialNumberModeSchema = v.pipe( v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER as number), v.integer(), ); const textSchema = v.pipe(v.string(), v.maxLength(CONSTANTS.INT16_UNSIGNED_MAX as number)); test('table - select', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = mysqlSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = v.object({ id: v.optional(serialNumberModeSchema), name: textSchema, age: v.optional(v.nullable(intSchema)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = v.object({ id: v.optional(serialNumberModeSchema), name: v.optional(textSchema), age: v.optional(v.nullable(intSchema)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view qb - select', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = v.object({ id: serialNumberModeSchema, age: v.any() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view columns - select', (t) => { const view = mysqlView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view with nested fields - select', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = mysqlView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = v.object({ id: serialNumberModeSchema, nested: v.object({ name: textSchema, age: v.any() }), table: v.object({ id: serialNumberModeSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - select', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = v.object({ c1: v.nullable(intSchema), c2: intSchema, c3: v.nullable(intSchema), c4: intSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: intSchema, c3: v.optional(v.nullable(intSchema)), c4: v.optional(intSchema), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - update', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: v.optional(intSchema), c3: v.optional(v.nullable(intSchema)), c4: v.optional(intSchema), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.nullable(intSchema), c2: v.pipe(intSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = v.pipe(v.string(), v.minLength(1), v.maxLength(100)); const result = createSelectSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), c4: customTextSchema, }); const expected = v.object({ c1: v.nullable(intSchema), c2: v.pipe(intSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: v.pipe(intSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: v.optional(v.pipe(intSchema, v.maxValue(1000))), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine view - select', (t) => { const table = mysqlTable('test', { c1: int(), c2: int(), c3: int(), c4: int(), c5: int(), c6: int(), }); const view = mysqlView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), nested: { c5: (schema) => v.pipe(schema, v.maxValue(1000)), c6: v.pipe(v.string(), v.transform(Number)), }, table: { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }, }); const expected = v.object({ c1: v.nullable(intSchema), c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), c3: v.pipe(v.string(), v.transform(Number)), nested: v.object({ c4: v.nullable(intSchema), c5: v.nullable(v.pipe(intSchema, v.maxValue(1000))), c6: v.pipe(v.string(), v.transform(Number)), }), table: v.object({ c1: v.nullable(intSchema), c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), c3: v.pipe(v.string(), v.transform(Number)), c4: v.nullable(intSchema), c5: v.nullable(intSchema), c6: v.nullable(intSchema), }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('all data types', (t) => { const table = mysqlTable('test', ({ bigint, binary, boolean, char, date, datetime, decimal, double, float, int, json, mediumint, mysqlEnum, real, serial, smallint, text, time, timestamp, tinyint, varchar, varbinary, year, longtext, mediumtext, tinytext, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigint3: bigint({ unsigned: true, mode: 'number' }).notNull(), bigint4: bigint({ unsigned: true, mode: 'bigint' }).notNull(), binary: binary({ length: 10 }).notNull(), boolean: boolean().notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), decimal1: decimal().notNull(), decimal2: decimal({ unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), float2: float({ unsigned: true }).notNull(), int1: int().notNull(), int2: int({ unsigned: true }).notNull(), json: json().notNull(), mediumint1: mediumint().notNull(), mediumint2: mediumint({ unsigned: true }).notNull(), enum: mysqlEnum('enum', ['a', 'b', 'c']).notNull(), real: real().notNull(), serial: serial().notNull(), smallint1: smallint().notNull(), smallint2: smallint({ unsigned: true }).notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), tinyint1: tinyint().notNull(), tinyint2: tinyint({ unsigned: true }).notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), varbinary: varbinary({ length: 10 }).notNull(), year: year().notNull(), longtext1: longtext().notNull(), longtext2: longtext({ enum: ['a', 'b', 'c'] }).notNull(), mediumtext1: mediumtext().notNull(), mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), })); const result = createSelectSchema(table); const expected = v.object({ bigint1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), bigint2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), bigint3: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), bigint4: v.pipe(v.bigint(), v.minValue(0n as bigint), v.maxValue(CONSTANTS.INT64_UNSIGNED_MAX)), binary: v.string(), boolean: v.boolean(), char1: v.pipe(v.string(), v.length(10 as number)), char2: v.enum({ a: 'a', b: 'b', c: 'c' }), date1: v.date(), date2: v.string(), datetime1: v.date(), datetime2: v.string(), decimal1: v.string(), decimal2: v.string(), double1: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), double2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT48_UNSIGNED_MAX)), float1: v.pipe(v.number(), v.minValue(CONSTANTS.INT24_MIN), v.maxValue(CONSTANTS.INT24_MAX)), float2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT24_UNSIGNED_MAX)), int1: v.pipe(v.number(), v.minValue(CONSTANTS.INT32_MIN), v.maxValue(CONSTANTS.INT32_MAX), v.integer()), int2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT32_UNSIGNED_MAX), v.integer()), json: jsonSchema, mediumint1: v.pipe(v.number(), v.minValue(CONSTANTS.INT24_MIN), v.maxValue(CONSTANTS.INT24_MAX), v.integer()), mediumint2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT24_UNSIGNED_MAX), v.integer()), enum: v.enum({ a: 'a', b: 'b', c: 'c' }), real: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), serial: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), smallint1: v.pipe(v.number(), v.minValue(CONSTANTS.INT16_MIN), v.maxValue(CONSTANTS.INT16_MAX), v.integer()), smallint2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT16_UNSIGNED_MAX), v.integer()), text1: v.pipe(v.string(), v.maxLength(CONSTANTS.INT16_UNSIGNED_MAX)), text2: v.enum({ a: 'a', b: 'b', c: 'c' }), time: v.string(), timestamp1: v.date(), timestamp2: v.string(), tinyint1: v.pipe(v.number(), v.minValue(CONSTANTS.INT8_MIN), v.maxValue(CONSTANTS.INT8_MAX), v.integer()), tinyint2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT8_UNSIGNED_MAX), v.integer()), varchar1: v.pipe(v.string(), v.maxLength(10 as number)), varchar2: v.enum({ a: 'a', b: 'b', c: 'c' }), varbinary: v.string(), year: v.pipe(v.number(), v.minValue(1901 as number), v.maxValue(2155 as number), v.integer()), longtext1: v.pipe(v.string(), v.maxLength(CONSTANTS.INT32_UNSIGNED_MAX)), longtext2: v.enum({ a: 'a', b: 'b', c: 'c' }), mediumtext1: v.pipe(v.string(), v.maxLength(CONSTANTS.INT24_UNSIGNED_MAX)), mediumtext2: v.enum({ a: 'a', b: 'b', c: 'c' }), tinytext1: v.pipe(v.string(), v.maxLength(CONSTANTS.INT8_UNSIGNED_MAX)), tinytext2: v.enum({ a: 'a', b: 'b', c: 'c' }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: v.GenericSchema = v.custom(() => true); const table = mysqlTable('test', { json: json().$type(), }); const result = createSelectSchema(table); const expected = v.object({ json: v.nullable(TopLevelCondition), }); Expect, v.InferOutput>>(); } /* Disallow unknown keys in table refinement - select */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: v.string() }); } /* Disallow unknown keys in table refinement - insert */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: v.string() }); } /* Disallow unknown keys in table refinement - update */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: v.string() }); } /* Disallow unknown keys in view qb - select */ { const table = mysqlTable('test', { id: int() }); const view = mysqlView('test').as((qb) => qb.select().from(table)); const nestedSelect = mysqlView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: v.string() }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: v.string() } }); } /* Disallow unknown keys in view columns - select */ { const view = mysqlView('test', { id: int() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: v.string() }); } ================================================ FILE: drizzle-valibot/tests/pg.test.ts ================================================ import { type Equal, sql } from 'drizzle-orm'; import { customType, integer, json, jsonb, pgEnum, pgMaterializedView, pgSchema, pgTable, pgView, serial, text, } from 'drizzle-orm/pg-core'; import type { TopLevelCondition } from 'json-rules-engine'; import * as v from 'valibot'; import { test } from 'vitest'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; const integerSchema = v.pipe(v.number(), v.minValue(CONSTANTS.INT32_MIN), v.maxValue(CONSTANTS.INT32_MAX), v.integer()); const textSchema = v.string(); test('table - select', (t) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = v.object({ id: integerSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = pgSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = v.object({ id: integerSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = pgTable('test', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer(), }); const result = createInsertSchema(table); const expected = v.object({ name: textSchema, age: v.optional(v.nullable(integerSchema)) }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = pgTable('test', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer(), }); const result = createUpdateSchema(table); const expected = v.object({ name: v.optional(textSchema), age: v.optional(v.nullable(integerSchema)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view qb - select', (t) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = v.object({ id: integerSchema, age: v.any() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view columns - select', (t) => { const view = pgView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = v.object({ id: integerSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('materialized view qb - select', (t) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = v.object({ id: integerSchema, age: v.any() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('materialized view columns - select', (t) => { const view = pgView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = v.object({ id: integerSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view with nested fields - select', (t) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = v.object({ id: integerSchema, nested: v.object({ name: textSchema, age: v.any() }), table: v.object({ id: integerSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('enum - select', (t) => { const enum_ = pgEnum('test', ['a', 'b', 'c']); const result = createSelectSchema(enum_); const expected = v.enum({ a: 'a', b: 'b', c: 'c' }); expectEnumValues(t, expected).from(result); Expect>(); }); test('nullability - select', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), }); const result = createSelectSchema(table); const expected = v.object({ c1: v.nullable(integerSchema), c2: integerSchema, c3: v.nullable(integerSchema), c4: integerSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), c5: integer().generatedAlwaysAs(1), c6: integer().generatedAlwaysAsIdentity(), c7: integer().generatedByDefaultAsIdentity(), }); const result = createInsertSchema(table); const expected = v.object({ c1: v.optional(v.nullable(integerSchema)), c2: integerSchema, c3: v.optional(v.nullable(integerSchema)), c4: v.optional(integerSchema), c7: v.optional(integerSchema), }); expectSchemaShape(t, expected).from(result); }); test('nullability - update', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), c5: integer().generatedAlwaysAs(1), c6: integer().generatedAlwaysAsIdentity(), c7: integer().generatedByDefaultAsIdentity(), }); const result = createUpdateSchema(table); const expected = v.object({ c1: v.optional(v.nullable(integerSchema)), c2: v.optional(integerSchema), c3: v.optional(v.nullable(integerSchema)), c4: v.optional(integerSchema), c7: v.optional(integerSchema), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.nullable(integerSchema), c2: v.pipe(integerSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: customText(), }); const customTextSchema = v.pipe(v.string(), v.minLength(1), v.maxLength(100)); const result = createSelectSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), c4: customTextSchema, }); const expected = v.object({ c1: v.nullable(integerSchema), c2: v.pipe(integerSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: integer().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.optional(v.nullable(integerSchema)), c2: v.pipe(integerSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: integer().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.optional(v.nullable(integerSchema)), c2: v.optional(v.pipe(integerSchema, v.maxValue(1000))), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine view - select', (t) => { const table = pgTable('test', { c1: integer(), c2: integer(), c3: integer(), c4: integer(), c5: integer(), c6: integer(), }); const view = pgView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), nested: { c5: (schema) => v.pipe(schema, v.maxValue(1000)), c6: v.pipe(v.string(), v.transform(Number)), }, table: { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }, }); const expected = v.object({ c1: v.nullable(integerSchema), c2: v.nullable(v.pipe(integerSchema, v.maxValue(1000))), c3: v.pipe(v.string(), v.transform(Number)), nested: v.object({ c4: v.nullable(integerSchema), c5: v.nullable(v.pipe(integerSchema, v.maxValue(1000))), c6: v.pipe(v.string(), v.transform(Number)), }), table: v.object({ c1: v.nullable(integerSchema), c2: v.nullable(v.pipe(integerSchema, v.maxValue(1000))), c3: v.pipe(v.string(), v.transform(Number)), c4: v.nullable(integerSchema), c5: v.nullable(integerSchema), c6: v.nullable(integerSchema), }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('all data types', (t) => { const table = pgTable('test', ({ bigint, bigserial, bit, boolean, date, char, cidr, doublePrecision, geometry, halfvec, inet, integer, interval, json, jsonb, line, macaddr, macaddr8, numeric, point, real, serial, smallint, smallserial, text, sparsevec, time, timestamp, uuid, varchar, vector, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigserial1: bigserial({ mode: 'number' }).notNull(), bigserial2: bigserial({ mode: 'bigint' }).notNull(), bit: bit({ dimensions: 5 }).notNull(), boolean: boolean().notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), cidr: cidr().notNull(), doublePrecision: doublePrecision().notNull(), geometry1: geometry({ type: 'point', mode: 'tuple' }).notNull(), geometry2: geometry({ type: 'point', mode: 'xy' }).notNull(), halfvec: halfvec({ dimensions: 3 }).notNull(), inet: inet().notNull(), integer: integer().notNull(), interval: interval().notNull(), json: json().notNull(), jsonb: jsonb().notNull(), line1: line({ mode: 'abc' }).notNull(), line2: line({ mode: 'tuple' }).notNull(), macaddr: macaddr().notNull(), macaddr8: macaddr8().notNull(), numeric: numeric().notNull(), point1: point({ mode: 'xy' }).notNull(), point2: point({ mode: 'tuple' }).notNull(), real: real().notNull(), serial: serial().notNull(), smallint: smallint().notNull(), smallserial: smallserial().notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), sparsevec: sparsevec({ dimensions: 3 }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), uuid: uuid().notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), vector: vector({ dimensions: 3 }).notNull(), array1: integer().array().notNull(), array2: integer().array().array(2).notNull(), array3: varchar({ length: 10 }).array().array(2).notNull(), })); const result = createSelectSchema(table); const expected = v.object({ bigint1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), bigint2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), bigserial1: v.pipe( v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer(), ), bigserial2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), bit: v.pipe(v.string(), v.regex(/^[01]+$/), v.maxLength(5 as number)), boolean: v.boolean(), date1: v.date(), date2: v.string(), char1: v.pipe(v.string(), v.length(10 as number)), char2: v.enum({ a: 'a', b: 'b', c: 'c' }), cidr: v.string(), doublePrecision: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), geometry1: v.tuple([v.number(), v.number()]), geometry2: v.object({ x: v.number(), y: v.number() }), halfvec: v.pipe(v.array(v.number()), v.length(3 as number)), inet: v.string(), integer: v.pipe(v.number(), v.minValue(CONSTANTS.INT32_MIN), v.maxValue(CONSTANTS.INT32_MAX), v.integer()), interval: v.string(), json: jsonSchema, jsonb: jsonSchema, line1: v.object({ a: v.number(), b: v.number(), c: v.number() }), line2: v.tuple([v.number(), v.number(), v.number()]), macaddr: v.string(), macaddr8: v.string(), numeric: v.string(), point1: v.object({ x: v.number(), y: v.number() }), point2: v.tuple([v.number(), v.number()]), real: v.pipe(v.number(), v.minValue(CONSTANTS.INT24_MIN), v.maxValue(CONSTANTS.INT24_MAX)), serial: v.pipe(v.number(), v.minValue(CONSTANTS.INT32_MIN), v.maxValue(CONSTANTS.INT32_MAX), v.integer()), smallint: v.pipe(v.number(), v.minValue(CONSTANTS.INT16_MIN), v.maxValue(CONSTANTS.INT16_MAX), v.integer()), smallserial: v.pipe(v.number(), v.minValue(CONSTANTS.INT16_MIN), v.maxValue(CONSTANTS.INT16_MAX), v.integer()), text1: v.string(), text2: v.enum({ a: 'a', b: 'b', c: 'c' }), sparsevec: v.string(), time: v.string(), timestamp1: v.date(), timestamp2: v.string(), uuid: v.pipe(v.string(), v.uuid()), varchar1: v.pipe(v.string(), v.maxLength(10 as number)), varchar2: v.enum({ a: 'a', b: 'b', c: 'c' }), vector: v.pipe(v.array(v.number()), v.length(3 as number)), array1: v.array(integerSchema), array2: v.pipe(v.array(v.array(integerSchema)), v.length(2 as number)), array3: v.pipe(v.array(v.array(v.pipe(v.string(), v.maxLength(10 as number)))), v.length(2 as number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: v.GenericSchema = v.custom(() => true); const table = pgTable('test', { json: json().$type().notNull(), jsonb: jsonb().$type(), }); const result = createSelectSchema(table); const expected = v.object({ json: TopLevelCondition, jsonb: v.nullable(TopLevelCondition), }); Expect, v.InferOutput>>(); } /* Disallow unknown keys in table refinement - select */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createSelectSchema(table, { unknown: v.string() }); } /* Disallow unknown keys in table refinement - insert */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createInsertSchema(table, { unknown: v.string() }); } /* Disallow unknown keys in table refinement - update */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createUpdateSchema(table, { unknown: v.string() }); } /* Disallow unknown keys in view qb - select */ { const table = pgTable('test', { id: integer() }); const view = pgView('test').as((qb) => qb.select().from(table)); const mView = pgMaterializedView('test').as((qb) => qb.select().from(table)); const nestedSelect = pgView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: v.string() }); // @ts-expect-error createSelectSchema(mView, { unknown: v.string() }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: v.string() } }); } /* Disallow unknown keys in view columns - select */ { const view = pgView('test', { id: integer() }).as(sql``); const mView = pgView('test', { id: integer() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: v.string() }); // @ts-expect-error createSelectSchema(mView, { unknown: v.string() }); } ================================================ FILE: drizzle-valibot/tests/singlestore.test.ts ================================================ import { type Equal } from 'drizzle-orm'; import { customType, int, json, serial, singlestoreSchema, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import type { TopLevelCondition } from 'json-rules-engine'; import * as v from 'valibot'; import { test } from 'vitest'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = v.pipe( v.number(), v.minValue(CONSTANTS.INT32_MIN as number), v.maxValue(CONSTANTS.INT32_MAX as number), v.integer(), ); const serialNumberModeSchema = v.pipe( v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER as number), v.integer(), ); const textSchema = v.pipe(v.string(), v.maxLength(CONSTANTS.INT16_UNSIGNED_MAX as number)); test('table - select', (t) => { const table = singlestoreTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = singlestoreSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = singlestoreTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = v.object({ id: v.optional(serialNumberModeSchema), name: textSchema, age: v.optional(v.nullable(intSchema)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = singlestoreTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = v.object({ id: v.optional(serialNumberModeSchema), name: v.optional(textSchema), age: v.optional(v.nullable(intSchema)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); // TODO: SingleStore doesn't support views yet. Add these tests when they're added // test('view qb - select', (t) => { // const table = singlestoreTable('test', { // id: serial().primaryKey(), // name: text().notNull(), // }); // const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); // const result = createSelectSchema(view); // const expected = v.object({ id: serialNumberModeSchema, age: v.any() }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); // test('view columns - select', (t) => { // const view = mysqlView('test', { // id: serial().primaryKey(), // name: text().notNull(), // }).as(sql``); // const result = createSelectSchema(view); // const expected = v.object({ id: serialNumberModeSchema, name: textSchema }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); // test('view with nested fields - select', (t) => { // const table = singlestoreTable('test', { // id: serial().primaryKey(), // name: text().notNull(), // }); // const view = mysqlView('test').as((qb) => // qb.select({ // id: table.id, // nested: { // name: table.name, // age: sql``.as('age'), // }, // table, // }).from(table) // ); // const result = createSelectSchema(view); // const expected = v.object({ // id: serialNumberModeSchema, // nested: v.object({ name: textSchema, age: v.any() }), // table: v.object({ id: serialNumberModeSchema, name: textSchema }), // }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); test('nullability - select', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = v.object({ c1: v.nullable(intSchema), c2: intSchema, c3: v.nullable(intSchema), c4: intSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: intSchema, c3: v.optional(v.nullable(intSchema)), c4: v.optional(intSchema), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - update', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: v.optional(intSchema), c3: v.optional(v.nullable(intSchema)), c4: v.optional(intSchema), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.nullable(intSchema), c2: v.pipe(intSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = v.pipe(v.string(), v.minLength(1), v.maxLength(100)); const result = createSelectSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), c4: customTextSchema, }); const expected = v.object({ c1: v.nullable(intSchema), c2: v.pipe(intSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: v.pipe(intSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: v.optional(v.pipe(intSchema, v.maxValue(1000))), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); // test('refine view - select', (t) => { // const table = singlestoreTable('test', { // c1: int(), // c2: int(), // c3: int(), // c4: int(), // c5: int(), // c6: int(), // }); // const view = mysqlView('test').as((qb) => // qb.select({ // c1: table.c1, // c2: table.c2, // c3: table.c3, // nested: { // c4: table.c4, // c5: table.c5, // c6: table.c6, // }, // table, // }).from(table) // ); // const result = createSelectSchema(view, { // c2: (schema) => v.pipe(schema, v.maxValue(1000)), // c3: v.pipe(v.string(), v.transform(Number)), // nested: { // c5: (schema) => v.pipe(schema, v.maxValue(1000)), // c6: v.pipe(v.string(), v.transform(Number)), // }, // table: { // c2: (schema) => v.pipe(schema, v.maxValue(1000)), // c3: v.pipe(v.string(), v.transform(Number)), // }, // }); // const expected = v.object({ // c1: v.nullable(intSchema), // c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), // c3: v.pipe(v.string(), v.transform(Number)), // nested: v.object({ // c4: v.nullable(intSchema), // c5: v.nullable(v.pipe(intSchema, v.maxValue(1000))), // c6: v.pipe(v.string(), v.transform(Number)), // }), // table: v.object({ // c1: v.nullable(intSchema), // c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), // c3: v.pipe(v.string(), v.transform(Number)), // c4: v.nullable(intSchema), // c5: v.nullable(intSchema), // c6: v.nullable(intSchema), // }), // }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); test('all data types', (t) => { const table = singlestoreTable('test', ({ bigint, binary, boolean, char, date, datetime, decimal, double, float, int, json, mediumint, singlestoreEnum, real, serial, smallint, text, time, timestamp, tinyint, varchar, varbinary, year, longtext, mediumtext, tinytext, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigint3: bigint({ unsigned: true, mode: 'number' }).notNull(), bigint4: bigint({ unsigned: true, mode: 'bigint' }).notNull(), binary: binary({ length: 10 }).notNull(), boolean: boolean().notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), decimal1: decimal().notNull(), decimal2: decimal({ unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), float2: float({ unsigned: true }).notNull(), int1: int().notNull(), int2: int({ unsigned: true }).notNull(), json: json().notNull(), mediumint1: mediumint().notNull(), mediumint2: mediumint({ unsigned: true }).notNull(), enum: singlestoreEnum('enum', ['a', 'b', 'c']).notNull(), real: real().notNull(), serial: serial().notNull(), smallint1: smallint().notNull(), smallint2: smallint({ unsigned: true }).notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), tinyint1: tinyint().notNull(), tinyint2: tinyint({ unsigned: true }).notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), varbinary: varbinary({ length: 10 }).notNull(), year: year().notNull(), longtext1: longtext().notNull(), longtext2: longtext({ enum: ['a', 'b', 'c'] }).notNull(), mediumtext1: mediumtext().notNull(), mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), })); const result = createSelectSchema(table); const expected = v.object({ bigint1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), bigint2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), bigint3: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), bigint4: v.pipe(v.bigint(), v.minValue(0n as bigint), v.maxValue(CONSTANTS.INT64_UNSIGNED_MAX)), binary: v.string(), boolean: v.boolean(), char1: v.pipe(v.string(), v.length(10 as number)), char2: v.enum({ a: 'a', b: 'b', c: 'c' }), date1: v.date(), date2: v.string(), datetime1: v.date(), datetime2: v.string(), decimal1: v.string(), decimal2: v.string(), double1: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), double2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT48_UNSIGNED_MAX)), float1: v.pipe(v.number(), v.minValue(CONSTANTS.INT24_MIN), v.maxValue(CONSTANTS.INT24_MAX)), float2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT24_UNSIGNED_MAX)), int1: v.pipe(v.number(), v.minValue(CONSTANTS.INT32_MIN), v.maxValue(CONSTANTS.INT32_MAX), v.integer()), int2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT32_UNSIGNED_MAX), v.integer()), json: jsonSchema, mediumint1: v.pipe(v.number(), v.minValue(CONSTANTS.INT24_MIN), v.maxValue(CONSTANTS.INT24_MAX), v.integer()), mediumint2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT24_UNSIGNED_MAX), v.integer()), enum: v.enum({ a: 'a', b: 'b', c: 'c' }), real: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), serial: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), smallint1: v.pipe(v.number(), v.minValue(CONSTANTS.INT16_MIN), v.maxValue(CONSTANTS.INT16_MAX), v.integer()), smallint2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT16_UNSIGNED_MAX), v.integer()), text1: v.pipe(v.string(), v.maxLength(CONSTANTS.INT16_UNSIGNED_MAX)), text2: v.enum({ a: 'a', b: 'b', c: 'c' }), time: v.string(), timestamp1: v.date(), timestamp2: v.string(), tinyint1: v.pipe(v.number(), v.minValue(CONSTANTS.INT8_MIN), v.maxValue(CONSTANTS.INT8_MAX), v.integer()), tinyint2: v.pipe(v.number(), v.minValue(0 as number), v.maxValue(CONSTANTS.INT8_UNSIGNED_MAX), v.integer()), varchar1: v.pipe(v.string(), v.maxLength(10 as number)), varchar2: v.enum({ a: 'a', b: 'b', c: 'c' }), varbinary: v.string(), year: v.pipe(v.number(), v.minValue(1901 as number), v.maxValue(2155 as number), v.integer()), longtext1: v.pipe(v.string(), v.maxLength(CONSTANTS.INT32_UNSIGNED_MAX)), longtext2: v.enum({ a: 'a', b: 'b', c: 'c' }), mediumtext1: v.pipe(v.string(), v.maxLength(CONSTANTS.INT24_UNSIGNED_MAX)), mediumtext2: v.enum({ a: 'a', b: 'b', c: 'c' }), tinytext1: v.pipe(v.string(), v.maxLength(CONSTANTS.INT8_UNSIGNED_MAX)), tinytext2: v.enum({ a: 'a', b: 'b', c: 'c' }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: v.GenericSchema = v.custom(() => true); const table = singlestoreTable('test', { json: json().$type(), }); const result = createSelectSchema(table); const expected = v.object({ json: v.nullable(TopLevelCondition), }); Expect, v.InferOutput>>(); } /* Disallow unknown keys in table refinement - select */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: v.string() }); } /* Disallow unknown keys in table refinement - insert */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: v.string() }); } /* Disallow unknown keys in table refinement - update */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: v.string() }); } // /* Disallow unknown keys in view qb - select */ { // const table = singlestoreTable('test', { id: int() }); // const view = mysqlView('test').as((qb) => qb.select().from(table)); // const nestedSelect = mysqlView('test').as((qb) => qb.select({ table }).from(table)); // // @ts-expect-error // createSelectSchema(view, { unknown: v.string() }); // // @ts-expect-error // createSelectSchema(nestedSelect, { table: { unknown: v.string() } }); // } // /* Disallow unknown keys in view columns - select */ { // const view = mysqlView('test', { id: int() }).as(sql``); // // @ts-expect-error // createSelectSchema(view, { unknown: v.string() }); // } ================================================ FILE: drizzle-valibot/tests/sqlite.test.ts ================================================ import { type Equal, sql } from 'drizzle-orm'; import { blob, customType, int, sqliteTable, sqliteView, text } from 'drizzle-orm/sqlite-core'; import type { TopLevelCondition } from 'json-rules-engine'; import * as v from 'valibot'; import { test } from 'vitest'; import { bufferSchema, jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = v.pipe( v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer(), ); const textSchema = v.string(); test('table - select', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }); const result = createSelectSchema(table); const expected = v.object({ id: intSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = v.object({ id: v.optional(intSchema), name: textSchema, age: v.optional(v.nullable(intSchema)) }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = v.object({ id: v.optional(intSchema), name: v.optional(textSchema), age: v.optional(v.nullable(intSchema)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view qb - select', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }); const view = sqliteView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = v.object({ id: intSchema, age: v.any() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view columns - select', (t) => { const view = sqliteView('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = v.object({ id: intSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view with nested fields - select', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }); const view = sqliteView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = v.object({ id: intSchema, nested: v.object({ name: textSchema, age: v.any() }), table: v.object({ id: intSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - select', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = v.object({ c1: v.nullable(intSchema), c2: intSchema, c3: v.nullable(intSchema), c4: intSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: intSchema, c3: v.optional(v.nullable(intSchema)), c4: v.optional(intSchema), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - update', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: v.optional(intSchema), c3: v.optional(v.nullable(intSchema)), c4: v.optional(intSchema), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.nullable(intSchema), c2: v.pipe(intSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = v.pipe(v.string(), v.minLength(1), v.maxLength(100)); const result = createSelectSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), c4: customTextSchema, }); const expected = v.object({ c1: v.nullable(intSchema), c2: v.pipe(intSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: v.pipe(intSchema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }); const expected = v.object({ c1: v.optional(v.nullable(intSchema)), c2: v.optional(v.pipe(intSchema, v.maxValue(1000))), c3: v.pipe(v.string(), v.transform(Number)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine view - select', (t) => { const table = sqliteTable('test', { c1: int(), c2: int(), c3: int(), c4: int(), c5: int(), c6: int(), }); const view = sqliteView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), nested: { c5: (schema) => v.pipe(schema, v.maxValue(1000)), c6: v.pipe(v.string(), v.transform(Number)), }, table: { c2: (schema) => v.pipe(schema, v.maxValue(1000)), c3: v.pipe(v.string(), v.transform(Number)), }, }); const expected = v.object({ c1: v.nullable(intSchema), c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), c3: v.pipe(v.string(), v.transform(Number)), nested: v.object({ c4: v.nullable(intSchema), c5: v.nullable(v.pipe(intSchema, v.maxValue(1000))), c6: v.pipe(v.string(), v.transform(Number)), }), table: v.object({ c1: v.nullable(intSchema), c2: v.nullable(v.pipe(intSchema, v.maxValue(1000))), c3: v.pipe(v.string(), v.transform(Number)), c4: v.nullable(intSchema), c5: v.nullable(intSchema), c6: v.nullable(intSchema), }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('all data types', (t) => { const table = sqliteTable('test', ({ blob, integer, numeric, real, text, }) => ({ blob1: blob({ mode: 'buffer' }).notNull(), blob2: blob({ mode: 'bigint' }).notNull(), blob3: blob({ mode: 'json' }).notNull(), integer1: integer({ mode: 'number' }).notNull(), integer2: integer({ mode: 'boolean' }).notNull(), integer3: integer({ mode: 'timestamp' }).notNull(), integer4: integer({ mode: 'timestamp_ms' }).notNull(), numeric: numeric().notNull(), real: real().notNull(), text1: text({ mode: 'text' }).notNull(), text2: text({ mode: 'text', length: 10 }).notNull(), text3: text({ mode: 'text', enum: ['a', 'b', 'c'] }).notNull(), text4: text({ mode: 'json' }).notNull(), })); const result = createSelectSchema(table); const expected = v.object({ blob1: bufferSchema, blob2: v.pipe(v.bigint(), v.minValue(CONSTANTS.INT64_MIN), v.maxValue(CONSTANTS.INT64_MAX)), blob3: jsonSchema, integer1: v.pipe(v.number(), v.minValue(Number.MIN_SAFE_INTEGER), v.maxValue(Number.MAX_SAFE_INTEGER), v.integer()), integer2: v.boolean(), integer3: v.date(), integer4: v.date(), numeric: v.string(), real: v.pipe(v.number(), v.minValue(CONSTANTS.INT48_MIN), v.maxValue(CONSTANTS.INT48_MAX)), text1: v.string(), text2: v.pipe(v.string(), v.maxLength(10 as number)), text3: v.enum({ a: 'a', b: 'b', c: 'c' }), text4: jsonSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: v.GenericSchema = v.custom(() => true); const table = sqliteTable('test', { json1: text({ mode: 'json' }).$type().notNull(), json2: blob({ mode: 'json' }).$type(), }); const result = createSelectSchema(table); const expected = v.object({ json1: TopLevelCondition, json2: v.nullable(TopLevelCondition), }); Expect, v.InferOutput>>(); } /* Disallow unknown keys in table refinement - select */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: v.string() }); } /* Disallow unknown keys in table refinement - insert */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: v.string() }); } /* Disallow unknown keys in table refinement - update */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: v.string() }); } /* Disallow unknown keys in view qb - select */ { const table = sqliteTable('test', { id: int() }); const view = sqliteView('test').as((qb) => qb.select().from(table)); const nestedSelect = sqliteView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: v.string() }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: v.string() } }); } /* Disallow unknown keys in view columns - select */ { const view = sqliteView('test', { id: int() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: v.string() }); } ================================================ FILE: drizzle-valibot/tests/tsconfig.json ================================================ { "extends": "../tsconfig.json", "compilerOptions": { "module": "esnext", "target": "esnext", "noEmit": true, "rootDir": "..", "outDir": "./.cache" }, "include": [".", "../src"] } ================================================ FILE: drizzle-valibot/tests/utils.ts ================================================ import type * as v from 'valibot'; import { expect, type TaskContext } from 'vitest'; function onlySpecifiedKeys(obj: Record, keys: string[]) { return Object.fromEntries(Object.entries(obj).filter(([key]) => keys.includes(key))); } export function expectSchemaShape>(t: TaskContext, expected: T) { return { from(actual: T) { expect(Object.keys(actual.entries)).toStrictEqual(Object.keys(expected.entries)); for (const key of Object.keys(actual.entries)) { const actualEntry = actual.entries[key] as any; const expectedEntry = expected.entries[key] as any; const keys = ['kind', 'type', 'expects', 'async', 'message']; actualEntry.pipe ??= []; expectedEntry.pipe ??= []; expect(onlySpecifiedKeys(actualEntry, keys)).toStrictEqual(onlySpecifiedKeys(expectedEntry, keys)); expect(actualEntry.pipe.length).toStrictEqual(expectedEntry.pipe.length); for (let i = 0; i < actualEntry.pipe.length; i++) { const actualPipeElement = actualEntry.pipe[i]; const expectedPipeElement = expectedEntry.pipe[i]; expect(onlySpecifiedKeys(actualPipeElement, keys)).toStrictEqual( onlySpecifiedKeys(expectedPipeElement, keys), ); } } }, }; } export function expectEnumValues>(t: TaskContext, expected: T) { return { from(actual: T) { expect(actual.enum).toStrictEqual(expected.enum); }, }; } export function Expect<_ extends true>() {} ================================================ FILE: drizzle-valibot/tsconfig.build.json ================================================ { "extends": "./tsconfig.json", "compilerOptions": { "rootDir": "src" }, "include": ["src"] } ================================================ FILE: drizzle-valibot/tsconfig.json ================================================ { "extends": "../tsconfig.json", "compilerOptions": { "outDir": "dist", "baseUrl": ".", "declaration": true, "noEmit": true, "paths": { "~/*": ["src/*"] } }, "include": ["src", "*.ts"] } ================================================ FILE: drizzle-valibot/vitest.config.ts ================================================ import tsconfigPaths from 'vite-tsconfig-paths'; import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { include: [ 'tests/**/*.test.ts', ], exclude: [ 'tests/bun/**/*', ], typecheck: { tsconfig: 'tsconfig.json', }, testTimeout: 100000, hookTimeout: 100000, isolate: false, poolOptions: { threads: { singleThread: true, }, }, }, plugins: [tsconfigPaths()], }); ================================================ FILE: drizzle-zod/README.md ================================================

drizzle-zod npm

npm npm bundle size Discord License
If you know SQL, you know Drizzle ORM

`drizzle-zod` is a plugin for [Drizzle ORM](https://github.com/drizzle-team/drizzle-orm) that allows you to generate [Zod](https://zod.dev/) schemas from Drizzle ORM schemas. **Features** - Create a select schema for tables, views and enums. - Create insert and update schemas for tables. - Supports all dialects: PostgreSQL, MySQL and SQLite. # Usage ```ts import { pgEnum, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core'; import { createInsertSchema, createSelectSchema } from 'drizzle-zod'; import { z } from 'zod'; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), role: text('role', { enum: ['admin', 'user'] }).notNull(), createdAt: timestamp('created_at').notNull().defaultNow(), }); // Schema for inserting a user - can be used to validate API requests const insertUserSchema = createInsertSchema(users); // Schema for updating a user - can be used to validate API requests const updateUserSchema = createUpdateSchema(users); // Schema for selecting a user - can be used to validate API responses const selectUserSchema = createSelectSchema(users); // Overriding the fields const insertUserSchema = createInsertSchema(users, { role: z.string(), }); // Refining the fields - useful if you want to change the fields before they become nullable/optional in the final schema const insertUserSchema = createInsertSchema(users, { id: (schema) => schema.positive(), email: (schema) => schema.email(), role: z.string(), }); // Usage const user = insertUserSchema.parse({ name: 'John Doe', email: 'johndoe@test.com', role: 'admin', }); // Zod schema type is also inferred from the table schema, so you have full type safety const requestSchema = insertUserSchema.pick({ name: true, email: true }); ``` ================================================ FILE: drizzle-zod/package.json ================================================ { "name": "drizzle-zod", "version": "0.8.3", "description": "Generate Zod schemas from Drizzle ORM schemas", "type": "module", "scripts": { "build": "tsx scripts/build.ts", "b": "pnpm build", "test:types": "cd tests && tsc", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", "publish": "npm publish package.tgz", "test": "vitest run" }, "exports": { ".": { "import": { "types": "./index.d.mts", "default": "./index.mjs" }, "require": { "types": "./index.d.cjs", "default": "./index.cjs" }, "types": "./index.d.ts", "default": "./index.mjs" } }, "main": "./index.cjs", "module": "./index.mjs", "types": "./index.d.ts", "publishConfig": { "provenance": true }, "repository": { "type": "git", "url": "git+https://github.com/drizzle-team/drizzle-orm.git" }, "ava": { "files": [ "tests/**/*.test.ts", "!tests/bun/**/*" ], "extensions": { "ts": "module" } }, "keywords": [ "zod", "validate", "validation", "schema", "drizzle", "orm", "pg", "mysql", "postgresql", "postgres", "sqlite", "database", "sql", "typescript", "ts" ], "author": "Drizzle Team", "license": "Apache-2.0", "peerDependencies": { "drizzle-orm": ">=0.36.0", "zod": "^3.25.0 || ^4.0.0" }, "devDependencies": { "@rollup/plugin-typescript": "^11.1.0", "@types/node": "^18.15.10", "cpy": "^10.1.0", "drizzle-orm": "link:../drizzle-orm/dist", "json-rules-engine": "^7.3.1", "rimraf": "^5.0.0", "rollup": "^3.29.5", "vite-tsconfig-paths": "^4.3.2", "vitest": "^3.1.3", "zod": "3.25.1", "zx": "^7.2.2" } } ================================================ FILE: drizzle-zod/rollup.config.ts ================================================ import typescript from '@rollup/plugin-typescript'; import { defineConfig } from 'rollup'; export default defineConfig([ { input: 'src/index.ts', output: [ { format: 'esm', dir: 'dist', entryFileNames: '[name].mjs', chunkFileNames: '[name]-[hash].mjs', sourcemap: true, }, { format: 'cjs', dir: 'dist', entryFileNames: '[name].cjs', chunkFileNames: '[name]-[hash].cjs', sourcemap: true, }, ], external: [ /^drizzle-orm\/?/, 'zod', ], plugins: [ typescript({ tsconfig: 'tsconfig.build.json', }), ], }, ]); ================================================ FILE: drizzle-zod/scripts/build.ts ================================================ #!/usr/bin/env -S pnpm tsx import 'zx/globals'; import cpy from 'cpy'; await fs.remove('dist'); await $`rollup --config rollup.config.ts --configPlugin typescript`; await $`resolve-tspaths`; await fs.copy('README.md', 'dist/README.md'); await cpy('dist/**/*.d.ts', 'dist', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.mts'), }); await cpy('dist/**/*.d.ts', 'dist', { rename: (basename) => basename.replace(/\.d\.ts$/, '.d.cts'), }); await fs.copy('package.json', 'dist/package.json'); await $`scripts/fix-imports.ts`; ================================================ FILE: drizzle-zod/scripts/fix-imports.ts ================================================ #!/usr/bin/env -S pnpm tsx import 'zx/globals'; import path from 'node:path'; import { parse, print, visit } from 'recast'; import parser from 'recast/parsers/typescript'; function resolvePathAlias(importPath: string, file: string) { if (importPath.startsWith('~/')) { const relativePath = path.relative(path.dirname(file), path.resolve('dist.new', importPath.slice(2))); importPath = relativePath.startsWith('.') ? relativePath : './' + relativePath; } return importPath; } function fixImportPath(importPath: string, file: string, ext: string) { importPath = resolvePathAlias(importPath, file); if (!/\..*\.(js|ts)$/.test(importPath)) { return importPath; } return importPath.replace(/\.(js|ts)$/, ext); } const cjsFiles = await glob('dist/**/*.{cjs,d.cts}'); await Promise.all(cjsFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.cjs'); } this.traverse(path); }, visitCallExpression(path) { if (path.value.callee.type === 'Identifier' && path.value.callee.name === 'require') { path.value.arguments[0].value = fixImportPath(path.value.arguments[0].value, file, '.cjs'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = resolvePathAlias(path.value.argument.value, file); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.cjs'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); let esmFiles = await glob('dist/**/*.{js,d.ts}'); await Promise.all(esmFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.js'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = fixImportPath(path.value.argument.value, file, '.js'); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.js'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); esmFiles = await glob('dist/**/*.{mjs,d.mts}'); await Promise.all(esmFiles.map(async (file) => { const code = parse(await fs.readFile(file, 'utf8'), { parser }); visit(code, { visitImportDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); this.traverse(path); }, visitExportAllDeclaration(path) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); this.traverse(path); }, visitExportNamedDeclaration(path) { if (path.value.source) { path.value.source.value = fixImportPath(path.value.source.value, file, '.mjs'); } this.traverse(path); }, visitTSImportType(path) { path.value.argument.value = fixImportPath(path.value.argument.value, file, '.mjs'); this.traverse(path); }, visitAwaitExpression(path) { if (print(path.value).code.startsWith(`await import("./`)) { path.value.argument.arguments[0].value = fixImportPath(path.value.argument.arguments[0].value, file, '.mjs'); } this.traverse(path); }, }); await fs.writeFile(file, print(code).code); })); ================================================ FILE: drizzle-zod/src/column.ts ================================================ import type { Column, ColumnBaseConfig } from 'drizzle-orm'; import type { MySqlBigInt53, MySqlChar, MySqlDouble, MySqlFloat, MySqlInt, MySqlMediumInt, MySqlReal, MySqlSerial, MySqlSmallInt, MySqlText, MySqlTinyInt, MySqlVarChar, MySqlYear, } from 'drizzle-orm/mysql-core'; import type { PgArray, PgBigInt53, PgBigSerial53, PgBinaryVector, PgChar, PgDoublePrecision, PgGeometry, PgGeometryObject, PgHalfVector, PgInteger, PgLineABC, PgLineTuple, PgPointObject, PgPointTuple, PgReal, PgSerial, PgSmallInt, PgSmallSerial, PgUUID, PgVarchar, PgVector, } from 'drizzle-orm/pg-core'; import type { SingleStoreBigInt53, SingleStoreChar, SingleStoreDouble, SingleStoreFloat, SingleStoreInt, SingleStoreMediumInt, SingleStoreReal, SingleStoreSerial, SingleStoreSmallInt, SingleStoreText, SingleStoreTinyInt, SingleStoreVarChar, SingleStoreYear, } from 'drizzle-orm/singlestore-core'; import type { SQLiteInteger, SQLiteReal, SQLiteText } from 'drizzle-orm/sqlite-core'; import { z as zod } from 'zod/v4'; import { CONSTANTS } from './constants.ts'; import type { CreateSchemaFactoryOptions } from './schema.types.ts'; import { isColumnType, isWithEnum } from './utils.ts'; import type { Json } from './utils.ts'; export const literalSchema = zod.union([zod.string(), zod.number(), zod.boolean(), zod.null()]); export const jsonSchema: zod.ZodType = zod.union([ literalSchema, zod.record(zod.string(), zod.any()), zod.array(zod.any()), ]); export const bufferSchema: zod.ZodType = zod.custom((v) => v instanceof Buffer); // eslint-disable-line no-instanceof/no-instanceof export function columnToSchema( column: Column, factory: | CreateSchemaFactoryOptions< Partial> | true | undefined > | undefined, ): zod.ZodType { const z: typeof zod = factory?.zodInstance ?? zod; const coerce = factory?.coerce ?? {}; let schema!: zod.ZodType; if (isWithEnum(column)) { schema = column.enumValues.length ? z.enum(column.enumValues) : z.string(); } if (!schema) { // Handle specific types if (isColumnType | PgPointTuple>(column, ['PgGeometry', 'PgPointTuple'])) { schema = z.tuple([z.number(), z.number()]); } else if ( isColumnType | PgGeometryObject>(column, ['PgGeometryObject', 'PgPointObject']) ) { schema = z.object({ x: z.number(), y: z.number() }); } else if (isColumnType | PgVector>(column, ['PgHalfVector', 'PgVector'])) { schema = z.array(z.number()); schema = column.dimensions ? (schema as zod.ZodArray).length(column.dimensions) : schema; } else if (isColumnType>(column, ['PgLine'])) { schema = z.tuple([z.number(), z.number(), z.number()]); } else if (isColumnType>(column, ['PgLineABC'])) { schema = z.object({ a: z.number(), b: z.number(), c: z.number(), }); } // Handle other types else if (isColumnType>(column, ['PgArray'])) { schema = z.array(columnToSchema(column.baseColumn, factory)); schema = column.size ? (schema as zod.ZodArray).length(column.size) : schema; } else if (column.dataType === 'array') { schema = z.array(z.any()); } else if (column.dataType === 'number') { schema = numberColumnToSchema(column, z, coerce); } else if (column.dataType === 'bigint') { schema = bigintColumnToSchema(column, z, coerce); } else if (column.dataType === 'boolean') { schema = coerce === true || coerce.boolean ? z.coerce.boolean() : z.boolean(); } else if (column.dataType === 'date') { schema = coerce === true || coerce.date ? z.coerce.date() : z.date(); } else if (column.dataType === 'string') { schema = stringColumnToSchema(column, z, coerce); } else if (column.dataType === 'json') { schema = jsonSchema; } else if (column.dataType === 'custom') { schema = z.any(); } else if (column.dataType === 'buffer') { schema = bufferSchema; } } if (!schema) { schema = z.any(); } return schema; } function numberColumnToSchema( column: Column, z: typeof zod, coerce: CreateSchemaFactoryOptions< Partial> | true | undefined >['coerce'], ): zod.ZodType { let unsigned = column.getSQLType().includes('unsigned'); let min!: number; let max!: number; let integer = false; if (isColumnType | SingleStoreTinyInt>(column, ['MySqlTinyInt', 'SingleStoreTinyInt'])) { min = unsigned ? 0 : CONSTANTS.INT8_MIN; max = unsigned ? CONSTANTS.INT8_UNSIGNED_MAX : CONSTANTS.INT8_MAX; integer = true; } else if ( isColumnType | PgSmallSerial | MySqlSmallInt | SingleStoreSmallInt>(column, [ 'PgSmallInt', 'PgSmallSerial', 'MySqlSmallInt', 'SingleStoreSmallInt', ]) ) { min = unsigned ? 0 : CONSTANTS.INT16_MIN; max = unsigned ? CONSTANTS.INT16_UNSIGNED_MAX : CONSTANTS.INT16_MAX; integer = true; } else if ( isColumnType< PgReal | MySqlFloat | MySqlMediumInt | SingleStoreMediumInt | SingleStoreFloat >(column, [ 'PgReal', 'MySqlFloat', 'MySqlMediumInt', 'SingleStoreMediumInt', 'SingleStoreFloat', ]) ) { min = unsigned ? 0 : CONSTANTS.INT24_MIN; max = unsigned ? CONSTANTS.INT24_UNSIGNED_MAX : CONSTANTS.INT24_MAX; integer = isColumnType(column, ['MySqlMediumInt', 'SingleStoreMediumInt']); } else if ( isColumnType | PgSerial | MySqlInt | SingleStoreInt>(column, [ 'PgInteger', 'PgSerial', 'MySqlInt', 'SingleStoreInt', ]) ) { min = unsigned ? 0 : CONSTANTS.INT32_MIN; max = unsigned ? CONSTANTS.INT32_UNSIGNED_MAX : CONSTANTS.INT32_MAX; integer = true; } else if ( isColumnType< | PgDoublePrecision | MySqlReal | MySqlDouble | SingleStoreReal | SingleStoreDouble | SQLiteReal >(column, [ 'PgDoublePrecision', 'MySqlReal', 'MySqlDouble', 'SingleStoreReal', 'SingleStoreDouble', 'SQLiteReal', ]) ) { min = unsigned ? 0 : CONSTANTS.INT48_MIN; max = unsigned ? CONSTANTS.INT48_UNSIGNED_MAX : CONSTANTS.INT48_MAX; } else if ( isColumnType< | PgBigInt53 | PgBigSerial53 | MySqlBigInt53 | MySqlSerial | SingleStoreBigInt53 | SingleStoreSerial | SQLiteInteger >( column, [ 'PgBigInt53', 'PgBigSerial53', 'MySqlBigInt53', 'MySqlSerial', 'SingleStoreBigInt53', 'SingleStoreSerial', 'SQLiteInteger', ], ) ) { unsigned = unsigned || isColumnType(column, ['MySqlSerial', 'SingleStoreSerial']); min = unsigned ? 0 : Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; integer = true; } else if (isColumnType | SingleStoreYear>(column, ['MySqlYear', 'SingleStoreYear'])) { min = 1901; max = 2155; integer = true; } else { min = Number.MIN_SAFE_INTEGER; max = Number.MAX_SAFE_INTEGER; } let schema = coerce === true || coerce?.number ? integer ? z.coerce.number() : z.coerce.number().int() : integer ? z.int() : z.number(); schema = schema.gte(min).lte(max); return schema; } function bigintColumnToSchema( column: Column, z: typeof zod, coerce: CreateSchemaFactoryOptions< Partial> | true | undefined >['coerce'], ): zod.ZodType { const unsigned = column.getSQLType().includes('unsigned'); const min = unsigned ? 0n : CONSTANTS.INT64_MIN; const max = unsigned ? CONSTANTS.INT64_UNSIGNED_MAX : CONSTANTS.INT64_MAX; const schema = coerce === true || coerce?.bigint ? z.coerce.bigint() : z.bigint(); return schema.gte(min).lte(max); } function stringColumnToSchema( column: Column, z: typeof zod, coerce: CreateSchemaFactoryOptions< Partial> | true | undefined >['coerce'], ): zod.ZodType { if (isColumnType>>(column, ['PgUUID'])) { return z.uuid(); } let max: number | undefined; let regex: RegExp | undefined; let fixed = false; if (isColumnType | SQLiteText>(column, ['PgVarchar', 'SQLiteText'])) { max = column.length; } else if ( isColumnType | SingleStoreVarChar>(column, ['MySqlVarChar', 'SingleStoreVarChar']) ) { max = column.length ?? CONSTANTS.INT16_UNSIGNED_MAX; } else if (isColumnType | SingleStoreText>(column, ['MySqlText', 'SingleStoreText'])) { if (column.textType === 'longtext') { max = CONSTANTS.INT32_UNSIGNED_MAX; } else if (column.textType === 'mediumtext') { max = CONSTANTS.INT24_UNSIGNED_MAX; } else if (column.textType === 'text') { max = CONSTANTS.INT16_UNSIGNED_MAX; } else { max = CONSTANTS.INT8_UNSIGNED_MAX; } } if ( isColumnType | MySqlChar | SingleStoreChar>(column, [ 'PgChar', 'MySqlChar', 'SingleStoreChar', ]) ) { max = column.length; fixed = true; } if (isColumnType>(column, ['PgBinaryVector'])) { regex = /^[01]+$/; max = column.dimensions; } let schema = coerce === true || coerce?.string ? z.coerce.string() : z.string(); schema = regex ? schema.regex(regex) : schema; return max && fixed ? schema.length(max) : max ? schema.max(max) : schema; } ================================================ FILE: drizzle-zod/src/column.types.ts ================================================ import type { Assume, Column } from 'drizzle-orm'; import type { z } from 'zod/v4'; import type { IsEnumDefined, IsNever, Json } from './utils.ts'; type HasBaseColumn = TColumn extends { _: { baseColumn: Column | undefined } } ? IsNever extends false ? true : false : false; export type GetZodType< TColumn extends Column, TCoerce extends Partial> | true | undefined, > = HasBaseColumn extends true ? z.ZodArray< GetZodType, TCoerce> > : TColumn['_']['columnType'] extends 'PgUUID' ? z.ZodUUID : IsEnumDefined extends true ? z.ZodEnum<{ [K in Assume[number]]: K }> : TColumn['_']['columnType'] extends 'PgGeometry' | 'PgPointTuple' ? z.ZodTuple<[z.ZodNumber, z.ZodNumber], null> : TColumn['_']['columnType'] extends 'PgLine' ? z.ZodTuple<[z.ZodNumber, z.ZodNumber, z.ZodNumber], null> : TColumn['_']['data'] extends Date ? CanCoerce extends true ? z.coerce.ZodCoercedDate : z.ZodDate : TColumn['_']['data'] extends Buffer ? z.ZodType : TColumn['_']['dataType'] extends 'array' ? z.ZodArray[number], '', TCoerce>> : TColumn['_']['data'] extends Record ? TColumn['_']['columnType'] extends 'PgJson' | 'PgJsonb' | 'MySqlJson' | 'SingleStoreJson' | 'SQLiteTextJson' | 'SQLiteBlobJson' ? z.ZodType : z.ZodObject< { [K in keyof TColumn['_']['data']]: GetZodPrimitiveType }, { out: {}; in: {} } > : TColumn['_']['dataType'] extends 'json' ? z.ZodType : GetZodPrimitiveType; type CanCoerce< TCoerce extends Partial> | true | undefined, TTo extends 'bigint' | 'boolean' | 'date' | 'number' | 'string', > = TCoerce extends true ? true : TCoerce extends Record ? TCoerce[TTo] extends true ? true : false : false; type GetZodPrimitiveType< TData, TColumnType, TCoerce extends Partial> | true | undefined, > = TColumnType extends | 'MySqlTinyInt' | 'SingleStoreTinyInt' | 'PgSmallInt' | 'PgSmallSerial' | 'MySqlSmallInt' | 'MySqlMediumInt' | 'SingleStoreSmallInt' | 'SingleStoreMediumInt' | 'PgInteger' | 'PgSerial' | 'MySqlInt' | 'SingleStoreInt' | 'PgBigInt53' | 'PgBigSerial53' | 'MySqlBigInt53' | 'MySqlSerial' | 'SingleStoreBigInt53' | 'SingleStoreSerial' | 'SQLiteInteger' | 'MySqlYear' | 'SingleStoreYear' ? CanCoerce extends true ? z.coerce.ZodCoercedNumber : z.ZodInt : TData extends number ? CanCoerce extends true ? z.coerce.ZodCoercedNumber : z.ZodNumber : TData extends bigint ? CanCoerce extends true ? z.coerce.ZodCoercedBigInt : z.ZodBigInt : TData extends boolean ? CanCoerce extends true ? z.coerce.ZodCoercedBoolean : z.ZodBoolean : TData extends string ? CanCoerce extends true ? z.coerce.ZodCoercedString : z.ZodString : z.ZodType; type HandleSelectColumn< TSchema extends z.ZodType, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? TSchema : z.ZodNullable; type HandleInsertColumn< TSchema extends z.ZodType, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? TColumn['_']['hasDefault'] extends true ? z.ZodOptional : TSchema : z.ZodOptional>; type HandleUpdateColumn< TSchema extends z.ZodType, TColumn extends Column, > = TColumn['_']['notNull'] extends true ? z.ZodOptional : z.ZodOptional>; export type HandleColumn< TType extends 'select' | 'insert' | 'update', TColumn extends Column, TCoerce extends Partial> | true | undefined, > = TType extends 'select' ? HandleSelectColumn, TColumn> : TType extends 'insert' ? HandleInsertColumn, TColumn> : TType extends 'update' ? HandleUpdateColumn, TColumn> : GetZodType; ================================================ FILE: drizzle-zod/src/constants.ts ================================================ export const CONSTANTS = { INT8_MIN: -128, INT8_MAX: 127, INT8_UNSIGNED_MAX: 255, INT16_MIN: -32768, INT16_MAX: 32767, INT16_UNSIGNED_MAX: 65535, INT24_MIN: -8388608, INT24_MAX: 8388607, INT24_UNSIGNED_MAX: 16777215, INT32_MIN: -2147483648, INT32_MAX: 2147483647, INT32_UNSIGNED_MAX: 4294967295, INT48_MIN: -140737488355328, INT48_MAX: 140737488355327, INT48_UNSIGNED_MAX: 281474976710655, INT64_MIN: -9223372036854775808n, INT64_MAX: 9223372036854775807n, INT64_UNSIGNED_MAX: 18446744073709551615n, }; ================================================ FILE: drizzle-zod/src/index.ts ================================================ export { bufferSchema, jsonSchema, literalSchema } from './column.ts'; export * from './column.types.ts'; export * from './schema.ts'; export * from './schema.types.internal.ts'; export * from './schema.types.ts'; export * from './utils.ts'; ================================================ FILE: drizzle-zod/src/schema.ts ================================================ import { Column, getTableColumns, getViewSelectedFields, is, isTable, isView, SQL } from 'drizzle-orm'; import type { Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import { z } from 'zod/v4'; import { columnToSchema } from './column.ts'; import type { Conditions } from './schema.types.internal.ts'; import type { CreateInsertSchema, CreateSchemaFactoryOptions, CreateSelectSchema, CreateUpdateSchema, } from './schema.types.ts'; import { isPgEnum } from './utils.ts'; function getColumns(tableLike: Table | View) { return isTable(tableLike) ? getTableColumns(tableLike) : getViewSelectedFields(tableLike); } function handleColumns( columns: Record, refinements: Record, conditions: Conditions, factory?: CreateSchemaFactoryOptions< Partial> | true | undefined >, ): z.ZodType { const columnSchemas: Record = {}; for (const [key, selected] of Object.entries(columns)) { if (!is(selected, Column) && !is(selected, SQL) && !is(selected, SQL.Aliased) && typeof selected === 'object') { const columns = isTable(selected) || isView(selected) ? getColumns(selected) : selected; columnSchemas[key] = handleColumns(columns, refinements[key] ?? {}, conditions, factory); continue; } const refinement = refinements[key]; if (refinement !== undefined && typeof refinement !== 'function') { columnSchemas[key] = refinement; continue; } const column = is(selected, Column) ? selected : undefined; const schema = column ? columnToSchema(column, factory) : z.any(); const refined = typeof refinement === 'function' ? refinement(schema) : schema; if (conditions.never(column)) { continue; } else { columnSchemas[key] = refined; } if (column) { if (conditions.nullable(column)) { columnSchemas[key] = columnSchemas[key]!.nullable(); } if (conditions.optional(column)) { columnSchemas[key] = columnSchemas[key]!.optional(); } } } return z.object(columnSchemas) as any; } function handleEnum( enum_: PgEnum, factory?: CreateSchemaFactoryOptions< Partial> | true | undefined >, ) { const zod: typeof z = factory?.zodInstance ?? z; return zod.enum(enum_.enumValues); } const selectConditions: Conditions = { never: () => false, optional: () => false, nullable: (column) => !column.notNull, }; const insertConditions: Conditions = { never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', optional: (column) => !column.notNull || (column.notNull && column.hasDefault), nullable: (column) => !column.notNull, }; const updateConditions: Conditions = { never: (column) => column?.generated?.type === 'always' || column?.generatedIdentity?.type === 'always', optional: () => true, nullable: (column) => !column.notNull, }; export const createSelectSchema: CreateSelectSchema = ( entity: Table | View | PgEnum<[string, ...string[]]>, refine?: Record, ) => { if (isPgEnum(entity)) { return handleEnum(entity); } const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, selectConditions) as any; }; export const createInsertSchema: CreateInsertSchema = ( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, insertConditions) as any; }; export const createUpdateSchema: CreateUpdateSchema = ( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, updateConditions) as any; }; export function createSchemaFactory< TCoerce extends Partial> | true | undefined, >(options?: CreateSchemaFactoryOptions) { const createSelectSchema: CreateSelectSchema = ( entity: Table | View | PgEnum<[string, ...string[]]>, refine?: Record, ) => { if (isPgEnum(entity)) { return handleEnum(entity, options); } const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, selectConditions, options) as any; }; const createInsertSchema: CreateInsertSchema = ( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, insertConditions, options) as any; }; const createUpdateSchema: CreateUpdateSchema = ( entity: Table, refine?: Record, ) => { const columns = getColumns(entity); return handleColumns(columns, refine ?? {}, updateConditions, options) as any; }; return { createSelectSchema, createInsertSchema, createUpdateSchema }; } ================================================ FILE: drizzle-zod/src/schema.types.internal.ts ================================================ import type { Assume, Column, DrizzleTypeError, SelectedFieldsFlat, Simplify, Table, View } from 'drizzle-orm'; import type { z } from 'zod/v4'; import type { GetZodType, HandleColumn } from './column.types.ts'; import type { ColumnIsGeneratedAlwaysAs, GetSelection } from './utils.ts'; export interface Conditions { never: (column?: Column) => boolean; optional: (column: Column) => boolean; nullable: (column: Column) => boolean; } type BuildRefineField = T extends z.ZodType ? ((schema: T) => z.ZodType) | z.ZodType : never; export type BuildRefine< TColumns extends Record, TCoerce extends Partial> | true | undefined, > = { [K in keyof TColumns as TColumns[K] extends Column | SelectedFieldsFlat | Table | View ? K : never]?: TColumns[K] extends Column ? BuildRefineField> : BuildRefine, TCoerce>; }; type HandleRefinement< TType extends 'select' | 'insert' | 'update', TRefinement, TColumn extends Column, > = TRefinement extends (schema: any) => z.ZodType ? (TColumn['_']['notNull'] extends true ? ReturnType : z.ZodNullable>) extends infer TSchema extends z.ZodType ? TType extends 'update' ? z.ZodOptional : TSchema : z.ZodType : TRefinement; type IsRefinementDefined< TRefinements extends Record | undefined, TKey extends string | symbol | number, > = TRefinements extends object ? TRefinements[TKey] extends z.ZodType | ((schema: any) => any) ? true : false : false; export type BuildSchema< TType extends 'select' | 'insert' | 'update', TColumns extends Record, TRefinements extends Record | undefined, TCoerce extends Partial> | true | undefined, > = z.ZodObject< Simplify< { [ K in keyof TColumns as ColumnIsGeneratedAlwaysAs extends true ? TType extends 'select' ? K : never : K ]: TColumns[K] extends infer TColumn extends Column ? IsRefinementDefined extends true ? Assume, z.ZodType> : HandleColumn : TColumns[K] extends infer TObject extends SelectedFieldsFlat | Table | View ? BuildSchema< TType, GetSelection, TRefinements extends object ? TRefinements[K & keyof TRefinements] : undefined, TCoerce > : z.ZodAny; } >, { out: {}; in: {} } >; export type NoUnknownKeys< TRefinement extends Record, TCompare extends Record, > = { [K in keyof TRefinement]: K extends keyof TCompare ? TRefinement[K] extends Record ? NoUnknownKeys : TRefinement[K] : DrizzleTypeError<`Found unknown key in refinement: "${K & string}"`>; }; ================================================ FILE: drizzle-zod/src/schema.types.ts ================================================ import type { Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type { z } from 'zod/v4'; import type { BuildRefine, BuildSchema, NoUnknownKeys } from './schema.types.internal.ts'; export interface CreateSelectSchema< TCoerce extends Partial> | true | undefined, > { (table: TTable): BuildSchema<'select', TTable['_']['columns'], undefined, TCoerce>; < TTable extends Table, TRefine extends BuildRefine, >( table: TTable, refine?: NoUnknownKeys, ): BuildSchema<'select', TTable['_']['columns'], TRefine, TCoerce>; (view: TView): BuildSchema<'select', TView['_']['selectedFields'], undefined, TCoerce>; < TView extends View, TRefine extends BuildRefine, >( view: TView, refine: NoUnknownKeys, ): BuildSchema<'select', TView['_']['selectedFields'], TRefine, TCoerce>; >(enum_: TEnum): z.ZodEnum<{ [K in TEnum['enumValues'][number]]: K }>; } export interface CreateInsertSchema< TCoerce extends Partial> | true | undefined, > { (table: TTable): BuildSchema<'insert', TTable['_']['columns'], undefined, TCoerce>; < TTable extends Table, TRefine extends BuildRefine, TCoerce>, >( table: TTable, refine?: NoUnknownKeys, ): BuildSchema<'insert', TTable['_']['columns'], TRefine, TCoerce>; } export interface CreateUpdateSchema< TCoerce extends Partial> | true | undefined, > { (table: TTable): BuildSchema<'update', TTable['_']['columns'], undefined, TCoerce>; < TTable extends Table, TRefine extends BuildRefine, TCoerce>, >( table: TTable, refine?: TRefine, ): BuildSchema<'update', TTable['_']['columns'], TRefine, TCoerce>; } export interface CreateSchemaFactoryOptions< TCoerce extends Partial> | true | undefined, > { zodInstance?: any; coerce?: TCoerce; } ================================================ FILE: drizzle-zod/src/utils.ts ================================================ import type { Column, SelectedFieldsFlat, Table, View } from 'drizzle-orm'; import type { PgEnum } from 'drizzle-orm/pg-core'; import type { z } from 'zod/v4'; import type { literalSchema } from './column.ts'; export function isColumnType(column: Column, columnTypes: string[]): column is T { return columnTypes.includes(column.columnType); } export function isWithEnum(column: Column): column is typeof column & { enumValues: [string, ...string[]] } { return 'enumValues' in column && Array.isArray(column.enumValues) && column.enumValues.length > 0; } export const isPgEnum: (entity: any) => entity is PgEnum<[string, ...string[]]> = isWithEnum as any; type Literal = z.infer; export type Json = Literal | { [key: string]: any } | any[]; export type IsNever = [T] extends [never] ? true : false; export type IsEnumDefined = [string, ...string[]] extends TEnum ? false : undefined extends TEnum ? false : true; export type ColumnIsGeneratedAlwaysAs = TColumn extends Column ? TColumn['_']['identity'] extends 'always' ? true : TColumn['_']['generated'] extends { type: 'byDefault' } | undefined ? false : true : false; export type GetSelection | Table | View> = T extends Table ? T['_']['columns'] : T extends View ? T['_']['selectedFields'] : T; ================================================ FILE: drizzle-zod/tests/mysql.test.ts ================================================ import { type Equal, sql } from 'drizzle-orm'; import { customType, int, json, mysqlSchema, mysqlTable, mysqlView, serial, text } from 'drizzle-orm/mysql-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { z } from 'zod/v4'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX); const serialNumberModeSchema = z.int().gte(0).lte(Number.MAX_SAFE_INTEGER); const textSchema = z.string().max(CONSTANTS.INT16_UNSIGNED_MAX); test('table - select', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), generated: int().generatedAlwaysAs(1).notNull(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = z.object({ id: serialNumberModeSchema, generated: intSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = mysqlSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = z.object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = z.object({ id: serialNumberModeSchema.optional(), name: textSchema, age: intSchema.nullable().optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = z.object({ id: serialNumberModeSchema.optional(), name: textSchema.optional(), age: intSchema.nullable().optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view qb - select', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = z.object({ id: serialNumberModeSchema, age: z.any() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view columns - select', (t) => { const view = mysqlView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = z.object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view with nested fields - select', (t) => { const table = mysqlTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = mysqlView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = z.object({ id: serialNumberModeSchema, nested: z.object({ name: textSchema, age: z.any() }), table: z.object({ id: serialNumberModeSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - select', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = z.object({ c1: intSchema.nullable(), c2: intSchema, c3: intSchema.nullable(), c4: intSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema, c3: intSchema.nullable().optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - update', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema.optional(), c3: intSchema.nullable().optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: intSchema.nullable(), c2: intSchema.lte(1000), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = z.string().min(1).max(100); const result = createSelectSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), c4: customTextSchema, }); const expected = z.object({ c1: intSchema.nullable(), c2: intSchema.lte(1000), c3: z.string().transform(Number), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema.lte(1000), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = mysqlTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema.lte(1000).optional(), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine view - select', (t) => { const table = mysqlTable('test', { c1: int(), c2: int(), c3: int(), c4: int(), c5: int(), c6: int(), }); const view = mysqlView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), nested: { c5: (schema) => schema.lte(1000), c6: z.string().transform(Number), }, table: { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }, }); const expected = z.object({ c1: intSchema.nullable(), c2: intSchema.lte(1000).nullable(), c3: z.string().transform(Number), nested: z.object({ c4: intSchema.nullable(), c5: intSchema.lte(1000).nullable(), c6: z.string().transform(Number), }), table: z.object({ c1: intSchema.nullable(), c2: intSchema.lte(1000).nullable(), c3: z.string().transform(Number), c4: intSchema.nullable(), c5: intSchema.nullable(), c6: intSchema.nullable(), }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('all data types', (t) => { const table = mysqlTable('test', ({ bigint, binary, boolean, char, date, datetime, decimal, double, float, int, json, mediumint, mysqlEnum, real, serial, smallint, text, time, timestamp, tinyint, varchar, varbinary, year, longtext, mediumtext, tinytext, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigint3: bigint({ unsigned: true, mode: 'number' }).notNull(), bigint4: bigint({ unsigned: true, mode: 'bigint' }).notNull(), binary: binary({ length: 10 }).notNull(), boolean: boolean().notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), decimal1: decimal().notNull(), decimal2: decimal({ unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), float2: float({ unsigned: true }).notNull(), int1: int().notNull(), int2: int({ unsigned: true }).notNull(), json: json().notNull(), mediumint1: mediumint().notNull(), mediumint2: mediumint({ unsigned: true }).notNull(), enum: mysqlEnum('enum', ['a', 'b', 'c']).notNull(), real: real().notNull(), serial: serial().notNull(), smallint1: smallint().notNull(), smallint2: smallint({ unsigned: true }).notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), tinyint1: tinyint().notNull(), tinyint2: tinyint({ unsigned: true }).notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), varbinary: varbinary({ length: 10 }).notNull(), year: year().notNull(), longtext1: longtext().notNull(), longtext2: longtext({ enum: ['a', 'b', 'c'] }).notNull(), mediumtext1: mediumtext().notNull(), mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), })); const result = createSelectSchema(table); const expected = z.object({ bigint1: z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), bigint2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), bigint3: z.int().gte(0).lte(Number.MAX_SAFE_INTEGER), bigint4: z.bigint().gte(0n).lte(CONSTANTS.INT64_UNSIGNED_MAX), binary: z.string(), boolean: z.boolean(), char1: z.string().length(10), char2: z.enum(['a', 'b', 'c']), date1: z.date(), date2: z.string(), datetime1: z.date(), datetime2: z.string(), decimal1: z.string(), decimal2: z.string(), double1: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), double2: z.number().gte(0).lte(CONSTANTS.INT48_UNSIGNED_MAX), float1: z.number().gte(CONSTANTS.INT24_MIN).lte(CONSTANTS.INT24_MAX), float2: z.number().gte(0).lte(CONSTANTS.INT24_UNSIGNED_MAX), int1: z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), int2: z.int().gte(0).lte(CONSTANTS.INT32_UNSIGNED_MAX), json: jsonSchema, mediumint1: z.int().gte(CONSTANTS.INT24_MIN).lte(CONSTANTS.INT24_MAX), mediumint2: z.int().gte(0).lte(CONSTANTS.INT24_UNSIGNED_MAX), enum: z.enum(['a', 'b', 'c']), real: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), serial: z.int().gte(0).lte(Number.MAX_SAFE_INTEGER), smallint1: z.int().gte(CONSTANTS.INT16_MIN).lte(CONSTANTS.INT16_MAX), smallint2: z.int().gte(0).lte(CONSTANTS.INT16_UNSIGNED_MAX), text1: z.string().max(CONSTANTS.INT16_UNSIGNED_MAX), text2: z.enum(['a', 'b', 'c']), time: z.string(), timestamp1: z.date(), timestamp2: z.string(), tinyint1: z.int().gte(CONSTANTS.INT8_MIN).lte(CONSTANTS.INT8_MAX), tinyint2: z.int().gte(0).lte(CONSTANTS.INT8_UNSIGNED_MAX), varchar1: z.string().max(10), varchar2: z.enum(['a', 'b', 'c']), varbinary: z.string(), year: z.int().gte(1901).lte(2155), longtext1: z.string().max(CONSTANTS.INT32_UNSIGNED_MAX), longtext2: z.enum(['a', 'b', 'c']), mediumtext1: z.string().max(CONSTANTS.INT24_UNSIGNED_MAX), mediumtext2: z.enum(['a', 'b', 'c']), tinytext1: z.string().max(CONSTANTS.INT8_UNSIGNED_MAX), tinytext2: z.enum(['a', 'b', 'c']), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('type coercion - all', (t) => { const table = mysqlTable('test', ({ bigint, boolean, timestamp, int, text, }) => ({ bigint: bigint({ mode: 'bigint' }).notNull(), boolean: boolean().notNull(), timestamp: timestamp().notNull(), int: int().notNull(), text: text().notNull(), })); const { createSelectSchema } = createSchemaFactory({ coerce: true, }); const result = createSelectSchema(table); const expected = z.object({ bigint: z.coerce.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), boolean: z.coerce.boolean(), timestamp: z.coerce.date(), int: z.coerce.number().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX).int(), text: z.coerce.string().max(CONSTANTS.INT16_UNSIGNED_MAX), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('type coercion - mixed', (t) => { const table = mysqlTable('test', ({ timestamp, int, }) => ({ timestamp: timestamp().notNull(), int: int().notNull(), })); const { createSelectSchema } = createSchemaFactory({ coerce: { date: true, }, }); const result = createSelectSchema(table); const expected = z.object({ timestamp: z.coerce.date(), int: z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: z.ZodType = z.custom().superRefine(() => {}); const table = mysqlTable('test', { json: json().$type(), }); const result = createSelectSchema(table); const expected = z.object({ json: z.nullable(TopLevelCondition), }); Expect, z.infer>>(); } /* Disallow unknown keys in table refinement - select */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: z.string() }); } /* Disallow unknown keys in table refinement - insert */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: z.string() }); } /* Disallow unknown keys in table refinement - update */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: z.string() }); } /* Disallow unknown keys in view qb - select */ { const table = mysqlTable('test', { id: int() }); const view = mysqlView('test').as((qb) => qb.select().from(table)); const nestedSelect = mysqlView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: z.string() }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: z.string() } }); } /* Disallow unknown keys in view columns - select */ { const view = mysqlView('test', { id: int() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: z.string() }); } ================================================ FILE: drizzle-zod/tests/pg.test.ts ================================================ import { type Equal, sql } from 'drizzle-orm'; import { customType, integer, json, jsonb, pgEnum, pgMaterializedView, pgSchema, pgTable, pgView, serial, text, } from 'drizzle-orm/pg-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { z } from 'zod/v4'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; const integerSchema = z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX); const textSchema = z.string(); test('table - select', (t) => { const table = pgTable('test', { id: integer().primaryKey(), generated: integer().generatedAlwaysAsIdentity(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = z.object({ id: integerSchema, generated: integerSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = pgSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = z.object({ id: integerSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = pgTable('test', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer(), }); const result = createInsertSchema(table); const expected = z.object({ name: textSchema, age: integerSchema.nullable().optional() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = pgTable('test', { id: integer().generatedAlwaysAsIdentity().primaryKey(), name: text().notNull(), age: integer(), }); const result = createUpdateSchema(table); const expected = z.object({ name: textSchema.optional(), age: integerSchema.nullable().optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view qb - select', (t) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = z.object({ id: integerSchema, age: z.any() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view columns - select', (t) => { const view = pgView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = z.object({ id: integerSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('materialized view qb - select', (t) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = z.object({ id: integerSchema, age: z.any() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('materialized view columns - select', (t) => { const view = pgView('test', { id: serial().primaryKey(), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = z.object({ id: integerSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view with nested fields - select', (t) => { const table = pgTable('test', { id: serial().primaryKey(), name: text().notNull(), }); const view = pgMaterializedView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = z.object({ id: integerSchema, nested: z.object({ name: textSchema, age: z.any() }), table: z.object({ id: integerSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('enum - select', (t) => { const enum_ = pgEnum('test', ['a', 'b', 'c']); const result = createSelectSchema(enum_); const expected = z.enum(['a', 'b', 'c']); expectEnumValues(t, expected).from(result); Expect>(); }); test('nullability - select', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), }); const result = createSelectSchema(table); const expected = z.object({ c1: integerSchema.nullable(), c2: integerSchema, c3: integerSchema.nullable(), c4: integerSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), c5: integer().generatedAlwaysAs(1), c6: integer().generatedAlwaysAsIdentity(), c7: integer().generatedByDefaultAsIdentity(), }); const result = createInsertSchema(table); const expected = z.object({ c1: integerSchema.nullable().optional(), c2: integerSchema, c3: integerSchema.nullable().optional(), c4: integerSchema.optional(), c7: integerSchema.optional(), }); expectSchemaShape(t, expected).from(result); }); test('nullability - update', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().default(1), c4: integer().notNull().default(1), c5: integer().generatedAlwaysAs(1), c6: integer().generatedAlwaysAsIdentity(), c7: integer().generatedByDefaultAsIdentity(), }); const result = createUpdateSchema(table); const expected = z.object({ c1: integerSchema.nullable().optional(), c2: integerSchema.optional(), c3: integerSchema.nullable().optional(), c4: integerSchema.optional(), c7: integerSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: integerSchema.nullable(), c2: integerSchema.lte(1000), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: customText(), }); const customTextSchema = z.string().min(1).max(100); const result = createSelectSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), c4: customTextSchema, }); const expected = z.object({ c1: integerSchema.nullable(), c2: integerSchema.lte(1000), c3: z.string().transform(Number), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: integer().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: integerSchema.nullable().optional(), c2: integerSchema.lte(1000), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = pgTable('test', { c1: integer(), c2: integer().notNull(), c3: integer().notNull(), c4: integer().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: integerSchema.nullable().optional(), c2: integerSchema.lte(1000).optional(), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine view - select', (t) => { const table = pgTable('test', { c1: integer(), c2: integer(), c3: integer(), c4: integer(), c5: integer(), c6: integer(), }); const view = pgView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), nested: { c5: (schema) => schema.lte(1000), c6: z.string().transform(Number), }, table: { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }, }); const expected = z.object({ c1: integerSchema.nullable(), c2: integerSchema.lte(1000).nullable(), c3: z.string().transform(Number), nested: z.object({ c4: integerSchema.nullable(), c5: integerSchema.lte(1000).nullable(), c6: z.string().transform(Number), }), table: z.object({ c1: integerSchema.nullable(), c2: integerSchema.lte(1000).nullable(), c3: z.string().transform(Number), c4: integerSchema.nullable(), c5: integerSchema.nullable(), c6: integerSchema.nullable(), }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('all data types', (t) => { const table = pgTable('test', ({ bigint, bigserial, bit, boolean, date, char, cidr, doublePrecision, geometry, halfvec, inet, integer, interval, json, jsonb, line, macaddr, macaddr8, numeric, point, real, serial, smallint, smallserial, text, sparsevec, time, timestamp, uuid, varchar, vector, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigserial1: bigserial({ mode: 'number' }).notNull(), bigserial2: bigserial({ mode: 'bigint' }).notNull(), bit: bit({ dimensions: 5 }).notNull(), boolean: boolean().notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), cidr: cidr().notNull(), doublePrecision: doublePrecision().notNull(), geometry1: geometry({ type: 'point', mode: 'tuple' }).notNull(), geometry2: geometry({ type: 'point', mode: 'xy' }).notNull(), halfvec: halfvec({ dimensions: 3 }).notNull(), inet: inet().notNull(), integer: integer().notNull(), interval: interval().notNull(), json: json().notNull(), jsonb: jsonb().notNull(), line1: line({ mode: 'abc' }).notNull(), line2: line({ mode: 'tuple' }).notNull(), macaddr: macaddr().notNull(), macaddr8: macaddr8().notNull(), numeric: numeric().notNull(), point1: point({ mode: 'xy' }).notNull(), point2: point({ mode: 'tuple' }).notNull(), real: real().notNull(), serial: serial().notNull(), smallint: smallint().notNull(), smallserial: smallserial().notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), sparsevec: sparsevec({ dimensions: 3 }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), uuid: uuid().notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), vector: vector({ dimensions: 3 }).notNull(), array1: integer().array().notNull(), array2: integer().array().array(2).notNull(), array3: varchar({ length: 10 }).array().array(2).notNull(), })); const result = createSelectSchema(table); const expected = z.object({ bigint1: z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), bigint2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), bigserial1: z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), bigserial2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), bit: z.string().regex(/^[01]+$/).max(5), boolean: z.boolean(), date1: z.date(), date2: z.string(), char1: z.string().length(10), char2: z.enum(['a', 'b', 'c']), cidr: z.string(), doublePrecision: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), geometry1: z.tuple([z.number(), z.number()]), geometry2: z.object({ x: z.number(), y: z.number() }), halfvec: z.array(z.number()).length(3), inet: z.string(), integer: z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), interval: z.string(), json: jsonSchema, jsonb: jsonSchema, line1: z.object({ a: z.number(), b: z.number(), c: z.number() }), line2: z.tuple([z.number(), z.number(), z.number()]), macaddr: z.string(), macaddr8: z.string(), numeric: z.string(), point1: z.object({ x: z.number(), y: z.number() }), point2: z.tuple([z.number(), z.number()]), real: z.number().gte(CONSTANTS.INT24_MIN).lte(CONSTANTS.INT24_MAX), serial: z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), smallint: z.int().gte(CONSTANTS.INT16_MIN).lte(CONSTANTS.INT16_MAX), smallserial: z.int().gte(CONSTANTS.INT16_MIN).lte(CONSTANTS.INT16_MAX), text1: z.string(), text2: z.enum(['a', 'b', 'c']), sparsevec: z.string(), time: z.string(), timestamp1: z.date(), timestamp2: z.string(), uuid: z.uuid(), varchar1: z.string().max(10), varchar2: z.enum(['a', 'b', 'c']), vector: z.array(z.number()).length(3), array1: z.array(integerSchema), array2: z.array(z.array(integerSchema).length(2)), array3: z.array(z.array(z.string().max(10)).length(2)), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('type coercion - all', (t) => { const table = pgTable('test', ({ bigint, boolean, timestamp, integer, text, }) => ({ bigint: bigint({ mode: 'bigint' }).notNull(), boolean: boolean().notNull(), timestamp: timestamp().notNull(), integer: integer().notNull(), text: text().notNull(), })); const { createSelectSchema } = createSchemaFactory({ coerce: true, }); const result = createSelectSchema(table); const expected = z.object({ bigint: z.coerce.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), boolean: z.coerce.boolean(), timestamp: z.coerce.date(), integer: z.coerce.number().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX).int(), text: z.coerce.string(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('type coercion - mixed', (t) => { const table = pgTable('test', ({ timestamp, integer, }) => ({ timestamp: timestamp().notNull(), integer: integer().notNull(), })); const { createSelectSchema } = createSchemaFactory({ coerce: { date: true, }, }); const result = createSelectSchema(table); const expected = z.object({ timestamp: z.coerce.date(), integer: z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: z.ZodType = z.custom().superRefine(() => {}); const table = pgTable('test', { json: json().$type().notNull(), jsonb: jsonb().$type(), }); const result = createSelectSchema(table); const expected = z.object({ json: TopLevelCondition, jsonb: z.nullable(TopLevelCondition), }); Expect, z.infer>>(); } /* Disallow unknown keys in table refinement - select */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createSelectSchema(table, { unknown: z.string() }); } /* Disallow unknown keys in table refinement - insert */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createInsertSchema(table, { unknown: z.string() }); } /* Disallow unknown keys in table refinement - update */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error createUpdateSchema(table, { unknown: z.string() }); } /* Disallow unknown keys in view qb - select */ { const table = pgTable('test', { id: integer() }); const view = pgView('test').as((qb) => qb.select().from(table)); const mView = pgMaterializedView('test').as((qb) => qb.select().from(table)); const nestedSelect = pgView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: z.string() }); // @ts-expect-error createSelectSchema(mView, { unknown: z.string() }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: z.string() } }); } /* Disallow unknown keys in view columns - select */ { const view = pgView('test', { id: integer() }).as(sql``); const mView = pgView('test', { id: integer() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: z.string() }); // @ts-expect-error createSelectSchema(mView, { unknown: z.string() }); } ================================================ FILE: drizzle-zod/tests/singlestore.test.ts ================================================ import type { Equal } from 'drizzle-orm'; import { customType, int, json, serial, singlestoreSchema, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { z } from 'zod/v4'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX); const serialNumberModeSchema = z.int().gte(0).lte(Number.MAX_SAFE_INTEGER); const textSchema = z.string().max(CONSTANTS.INT16_UNSIGNED_MAX); test('table - select', (t) => { const table = singlestoreTable('test', { id: serial().primaryKey(), generated: int().generatedAlwaysAs(1).notNull(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = z.object({ id: serialNumberModeSchema, generated: intSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table in schema - select', (tc) => { const schema = singlestoreSchema('test'); const table = schema.table('test', { id: serial().primaryKey(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = z.object({ id: serialNumberModeSchema, name: textSchema }); expectSchemaShape(tc, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = singlestoreTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = z.object({ id: serialNumberModeSchema.optional(), name: textSchema, age: intSchema.nullable().optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = singlestoreTable('test', { id: serial().primaryKey(), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = z.object({ id: serialNumberModeSchema.optional(), name: textSchema.optional(), age: intSchema.nullable().optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); // TODO: SingleStore doesn't support views yet. Add these tests when they're added // test('view qb - select', (t) => { // const table = singlestoreTable('test', { // id: serial().primaryKey(), // name: text().notNull(), // }); // const view = mysqlView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); // const result = createSelectSchema(view); // const expected = z.object({ id: serialNumberModeSchema, age: z.any() }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); // test('view columns - select', (t) => { // const view = mysqlView('test', { // id: serial().primaryKey(), // name: text().notNull(), // }).as(sql``); // const result = createSelectSchema(view); // const expected = z.object({ id: serialNumberModeSchema, name: textSchema }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); // test('view with nested fields - select', (t) => { // const table = singlestoreTable('test', { // id: serial().primaryKey(), // name: text().notNull(), // }); // const view = mysqlView('test').as((qb) => // qb.select({ // id: table.id, // nested: { // name: table.name, // age: sql``.as('age'), // }, // table, // }).from(table) // ); // const result = createSelectSchema(view); // const expected = z.object({ // id: serialNumberModeSchema, // nested: z.object({ name: textSchema, age: z.any() }), // table: z.object({ id: serialNumberModeSchema, name: textSchema }), // }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); test('nullability - select', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = z.object({ c1: intSchema.nullable(), c2: intSchema, c3: intSchema.nullable(), c4: intSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema, c3: intSchema.nullable().optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - update', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema.optional(), c3: intSchema.nullable().optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: intSchema.nullable(), c2: intSchema.lte(1000), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = z.string().min(1).max(100); const result = createSelectSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), c4: customTextSchema, }); const expected = z.object({ c1: intSchema.nullable(), c2: intSchema.lte(1000), c3: z.string().transform(Number), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema.lte(1000), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = singlestoreTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema.lte(1000).optional(), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); // test('refine view - select', (t) => { // const table = singlestoreTable('test', { // c1: int(), // c2: int(), // c3: int(), // c4: int(), // c5: int(), // c6: int(), // }); // const view = mysqlView('test').as((qb) => // qb.select({ // c1: table.c1, // c2: table.c2, // c3: table.c3, // nested: { // c4: table.c4, // c5: table.c5, // c6: table.c6, // }, // table, // }).from(table) // ); // const result = createSelectSchema(view, { // c2: (schema) => schema.lte(1000), // c3: z.string().transform(Number), // nested: { // c5: (schema) => schema.lte(1000), // c6: z.string().transform(Number), // }, // table: { // c2: (schema) => schema.lte(1000), // c3: z.string().transform(Number), // }, // }); // const expected = z.object({ // c1: intSchema.nullable(), // c2: intSchema.lte(1000).nullable(), // c3: z.string().transform(Number), // nested: z.object({ // c4: intSchema.nullable(), // c5: intSchema.lte(1000).nullable(), // c6: z.string().transform(Number), // }), // table: z.object({ // c1: intSchema.nullable(), // c2: intSchema.lte(1000).nullable(), // c3: z.string().transform(Number), // c4: intSchema.nullable(), // c5: intSchema.nullable(), // c6: intSchema.nullable(), // }), // }); // expectSchemaShape(t, expected).from(result); // Expect>(); // }); test('all data types', (t) => { const table = singlestoreTable('test', ({ bigint, binary, boolean, char, date, datetime, decimal, double, float, int, json, mediumint, singlestoreEnum, real, serial, smallint, text, time, timestamp, tinyint, varchar, varbinary, year, longtext, mediumtext, tinytext, }) => ({ bigint1: bigint({ mode: 'number' }).notNull(), bigint2: bigint({ mode: 'bigint' }).notNull(), bigint3: bigint({ unsigned: true, mode: 'number' }).notNull(), bigint4: bigint({ unsigned: true, mode: 'bigint' }).notNull(), binary: binary({ length: 10 }).notNull(), boolean: boolean().notNull(), char1: char({ length: 10 }).notNull(), char2: char({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), date1: date({ mode: 'date' }).notNull(), date2: date({ mode: 'string' }).notNull(), datetime1: datetime({ mode: 'date' }).notNull(), datetime2: datetime({ mode: 'string' }).notNull(), decimal1: decimal().notNull(), decimal2: decimal({ unsigned: true }).notNull(), double1: double().notNull(), double2: double({ unsigned: true }).notNull(), float1: float().notNull(), float2: float({ unsigned: true }).notNull(), int1: int().notNull(), int2: int({ unsigned: true }).notNull(), json: json().notNull(), mediumint1: mediumint().notNull(), mediumint2: mediumint({ unsigned: true }).notNull(), enum: singlestoreEnum('enum', ['a', 'b', 'c']).notNull(), real: real().notNull(), serial: serial().notNull(), smallint1: smallint().notNull(), smallint2: smallint({ unsigned: true }).notNull(), text1: text().notNull(), text2: text({ enum: ['a', 'b', 'c'] }).notNull(), time: time().notNull(), timestamp1: timestamp({ mode: 'date' }).notNull(), timestamp2: timestamp({ mode: 'string' }).notNull(), tinyint1: tinyint().notNull(), tinyint2: tinyint({ unsigned: true }).notNull(), varchar1: varchar({ length: 10 }).notNull(), varchar2: varchar({ length: 1, enum: ['a', 'b', 'c'] }).notNull(), varbinary: varbinary({ length: 10 }).notNull(), year: year().notNull(), longtext1: longtext().notNull(), longtext2: longtext({ enum: ['a', 'b', 'c'] }).notNull(), mediumtext1: mediumtext().notNull(), mediumtext2: mediumtext({ enum: ['a', 'b', 'c'] }).notNull(), tinytext1: tinytext().notNull(), tinytext2: tinytext({ enum: ['a', 'b', 'c'] }).notNull(), })); const result = createSelectSchema(table); const expected = z.object({ bigint1: z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), bigint2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), bigint3: z.int().gte(0).lte(Number.MAX_SAFE_INTEGER), bigint4: z.bigint().gte(0n).lte(CONSTANTS.INT64_UNSIGNED_MAX), binary: z.string(), boolean: z.boolean(), char1: z.string().length(10), char2: z.enum(['a', 'b', 'c']), date1: z.date(), date2: z.string(), datetime1: z.date(), datetime2: z.string(), decimal1: z.string(), decimal2: z.string(), double1: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), double2: z.number().gte(0).lte(CONSTANTS.INT48_UNSIGNED_MAX), float1: z.number().gte(CONSTANTS.INT24_MIN).lte(CONSTANTS.INT24_MAX), float2: z.number().gte(0).lte(CONSTANTS.INT24_UNSIGNED_MAX), int1: z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), int2: z.int().gte(0).lte(CONSTANTS.INT32_UNSIGNED_MAX), json: jsonSchema, mediumint1: z.int().gte(CONSTANTS.INT24_MIN).lte(CONSTANTS.INT24_MAX), mediumint2: z.int().gte(0).lte(CONSTANTS.INT24_UNSIGNED_MAX), enum: z.enum(['a', 'b', 'c']), real: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), serial: z.int().gte(0).lte(Number.MAX_SAFE_INTEGER), smallint1: z.int().gte(CONSTANTS.INT16_MIN).lte(CONSTANTS.INT16_MAX), smallint2: z.int().gte(0).lte(CONSTANTS.INT16_UNSIGNED_MAX), text1: z.string().max(CONSTANTS.INT16_UNSIGNED_MAX), text2: z.enum(['a', 'b', 'c']), time: z.string(), timestamp1: z.date(), timestamp2: z.string(), tinyint1: z.int().gte(CONSTANTS.INT8_MIN).lte(CONSTANTS.INT8_MAX), tinyint2: z.int().gte(0).lte(CONSTANTS.INT8_UNSIGNED_MAX), varchar1: z.string().max(10), varchar2: z.enum(['a', 'b', 'c']), varbinary: z.string(), year: z.int().gte(1901).lte(2155), longtext1: z.string().max(CONSTANTS.INT32_UNSIGNED_MAX), longtext2: z.enum(['a', 'b', 'c']), mediumtext1: z.string().max(CONSTANTS.INT24_UNSIGNED_MAX), mediumtext2: z.enum(['a', 'b', 'c']), tinytext1: z.string().max(CONSTANTS.INT8_UNSIGNED_MAX), tinytext2: z.enum(['a', 'b', 'c']), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('type coercion - all', (t) => { const table = singlestoreTable('test', ({ bigint, boolean, timestamp, int, text, }) => ({ bigint: bigint({ mode: 'bigint' }).notNull(), boolean: boolean().notNull(), timestamp: timestamp().notNull(), int: int().notNull(), text: text().notNull(), })); const { createSelectSchema } = createSchemaFactory({ coerce: true, }); const result = createSelectSchema(table); const expected = z.object({ bigint: z.coerce.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), boolean: z.coerce.boolean(), timestamp: z.coerce.date(), int: z.coerce.number().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX).int(), text: z.coerce.string().max(CONSTANTS.INT16_UNSIGNED_MAX), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('type coercion - mixed', (t) => { const table = singlestoreTable('test', ({ timestamp, int, }) => ({ timestamp: timestamp().notNull(), int: int().notNull(), })); const { createSelectSchema } = createSchemaFactory({ coerce: { date: true, }, }); const result = createSelectSchema(table); const expected = z.object({ timestamp: z.coerce.date(), int: z.int().gte(CONSTANTS.INT32_MIN).lte(CONSTANTS.INT32_MAX), }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: z.ZodType = z.custom().superRefine(() => {}); const table = singlestoreTable('test', { json: json().$type(), }); const result = createSelectSchema(table); const expected = z.object({ json: z.nullable(TopLevelCondition), }); Expect, z.infer>>(); } /* Disallow unknown keys in table refinement - select */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: z.string() }); } /* Disallow unknown keys in table refinement - insert */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: z.string() }); } /* Disallow unknown keys in table refinement - update */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: z.string() }); } // /* Disallow unknown keys in view qb - select */ { // const table = singlestoreTable('test', { id: int() }); // const view = mysqlView('test').as((qb) => qb.select().from(table)); // const nestedSelect = mysqlView('test').as((qb) => qb.select({ table }).from(table)); // // @ts-expect-error // createSelectSchema(view, { unknown: z.string() }); // // @ts-expect-error // createSelectSchema(nestedSelect, { table: { unknown: z.string() } }); // } // /* Disallow unknown keys in view columns - select */ { // const view = mysqlView('test', { id: int() }).as(sql``); // // @ts-expect-error // createSelectSchema(view, { unknown: z.string() }); // } ================================================ FILE: drizzle-zod/tests/sqlite.test.ts ================================================ import { type Equal, sql } from 'drizzle-orm'; import { blob, customType, int, sqliteTable, sqliteView, text } from 'drizzle-orm/sqlite-core'; import type { TopLevelCondition } from 'json-rules-engine'; import { test } from 'vitest'; import { z } from 'zod/v4'; import { bufferSchema, jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER); const textSchema = z.string(); test('table - select', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), generated: int().generatedAlwaysAs(1).notNull(), name: text().notNull(), }); const result = createSelectSchema(table); const expected = z.object({ id: intSchema, generated: intSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - insert', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), age: int(), }); const result = createInsertSchema(table); const expected = z.object({ id: intSchema.optional(), name: textSchema, age: intSchema.nullable().optional() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('table - update', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), age: int(), }); const result = createUpdateSchema(table); const expected = z.object({ id: intSchema.optional(), name: textSchema.optional(), age: intSchema.nullable().optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view qb - select', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }); const view = sqliteView('test').as((qb) => qb.select({ id: table.id, age: sql``.as('age') }).from(table)); const result = createSelectSchema(view); const expected = z.object({ id: intSchema, age: z.any() }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view columns - select', (t) => { const view = sqliteView('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }).as(sql``); const result = createSelectSchema(view); const expected = z.object({ id: intSchema, name: textSchema }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('view with nested fields - select', (t) => { const table = sqliteTable('test', { id: int().primaryKey({ autoIncrement: true }), name: text().notNull(), }); const view = sqliteView('test').as((qb) => qb.select({ id: table.id, nested: { name: table.name, age: sql``.as('age'), }, table, }).from(table) ); const result = createSelectSchema(view); const expected = z.object({ id: intSchema, nested: z.object({ name: textSchema, age: z.any() }), table: z.object({ id: intSchema, name: textSchema }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - select', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), }); const result = createSelectSchema(table); const expected = z.object({ c1: intSchema.nullable(), c2: intSchema, c3: intSchema.nullable(), c4: intSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - insert', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema, c3: intSchema.nullable().optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('nullability - update', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().default(1), c4: int().notNull().default(1), c5: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema.optional(), c3: intSchema.nullable().optional(), c4: intSchema.optional(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), }); const result = createSelectSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: intSchema.nullable(), c2: intSchema.lte(1000), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - select with custom data type', (t) => { const customText = customType({ dataType: () => 'text' }); const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: customText(), }); const customTextSchema = z.string().min(1).max(100); const result = createSelectSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), c4: customTextSchema, }); const expected = z.object({ c1: intSchema.nullable(), c2: intSchema.lte(1000), c3: z.string().transform(Number), c4: customTextSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - insert', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createInsertSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema.lte(1000), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine table - update', (t) => { const table = sqliteTable('test', { c1: int(), c2: int().notNull(), c3: int().notNull(), c4: int().generatedAlwaysAs(1), }); const result = createUpdateSchema(table, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }); const expected = z.object({ c1: intSchema.nullable().optional(), c2: intSchema.lte(1000).optional(), c3: z.string().transform(Number), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('refine view - select', (t) => { const table = sqliteTable('test', { c1: int(), c2: int(), c3: int(), c4: int(), c5: int(), c6: int(), }); const view = sqliteView('test').as((qb) => qb.select({ c1: table.c1, c2: table.c2, c3: table.c3, nested: { c4: table.c4, c5: table.c5, c6: table.c6, }, table, }).from(table) ); const result = createSelectSchema(view, { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), nested: { c5: (schema) => schema.lte(1000), c6: z.string().transform(Number), }, table: { c2: (schema) => schema.lte(1000), c3: z.string().transform(Number), }, }); const expected = z.object({ c1: intSchema.nullable(), c2: intSchema.lte(1000).nullable(), c3: z.string().transform(Number), nested: z.object({ c4: intSchema.nullable(), c5: intSchema.lte(1000).nullable(), c6: z.string().transform(Number), }), table: z.object({ c1: intSchema.nullable(), c2: intSchema.lte(1000).nullable(), c3: z.string().transform(Number), c4: intSchema.nullable(), c5: intSchema.nullable(), c6: intSchema.nullable(), }), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('all data types', (t) => { const table = sqliteTable('test', ({ blob, integer, numeric, real, text, }) => ({ blob1: blob({ mode: 'buffer' }).notNull(), blob2: blob({ mode: 'bigint' }).notNull(), blob3: blob({ mode: 'json' }).notNull(), integer1: integer({ mode: 'number' }).notNull(), integer2: integer({ mode: 'boolean' }).notNull(), integer3: integer({ mode: 'timestamp' }).notNull(), integer4: integer({ mode: 'timestamp_ms' }).notNull(), numeric: numeric().notNull(), real: real().notNull(), text1: text({ mode: 'text' }).notNull(), text2: text({ mode: 'text', length: 10 }).notNull(), text3: text({ mode: 'text', enum: ['a', 'b', 'c'] }).notNull(), text4: text({ mode: 'json' }).notNull(), })); const result = createSelectSchema(table); const expected = z.object({ blob1: bufferSchema, blob2: z.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), blob3: jsonSchema, integer1: z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), integer2: z.boolean(), integer3: z.date(), integer4: z.date(), numeric: z.string(), real: z.number().gte(CONSTANTS.INT48_MIN).lte(CONSTANTS.INT48_MAX), text1: z.string(), text2: z.string().max(10), text3: z.enum(['a', 'b', 'c']), text4: jsonSchema, }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('type coercion - all', (t) => { const table = sqliteTable('test', ({ blob, integer, text, }) => ({ blob: blob({ mode: 'bigint' }).notNull(), integer1: integer({ mode: 'boolean' }).notNull(), integer2: integer({ mode: 'timestamp' }).notNull(), integer3: integer().notNull(), text: text().notNull(), })); const { createSelectSchema } = createSchemaFactory({ coerce: true, }); const result = createSelectSchema(table); const expected = z.object({ blob: z.coerce.bigint().gte(CONSTANTS.INT64_MIN).lte(CONSTANTS.INT64_MAX), integer1: z.coerce.boolean(), integer2: z.coerce.date(), integer3: z.coerce.number().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER).int(), text: z.coerce.string(), }); expectSchemaShape(t, expected).from(result); Expect>(); }); test('type coercion - mixed', (t) => { const table = sqliteTable('test', ({ integer, }) => ({ integer1: integer({ mode: 'timestamp' }).notNull(), integer2: integer().notNull(), })); const { createSelectSchema } = createSchemaFactory({ coerce: { date: true, }, }); const result = createSelectSchema(table); const expected = z.object({ integer1: z.coerce.date(), integer2: z.int().gte(Number.MIN_SAFE_INTEGER).lte(Number.MAX_SAFE_INTEGER), }); expectSchemaShape(t, expected).from(result); Expect>(); }); /* Infinitely recursive type */ { const TopLevelCondition: z.ZodType = z.custom().superRefine(() => {}); const table = sqliteTable('test', { json1: text({ mode: 'json' }).$type().notNull(), json2: blob({ mode: 'json' }).$type(), }); const result = createSelectSchema(table); const expected = z.object({ json1: TopLevelCondition, json2: z.nullable(TopLevelCondition), }); Expect, z.infer>>(); } /* Disallow unknown keys in table refinement - select */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createSelectSchema(table, { unknown: z.string() }); } /* Disallow unknown keys in table refinement - insert */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createInsertSchema(table, { unknown: z.string() }); } /* Disallow unknown keys in table refinement - update */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error createUpdateSchema(table, { unknown: z.string() }); } /* Disallow unknown keys in view qb - select */ { const table = sqliteTable('test', { id: int() }); const view = sqliteView('test').as((qb) => qb.select().from(table)); const nestedSelect = sqliteView('test').as((qb) => qb.select({ table }).from(table)); // @ts-expect-error createSelectSchema(view, { unknown: z.string() }); // @ts-expect-error createSelectSchema(nestedSelect, { table: { unknown: z.string() } }); } /* Disallow unknown keys in view columns - select */ { const view = sqliteView('test', { id: int() }).as(sql``); // @ts-expect-error createSelectSchema(view, { unknown: z.string() }); } ================================================ FILE: drizzle-zod/tests/tsconfig.json ================================================ { "extends": "../tsconfig.json", "compilerOptions": { "module": "esnext", "target": "esnext", "noEmit": true, "rootDir": "..", "outDir": "./.cache" }, "include": [".", "../src"] } ================================================ FILE: drizzle-zod/tests/utils.ts ================================================ import { expect, type TaskContext } from 'vitest'; import type { z } from 'zod/v4'; export function expectSchemaShape>(t: TaskContext, expected: T) { return { from(actual: T) { expect(Object.keys(actual.shape)).toStrictEqual(Object.keys(expected.shape)); for (const key in Object.keys(actual.shape)) { expect(actual.shape[key]?._zod.def).toStrictEqual(expected.shape[key]?._zod.def); } }, }; } export function expectEnumValues>(t: TaskContext, expected: T) { return { from(actual: T) { expect(actual.def).toStrictEqual(expected.def); }, }; } export function Expect<_ extends true>() {} ================================================ FILE: drizzle-zod/tsconfig.build.json ================================================ { "extends": "./tsconfig.json", "compilerOptions": { "rootDir": "src", "stripInternal": true }, "include": ["src"] } ================================================ FILE: drizzle-zod/tsconfig.json ================================================ { "extends": "../tsconfig.json", "compilerOptions": { "outDir": "dist", "baseUrl": ".", "declaration": true, "noEmit": true, "paths": { "~/*": ["src/*"] } }, "include": ["src", "*.ts"] } ================================================ FILE: drizzle-zod/vitest.config.ts ================================================ import tsconfigPaths from 'vite-tsconfig-paths'; import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { include: [ 'tests/**/*.test.ts', ], exclude: [ 'tests/bun/**/*', ], typecheck: { tsconfig: 'tsconfig.json', }, testTimeout: 100000, hookTimeout: 100000, isolate: false, poolOptions: { threads: { singleThread: true, }, }, }, plugins: [tsconfigPaths()], }); ================================================ FILE: eslint/eslint-plugin-drizzle-internal/index.js ================================================ // @ts-nocheck const { ESLintUtils } = require('@typescript-eslint/experimental-utils'); const ts = require('typescript'); module.exports = { rules: { 'require-entity-kind': ESLintUtils.RuleCreator((name) => name)({ meta: { type: 'problem', docs: { description: 'Enforce the usage of a static readonly [entityKind] property on Drizzle classes', recommended: 'error', }, messages: { missingEntityKind: "Class '{{name}}' doesn't have a static readonly [entityKind] property defined with a string value.", }, schema: [], fixable: 'code', }, defaultOptions: [], create(context) { const parserServices = ESLintUtils.getParserServices(context); const checker = parserServices.program.getTypeChecker(); return { ClassDeclaration(node) { const tsNode = parserServices.esTreeNodeToTSNodeMap.get(node); const className = tsNode.name ? tsNode.name.text : undefined; ts.SyntaxKind.PropertyDeclaration; for (const prop of tsNode.members) { if ( prop.kind === ts.SyntaxKind.PropertyDeclaration && prop.modifiers?.some( (m) => m.kind === ts.SyntaxKind.StaticKeyword, ) && prop.modifiers?.some( (m) => m.kind === ts.SyntaxKind.ReadonlyKeyword, ) && ts.isComputedPropertyName(prop.name) && ts.isIdentifier(prop.name.expression) && prop.name.expression.escapedText === 'entityKind' && checker .getTypeAtLocation(prop.initializer) .isStringLiteral() ) { return; } } context.report({ node, messageId: 'missingEntityKind', data: { name: className, }, fix(fixer) { const classBodyOpeningCurlyToken = context .getSourceCode() .getFirstToken(node.body); const insertionPoint = classBodyOpeningCurlyToken.range[1]; return fixer.insertTextAfterRange( [insertionPoint, insertionPoint], `\n\tstatic readonly [entityKind]: string = '${className}';\n`, ); }, }); }, }; }, }), }, }; ================================================ FILE: eslint-plugin-drizzle/.gitignore ================================================ node_modules dist tsconfig.tsbuildinfo ================================================ FILE: eslint-plugin-drizzle/package.json ================================================ { "name": "eslint-plugin-drizzle", "version": "0.2.3", "description": "Eslint plugin for drizzle users to avoid common pitfalls", "main": "src/index.js", "scripts": { "test": "vitest run", "build": "tsc -b && pnpm cpy readme.md dist/", "pack": "(cd dist && npm pack --pack-destination ..) && rm -f package.tgz && mv *.tgz package.tgz", "publish": "npm publish package.tgz" }, "keywords": [ "eslint", "eslintplugin", "eslint-plugin", "drizzle" ], "author": "Drizzle Team", "repository": { "type": "git", "url": "git+https://github.com/drizzle-team/drizzle-orm/tree/main/eslint-plugin-drizzle.git" }, "license": "Apache-2.0", "devDependencies": { "@types/node": "^20.10.1", "@typescript-eslint/parser": "^6.10.0", "@typescript-eslint/rule-tester": "^6.10.0", "@typescript-eslint/utils": "^6.10.0", "cpy-cli": "^5.0.0", "eslint": "^8.53.0", "typescript": "^5.2.2", "vitest": "^3.1.3" }, "peerDependencies": { "eslint": ">=8.0.0" } } ================================================ FILE: eslint-plugin-drizzle/readme.md ================================================ # eslint-plugin-drizzle For cases where it's impossible to perform type checks for specific scenarios, or where it's possible but error messages would be challenging to understand, we've decided to create an ESLint package with recommended rules. This package aims to assist developers in handling crucial scenarios during development > Big thanks to @Angelelz for initiating the development of this package and transferring it to the Drizzle Team's npm ## Install ```sh [ npm | yarn | pnpm | bun ] install eslint eslint-plugin-drizzle ``` You can install those packages for typescript support in your IDE ```sh [ npm | yarn | pnpm | bun ] install @typescript-eslint/eslint-plugin @typescript-eslint/parser ``` ## Usage Create a `.eslintrc.yml` file, add `drizzle` to the `plugins`, and specify the rules you want to use. You can find a list of all existing rules below ```yml root: true parser: '@typescript-eslint/parser' parserOptions: project: './tsconfig.json' plugins: - drizzle rules: 'drizzle/enforce-delete-with-where': "error" 'drizzle/enforce-update-with-where': "error" ``` ### All config This plugin exports an [`all` config](src/configs/all.js) that makes use of all rules (except for deprecated ones). ```yml root: true extends: - "plugin:drizzle/all" parser: '@typescript-eslint/parser' parserOptions: project: './tsconfig.json' plugins: - drizzle ``` At the moment, `all` is equivalent to `recommended` ```yml root: true extends: - "plugin:drizzle/recommended" parser: '@typescript-eslint/parser' parserOptions: project: './tsconfig.json' plugins: - drizzle ``` ## Rules **enforce-delete-with-where**: Enforce using `delete` with the`.where()` clause in the `.delete()` statement. Most of the time, you don't need to delete all rows in the table and require some kind of `WHERE` statements. Optionally, you can define a `drizzleObjectName` in the plugin options that accept a `string` or `string[]`. This is useful when you have objects or classes with a delete method that's not from Drizzle. Such a `delete` method will trigger the ESLint rule. To avoid that, you can define the name of the Drizzle object that you use in your codebase (like db) so that the rule would only trigger if the delete method comes from this object: Example, config 1: ```json "rules": { "drizzle/enforce-delete-with-where": ["error"] } ``` ```ts class MyClass { public delete() { return {} } } const myClassObj = new MyClass(); // ---> Will be triggered by ESLint Rule myClassObj.delete() const db = drizzle(...) // ---> Will be triggered by ESLint Rule db.delete() ``` Example, config 2: ```json "rules": { "drizzle/enforce-delete-with-where": ["error", { "drizzleObjectName": ["db"] }], } ``` ```ts class MyClass { public delete() { return {} } } const myClassObj = new MyClass(); // ---> Will NOT be triggered by ESLint Rule myClassObj.delete() const db = drizzle(...) // ---> Will be triggered by ESLint Rule db.delete() ``` **enforce-update-with-where**: Enforce using `update` with the`.where()` clause in the `.update()` statement. Most of the time, you don't need to update all rows in the table and require some kind of `WHERE` statements. Optionally, you can define a `drizzleObjectName` in the plugin options that accept a `string` or `string[]`. This is useful when you have objects or classes with a delete method that's not from Drizzle. Such as `update` method will trigger the ESLint rule. To avoid that, you can define the name of the Drizzle object that you use in your codebase (like db) so that the rule would only trigger if the delete method comes from this object: Example, config 1: ```json "rules": { "drizzle/enforce-update-with-where": ["error"] } ``` ```ts class MyClass { public update() { return {} } } const myClassObj = new MyClass(); // ---> Will be triggered by ESLint Rule myClassObj.update() const db = drizzle(...) // ---> Will be triggered by ESLint Rule db.update() ``` Example, config 2: ```json "rules": { "drizzle/enforce-update-with-where": ["error", { "drizzleObjectName": ["db"] }], } ``` ```ts class MyClass { public update() { return {} } } const myClassObj = new MyClass(); // ---> Will NOT be triggered by ESLint Rule myClassObj.update() const db = drizzle(...) // ---> Will be triggered by ESLint Rule db.update() ``` ================================================ FILE: eslint-plugin-drizzle/src/configs/all.ts ================================================ export default { env: { es2024: true, }, parserOptions: { ecmaVersion: 'latest', sourceType: 'module', }, plugins: ['drizzle'], rules: { 'drizzle/enforce-delete-with-where': 'error', 'drizzle/enforce-update-with-where': 'error', }, }; ================================================ FILE: eslint-plugin-drizzle/src/configs/recommended.ts ================================================ export default { env: { es2024: true, }, parserOptions: { ecmaVersion: 'latest', sourceType: 'module', }, plugins: ['drizzle'], rules: { 'drizzle/enforce-delete-with-where': 'error', 'drizzle/enforce-update-with-where': 'error', }, }; ================================================ FILE: eslint-plugin-drizzle/src/enforce-delete-with-where.ts ================================================ import { ESLintUtils } from '@typescript-eslint/utils'; import { resolveMemberExpressionPath } from './utils/ast'; import { isDrizzleObj, type Options } from './utils/options'; const createRule = ESLintUtils.RuleCreator(() => 'https://github.com/drizzle-team/eslint-plugin-drizzle'); type MessageIds = 'enforceDeleteWithWhere'; let lastNodeName: string = ''; const deleteRule = createRule({ defaultOptions: [{ drizzleObjectName: [] }], name: 'enforce-delete-with-where', meta: { type: 'problem', docs: { description: 'Enforce that `delete` method is used with `where` to avoid deleting all the rows in a table.', }, fixable: 'code', messages: { enforceDeleteWithWhere: "Without `.where(...)` you will delete all the rows in a table. If you didn't want to do it, please use `{{ drizzleObjName }}.delete(...).where(...)` instead. Otherwise you can ignore this rule here", }, schema: [{ type: 'object', properties: { drizzleObjectName: { type: ['string', 'array'], }, }, additionalProperties: false, }], }, create(context, options) { return { MemberExpression: (node) => { if (node.property.type === 'Identifier') { if (node.property.name === 'delete' && lastNodeName !== 'where' && isDrizzleObj(node, options)) { context.report({ node, messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: resolveMemberExpressionPath(node), }, }); } lastNodeName = node.property.name; } return; }, }; }, }); export default deleteRule; ================================================ FILE: eslint-plugin-drizzle/src/enforce-update-with-where.ts ================================================ import { ESLintUtils } from '@typescript-eslint/utils'; import { resolveMemberExpressionPath } from './utils/ast'; import { isDrizzleObj, type Options } from './utils/options'; const createRule = ESLintUtils.RuleCreator(() => 'https://github.com/drizzle-team/eslint-plugin-drizzle'); type MessageIds = 'enforceUpdateWithWhere'; let lastNodeName: string = ''; const updateRule = createRule({ defaultOptions: [{ drizzleObjectName: [] }], name: 'enforce-update-with-where', meta: { type: 'problem', docs: { description: 'Enforce that `update` method is used with `where` to avoid deleting all the rows in a table.', }, fixable: 'code', messages: { enforceUpdateWithWhere: "Without `.where(...)` you will update all the rows in a table. If you didn't want to do it, please use `{{ drizzleObjName }}.update(...).set(...).where(...)` instead. Otherwise you can ignore this rule here", }, schema: [{ type: 'object', properties: { drizzleObjectName: { type: ['string', 'array'], }, }, additionalProperties: false, }], }, create(context, options) { return { MemberExpression: (node) => { if (node.property.type === 'Identifier') { if ( lastNodeName !== 'where' && node.property.name === 'set' && node.object.type === 'CallExpression' && node.object.callee.type === 'MemberExpression' && node.object.callee.property.type === 'Identifier' && node.object.callee.property.name === 'update' && isDrizzleObj(node.object.callee, options) ) { context.report({ node, messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: resolveMemberExpressionPath(node.object.callee), }, }); } lastNodeName = node.property.name; } return; }, }; }, }); export default updateRule; ================================================ FILE: eslint-plugin-drizzle/src/index.ts ================================================ import type { TSESLint } from '@typescript-eslint/utils'; import { name, version } from '../package.json'; import all from './configs/all'; import recommended from './configs/recommended'; import deleteRule from './enforce-delete-with-where'; import updateRule from './enforce-update-with-where'; import type { Options } from './utils/options'; export const rules = { 'enforce-delete-with-where': deleteRule, 'enforce-update-with-where': updateRule, } satisfies Record>; export const configs = { all, recommended }; export const meta = { name, version }; ================================================ FILE: eslint-plugin-drizzle/src/utils/ast.ts ================================================ import type { TSESTree } from '@typescript-eslint/utils'; export const resolveMemberExpressionPath = (node: TSESTree.MemberExpression) => { let objectExpression = node.object; let fullName = ''; const addToFullName = (name: string) => { const prefix = fullName ? '.' : ''; fullName = `${name}${prefix}${fullName}`; }; while (objectExpression) { if (objectExpression.type === 'MemberExpression') { if (objectExpression.property.type === 'Identifier') { addToFullName(objectExpression.property.name); } objectExpression = objectExpression.object; } else if (objectExpression.type === 'CallExpression' && objectExpression.callee.type === 'Identifier') { addToFullName(`${objectExpression.callee.name}(...)`); break; } else if (objectExpression.type === 'CallExpression' && objectExpression.callee.type === 'MemberExpression') { if (objectExpression.callee.property.type === 'Identifier') { addToFullName(`${objectExpression.callee.property.name}(...)`); } objectExpression = objectExpression.callee.object; } else if (objectExpression.type === 'Identifier') { addToFullName(objectExpression.name); break; } else if (objectExpression.type === 'ThisExpression') { addToFullName('this'); break; } else { break; } } return fullName; }; ================================================ FILE: eslint-plugin-drizzle/src/utils/options.ts ================================================ import type { TSESTree } from '@typescript-eslint/utils'; export type Options = readonly [{ drizzleObjectName: string[] | string; }]; const isDrizzleObjName = (name: string, drizzleObjectName: string[] | string) => { if (typeof drizzleObjectName === 'string') { return name === drizzleObjectName; } if (Array.isArray(drizzleObjectName)) { if (drizzleObjectName.length === 0) { return true; } return drizzleObjectName.includes(name); } return false; }; export const isDrizzleObj = ( node: TSESTree.MemberExpression, options: Options, ) => { const drizzleObjectName = options[0].drizzleObjectName; if (node.object.type === 'Identifier') { return isDrizzleObjName(node.object.name, drizzleObjectName); } else if (node.object.type === 'MemberExpression' && node.object.property.type === 'Identifier') { return isDrizzleObjName(node.object.property.name, drizzleObjectName); } else if (node.object.type === 'CallExpression') { if (node.object.callee.type === 'Identifier') { return isDrizzleObjName(node.object.callee.name, drizzleObjectName); } else if (node.object.callee.type === 'MemberExpression' && node.object.callee.property.type === 'Identifier') { return isDrizzleObjName(node.object.callee.property.name, drizzleObjectName); } } return false; }; ================================================ FILE: eslint-plugin-drizzle/tests/delete.test.ts ================================================ // @ts-ignore import { RuleTester } from '@typescript-eslint/rule-tester'; import myRule from '../src/enforce-delete-with-where'; const parserResolver = require.resolve('@typescript-eslint/parser'); const ruleTester = new RuleTester({ parser: parserResolver, }); ruleTester.run('enforce delete with where (default options)', myRule, { valid: [ 'const a = db.delete({}).where({});', 'delete db.something', `dataSource .delete() .where()`, `this.database.delete({}).where()`, ], invalid: [ { code: 'db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'db' } }], }, { code: 'this.dataSource.db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.dataSource.db' } }], }, { code: 'const a = await db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'db' } }], }, { code: 'const a = db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'db' } }], }, { code: `const a = database .delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'database' } }], }, { code: `const a = getDatabase().delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], }, { code: `const a = getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], }, { code: `const a = getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], }, { code: `const a = this.dataSource.getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.dataSource.getDatabase(...)' } }], }, { code: `const a = this.getDataSource().getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.getDataSource(...).getDatabase(...)' }, }], }, { code: `const a = this.getDataSource().db.delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.getDataSource(...).db' }, }], }, ], }); ruleTester.run('enforce delete with where (string option)', myRule, { valid: [ { code: 'const a = db.delete({}).where({});', options: [{ drizzleObjectName: 'db' }] }, { code: 'const a = this.database.db.delete({}).where({});', options: [{ drizzleObjectName: 'db' }] }, { code: 'const a = something.delete({})', options: [{ drizzleObjectName: 'db' }] }, { code: 'delete db.something', options: [{ drizzleObjectName: 'db' }] }, { code: `dataSource .delete() .where()`, options: [{ drizzleObjectName: 'db' }], }, { code: `const a = database .delete({})`, options: [{ drizzleObjectName: 'db' }], }, { code: `const a = getDatabase(arg1, arg2).delete({})`, options: [{ drizzleObjectName: 'db' }], }, { code: `const a = this.database.getDatabase().delete({})`, options: [{ drizzleObjectName: 'db' }], }, { code: `const a = this.getDataSource().getDatabase(arg1, arg2).delete({})`, options: [{ drizzleObjectName: 'db' }], }, { code: `const a = this.getDataSource().db.delete({})`, options: [{ drizzleObjectName: 'getDataSource' }], }, ], invalid: [ { code: 'db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'db' } }], options: [{ drizzleObjectName: 'db' }], }, { code: 'this.database.db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.database.db' } }], options: [{ drizzleObjectName: 'db' }], }, { code: 'const a = await db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'db' } }], options: [{ drizzleObjectName: 'db' }], }, { code: 'const a = db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'db' } }], options: [{ drizzleObjectName: 'db' }], }, { code: `const a = getDatabase().delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: 'getDatabase' }], }, { code: `const a = getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: 'getDatabase' }], }, { code: `const a = getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: 'getDatabase' }], }, { code: `const a = this.dataSource.getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.dataSource.getDatabase(...)' } }], options: [{ drizzleObjectName: 'getDatabase' }], }, { code: `const a = this.getDataSource().getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.getDataSource(...).getDatabase(...)' }, }], options: [{ drizzleObjectName: 'getDatabase' }], }, { code: `const a = this.getDataSource().db.delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.getDataSource(...).db' }, }], options: [{ drizzleObjectName: 'db' }], }, ], }); ruleTester.run('enforce delete with where (array option)', myRule, { valid: [ { code: 'const a = db.delete({}).where({});', options: [{ drizzleObjectName: ['db'] }] }, { code: 'const a = this.database.dataSource.delete({}).where({});', options: [{ drizzleObjectName: ['db', 'dataSource'] }], }, { code: 'delete db.something', options: [{ drizzleObjectName: ['db'] }] }, { code: `dataSource .delete() .where()`, options: [{ drizzleObjectName: ['db', 'dataSource'] }], }, { code: `const a = database .delete({})`, options: [{ drizzleObjectName: ['db'] }], }, { code: `const a = getDatabase(arg1, arg2).delete({})`, options: [{ drizzleObjectName: ['db'] }], }, { code: `const a = this.database.getDatabase().delete({})`, options: [{ drizzleObjectName: ['db', 'database'] }], }, { code: `const a = this.getDataSource().getDatabase(arg1, arg2).delete({})`, options: [{ drizzleObjectName: ['db', 'getDataSource'] }], }, { code: `const a = this.getDataSource().db.delete({})`, options: [{ drizzleObjectName: ['getDataSource'] }], }, ], invalid: [ { code: 'db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'db' } }], options: [{ drizzleObjectName: ['db', 'anotherName'] }], }, { code: 'this.dataSource.db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.dataSource.db' } }], options: [{ drizzleObjectName: ['db', 'anotherName'] }], }, { code: 'dataSource.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'dataSource' } }], options: [{ drizzleObjectName: ['db', 'dataSource'] }], }, { code: 'const a = await db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'db' } }], options: [{ drizzleObjectName: ['db'] }], }, { code: 'const a = db.delete({})', errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'db' } }], options: [{ drizzleObjectName: ['db'] }], }, { code: `const a = getDatabase().delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: ['getDatabase', 'db'] }], }, { code: `const a = getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: ['getDatabase', 'db'] }], }, { code: `const a = getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: ['getDatabase', 'db'] }], }, { code: `const a = this.dataSource.getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.dataSource.getDatabase(...)' } }], options: [{ drizzleObjectName: ['getDatabase', 'dataSource'] }], }, { code: `const a = this.getDataSource().getDatabase(arg1, arg2).delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.getDataSource(...).getDatabase(...)' }, }], options: [{ drizzleObjectName: ['getDatabase'] }], }, { code: `const a = this.getDataSource().db.delete({})`, errors: [{ messageId: 'enforceDeleteWithWhere', data: { drizzleObjName: 'this.getDataSource(...).db' }, }], options: [{ drizzleObjectName: ['db'] }], }, ], }); ================================================ FILE: eslint-plugin-drizzle/tests/update.test.ts ================================================ // @ts-ignore import { RuleTester } from '@typescript-eslint/rule-tester'; import myRule from '../src/enforce-update-with-where'; const parserResolver = require.resolve('@typescript-eslint/parser'); const ruleTester = new RuleTester({ parser: parserResolver, }); ruleTester.run('enforce update with where (default options)', myRule, { valid: [ 'const a = db.update({}).set().where({});', 'const a = db.update();', 'update()', `db .update() .set() .where()`, `dataSource .update() .set() .where()`, `this .dataSource .update() .set() .where()`, ], invalid: [ { code: 'db.update({}).set()', errors: [{ messageId: 'enforceUpdateWithWhere' }], }, { code: 'this.database.db.update({}).set()', errors: [{ messageId: 'enforceUpdateWithWhere' }], }, { code: 'const a = await db.update({}).set()', errors: [{ messageId: 'enforceUpdateWithWhere' }], }, { code: 'const a = db.update({}).set', errors: [{ messageId: 'enforceUpdateWithWhere' }], }, { code: `const a = database .update({}) .set()`, errors: [{ messageId: 'enforceUpdateWithWhere' }], }, { code: `const a = getDatabase().update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], }, { code: `const a = getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], }, { code: `const a = getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], }, { code: `const a = this.dataSource.getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'this.dataSource.getDatabase(...)' } }], }, { code: `const a = this.getDataSource().getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'this.getDataSource(...).getDatabase(...)' }, }], }, { code: `const a = this.getDataSource().db.update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'this.getDataSource(...).db' }, }], }, ], }); ruleTester.run('enforce update with where (string option)', myRule, { valid: [ { code: 'const a = db.update({}).set().where({});', options: [{ drizzleObjectName: 'db' }] }, { code: 'const a = this.database.db.update({}).set().where({});', options: [{ drizzleObjectName: 'db' }] }, { code: 'update.db.update()', options: [{ drizzleObjectName: 'db' }] }, { code: `dataSource .update() .set()`, options: [{ drizzleObjectName: 'db' }], }, { code: `const a = database .update({})`, options: [{ drizzleObjectName: 'db' }], }, { code: `const a = getDatabase(arg1, arg2).update({}).set()`, options: [{ drizzleObjectName: 'db' }], }, { code: `const a = this.database.getDatabase().update({}).set()`, options: [{ drizzleObjectName: 'db' }], }, { code: `const a = this.getDataSource().getDatabase(arg1, arg2).update({}).set()`, options: [{ drizzleObjectName: 'db' }], }, { code: `const a = this.getDataSource().db.update({}).set()`, options: [{ drizzleObjectName: 'getDataSource' }], }, ], invalid: [ { code: 'db.update({}).set({})', errors: [{ messageId: 'enforceUpdateWithWhere' }], options: [{ drizzleObjectName: 'db' }], }, { code: 'this.dataSource.db.update({}).set({})', errors: [{ messageId: 'enforceUpdateWithWhere' }], options: [{ drizzleObjectName: 'db' }], }, { code: 'const a = await db.update({}).set()', errors: [{ messageId: 'enforceUpdateWithWhere' }], options: [{ drizzleObjectName: 'db' }], }, { code: 'const a = db.update({}).set()', errors: [{ messageId: 'enforceUpdateWithWhere' }], options: [{ drizzleObjectName: 'db' }], }, { code: `const a = getDatabase().update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: 'getDatabase' }], }, { code: `const a = getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: 'getDatabase' }], }, { code: `const a = getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: 'getDatabase' }], }, { code: `const a = this.dataSource.getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'this.dataSource.getDatabase(...)' } }], options: [{ drizzleObjectName: 'getDatabase' }], }, { code: `const a = this.getDataSource().getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'this.getDataSource(...).getDatabase(...)' }, }], options: [{ drizzleObjectName: 'getDatabase' }], }, { code: `const a = this.getDataSource().db.update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'this.getDataSource(...).db' }, }], options: [{ drizzleObjectName: 'db' }], }, ], }); ruleTester.run('enforce update with where (array option)', myRule, { valid: [ { code: 'const a = db.update({}).set().where({});', options: [{ drizzleObjectName: ['db'] }] }, { code: 'const a = this.dataSource.db.update({}).set().where({});', options: [{ drizzleObjectName: ['db'] }] }, { code: 'update.db.something', options: [{ drizzleObjectName: ['db'] }] }, { code: `dataSource .update() .set() .where()`, options: [{ drizzleObjectName: ['db', 'dataSource'] }], }, { code: `const a = database .update({})`, options: [{ drizzleObjectName: ['db'] }], }, { code: `const a = getDatabase(arg1, arg2).update({}).set()`, options: [{ drizzleObjectName: ['db'] }], }, { code: `const a = this.database.getDatabase().update({}).set()`, options: [{ drizzleObjectName: ['db', 'database'] }], }, { code: `const a = this.getDataSource().getDatabase(arg1, arg2).update({}).set()`, options: [{ drizzleObjectName: ['db', 'getDataSource'] }], }, { code: `const a = this.getDataSource().db.update({}).set()`, options: [{ drizzleObjectName: ['getDataSource'] }], }, ], invalid: [ { code: 'db.update({}).set()', errors: [{ messageId: 'enforceUpdateWithWhere' }], options: [{ drizzleObjectName: ['db', 'anotherName'] }], }, { code: 'this.dataSource.db.update({}).set()', errors: [{ messageId: 'enforceUpdateWithWhere' }], options: [{ drizzleObjectName: ['db', 'anotherName'] }], }, { code: 'dataSource.update({}).set({})', errors: [{ messageId: 'enforceUpdateWithWhere' }], options: [{ drizzleObjectName: ['db', 'dataSource'] }], }, { code: 'const a = await db.update({}).set()', errors: [{ messageId: 'enforceUpdateWithWhere' }], options: [{ drizzleObjectName: ['db'] }], }, { code: 'const a = db.update({}).set()', errors: [{ messageId: 'enforceUpdateWithWhere' }], options: [{ drizzleObjectName: ['db'] }], }, { code: `const a = getDatabase().update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: ['getDatabase', 'db'] }], }, { code: `const a = getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: ['getDatabase', 'db'] }], }, { code: `const a = getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'getDatabase(...)' } }], options: [{ drizzleObjectName: ['getDatabase', 'db'] }], }, { code: `const a = this.dataSource.getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'this.dataSource.getDatabase(...)' } }], options: [{ drizzleObjectName: ['getDatabase', 'dataSource'] }], }, { code: `const a = this.getDataSource().getDatabase(arg1, arg2).update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'this.getDataSource(...).getDatabase(...)' }, }], options: [{ drizzleObjectName: ['getDatabase'] }], }, { code: `const a = this.getDataSource().db.update({}).set()`, errors: [{ messageId: 'enforceUpdateWithWhere', data: { drizzleObjName: 'this.getDataSource(...).db' }, }], options: [{ drizzleObjectName: ['db'] }], }, ], }); ================================================ FILE: eslint-plugin-drizzle/tsconfig.json ================================================ { "compilerOptions": { "strict": true, "noUnusedLocals": true, "noUnusedParameters": true, "noImplicitReturns": true, "noFallthroughCasesInSwitch": true, "moduleResolution": "nodenext", "esModuleInterop": true, "forceConsistentCasingInFileNames": true, "allowJs": true, "resolveJsonModule": true, "lib": [ "esnext" ], "composite": false, "incremental": false, "skipLibCheck": true, "outDir": "dist", "module": "nodenext", "target": "es6", "exactOptionalPropertyTypes": true, "noUncheckedIndexedAccess": true }, "include": [ "src/**/*.ts", "package.json" ] } ================================================ FILE: eslint-plugin-drizzle/vitest.config.ts ================================================ import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { globals: true, exclude: ['**/dist/**'], }, }); ================================================ FILE: integration-tests/.gitignore ================================================ .env *.db trace tests/imports/imports.cjs tests/imports/imports.mjs .sst ================================================ FILE: integration-tests/.xata/migrations/.ledger ================================================ ================================================ FILE: integration-tests/.xata/version/compatibility.json ================================================ { "@xata.io/cli": { "latest": "0.15.10", "compatibility": [{ "range": ">=0.0.0" }] }, "@xata.io/client": { "latest": "0.29.3", "compatibility": [{ "range": ">=0.0.0" }] } } ================================================ FILE: integration-tests/.xatarc ================================================ { "databaseURL": "https://Andrii-Sherman-s-workspace-2r5ujp.us-east-1.xata.sh/db/integration-tests", "codegen": { "output": "tests/xata/xata.ts" } } ================================================ FILE: integration-tests/docker-neon.yml ================================================ services: postgres: image: 'postgres:latest' environment: POSTGRES_USER: postgres POSTGRES_PASSWORD: postgres POSTGRES_DB: postgres ports: - '5432:5441' healthcheck: test: ['CMD-SHELL', 'pg_isready -U postgres'] interval: 10s timeout: 5s retries: 5 neon-proxy: image: ghcr.io/timowilhelm/local-neon-http-proxy:main environment: - PG_CONNECTION_STRING=postgres://postgres:postgres@postgres:5432/postgres ports: - '4444:4444' depends_on: postgres: condition: service_healthy pg_proxy: image: ghcr.io/neondatabase/wsproxy:latest environment: APPEND_PORT: 'postgres:5432' ALLOW_ADDR_REGEX: '.*' LOG_TRAFFIC: 'true' ports: - '5446:80' depends_on: - postgres ================================================ FILE: integration-tests/drizzle2/mysql/0000_nostalgic_carnage.sql ================================================ CREATE TABLE `cities_migration` ( `id` int, `fullname_name` text, `state` text ); --> statement-breakpoint CREATE TABLE `users_migration` ( `id` int PRIMARY KEY NOT NULL, `full_name` text, `phone` int, `invited_by` int, `city_id` int, `date` timestamp DEFAULT (now()) ); --> statement-breakpoint CREATE TABLE `users12` ( `id` serial AUTO_INCREMENT PRIMARY KEY NOT NULL, `name` text NOT NULL, `email` text NOT NULL ); ================================================ FILE: integration-tests/drizzle2/mysql/meta/0000_snapshot.json ================================================ { "version": "5", "dialect": "mysql", "id": "8e8c8378-0496-40f6-88e3-98aab8282b1f", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "cities_migration": { "name": "cities_migration", "columns": { "id": { "name": "id", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "fullname_name": { "name": "fullname_name", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false }, "state": { "name": "state", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} }, "users_migration": { "name": "users_migration", "columns": { "id": { "name": "id", "type": "int", "primaryKey": true, "notNull": true, "autoincrement": false }, "full_name": { "name": "full_name", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false }, "phone": { "name": "phone", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "invited_by": { "name": "invited_by", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "city_id": { "name": "city_id", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "date": { "name": "date", "type": "timestamp", "primaryKey": false, "notNull": false, "autoincrement": false, "default": "(now())" } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} }, "users12": { "name": "users12", "columns": { "id": { "name": "id", "type": "serial", "primaryKey": true, "notNull": true, "autoincrement": true }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, "email": { "name": "email", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false } }, "indexes": { "my_unique_index": { "name": "my_unique_index", "columns": [ "name" ], "isUnique": true, "using": "btree" } }, "foreignKeys": {}, "compositePrimaryKeys": {} } }, "schemas": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} } } ================================================ FILE: integration-tests/drizzle2/mysql/meta/_journal.json ================================================ { "version": "5", "dialect": "mysql", "entries": [ { "idx": 0, "version": "5", "when": 1680270921944, "tag": "0000_nostalgic_carnage", "breakpoints": true } ] } ================================================ FILE: integration-tests/drizzle2/mysql-proxy/first/0000_nostalgic_carnage.sql ================================================ CREATE TABLE `userstest` ( `id` serial PRIMARY KEY, `name` text NOT NULL, `verified` boolean NOT NULL DEFAULT false, `jsonb` json, `created_at` timestamp NOT NULL DEFAULT now() ); ================================================ FILE: integration-tests/drizzle2/mysql-proxy/first/meta/0000_snapshot.json ================================================ { "version": "5", "dialect": "mysql", "id": "8e8c8378-0496-40f6-88e3-98aab8282b1f", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "userstest": { "name": "userstest", "schema": "", "columns": { "id": { "name": "id", "type": "serial", "primaryKey": true, "notNull": false, "autoincrement": true }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, "verified": { "name": "verified", "type": "boolean", "primaryKey": false, "notNull": true, "default": false, "autoincrement": false }, "jsonb": { "name": "jsonb", "type": "json", "primaryKey": false, "notNull": false, "autoincrement": false }, "created_at": { "name": "created_at", "type": "timestamp", "primaryKey": false, "notNull": true, "default": "now()", "autoincrement": false } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} } }, "schemas": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} } } ================================================ FILE: integration-tests/drizzle2/mysql-proxy/first/meta/_journal.json ================================================ { "version": "5", "dialect": "mysql", "entries": [ { "idx": 0, "version": "5", "when": 1680270921944, "tag": "0000_nostalgic_carnage", "breakpoints": true } ] } ================================================ FILE: integration-tests/drizzle2/mysql-proxy/second/0000_nostalgic_carnage.sql ================================================ CREATE TABLE `userstest` ( `id` serial PRIMARY KEY, `name` text NOT NULL, `verified` boolean NOT NULL DEFAULT false, `jsonb` json, `created_at` timestamp NOT NULL DEFAULT now() ); ================================================ FILE: integration-tests/drizzle2/mysql-proxy/second/0001_test.sql ================================================ CREATE TABLE `users12` ( `id` serial AUTO_INCREMENT PRIMARY KEY NOT NULL, `name` text NOT NULL, `email` text NOT NULL ); ================================================ FILE: integration-tests/drizzle2/mysql-proxy/second/meta/0000_snapshot.json ================================================ { "version": "5", "dialect": "mysql", "id": "8e8c8378-0496-40f6-88e3-98aab8282b1f", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "userstest": { "name": "userstest", "schema": "", "columns": { "id": { "name": "id", "type": "serial", "primaryKey": true, "notNull": false, "autoincrement": true }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, "verified": { "name": "verified", "type": "boolean", "primaryKey": false, "notNull": true, "default": false, "autoincrement": false }, "jsonb": { "name": "jsonb", "type": "json", "primaryKey": false, "notNull": false, "autoincrement": false }, "created_at": { "name": "created_at", "type": "timestamp", "primaryKey": false, "notNull": true, "default": "now()", "autoincrement": false } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} } }, "schemas": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} } } ================================================ FILE: integration-tests/drizzle2/mysql-proxy/second/meta/0001_snapshot.json ================================================ { "version": "5", "dialect": "mysql", "id": "47362df0-c353-4bd1-8107-fcc36f0e61bd", "prevId": "8e8c8378-0496-40f6-88e3-98aab8282b1f", "tables": { "userstest": { "name": "userstest", "schema": "", "columns": { "id": { "name": "id", "type": "serial", "primaryKey": true, "notNull": false, "autoincrement": true }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, "verified": { "name": "verified", "type": "boolean", "primaryKey": false, "notNull": true, "default": false, "autoincrement": false }, "jsonb": { "name": "jsonb", "type": "json", "primaryKey": false, "notNull": false, "autoincrement": false }, "created_at": { "name": "created_at", "type": "timestamp", "primaryKey": false, "notNull": true, "default": "now()", "autoincrement": false } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} }, "users12": { "name": "users12", "columns": { "id": { "name": "id", "type": "serial", "primaryKey": true, "notNull": true, "autoincrement": true }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, "email": { "name": "email", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false } }, "indexes": { "my_unique_index": { "name": "my_unique_index", "columns": ["name"], "isUnique": true, "using": "btree" } }, "foreignKeys": {}, "compositePrimaryKeys": {} } }, "schemas": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} } } ================================================ FILE: integration-tests/drizzle2/mysql-proxy/second/meta/_journal.json ================================================ { "version": "5", "dialect": "mysql", "entries": [ { "idx": 0, "version": "5", "when": 1680270921944, "tag": "0000_nostalgic_carnage", "breakpoints": true }, { "idx": 1, "version": "5", "when": 1680270921945, "tag": "0001_test", "breakpoints": true } ] } ================================================ FILE: integration-tests/drizzle2/pg/0000_puzzling_flatman.sql ================================================ CREATE TABLE IF NOT EXISTS "all_columns" ( "smallint" smallint, "smallint_def" smallint DEFAULT 10, "integer" integer, "integer_def" integer DEFAULT 10, "numeric" numeric, "numeric3" numeric, "numeric4" numeric(7, 7), "numeridef" numeric DEFAULT '100', "bigint" bigint, "bigintdef" bigint DEFAULT 100, "boolean" boolean, "boolean_def" boolean DEFAULT true, "text" text, "textdef" text DEFAULT 'text', "varchar" varchar, "varchardef" varchar DEFAULT 'text', "serial" serial NOT NULL, "bigserial" bigserial NOT NULL, "decimal" numeric(100, 2), "decimaldef" numeric(100, 2) DEFAULT '100.0', "doublePrecision" double precision, "doublePrecisiondef" double precision DEFAULT 100, "real" real, "realdef" real DEFAULT 100, "time2" time(6) with time zone, "timedefnow" time DEFAULT now(), "timestamp" timestamp, "timestamp2" timestamp (6) with time zone, "timestamp3" timestamp with time zone, "timestamp4" timestamp (4), "timestampdef" timestamp DEFAULT now(), "date" date, "datedef" date DEFAULT now(), "interval" interval, "intervaldef" interval DEFAULT '10 days' ); ================================================ FILE: integration-tests/drizzle2/pg/0001_test.sql ================================================ CREATE TABLE IF NOT EXISTS "users12" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "email" text NOT NULL ); ================================================ FILE: integration-tests/drizzle2/pg/meta/0000_snapshot.json ================================================ { "version": "5", "dialect": "pg", "id": "cb1644bb-c5da-465a-8d70-f63d81e34514", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "all_columns": { "name": "all_columns", "schema": "", "columns": { "smallint": { "name": "smallint", "type": "smallint", "primaryKey": false, "notNull": false }, "smallint_def": { "name": "smallint_def", "type": "smallint", "primaryKey": false, "notNull": false, "default": 10 }, "integer": { "name": "integer", "type": "integer", "primaryKey": false, "notNull": false }, "integer_def": { "name": "integer_def", "type": "integer", "primaryKey": false, "notNull": false, "default": 10 }, "numeric": { "name": "numeric", "type": "numeric", "primaryKey": false, "notNull": false }, "numeric3": { "name": "numeric3", "type": "numeric", "primaryKey": false, "notNull": false }, "numeric4": { "name": "numeric4", "type": "numeric(7, 7)", "primaryKey": false, "notNull": false }, "numeridef": { "name": "numeridef", "type": "numeric", "primaryKey": false, "notNull": false, "default": "'100'" }, "bigint": { "name": "bigint", "type": "bigint", "primaryKey": false, "notNull": false }, "bigintdef": { "name": "bigintdef", "type": "bigint", "primaryKey": false, "notNull": false, "default": 100 }, "boolean": { "name": "boolean", "type": "boolean", "primaryKey": false, "notNull": false }, "boolean_def": { "name": "boolean_def", "type": "boolean", "primaryKey": false, "notNull": false, "default": true }, "text": { "name": "text", "type": "text", "primaryKey": false, "notNull": false }, "textdef": { "name": "textdef", "type": "text", "primaryKey": false, "notNull": false, "default": "'text'" }, "varchar": { "name": "varchar", "type": "varchar", "primaryKey": false, "notNull": false }, "varchardef": { "name": "varchardef", "type": "varchar", "primaryKey": false, "notNull": false, "default": "'text'" }, "serial": { "name": "serial", "type": "serial", "primaryKey": false, "notNull": true }, "bigserial": { "name": "bigserial", "type": "bigserial", "primaryKey": false, "notNull": true }, "decimal": { "name": "decimal", "type": "numeric(100, 2)", "primaryKey": false, "notNull": false }, "decimaldef": { "name": "decimaldef", "type": "numeric(100, 2)", "primaryKey": false, "notNull": false, "default": "'100.0'" }, "doublePrecision": { "name": "doublePrecision", "type": "double precision", "primaryKey": false, "notNull": false }, "doublePrecisiondef": { "name": "doublePrecisiondef", "type": "double precision", "primaryKey": false, "notNull": false, "default": 100 }, "real": { "name": "real", "type": "real", "primaryKey": false, "notNull": false }, "realdef": { "name": "realdef", "type": "real", "primaryKey": false, "notNull": false, "default": 100 }, "time2": { "name": "time2", "type": "time(6) with time zone", "primaryKey": false, "notNull": false }, "timedefnow": { "name": "timedefnow", "type": "time", "primaryKey": false, "notNull": false, "default": "now()" }, "timestamp": { "name": "timestamp", "type": "timestamp", "primaryKey": false, "notNull": false }, "timestamp2": { "name": "timestamp2", "type": "timestamp (6) with time zone", "primaryKey": false, "notNull": false }, "timestamp3": { "name": "timestamp3", "type": "timestamp with time zone", "primaryKey": false, "notNull": false }, "timestamp4": { "name": "timestamp4", "type": "timestamp (4)", "primaryKey": false, "notNull": false }, "timestampdef": { "name": "timestampdef", "type": "timestamp", "primaryKey": false, "notNull": false, "default": "now()" }, "date": { "name": "date", "type": "date", "primaryKey": false, "notNull": false }, "datedef": { "name": "datedef", "type": "date", "primaryKey": false, "notNull": false, "default": "now()" }, "interval": { "name": "interval", "type": "interval", "primaryKey": false, "notNull": false }, "intervaldef": { "name": "intervaldef", "type": "interval", "primaryKey": false, "notNull": false, "default": "'10 days'" } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} }, "users12": { "name": "users12", "schema": "", "columns": { "id": { "name": "id", "type": "serial", "primaryKey": true, "notNull": true }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true }, "email": { "name": "email", "type": "text", "primaryKey": false, "notNull": true } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} } }, "enums": {}, "schemas": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} } } ================================================ FILE: integration-tests/drizzle2/pg/meta/_journal.json ================================================ { "version": "5", "dialect": "pg", "entries": [ { "idx": 0, "version": "5", "when": 1680271923328, "tag": "0000_puzzling_flatman", "breakpoints": true }, { "idx": 1, "version": "5", "when": 1680271923329, "tag": "0001_test", "breakpoints": true } ] } ================================================ FILE: integration-tests/drizzle2/pg-proxy/first/0000_puzzling_flatman.sql ================================================ CREATE TABLE "users" ( id serial PRIMARY KEY, name text NOT NULL, verified boolean NOT NULL DEFAULT false, jsonb jsonb, created_at timestamptz NOT NULL DEFAULT now() ); ================================================ FILE: integration-tests/drizzle2/pg-proxy/first/meta/0000_snapshot.json ================================================ { "version": "5", "dialect": "pg", "id": "cb1644bb-c5da-465a-8d70-f63d81e34514", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "users": { "name": "users", "schema": "", "columns": { "id": { "name": "id", "type": "serial", "primaryKey": true, "notNull": false }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true }, "verified": { "name": "verified", "type": "boolean", "primaryKey": false, "notNull": true, "default": false }, "jsonb": { "name": "jsonb", "type": "jsonb", "primaryKey": false, "notNull": false }, "created_at": { "name": "created_at", "type": "timestamptz", "primaryKey": false, "notNull": true, "default": "now()" } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} } }, "enums": {}, "schemas": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} } } ================================================ FILE: integration-tests/drizzle2/pg-proxy/first/meta/_journal.json ================================================ { "version": "5", "dialect": "pg", "entries": [ { "idx": 0, "version": "5", "when": 1680271923328, "tag": "0000_puzzling_flatman", "breakpoints": true } ] } ================================================ FILE: integration-tests/drizzle2/pg-proxy/second/0000_puzzling_flatman.sql ================================================ CREATE TABLE "users" ( id serial PRIMARY KEY, name text NOT NULL, verified boolean NOT NULL DEFAULT false, jsonb jsonb, created_at timestamptz NOT NULL DEFAULT now() ); ================================================ FILE: integration-tests/drizzle2/pg-proxy/second/0001_test.sql ================================================ CREATE TABLE "users12" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "email" text NOT NULL ); ================================================ FILE: integration-tests/drizzle2/pg-proxy/second/meta/0000_snapshot.json ================================================ { "version": "5", "dialect": "pg", "id": "cb1644bb-c5da-465a-8d70-f63d81e34514", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "users": { "name": "users", "schema": "", "columns": { "id": { "name": "id", "type": "serial", "primaryKey": true, "notNull": false }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true }, "verified": { "name": "verified", "type": "boolean", "primaryKey": false, "notNull": true, "default": false }, "jsonb": { "name": "jsonb", "type": "jsonb", "primaryKey": false, "notNull": false }, "created_at": { "name": "created_at", "type": "timestamptz", "primaryKey": false, "notNull": true, "default": "now()" } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} } }, "enums": {}, "schemas": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} } } ================================================ FILE: integration-tests/drizzle2/pg-proxy/second/meta/0001_snapshot.json ================================================ { "version": "5", "dialect": "pg", "id": "f2a88b25-f2da-4973-879e-60b57f24e7b9", "prevId": "cb1644bb-c5da-465a-8d70-f63d81e34514", "tables": { "users": { "name": "users", "schema": "", "columns": { "id": { "name": "id", "type": "serial", "primaryKey": true, "notNull": false }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true }, "verified": { "name": "verified", "type": "boolean", "primaryKey": false, "notNull": true, "default": false }, "jsonb": { "name": "jsonb", "type": "jsonb", "primaryKey": false, "notNull": false }, "created_at": { "name": "created_at", "type": "timestamptz", "primaryKey": false, "notNull": true, "default": "now()" } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} } }, "enums": {}, "schemas": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} } } ================================================ FILE: integration-tests/drizzle2/pg-proxy/second/meta/_journal.json ================================================ { "version": "5", "dialect": "pg", "entries": [ { "idx": 0, "version": "5", "when": 1680271923328, "tag": "0000_puzzling_flatman", "breakpoints": true }, { "idx": 1, "version": "5", "when": 1680271923329, "tag": "0001_test", "breakpoints": true } ] } ================================================ FILE: integration-tests/drizzle2/planetscale/0000_nostalgic_carnage.sql ================================================ CREATE TABLE `drizzle_tests_cities_migration` ( `id` int, `fullname_name` text, `state` text ); --> statement-breakpoint CREATE TABLE `drizzle_tests_users_migration` ( `id` int PRIMARY KEY NOT NULL, `full_name` text, `phone` int, `invited_by` int, `city_id` int, `date` timestamp DEFAULT (now()) ); --> statement-breakpoint CREATE TABLE `drizzle_tests_users12` ( `id` serial AUTO_INCREMENT PRIMARY KEY NOT NULL, `name` text NOT NULL, `email` text NOT NULL ); ================================================ FILE: integration-tests/drizzle2/planetscale/meta/0000_snapshot.json ================================================ { "version": "5", "dialect": "mysql", "id": "8e8c8378-0496-40f6-88e3-98aab8282b1f", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "drizzle_tests_cities_migration": { "name": "drizzle_tests_cities_migration", "columns": { "id": { "name": "id", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "fullname_name": { "name": "fullname_name", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false }, "state": { "name": "state", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} }, "drizzle_tests_users_migration": { "name": "drizzle_tests_users_migration", "columns": { "id": { "name": "id", "type": "int", "primaryKey": true, "notNull": true, "autoincrement": false }, "full_name": { "name": "full_name", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false }, "phone": { "name": "phone", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "invited_by": { "name": "invited_by", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "city_id": { "name": "city_id", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "date": { "name": "date", "type": "timestamp", "primaryKey": false, "notNull": false, "autoincrement": false, "default": "(now())" } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} }, "drizzle_tests_users12": { "name": "drizzle_tests_users12", "columns": { "id": { "name": "id", "type": "serial", "primaryKey": true, "notNull": true, "autoincrement": true }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, "email": { "name": "email", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false } }, "indexes": { "my_unique_index": { "name": "my_unique_index", "columns": [ "name" ], "isUnique": true, "using": "btree" } }, "foreignKeys": {}, "compositePrimaryKeys": {} } }, "schemas": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} } } ================================================ FILE: integration-tests/drizzle2/planetscale/meta/_journal.json ================================================ { "version": "5", "dialect": "mysql", "entries": [ { "idx": 0, "version": "5", "when": 1680270921944, "tag": "0000_nostalgic_carnage", "breakpoints": true } ] } ================================================ FILE: integration-tests/drizzle2/singlestore/0000_nostalgic_carnage.sql ================================================ CREATE TABLE `cities_migration` ( `id` int, `fullname_name` text, `state` text ); --> statement-breakpoint CREATE TABLE `users_migration` ( `id` int PRIMARY KEY NOT NULL, `full_name` text, `phone` int, `invited_by` int, `city_id` int, `date` timestamp DEFAULT now() ); --> statement-breakpoint CREATE TABLE `users12` ( `id` serial AUTO_INCREMENT PRIMARY KEY NOT NULL, `name` text NOT NULL, `email` text NOT NULL ); ================================================ FILE: integration-tests/drizzle2/singlestore/meta/0000_snapshot.json ================================================ { "version": "1", "dialect": "singlestore", "id": "8e8c8378-0496-40f6-88e3-98aab8282b1f", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "cities_migration": { "name": "cities_migration", "columns": { "id": { "name": "id", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "fullname_name": { "name": "fullname_name", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false }, "state": { "name": "state", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} }, "users_migration": { "name": "users_migration", "columns": { "id": { "name": "id", "type": "int", "primaryKey": true, "notNull": true, "autoincrement": false }, "full_name": { "name": "full_name", "type": "text", "primaryKey": false, "notNull": false, "autoincrement": false }, "phone": { "name": "phone", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "invited_by": { "name": "invited_by", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "city_id": { "name": "city_id", "type": "int", "primaryKey": false, "notNull": false, "autoincrement": false }, "date": { "name": "date", "type": "timestamp", "primaryKey": false, "notNull": false, "autoincrement": false, "default": "now()" } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} }, "users12": { "name": "users12", "columns": { "id": { "name": "id", "type": "serial", "primaryKey": true, "notNull": true, "autoincrement": true }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, "email": { "name": "email", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false } }, "indexes": { "my_unique_index": { "name": "my_unique_index", "columns": [ "name" ], "isUnique": true, "using": "btree" } }, "foreignKeys": {}, "compositePrimaryKeys": {} } }, "schemas": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} } } ================================================ FILE: integration-tests/drizzle2/singlestore/meta/_journal.json ================================================ { "version": "1", "dialect": "singlestore", "entries": [ { "idx": 0, "version": "1", "when": 1680270921944, "tag": "0000_nostalgic_carnage", "breakpoints": true } ] } ================================================ FILE: integration-tests/drizzle2/sqlite/0000_fancy_bug.sql ================================================ CREATE TABLE another_users ( `id` integer PRIMARY KEY NOT NULL, `name` text NOT NULL, `email` text NOT NULL ); --> statement-breakpoint CREATE TABLE users12 ( `id` integer PRIMARY KEY NOT NULL, `name` text NOT NULL, `email` text NOT NULL ); ================================================ FILE: integration-tests/drizzle2/sqlite/meta/0000_snapshot.json ================================================ { "version": "5", "dialect": "sqlite", "id": "b6793a90-b553-4a5d-9d2f-b48e56ef2a2a", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "cities": { "name": "cities", "columns": { "id": { "name": "id", "type": "integer", "primaryKey": false, "notNull": false, "autoincrement": false }, "fullname_name": { "name": "fullname_name", "type": "text", "primaryKey": false, "notNull": false }, "state": { "name": "state", "type": "text", "primaryKey": false, "notNull": false } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": { "cities_id_fullname_name_pk": { "columns": [ "id", "fullname_name" ] } } }, "test_get": { "name": "test_get", "columns": { "id": { "name": "id", "type": "integer", "primaryKey": true, "notNull": true, "autoincrement": false }, "full_name": { "name": "full_name", "type": "text", "primaryKey": false, "notNull": true } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {} }, "users": { "name": "users", "columns": { "id": { "name": "id", "type": "integer", "primaryKey": true, "notNull": true, "autoincrement": false }, "full_name": { "name": "full_name", "type": "text", "primaryKey": false, "notNull": true }, "phone": { "name": "phone", "type": "integer", "primaryKey": false, "notNull": false, "autoincrement": false }, "invited_by": { "name": "invited_by", "type": "integer", "primaryKey": false, "notNull": false, "autoincrement": false }, "city_id": { "name": "city_id", "type": "integer", "primaryKey": false, "notNull": false, "autoincrement": false } }, "indexes": {}, "foreignKeys": { "users_city_id_cities_id_fk": { "name": "users_city_id_cities_id_fk", "tableFrom": "users", "tableTo": "cities", "columnsFrom": [ "city_id" ], "columnsTo": [ "id" ] } }, "compositePrimaryKeys": {} } }, "enums": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} } } ================================================ FILE: integration-tests/drizzle2/sqlite/meta/_journal.json ================================================ { "version": "5", "dialect": "sqlite", "entries": [ { "idx": 0, "version": "5", "when": 1680202256473, "tag": "0000_fancy_bug", "breakpoints": true } ] } ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/better-sqlite3.test.cjs ================================================ require('dotenv/config'); const Database = require('better-sqlite3'); const { drizzle } = require('drizzle-orm/better-sqlite3'); const { sqlite: schema } = require('./schema.cjs'); import { describe, expect } from 'vitest'; describe('better-sqlite3', async (it) => { it('drizzle()', async () => { const db = drizzle(); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(string)', async () => { const db = drizzle(':memory:'); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(string, config)', async () => { const db = drizzle(':memory:', { schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: ':memory:', schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { source: ':memory:', }, schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: {}, ...config})', async () => { const db = drizzle({ connection: {}, schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({...config})', async () => { const db = drizzle({ schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = new Database(':memory:'); const db = drizzle(client); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(client, config)', async () => { const client = new Database(':memory:'); const db = drizzle(client, { schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = new Database(':memory:'); const db = drizzle({ client, schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/libsql.test.cjs ================================================ require('dotenv/config'); const { createClient } = require('@libsql/client'); const { drizzle } = require('drizzle-orm/libsql'); const { sqlite: schema } = require('./schema.cjs'); import { describe, expect } from 'vitest'; describe('libsql', async (it) => { it('drizzle(string)', async () => { const db = drizzle(':memory:'); await db.$client.execute('SELECT 1;'); await db.$client.close(); }); it('drizzle(string, config)', async () => { const db = drizzle(':memory:', { schema, }); await db.$client.execute('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: ':memory:', schema, }); await db.$client.execute('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { url: ':memory:', }, schema, }); await db.$client.execute('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = createClient({ url: ':memory:', }); const db = drizzle(client); await db.$client.execute('SELECT 1;'); await db.$client.close(); }); it('drizzle(client, config)', async () => { const client = createClient({ url: ':memory:', }); const db = drizzle(client, { schema, }); await db.$client.execute('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = createClient({ url: ':memory:', }); const db = drizzle({ client, schema, }); await db.$client.execute('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/mysql2.test.cjs ================================================ require('dotenv/config'); const { drizzle } = require('drizzle-orm/mysql2'); const { createPool, createConnection, Connection } = require('mysql2'); const { mysql: schema } = require('./schema.cjs'); import { describe, expect } from 'vitest'; if (!process.env['MYSQL_CONNECTION_STRING']) { throw new Error('MYSQL_CONNECTION_STRING is not defined'); } describe('mysql2', async (it) => { it('drizzle(string)', async () => { const db = drizzle( process.env['MYSQL_CONNECTION_STRING'], ); await db.$client.execute(`SELECT 1`); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle(string, config)', async () => { const db = drizzle( process.env['MYSQL_CONNECTION_STRING'], { schema, mode: 'default', }, ); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['MYSQL_CONNECTION_STRING'], schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { uri: process.env['MYSQL_CONNECTION_STRING'], }, schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = createPool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.execute('SELECT 1;'); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle(client, config)', async () => { const client = createPool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle(client, { schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = createPool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle({ client, schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); }); describe('mysql2:connection', async (it) => { it('drizzle(client)', async () => { const client = createConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.execute('SELECT 1;'); expect(db.$client.getConnection).toStrictEqual(undefined); }); it('drizzle(client, config)', async () => { const client = createConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle(client, { schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = createConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle({ client, schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/neon-http.test.cjs ================================================ require('dotenv/config'); const { neon: pg } = require('@neondatabase/serverless'); const { drizzle } = require('drizzle-orm/neon-http'); const { pg: schema } = require('./schema.cjs'); import { describe, expect } from 'vitest'; if (!process.env['NEON_CONNECTION_STRING']) { throw new Error('NEON_CONNECTION_STRING is not defined'); } describe('neon-http', async (it) => { it('drizzle(string)', async () => { const db = drizzle( process.env['NEON_CONNECTION_STRING'], ); await db.$client('SELECT 1;'); }); it('drizzle(string, config)', async () => { const db = drizzle( process.env['NEON_CONNECTION_STRING'], { schema, }, ); await db.$client('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['NEON_CONNECTION_STRING'], schema, }); await db.$client('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { connectionString: process.env['NEON_CONNECTION_STRING'], }, schema, }); await db.$client('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = pg( process.env['NEON_CONNECTION_STRING'], ); const db = drizzle(client); await db.$client('SELECT 1;'); }); it('drizzle(client, config)', async () => { const client = pg( process.env['NEON_CONNECTION_STRING'], ); const db = drizzle(client, { schema, }); await db.$client('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = pg( process.env['NEON_CONNECTION_STRING'], ); const db = drizzle({ client, schema, }); await db.$client('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/neon-ws.test.cjs ================================================ require('dotenv/config'); const { neonConfig, Pool, Client } = require('@neondatabase/serverless'); const { drizzle } = require('drizzle-orm/neon-serverless'); const { pg: schema } = require('./schema.cjs'); const ws = require('ws'); import { describe, expect } from 'vitest'; neonConfig.webSocketConstructor = ws; if (!process.env['NEON_CONNECTION_STRING']) { throw new Error('NEON_CONNECTION_STRING is not defined'); } describe('neon-ws', async (it) => { it('drizzle(string)', async () => { const db = drizzle( process.env['NEON_CONNECTION_STRING'], ); await db.$client.query('SELECT 1;'); }); it('drizzle(string, config)', async () => { const db = drizzle( process.env['NEON_CONNECTION_STRING'], { schema, }, ); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['NEON_CONNECTION_STRING'], schema, }); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { connectionString: process.env['NEON_CONNECTION_STRING'], }, schema, }); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle(client)', async () => { const client = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle(client, config)', async () => { const client = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle({client, ...config})', async () => { const client = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Pool); }); }); describe('neon-ws:Client', async (it) => { it('drizzle(client)', async () => { const client = new Client({ connectionString: process.env['NEON_CONNECTION_STRING'], }); await client.connect(); const db = drizzle(client); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); it('drizzle(client, config)', async () => { const client = new Client({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); it('drizzle({client, ...config})', async () => { const client = new Client({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); }); describe('neon-ws:PoolClient', async (it) => { it('drizzle(client)', async () => { const pool = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); it('drizzle(client, config)', async () => { const pool = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); it('drizzle({client, ...config})', async () => { const pool = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle({ client, schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/node-pg.test.cjs ================================================ require('dotenv/config'); const { drizzle } = require('drizzle-orm/node-postgres'); const pg = require('pg'); const { pg: schema } = require('./schema.cjs'); import { describe, expect } from 'vitest'; const Pool = pg.Pool; const Client = pg.Client; if (!process.env['PG_CONNECTION_STRING']) { throw new Error('PG_CONNECTION_STRING is not defined'); } describe('node-pg', async (it) => { it('drizzle(string)', async () => { const db = drizzle(process.env['PG_CONNECTION_STRING']); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle(string, config)', async () => { const db = drizzle(process.env['PG_CONNECTION_STRING'], { schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['PG_CONNECTION_STRING'], schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { connectionString: process.env['PG_CONNECTION_STRING'], }, schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle(client, config)', async () => { const client = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); }); describe('node-pg:Client', async (it) => { it('drizzle(client)', async () => { const client = new Client({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle(client); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(client, config)', async () => { const client = new Client({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = new Client({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); }); describe('node-pg:PoolClient', async (it) => { it('drizzle(client)', async () => { const pool = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(client, config)', async () => { const pool = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const pool = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle({ client, schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/pglite.test.cjs ================================================ require('dotenv/config'); const { drizzle } = require('drizzle-orm/pglite'); const { pg: schema } = require('./schema.cjs'); const { PGlite: Database } = require('@electric-sql/pglite'); import { describe, expect } from 'vitest'; describe('pglite', async (it) => { it('drizzle()', async () => { const db = drizzle(); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(string)', async () => { const db = drizzle('memory://'); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(string, config)', async () => { const db = drizzle('memory://', { schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: {}, ...config})', async () => { const db = drizzle({ connection: {}, schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({...config})', async () => { const db = drizzle({ schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = new Database('memory://'); const db = drizzle(client); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(client, config)', async () => { const client = new Database('memory://'); const db = drizzle(client, { schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = new Database('memory://'); const db = drizzle({ client, schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/planetscale.test.cjs ================================================ require('dotenv/config'); const { Client } = require('@planetscale/database'); const { drizzle } = require('drizzle-orm/planetscale-serverless'); const { mysql: schema } = require('./schema.cjs'); import { describe, expect } from 'vitest'; if (!process.env['PLANETSCALE_CONNECTION_STRING']) { throw new Error('PLANETSCALE_CONNECTION_STRING is not defined'); } describe('planetscale', async (it) => { it('drizzle(string)', async () => { const db = drizzle( process.env['PLANETSCALE_CONNECTION_STRING'], ); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(string, config)', async () => { const db = drizzle( process.env['PLANETSCALE_CONNECTION_STRING'], { schema, }, ); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['PLANETSCALE_CONNECTION_STRING'], schema, }); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { url: process.env['PLANETSCALE_CONNECTION_STRING'], }, schema, }); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(client, config)', async () => { const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/postgres-js.test.cjs ================================================ require('dotenv/config'); const { drizzle } = require('drizzle-orm/postgres-js'); const pg = require('postgres'); const { pg: schema } = require('./schema.cjs'); import { describe, expect } from 'vitest'; if (!process.env['PG_CONNECTION_STRING']) { throw new Error('PG_CONNECTION_STRING is not defined'); } describe('postgres-js', async (it) => { it('drizzle(string)', async () => { const db = drizzle(process.env['PG_CONNECTION_STRING']); await db.$client.unsafe('SELECT 1;'); }); it('drizzle(string, config)', async () => { const db = drizzle(process.env['PG_CONNECTION_STRING'], { schema, }); await db.$client.unsafe('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['PG_CONNECTION_STRING'], schema, }); await db.$client.unsafe('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { url: process.env['PG_CONNECTION_STRING'], }, schema, }); await db.$client.unsafe('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = pg(process.env['PG_CONNECTION_STRING']); const db = drizzle(client); await db.$client.unsafe('SELECT 1;'); }); it('drizzle(client, config)', async () => { const client = pg(process.env['PG_CONNECTION_STRING']); const db = drizzle(client, { schema, }); await db.$client.unsafe('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = pg(process.env['PG_CONNECTION_STRING']); const db = drizzle({ client, schema, }); await db.$client.unsafe('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/schema.cjs ================================================ const { int: mysqlInt, mysqlTable } = require('drizzle-orm/mysql-core'); const { integer: pgInt, pgTable } = require('drizzle-orm/pg-core'); const { integer: sqliteInt, sqliteTable } = require('drizzle-orm/sqlite-core'); module.exports.sqlite = { User: sqliteTable('test', { id: sqliteInt('id').primaryKey().notNull(), }), }; module.exports.pg = { User: pgTable('test', { id: pgInt('id').primaryKey().notNull(), }), }; module.exports.mysql = { User: mysqlTable('test', { id: mysqlInt('id').primaryKey().notNull(), }), }; ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/tidb.test.cjs ================================================ require('dotenv/config'); const { connect } = require('@tidbcloud/serverless'); const { drizzle } = require('drizzle-orm/tidb-serverless'); const { mysql: schema } = require('./schema.cjs'); import { describe, expect } from 'vitest'; if (!process.env['TIDB_CONNECTION_STRING']) { throw new Error('TIDB_CONNECTION_STRING is not defined'); } describe('tidb', async (it) => { it('drizzle(string)', async () => { const db = drizzle( process.env['TIDB_CONNECTION_STRING'], ); await db.$client.execute(`SELECT 1`); }); it('drizzle(string, config)', async () => { const db = drizzle( process.env['TIDB_CONNECTION_STRING'], { schema, }, ); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['TIDB_CONNECTION_STRING'], schema, }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { url: process.env['TIDB_CONNECTION_STRING'], }, schema, }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = connect({ url: process.env['TIDB_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.execute('SELECT 1;'); }); it('drizzle(client, config)', async () => { const client = connect({ url: process.env['TIDB_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = connect({ url: process.env['TIDB_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/commonjs/vercel.test.cjs ================================================ require('dotenv/config'); const vc = require('@vercel/postgres'); const { drizzle } = require('drizzle-orm/vercel-postgres'); const { pg: schema } = require('./schema.cjs'); import { describe, expect } from 'vitest'; const { sql, createClient, createPool } = vc; const Pool = vc.VercelPool; const Client = vc.VercelClient; if (!process.env['VERCEL_CONNECTION_STRING']) { throw new Error('VERCEL_CONNECTION_STRING is not defined'); } // Used for non-pooled connection if (!process.env['NEON_CONNECTION_STRING']) { throw new Error('NEON_CONNECTION_STRING is not defined'); } process.env['POSTGRES_URL'] = process.env['VERCEL_CONNECTION_STRING']; describe('vercel:sql', async (it) => { it('drizzle()', async () => { const db = drizzle(); await sql.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).toBeTypeOf('function'); }); it('drizzle(client)', async () => { const db = drizzle(sql); await db.$client.query('SELECT 1;'); expect(db.$client).toBeTypeOf('function'); }); it('drizzle(client, config)', async () => { const db = drizzle(sql, { schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeTypeOf('function'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const db = drizzle({ client: sql, schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeTypeOf('function'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({...config})', async () => { const db = drizzle({ schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeTypeOf('function'); expect(db.query.User).not.toStrictEqual(undefined); }); }); describe('vercel:Pool', async (it) => { it('drizzle(client)', async () => { const client = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle(client, config)', async () => { const client = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const db = drizzle({ client: client, schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); }); describe('vercel:Client', async (it) => { it('drizzle(client)', async () => { const client = createClient({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle(client); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(client, config)', async () => { const client = createClient({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = createClient({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle({ client: client, schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); }); describe('vercel:PoolClient', async (it) => { it('drizzle(client)', async () => { const pool = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(client, config)', async () => { const pool = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const pool = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle({ client: client, schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/module/better-sqlite3.test.mjs ================================================ import 'dotenv/config'; import Database from 'better-sqlite3'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import { describe, expect } from 'vitest'; import { sqlite as schema } from './schema.mjs'; describe('better-sqlite3', async (it) => { it('drizzle()', async () => { const db = drizzle(); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(string)', async () => { const db = drizzle(':memory:'); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(string, config)', async () => { const db = drizzle(':memory:', { schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: ':memory:', schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { source: ':memory:', }, schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: {}, ...config})', async () => { const db = drizzle({ connection: {}, schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({...config})', async () => { const db = drizzle({ schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = new Database(':memory:'); const db = drizzle(client); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(client, config)', async () => { const client = new Database(':memory:'); const db = drizzle(client, { schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = new Database(':memory:'); const db = drizzle({ client, schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/module/libsql.test.mjs ================================================ import 'dotenv/config'; import { createClient } from '@libsql/client'; import { drizzle } from 'drizzle-orm/libsql'; import { describe, expect } from 'vitest'; import { sqlite as schema } from './schema.mjs'; describe('libsql', async (it) => { it('drizzle(string)', async () => { const db = drizzle(':memory:'); await db.$client.execute('SELECT 1;'); await db.$client.close(); }); it('drizzle(string, config)', async () => { const db = drizzle(':memory:', { schema, }); await db.$client.execute('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: ':memory:', schema, }); await db.$client.execute('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { url: ':memory:', }, schema, }); await db.$client.execute('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = createClient({ url: ':memory:', }); const db = drizzle(client); await db.$client.execute('SELECT 1;'); await db.$client.close(); }); it('drizzle(client, config)', async () => { const client = createClient({ url: ':memory:', }); const db = drizzle(client, { schema, }); await db.$client.execute('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = createClient({ url: ':memory:', }); const db = drizzle({ client, schema, }); await db.$client.execute('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/module/mysql2.test.mjs ================================================ import 'dotenv/config'; import { drizzle } from 'drizzle-orm/mysql2'; import { Connection, createConnection, createPool } from 'mysql2'; import { describe, expect } from 'vitest'; import { mysql as schema } from './schema.mjs'; if (!process.env['MYSQL_CONNECTION_STRING']) { throw new Error('MYSQL_CONNECTION_STRING is not defined'); } describe('mysql2', async (it) => { it('drizzle(string)', async () => { const db = drizzle( process.env['MYSQL_CONNECTION_STRING'], ); await db.$client.execute(`SELECT 1`); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle(string, config)', async () => { const db = drizzle( process.env['MYSQL_CONNECTION_STRING'], { schema, mode: 'default', }, ); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['MYSQL_CONNECTION_STRING'], schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { uri: process.env['MYSQL_CONNECTION_STRING'], }, schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = createPool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.execute('SELECT 1;'); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle(client, config)', async () => { const client = createPool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle(client, { schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = createPool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle({ client, schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).not.toStrictEqual(undefined); }); }); describe('mysql2:connection', async (it) => { it('drizzle(client)', async () => { const client = createConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.execute('SELECT 1;'); expect(db.$client.getConnection).toStrictEqual(undefined); }); it('drizzle(client, config)', async () => { const client = createConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle(client, { schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = createConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const db = drizzle({ client, schema, mode: 'default', }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client.getConnection).toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/module/neon-http.test.mjs ================================================ import 'dotenv/config'; import { neon as pg } from '@neondatabase/serverless'; import { drizzle } from 'drizzle-orm/neon-http'; import { describe, expect } from 'vitest'; import { pg as schema } from './schema.mjs'; if (!process.env['NEON_CONNECTION_STRING']) { throw new Error('NEON_CONNECTION_STRING is not defined'); } describe('neon-http', async (it) => { it('drizzle(string)', async () => { const db = drizzle( process.env['NEON_CONNECTION_STRING'], ); await db.$client('SELECT 1;'); }); it('drizzle(string, config)', async () => { const db = drizzle( process.env['NEON_CONNECTION_STRING'], { schema, }, ); await db.$client('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['NEON_CONNECTION_STRING'], schema, }); await db.$client('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { connectionString: process.env['NEON_CONNECTION_STRING'], }, schema, }); await db.$client('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = pg( process.env['NEON_CONNECTION_STRING'], ); const db = drizzle(client); await db.$client('SELECT 1;'); }); it('drizzle(client, config)', async () => { const client = pg( process.env['NEON_CONNECTION_STRING'], ); const db = drizzle(client, { schema, }); await db.$client('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = pg( process.env['NEON_CONNECTION_STRING'], ); const db = drizzle({ client, schema, }); await db.$client('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/module/neon-ws.test.mjs ================================================ import 'dotenv/config'; import { Client, neonConfig, Pool } from '@neondatabase/serverless'; import { drizzle } from 'drizzle-orm/neon-serverless'; import { describe, expect } from 'vitest'; import ws from 'ws'; import { pg as schema } from './schema.mjs'; neonConfig.webSocketConstructor = ws; if (!process.env['NEON_CONNECTION_STRING']) { throw new Error('NEON_CONNECTION_STRING is not defined'); } describe('neon-ws', async (it) => { it('drizzle(string)', async () => { const db = drizzle( process.env['NEON_CONNECTION_STRING'], ); await db.$client.query('SELECT 1;'); }); it('drizzle(string, config)', async () => { const db = drizzle( process.env['NEON_CONNECTION_STRING'], { schema, }, ); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['NEON_CONNECTION_STRING'], schema, }); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { connectionString: process.env['NEON_CONNECTION_STRING'], }, schema, }); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle(client)', async () => { const client = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle(client, config)', async () => { const client = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle({client, ...config})', async () => { const client = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Pool); }); }); describe('neon-ws:Client', async (it) => { it('drizzle(client)', async () => { const client = new Client({ connectionString: process.env['NEON_CONNECTION_STRING'], }); await client.connect(); const db = drizzle(client); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); it('drizzle(client, config)', async () => { const client = new Client({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); it('drizzle({client, ...config})', async () => { const client = new Client({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); }); describe('neon-ws:PoolClient', async (it) => { it('drizzle(client)', async () => { const pool = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); it('drizzle(client, config)', async () => { const pool = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); it('drizzle({client, ...config})', async () => { const pool = new Pool({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle({ client, schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.query.User).not.toStrictEqual(undefined); expect(db.$client).toBeInstanceOf(Client); expect(db.$client).not.toBeInstanceOf(Pool); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/module/node-pg.test.mjs ================================================ import 'dotenv/config'; import { drizzle } from 'drizzle-orm/node-postgres'; import pg from 'pg'; import { describe, expect } from 'vitest'; import { pg as schema } from './schema.mjs'; const Pool = pg.Pool; const Client = pg.Client; if (!process.env['PG_CONNECTION_STRING']) { throw new Error('PG_CONNECTION_STRING is not defined'); } describe('node-pg', async (it) => { it('drizzle(string)', async () => { const db = drizzle(process.env['PG_CONNECTION_STRING']); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle(string, config)', async () => { const db = drizzle(process.env['PG_CONNECTION_STRING'], { schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['PG_CONNECTION_STRING'], schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { connectionString: process.env['PG_CONNECTION_STRING'], }, schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle(client, config)', async () => { const client = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); }); describe('node-pg:Client', async (it) => { it('drizzle(client)', async () => { const client = new Client({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle(client); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(client, config)', async () => { const client = new Client({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = new Client({ connectionString: process.env['PG_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); }); describe('node-pg:PoolClient', async (it) => { it('drizzle(client)', async () => { const pool = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(client, config)', async () => { const pool = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const pool = new Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle({ client, schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/module/pglite.test.mjs ================================================ import 'dotenv/config'; import { PGlite as Database } from '@electric-sql/pglite'; import { drizzle } from 'drizzle-orm/pglite'; import { describe, expect } from 'vitest'; import { pg as schema } from './schema.mjs'; describe('pglite', async (it) => { it('drizzle()', async () => { const db = drizzle(); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(string)', async () => { const db = drizzle('memory://'); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(string, config)', async () => { const db = drizzle('memory://', { schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: {}, ...config})', async () => { const db = drizzle({ connection: {}, schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({...config})', async () => { const db = drizzle({ schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = new Database('memory://'); const db = drizzle(client); await db.$client.exec('SELECT 1;'); await db.$client.close(); }); it('drizzle(client, config)', async () => { const client = new Database('memory://'); const db = drizzle(client, { schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = new Database('memory://'); const db = drizzle({ client, schema, }); await db.$client.exec('SELECT 1;'); await db.$client.close(); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/module/planetscale.test.mjs ================================================ import 'dotenv/config'; import { Client } from '@planetscale/database'; import { drizzle } from 'drizzle-orm/planetscale-serverless'; import { describe, expect } from 'vitest'; import { mysql as schema } from './schema.mjs'; if (!process.env['PLANETSCALE_CONNECTION_STRING']) { throw new Error('PLANETSCALE_CONNECTION_STRING is not defined'); } describe('planetscale', async (it) => { it('drizzle(string)', async () => { const db = drizzle( process.env['PLANETSCALE_CONNECTION_STRING'], ); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(string, config)', async () => { const db = drizzle( process.env['PLANETSCALE_CONNECTION_STRING'], { schema, }, ); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['PLANETSCALE_CONNECTION_STRING'], schema, }); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { url: process.env['PLANETSCALE_CONNECTION_STRING'], }, schema, }); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(client, config)', async () => { const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await db.$client.execute('SELECT 1;'); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/module/postgres-js.test.mjs ================================================ import 'dotenv/config'; import { drizzle } from 'drizzle-orm/postgres-js'; import pg from 'postgres'; import { describe, expect } from 'vitest'; import { pg as schema } from './schema.mjs'; if (!process.env['PG_CONNECTION_STRING']) { throw new Error('PG_CONNECTION_STRING is not defined'); } describe('postgres-js', async (it) => { it('drizzle(string)', async () => { const db = drizzle(process.env['PG_CONNECTION_STRING']); await db.$client.unsafe('SELECT 1;'); }); it('drizzle(string, config)', async () => { const db = drizzle(process.env['PG_CONNECTION_STRING'], { schema, }); await db.$client.unsafe('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['PG_CONNECTION_STRING'], schema, }); await db.$client.unsafe('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { url: process.env['PG_CONNECTION_STRING'], }, schema, }); await db.$client.unsafe('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = pg(process.env['PG_CONNECTION_STRING']); const db = drizzle(client); await db.$client.unsafe('SELECT 1;'); }); it('drizzle(client, config)', async () => { const client = pg(process.env['PG_CONNECTION_STRING']); const db = drizzle(client, { schema, }); await db.$client.unsafe('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = pg(process.env['PG_CONNECTION_STRING']); const db = drizzle({ client, schema, }); await db.$client.unsafe('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/module/schema.mjs ================================================ import { int as mysqlInt, mysqlTable } from 'drizzle-orm/mysql-core'; import { integer as pgInt, pgTable } from 'drizzle-orm/pg-core'; import { integer as sqliteInt, sqliteTable } from 'drizzle-orm/sqlite-core'; export const sqlite = { User: sqliteTable('test', { id: sqliteInt('id').primaryKey().notNull(), }), }; export const pg = { User: pgTable('test', { id: pgInt('id').primaryKey().notNull(), }), }; export const mysql = { User: mysqlTable('test', { id: mysqlInt('id').primaryKey().notNull(), }), }; ================================================ FILE: integration-tests/js-tests/driver-init/module/tidb.test.mjs ================================================ import 'dotenv/config'; import { connect } from '@tidbcloud/serverless'; import { drizzle } from 'drizzle-orm/tidb-serverless'; import { describe, expect } from 'vitest'; import { mysql as schema } from './schema.mjs'; if (!process.env['TIDB_CONNECTION_STRING']) { throw new Error('TIDB_CONNECTION_STRING is not defined'); } describe('tidb', async (it) => { it('drizzle(string)', async () => { const db = drizzle( process.env['TIDB_CONNECTION_STRING'], ); await db.$client.execute(`SELECT 1`); }); it('drizzle(string, config)', async () => { const db = drizzle( process.env['TIDB_CONNECTION_STRING'], { schema, }, ); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: string, ...config})', async () => { const db = drizzle({ connection: process.env['TIDB_CONNECTION_STRING'], schema, }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({connection: params, ...config})', async () => { const db = drizzle({ connection: { url: process.env['TIDB_CONNECTION_STRING'], }, schema, }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle(client)', async () => { const client = connect({ url: process.env['TIDB_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.execute('SELECT 1;'); }); it('drizzle(client, config)', async () => { const client = connect({ url: process.env['TIDB_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = connect({ url: process.env['TIDB_CONNECTION_STRING'], }); const db = drizzle({ client, schema, }); await db.$client.execute('SELECT 1;'); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/js-tests/driver-init/module/vercel.test.mjs ================================================ import 'dotenv/config'; import { createClient, createPool, sql, VercelClient, VercelPool } from '@vercel/postgres'; import { drizzle } from 'drizzle-orm/vercel-postgres'; import { describe, expect } from 'vitest'; import { pg as schema } from './schema.mjs'; const Pool = VercelPool; const Client = VercelClient; if (!process.env['VERCEL_CONNECTION_STRING']) { throw new Error('VERCEL_CONNECTION_STRING is not defined'); } // Used for non-pooled connection if (!process.env['NEON_CONNECTION_STRING']) { throw new Error('NEON_CONNECTION_STRING is not defined'); } process.env['POSTGRES_URL'] = process.env['VERCEL_CONNECTION_STRING']; describe('vercel:sql', async (it) => { it('drizzle()', async () => { const db = drizzle(); await sql.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).toBeTypeOf('function'); }); it('drizzle(client)', async () => { const db = drizzle(sql); await db.$client.query('SELECT 1;'); expect(db.$client).toBeTypeOf('function'); }); it('drizzle(client, config)', async () => { const db = drizzle(sql, { schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeTypeOf('function'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const db = drizzle({ client: sql, schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeTypeOf('function'); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({...config})', async () => { const db = drizzle({ schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).toBeTypeOf('function'); expect(db.query.User).not.toStrictEqual(undefined); }); }); describe('vercel:Pool', async (it) => { it('drizzle(client)', async () => { const client = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const db = drizzle(client); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).toBeInstanceOf(Pool); }); it('drizzle(client, config)', async () => { const client = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const db = drizzle({ client: client, schema, }); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).toBeInstanceOf(Pool); expect(db.query.User).not.toStrictEqual(undefined); }); }); describe('vercel:Client', async (it) => { it('drizzle(client)', async () => { const client = createClient({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle(client); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(client, config)', async () => { const client = createClient({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle(client, { schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const client = createClient({ connectionString: process.env['NEON_CONNECTION_STRING'], }); const db = drizzle({ client: client, schema, }); await client.connect(); await db.$client.query('SELECT 1;'); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); }); describe('vercel:PoolClient', async (it) => { it('drizzle(client)', async () => { const pool = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); }); it('drizzle(client, config)', async () => { const pool = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle(client, { schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); it('drizzle({client, ...config})', async () => { const pool = createPool({ connectionString: process.env['VERCEL_CONNECTION_STRING'], }); const client = await pool.connect(); const db = drizzle({ client: client, schema, }); await db.$client.query('SELECT 1;'); client.release(); expect(db.$client).not.toBeTypeOf('function'); expect(db.$client).not.toBeInstanceOf(Pool); expect(db.$client).toBeInstanceOf(Client); expect(db.query.User).not.toStrictEqual(undefined); }); }); ================================================ FILE: integration-tests/package.json ================================================ { "name": "integration-tests", "version": "1.0.0", "description": "", "type": "module", "scripts": { "test:types": "tsc && cd type-tests/join-nodenext && tsc", "test": "pnpm test:vitest", "test:vitest": "vitest run --pass-with-no-tests", "test:esm": "node tests/imports.test.mjs && node tests/imports.test.cjs", "test:data-api": "sst shell vitest run tests/pg/awsdatapi.test.ts" }, "keywords": [], "author": "Drizzle Team", "license": "Apache-2.0", "private": true, "devDependencies": { "@cloudflare/workers-types": "^4.20241004.0", "@libsql/client": "^0.10.0", "@neondatabase/serverless": "0.10.0", "@originjs/vite-plugin-commonjs": "^1.0.3", "@paralleldrive/cuid2": "^2.2.2", "@types/async-retry": "^1.4.8", "@types/better-sqlite3": "^7.6.4", "@types/dockerode": "^3.3.18", "@types/node": "^20.2.5", "@types/pg": "^8.10.1", "@types/sql.js": "^1.4.4", "@types/uuid": "^9.0.1", "@types/ws": "^8.5.10", "@upstash/redis": "^1.34.3", "@vitest/ui": "^1.6.0", "ava": "^5.3.0", "cross-env": "^7.0.3", "keyv": "^5.2.3", "import-in-the-middle": "^1.13.1", "ts-node": "^10.9.2", "tsx": "^4.14.0", "vite-tsconfig-paths": "^4.3.2", "zx": "^8.3.2" }, "dependencies": { "@aws-sdk/client-rds-data": "^3.549.0", "@aws-sdk/credential-providers": "^3.549.0", "@electric-sql/pglite": "0.2.12", "@libsql/client": "^0.10.0", "@miniflare/d1": "^2.14.4", "@miniflare/shared": "^2.14.4", "@planetscale/database": "^1.16.0", "@prisma/client": "5.14.0", "@tidbcloud/serverless": "^0.1.1", "@typescript/analyze-trace": "^0.10.0", "@vercel/postgres": "^0.8.0", "@xata.io/client": "^0.29.3", "async-retry": "^1.3.3", "better-sqlite3": "11.9.1", "dockerode": "^4.0.6", "dotenv": "^16.1.4", "drizzle-prisma-generator": "^0.1.2", "drizzle-seed": "workspace:../drizzle-seed/dist", "drizzle-typebox": "workspace:../drizzle-typebox/dist", "drizzle-valibot": "workspace:../drizzle-valibot/dist", "drizzle-zod": "workspace:../drizzle-zod/dist", "gel": "^2.0.0", "get-port": "^7.0.0", "mysql2": "^3.14.1", "pg": "^8.11.0", "postgres": "^3.3.5", "prisma": "5.14.0", "source-map-support": "^0.5.21", "sql.js": "^1.8.0", "sqlite3": "^5.1.4", "sst": "^3.14.24", "uuid": "^9.0.0", "uvu": "^0.5.6", "vitest": "^3.1.3", "ws": "^8.18.2", "zod": "^3.20.2" } } ================================================ FILE: integration-tests/sst-env.d.ts ================================================ // eslint-disable-next-line @typescript-eslint/triple-slash-reference /// ================================================ FILE: integration-tests/sst.config.ts ================================================ // eslint-disable-next-line @typescript-eslint/triple-slash-reference /// export default $config({ app(_input) { return { name: 'awsdataapi', removal: 'remove', home: 'aws', }; }, async run() { new sst.aws.Postgres('Postgres', { scaling: { min: '0.5 ACU', max: '1 ACU', }, }); }, }); ================================================ FILE: integration-tests/tests/awsdatapi.alltypes.test.ts ================================================ /* eslint-disable unicorn/no-empty-file */ // import 'dotenv/config'; // import { RDSDataClient } from '@aws-sdk/client-rds-data'; // import { fromIni } from '@aws-sdk/credential-providers'; // import type { TestFn } from 'ava'; // import anyTest from 'ava'; // import * as dotenv from 'dotenv'; // import { name, sql } from 'drizzle-orm'; // import type { AwsDataApiPgDatabase } from 'drizzle-orm/aws-data-api/pg'; // import { drizzle } from 'drizzle-orm/aws-data-api/pg'; // import { // bigint, // bigserial, // boolean, // date, // decimal, // doublePrecision, // integer, // json, // jsonb, // numeric, // pgEnum, // pgTable, // real, // serial, // smallint, // text, // time, // timestamp, // varchar, // } from 'drizzle-orm/pg-core'; // dotenv.config(); // export const allColumns = pgTable('all_columns', { // sm: smallint('smallint'), // smdef: smallint('smallint_def').default(10), // int: integer('integer'), // intdef: integer('integer_def').default(10), // numeric: numeric('numeric'), // numeric2: numeric('numeric2', { precision: 5 }), // numeric3: numeric('numeric3', { scale: 2 }), // numeric4: numeric('numeric4', { precision: 5, scale: 2 }), // numericdef: numeric('numeridef').default('100'), // bigint: bigint('bigint', { mode: 'number' }), // bigintdef: bigint('bigintdef', { mode: 'number' }).default(100), // bool: boolean('boolean'), // booldef: boolean('boolean_def').default(true), // text: text('text'), // textdef: text('textdef').default('text'), // varchar: varchar('varchar'), // varchardef: varchar('varchardef').default('text'), // serial: serial('serial'), // bigserial: bigserial('bigserial', { mode: 'number' }), // decimal: decimal('decimal', { precision: 100, scale: 2 }), // decimaldef: decimal('decimaldef', { precision: 100, scale: 2 }).default('100.0'), // doublePrecision: doublePrecision('doublePrecision'), // doublePrecisiondef: doublePrecision('doublePrecisiondef').default(100), // real: real('real'), // realdef: real('realdef').default(100), // json: json('json').$type<{ attr: string }>(), // jsondef: json('jsondef').$type<{ attr: string }>().default({ attr: 'value' }), // jsonb: jsonb('jsonb').$type<{ attr: string }>(), // jsonbdef: jsonb('jsonbdef').$type<{ attr: string }>().default({ attr: 'value' }), // time: time('time'), // time2: time('time2', { precision: 6, withTimezone: true }), // timedefnow: time('timedefnow').defaultNow(), // timestamp: timestamp('timestamp'), // timestamp2: timestamp('timestamp2', { precision: 6, withTimezone: true }), // timestamp3: timestamp('timestamp3', { withTimezone: true }), // timestamp4: timestamp('timestamp4', { precision: 4 }), // timestampdef: timestamp('timestampdef').defaultNow(), // date: date('date', { mode: 'date' }), // datedef: date('datedef').defaultNow(), // }); // interface Context { // db: AwsDataApiPgDatabase; // row: typeof allColumns.$inferSelect; // } // const test = anyTest as TestFn; // test.before(async (t) => { // const ctx = t.context; // const database = process.env['AWS_DATA_API_DB']!; // const secretArn = process.env['AWS_DATA_API_SECRET_ARN']!; // const resourceArn = process.env['AWS_DATA_API_RESOURCE_ARN']!; // const rdsClient = new RDSDataClient({ // credentials: fromIni({ profile: process.env['AWS_TEST_PROFILE'] }), // region: 'us-east-1', // }); // ctx.db = drizzle(rdsClient, { // database, // secretArn, // resourceArn, // // logger: new DefaultLogger(), // }); // await ctx.db.execute(sql` // CREATE TABLE IF NOT EXISTS "all_columns" ( // "smallint" smallint, // "smallint_def" smallint DEFAULT 10, // "integer" integer, // "integer_def" integer DEFAULT 10, // "numeric" numeric, // "numeric2" numeric(5), // "numeric3" numeric, // "numeric4" numeric(5, 2), // "numeridef" numeric DEFAULT '100', // "bigint" bigint, // "bigintdef" bigint DEFAULT 100, // "boolean" boolean, // "boolean_def" boolean DEFAULT true, // "text" text, // "textdef" text DEFAULT 'text', // "varchar" varchar, // "varchardef" varchar DEFAULT 'text', // "serial" serial, // "bigserial" bigserial, // "decimal" numeric(100, 2), // "decimaldef" numeric(100, 2) DEFAULT '100.0', // "doublePrecision" double precision, // "doublePrecisiondef" double precision DEFAULT 100, // "real" real, // "realdef" real DEFAULT 100, // "json" json, // "jsondef" json DEFAULT '{"attr":"value"}'::json, // "jsonb" jsonb, // "jsonbdef" jsonb DEFAULT '{"attr":"value"}'::jsonb, // "time" time, // "time2" time, // "timedefnow" time DEFAULT now(), // "timestamp" timestamp, // "timestamp2" timestamp (6) with time zone, // "timestamp3" timestamp with time zone, // "timestamp4" timestamp (4), // "timestampdef" timestamp DEFAULT now(), // "date" date, // "datedef" date DEFAULT now() // ) // `); // const now = new Date(); // await ctx.db.insert(allColumns).values({ // sm: 12, // int: 22, // numeric: '1.1', // numeric2: '123.45', // numeric3: '123.45', // numeric4: '123.45', // bigint: 1578, // bool: true, // text: 'inserted_text', // varchar: 'inserted_varchar', // serial: 44, // bigserial: 63473487, // decimal: '100.1', // doublePrecision: 7384.34, // real: 73849.11, // json: { attr: 'hellohello' }, // jsonb: { attr: 'hellohello' }, // time: '11:12:00', // time2: '11:12:00', // timestamp: now, // timestamp2: now, // timestamp3: now, // timestamp4: now, // date: now, // // interval: '10 days' // }); // const resultRows = await ctx.db.select().from(allColumns); // t.is(resultRows.length, 1); // const row = resultRows[0]!; // ctx.row = row; // }); // test.serial('[small] serial type', async (t) => { // const { row } = t.context; // t.assert(typeof row.sm === 'number'); // t.is(row.sm, 12); // }); // test.serial('[small serial] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.sm === 'number'); // t.is(row.smdef, 10); // }); // test.serial('[int] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.int === 'number'); // t.is(row.int, 22); // }); // test.serial('[int] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.intdef === 'number'); // t.is(row.intdef, 10); // }); // test.serial('[numeric] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.int === 'number'); // t.is(row.int, 22); // }); // test.serial('[numeric(precision)] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.int === 'number'); // t.is(row.int, 22); // }); // test.serial('[numeric(scale)] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.int === 'number'); // t.is(row.int, 22); // }); // test.serial('[numeric(precision, scale)] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.int === 'number'); // t.is(row.int, 22); // }); // test.serial('[numeric] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.int === 'number'); // t.is(row.int, 22); // }); // test.serial('[bigint] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.bigint === 'number'); // t.is(row.bigint, 1578); // }); // test.serial('[bigint] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.bigintdef === 'number'); // t.is(row.bigintdef, 100); // }); // test.serial('[boolean] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.bool === 'boolean'); // t.is(row.bool, true); // }); // test.serial('[boolean] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.booldef === 'boolean'); // t.is(row.booldef, true); // }); // test.serial('[text] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.text === 'string'); // t.is(row.text, 'inserted_text'); // }); // test.serial('[text] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.textdef === 'string'); // t.is(row.textdef, 'text'); // }); // test.serial('[varchar] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.varchar === 'string'); // t.is(row.varchar, 'inserted_varchar'); // }); // test.serial('[varchar] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.varchardef === 'string'); // t.is(row.varchardef, 'text'); // }); // test.serial('[serial] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.serial === 'number'); // t.is(row.serial, 44); // }); // test.serial('[bigserial] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.bigserial === 'number'); // t.is(row.bigserial, 63473487); // }); // test.serial('[decimal] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.decimal === 'string'); // t.is(row.decimal, '100.10'); // }); // test.serial('[decimal] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.decimaldef === 'string'); // t.is(row.decimaldef, '100.00'); // }); // test.serial('[double precision] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.doublePrecision === 'number'); // t.is(row.doublePrecision, 7384.34); // }); // test.serial('[double precision] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.doublePrecisiondef === 'number'); // t.is(row.doublePrecisiondef, 100); // }); // test.serial('[real] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.real === 'number'); // t.is(row.real, 73849.11); // }); // test.serial('[real] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.realdef === 'number'); // t.is(row.realdef, 100); // }); // test.serial('[json] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.json?.attr === 'string'); // t.deepEqual(row.json, { attr: 'hellohello' }); // }); // test.serial('[json] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.jsondef?.attr === 'string'); // t.deepEqual(row.jsondef, { attr: 'value' }); // }); // test.serial('[jsonb] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.jsonb?.attr === 'string'); // t.deepEqual(row.jsonb, { attr: 'hellohello' }); // }); // test.serial('[jsonb] type with default', async (t) => { // const { row } = t.context; // t.assert(typeof row.jsonbdef?.attr === 'string'); // t.deepEqual(row.jsonbdef, { attr: 'value' }); // }); // test.serial('[time] type', async (t) => { // const { row } = t.context; // t.assert(typeof row.time === 'string'); // t.assert(typeof row.time2 === 'string'); // t.assert(typeof row.timedefnow === 'string'); // }); // test.serial('[timestamp] type with default', async (t) => { // const { row } = t.context; // t.assert(row.timestamp instanceof Date); // eslint-disable-line no-instanceof/no-instanceof // t.assert(row.timestamp2 instanceof Date); // eslint-disable-line no-instanceof/no-instanceof // t.assert(row.timestamp3 instanceof Date); // eslint-disable-line no-instanceof/no-instanceof // t.assert(row.timestamp4 instanceof Date); // eslint-disable-line no-instanceof/no-instanceof // t.assert(row.timestampdef instanceof Date); // eslint-disable-line no-instanceof/no-instanceof // }); // test.serial('[date] type with default', async (t) => { // const { row } = t.context; // t.assert(row.date instanceof Date); // eslint-disable-line no-instanceof/no-instanceof // t.assert(typeof row.datedef === 'string'); // }); // test.serial('select from enum', async (t) => { // const { db } = t.context; // const muscleEnum = pgEnum('muscle', [ // 'abdominals', // 'hamstrings', // 'adductors', // 'quadriceps', // 'biceps', // 'shoulders', // 'chest', // 'middle_back', // 'calves', // 'glutes', // 'lower_back', // 'lats', // 'triceps', // 'traps', // 'forearms', // 'neck', // 'abductors', // ]); // const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); // const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); // const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); // const equipmentEnum = pgEnum('equipment', ['barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell']); // const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); // const exercises = pgTable('exercises', { // id: serial('id').primaryKey(), // name: varchar('name').notNull(), // force: forceEnum('force'), // level: levelEnum('level'), // mechanic: mechanicEnum('mechanic'), // equipment: equipmentEnum('equipment'), // instructions: text('instructions'), // category: categoryEnum('category'), // primaryMuscles: muscleEnum('primary_muscles').array(), // secondaryMuscles: muscleEnum('secondary_muscles').array(), // createdAt: timestamp('created_at').notNull().default(sql`now()`), // updatedAt: timestamp('updated_at').notNull().default(sql`now()`), // }); // await db.execute(sql`drop table if exists ${exercises}`); // await db.execute(sql`drop type if exists ${name(muscleEnum.enumName)}`); // await db.execute(sql`drop type if exists ${name(forceEnum.enumName)}`); // await db.execute(sql`drop type if exists ${name(levelEnum.enumName)}`); // await db.execute(sql`drop type if exists ${name(mechanicEnum.enumName)}`); // await db.execute(sql`drop type if exists ${name(equipmentEnum.enumName)}`); // await db.execute(sql`drop type if exists ${name(categoryEnum.enumName)}`); // await db.execute( // sql`create type ${ // name(muscleEnum.enumName) // } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, // ); // await db.execute(sql`create type ${name(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`); // await db.execute(sql`create type ${name(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`); // await db.execute(sql`create type ${name(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); // await db.execute( // sql`create type ${ // name(equipmentEnum.enumName) // } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, // ); // await db.execute(sql`create type ${name(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`); // await db.execute(sql` // create table ${exercises} ( // id serial primary key, // name varchar not null, // force force, // level level, // mechanic mechanic, // equipment equipment, // instructions text, // category category, // primary_muscles muscle[], // secondary_muscles muscle[], // created_at timestamp not null default now(), // updated_at timestamp not null default now() // ) // `); // await db.insert(exercises).values({ // name: 'Bench Press', // force: 'isotonic', // level: 'beginner', // mechanic: 'compound', // equipment: 'barbell', // instructions: // 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', // category: 'upper_body', // primaryMuscles: ['chest', 'triceps'], // secondaryMuscles: ['shoulders', 'traps'], // }); // const result = await db.select().from(exercises); // t.deepEqual(result, [ // { // id: 1, // name: 'Bench Press', // force: 'isotonic', // level: 'beginner', // mechanic: 'compound', // equipment: 'barbell', // instructions: // 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', // category: 'upper_body', // primaryMuscles: ['chest', 'triceps'], // secondaryMuscles: ['shoulders', 'traps'], // createdAt: result[0]!.createdAt, // updatedAt: result[0]!.updatedAt, // }, // ]); // await db.execute(sql`drop table ${exercises}`); // await db.execute(sql`drop type ${name(muscleEnum.enumName)}`); // await db.execute(sql`drop type ${name(forceEnum.enumName)}`); // await db.execute(sql`drop type ${name(levelEnum.enumName)}`); // await db.execute(sql`drop type ${name(mechanicEnum.enumName)}`); // await db.execute(sql`drop type ${name(equipmentEnum.enumName)}`); // await db.execute(sql`drop type ${name(categoryEnum.enumName)}`); // }); // test.after.always(async (t) => { // const ctx = t.context; // await ctx.db.execute(sql`drop table "all_columns"`); // }); ================================================ FILE: integration-tests/tests/bun/bun-sql.test.ts ================================================ import retry from 'async-retry'; import { SQL as BunSQL } from 'bun'; import { afterAll, afterEach, beforeAll, beforeEach, expect, test } from 'bun:test'; import type Docker from 'dockerode'; // eslint-disable-next-line @typescript-eslint/consistent-type-imports import { and, arrayContained, arrayContains, arrayOverlaps, asc, avg, avgDistinct, count, countDistinct, eq, Equal, exists, getTableColumns, gt, gte, ilike, inArray, is, lt, max, min, notInArray, or, SQL, sql, SQLWrapper, sum, sumDistinct, TransactionRollbackError, } from 'drizzle-orm'; import type { BunSQLDatabase } from 'drizzle-orm/bun-sql'; import { drizzle } from 'drizzle-orm/bun-sql'; import { authenticatedRole, crudPolicy } from 'drizzle-orm/neon'; import { usersSync } from 'drizzle-orm/neon/neon-auth'; import type { PgColumn, PgDatabase, PgQueryResultHKT } from 'drizzle-orm/pg-core'; import { alias, bigint, bigserial, boolean, char, cidr, date, doublePrecision, except, exceptAll, foreignKey, getMaterializedViewConfig, getTableConfig, getViewConfig, index, inet, integer, intersect, intersectAll, interval, json, jsonb, line, macaddr, macaddr8, numeric, PgDialect, pgEnum, pgMaterializedView, PgPolicy, pgPolicy, pgSchema, pgTable, pgTableCreator, pgView, point, primaryKey, real, serial, smallint, smallserial, text, time, timestamp, union, unionAll, unique, uniqueKeyName, uuid, uuid as pgUuid, varchar, } from 'drizzle-orm/pg-core'; import { Expect } from '~/utils'; export const usersTable = pgTable('users', { id: serial('id' as string).primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: jsonb('jsonb').$type(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); const usersOnUpdate = pgTable('users_on_update', { id: serial('id').primaryKey(), name: text('name').notNull(), updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), updatedAt: timestamp('updated_at', { mode: 'date', precision: 3 }).$onUpdate(() => new Date()), alwaysNull: text('always_null').$type().$onUpdate(() => null), }); const citiesTable = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), state: char('state', { length: 2 }), }); const cities2Table = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const users2Table = pgTable('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').references(() => citiesTable.id), }); const coursesTable = pgTable('courses', { id: serial('id').primaryKey(), name: text('name').notNull(), categoryId: integer('category_id').references(() => courseCategoriesTable.id), }); const courseCategoriesTable = pgTable('course_categories', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const orders = pgTable('orders', { id: serial('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull().$default(() => 'random_string'), amount: integer('amount').notNull(), quantity: integer('quantity').notNull(), }); const network = pgTable('network_table', { inet: inet('inet').notNull(), cidr: cidr('cidr').notNull(), macaddr: macaddr('macaddr').notNull(), macaddr8: macaddr8('macaddr8').notNull(), }); const salEmp = pgTable('sal_emp', { name: text('name'), payByQuarter: integer('pay_by_quarter').array(), schedule: text('schedule').array().array(), }); const _tictactoe = pgTable('tictactoe', { squares: integer('squares').array(3).array(3), }); export const usersMigratorTable = pgTable('users12', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }); // To test aggregate functions const aggregateTable = pgTable('aggregate_table', { id: serial('id').notNull(), name: text('name').notNull(), a: integer('a'), b: integer('b'), c: integer('c'), nullOnly: integer('null_only'), }); // To test another schema and multischema export const mySchema = pgSchema('mySchema'); export const usersMySchemaTable = mySchema.table('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: jsonb('jsonb').$type(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); const citiesMySchemaTable = mySchema.table('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), state: char('state', { length: 2 }), }); const users2MySchemaTable = mySchema.table('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').references(() => citiesTable.id), }); const jsonTestTable = pgTable('jsontest', { id: serial('id').primaryKey(), json: json('json').$type<{ string: string; number: number }>(), jsonb: jsonb('jsonb').$type<{ string: string; number: number }>(), }); const en = pgEnum('en', ['enVal1', 'enVal2']); const allTypesTable = pgTable('all_types', { serial: serial('serial'), bigserial53: bigserial('bigserial53', { mode: 'number', }), bigserial64: bigserial('bigserial64', { mode: 'bigint', }), int: integer('int'), bigint53: bigint('bigint53', { mode: 'number', }), bigint64: bigint('bigint64', { mode: 'bigint', }), bool: boolean('bool'), char: char('char'), cidr: cidr('cidr'), date: date('date', { mode: 'date', }), dateStr: date('date_str', { mode: 'string', }), double: doublePrecision('double'), enum: en('enum'), inet: inet('inet'), interval: interval('interval'), json: json('json'), jsonb: jsonb('jsonb'), line: line('line', { mode: 'abc', }), lineTuple: line('line_tuple', { mode: 'tuple', }), macaddr: macaddr('macaddr'), macaddr8: macaddr8('macaddr8'), numeric: numeric('numeric'), numericNum: numeric('numeric_num', { mode: 'number', }), numericBig: numeric('numeric_big', { mode: 'bigint', }), point: point('point', { mode: 'xy', }), pointTuple: point('point_tuple', { mode: 'tuple', }), real: real('real'), smallint: smallint('smallint'), smallserial: smallserial('smallserial'), text: text('text'), time: time('time'), timestamp: timestamp('timestamp', { mode: 'date', }), timestampTz: timestamp('timestamp_tz', { mode: 'date', withTimezone: true, }), timestampStr: timestamp('timestamp_str', { mode: 'string', }), timestampTzStr: timestamp('timestamp_tz_str', { mode: 'string', withTimezone: true, }), uuid: uuid('uuid'), varchar: varchar('varchar'), arrint: integer('arrint').array(), arrbigint53: bigint('arrbigint53', { mode: 'number', }).array(), arrbigint64: bigint('arrbigint64', { mode: 'bigint', }).array(), arrbool: boolean('arrbool').array(), arrchar: char('arrchar').array(), arrcidr: cidr('arrcidr').array(), arrdate: date('arrdate', { mode: 'date', }).array(), arrdateStr: date('arrdate_str', { mode: 'string', }).array(), arrdouble: doublePrecision('arrdouble').array(), arrenum: en('arrenum').array(), arrinet: inet('arrinet').array(), arrinterval: interval('arrinterval').array(), arrjson: json('arrjson').array(), arrjsonb: jsonb('arrjsonb').array(), arrline: line('arrline', { mode: 'abc', }).array(), arrlineTuple: line('arrline_tuple', { mode: 'tuple', }).array(), arrmacaddr: macaddr('arrmacaddr').array(), arrmacaddr8: macaddr8('arrmacaddr8').array(), arrnumeric: numeric('arrnumeric').array(), arrnumericNum: numeric('arrnumeric_num', { mode: 'number', }).array(), arrnumericBig: numeric('arrnumeric_big', { mode: 'bigint', }).array(), arrpoint: point('arrpoint', { mode: 'xy', }).array(), arrpointTuple: point('arrpoint_tuple', { mode: 'tuple', }).array(), arrreal: real('arrreal').array(), arrsmallint: smallint('arrsmallint').array(), arrtext: text('arrtext').array(), arrtime: time('arrtime').array(), arrtimestamp: timestamp('arrtimestamp', { mode: 'date', }).array(), arrtimestampTz: timestamp('arrtimestamp_tz', { mode: 'date', withTimezone: true, }).array(), arrtimestampStr: timestamp('arrtimestamp_str', { mode: 'string', }).array(), arrtimestampTzStr: timestamp('arrtimestamp_tz_str', { mode: 'string', withTimezone: true, }).array(), arruuid: uuid('arruuid').array(), arrvarchar: varchar('arrvarchar').array(), }); let pgContainer: Docker.Container | undefined; afterAll(async () => { await pgContainer?.stop().catch(console.error); }); let db: BunSQLDatabase; let client: BunSQL; beforeAll(async () => { const connectionString = process.env['PG_CONNECTION_STRING']; client = await retry(async () => { // @ts-expect-error const connClient = new BunSQL(connectionString, { max: 1 }); await connClient.unsafe(`select 1`); return connClient; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); db = drizzle(client, { logger: false }); }); afterAll(async () => { await client?.end(); }); beforeEach(async () => { await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`drop schema if exists ${mySchema} cascade`); await db.execute(sql`create schema public`); await db.execute(sql`create schema if not exists custom_migrations`); await db.execute(sql`create schema ${mySchema}`); // public users await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); // public cities await db.execute( sql` create table cities ( id serial primary key, name text not null, state char(2) ) `, ); // public users2 await db.execute( sql` create table users2 ( id serial primary key, name text not null, city_id integer references cities(id) ) `, ); await db.execute( sql` create table course_categories ( id serial primary key, name text not null ) `, ); await db.execute( sql` create table courses ( id serial primary key, name text not null, category_id integer references course_categories(id) ) `, ); await db.execute( sql` create table orders ( id serial primary key, region text not null, product text not null, amount integer not null, quantity integer not null ) `, ); await db.execute( sql` create table network_table ( inet inet not null, cidr cidr not null, macaddr macaddr not null, macaddr8 macaddr8 not null ) `, ); await db.execute( sql` create table sal_emp ( name text not null, pay_by_quarter integer[] not null, schedule text[][] not null ) `, ); await db.execute( sql` create table tictactoe ( squares integer[3][3] not null ) `, ); // // mySchema users await db.execute( sql` create table ${usersMySchemaTable} ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); // mySchema cities await db.execute( sql` create table ${citiesMySchemaTable} ( id serial primary key, name text not null, state char(2) ) `, ); // mySchema users2 await db.execute( sql` create table ${users2MySchemaTable} ( id serial primary key, name text not null, city_id integer references "mySchema".cities(id) ) `, ); await db.execute( sql` create table jsontest ( id serial primary key, json json, jsonb jsonb ) `, ); }); afterEach(async () => { await db.execute(sql`drop schema if exists custom_migrations cascade`); }); async function setupSetOperationTest(db: PgDatabase) { await db.execute(sql`drop table if exists users2`); await db.execute(sql`drop table if exists cities`); await db.execute( sql` create table cities ( id serial primary key, name text not null ) `, ); await db.execute( sql` create table users2 ( id serial primary key, name text not null, city_id integer references cities(id) ) `, ); await db.insert(cities2Table).values([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await db.insert(users2Table).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 2 }, { id: 3, name: 'Jack', cityId: 3 }, { id: 4, name: 'Peter', cityId: 3 }, { id: 5, name: 'Ben', cityId: 2 }, { id: 6, name: 'Jill', cityId: 1 }, { id: 7, name: 'Mary', cityId: 2 }, { id: 8, name: 'Sally', cityId: 1 }, ]); } async function setupAggregateFunctionsTest(db: PgDatabase) { await db.execute(sql`drop table if exists "aggregate_table"`); await db.execute( sql` create table "aggregate_table" ( "id" serial not null, "name" text not null, "a" integer, "b" integer, "c" integer, "null_only" integer ); `, ); await db.insert(aggregateTable).values([ { name: 'value 1', a: 5, b: 10, c: 20 }, { name: 'value 1', a: 5, b: 20, c: 30 }, { name: 'value 2', a: 10, b: 50, c: 60 }, { name: 'value 3', a: 20, b: 20, c: null }, { name: 'value 4', a: null, b: 90, c: 120 }, { name: 'value 5', a: 80, b: 10, c: null }, { name: 'value 6', a: null, b: null, c: 150 }, ]); } test('table configs: unique third param', async () => { const cities1Table = pgTable('cities1', { id: serial('id').primaryKey(), name: text('name').notNull(), state: char('state', { length: 2 }), }, (t) => ({ f: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), f1: unique('custom_name1').on(t.name, t.state), })); const tableConfig = getTableConfig(cities1Table); expect(tableConfig.uniqueConstraints).toHaveLength(2); expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); expect(tableConfig.uniqueConstraints[0]?.nullsNotDistinct).toBe(true); expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); expect(tableConfig.uniqueConstraints[1]?.nullsNotDistinct).toBe(false); expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); }); test('table configs: unique in column', async () => { const cities1Table = pgTable('cities1', { id: serial('id').primaryKey(), name: text('name').notNull().unique(), state: char('state', { length: 2 }).unique('custom'), field: char('field', { length: 2 }).unique('custom_field', { nulls: 'not distinct' }), }); const tableConfig = getTableConfig(cities1Table); const columnName = tableConfig.columns.find((it) => it.name === 'name'); expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); expect(columnName?.isUnique).toBe(true); const columnState = tableConfig.columns.find((it) => it.name === 'state'); expect(columnState?.uniqueName).toBe('custom'); expect(columnState?.isUnique).toBe(true); const columnField = tableConfig.columns.find((it) => it.name === 'field'); expect(columnField?.uniqueName).toBe('custom_field'); expect(columnField?.isUnique).toBe(true); expect(columnField?.uniqueType).toBe('not distinct'); }); test('table config: foreign keys name', async () => { const table = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), })); const tableConfig = getTableConfig(table); expect(tableConfig.foreignKeys).toHaveLength(1); expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); }); test('table config: primary keys name', async () => { const table = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), })); const tableConfig = getTableConfig(table); expect(tableConfig.primaryKeys).toHaveLength(1); expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); }); test('select all fields', async () => { const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(100); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('select sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select typed sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select with empty array in inArray', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(inArray(usersTable.id, [])); expect(result).toEqual([]); }); test('select with empty array in notInArray', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(notInArray(usersTable.id, [])); expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); }); test('$default function', async () => { const insertedOrder = await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }) .returning(); const selectedOrder = await db.select().from(orders); expect(insertedOrder).toEqual([{ id: 1, amount: 1, quantity: 1, region: 'Ukraine', product: 'random_string', }]); expect(selectedOrder).toEqual([{ id: 1, amount: 1, quantity: 1, region: 'Ukraine', product: 'random_string', }]); }); test('select distinct', async () => { const usersDistinctTable = pgTable('users_distinct', { id: integer('id').notNull(), name: text('name').notNull(), age: integer('age').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); await db.execute(sql`create table ${usersDistinctTable} (id integer, name text, age integer)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John', age: 24 }, { id: 1, name: 'John', age: 24 }, { id: 2, name: 'John', age: 25 }, { id: 1, name: 'Jane', age: 24 }, { id: 1, name: 'Jane', age: 26 }, ]); const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ); const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( usersDistinctTable.id, ); const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( usersDistinctTable, ).orderBy(usersDistinctTable.name); const users4 = await db.selectDistinctOn([usersDistinctTable.id, usersDistinctTable.age]).from( usersDistinctTable, ).orderBy(usersDistinctTable.id, usersDistinctTable.age); await db.execute(sql`drop table ${usersDistinctTable}`); expect(users1).toEqual([ { id: 1, name: 'Jane', age: 24 }, { id: 1, name: 'Jane', age: 26 }, { id: 1, name: 'John', age: 24 }, { id: 2, name: 'John', age: 25 }, ]); expect(users2).toHaveLength(2); expect(users2[0]?.id).toBe(1); expect(users2[1]?.id).toBe(2); expect(users3).toHaveLength(2); expect(users3[0]?.name).toBe('Jane'); expect(users3[1]?.name).toBe('John'); expect(users4).toEqual([ { id: 1, name: 'John', age: 24 }, { id: 1, name: 'Jane', age: 26 }, { id: 2, name: 'John', age: 25 }, ]); }); test('insert returning sql', async () => { const users = await db .insert(usersTable) .values({ name: 'John' }) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('delete returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .delete(usersTable) .where(eq(usersTable.name, 'John')) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('update returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JANE' }]); }); test('update with returning all fields', async () => { const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const users = await db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); expect(users).toEqual([ { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, ]); }); test('update with returning partial', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning({ id: usersTable.id, name: usersTable.name, }); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async () => { const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); expect(users).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, ]); }); test('delete with returning partial', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ id: usersTable.id, name: usersTable.name, }); expect(users).toEqual([{ id: 1, name: 'John' }]); }); test('insert + select', async () => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, ]); await db.insert(usersTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersTable); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('json insert', async () => { await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db .select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, }) .from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); test('char insert', async () => { await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); const result = await db .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) .from(citiesTable); expect(result).toEqual([{ id: 1, name: 'Austin', state: 'TX' }]); }); test('char update', async () => { await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); await db.update(citiesTable).set({ name: 'Atlanta', state: 'GA' }).where(eq(citiesTable.id, 1)); const result = await db .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) .from(citiesTable); expect(result).toEqual([{ id: 1, name: 'Atlanta', state: 'GA' }]); }); test('char delete', async () => { await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); await db.delete(citiesTable).where(eq(citiesTable.state, 'TX')); const result = await db .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) .from(citiesTable); expect(result).toEqual([]); }); test('insert with overridden default values', async () => { await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }, ]); }); test('insert many', async () => { await db .insert(usersTable) .values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); const result = await db .select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }) .from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('insert many with returning', async () => { const result = await db .insert(usersTable) .values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]) .returning({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('select with group by as field', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.name); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with exists', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const user = alias(usersTable, 'user'); const result = await db.select({ name: usersTable.name }).from(usersTable).where( exists( db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), ), ); expect(result).toEqual([{ name: 'John' }]); }); test('select with group by as sql', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(sql`${usersTable.name}`); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with group by as sql + column', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); test('select with group by as column + sql', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); test('select with group by complex query', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async () => { const query = db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', params: [], }); }); test('insert sql', async () => { await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async () => { const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }) .from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); expect(result).toEqual([ { user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }, ]); }); test('full join with alias', async () => { const pgTable = pgTableCreator((name) => `prefixed_${name}`); const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select() .from(users) .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('select from alias', async () => { const pgTable = pgTableCreator((name) => `prefixed_${name}`); const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select() .from(user) .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('insert with spaces', async () => { await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async () => { await db.insert(usersTable).values({ name: 'John' }); const statement = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .prepare('statement1'); const result = await statement.execute(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('insert: placeholders on columns with encoder', async () => { const statement = db.insert(usersTable).values({ name: 'John', jsonb: sql.placeholder('jsonb'), }).prepare('encoder_statement'); await statement.execute({ jsonb: ['foo', 'bar'] }); const result = await db .select({ id: usersTable.id, jsonb: usersTable.jsonb, }) .from(usersTable); expect(result).toEqual([ { id: 1, jsonb: ['foo', 'bar'] }, ]); }); test('prepared statement reuse', async () => { const stmt = db .insert(usersTable) .values({ verified: true, name: sql.placeholder('name'), }) .prepare('stmt2'); for (let i = 0; i < 10; i++) { await stmt.execute({ name: `John ${i}` }); } const result = await db .select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified, }) .from(usersTable); expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, { id: 4, name: 'John 3', verified: true }, { id: 5, name: 'John 4', verified: true }, { id: 6, name: 'John 5', verified: true }, { id: 7, name: 'John 6', verified: true }, { id: 8, name: 'John 7', verified: true }, { id: 9, name: 'John 8', verified: true }, { id: 10, name: 'John 9', verified: true }, ]); }); test('prepared statement with placeholder in .where', async () => { await db.insert(usersTable).values({ name: 'John' }); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .prepare('stmt3'); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement with placeholder in .limit', async () => { await db.insert(usersTable).values({ name: 'John' }); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .limit(sql.placeholder('limit')) .prepare('stmt_limit'); const result = await stmt.execute({ id: 1, limit: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); expect(result).toHaveLength(1); }); test('prepared statement with placeholder in .offset', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .offset(sql.placeholder('offset')) .prepare('stmt_offset'); const result = await stmt.execute({ offset: 1 }); expect(result).toEqual([{ id: 2, name: 'John1' }]); }); test('prepared statement built using $dynamic', async () => { function withLimitOffset(qb: any) { return qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); } await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .$dynamic(); withLimitOffset(stmt).prepare('stmt_limit'); const result = await stmt.execute({ limit: 1, offset: 1 }); expect(result).toEqual([{ id: 2, name: 'John1' }]); expect(result).toHaveLength(1); }); // TODO change tests to new structure test('Query check: Insert all defaults in 1 row', async () => { const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); const query = db .insert(users) .values({}) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "state") values (default, default, default)', params: [], }); }); test('Query check: Insert all defaults in multiple rows', async () => { const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state').default('UA'), }); const query = db .insert(users) .values([{}, {}]) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "state") values (default, default, default), (default, default, default)', params: [], }); }); test('Insert all defaults in 1 row', async () => { const users = pgTable('empty_insert_single', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, ); await db.insert(users).values({}); const res = await db.select().from(users); expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); }); test('Insert all defaults in multiple rows', async () => { const users = pgTable('empty_insert_multiple', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, ); await db.insert(users).values([{}, {}]); const res = await db.select().from(users); expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); }); test('build query insert with onConflict do update', async () => { const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', params: ['John', '["foo","bar"]', 'John1'], }); }); test('build query insert with onConflict do update / multiple columns', async () => { const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', params: ['John', '["foo","bar"]', 'John1'], }); }); test('build query insert with onConflict do nothing', async () => { const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing() .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', params: ['John', '["foo","bar"]'], }); }); test('build query insert with onConflict do nothing + target', async () => { const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing({ target: usersTable.id }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', params: ['John', '["foo","bar"]'], }); }); test('insert with onConflict do update', async () => { await db.insert(usersTable).values({ name: 'John' }); await db .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert with onConflict do nothing', async () => { await db.insert(usersTable).values({ name: 'John' }); await db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing(); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert with onConflict do nothing + target', async () => { await db.insert(usersTable).values({ name: 'John' }); await db .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoNothing({ target: usersTable.id }); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('left join (flat object fields)', async () => { const { id: cityId } = await db .insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]) .returning({ id: citiesTable.id }) .then((rows) => rows[0]!); await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); const res = await db .select({ userId: users2Table.id, userName: users2Table.name, cityId: citiesTable.id, cityName: citiesTable.name, }) .from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, { userId: 2, userName: 'Jane', cityId: null, cityName: null }, ]); }); test('left join (grouped fields)', async () => { const { id: cityId } = await db .insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]) .returning({ id: citiesTable.id }) .then((rows) => rows[0]!); await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); const res = await db .select({ id: users2Table.id, user: { name: users2Table.name, nameUpper: sql`upper(${users2Table.name})`, }, city: { id: citiesTable.id, name: citiesTable.name, nameUpper: sql`upper(${citiesTable.name})`, }, }) .from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { id: 1, user: { name: 'John', nameUpper: 'JOHN' }, city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, }, { id: 2, user: { name: 'Jane', nameUpper: 'JANE' }, city: null, }, ]); }); test('left join (all fields)', async () => { const { id: cityId } = await db .insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]) .returning({ id: citiesTable.id }) .then((rows) => rows[0]!); await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); const res = await db .select() .from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { users2: { id: 1, name: 'John', cityId, }, cities: { id: cityId, name: 'Paris', state: null, }, }, { users2: { id: 2, name: 'Jane', cityId: null, }, cities: null, }, ]); }); test('join subquery', async () => { await db .insert(courseCategoriesTable) .values([ { name: 'Category 1' }, { name: 'Category 2' }, { name: 'Category 3' }, { name: 'Category 4' }, ]); await db .insert(coursesTable) .values([ { name: 'Development', categoryId: 2 }, { name: 'IT & Software', categoryId: 3 }, { name: 'Marketing', categoryId: 4 }, { name: 'Design', categoryId: 1 }, ]); const sq2 = db .select({ categoryId: courseCategoriesTable.id, category: courseCategoriesTable.name, total: sql`count(${courseCategoriesTable.id})`, }) .from(courseCategoriesTable) .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) .as('sq2'); const res = await db .select({ courseName: coursesTable.name, categoryId: sq2.categoryId, }) .from(coursesTable) .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) .orderBy(coursesTable.name); expect(res).toEqual([ { courseName: 'Design', categoryId: 1 }, { courseName: 'Development', categoryId: 2 }, { courseName: 'IT & Software', categoryId: 3 }, { courseName: 'Marketing', categoryId: 4 }, ]); }); test('with ... select', async () => { await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const regionalSales = db .$with('regional_sales') .as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = db .$with('top_regions') .as( db .select({ region: regionalSales.region, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result1 = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`sum(${orders.quantity})::int`, productSales: sql`sum(${orders.amount})::int`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product); const result2 = await db .with(regionalSales, topRegions) .selectDistinct({ region: orders.region, product: orders.product, productUnits: sql`sum(${orders.quantity})::int`, productSales: sql`sum(${orders.amount})::int`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product); const result3 = await db .with(regionalSales, topRegions) .selectDistinctOn([orders.region], { region: orders.region, productUnits: sql`sum(${orders.quantity})::int`, productSales: sql`sum(${orders.amount})::int`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region) .orderBy(orders.region); expect(result1).toEqual([ { region: 'Europe', product: 'A', productUnits: 3, productSales: 30, }, { region: 'Europe', product: 'B', productUnits: 5, productSales: 50, }, { region: 'US', product: 'A', productUnits: 7, productSales: 70, }, { region: 'US', product: 'B', productUnits: 9, productSales: 90, }, ]); expect(result2).toEqual(result1); expect(result3).toEqual([ { region: 'Europe', productUnits: 8, productSales: 80, }, { region: 'US', productUnits: 16, productSales: 160, }, ]); }); test('with ... update', async () => { const products = pgTable('products', { id: serial('id').primaryKey(), price: numeric('price').notNull(), cheap: boolean('cheap').notNull().default(false), }); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql` create table ${products} ( id serial primary key, price numeric not null, cheap boolean not null default false ) `); await db.insert(products).values([ { price: '10.99' }, { price: '25.85' }, { price: '32.99' }, { price: '2.50' }, { price: '4.59' }, ]); const averagePrice = db .$with('average_price') .as( db .select({ value: sql`avg(${products.price})`.as('value'), }) .from(products), ); const result = await db .with(averagePrice) .update(products) .set({ cheap: true, }) .where(lt(products.price, sql`(select * from ${averagePrice})`)) .returning({ id: products.id, }); expect(result).toEqual([ { id: 1 }, { id: 4 }, { id: 5 }, ]); }); test('with ... insert', async () => { const users = pgTable('users', { username: text('username').notNull(), admin: boolean('admin').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (username text not null, admin boolean not null default false)`); const userCount = db .$with('user_count') .as( db .select({ value: sql`count(*)`.as('value'), }) .from(users), ); const result = await db .with(userCount) .insert(users) .values([ { username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }, ]) .returning({ admin: users.admin, }); expect(result).toEqual([{ admin: true }]); }); test('with ... delete', async () => { await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const averageAmount = db .$with('average_amount') .as( db .select({ value: sql`avg(${orders.amount})`.as('value'), }) .from(orders), ); const result = await db .with(averageAmount) .delete(orders) .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) .returning({ id: orders.id, }); expect(result).toEqual([ { id: 6 }, { id: 7 }, { id: 8 }, ]); }); test('select from subquery sql', async () => { await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); const sq = db .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) .from(users2Table) .as('sq'); const res = await db.select({ name: sq.name }).from(sq); expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); }); test('select a field without joining its table', () => { expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')).toThrowError(); }); test('select all fields from subquery without alias', () => { const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); expect(() => db.select().from(sq).prepare('query')).toThrowError(); }); test('select count()', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); const res = await db.select({ count: sql`count(*)` }).from(usersTable); expect(res).toEqual([{ count: '2' }]); }); test('select count w/ custom mapper', async () => { function count(value: PgColumn | SQLWrapper): SQL; function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { const result = sql`count(${value})`.mapWith(Number); if (!alias) { return result; } return result.as(alias); } await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); const res = await db.select({ count: count(sql`*`) }).from(usersTable); expect(res).toEqual([{ count: 2 }]); }); test('network types', async () => { const value: typeof network.$inferSelect = { inet: '127.0.0.1', cidr: '192.168.100.128/25', macaddr: '08:00:2b:01:02:03', macaddr8: '08:00:2b:01:02:03:04:05', }; await db.insert(network).values(value); const res = await db.select().from(network); expect(res).toEqual([value]); }); test.skip('array types', async () => { const values: typeof salEmp.$inferSelect[] = [ { name: 'John', payByQuarter: [10000, 10000, 10000, 10000], schedule: [['meeting', 'lunch'], ['training', 'presentation']], }, { name: 'Carol', payByQuarter: [20000, 25000, 25000, 25000], schedule: [['breakfast', 'consulting'], ['meeting', 'lunch']], }, ]; await db.insert(salEmp).values(values); const res = await db.select().from(salEmp); expect(res).toEqual(values); }); test('select for ...', () => { { const query = db .select() .from(users2Table) .for('update') .toSQL(); expect(query.sql).toMatch(/ for update$/); } { const query = db .select() .from(users2Table) .for('update', { of: [users2Table, coursesTable] }) .toSQL(); expect(query.sql).toMatch(/ for update of "users2", "courses"$/); } { const query = db .select() .from(users2Table) .for('no key update', { of: users2Table }) .toSQL(); expect(query.sql).toMatch(/for no key update of "users2"$/); } { const query = db .select() .from(users2Table) .for('no key update', { of: users2Table, skipLocked: true }) .toSQL(); expect(query.sql).toMatch(/ for no key update of "users2" skip locked$/); } { const query = db .select() .from(users2Table) .for('share', { of: users2Table, noWait: true }) .toSQL(); expect(query.sql).toMatch(/for share of "users2" nowait$/); } }); test('having', async () => { await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2, }]); const result = await db .select({ id: citiesTable.id, name: sql`upper(${citiesTable.name})`.as('upper_name'), usersCount: sql`count(${users2Table.id})::int`.as('users_count'), }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(citiesTable.id) .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name); expect(result).toEqual([ { id: 1, name: 'LONDON', usersCount: 2, }, { id: 2, name: 'PARIS', usersCount: 1, }, ]); }); test('view', async () => { const newYorkers1 = pgView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); const newYorkers2 = pgView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); const newYorkers3 = pgView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).existing(); await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop view ${newYorkers1}`); }); // NEXT test('materialized view', async () => { const newYorkers1 = pgMaterializedView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); const newYorkers2 = pgMaterializedView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); const newYorkers3 = pgMaterializedView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).existing(); await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1); expect(result).toEqual([]); } await db.refreshMaterializedView(newYorkers1); { const result = await db.select().from(newYorkers1); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop materialized view ${newYorkers1}`); }); test('select from existing view', async () => { const schema = pgSchema('test_schema'); const newYorkers = schema.view('new_yorkers', { id: integer('id').notNull(), }).existing(); await db.execute(sql`drop schema if exists ${schema} cascade`); await db.execute(sql`create schema ${schema}`); await db.execute(sql`create view ${newYorkers} as select id from ${usersTable}`); await db.insert(usersTable).values({ id: 100, name: 'John' }); const result = await db.select({ id: usersTable.id, }).from(usersTable).innerJoin(newYorkers, eq(newYorkers.id, usersTable.id)); expect(result).toEqual([{ id: 100 }]); }); // TODO: copy to SQLite and MySQL, add to docs test('select from raw sql', async () => { const result = await db.select({ id: sql`id`, name: sql`name`, }).from(sql`(select 1 as id, 'John' as name) as users`); Expect>; expect(result).toEqual([ { id: 1, name: 'John' }, ]); }); test('select from raw sql with joins', async () => { const result = await db .select({ id: sql`users.id`, name: sql`users.name`, userCity: sql`users.city`, cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); Expect>; expect(result).toEqual([ { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, ]); }); test('join on aliased sql from select', async () => { const result = await db .select({ userId: sql`users.id`.as('userId'), name: sql`users.name`, userCity: sql`users.city`, cityId: sql`cities.id`.as('cityId'), cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('join on aliased sql from with clause', async () => { const users = db.$with('users').as( db.select({ id: sql`id`.as('userId'), name: sql`name`.as('userName'), city: sql`city`.as('city'), }).from( sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, ), ); const cities = db.$with('cities').as( db.select({ id: sql`id`.as('cityId'), name: sql`name`.as('cityName'), }).from( sql`(select 1 as id, 'Paris' as name) as cities`, ), ); const result = await db .with(users, cities) .select({ userId: users.id, name: users.name, userCity: users.city, cityId: cities.id, cityName: cities.name, }) .from(users) .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('prefixed table', async () => { const pgTable = pgTableCreator((name) => `myprefix_${name}`); const users = pgTable('test_prefixed_table_with_unique_name', { id: integer('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, ); await db.insert(users).values({ id: 1, name: 'John' }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, name: 'John' }]); await db.execute(sql`drop table ${users}`); }); test('select from enum', async () => { const muscleEnum = pgEnum('muscle', [ 'abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors', ]); const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); const equipmentEnum = pgEnum('equipment', [ 'barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell', ]); const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); const exercises = pgTable('exercises', { id: serial('id').primaryKey(), name: varchar('name').notNull(), force: forceEnum('force'), level: levelEnum('level'), mechanic: mechanicEnum('mechanic'), equipment: equipmentEnum('equipment'), instructions: text('instructions'), category: categoryEnum('category'), primaryMuscles: muscleEnum('primary_muscles').array(), secondaryMuscles: muscleEnum('secondary_muscles').array(), createdAt: timestamp('created_at').notNull().default(sql`now()`), updatedAt: timestamp('updated_at').notNull().default(sql`now()`), }); await db.execute(sql`drop table if exists ${exercises}`); await db.execute(sql`drop type if exists ${sql.identifier(muscleEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(forceEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(levelEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(mechanicEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(equipmentEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(categoryEnum.enumName)}`); await db.execute( sql`create type ${ sql.identifier(muscleEnum.enumName) } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, ); await db.execute( sql`create type ${sql.identifier(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`, ); await db.execute( sql`create type ${sql.identifier(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`, ); await db.execute(sql`create type ${sql.identifier(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); await db.execute( sql`create type ${ sql.identifier(equipmentEnum.enumName) } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, ); await db.execute( sql`create type ${sql.identifier(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`, ); await db.execute(sql` create table ${exercises} ( id serial primary key, name varchar not null, force force, level level, mechanic mechanic, equipment equipment, instructions text, category category, primary_muscles muscle[], secondary_muscles muscle[], created_at timestamp not null default now(), updated_at timestamp not null default now() ) `); await db.insert(exercises).values({ name: 'Bench Press', force: 'isotonic', level: 'beginner', mechanic: 'compound', equipment: 'barbell', instructions: 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', category: 'upper_body', primaryMuscles: ['chest', 'triceps'], secondaryMuscles: ['shoulders', 'traps'], }); const result = await db.select().from(exercises); expect(result).toEqual([ { id: 1, name: 'Bench Press', force: 'isotonic', level: 'beginner', mechanic: 'compound', equipment: 'barbell', instructions: 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', category: 'upper_body', primaryMuscles: ['chest', 'triceps'], secondaryMuscles: ['shoulders', 'traps'], createdAt: result[0]!.createdAt, updatedAt: result[0]!.updatedAt, }, ]); await db.execute(sql`drop table ${exercises}`); await db.execute(sql`drop type ${sql.identifier(muscleEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(forceEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(levelEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(mechanicEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(equipmentEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(categoryEnum.enumName)}`); }); test.skip('all date and time columns', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), dateString: date('date_string', { mode: 'string' }).notNull(), time: time('time', { precision: 3 }).notNull(), datetime: timestamp('datetime').notNull(), datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), datetimeFullPrecision: timestamp('datetime_full_precision', { precision: 6, mode: 'string' }).notNull(), datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), interval: interval('interval').notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, date_string date not null, time time(3) not null, datetime timestamp not null, datetime_wtz timestamp with time zone not null, datetime_string timestamp not null, datetime_full_precision timestamp(6) not null, datetime_wtz_string timestamp with time zone not null, interval interval not null ) `); const someDatetime = new Date('2022-01-01T00:00:00.123Z'); const fullPrecision = '2022-01-01T00:00:00.123456Z'; const someTime = '23:23:12.432'; await db.insert(table).values({ dateString: '2022-01-01', time: someTime, datetime: someDatetime, datetimeWTZ: someDatetime, datetimeString: '2022-01-01T00:00:00.123Z', datetimeFullPrecision: fullPrecision, datetimeWTZString: '2022-01-01T00:00:00.123Z', interval: '1 day', }); const result = await db.select().from(table); Expect< Equal<{ id: number; dateString: string; time: string; datetime: Date; datetimeWTZ: Date; datetimeString: string; datetimeFullPrecision: string; datetimeWTZString: string; interval: string; }[], typeof result> >; Expect< Equal<{ dateString: string; time: string; datetime: Date; datetimeWTZ: Date; datetimeString: string; datetimeFullPrecision: string; datetimeWTZString: string; interval: string; id?: number | undefined; }, typeof table.$inferInsert> >; expect(result).toEqual([ { id: 1, dateString: '2022-01-01', time: someTime, datetime: someDatetime, datetimeWTZ: someDatetime, datetimeString: '2022-01-01 00:00:00.123', datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), datetimeWTZString: '2022-01-01 00:00:00.123+00', interval: '1 day', }, ]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns with timezone second case mode date', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); const insertedDate = new Date(); // 1. Insert date as new date await db.insert(table).values([ { timestamp: insertedDate }, ]); // 2, Select as date and check that timezones are the same // There is no way to check timezone in Date object, as it is always represented internally in UTC const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: insertedDate }]); // 3. Compare both dates expect(insertedDate.getTime()).toBe(result[0]!.timestamp.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns with timezone third case mode date', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); const insertedDate = new Date('2022-01-01 20:00:00.123-04'); // used different time zones, internally is still UTC const insertedDate2 = new Date('2022-01-02 04:00:00.123+04'); // They are both the same date in different time zones // 1. Insert date as new dates with different time zones await db.insert(table).values([ { timestamp: insertedDate }, { timestamp: insertedDate2 }, ]); // 2, Select and compare both dates const result = await db.select().from(table); expect(result[0]?.timestamp.getTime()).toBe(result[1]!.timestamp.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('orderBy with aliased column', () => { const query = db.select({ test: sql`something`.as('test'), }).from(users2Table).orderBy((fields) => fields.test).toSQL(); expect(query.sql).toBe('select something as "test" from "users2" order by "test"'); }); test('timestamp timezone', async () => { const usersTableWithAndWithoutTimezone = pgTable('users_test_with_and_without_timezone', { id: serial('id').primaryKey(), name: text('name').notNull(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), }); await db.execute(sql`drop table if exists ${usersTableWithAndWithoutTimezone}`); await db.execute( sql` create table users_test_with_and_without_timezone ( id serial not null primary key, name text not null, created_at timestamptz not null default now(), updated_at timestamp not null default now() ) `, ); const date = new Date(Date.parse('2020-01-01T00:00:00+04:00')); await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'With default times' }); await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'Without default times', createdAt: date, updatedAt: date, }); const users = await db.select().from(usersTableWithAndWithoutTimezone); // check that the timestamps are set correctly for default times expect(Math.abs(users[0]!.updatedAt.getTime() - Date.now())).toBeLessThan(2000); expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); // check that the timestamps are set correctly for non default times expect(Math.abs(users[1]!.updatedAt.getTime() - date.getTime())).toBeLessThan(2000); expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); }); test('transaction', async () => { const users = pgTable('users_transactions', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); const products = pgTable('products_transactions', { id: serial('id').primaryKey(), price: integer('price').notNull(), stock: integer('stock').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql`create table users_transactions (id serial not null primary key, balance integer not null)`); await db.execute( sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, ); const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); await db.transaction(async (tx) => { await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 90 }]); await db.execute(sql`drop table ${users}`); await db.execute(sql`drop table ${products}`); }); test('transaction rollback', async () => { const users = pgTable('users_transactions_rollback', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, ); await expect((async () => { await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); tx.rollback(); }); })()).rejects.toThrowError(TransactionRollbackError); const result = await db.select().from(users); expect(result).toEqual([]); await db.execute(sql`drop table ${users}`); }); test('nested transaction', async () => { const users = pgTable('users_nested_transactions', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }); }); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 200 }]); await db.execute(sql`drop table ${users}`); }); test('nested transaction rollback', async () => { const users = pgTable('users_nested_transactions_rollback', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); await expect((async () => { await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }); tx.rollback(); }); })()).rejects.toThrowError(TransactionRollbackError); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 100 }]); await db.execute(sql`drop table ${users}`); }); test('join subquery with join', async () => { const internalStaff = pgTable('internal_staff', { userId: integer('user_id').notNull(), }); const customUser = pgTable('custom_user', { id: integer('id').notNull(), }); const ticket = pgTable('ticket', { staffId: integer('staff_id').notNull(), }); await db.execute(sql`drop table if exists ${internalStaff}`); await db.execute(sql`drop table if exists ${customUser}`); await db.execute(sql`drop table if exists ${ticket}`); await db.execute(sql`create table internal_staff (user_id integer not null)`); await db.execute(sql`create table custom_user (id integer not null)`); await db.execute(sql`create table ticket (staff_id integer not null)`); await db.insert(internalStaff).values({ userId: 1 }); await db.insert(customUser).values({ id: 1 }); await db.insert(ticket).values({ staffId: 1 }); const subq = db .select() .from(internalStaff) .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) .as('internal_staff'); const mainQuery = await db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); expect(mainQuery).toEqual([{ ticket: { staffId: 1 }, internal_staff: { internal_staff: { userId: 1 }, custom_user: { id: 1 }, }, }]); await db.execute(sql`drop table ${internalStaff}`); await db.execute(sql`drop table ${customUser}`); await db.execute(sql`drop table ${ticket}`); }); test('subquery with view', async () => { const users = pgTable('users_subquery_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }); const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 1 }, { name: 'Jill', cityId: 2 }, ]); const sq = db.$with('sq').as(db.select().from(newYorkers)); const result = await db.with(sq).select().from(sq); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 3, name: 'Jack', cityId: 1 }, ]); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); }); test('join view as subquery', async () => { const users = pgTable('users_join_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }); const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 1 }, { name: 'Jill', cityId: 2 }, ]); const sq = db.select().from(newYorkers).as('new_yorkers_sq'); const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); expect(result).toEqual([ { users_join_view: { id: 1, name: 'John', cityId: 1 }, new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, }, { users_join_view: { id: 2, name: 'Jane', cityId: 2 }, new_yorkers_sq: null, }, { users_join_view: { id: 3, name: 'Jack', cityId: 1 }, new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, }, { users_join_view: { id: 4, name: 'Jill', cityId: 2 }, new_yorkers_sq: null, }, ]); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); }); test('table selection with single table', async () => { const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.insert(users).values({ name: 'John', cityId: 1 }); const result = await db.select({ users }).from(users); expect(result).toEqual([{ users: { id: 1, name: 'John', cityId: 1 } }]); await db.execute(sql`drop table ${users}`); }); test('set null to jsonb field', async () => { const users = pgTable('users', { id: serial('id').primaryKey(), jsonb: jsonb('jsonb'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, jsonb jsonb)`, ); const result = await db.insert(users).values({ jsonb: null }).returning(); expect(result).toEqual([{ id: 1, jsonb: null }]); await db.execute(sql`drop table ${users}`); }); test.skip('insert undefined', async () => { const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.insert(users).values({ name: undefined }); })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); test('update undefined', async () => { const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.update(users).set({ name: undefined }); })()).rejects.toThrowError(); await expect((async () => { db.update(users).set({ name: undefined }); })()).rejects.toThrowError(); await db.execute(sql`drop table ${users}`); }); test('array operators', async () => { const posts = pgTable('posts', { id: serial('id').primaryKey(), tags: text('tags').array(), }); await db.execute(sql`drop table if exists ${posts}`); await db.execute( sql`create table ${posts} (id serial primary key, tags text[])`, ); await db.insert(posts).values([{ tags: ['ORM'], }, { tags: ['Typescript'], }, { tags: ['Typescript', 'ORM'], }, { tags: ['Typescript', 'Frontend', 'React'], }, { tags: ['Typescript', 'ORM', 'Database', 'Postgres'], }, { tags: ['Java', 'Spring', 'OOP'], }]); const contains = await db.select({ id: posts.id }).from(posts) .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); const contained = await db.select({ id: posts.id }).from(posts) .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); const overlaps = await db.select({ id: posts.id }).from(posts) .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); const withSubQuery = await db.select({ id: posts.id }).from(posts) .where(arrayContains( posts.tags, db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), )); expect(contains).toEqual([{ id: 3 }, { id: 5 }]); expect(contained).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); expect(overlaps).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]); expect(withSubQuery).toEqual([{ id: 1 }, { id: 3 }, { id: 5 }]); }); test('set operations (union) from query builder with subquery', async () => { await setupSetOperationTest(db); const sq = db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).as('sq'); const result = await db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).union( db.select().from(sq), ).orderBy(asc(sql`name`)).limit(2).offset(1); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 3, name: 'Jack' }, { id: 2, name: 'Jane' }, ]); await expect((async () => { db .select({ id: cities2Table.id, name: citiesTable.name, name2: users2Table.name }) .from(cities2Table).union( // @ts-expect-error db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table), ).orderBy(asc(sql`name`)); })()).rejects.toThrowError(); }); test('set operations (union) as function', async () => { await setupSetOperationTest(db); const result = await union( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`name`)).limit(1).offset(1); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); await expect((async () => { union( db .select({ name: citiesTable.name, id: cities2Table.id }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`name`)); })()).rejects.toThrowError(); }); test('set operations (union all) from query builder', async () => { await setupSetOperationTest(db); const result = await db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2).unionAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(4); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 2, name: 'London' }, ]); await expect((async () => { db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2).unionAll( db .select({ name: citiesTable.name, id: cities2Table.id }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (union all) as function', async () => { await setupSetOperationTest(db); const result = await unionAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); expect(result).toHaveLength(3); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 1, name: 'John' }, { id: 1, name: 'John' }, ]); await expect((async () => { unionAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (intersect) from query builder', async () => { await setupSetOperationTest(db); const result = await db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).intersect( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`name`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).intersect( // @ts-expect-error db .select({ id: cities2Table.id, name: citiesTable.name, id2: cities2Table.id }) .from(cities2Table).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`name`)); })()).rejects.toThrowError(); }); test('set operations (intersect) as function', async () => { await setupSetOperationTest(db); const result = await intersect( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); expect(result).toHaveLength(0); expect(result).toEqual([]); await expect((async () => { intersect( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (intersect all) from query builder', async () => { await setupSetOperationTest(db); const result = await db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2).intersectAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, ]); await expect((async () => { db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2).intersectAll( db .select({ name: users2Table.name, id: users2Table.id }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (intersect all) as function', async () => { await setupSetOperationTest(db); const result = await intersectAll( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'John' }, ]); await expect((async () => { intersectAll( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (except) from query builder', async () => { await setupSetOperationTest(db); const result = await db .select() .from(cities2Table).except( db .select() .from(cities2Table).where(gt(citiesTable.id, 1)), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); await expect((async () => { db .select() .from(cities2Table).except( db .select({ name: users2Table.name, id: users2Table.id }) .from(cities2Table).where(gt(citiesTable.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (except) as function', async () => { await setupSetOperationTest(db); const result = await except( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table), db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { except( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table), db .select({ name: users2Table.name, id: users2Table.id }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (except all) from query builder', async () => { await setupSetOperationTest(db); const result = await db .select() .from(cities2Table).exceptAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { db .select({ name: cities2Table.name, id: cities2Table.id }) .from(cities2Table).exceptAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (except all) as function', async () => { await setupSetOperationTest(db); const result = await exceptAll( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gt(users2Table.id, 7)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)).limit(5).offset(2); expect(result).toHaveLength(4); expect(result).toEqual([ { id: 4, name: 'Peter' }, { id: 5, name: 'Ben' }, { id: 6, name: 'Jill' }, { id: 7, name: 'Mary' }, ]); await expect((async () => { exceptAll( db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gt(users2Table.id, 7)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (mixed) from query builder with subquery', async () => { await setupSetOperationTest(db); const sq = db .select() .from(cities2Table).where(gt(citiesTable.id, 1)).as('sq'); const result = await db .select() .from(cities2Table).except( ({ unionAll }) => unionAll( db.select().from(sq), db.select().from(cities2Table).where(eq(citiesTable.id, 2)), ), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); await expect((async () => { db .select() .from(cities2Table).except( ({ unionAll }) => unionAll( db .select({ name: cities2Table.name, id: cities2Table.id }) .from(cities2Table).where(gt(citiesTable.id, 1)), db.select().from(cities2Table).where(eq(citiesTable.id, 2)), ), ); })()).rejects.toThrowError(); }); test('set operations (mixed all) as function', async () => { await setupSetOperationTest(db); const result = await union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), except( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id, 5)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 7)), ), db .select().from(cities2Table).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(6); expect(result).toEqual([ { id: 1, name: 'John' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, { id: 5, name: 'Ben' }, { id: 6, name: 'Jill' }, { id: 8, name: 'Sally' }, ]); await expect((async () => { union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), except( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id, 5)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 7)), ), db .select().from(cities2Table).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('aggregate function: count', async () => { const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: count() }).from(table); const result2 = await db.select({ value: count(table.a) }).from(table); const result3 = await db.select({ value: countDistinct(table.name) }).from(table); expect(result1[0]?.value).toBe(7); expect(result2[0]?.value).toBe(5); expect(result3[0]?.value).toBe(6); }); test('aggregate function: avg', async () => { const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: avg(table.b) }).from(table); const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('33.3333333333333333'); expect(result2[0]?.value).toBeNull(); expect(result3[0]?.value).toBe('42.5000000000000000'); }); test('aggregate function: sum', async () => { const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: sum(table.b) }).from(table); const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('200'); expect(result2[0]?.value).toBeNull(); expect(result3[0]?.value).toBe('170'); }); test('aggregate function: max', async () => { const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: max(table.b) }).from(table); const result2 = await db.select({ value: max(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(90); expect(result2[0]?.value).toBeNull(); }); test('aggregate function: min', async () => { const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: min(table.b) }).from(table); const result2 = await db.select({ value: min(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(10); expect(result2[0]?.value).toBeNull(); }); test.skip('array mapping and parsing', async () => { const arrays = pgTable('arrays_tests', { id: serial('id').primaryKey(), tags: text('tags').array(), nested: text('nested').array().array(), numbers: integer('numbers').notNull().array(), }); await db.execute(sql`drop table if exists ${arrays}`); await db.execute(sql` create table ${arrays} ( id serial primary key, tags text[], nested text[][], numbers integer[] ) `); await db.insert(arrays).values({ tags: ['', 'b', 'c'], nested: [['1', ''], ['3', '\\a']], numbers: [1, 2, 3], }); const result = await db.select().from(arrays); expect(result).toEqual([{ id: 1, tags: ['', 'b', 'c'], nested: [['1', ''], ['3', '\\a']], numbers: [1, 2, 3], }]); await db.execute(sql`drop table ${arrays}`); }); test('test $onUpdateFn and $onUpdate works as $default', async () => { await db.execute(sql`drop table if exists ${usersOnUpdate}`); await db.execute( sql` create table ${usersOnUpdate} ( id serial primary key, name text not null, update_counter integer default 1 not null, updated_at timestamp(3), always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); }); test('test $onUpdateFn and $onUpdate works updating', async () => { await db.execute(sql`drop table if exists ${usersOnUpdate}`); await db.execute( sql` create table ${usersOnUpdate} ( id serial primary key, name text not null, update_counter integer default 1, updated_at timestamp(3), always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John', alwaysNull: 'this will be null after updating' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 15000; // expect(initial[0]?.updatedAt?.valueOf()).not.toBe(justDates[0]?.updatedAt?.valueOf()); for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); test('test if method with sql operators', async () => { const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), age: integer('age').notNull(), city: text('city').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( id serial primary key, name text not null, age integer not null, city text not null ) `); await db.insert(users).values([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition1 = true; const [result1] = await db.select().from(users).where(eq(users.id, 1).if(condition1)); expect(result1).toEqual({ id: 1, name: 'John', age: 20, city: 'New York' }); const condition2 = 1; const [result2] = await db.select().from(users).where(sql`${users.id} = 1`.if(condition2)); expect(result2).toEqual({ id: 1, name: 'John', age: 20, city: 'New York' }); const condition3 = 'non-empty string'; const result3 = await db.select().from(users).where( or(eq(users.id, 1).if(condition3), eq(users.id, 2).if(condition3)), ); expect(result3).toEqual([{ id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York', }]); const condtition4 = false; const result4 = await db.select().from(users).where(eq(users.id, 1).if(condtition4)); expect(result4).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition5 = undefined; const result5 = await db.select().from(users).where(sql`${users.id} = 1`.if(condition5)); expect(result5).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition6 = null; const result6 = await db.select().from(users).where( or(eq(users.id, 1).if(condition6), eq(users.id, 2).if(condition6)), ); expect(result6).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition7 = { term1: 0, term2: 1, }; const result7 = await db.select().from(users).where( and(gt(users.age, 20).if(condition7.term1), eq(users.city, 'New York').if(condition7.term2)), ); expect(result7).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, ]); const condition8 = { term1: '', term2: 'non-empty string', }; const result8 = await db.select().from(users).where( or(lt(users.age, 21).if(condition8.term1), eq(users.city, 'London').if(condition8.term2)), ); expect(result8).toEqual([ { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition9 = { term1: 1, term2: true, }; const result9 = await db.select().from(users).where( and( inArray(users.city, ['New York', 'London']).if(condition9.term1), ilike(users.name, 'a%').if(condition9.term2), ), ); expect(result9).toEqual([ { id: 2, name: 'Alice', age: 21, city: 'New York' }, ]); const condition10 = { term1: 4, term2: 19, }; const result10 = await db.select().from(users).where( and( sql`length(${users.name}) <= ${condition10.term1}`.if(condition10.term1), gt(users.age, condition10.term2).if(condition10.term2 > 20), ), ); expect(result10).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition11 = true; const result11 = await db.select().from(users).where( or(eq(users.city, 'New York'), gte(users.age, 22))!.if(condition11), ); expect(result11).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition12 = false; const result12 = await db.select().from(users).where( and(eq(users.city, 'London'), gte(users.age, 23))!.if(condition12), ); expect(result12).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition13 = true; const result13 = await db.select().from(users).where(sql`(city = 'New York' or age >= 22)`.if(condition13)); expect(result13).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition14 = false; const result14 = await db.select().from(users).where(sql`(city = 'London' and age >= 23)`.if(condition14)); expect(result14).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); await db.execute(sql`drop table ${users}`); }); // MySchema tests test('mySchema :: select all fields', async () => { const now = Date.now(); await db.insert(usersMySchemaTable).values({ name: 'John' }); const result = await db.select().from(usersMySchemaTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(100); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('mySchema :: select sql', async () => { await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersMySchemaTable.name})`, }).from(usersMySchemaTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: select typed sql', async () => { await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersMySchemaTable.name})`, }).from(usersMySchemaTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: select distinct', async () => { const usersDistinctTable = pgTable('users_distinct', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]); const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ); const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( usersDistinctTable.id, ); const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( usersDistinctTable, ).orderBy(usersDistinctTable.name); await db.execute(sql`drop table ${usersDistinctTable}`); expect(users1).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); expect(users2).toHaveLength(2); expect(users2[0]?.id).toBe(1); expect(users2[1]?.id).toBe(2); expect(users3).toHaveLength(2); expect(users3[0]?.name).toBe('Jane'); expect(users3[1]?.name).toBe('John'); }); test('mySchema :: insert returning sql', async () => { const users = await db.insert(usersMySchemaTable).values({ name: 'John' }).returning({ name: sql`upper(${usersMySchemaTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: delete returning sql', async () => { await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning({ name: sql`upper(${usersMySchemaTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: update with returning partial', async () => { await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where(eq(usersMySchemaTable.name, 'John')) .returning({ id: usersMySchemaTable.id, name: usersMySchemaTable.name, }); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('mySchema :: delete with returning all fields', async () => { const now = Date.now(); await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); expect(users).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); test('mySchema :: insert + select', async () => { await db.insert(usersMySchemaTable).values({ name: 'John' }); const result = await db.select().from(usersMySchemaTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersMySchemaTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersMySchemaTable); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('mySchema :: insert with overridden default values', async () => { await db.insert(usersMySchemaTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersMySchemaTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('mySchema :: insert many', async () => { await db.insert(usersMySchemaTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name, jsonb: usersMySchemaTable.jsonb, verified: usersMySchemaTable.verified, }).from(usersMySchemaTable); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('mySchema :: select with group by as field', async () => { await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.name); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('mySchema :: select with group by as column + sql', async () => { await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`); expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); test('mySchema :: build query', async () => { const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) .toSQL(); expect(query).toEqual({ sql: 'select "id", "name" from "mySchema"."users" group by "mySchema"."users"."id", "mySchema"."users"."name"', params: [], }); }); test('mySchema :: partial join with alias', async () => { const customerAlias = alias(usersMySchemaTable, 'customer'); await db.insert(usersMySchemaTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersMySchemaTable.id, name: usersMySchemaTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }).from(usersMySchemaTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersMySchemaTable.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); test('mySchema :: insert with spaces', async () => { await db.insert(usersMySchemaTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( usersMySchemaTable, ); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('mySchema :: prepared statement with placeholder in .limit', async () => { await db.insert(usersMySchemaTable).values({ name: 'John' }); const stmt = db .select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name, }) .from(usersMySchemaTable) .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) .limit(sql.placeholder('limit')) .prepare('mySchema_stmt_limit'); const result = await stmt.execute({ id: 1, limit: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); expect(result).toHaveLength(1); }); test('mySchema :: build query insert with onConflict do update / multiple columns', async () => { const query = db.insert(usersMySchemaTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: [usersMySchemaTable.id, usersMySchemaTable.name], set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', params: ['John', '["foo","bar"]', 'John1'], }); }); test('mySchema :: build query insert with onConflict do nothing + target', async () => { const query = db.insert(usersMySchemaTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing({ target: usersMySchemaTable.id }) .toSQL(); expect(query).toEqual({ sql: 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', params: ['John', '["foo","bar"]'], }); }); test('mySchema :: select from tables with same name from different schema using alias', async () => { await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); await db.insert(usersTable).values({ id: 11, name: 'Hans' }); const customerAlias = alias(usersTable, 'customer'); const result = await db .select().from(usersMySchemaTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(customerAlias.id, 11)); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', verified: false, jsonb: null, createdAt: result[0]!.users.createdAt, }, customer: { id: 11, name: 'Hans', verified: false, jsonb: null, createdAt: result[0]!.customer!.createdAt, }, }]); }); test('mySchema :: view', async () => { const newYorkers1 = mySchema.view('new_yorkers') .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); const newYorkers2 = mySchema.view('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); const newYorkers3 = mySchema.view('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).existing(); await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2MySchemaTable).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop view ${newYorkers1}`); }); test('mySchema :: materialized view', async () => { const newYorkers1 = mySchema.materializedView('new_yorkers') .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); const newYorkers2 = mySchema.materializedView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); const newYorkers3 = mySchema.materializedView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).existing(); await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2MySchemaTable).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1); expect(result).toEqual([]); } await db.refreshMaterializedView(newYorkers1); { const result = await db.select().from(newYorkers1); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop materialized view ${newYorkers1}`); }); test('limit 0', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() .from(usersTable) .limit(0); expect(users).toEqual([]); }); test('limit -1', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() .from(usersTable) .limit(-1); expect(users.length).toBeGreaterThan(0); }); test('Object keys as column names', async () => { // Tests the following: // Column with required config // Column with optional config without providing a value // Column with optional config providing a value // Column without config const users = pgTable('users', { id: bigserial({ mode: 'number' }).primaryKey(), firstName: varchar(), lastName: varchar({ length: 50 }), admin: boolean(), }); await db.execute(sql`drop table if exists users`); await db.execute( sql` create table users ( "id" bigserial primary key, "firstName" varchar, "lastName" varchar(50), "admin" boolean ) `, ); await db.insert(users).values([ { firstName: 'John', lastName: 'Doe', admin: true }, { firstName: 'Jane', lastName: 'Smith', admin: false }, ]); const result = await db .select({ id: users.id, firstName: users.firstName, lastName: users.lastName }) .from(users) .where(eq(users.admin, true)); expect(result).toEqual([ { id: 1, firstName: 'John', lastName: 'Doe' }, ]); await db.execute(sql`drop table users`); }); test.skip('proper json and jsonb handling', async () => { const jsonTable = pgTable('json_table', { json: json('json').$type<{ name: string; age: number }>(), jsonb: jsonb('jsonb').$type<{ name: string; age: number }>(), }); await db.execute(sql`drop table if exists ${jsonTable}`); await db.execute(sql`create table ${jsonTable} (json json, jsonb jsonb)`); await db.insert(jsonTable).values({ json: { name: 'Tom', age: 75 }, jsonb: { name: 'Pete', age: 23 } }); const result = await db.select().from(jsonTable); const justNames = await db.select({ name1: sql`${jsonTable.json}->>'name'`.as('name1'), name2: sql`${jsonTable.jsonb}->>'name'`.as('name2'), }).from(jsonTable); expect(result).toStrictEqual([ { json: { name: 'Tom', age: 75 }, jsonb: { name: 'Pete', age: 23 }, }, ]); expect(justNames).toStrictEqual([ { name1: 'Tom', name2: 'Pete', }, ]); }); test.todo('set json/jsonb fields with objects and retrieve with the ->> operator', async () => { const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ json: obj, jsonb: obj, }); const result = await db.select({ jsonStringField: sql`${jsonTestTable.json}->>'string'`, jsonNumberField: sql`${jsonTestTable.json}->>'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->>'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->>'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ jsonStringField: testString, jsonNumberField: String(testNumber), jsonbStringField: testString, jsonbNumberField: String(testNumber), }]); }); test.todo('set json/jsonb fields with strings and retrieve with the ->> operator', async () => { const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ json: sql`${JSON.stringify(obj)}`, jsonb: sql`${JSON.stringify(obj)}`, }); const result = await db.select({ jsonStringField: sql`${jsonTestTable.json}->>'string'`, jsonNumberField: sql`${jsonTestTable.json}->>'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->>'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->>'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ jsonStringField: testString, jsonNumberField: String(testNumber), jsonbStringField: testString, jsonbNumberField: String(testNumber), }]); }); test.todo('set json/jsonb fields with objects and retrieve with the -> operator', async () => { const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ json: obj, jsonb: obj, }); const result = await db.select({ jsonStringField: sql`${jsonTestTable.json}->'string'`, jsonNumberField: sql`${jsonTestTable.json}->'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ jsonStringField: testString, jsonNumberField: testNumber, jsonbStringField: testString, jsonbNumberField: testNumber, }]); }); test.todo('set json/jsonb fields with strings and retrieve with the -> operator', async () => { const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ json: sql`${JSON.stringify(obj)}`, jsonb: sql`${JSON.stringify(obj)}`, }); const result = await db.select({ jsonStringField: sql`${jsonTestTable.json}->'string'`, jsonNumberField: sql`${jsonTestTable.json}->'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ jsonStringField: testString, jsonNumberField: testNumber, jsonbStringField: testString, jsonbNumberField: testNumber, }]); }); test('update ... from', async () => { await db.insert(cities2Table).values([ { name: 'New York City' }, { name: 'Seattle' }, ]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, ]); const result = await db .update(users2Table) .set({ cityId: cities2Table.id, }) .from(cities2Table) .where(and(eq(cities2Table.name, 'Seattle'), eq(users2Table.name, 'John'))) .returning(); expect(result).toStrictEqual([{ id: 1, name: 'John', cityId: 2, cities: { id: 2, name: 'Seattle', }, }]); }); test('update ... from with alias', async () => { await db.insert(cities2Table).values([ { name: 'New York City' }, { name: 'Seattle' }, ]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, ]); const users = alias(users2Table, 'u'); const cities = alias(cities2Table, 'c'); const result = await db .update(users) .set({ cityId: cities.id, }) .from(cities) .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) .returning(); expect(result).toStrictEqual([{ id: 1, name: 'John', cityId: 2, c: { id: 2, name: 'Seattle', }, }]); }); test('update ... from with join', async () => { const states = pgTable('states', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const cities = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), stateId: integer('state_id').references(() => states.id), }); const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull().references(() => cities.id), }); await db.execute(sql`drop table if exists "states" cascade`); await db.execute(sql`drop table if exists "cities" cascade`); await db.execute(sql`drop table if exists "users" cascade`); await db.execute(sql` create table "states" ( "id" serial primary key, "name" text not null ) `); await db.execute(sql` create table "cities" ( "id" serial primary key, "name" text not null, "state_id" integer references "states"("id") ) `); await db.execute(sql` create table "users" ( "id" serial primary key, "name" text not null, "city_id" integer not null references "cities"("id") ) `); await db.insert(states).values([ { name: 'New York' }, { name: 'Washington' }, ]); await db.insert(cities).values([ { name: 'New York City', stateId: 1 }, { name: 'Seattle', stateId: 2 }, { name: 'London' }, ]); await db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 3 }, ]); const result1 = await db .update(users) .set({ cityId: cities.id, }) .from(cities) .leftJoin(states, eq(cities.stateId, states.id)) .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) .returning(); const result2 = await db .update(users) .set({ cityId: cities.id, }) .from(cities) .leftJoin(states, eq(cities.stateId, states.id)) .where(and(eq(cities.name, 'London'), eq(users.name, 'Jack'))) .returning(); expect(result1).toStrictEqual([{ id: 1, name: 'John', cityId: 2, cities: { id: 2, name: 'Seattle', stateId: 2, }, states: { id: 2, name: 'Washington', }, }]); expect(result2).toStrictEqual([{ id: 3, name: 'Jack', cityId: 3, cities: { id: 3, name: 'London', stateId: null, }, states: null, }]); }); test('insert into ... select', async () => { const notifications = pgTable('notifications', { id: serial('id').primaryKey(), sentAt: timestamp('sent_at').notNull().defaultNow(), message: text('message').notNull(), }); const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const userNotications = pgTable('user_notifications', { userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), notificationId: integer('notification_id').notNull().references(() => notifications.id, { onDelete: 'cascade', }), }, (t) => ({ pk: primaryKey({ columns: [t.userId, t.notificationId] }), })); await db.execute(sql`drop table if exists notifications`); await db.execute(sql`drop table if exists users`); await db.execute(sql`drop table if exists user_notifications`); await db.execute(sql` create table notifications ( id serial primary key, sent_at timestamp not null default now(), message text not null ) `); await db.execute(sql` create table users ( id serial primary key, name text not null ) `); await db.execute(sql` create table user_notifications ( user_id int references users(id) on delete cascade, notification_id int references notifications(id) on delete cascade, primary key (user_id, notification_id) ) `); const newNotification = await db .insert(notifications) .values({ message: 'You are one of the 3 lucky winners!' }) .returning({ id: notifications.id }) .then((result) => result[0]); await db.insert(users).values([ { name: 'Alice' }, { name: 'Bob' }, { name: 'Charlie' }, { name: 'David' }, { name: 'Eve' }, ]); const sentNotifications = await db .insert(userNotications) .select( db .select({ userId: users.id, notificationId: sql`${newNotification!.id}`.as('notification_id'), }) .from(users) .where(inArray(users.name, ['Alice', 'Charlie', 'Eve'])) .orderBy(asc(users.id)), ) .returning(); expect(sentNotifications).toStrictEqual([ { userId: 1, notificationId: newNotification!.id }, { userId: 3, notificationId: newNotification!.id }, { userId: 5, notificationId: newNotification!.id }, ]); }); test('insert into ... select with keys in different order', async () => { const users1 = pgTable('users1', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const users2 = pgTable('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists users1`); await db.execute(sql`drop table if exists users2`); await db.execute(sql` create table users1 ( id serial primary key, name text not null ) `); await db.execute(sql` create table users2 ( id serial primary key, name text not null ) `); expect( () => db .insert(users1) .select( db .select({ name: users2.name, id: users2.id, }) .from(users2), ), ).toThrowError(); }); test('policy', () => { { const policy = pgPolicy('test policy'); expect(is(policy, PgPolicy)).toBe(true); expect(policy.name).toBe('test policy'); } { const policy = pgPolicy('test policy', { as: 'permissive', for: 'all', to: 'public', using: sql`1=1`, withCheck: sql`1=1`, }); expect(is(policy, PgPolicy)).toBe(true); expect(policy.name).toBe('test policy'); expect(policy.as).toBe('permissive'); expect(policy.for).toBe('all'); expect(policy.to).toBe('public'); const dialect = new PgDialect(); expect(is(policy.using, SQL)).toBe(true); expect(dialect.sqlToQuery(policy.using!).sql).toBe('1=1'); expect(is(policy.withCheck, SQL)).toBe(true); expect(dialect.sqlToQuery(policy.withCheck!).sql).toBe('1=1'); } { const policy = pgPolicy('test policy', { to: 'custom value', }); expect(policy.to).toBe('custom value'); } { const p1 = pgPolicy('test policy'); const p2 = pgPolicy('test policy 2', { as: 'permissive', for: 'all', to: 'public', using: sql`1=1`, withCheck: sql`1=1`, }); const table = pgTable('table_with_policy', { id: serial('id').primaryKey(), name: text('name').notNull(), }, () => ({ p1, p2, })); const config = getTableConfig(table); expect(config.policies).toHaveLength(2); expect(config.policies[0]).toBe(p1); expect(config.policies[1]).toBe(p2); } }); test('neon: policy', () => { { const policy = crudPolicy({ read: true, modify: true, role: authenticatedRole, }); for (const it of Object.values(policy)) { expect(is(it, PgPolicy)).toBe(true); expect(it?.to).toStrictEqual(authenticatedRole); it?.using ? expect(it.using).toStrictEqual(sql`true`) : ''; it?.withCheck ? expect(it.withCheck).toStrictEqual(sql`true`) : ''; } } { const table = pgTable('name', { id: integer('id'), }, (t) => [ index('name').on(t.id), crudPolicy({ read: true, modify: true, role: authenticatedRole, }), primaryKey({ columns: [t.id], name: 'custom' }), ]); const { policies, indexes, primaryKeys } = getTableConfig(table); expect(policies.length).toBe(4); expect(indexes.length).toBe(1); expect(primaryKeys.length).toBe(1); expect(policies[0]?.name === 'crud-custom-policy-modify'); expect(policies[1]?.name === 'crud-custom-policy-read'); } }); test('neon: neon_auth', () => { const usersSyncTable = usersSync; const { columns, schema, name } = getTableConfig(usersSyncTable); expect(name).toBe('users_sync'); expect(schema).toBe('neon_auth'); expect(columns).toHaveLength(7); }); test('Enable RLS function', () => { const usersWithRLS = pgTable('users', { id: integer(), }).enableRLS(); const config1 = getTableConfig(usersWithRLS); const usersNoRLS = pgTable('users', { id: integer(), }); const config2 = getTableConfig(usersNoRLS); expect(config1.enableRLS).toBeTruthy(); expect(config2.enableRLS).toBeFalsy(); }); test('$count separate', async () => { const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.$count(countTestTable); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual(4); }); test('$count embedded', async () => { const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.select({ count: db.$count(countTestTable), }).from(countTestTable); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual([ { count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }, ]); }); test('$count separate reuse', async () => { const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = db.$count(countTestTable); const count1 = await count; await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); const count2 = await count; await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); const count3 = await count; await db.execute(sql`drop table ${countTestTable}`); expect(count1).toStrictEqual(4); expect(count2).toStrictEqual(5); expect(count3).toStrictEqual(6); }); test('$count embedded reuse', async () => { const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = db.select({ count: db.$count(countTestTable), }).from(countTestTable); const count1 = await count; await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); const count2 = await count; await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); const count3 = await count; await db.execute(sql`drop table ${countTestTable}`); expect(count1).toStrictEqual([ { count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }, ]); expect(count2).toStrictEqual([ { count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }, ]); expect(count3).toStrictEqual([ { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, ]); }); test('$count separate with filters', async () => { const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.$count(countTestTable, gt(countTestTable.id, 1)); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual(3); }); test('$count embedded with filters', async () => { const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.select({ count: db.$count(countTestTable, gt(countTestTable.id, 1)), }).from(countTestTable); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual([ { count: 3 }, { count: 3 }, { count: 3 }, { count: 3 }, ]); }); test('insert multiple rows into table with generated identity column', async () => { const identityColumnsTable = pgTable('identity_columns_table', { id: integer('id').generatedAlwaysAsIdentity(), id1: integer('id1').generatedByDefaultAsIdentity(), name: text('name').notNull(), }); // not passing identity columns await db.execute(sql`drop table if exists ${identityColumnsTable}`); await db.execute( sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, ); let result = await db.insert(identityColumnsTable).values([ { name: 'John' }, { name: 'Jane' }, { name: 'Bob' }, ]).returning(); expect(result).toEqual([ { id: 1, id1: 1, name: 'John' }, { id: 2, id1: 2, name: 'Jane' }, { id: 3, id1: 3, name: 'Bob' }, ]); // passing generated by default as identity column await db.execute(sql`drop table if exists ${identityColumnsTable}`); await db.execute( sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, ); result = await db.insert(identityColumnsTable).values([ { name: 'John', id1: 3 }, { name: 'Jane', id1: 5 }, { name: 'Bob', id1: 5 }, ]).returning(); expect(result).toEqual([ { id: 1, id1: 3, name: 'John' }, { id: 2, id1: 5, name: 'Jane' }, { id: 3, id1: 5, name: 'Bob' }, ]); // passing all identity columns await db.execute(sql`drop table if exists ${identityColumnsTable}`); await db.execute( sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, ); result = await db.insert(identityColumnsTable).overridingSystemValue().values([ { name: 'John', id: 2, id1: 3 }, { name: 'Jane', id: 4, id1: 5 }, { name: 'Bob', id: 4, id1: 5 }, ]).returning(); expect(result).toEqual([ { id: 2, id1: 3, name: 'John' }, { id: 4, id1: 5, name: 'Jane' }, { id: 4, id1: 5, name: 'Bob' }, ]); }); test('insert as cte', async () => { const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); const sq1 = db.$with('sq').as( db.insert(users).values({ name: 'John' }).returning(), ); const result1 = await db.with(sq1).select().from(sq1); const result2 = await db.with(sq1).select({ id: sq1.id }).from(sq1); const sq2 = db.$with('sq').as( db.insert(users).values({ name: 'Jane' }).returning({ id: users.id, name: users.name }), ); const result3 = await db.with(sq2).select().from(sq2); const result4 = await db.with(sq2).select({ name: sq2.name }).from(sq2); expect(result1).toEqual([{ id: 1, name: 'John' }]); expect(result2).toEqual([{ id: 2 }]); expect(result3).toEqual([{ id: 3, name: 'Jane' }]); expect(result4).toEqual([{ name: 'Jane' }]); }); test('update as cte', async () => { const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), age: integer('age').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, age integer not null)`, ); await db.insert(users).values([ { name: 'John', age: 30 }, { name: 'Jane', age: 30 }, ]); const sq1 = db.$with('sq').as( db.update(users).set({ age: 25 }).where(eq(users.name, 'John')).returning(), ); const result1 = await db.with(sq1).select().from(sq1); await db.update(users).set({ age: 30 }); const result2 = await db.with(sq1).select({ age: sq1.age }).from(sq1); const sq2 = db.$with('sq').as( db.update(users).set({ age: 20 }).where(eq(users.name, 'Jane')).returning({ name: users.name, age: users.age }), ); const result3 = await db.with(sq2).select().from(sq2); await db.update(users).set({ age: 30 }); const result4 = await db.with(sq2).select({ age: sq2.age }).from(sq2); expect(result1).toEqual([{ id: 1, name: 'John', age: 25 }]); expect(result2).toEqual([{ age: 25 }]); expect(result3).toEqual([{ name: 'Jane', age: 20 }]); expect(result4).toEqual([{ age: 20 }]); }); test('delete as cte', async () => { const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); await db.insert(users).values([ { name: 'John' }, { name: 'Jane' }, ]); const sq1 = db.$with('sq').as( db.delete(users).where(eq(users.name, 'John')).returning(), ); const result1 = await db.with(sq1).select().from(sq1); await db.insert(users).values({ name: 'John' }); const result2 = await db.with(sq1).select({ name: sq1.name }).from(sq1); const sq2 = db.$with('sq').as( db.delete(users).where(eq(users.name, 'Jane')).returning({ id: users.id, name: users.name }), ); const result3 = await db.with(sq2).select().from(sq2); await db.insert(users).values({ name: 'Jane' }); const result4 = await db.with(sq2).select({ name: sq2.name }).from(sq2); expect(result1).toEqual([{ id: 1, name: 'John' }]); expect(result2).toEqual([{ name: 'John' }]); expect(result3).toEqual([{ id: 2, name: 'Jane' }]); expect(result4).toEqual([{ name: 'Jane' }]); }); test('sql operator as cte', async () => { const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); await db.insert(users).values([ { name: 'John' }, { name: 'Jane' }, ]); const sq1 = db.$with('sq', { userId: users.id, data: { name: users.name, }, }).as(sql`select * from ${users} where ${users.name} = 'John'`); const result1 = await db.with(sq1).select().from(sq1); const sq2 = db.$with('sq', { userId: users.id, data: { name: users.name, }, }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); const result2 = await db.with(sq2).select().from(sq1); expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); }); test('all types', async () => { await db.execute(sql`CREATE TYPE "public"."en" AS ENUM('enVal1', 'enVal2');`); await db.execute(sql` CREATE TABLE "all_types" ( "serial" serial NOT NULL, "bigserial53" bigserial NOT NULL, "bigserial64" bigserial, "int" integer, "bigint53" bigint, "bigint64" bigint, "bool" boolean, "char" char, "cidr" "cidr", "date" date, "date_str" date, "double" double precision, "enum" "en", "inet" "inet", "interval" interval, "json" json, "jsonb" jsonb, "line" "line", "line_tuple" "line", "macaddr" "macaddr", "macaddr8" "macaddr8", "numeric" numeric, "numeric_num" numeric, "numeric_big" numeric, "point" "point", "point_tuple" "point", "real" real, "smallint" smallint, "smallserial" "smallserial" NOT NULL, "text" text, "time" time, "timestamp" timestamp, "timestamp_tz" timestamp with time zone, "timestamp_str" timestamp, "timestamp_tz_str" timestamp with time zone, "uuid" uuid, "varchar" varchar, "arrint" integer[], "arrbigint53" bigint[], "arrbigint64" bigint[], "arrbool" boolean[], "arrchar" char[], "arrcidr" "cidr"[], "arrdate" date[], "arrdate_str" date[], "arrdouble" double precision[], "arrenum" "en"[], "arrinet" "inet"[], "arrinterval" interval[], "arrjson" json[], "arrjsonb" jsonb[], "arrline" "line"[], "arrline_tuple" "line"[], "arrmacaddr" "macaddr"[], "arrmacaddr8" "macaddr8"[], "arrnumeric" numeric[], "arrnumeric_num" numeric[], "arrnumeric_big" numeric[], "arrpoint" "point"[], "arrpoint_tuple" "point"[], "arrreal" real[], "arrsmallint" smallint[], "arrtext" text[], "arrtime" time[], "arrtimestamp" timestamp[], "arrtimestamp_tz" timestamp with time zone[], "arrtimestamp_str" timestamp[], "arrtimestamp_tz_str" timestamp with time zone[], "arruuid" uuid[], "arrvarchar" varchar[] ); `); await db.insert(allTypesTable).values({ serial: 1, smallserial: 15, bigint53: 9007199254740991, bigint64: 5044565289845416380n, bigserial53: 9007199254740991, bigserial64: 5044565289845416380n, bool: true, char: 'c', cidr: '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128', inet: '192.168.0.1/24', macaddr: '08:00:2b:01:02:03', macaddr8: '08:00:2b:01:02:03:04:05', date: new Date(1741743161623), dateStr: new Date(1741743161623).toISOString(), double: 15.35325689124218, enum: 'enVal1', int: 621, interval: '2 months ago', json: { str: 'strval', arr: ['str', 10], }, jsonb: { str: 'strvalb', arr: ['strb', 11], }, line: { a: 1, b: 2, c: 3, }, lineTuple: [1, 2, 3], numeric: '475452353476', numericNum: 9007199254740991, numericBig: 5044565289845416380n, point: { x: 24.5, y: 49.6, }, pointTuple: [57.2, 94.3], real: 1.048596, smallint: 10, text: 'TEXT STRING', time: '13:59:28', timestamp: new Date(1741743161623), timestampTz: new Date(1741743161623), timestampStr: new Date(1741743161623).toISOString(), timestampTzStr: new Date(1741743161623).toISOString(), uuid: 'b77c9eef-8e28-4654-88a1-7221b46d2a1c', varchar: 'C4-', arrbigint53: [9007199254740991], arrbigint64: [5044565289845416380n], arrbool: [true], arrchar: ['c'], arrcidr: ['2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'], arrinet: ['192.168.0.1/24'], arrmacaddr: ['08:00:2b:01:02:03'], arrmacaddr8: ['08:00:2b:01:02:03:04:05'], arrdate: [new Date(1741743161623)], arrdateStr: [new Date(1741743161623).toISOString()], arrdouble: [15.35325689124218], arrenum: ['enVal1'], arrint: [621], arrinterval: ['2 months ago'], arrjson: [{ str: 'strval', arr: ['str', 10], }], arrjsonb: [{ str: 'strvalb', arr: ['strb', 11], }], arrline: [{ a: 1, b: 2, c: 3, }], arrlineTuple: [[1, 2, 3]], arrnumeric: ['475452353476'], arrnumericNum: [9007199254740991], arrnumericBig: [5044565289845416380n], arrpoint: [{ x: 24.5, y: 49.6, }], arrpointTuple: [[57.2, 94.3]], arrreal: [1.048596], arrsmallint: [10], arrtext: ['TEXT STRING'], arrtime: ['13:59:28'], arrtimestamp: [new Date(1741743161623)], arrtimestampTz: [new Date(1741743161623)], arrtimestampStr: [new Date(1741743161623).toISOString()], arrtimestampTzStr: [new Date(1741743161623).toISOString()], arruuid: ['b77c9eef-8e28-4654-88a1-7221b46d2a1c'], arrvarchar: ['C4-'], }); const rawRes = await db.select().from(allTypesTable); type ExpectedType = { serial: number; bigserial53: number; bigserial64: bigint; int: number | null; bigint53: number | null; bigint64: bigint | null; bool: boolean | null; char: string | null; cidr: string | null; date: Date | null; dateStr: string | null; double: number | null; enum: 'enVal1' | 'enVal2' | null; inet: string | null; interval: string | null; json: unknown; jsonb: unknown; line: { a: number; b: number; c: number; } | null; lineTuple: [number, number, number] | null; macaddr: string | null; macaddr8: string | null; numeric: string | null; numericNum: number | null; numericBig: bigint | null; point: { x: number; y: number; } | null; pointTuple: [number, number] | null; real: number | null; smallint: number | null; smallserial: number; text: string | null; time: string | null; timestamp: Date | null; timestampTz: Date | null; timestampStr: string | null; timestampTzStr: string | null; uuid: string | null; varchar: string | null; arrint: number[] | null; arrbigint53: number[] | null; arrbigint64: bigint[] | null; arrbool: boolean[] | null; arrchar: string[] | null; arrcidr: string[] | null; arrdate: Date[] | null; arrdateStr: string[] | null; arrdouble: number[] | null; arrenum: ('enVal1' | 'enVal2')[] | null; arrinet: string[] | null; arrinterval: string[] | null; arrjson: unknown[] | null; arrjsonb: unknown[] | null; arrline: { a: number; b: number; c: number; }[] | null; arrlineTuple: [number, number, number][] | null; arrmacaddr: string[] | null; arrmacaddr8: string[] | null; arrnumeric: string[] | null; arrnumericNum: number[] | null; arrnumericBig: bigint[] | null; arrpoint: { x: number; y: number }[] | null; arrpointTuple: [number, number][] | null; arrreal: number[] | null; arrsmallint: number[] | null; arrtext: string[] | null; arrtime: string[] | null; arrtimestamp: Date[] | null; arrtimestampTz: Date[] | null; arrtimestampStr: string[] | null; arrtimestampTzStr: string[] | null; arruuid: string[] | null; arrvarchar: string[] | null; }[]; const expectedRes: ExpectedType = [ { serial: 1, bigserial53: 9007199254740991, bigserial64: 5044565289845416380n, int: 621, bigint53: 9007199254740991, bigint64: 5044565289845416380n, bool: true, char: 'c', cidr: '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128', date: new Date('2025-03-12T00:00:00.000Z'), dateStr: '2025-03-12', double: 15.35325689124218, enum: 'enVal1', inet: '192.168.0.1/24', interval: '-2 mons', json: { str: 'strval', arr: ['str', 10] }, jsonb: { arr: ['strb', 11], str: 'strvalb' }, line: { a: 1, b: 2, c: 3 }, lineTuple: [1, 2, 3], macaddr: '08:00:2b:01:02:03', macaddr8: '08:00:2b:01:02:03:04:05', numeric: '475452353476', numericNum: 9007199254740991, numericBig: 5044565289845416380n, point: { x: 24.5, y: 49.6 }, pointTuple: [57.2, 94.3], real: 1.048596, smallint: 10, smallserial: 15, text: 'TEXT STRING', time: '13:59:28', timestamp: new Date('2025-03-12T01:32:41.623Z'), timestampTz: new Date('2025-03-12T01:32:41.623Z'), timestampStr: '2025-03-12 01:32:41.623', timestampTzStr: '2025-03-12 01:32:41.623+00', uuid: 'b77c9eef-8e28-4654-88a1-7221b46d2a1c', varchar: 'C4-', arrint: [621], arrbigint53: [9007199254740991], arrbigint64: [5044565289845416380n], arrbool: [true], arrchar: ['c'], arrcidr: ['2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'], arrdate: [new Date('2025-03-12T00:00:00.000Z')], arrdateStr: ['2025-03-12'], arrdouble: [15.35325689124218], arrenum: ['enVal1'], arrinet: ['192.168.0.1/24'], arrinterval: ['-2 mons'], arrjson: [{ str: 'strval', arr: ['str', 10] }], arrjsonb: [{ arr: ['strb', 11], str: 'strvalb' }], arrline: [{ a: 1, b: 2, c: 3 }], arrlineTuple: [[1, 2, 3]], arrmacaddr: ['08:00:2b:01:02:03'], arrmacaddr8: ['08:00:2b:01:02:03:04:05'], arrnumeric: ['475452353476'], arrnumericNum: [9007199254740991], arrnumericBig: [5044565289845416380n], arrpoint: [{ x: 24.5, y: 49.6 }], arrpointTuple: [[57.2, 94.3]], arrreal: [1.048596], arrsmallint: [10], arrtext: ['TEXT STRING'], arrtime: ['13:59:28'], arrtimestamp: [new Date('2025-03-12T01:32:41.623Z')], arrtimestampTz: [new Date('2025-03-12T01:32:41.623Z')], arrtimestampStr: ['2025-03-12 01:32:41.623'], arrtimestampTzStr: ['2025-03-12 01:32:41.623+00'], arruuid: ['b77c9eef-8e28-4654-88a1-7221b46d2a1c'], arrvarchar: ['C4-'], }, ]; Expect>; expect(rawRes).toStrictEqual(expectedRes); }); ================================================ FILE: integration-tests/tests/bun/sqlite-nw.test.ts ================================================ /// import { Database } from 'bun:sqlite'; import { DefaultLogger, sql } from 'drizzle-orm'; import type { BunSQLiteDatabase } from 'drizzle-orm/bun-sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; import { integer, real, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { suite } from 'uvu'; const order = sqliteTable('Order', { id: integer('Id'), customerId: text('CustomerId'), employeeId: integer('EmployeeId'), orderDate: text('OrderDate'), requiredDate: text('RequiredDate'), shippedDate: text('ShippedDate'), shipVia: integer('ShipVia'), freight: real('Freight'), shipName: text('ShipName'), shipAddress: text('ShipAddress'), shipCity: text('ShipCity'), shipRegion: text('ShipRegion'), shipPostalCode: text('ShipPostalCode'), shipCountry: text('ShipCountry'), }); interface Context { db: BunSQLiteDatabase; } const test = suite('sqlite-bun'); test.before((ctx) => { try { const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; const client = new Database(dbPath); ctx.db = drizzle(client, { logger: new DefaultLogger() }); } catch (e) { console.error(e); } }); // test.before.each((ctx) => { // try { // const { db } = ctx; // db.run(sql`drop table if exists ${usersTable}`); // db.run(sql` // create table ${usersTable} ( // id integer primary key, // name text not null, // verified integer not null default 0, // json blob, // created_at text not null default (strftime('%s', 'now')) // )`); // } catch (e) { // console.error(e); // } // }); test('select', (ctx) => { const { db } = ctx; // TODO: convert to normalniy test console.log(db.select().from(order).where(sql`"Order"."ShipCountry" = "Germany"`).all()[0]); console.log(db.select().from(order).where(sql`"Order"."ShipCountry" = ${'Germany'}`).prepare().all()[0]); }); test.run(); // test.serial('select partial', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John' }).execute(); // const result = db.select({ name: usersTable.name }).from(usersTable).execute(); // t.deepEqual(result, [{ name: 'John' }]); // }); // test.serial('insert with auto increment', (t) => { // const { db } = t.context; // db.insert(usersTable).values( // { name: 'John' }, // { name: 'Jane' }, // { name: 'George' }, // { name: 'Austin' }, // ).execute(); // const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).execute(); // t.deepEqual(result, [ // { id: 1, name: 'John' }, // { id: 2, name: 'Jane' }, // { id: 3, name: 'George' }, // { id: 4, name: 'Austin' }, // ]); // }); // test.serial('insert with default values', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John' }).execute(); // const result = db.select({ // id: usersTable.id, // name: usersTable.name, // verified: usersTable.verified, // }).from(usersTable).execute(); // t.deepEqual(result, [{ id: 1, name: 'John', verified: 0 }]); // }); // test.serial('insert with overridden default values', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John', verified: 1 }).execute(); // const result = db.select({ // id: usersTable.id, // name: usersTable.name, // verified: usersTable.verified, // }).from(usersTable).execute(); // t.deepEqual(result, [{ id: 1, name: 'John', verified: 1 }]); // }); // test.serial('update with returning all fields', (t) => { // const { db } = t.context; // const now = Date.now(); // db.insert(usersTable).values({ name: 'John' }).execute(); // const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning().execute(); // t.assert(users[0]!.createdAt instanceof Date); // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); // t.deepEqual(users, [{ id: 1, name: 'Jane', verified: 0, json: null, createdAt: users[0]!.createdAt }]); // }); // test.serial('update with returning partial', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John' }).execute(); // const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ // id: usersTable.id, // name: usersTable.name, // }).execute(); // t.deepEqual(users, [{ id: 1, name: 'Jane' }]); // }); // test.serial('delete with returning all fields', (t) => { // const { db } = t.context; // const now = Date.now(); // db.insert(usersTable).values({ name: 'John' }).execute(); // const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().execute(); // t.assert(users[0]!.createdAt instanceof Date); // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); // t.deepEqual(users, [{ id: 1, name: 'John', verified: 0, json: null, createdAt: users[0]!.createdAt }]); // }); // test.serial('delete with returning partial', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John' }).execute(); // const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ // id: usersTable.id, // name: usersTable.name, // }).execute(); // t.deepEqual(users, [{ id: 1, name: 'John' }]); // }); // test.serial('insert + select', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John' }).execute(); // const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).execute(); // t.deepEqual(result, [{ id: 1, name: 'John' }]); // db.insert(usersTable).values({ name: 'Jane' }).execute(); // const result2 = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).execute(); // t.deepEqual(result2, [{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); // }); // test.serial('json insert', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John', json: ['foo', 'bar'] }).execute(); // const result = db.select({ // id: usersTable.id, // name: usersTable.name, // json: usersTable.json, // }).from(usersTable).execute(); // t.deepEqual(result, [{ id: 1, name: 'John', json: ['foo', 'bar'] }]); // }); // test.serial('insert many', (t) => { // const { db } = t.context; // db.insert(usersTable).values( // { name: 'John' }, // { name: 'Bruce', json: ['foo', 'bar'] }, // { name: 'Jane' }, // { name: 'Austin', verified: 1 }, // ).execute(); // const result = db.select({ // id: usersTable.id, // name: usersTable.name, // json: usersTable.json, // verified: usersTable.verified, // }).from(usersTable).execute(); // t.deepEqual(result, [ // { id: 1, name: 'John', json: null, verified: 0 }, // { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: 0 }, // { id: 3, name: 'Jane', json: null, verified: 0 }, // { id: 4, name: 'Austin', json: null, verified: 1 }, // ]); // }); // test.serial('insert many with returning', (t) => { // const { db } = t.context; // const result = db.insert(usersTable).values( // { name: 'John' }, // { name: 'Bruce', json: ['foo', 'bar'] }, // { name: 'Jane' }, // { name: 'Austin', verified: 1 }, // ) // .returning({ // id: usersTable.id, // name: usersTable.name, // json: usersTable.json, // verified: usersTable.verified, // }) // .execute(); // t.deepEqual(result, [ // { id: 1, name: 'John', json: null, verified: 0 }, // { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: 0 }, // { id: 3, name: 'Jane', json: null, verified: 0 }, // { id: 4, name: 'Austin', json: null, verified: 1 }, // ]); // }); // test.serial('join with alias', (t) => { // const { db } = t.context; // const customerAlias = alias(usersTable, 'customer'); // db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).execute(); // const result = db // .select().from(usersTable) // .fields({ id: usersTable.id, name: usersTable.name }) // .leftJoin(customerAlias, eq(customerAlias.id, 11), { id: customerAlias.id, name: customerAlias.name }) // .where(eq(usersTable.id, 10)) // .execute(); // t.deepEqual(result, [{ // users: { id: 10, name: 'Ivan' }, // customer: { id: 11, name: 'Hans' }, // }]); // }); // test('insert with spaces', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: sql`'Jo h n'` }).execute(); // const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).execute(); // t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); // }); // test.after.always((t) => { // const ctx = t.context; // ctx.client?.close(); // }); ================================================ FILE: integration-tests/tests/bun/sqlite.test.ts ================================================ import { Database } from 'bun:sqlite'; import { beforeAll, beforeEach, expect, test } from 'bun:test'; import { sql } from 'drizzle-orm'; import type { BunSQLiteDatabase } from 'drizzle-orm/bun-sqlite'; import { drizzle } from 'drizzle-orm/bun-sqlite'; import { blob, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; const usersTable = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').notNull(), verified: integer('verified').notNull().default(0), json: blob('json', { mode: 'json' }).$type(), bigInt: blob('big_int', { mode: 'bigint' }), createdAt: integer('created_at', { mode: 'timestamp_ms' }).notNull().default(sql`strftime('%s', 'now')`), }); let db: BunSQLiteDatabase; beforeAll(async () => { try { const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; const client = new Database(dbPath); db = drizzle(client); } catch (e) { console.error(e); } }); beforeEach(async () => { try { db.run(sql`drop table if exists ${usersTable}`); db.run(sql` create table ${usersTable} ( id integer primary key, name text not null, verified integer not null default 0, json blob, big_int blob, created_at integer not null default (strftime('%s', 'now')) ) `); } catch (e) { console.error(e); } }); test.skip('select large integer', () => { const a = 1667476703000; const res = db.all<{ a: number }>(sql`select ${sql.raw(String(a))} as a`); const result = res[0]!; expect(result.a).toEqual(a); }); test('select all fields', () => { const now = Date.now(); db.insert(usersTable).values({ name: 'John' }).run(); const result = db.select().from(usersTable).all()[0]!; expect(result.createdAt).toBeInstanceOf(Date); expect(Math.abs(result.createdAt.getTime() - now)).toBeLessThan(100); expect(result).toEqual({ id: 1, name: 'John', verified: 0, json: null, createdAt: result.createdAt, bigInt: null }); }); test('select bigint', () => { db.insert(usersTable).values({ name: 'John', bigInt: BigInt(100) }).run(); const result = db.select({ bigInt: usersTable.bigInt }).from(usersTable).all()[0]!; expect(result).toEqual({ bigInt: BigInt(100) }); }); // test.serial('select partial', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John' }).execute(); // const result = db.select({ name: usersTable.name }).from(usersTable).execute(); // t.deepEqual(result, [{ name: 'John' }]); // }); // test.serial('insert with auto increment', (t) => { // const { db } = t.context; // db.insert(usersTable).values( // { name: 'John' }, // { name: 'Jane' }, // { name: 'George' }, // { name: 'Austin' }, // ).execute(); // const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).execute(); // t.deepEqual(result, [ // { id: 1, name: 'John' }, // { id: 2, name: 'Jane' }, // { id: 3, name: 'George' }, // { id: 4, name: 'Austin' }, // ]); // }); // test.serial('insert with default values', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John' }).execute(); // const result = db.select({ // id: usersTable.id, // name: usersTable.name, // verified: usersTable.verified, // }).from(usersTable).execute(); // t.deepEqual(result, [{ id: 1, name: 'John', verified: 0 }]); // }); // test.serial('insert with overridden default values', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John', verified: 1 }).execute(); // const result = db.select({ // id: usersTable.id, // name: usersTable.name, // verified: usersTable.verified, // }).from(usersTable).execute(); // t.deepEqual(result, [{ id: 1, name: 'John', verified: 1 }]); // }); // test.serial('update with returning all fields', (t) => { // const { db } = t.context; // const now = Date.now(); // db.insert(usersTable).values({ name: 'John' }).execute(); // const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning().execute(); // t.assert(users[0]!.createdAt instanceof Date); // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); // t.deepEqual(users, [{ id: 1, name: 'Jane', verified: 0, json: null, createdAt: users[0]!.createdAt }]); // }); // test.serial('update with returning partial', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John' }).execute(); // const users = db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ // id: usersTable.id, // name: usersTable.name, // }).execute(); // t.deepEqual(users, [{ id: 1, name: 'Jane' }]); // }); // test.serial('delete with returning all fields', (t) => { // const { db } = t.context; // const now = Date.now(); // db.insert(usersTable).values({ name: 'John' }).execute(); // const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().execute(); // t.assert(users[0]!.createdAt instanceof Date); // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); // t.deepEqual(users, [{ id: 1, name: 'John', verified: 0, json: null, createdAt: users[0]!.createdAt }]); // }); // test.serial('delete with returning partial', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John' }).execute(); // const users = db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ // id: usersTable.id, // name: usersTable.name, // }).execute(); // t.deepEqual(users, [{ id: 1, name: 'John' }]); // }); // test.serial('insert + select', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John' }).execute(); // const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).execute(); // t.deepEqual(result, [{ id: 1, name: 'John' }]); // db.insert(usersTable).values({ name: 'Jane' }).execute(); // const result2 = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).execute(); // t.deepEqual(result2, [{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); // }); // test.serial('json insert', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: 'John', json: ['foo', 'bar'] }).execute(); // const result = db.select({ // id: usersTable.id, // name: usersTable.name, // json: usersTable.json, // }).from(usersTable).execute(); // t.deepEqual(result, [{ id: 1, name: 'John', json: ['foo', 'bar'] }]); // }); // test.serial('insert many', (t) => { // const { db } = t.context; // db.insert(usersTable).values( // { name: 'John' }, // { name: 'Bruce', json: ['foo', 'bar'] }, // { name: 'Jane' }, // { name: 'Austin', verified: 1 }, // ).execute(); // const result = db.select({ // id: usersTable.id, // name: usersTable.name, // json: usersTable.json, // verified: usersTable.verified, // }).from(usersTable).execute(); // t.deepEqual(result, [ // { id: 1, name: 'John', json: null, verified: 0 }, // { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: 0 }, // { id: 3, name: 'Jane', json: null, verified: 0 }, // { id: 4, name: 'Austin', json: null, verified: 1 }, // ]); // }); // test.serial('insert many with returning', (t) => { // const { db } = t.context; // const result = db.insert(usersTable).values( // { name: 'John' }, // { name: 'Bruce', json: ['foo', 'bar'] }, // { name: 'Jane' }, // { name: 'Austin', verified: 1 }, // ) // .returning({ // id: usersTable.id, // name: usersTable.name, // json: usersTable.json, // verified: usersTable.verified, // }) // .execute(); // t.deepEqual(result, [ // { id: 1, name: 'John', json: null, verified: 0 }, // { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: 0 }, // { id: 3, name: 'Jane', json: null, verified: 0 }, // { id: 4, name: 'Austin', json: null, verified: 1 }, // ]); // }); // test.serial('join with alias', (t) => { // const { db } = t.context; // const customerAlias = alias(usersTable, 'customer'); // db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).execute(); // const result = db // .select().from(usersTable) // .fields({ id: usersTable.id, name: usersTable.name }) // .leftJoin(customerAlias, eq(customerAlias.id, 11), { id: customerAlias.id, name: customerAlias.name }) // .where(eq(usersTable.id, 10)) // .execute(); // t.deepEqual(result, [{ // users: { id: 10, name: 'Ivan' }, // customer: { id: 11, name: 'Hans' }, // }]); // }); // test('insert with spaces', (t) => { // const { db } = t.context; // db.insert(usersTable).values({ name: sql`'Jo h n'` }).execute(); // const result = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).execute(); // t.deepEqual(result, [{ id: 1, name: 'Jo h n' }]); // }); // test.after.always((t) => { // const ctx = t.context; // ctx.client?.close(); // }); ================================================ FILE: integration-tests/tests/common.ts ================================================ import { beforeEach } from 'vitest'; export function skipTests(names: string[]) { beforeEach((ctx) => { if (ctx.task.suite?.name === 'common' && names.includes(ctx.task.name)) { ctx.skip(); } }); } ================================================ FILE: integration-tests/tests/extensions/postgis/pg.test.ts ================================================ import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import type { NodePgDatabase } from 'drizzle-orm/node-postgres'; import { drizzle } from 'drizzle-orm/node-postgres'; import { bigserial, geometry, line, pgTable, point } from 'drizzle-orm/pg-core'; import getPort from 'get-port'; import pg from 'pg'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; const { Client } = pg; const ENABLE_LOGGING = false; let pgContainer: Docker.Container; let docker: Docker; let client: pg.Client; let db: NodePgDatabase; async function createDockerDB(): Promise { const inDocker = (docker = new Docker()); const port = await getPort({ port: 5432 }); const image = 'postgis/postgis:16-3.4'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => inDocker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); pgContainer = await docker.createContainer({ Image: image, Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '5432/tcp': [{ HostPort: `${port}` }], }, }, }); await pgContainer.start(); return `postgres://postgres:postgres@localhost:${port}/postgres`; } beforeAll(async () => { const connectionString = process.env['PG_POSTGIS_CONNECTION_STRING'] ?? (await createDockerDB()); const sleep = 1000; let timeLeft = 20000; let connected = false; let lastError: unknown | undefined; do { try { client = new Client(connectionString); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to Postgres'); await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { logger: ENABLE_LOGGING }); await db.execute(sql`CREATE EXTENSION IF NOT EXISTS postgis;`); }); afterAll(async () => { await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); }); const items = pgTable('items', { id: bigserial('id', { mode: 'number' }).primaryKey(), point: point('point'), pointObj: point('point_xy', { mode: 'xy' }), line: line('line'), lineObj: line('line_abc', { mode: 'abc' }), geo: geometry('geo', { type: 'point' }), geoObj: geometry('geo_obj', { type: 'point', mode: 'xy' }), geoSrid: geometry('geo_options', { type: 'point', mode: 'xy', srid: 4000 }), }); beforeEach(async () => { await db.execute(sql`drop table if exists items cascade`); await db.execute(sql` CREATE TABLE items ( id bigserial PRIMARY KEY, "point" point, "point_xy" point, "line" line, "line_abc" line, "geo" geometry(point), "geo_obj" geometry(point), "geo_options" geometry(point,4000) ); `); }); test('insert + select', async () => { const insertedValues = await db.insert(items).values([{ point: [1, 2], pointObj: { x: 1, y: 2 }, line: [1, 2, 3], lineObj: { a: 1, b: 2, c: 3 }, geo: [1, 2], geoObj: { x: 1, y: 2 }, geoSrid: { x: 1, y: 2 }, }]).returning(); const response = await db.select().from(items); expect(insertedValues).toStrictEqual([{ id: 1, point: [1, 2], pointObj: { x: 1, y: 2 }, line: [1, 2, 3], lineObj: { a: 1, b: 2, c: 3 }, geo: [1, 2], geoObj: { x: 1, y: 2 }, geoSrid: { x: 1, y: 2 }, }]); expect(response).toStrictEqual([{ id: 1, point: [1, 2], pointObj: { x: 1, y: 2 }, line: [1, 2, 3], lineObj: { a: 1, b: 2, c: 3 }, geo: [1, 2], geoObj: { x: 1, y: 2 }, geoSrid: { x: 1, y: 2 }, }]); }); ================================================ FILE: integration-tests/tests/extensions/postgis/postgres.test.ts ================================================ import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import { bigserial, geometry, line, pgTable, point } from 'drizzle-orm/pg-core'; import { drizzle, type PostgresJsDatabase } from 'drizzle-orm/postgres-js'; import getPort from 'get-port'; import postgres, { type Sql } from 'postgres'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; const ENABLE_LOGGING = false; let pgContainer: Docker.Container; let docker: Docker; let client: Sql; let db: PostgresJsDatabase; async function createDockerDB(): Promise { const inDocker = (docker = new Docker()); const port = await getPort({ port: 5432 }); const image = 'postgis/postgis:16-3.4'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => inDocker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); pgContainer = await docker.createContainer({ Image: image, Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '5432/tcp': [{ HostPort: `${port}` }], }, }, }); await pgContainer.start(); return `postgres://postgres:postgres@localhost:${port}/postgres`; } beforeAll(async () => { const connectionString = process.env['PG_POSTGIS_CONNECTION_STRING'] ?? (await createDockerDB()); const sleep = 1000; let timeLeft = 20000; let connected = false; let lastError: unknown | undefined; do { try { client = postgres(connectionString, { max: 1, onnotice: () => { // disable notices }, }); await client`select 1`; connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to Postgres'); await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { logger: ENABLE_LOGGING }); await db.execute(sql`CREATE EXTENSION IF NOT EXISTS postgis;`); }); afterAll(async () => { await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); }); const items = pgTable('items', { id: bigserial('id', { mode: 'number' }).primaryKey(), point: point('point'), pointObj: point('point_xy', { mode: 'xy' }), line: line('line'), lineObj: line('line_abc', { mode: 'abc' }), geo: geometry('geo', { type: 'point' }), geoObj: geometry('geo_obj', { type: 'point', mode: 'xy' }), geoSrid: geometry('geo_options', { type: 'point', mode: 'xy', srid: 4000 }), }); beforeEach(async () => { await db.execute(sql`drop table if exists items cascade`); await db.execute(sql` CREATE TABLE items ( id bigserial PRIMARY KEY, "point" point, "point_xy" point, "line" line, "line_abc" line, "geo" geometry(point), "geo_obj" geometry(point), "geo_options" geometry(point,4000) ); `); }); test('insert + select', async () => { const insertedValues = await db.insert(items).values([{ point: [1, 2], pointObj: { x: 1, y: 2 }, line: [1, 2, 3], lineObj: { a: 1, b: 2, c: 3 }, geo: [1, 2], geoObj: { x: 1, y: 2 }, geoSrid: { x: 1, y: 2 }, }]).returning(); const response = await db.select().from(items); expect(insertedValues).toStrictEqual([{ id: 1, point: [1, 2], pointObj: { x: 1, y: 2 }, line: [1, 2, 3], lineObj: { a: 1, b: 2, c: 3 }, geo: [1, 2], geoObj: { x: 1, y: 2 }, geoSrid: { x: 1, y: 2 }, }]); expect(response).toStrictEqual([{ id: 1, point: [1, 2], pointObj: { x: 1, y: 2 }, line: [1, 2, 3], lineObj: { a: 1, b: 2, c: 3 }, geo: [1, 2], geoObj: { x: 1, y: 2 }, geoSrid: { x: 1, y: 2 }, }]); }); ================================================ FILE: integration-tests/tests/extensions/vectors/pg.test.ts ================================================ import Docker from 'dockerode'; import { eq, hammingDistance, jaccardDistance, l2Distance, not, sql } from 'drizzle-orm'; import type { NodePgDatabase } from 'drizzle-orm/node-postgres'; import { drizzle } from 'drizzle-orm/node-postgres'; import { bigserial, bit, halfvec, pgTable, sparsevec, vector } from 'drizzle-orm/pg-core'; import getPort from 'get-port'; import pg from 'pg'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; const { Client } = pg; const ENABLE_LOGGING = false; let pgContainer: Docker.Container; let docker: Docker; let client: pg.Client; let db: NodePgDatabase; async function createDockerDB(): Promise { const inDocker = (docker = new Docker()); const port = await getPort({ port: 5432 }); const image = 'pgvector/pgvector:pg16'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => inDocker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); pgContainer = await docker.createContainer({ Image: image, Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '5432/tcp': [{ HostPort: `${port}` }], }, }, }); await pgContainer.start(); return `postgres://postgres:postgres@localhost:${port}/postgres`; } beforeAll(async () => { const connectionString = process.env['PG_VECTOR_CONNECTION_STRING'] ?? (await createDockerDB()); const sleep = 250; let timeLeft = 5000; let connected = false; let lastError: unknown | undefined; do { try { client = new Client(connectionString); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to Postgres'); await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { logger: ENABLE_LOGGING }); await db.execute(sql`CREATE EXTENSION IF NOT EXISTS vector;`); }); afterAll(async () => { await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); }); const items = pgTable('items', { id: bigserial('id', { mode: 'number' }).primaryKey(), vector: vector('vector', { dimensions: 3 }), bit: bit('bit', { dimensions: 3 }), halfvec: halfvec('halfvec', { dimensions: 3 }), sparsevec: sparsevec('sparsevec', { dimensions: 5 }), }); beforeEach(async () => { await db.execute(sql`drop table if exists items cascade`); await db.execute(sql` CREATE TABLE items ( id bigserial PRIMARY KEY, "vector" vector(3), "bit" bit(3), "halfvec" halfvec(3), "sparsevec" sparsevec(5) ); `); }); test('insert + partial select', async () => { const insertedValues = await db.insert(items).values([{ vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]).returning(); const query = db.select({ distance: l2Distance(items.vector, [3, 1, 2]) }).from(items); expect(query.toSQL()).toStrictEqual({ sql: 'select "vector" <-> $1 from "items"', params: ['[3,1,2]'] }); const response = await query; expect(insertedValues).toStrictEqual([{ id: 1, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); expect(response).toStrictEqual([{ distance: 0 }]); }); // SELECT * FROM items WHERE embedding <-> '[3,1,2]' < 5; test('insert + complex where', async () => { const insertedValues = await db.insert(items).values([{ vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]).returning(); const query = db.select().from(items) .where(sql`${l2Distance(items.vector, [3, 1, 2])} < ${5}`) .limit(5); expect(query.toSQL()).toStrictEqual({ sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" where "items"."vector" <-> $1 < $2 limit $3', params: ['[3,1,2]', 5, 5], }); const res = await query; expect(insertedValues).toStrictEqual([{ id: 1, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); expect(res).toStrictEqual([ { id: 1, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }, ]); }); // SELECT * FROM items WHERE id != 1 ORDER BY embedding <-> (SELECT embedding FROM items WHERE id = 1) LIMIT 5; test('insert + order by subquery', async () => { const insertedValues = await db.insert(items).values([{ vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }, { vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]).returning(); const subquery = db.select({ vector: items.vector }).from(items).where(eq(items.id, 1)); const query = db.select().from(items) .where(not(eq(items.id, 1))) .orderBy(l2Distance(items.vector, subquery)) .limit(5); expect(query.toSQL()).toStrictEqual({ sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" where not "items"."id" = $1 order by "items"."vector" <-> (select "vector" from "items" where "items"."id" = $2) limit $3', params: [1, 1, 5], }); const res = await query; expect(insertedValues).toStrictEqual([{ id: 1, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }, { id: 2, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); expect(res).toStrictEqual([ { id: 2, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }, ]); }); test('insert + select order by jaccard distance', async () => { const insertedValues = await db.insert(items).values({ vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }).returning(); const bitQuery = db.select().from(items).orderBy(jaccardDistance(items.bit, '101')).limit(5); expect(bitQuery.toSQL()).toStrictEqual({ params: [ '101', 5, ], sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" order by "items"."bit" <%> $1 limit $2', }); expect(insertedValues).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); const response = await bitQuery; expect(response).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); }); test('insert + select order by hamming distance', async () => { const insertedValues = await db.insert(items).values({ vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }).returning(); const bitQuery = db.select().from(items).orderBy(hammingDistance(items.bit, '101')).limit(5); expect(bitQuery.toSQL()).toStrictEqual({ params: [ '101', 5, ], sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" order by "items"."bit" <~> $1 limit $2', }); expect(insertedValues).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); const response = await bitQuery; expect(response).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); }); test('insert + select order by l2 distance', async () => { const insertedValues = await db.insert(items).values({ vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }).returning(); const queryVector = db.select().from(items).orderBy(l2Distance(items.vector, [3, 1, 2])).limit(5); const queryHalfvec = db.select().from(items).orderBy(l2Distance(items.halfvec, [3, 1, 2])).limit(5); const querySparsevec = db.select().from(items).orderBy(l2Distance(items.sparsevec, '{1:3,3:1,5:2}/5')).limit(5); expect(queryVector.toSQL()).toStrictEqual({ params: [ '[3,1,2]', 5, ], sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" order by "items"."vector" <-> $1 limit $2', }); expect(queryHalfvec.toSQL()).toStrictEqual({ params: [ '[3,1,2]', 5, ], sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" order by "items"."halfvec" <-> $1 limit $2', }); expect(querySparsevec.toSQL()).toStrictEqual({ params: [ '{1:3,3:1,5:2}/5', 5, ], sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" order by "items"."sparsevec" <-> $1 limit $2', }); const vectorRes = await queryVector; const halfvecRes = await queryHalfvec; const sparsevecRes = await querySparsevec; expect(insertedValues).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); const expectedResponse = [{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]; expect(vectorRes).toStrictEqual(expectedResponse); expect(halfvecRes).toStrictEqual(expectedResponse); expect(sparsevecRes).toStrictEqual(expectedResponse); }); test('select + insert all vectors', async () => { const insertedValues = await db.insert(items).values({ vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }).returning(); const response = await db.select().from(items); expect(insertedValues).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); expect(response).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); }); ================================================ FILE: integration-tests/tests/extensions/vectors/postgres.test.ts ================================================ import Docker from 'dockerode'; import { eq, hammingDistance, jaccardDistance, l2Distance, not, sql } from 'drizzle-orm'; import { bigserial, bit, halfvec, pgTable, sparsevec, vector } from 'drizzle-orm/pg-core'; import { drizzle, type PostgresJsDatabase } from 'drizzle-orm/postgres-js'; import getPort from 'get-port'; import postgres, { type Sql } from 'postgres'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; const ENABLE_LOGGING = false; let pgContainer: Docker.Container; let docker: Docker; let client: Sql; let db: PostgresJsDatabase; async function createDockerDB(): Promise { const inDocker = (docker = new Docker()); const port = await getPort({ port: 5432 }); const image = 'pgvector/pgvector:pg16'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => inDocker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); pgContainer = await docker.createContainer({ Image: image, Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '5432/tcp': [{ HostPort: `${port}` }], }, }, }); await pgContainer.start(); return `postgres://postgres:postgres@localhost:${port}/postgres`; } beforeAll(async () => { const connectionString = process.env['PG_VECTOR_CONNECTION_STRING'] ?? (await createDockerDB()); const sleep = 250; let timeLeft = 5000; let connected = false; let lastError: unknown | undefined; do { try { client = postgres(connectionString, { max: 1, onnotice: () => { // disable notices }, }); await client`select 1`; connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to Postgres'); await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { logger: ENABLE_LOGGING }); await db.execute(sql`CREATE EXTENSION IF NOT EXISTS vector;`); }); afterAll(async () => { await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); }); const items = pgTable('items', { id: bigserial('id', { mode: 'number' }).primaryKey(), vector: vector('vector', { dimensions: 3 }), bit: bit('bit', { dimensions: 3 }), halfvec: halfvec('halfvec', { dimensions: 3 }), sparsevec: sparsevec('sparsevec', { dimensions: 5 }), }); beforeEach(async () => { await db.execute(sql`drop table if exists items cascade`); await db.execute(sql` CREATE TABLE items ( id bigserial PRIMARY KEY, "vector" vector(3), "bit" bit(3), "halfvec" halfvec(3), "sparsevec" sparsevec(5) ); `); }); test('insert + partial select', async () => { const insertedValues = await db.insert(items).values([{ vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]).returning(); const query = db.select({ distance: l2Distance(items.vector, [3, 1, 2]) }).from(items); expect(query.toSQL()).toStrictEqual({ sql: 'select "vector" <-> $1 from "items"', params: ['[3,1,2]'] }); const response = await query; expect(insertedValues).toStrictEqual([{ id: 1, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); expect(response).toStrictEqual([{ distance: 0 }]); }); // SELECT * FROM items WHERE embedding <-> '[3,1,2]' < 5; test('insert + complex where', async () => { const insertedValues = await db.insert(items).values([{ vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]).returning(); const query = db.select().from(items) .where(sql`${l2Distance(items.vector, [3, 1, 2])} < ${5}`) .limit(5); expect(query.toSQL()).toStrictEqual({ sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" where "items"."vector" <-> $1 < $2 limit $3', params: ['[3,1,2]', 5, 5], }); const res = await query; expect(insertedValues).toStrictEqual([{ id: 1, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); expect(res).toStrictEqual([ { id: 1, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }, ]); }); // SELECT * FROM items WHERE id != 1 ORDER BY embedding <-> (SELECT embedding FROM items WHERE id = 1) LIMIT 5; test('insert + order by subquery', async () => { const insertedValues = await db.insert(items).values([{ vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }, { vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]).returning(); const subquery = db.select({ vector: items.vector }).from(items).where(eq(items.id, 1)); const query = db.select().from(items) .where(not(eq(items.id, 1))) .orderBy(l2Distance(items.vector, subquery)) .limit(5); expect(query.toSQL()).toStrictEqual({ sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" where not "items"."id" = $1 order by "items"."vector" <-> (select "vector" from "items" where "items"."id" = $2) limit $3', params: [1, 1, 5], }); const res = await query; expect(insertedValues).toStrictEqual([{ id: 1, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }, { id: 2, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); expect(res).toStrictEqual([ { id: 2, vector: [3, 1, 2], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }, ]); }); test('insert + select order by jaccard distance', async () => { const insertedValues = await db.insert(items).values({ vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }).returning(); const bitQuery = db.select().from(items).orderBy(jaccardDistance(items.bit, '101')).limit(5); expect(bitQuery.toSQL()).toStrictEqual({ params: [ '101', 5, ], sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" order by "items"."bit" <%> $1 limit $2', }); expect(insertedValues).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); const response = await bitQuery; expect(response).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); }); test('insert + select order by hamming distance', async () => { const insertedValues = await db.insert(items).values({ vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }).returning(); const bitQuery = db.select().from(items).orderBy(hammingDistance(items.bit, '101')).limit(5); expect(bitQuery.toSQL()).toStrictEqual({ params: [ '101', 5, ], sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" order by "items"."bit" <~> $1 limit $2', }); expect(insertedValues).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); const response = await bitQuery; expect(response).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); }); test('insert + select order by l2 distance', async () => { const insertedValues = await db.insert(items).values({ vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }).returning(); const queryVector = db.select().from(items).orderBy(l2Distance(items.vector, [3, 1, 2])).limit(5); const queryHalfvec = db.select().from(items).orderBy(l2Distance(items.halfvec, [3, 1, 2])).limit(5); const querySparsevec = db.select().from(items).orderBy(l2Distance(items.sparsevec, '{1:3,3:1,5:2}/5')).limit(5); expect(queryVector.toSQL()).toStrictEqual({ params: [ '[3,1,2]', 5, ], sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" order by "items"."vector" <-> $1 limit $2', }); expect(queryHalfvec.toSQL()).toStrictEqual({ params: [ '[3,1,2]', 5, ], sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" order by "items"."halfvec" <-> $1 limit $2', }); expect(querySparsevec.toSQL()).toStrictEqual({ params: [ '{1:3,3:1,5:2}/5', 5, ], sql: 'select "id", "vector", "bit", "halfvec", "sparsevec" from "items" order by "items"."sparsevec" <-> $1 limit $2', }); const vectorRes = await queryVector; const halfvecRes = await queryHalfvec; const sparsevecRes = await querySparsevec; expect(insertedValues).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); const expectedResponse = [{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]; expect(vectorRes).toStrictEqual(expectedResponse); expect(halfvecRes).toStrictEqual(expectedResponse); expect(sparsevecRes).toStrictEqual(expectedResponse); }); test('select + insert all vectors', async () => { const insertedValues = await db.insert(items).values({ vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }).returning(); const response = await db.select().from(items); expect(insertedValues).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); expect(response).toStrictEqual([{ id: 1, vector: [1, 2, 3], bit: '000', halfvec: [1, 2, 3], sparsevec: '{1:1,3:2,5:3}/5', }]); }); ================================================ FILE: integration-tests/tests/gel/cache.ts ================================================ import { getTableName, is, Table } from 'drizzle-orm'; import type { MutationOption } from 'drizzle-orm/cache/core'; import { Cache } from 'drizzle-orm/cache/core'; import type { CacheConfig } from 'drizzle-orm/cache/core/types'; import Keyv from 'keyv'; // eslint-disable-next-line drizzle-internal/require-entity-kind export class TestGlobalCache extends Cache { private globalTtl: number = 1000; private usedTablesPerKey: Record = {}; constructor(private kv: Keyv = new Keyv()) { super(); } override strategy(): 'explicit' | 'all' { return 'all'; } override async get(key: string, _tables: string[], _isTag: boolean): Promise { const res = await this.kv.get(key) ?? undefined; return res; } override async put( key: string, response: any, tables: string[], isTag: boolean, config?: CacheConfig, ): Promise { await this.kv.set(key, response, config ? config.ex : this.globalTtl); for (const table of tables) { const keys = this.usedTablesPerKey[table]; if (keys === undefined) { this.usedTablesPerKey[table] = [key]; } else { keys.push(key); } } } override async onMutate(params: MutationOption): Promise { const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; const keysToDelete = new Set(); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; const keys = this.usedTablesPerKey[tableName] ?? []; for (const key of keys) keysToDelete.add(key); } if (keysToDelete.size > 0 || tagsArray.length > 0) { for (const tag of tagsArray) { await this.kv.delete(tag); } for (const key of keysToDelete) { await this.kv.delete(key); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; this.usedTablesPerKey[tableName] = []; } } } } } // eslint-disable-next-line drizzle-internal/require-entity-kind export class TestCache extends TestGlobalCache { override strategy(): 'explicit' | 'all' { return 'explicit'; } } ================================================ FILE: integration-tests/tests/gel/createInstance.ts ================================================ import Docker from 'dockerode'; import getPort from 'get-port'; import { v4 as uuidV4 } from 'uuid'; import 'zx/globals'; export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { const docker = new Docker(); const port = await getPort({ port: 5656 }); const image = 'geldata/gel:6'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); const gelContainer = await docker.createContainer({ Image: image, Env: [ 'GEL_CLIENT_SECURITY=insecure_dev_mode', 'GEL_SERVER_SECURITY=insecure_dev_mode', 'GEL_CLIENT_TLS_SECURITY=no_host_verification', 'GEL_SERVER_PASSWORD=password', ], name: `drizzle-integration-tests-${uuidV4()}`, HostConfig: { AutoRemove: true, PortBindings: { '5656/tcp': [{ HostPort: `${port}` }], }, }, }); await gelContainer.start(); return { connectionString: `gel://admin:password@localhost:${port}/main`, container: gelContainer }; } ================================================ FILE: integration-tests/tests/gel/gel-custom.test.ts ================================================ import retry from 'async-retry'; import type Docker from 'dockerode'; import { asc, eq, sql } from 'drizzle-orm'; import { drizzle, type GelJsDatabase } from 'drizzle-orm/gel'; import { alias, customType, gelTable, gelTableCreator } from 'drizzle-orm/gel-core'; import * as gel from 'gel'; import { afterAll, afterEach, beforeAll, beforeEach, expect, test } from 'vitest'; import { createDockerDB } from './createInstance'; import 'zx/globals'; $.quiet = true; const ENABLE_LOGGING = false; let db: GelJsDatabase; let client: gel.Client; let container: Docker.Container | undefined; let dsn: string; const tlsSecurity = '--tls-security=insecure'; function sleep(ms: number) { return new Promise((resolve) => setTimeout(resolve, ms)); } beforeAll(async () => { let connectionString; if (process.env['GEL_CONNECTION_STRING']) { connectionString = process.env['GEL_CONNECTION_STRING']; } else { const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); connectionString = conStr; container = contrainerObj; } await sleep(15 * 1000); client = await retry(async () => { client = gel.createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.close(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); dsn = connectionString; await $`gel query "CREATE TYPE default::users_custom { create property id1: int16 { create constraint exclusive; }; create required property name: str; create property verified: bool { SET default := false; }; create property json: json; };" ${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::prefixed_users_custom { create property id1: int16 { create constraint exclusive; }; create required property name: str; };" ${tlsSecurity} --dsn=${dsn}`; }); afterAll(async () => { await $`gel query "DROP TYPE default::users_custom;" ${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::prefixed_users_custom;" ${tlsSecurity} --dsn=${dsn}`; await client?.close(); await container?.stop().catch(console.error); }); beforeEach((ctx) => { ctx.gel = { db, }; }); afterEach(async () => { await $`gel query "DELETE default::users_custom;" ${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::prefixed_users_custom;" ${tlsSecurity} --dsn=${dsn}`; }); const customInteger = customType<{ data: number; notNull: false; default: false }>({ dataType() { return 'integer'; }, }); const customText = customType<{ data: string }>({ dataType() { return 'text'; }, }); const customBoolean = customType<{ data: boolean }>({ dataType() { return 'boolean'; }, }); const customJson = (name: string) => customType<{ data: TData; driverData: string }>({ dataType() { return 'json'; }, })(name); const usersTable = gelTable('users_custom', { id1: customInteger('id1'), name: customText('name').notNull(), verified: customBoolean('verified').notNull().default(false), json: customJson('json'), }); test('select all fields', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id1: 1, name: 'John', verified: false, json: null }]); }); test('select sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select typed sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('insert returning sql', async (ctx) => { const { db } = ctx.gel; const users = await db.insert(usersTable).values({ id1: 1, name: 'John' }).returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('delete returning sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('update returning sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JANE' }]); }); test('update with returning all fields', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning(); expect(users).toEqual([{ id1: 1, name: 'Jane', verified: false, json: null }]); }); test('update with returning partial', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ id1: usersTable.id1, name: usersTable.name, }); expect(users).toEqual([{ id1: 1, name: 'Jane' }]); }); test('delete with returning all fields', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); expect(users).toEqual([{ id1: 1, name: 'John', verified: false, json: null }]); }); test('delete with returning partial', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ id1: usersTable.id1, name: usersTable.name, }); expect(users).toEqual([{ id1: 1, name: 'John' }]); }); test('insert + select', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id1: 1, name: 'John', verified: false, json: null }]); await db.insert(usersTable).values({ id1: 2, name: 'Jane' }); const result2 = await db.select().from(usersTable); expect(result2).toEqual([ { id1: 1, name: 'John', verified: false, json: null }, { id1: 2, name: 'Jane', verified: false, json: null }, ]); }); test('insert with overridden default values', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id1: 1, name: 'John', verified: true, json: null }]); }); test('insert many', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Bruce', json: ['foo', 'bar'] }, { id1: 3, name: 'Jane' }, { id1: 4, name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersTable.id1, name: usersTable.name, json: usersTable.json, verified: usersTable.verified, }).from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', json: null, verified: false }, { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', json: null, verified: false }, { id: 4, name: 'Austin', json: null, verified: true }, ]); }); test('insert many with returning', async (ctx) => { const { db } = ctx.gel; const result = await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Bruce', json: ['foo', 'bar'] }, { id1: 3, name: 'Jane' }, { id1: 4, name: 'Austin', verified: true }, ]) .returning({ id: usersTable.id1, name: usersTable.name, json: usersTable.json, verified: usersTable.verified, }); expect(result).toEqual([ { id: 1, name: 'John', json: null, verified: false }, { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', json: null, verified: false }, { id: 4, name: 'Austin', json: null, verified: true }, ]); }); test('select with group by as field', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([{ id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with group by as sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([{ id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with group by as sql + column', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([{ id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id1); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }, { name: 'Jane' }]); }); test('select with group by as column + sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([{ id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id1, sql`${usersTable.name}`); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by complex query', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([{ id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id1, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async (ctx) => { const { db } = ctx.gel; const query = db.select({ id: usersTable.id1, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id1, usersTable.name) .toSQL(); expect(query).toEqual({ sql: 'select "users_custom"."id1", "users_custom"."name" from "users_custom" group by "users_custom"."id1", "users_custom"."name"', params: [], }); }); test('insert sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id1, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async (ctx) => { const { db } = ctx.gel; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id1: 10, name: 'Ivan' }, { id1: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersTable.id1, name: usersTable.name, }, customer: { id: customerAlias.id1, name: customerAlias.name, }, }).from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id1, 11)) .where(eq(usersTable.id1, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); test('full join with alias', async (ctx) => { const { db } = ctx.gel; const gelTable = gelTableCreator((name) => `prefixed_${name}`); const users = gelTable('users_custom', { id1: customInteger('id1'), name: customText('name').notNull(), }); const customers = alias(users, 'customer'); await db.insert(users).values([{ id1: 10, name: 'Ivan' }, { id1: 11, name: 'Hans' }]); const result = await db .select().from(users) .leftJoin(customers, eq(customers.id1, 11)) .where(eq(users.id1, 10)); expect(result).toEqual([{ users_custom: { id1: 10, name: 'Ivan', }, customer: { id1: 11, name: 'Hans', }, }]); }); test('insert with spaces', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id1, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const statement = db.select({ id: usersTable.id1, name: usersTable.name, }).from(usersTable) .prepare('statement1'); const result = await statement.execute(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement reuse', async (ctx) => { const { db } = ctx.gel; const stmt = db.insert(usersTable).values({ id1: sql.placeholder('id1'), verified: true, name: sql.placeholder('name'), }).prepare('stmt2'); for (let i = 1; i < 11; i++) { await stmt.execute({ id1: i, name: `John ${i}` }); } const result = await db.select({ id1: usersTable.id1, name: usersTable.name, verified: usersTable.verified, }).from(usersTable); expect(result).toEqual([ { id1: 1, name: 'John 1', verified: true }, { id1: 2, name: 'John 2', verified: true }, { id1: 3, name: 'John 3', verified: true }, { id1: 4, name: 'John 4', verified: true }, { id1: 5, name: 'John 5', verified: true }, { id1: 6, name: 'John 6', verified: true }, { id1: 7, name: 'John 7', verified: true }, { id1: 8, name: 'John 8', verified: true }, { id1: 9, name: 'John 9', verified: true }, { id1: 10, name: 'John 10', verified: true }, ]); }); test('prepared statement with placeholder in .where', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const stmt = db.select({ id: usersTable.id1, name: usersTable.name, }).from(usersTable) .where(eq(usersTable.id1, sql.placeholder('id'))) .prepare('stmt3'); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement with placeholder in .limit', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const stmt = db .select({ id: usersTable.id1, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id1, sql.placeholder('id'))) .limit(sql.placeholder('limit')) .prepare('stmt_limit'); const result = await stmt.execute({ id: 1, limit: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); expect(result).toHaveLength(1); }); test('prepared statement with placeholder in .offset', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([{ id1: 1, name: 'John' }, { id1: 2, name: 'John1' }]); const stmt = db .select({ id: usersTable.id1, name: usersTable.name, }) .from(usersTable) .offset(sql.placeholder('offset')) .prepare('stmt_offset'); const result = await stmt.execute({ offset: 1 }); expect(result).toEqual([{ id: 2, name: 'John1' }]); }); test('insert via db.execute + select via db.execute', async () => { await db.execute(sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id1: number; name: string }>(sql`select id1, name from "users_custom"`); expect(result).toEqual([{ id1: null, name: 'John' }]); }); test('insert via db.execute + returning', async () => { const inserted = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${ sql.identifier(usersTable.name.name) }) values (${'John'}) returning ${usersTable.id1}, ${usersTable.name}`, ); expect(inserted).toEqual([{ id1: null, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute>( db.insert(usersTable).values({ id1: 1, name: 'John' }).returning({ id: usersTable.id1, name: usersTable.name }), ); expect(inserted).toEqual([{ id1: 1, name: 'John' }]); }); // TODO on conflict does not work // test.todo('build query insert with onConflict do update', async (ctx) => { // const { db } = ctx.gel; // const query = db.insert(usersTable) // .values({ id1: 1, name: 'John', jsonb: ['foo', 'bar'] }) // .onConflictDoUpdate({ target: usersTable.id1, set: { name: 'John1' } }) // .toSQL(); // expect(query).toEqual({ // sql: // 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', // params: ['John', '["foo","bar"]', 'John1'], // }); // }); // // TODO on conflict does not work // test.todo('build query insert with onConflict do update / multiple columns', async (ctx) => { // const { db } = ctx.gel; // const query = db.insert(usersTable) // .values({ id1: 1, name: 'John', jsonb: ['foo', 'bar'] }) // .onConflictDoUpdate({ target: [usersTable.id1, usersTable.name], set: { name: 'John1' } }) // .toSQL(); // expect(query).toEqual({ // sql: // 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', // params: ['John', '["foo","bar"]', 'John1'], // }); // }); // // TODO on conflict does not work // test.todo('build query insert with onConflict do nothing', async (ctx) => { // const { db } = ctx.gel; // const query = db.insert(usersTable) // .values({ id1: 1, name: 'John' }) // .onConflictDoNothing() // .toSQL(); // expect(query).toEqual({ // sql: 'insert into "users" ("id1", "name", "verified") values ($1, $2, default) on conflict do nothing', // params: [1, 'John'], // }); // }); // // TODO on conflict does not work // test.todo('build query insert with onConflict do nothing + target', async (ctx) => { // const { db } = ctx.gel; // const query = db.insert(usersTable) // .values({ id1: 1, name: 'John' }) // .onConflictDoNothing({ target: usersTable.id1 }) // .toSQL(); // expect(query).toEqual({ // sql: 'insert into "users" ("id1", "name", "verified") values ($1, $2, default) on conflict ("id1") do nothing', // params: [1, 'John'], // }); // }); // // TODO on conflict does not work // test.todo('insert with onConflict do update', async (ctx) => { // const { db } = ctx.gel; // await db.insert(usersTable) // .values({ id1: 1, name: 'John' }); // await db.insert(usersTable) // .values({ id1: 1, name: 'John' }) // .onConflictDoUpdate({ target: usersTable.id1, set: { name: 'John1' } }); // const res = await db.select({ id: usersTable.id1, name: usersTable.name }).from(usersTable).where( // eq(usersTable.id1, 1), // ); // expect(res).toEqual([{ id: 1, name: 'John1' }]); // }); // // TODO on conflict does not work // test.todo('insert with onConflict do nothing', async (ctx) => { // const { db } = ctx.gel; // await db.insert(usersTable) // .values({ id1: 1, name: 'John' }); // await db.insert(usersTable) // .values({ id1: 1, name: 'John' }) // .onConflictDoNothing(); // const res = await db.select({ id: usersTable.id1, name: usersTable.name }).from(usersTable).where( // eq(usersTable.id1, 1), // ); // expect(res).toEqual([{ id: 1, name: 'John' }]); // }); // // TODO on conflict does not work // test.todo('insert with onConflict do nothing + target', async (ctx) => { // const { db } = ctx.gel; // await db.insert(usersTable) // .values({ id1: 1, name: 'John' }); // await db.insert(usersTable) // .values({ id1: 1, name: 'John' }) // .onConflictDoNothing({ target: usersTable.id1 }); // const res = await db.select({ id: usersTable.id1, name: usersTable.name }).from(usersTable).where( // eq(usersTable.id1, 1), // ); // expect(res).toEqual([{ id: 1, name: 'John' }]); // }); ================================================ FILE: integration-tests/tests/gel/gel-ext.test.ts ================================================ import retry from 'async-retry'; import type Docker from 'dockerode'; import { relations, sql } from 'drizzle-orm'; import { drizzle, type GelJsDatabase } from 'drizzle-orm/gel'; import { foreignKey, gelSchema, gelTable, text, timestamptz, uniqueIndex, uuid } from 'drizzle-orm/gel-core'; import createClient, { type Client } from 'gel'; import { afterAll, afterEach, beforeAll, describe, expect, test } from 'vitest'; import 'zx/globals'; import { createDockerDB } from './createInstance'; $.quiet = true; const ENABLE_LOGGING = false; export const extauth = gelSchema('ext::auth'); export const identityInExtauth = extauth.table('Identity', { id: uuid().default(sql`uuid_generate_v4()`).primaryKey().notNull(), createdAt: timestamptz('created_at').default(sql`(clock_timestamp())`).notNull(), issuer: text().notNull(), modifiedAt: timestamptz('modified_at').notNull(), subject: text().notNull(), }, (table) => [ uniqueIndex('6bc2dd19-bce4-5810-bb1b-7007afe97a11;schemaconstr').using( 'btree', table.id.asc().nullsLast().op('uuid_ops'), ), ]); export const user = gelTable('User', { id: uuid().default(sql`uuid_generate_v4()`).primaryKey().notNull(), email: text().notNull(), identityId: uuid('identity_id').notNull(), username: text().notNull(), }, (table) => [ uniqueIndex('d504514c-26a7-11f0-b836-81aa188c0abe;schemaconstr').using( 'btree', table.id.asc().nullsLast().op('uuid_ops'), ), foreignKey({ columns: [table.identityId], foreignColumns: [identityInExtauth.id], name: 'User_fk_identity', }), ]); export const userRelations = relations(user, ({ one }) => ({ identity: one(identityInExtauth, { references: [identityInExtauth.id], fields: [user.identityId] }), })); const schema = { user, identityInExtauth, userRelations }; let client: Client; let db: GelJsDatabase; const tlsSecurity: string = 'insecure'; let dsn: string; let container: Docker.Container | undefined; function sleep(ms: number) { return new Promise((resolve) => setTimeout(resolve, ms)); } beforeAll(async () => { let connectionString; if (process.env['GEL_CONNECTION_STRING']) { connectionString = process.env['GEL_CONNECTION_STRING']; } else { const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); connectionString = conStr; container = contrainerObj; } await sleep(15 * 1000); client = await retry(() => { client = createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.close(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING, schema: { user, identityInExtauth, userRelations } }); dsn = connectionString; }); afterAll(async () => { await client?.close().catch(console.error); await container?.stop().catch(console.error); }); describe('extensions tests group', async () => { beforeAll(async () => { await $`gel query 'CREATE EXTENSION pgcrypto VERSION "1.3"; CREATE EXTENSION auth VERSION "1.0"; CREATE TYPE default::User { CREATE REQUIRED LINK identity: ext::auth::Identity; CREATE REQUIRED PROPERTY email: std::str; CREATE REQUIRED PROPERTY username: std::str; }; CREATE GLOBAL default::current_user := (std::assert_single((SELECT default::User { id, username, email } FILTER (.identity = GLOBAL ext::auth::ClientTokenIdentity) )));' --tls-security=${tlsSecurity} --dsn=${dsn}`; }); afterEach(async () => { await $`gel query "DELETE default::User;" --tls-security=${tlsSecurity} --dsn=${dsn}`; }); test('check that you can query from ext::auth schema in gel', async () => { const [response] = await db.insert(identityInExtauth).values({ issuer: 'issuer', subject: 'subject', modifiedAt: new Date(), }).returning(); await db.insert(user).values({ identityId: response!.id, username: 'username', email: 'email' }); const userResponse = await db.select().from(user); const authResponse = await db.select().from(identityInExtauth); const relationsResponse = await db.query.user.findMany({ columns: { id: false, identityId: false, }, with: { identity: { columns: { id: false, modifiedAt: false, createdAt: false, }, }, }, }); expect(relationsResponse).toStrictEqual([{ email: 'email', identity: { issuer: 'issuer', subject: 'subject' }, username: 'username', }]); expect(userResponse.length).toBe(1); expect(authResponse.length).toBe(1); expect(userResponse[0]!.username).toBe('username'); expect(userResponse[0]!.email).toBe('email'); expect(authResponse[0]!.issuer).toBe('issuer'); expect(authResponse[0]!.subject).toBe('subject'); }); }); ================================================ FILE: integration-tests/tests/gel/gel.test.ts ================================================ import retry from 'async-retry'; import type Docker from 'dockerode'; import { and, arrayContained, arrayContains, arrayOverlaps, asc, avg, avgDistinct, count, countDistinct, eq, type Equal, exists, getTableColumns, gt, gte, ilike, inArray, is, like, lt, max, min, not, notInArray, or, SQL, sql, type SQLWrapper, sum, sumDistinct, } from 'drizzle-orm'; import { drizzle, type GelJsDatabase } from 'drizzle-orm/gel'; import type { GelColumn } from 'drizzle-orm/gel-core'; import { alias, boolean, dateDuration, decimal, duration, except, exceptAll, foreignKey, GelDialect, GelPolicy, gelPolicy, gelSchema, gelTable, gelTableCreator, getTableConfig, integer, intersect, intersectAll, json, localDate, localTime, primaryKey, relDuration, text, timestamp, timestamptz, union, unionAll, unique, uniqueKeyName, uuid as gelUuid, } from 'drizzle-orm/gel-core'; import createClient, { type Client, DateDuration, Duration, LocalDate, LocalDateTime, LocalTime, RelativeDuration, } from 'gel'; import { v4 as uuidV4 } from 'uuid'; import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, expectTypeOf, test, vi } from 'vitest'; import { Expect } from '~/utils'; import 'zx/globals'; import { TestCache, TestGlobalCache } from './cache'; import { createDockerDB } from './createInstance'; $.quiet = true; const ENABLE_LOGGING = false; let client: Client; let db: GelJsDatabase; let dbGlobalCached: GelJsDatabase; let cachedDb: GelJsDatabase; const tlsSecurity: string = 'insecure'; let dsn: string; let container: Docker.Container | undefined; function sleep(ms: number) { return new Promise((resolve) => setTimeout(resolve, ms)); } declare module 'vitest' { interface TestContext { gel: { db: GelJsDatabase; }; cachedGel: { db: GelJsDatabase; dbGlobalCached: GelJsDatabase; }; } } const usersTable = gelTable('users', { id1: integer('id1').notNull(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), json: json('json').$type(), createdAt: timestamptz('created_at').notNull().defaultNow(), }); const postsTable = gelTable('posts', { id: integer().primaryKey(), description: text().notNull(), userId: integer('city_id').references(() => usersTable.id1), }); const usersOnUpdate = gelTable('users_on_update', { id1: integer('id1').notNull(), name: text('name').notNull(), updateCounter: integer('update_counter') .default(sql`1`) .$onUpdateFn(() => sql`update_counter + 1`), updatedAt: timestamptz('updated_at').$onUpdate(() => new Date()), alwaysNull: text('always_null') .$type() .$onUpdate(() => null), }); const citiesTable = gelTable('cities', { id1: integer('id1').notNull(), name: text('name').notNull(), state: text('state'), }); const cities2Table = gelTable('cities', { id1: integer('id1').notNull(), name: text('name').notNull(), }); const users2Table = gelTable('some_new_users', { id1: integer('id1').notNull(), name: text('name').notNull(), cityId: integer('cityId'), }); const users3Table = gelTable('users3', { id1: integer('id1'), name: text('name').notNull(), }); const coursesTable = gelTable('courses', { id1: integer('id1').notNull(), name: text('name').notNull(), categoryId: integer('categoryId').references(() => courseCategoriesTable.id1), }); const courseCategoriesTable = gelTable('course_categories', { id1: integer('id1').notNull(), name: text('name').notNull(), }); const orders = gelTable('orders', { id1: integer('id1'), region: text('region').notNull(), product: text('product') .notNull() .$default(() => 'random_string'), amount: integer('amount').notNull(), quantity: integer('quantity').notNull(), }); const salEmp = gelTable('sal_emp', { name: text('name'), payByQuarter: integer('pay_by_quarter').array(), }); const jsonTestTable = gelTable('jsontest', { id1: integer('id1').primaryKey(), json: json('json').$type<{ string: string; number: number }>(), }); // To test aggregate functions const aggregateTable = gelTable('aggregate_table', { id1: integer('id1').notNull(), name: text('name').notNull(), a: integer('a'), b: integer('b'), c: integer('c'), nullOnly: integer('nullOnly'), }); // To test another schema and multischema const mySchema = gelSchema('mySchema'); const usersMySchemaTable = mySchema.table('users', { id1: integer('id1').notNull(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), json: json('json').$type(), createdAt: timestamptz('created_at').notNull().defaultNow(), }); beforeAll(async () => { let connectionString; if (process.env['GEL_CONNECTION_STRING']) { connectionString = process.env['GEL_CONNECTION_STRING']; } else { const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); connectionString = conStr; container = contrainerObj; } await sleep(15 * 1000); client = await retry(() => { client = createClient({ dsn: connectionString, tlsSecurity: 'insecure' }); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.close(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache(), }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache(), }); dsn = connectionString; }); afterAll(async () => { await client?.close().catch(console.error); await container?.stop().catch(console.error); }); beforeEach((ctx) => { ctx.gel = { db, }; ctx.cachedGel = { db: cachedDb, dbGlobalCached, }; }); describe('some', async () => { beforeEach(async (ctx) => { await ctx.cachedGel.db.$cache?.invalidate({ tables: 'users' }); await ctx.cachedGel.dbGlobalCached.$cache?.invalidate({ tables: 'users' }); }); beforeAll(async () => { await $`gel query "CREATE TYPE default::users { create property id1: int16 { create constraint exclusive; }; create required property name: str; create required property verified: bool { SET default := false; }; create PROPERTY json: json; create required property created_at: datetime { SET default := datetime_of_statement(); }; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users_with_cities { create property id1: int16 { create constraint exclusive; }; create required property name: str; create required property cityId: int32; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users_with_undefined { create property id1: int16 { create constraint exclusive; }; create property name: str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users_insert_select { create property id1: int16 { create constraint exclusive; }; create property name: str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE MODULE mySchema;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE mySchema::users { create property id1: int16; create required property name: str; create required property verified: bool { SET default := false; }; create PROPERTY json: json; create required property created_at: datetime { SET default := datetime_of_statement(); }; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::orders { CREATE PROPERTY id1 -> int16; CREATE REQUIRED PROPERTY region -> str; CREATE REQUIRED PROPERTY product -> str; CREATE REQUIRED PROPERTY amount -> int64; CREATE REQUIRED PROPERTY quantity -> int64; }; " --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users_distinct { create required property id1 -> int16; create required property name -> str; create required property age -> int16; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users3 { create property id1 -> int16; create required property name -> str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::cities { create required property id1 -> int16; create required property name -> str; create property state -> str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::courses { create required property id1 -> int16; create required property name -> str; create property categoryId -> int16; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::course_categories { create required property id1 -> int16; create required property name -> str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::jsontest { create property id1 -> int16; create required property json -> json; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::sal_emp { create property name -> str; create property pay_by_quarter -> array; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::some_new_users { create required property id1 -> int16; create required property name -> str; create property cityId -> int32; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::aggregate_table { create property id1: int16; create required property name: str; create property a: int16; create property b: int16; create property c: int16; create PROPERTY nullOnly: int16; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::prefixed_users { CREATE PROPERTY id1 -> int16; CREATE REQUIRED PROPERTY name -> str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::empty_insert_single { CREATE PROPERTY id1 -> int16; CREATE REQUIRED PROPERTY name -> str { SET default := 'Dan'; }; CREATE PROPERTY state -> str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::empty_insert_multiple { CREATE PROPERTY id1 -> int16; CREATE REQUIRED PROPERTY name -> str { SET default := 'Dan'; }; CREATE PROPERTY state -> str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::products { CREATE PROPERTY id1 -> int16; CREATE REQUIRED PROPERTY price -> decimal; CREATE REQUIRED PROPERTY cheap -> bool { SET default := false }; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::myprefix_test_prefixed_table_with_unique_name { create property id1 -> int16; create required property name -> str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::metric_entry { create required property id1 -> uuid; create required property createdAt -> datetime; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users_transactions { create required property id1 -> int16; create required property balance -> int16; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::products_transactions { create required property id1 -> int16; create required property price -> int16; create required property stock -> int16; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users_transactions_rollback { create required property id1 -> int16; create required property balance -> int16; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users_nested_transactions { create required property id1 -> int16; create required property balance -> int16; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::internal_staff { create required property userId -> int16; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::custom_user { create required property id1 -> int16; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::ticket { create required property staffId -> int16; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::posts { create required property id1 -> int16; create property tags -> array; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE dates_column { create property datetimeColumn -> datetime; create property local_datetimeColumn -> cal::local_datetime; create property local_dateColumn -> cal::local_date; create property local_timeColumn -> cal::local_time; create property durationColumn -> duration; create property relative_durationColumn -> cal::relative_duration; create property dateDurationColumn -> cal::date_duration; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE users_with_insert { create required property username -> str; create required property admin -> bool; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE users_test_with_and_without_timezone { create required property username -> str; create required property admin -> bool; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::arrays_tests { create property id1: int16 { create constraint exclusive; }; create property tags: array; create required property numbers: array; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users_on_update { create required property id1 -> int16; create required property name -> str; create property update_counter -> int16 { SET default := 1 }; create property always_null -> str; create property updated_at -> datetime; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::json_table { create PROPERTY json: json; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::notifications { create required property id1 -> int16; create required property sentAt: datetime { SET default := datetime_of_statement(); }; create property message -> str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::user_notifications { create required property userId -> int16; create required property notificationId -> int16; create property categoryId -> int16; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users1 { create required property id1: int16; create required property name: str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users2 { create required property id1: int16; create required property name: str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::count_test { create required property id1: int16; create required property name: str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users_with_names { create required property id1: int16; create required property firstName: str; create required property lastName: str; create required property admin: bool; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users_with_age { create required property id1: int16; create required property name: str; create required property age: int32; create required property city: str; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "CREATE TYPE default::users_on_update_sql { create required property id1: int16; create required property name: str; create required property updated_at: datetime; };" --tls-security=${tlsSecurity} --dsn=${dsn}`; }); afterEach(async () => { await $`gel query "DELETE default::users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::prefixed_users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::some_new_users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::orders;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::cities;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::users_on_update;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::aggregate_table;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE mySchema::users;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::count_test;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::users1;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::users2;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::jsontest;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DELETE default::users_on_update_sql;" --tls-security=${tlsSecurity} --dsn=${dsn}`; }); afterAll(async () => { await $`gel query "DROP TYPE default::users" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users_with_cities" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users_with_undefined " --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users_insert_select" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE mySchema::users" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::orders" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users_distinct" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users3" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::cities" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::courses" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::course_categories" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::jsontest" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::sal_emp" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::some_new_users" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::aggregate_table" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::prefixed_users" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::empty_insert_single" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::empty_insert_multiple" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::products" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::myprefix_test_prefixed_table_with_unique_name" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::metric_entry" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users_transactions" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::products_transactions" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users_transactions_rollback" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users_nested_transactions" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::internal_staff" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::custom_user" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::ticket" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::posts" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE dates_column" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE users_with_insert" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE users_test_with_and_without_timezone" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::arrays_tests" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users_on_update" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::json_table" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::notifications" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::user_notifications" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users1" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users2" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::count_test" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users_with_names" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP MODULE mySchema;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE users_with_age;" --tls-security=${tlsSecurity} --dsn=${dsn}`; await $`gel query "DROP TYPE default::users_on_update_sql;" --tls-security=${tlsSecurity} --dsn=${dsn}`; }); async function setupSetOperationTest(db: GelJsDatabase) { await db.insert(cities2Table).values([ { id1: 1, name: 'New York' }, { id1: 2, name: 'London' }, { id1: 3, name: 'Tampa' }, ]); await db.insert(users2Table).values([ { id1: 1, name: 'John', cityId: 1 }, { id1: 2, name: 'Jane', cityId: 2 }, { id1: 3, name: 'Jack', cityId: 3 }, { id1: 4, name: 'Peter', cityId: 3 }, { id1: 5, name: 'Ben', cityId: 2 }, { id1: 6, name: 'Jill', cityId: 1 }, { id1: 7, name: 'Mary', cityId: 2 }, { id1: 8, name: 'Sally', cityId: 1 }, ]); } async function setupAggregateFunctionsTest(db: GelJsDatabase) { await db.insert(aggregateTable).values([ { id1: 1, name: 'value 1', a: 5, b: 10, c: 20 }, { id1: 2, name: 'value 1', a: 5, b: 20, c: 30 }, { id1: 3, name: 'value 2', a: 10, b: 50, c: 60 }, { id1: 4, name: 'value 3', a: 20, b: 20, c: null }, { id1: 5, name: 'value 4', a: null, b: 90, c: 120 }, { id1: 6, name: 'value 5', a: 80, b: 10, c: null }, { id1: 7, name: 'value 6', a: null, b: null, c: 150 }, ]); } test('table configs: unique third param', async () => { const cities1Table = gelTable( 'cities1', { id: integer('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), f1: unique('custom_name1').on(t.name, t.state), }), ); const tableConfig = getTableConfig(cities1Table); expect(tableConfig.uniqueConstraints).toHaveLength(2); expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); expect(tableConfig.uniqueConstraints[0]?.nullsNotDistinct).toBe(true); expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); expect(tableConfig.uniqueConstraints[1]?.nullsNotDistinct).toBe(false); expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); }); test('table configs: unique in column', async () => { const cities1Table = gelTable('cities1', { id: integer('id').primaryKey(), name: text('name').notNull().unique(), state: text('state').unique('custom'), field: text('field').unique('custom_field', { nulls: 'not distinct' }), }); const tableConfig = getTableConfig(cities1Table); const columnName = tableConfig.columns.find((it) => it.name === 'name'); expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); expect(columnName?.isUnique).toBe(true); const columnState = tableConfig.columns.find((it) => it.name === 'state'); expect(columnState?.uniqueName).toBe('custom'); expect(columnState?.isUnique).toBe(true); const columnField = tableConfig.columns.find((it) => it.name === 'field'); expect(columnField?.uniqueName).toBe('custom_field'); expect(columnField?.isUnique).toBe(true); expect(columnField?.uniqueType).toBe('not distinct'); }); test('table config: foreign keys name', async () => { const table = gelTable( 'cities', { id1: integer('id1').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: foreignKey({ foreignColumns: [t.id1], columns: [t.id1], name: 'custom_fk' }), }), ); const tableConfig = getTableConfig(table); expect(tableConfig.foreignKeys).toHaveLength(1); expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); }); test('table config: primary keys name', async () => { const table = gelTable( 'cities', { id: integer('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), }), ); const tableConfig = getTableConfig(table); expect(tableConfig.primaryKeys).toHaveLength(1); expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); }); test('select all fields', async (ctx) => { const { db } = ctx.gel; const now = Date.now(); await db.insert(usersTable).values({ id1: 1, name: 'John' }); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); // TODO 100 ms expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(500); expect(result.map((it) => ({ ...it, id: undefined }))).toEqual([ { id: undefined, id1: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt, }, ]); }); test('select sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable); expect(users.map((it) => ({ ...it, id: undefined }))).toStrictEqual([{ id: undefined, name: 'JOHN' }]); }); test('select typed sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable); expect(users.map((it) => ({ ...it, id: undefined }))).toEqual([{ name: 'JOHN' }]); }); test('select with empty array in inArray', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane', }, ]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(inArray(usersTable.id1, [])); expect(result.map((it) => ({ ...it, id: undefined }))).toEqual([]); }); test('select with empty array in notInArray', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane', }, ]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(notInArray(usersTable.id1, [])); expect(result.map((it) => ({ ...it, id: undefined }))).toEqual([ { name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE', }, ]); }); test('$default function', async (ctx) => { const { db } = ctx.gel; const insertedOrder = await db.insert(orders).values({ id1: 1, region: 'Ukraine', amount: 1, quantity: 1 }) .returning(); const selectedOrder = await db.select().from(orders); expect(insertedOrder.map((it) => ({ ...it, id: undefined }))).toEqual([ { id: undefined, amount: 1, id1: 1, quantity: 1, region: 'Ukraine', product: 'random_string', }, ]); expect(selectedOrder.map((it) => ({ ...it, id: undefined }))).toEqual([ { id: undefined, id1: 1, amount: 1, quantity: 1, region: 'Ukraine', product: 'random_string', }, ]); }); test('select distinct', async (ctx) => { const { db } = ctx.gel; const usersDistinctTable = gelTable('users_distinct', { id1: integer('id1').notNull(), name: text('name').notNull(), age: integer('age').notNull(), }); await db.insert(usersDistinctTable).values([ { id1: 1, name: 'John', age: 24 }, { id1: 1, name: 'John', age: 24 }, { id1: 2, name: 'John', age: 25 }, { id1: 1, name: 'Jane', age: 24 }, { id1: 1, name: 'Jane', age: 26 }, ]); const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id1, usersDistinctTable.name, ); const users2 = await db.selectDistinctOn([usersDistinctTable.id1]).from(usersDistinctTable).orderBy( usersDistinctTable.id1, ); const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( usersDistinctTable, ).orderBy(usersDistinctTable.name); const users4 = await db.selectDistinctOn([usersDistinctTable.id1, usersDistinctTable.age]).from( usersDistinctTable, ).orderBy(usersDistinctTable.id1, usersDistinctTable.age); expect(users1).toEqual([ { id1: 1, name: 'Jane', age: 24 }, { id1: 1, name: 'Jane', age: 26 }, { id1: 1, name: 'John', age: 24 }, { id1: 2, name: 'John', age: 25 }, ]); expect(users2).toHaveLength(2); expect(users2[0]?.id1).toBe(1); expect(users2[1]?.id1).toBe(2); expect(users3).toHaveLength(2); expect(users3[0]?.name).toBe('Jane'); expect(users3[1]?.name).toBe('John'); expect(users4).toEqual([ { id1: 1, name: 'John', age: 24 }, { id1: 1, name: 'Jane', age: 26 }, { id1: 2, name: 'John', age: 25 }, ]); }); test('insert returning sql', async (ctx) => { const { db } = ctx.gel; const users = await db .insert(usersTable) .values({ id1: 1, name: 'John' }) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('delete returning sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db .delete(usersTable) .where(eq(usersTable.name, 'John')) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('update returning sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JANE' }]); }); test('update with returning all fields', async (ctx) => { const { db } = ctx.gel; const now = Date.now(); await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(500); expect(users).toEqual([ { id1: 1, name: 'Jane', verified: false, json: null, createdAt: users[0]!.createdAt, }, ]); }); test('update with returning partial', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ id1: usersTable.id1, name: usersTable.name, }); expect(users).toEqual([{ id1: 1, name: 'Jane' }]); }); test('delete with returning all fields', async (ctx) => { const { db } = ctx.gel; const now = Date.now(); await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(500); expect(users.map((it) => ({ ...it, id: undefined }))).toEqual([ { name: 'John', id1: 1, id: undefined, verified: false, json: null, createdAt: users[0]!.createdAt, }, ]); }); test('delete with returning partial', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ id1: usersTable.id1, name: usersTable.name, }); expect(users.map((it) => ({ ...it, id: undefined }))).toEqual([{ id1: 1, name: 'John' }]); }); test('insert + select', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([ { name: 'John', id1: 1, verified: false, json: null, createdAt: result[0]!.createdAt, }, ]); await db.insert(usersTable).values({ id1: 2, name: 'Jane' }); const result2 = await db.select().from(usersTable); expect(result2).toEqual([ { id1: 1, name: 'John', verified: false, json: null, createdAt: result2[0]!.createdAt }, { id1: 2, name: 'Jane', verified: false, json: null, createdAt: result2[1]!.createdAt }, ]); }); test('json insert', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John', json: ['foo', 'bar'] }); const result = await db .select({ id1: usersTable.id1, name: usersTable.name, json: usersTable.json, }) .from(usersTable); expect(result).toEqual([ { id1: 1, name: 'John', json: ['foo', 'bar'], }, ]); }); test('insert with overridden default values', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([ { id1: 1, name: 'John', verified: true, json: null, createdAt: result[0]!.createdAt, }, ]); }); test('insert many', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Bruce', json: ['foo', 'bar'], verified: true }, { id1: 3, name: 'Jane' }, { id1: 4, name: 'Austin', verified: true }, ]); const result = await db .select({ name: usersTable.name, json: usersTable.json, verified: usersTable.verified, }) .from(usersTable); expect(result).toEqual([ { name: 'John', json: null, verified: false }, { name: 'Bruce', json: ['foo', 'bar'], verified: true }, { name: 'Jane', json: null, verified: false }, { name: 'Austin', json: null, verified: true }, ]); }); test('insert many with returning', async (ctx) => { const { db } = ctx.gel; const result = await db .insert(usersTable) .values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Bruce', json: ['foo', 'bar'] }, { id1: 3, name: 'Jane' }, { id1: 4, name: 'Austin', verified: true, }, ]) .returning({ name: usersTable.name, json: usersTable.json, verified: usersTable.verified, }); expect(result).toEqual([ { name: 'John', json: null, verified: false }, { name: 'Bruce', json: ['foo', 'bar'], verified: false }, { name: 'Jane', json: null, verified: false }, { name: 'Austin', json: null, verified: true }, ]); }); test('select with group by as field', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane', }, ]); const result = await db.select({ name: usersTable.name }).from(usersTable).groupBy(usersTable.name); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with exists', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane', }, ]); const user = alias(usersTable, 'user'); const result = await db .select({ name: usersTable.name }) .from(usersTable) .where( exists( db .select({ one: sql`1` }) .from(user) .where(and(eq(usersTable.name, 'John'), eq(user.id1, usersTable.id1))), ), ); expect(result).toEqual([{ name: 'John' }]); }); test('select with group by as sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane', }, ]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(sql`${usersTable.name}`); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test.skip('select with group by as sql + column', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane', }, ]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id1); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }, { name: 'Jane' }]); }); test('select with group by as column + sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane', }, ]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id1, sql`${usersTable.name}`); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by complex query', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane', }, ]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id1, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async (ctx) => { const { db } = ctx.gel; const query = db.select({ id: usersTable.id1, name: usersTable.name }).from(usersTable).groupBy( usersTable.id1, usersTable.name, ).toSQL(); expect(query).toEqual({ sql: 'select "users"."id1", "users"."name" from "users" group by "users"."id1", "users"."name"', params: [], }); }); test('insert sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: sql`${'John'}` }); const result = await db.select({ name: usersTable.name }).from(usersTable); expect(result).toEqual([{ name: 'John' }]); }); test('partial join with alias', async (ctx) => { const { db } = ctx.gel; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([ { id1: 10, name: 'Ivan' }, { id1: 11, name: 'Hans' }, ]); const result = await db .select({ user: { id1: usersTable.id1, name: usersTable.name, }, customer: { id1: customerAlias.id1, name: customerAlias.name, }, }) .from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id1, 11)) .where(eq(usersTable.id1, 10)); expect(result).toEqual([ { user: { id1: 10, name: 'Ivan' }, customer: { id1: 11, name: 'Hans' }, }, ]); }); test('full join with alias', async (ctx) => { const { db } = ctx.gel; const gelTable = gelTableCreator((name) => `prefixed_${name}`); const users = gelTable('users', { id1: integer('id1').primaryKey(), name: text('name').notNull(), }); const customers = alias(users, 'customer'); await db.insert(users).values([ { id1: 10, name: 'Ivan' }, { id1: 11, name: 'Hans' }, ]); const result = await db.select().from(users).leftJoin(customers, eq(customers.id1, 11)).where( eq(users.id1, 10), ); expect(result).toEqual([ { users: { id1: 10, name: 'Ivan', }, customer: { id1: 11, name: 'Hans', }, }, ]); }); test('select from alias', async (ctx) => { const { db } = ctx.gel; const gelTable = gelTableCreator((name) => `prefixed_${name}`); const users = gelTable('users', { id1: integer('id1'), name: text('name').notNull(), }); const user = alias(users, 'user'); const customers = alias(users, 'customer'); await db.insert(users).values([ { id1: 10, name: 'Ivan' }, { id1: 11, name: 'Hans' }, ]); const result = await db.select().from(user).leftJoin(customers, eq(customers.id1, 11)).where(eq(user.id1, 10)); expect(result).toEqual([ { user: { id1: 10, name: 'Ivan', }, customer: { id1: 11, name: 'Hans', }, }, ]); }); test('insert with spaces', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: sql`'Jo h n'` }); const result = await db.select({ id1: usersTable.id1, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id1: 1, name: 'Jo h n' }]); }); test('prepared statement', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const statement = db .select({ name: usersTable.name, }) .from(usersTable) .prepare('statement1'); const result = await statement.execute(); expect(result).toEqual([{ name: 'John' }]); }); test('insert: placeholders on columns with encoder', async (ctx) => { const { db } = ctx.gel; const statement = db .insert(usersTable) .values({ id1: 1, name: 'John', json: sql.placeholder('json'), }) .prepare('encoder_statement'); await statement.execute({ json: ['foo', 'bar'] }); const result = await db .select({ json: usersTable.json, }) .from(usersTable); expect(result).toEqual([{ json: ['foo', 'bar'] }]); }); test('prepared statement reuse', async (ctx) => { const { db } = ctx.gel; const stmt = db .insert(usersTable) .values({ id1: sql.placeholder('id1'), verified: true, name: sql.placeholder('name'), }) .prepare('stmt2'); for (let i = 1; i < 11; i++) { await stmt.execute({ id1: i, name: `John ${i}` }); } const result = await db .select({ name: usersTable.name, verified: usersTable.verified, }) .from(usersTable); expect(result).toEqual([ { name: 'John 1', verified: true }, { name: 'John 2', verified: true }, { name: 'John 3', verified: true }, { name: 'John 4', verified: true }, { name: 'John 5', verified: true }, { name: 'John 6', verified: true }, { name: 'John 7', verified: true }, { name: 'John 8', verified: true }, { name: 'John 9', verified: true }, { name: 'John 10', verified: true }, ]); }); test('prepared statement with placeholder in .where', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const stmt = db .select({ id1: usersTable.id1, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id1, sql.placeholder('id1'))) .prepare('stmt3'); const result = await stmt.execute({ id1: 1 }); expect(result).toEqual([{ id1: 1, name: 'John' }]); }); test('prepared statement with placeholder in .limit', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const stmt = db .select({ id1: usersTable.id1, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id1, sql.placeholder('id1'))) .limit(sql.placeholder('limit')) .prepare('stmt_limit'); const result = await stmt.execute({ id1: 1, limit: 1 }); expect(result).toEqual([{ id1: 1, name: 'John' }]); expect(result).toHaveLength(1); }); test('prepared statement with placeholder in .offset', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'John1' }, ]); const stmt = db .select({ id1: usersTable.id1, name: usersTable.name, }) .from(usersTable) .offset(sql.placeholder('offset')) .prepare('stmt_offset'); const result = await stmt.execute({ offset: 1 }); expect(result).toEqual([{ id1: 2, name: 'John1' }]); }); test('prepared statement built using $dynamic', async (ctx) => { const { db } = ctx.gel; function withLimitOffset(qb: any) { return qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); } await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'John1' }, ]); const stmt = db .select({ id1: usersTable.id1, name: usersTable.name, }) .from(usersTable) .$dynamic(); withLimitOffset(stmt).prepare('stmt_limit'); const result = await stmt.execute({ limit: 1, offset: 1 }); expect(result).toEqual([{ id1: 2, name: 'John1' }]); expect(result).toHaveLength(1); }); test('Query check: Insert all defaults in 1 row', async (ctx) => { const { db } = ctx.gel; const users = gelTable('users', { id: integer('id'), name: text('name').default('Dan'), state: text('state'), }); const query = db.insert(users).values({}).toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "state") values (default, default, default)', params: [], }); }); test('Query check: Insert all defaults in multiple rows', async (ctx) => { const { db } = ctx.gel; const users = gelTable('users', { id: integer('id'), name: text('name').default('Dan'), state: text('state').default('UA'), }); const query = db.insert(users).values([{}, {}]).toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "state") values (default, default, default), (default, default, default)', params: [], }); }); test('Insert all defaults in 1 row', async (ctx) => { const { db } = ctx.gel; const users = gelTable('empty_insert_single', { id1: integer('id1'), name: text('name').default('Dan'), state: text('state'), }); await db.insert(users).values({}); const res = await db.select().from(users); expect(res).toEqual([{ id1: null, name: 'Dan', state: null }]); }); test('Insert all defaults in multiple rows', async (ctx) => { const { db } = ctx.gel; const users = gelTable('empty_insert_multiple', { id: integer('id'), name: text('name').default('Dan'), state: text('state'), }); await db.insert(users).values([{}, {}]); const res = await db.select().from(users); expect(res.map((it) => ({ ...it, id: undefined }))).toEqual([ { id: undefined, name: 'Dan', state: null }, { id: undefined, name: 'Dan', state: null }, ]); }); // TODO not supported in gel test.todo('build query insert with onConflict do update', async (ctx) => { const { db } = ctx.gel; const query = db .insert(usersTable) .values({ id1: 1, name: 'John', json: ['foo', 'bar'] }) // .onConflictDoUpdate({ target: usersTable.id1, set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id1", "name", "verified", "json", "created_at") values ($1, $2, default, $3, default) on conflict ("id1") do update set "name" = $4', params: [1, 'John', ['foo', 'bar'], 'John1'], }); }); // TODO on conflict not supported in gel test.todo('build query insert with onConflict do update / multiple columns', async (ctx) => { const { db } = ctx.gel; const query = db .insert(usersTable) .values({ id1: 1, name: 'John', json: ['foo', 'bar'] }) // .onConflictDoUpdate({ target: [usersTable.id1, usersTable.name], set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id1", "name", "verified", "json", "created_at") values ($1, $2, default, $3, default) on conflict ("id1","name") do update set "name" = $4', params: [1, 'John', ['foo', 'bar'], 'John1'], }); }); // TODO on conflict not supported in gel test.todo('build query insert with onConflict do nothing', async (ctx) => { const { db } = ctx.gel; const query = db .insert(usersTable) .values({ id1: 1, name: 'John', json: ['foo', 'bar'] }) // .onConflictDoNothing() .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id1", "name", "verified", "json", "created_at") values ($1, $2, default, 32, default) on conflict do nothing', params: [1, 'John', ['foo', 'bar']], }); }); // TODO on conflict not supported test.todo('build query insert with onConflict do nothing + target', async (ctx) => { const { db } = ctx.gel; const query = db .insert(usersTable) .values({ id1: 1, name: 'John', json: ['foo', 'bar'] }) // .onConflictDoNothing({ target: usersTable.id1 }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id1", "name", "verified", "json", "created_at") values ($1, $2, default, $3, default) on conflict ("id1") do nothing', params: [1, 'John', ['foo', 'bar']], }); }); // TODO on conflict not supported in gel test.todo('insert with onConflict do update', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); await db .insert(usersTable) .values({ id1: 1, name: 'John' }); // .onConflictDoUpdate({ target: usersTable.id1, set: { name: 'John1' } }); const res = await db.select({ id1: usersTable.id1, name: usersTable.name }).from(usersTable).where( eq(usersTable.id1, 1), ); expect(res).toEqual([{ id1: 1, name: 'John1' }]); }); // TODO on conflict does not supported test.todo('insert with onConflict do nothing', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); // await db.insert(usersTable).values({ id1: 1, name: 'John' }).onConflictDoNothing(); const res = await db.select({ id1: usersTable.id1, name: usersTable.name }).from(usersTable).where( eq(usersTable.id1, 1), ); expect(res).toEqual([{ id1: 1, name: 'John' }]); }); // TODO on conflict does not supported test.todo('insert with onConflict do nothing + target', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); // await db.insert(usersTable).values({ id1: 1, name: 'John' }).onConflictDoNothing({ // target: usersTable.id1, // }); const res = await db.select({ id1: usersTable.id1, name: usersTable.name }).from(usersTable).where( eq(usersTable.id1, 1), ); expect(res).toEqual([{ id1: 1, name: 'John' }]); }); test('left join (flat object fields)', async (ctx) => { const { db } = ctx.gel; const { id1: cityId } = await db .insert(citiesTable) .values([ { id1: 1, name: 'Paris', state: 'Unknown' }, { id1: 2, name: 'London', state: 'Unknown' }, ]) .returning({ id1: citiesTable.id1 }) .then((rows) => rows[0]!); await db.insert(users2Table).values([ { id1: 1, name: 'John', cityId }, { id1: 2, name: 'Jane', cityId }, ]); const res = await db .select({ userId: users2Table.id1, userName: users2Table.name, cityId: citiesTable.id1, cityName: citiesTable.name, }) .from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id1)); expect(res).toEqual([ { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, { userId: 2, userName: 'Jane', cityId, cityName: 'Paris' }, ]); }); test('left join (grouped fields)', async (ctx) => { const { db } = ctx.gel; const { id1: cityId } = await db .insert(citiesTable) .values([ { id1: 1, name: 'Paris' }, { id1: 2, name: 'London' }, ]) .returning({ id1: citiesTable.id1 }) .then((rows) => rows[0]!); await db.insert(users2Table).values([ { id1: 1, name: 'John', cityId }, { id1: 2, name: 'Jane', cityId }, ]); const res = await db .select({ id: users2Table.id1, user: { name: users2Table.name, nameUpper: sql`upper(${users2Table.name})`, }, city: { id: citiesTable.id1, name: citiesTable.name, nameUpper: sql`upper(${citiesTable.name})`, }, }) .from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id1)); expect(res).toEqual([ { id: 1, user: { name: 'John', nameUpper: 'JOHN' }, city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, }, { id: 2, user: { name: 'Jane', nameUpper: 'JANE' }, city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, }, ]); }); test('left join (all fields)', async (ctx) => { const { db } = ctx.gel; const { id1: cityId } = await db .insert(citiesTable) .values([ { id1: 1, name: 'Paris' }, { id1: 2, name: 'London' }, ]) .returning({ id1: citiesTable.id1 }) .then((rows) => rows[0]!); await db.insert(users2Table).values([ { id1: 1, name: 'John', cityId }, { id1: 2, name: 'Jane', cityId }, ]); const res = await db.select().from(users2Table).leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id1)); expect(res).toEqual([ { some_new_users: { id1: 1, name: 'John', cityId, }, cities: { id1: cityId, name: 'Paris', state: null, }, }, { some_new_users: { id1: 2, name: 'Jane', cityId, }, cities: { id1: cityId, name: 'Paris', state: null, }, }, ]); }); test('select from a many subquery', async (ctx) => { const { db } = ctx.gel; await db.insert(citiesTable) .values([{ id1: 1, name: 'Paris' }, { id1: 2, name: 'London' }]); await db.insert(users2Table).values([ { id1: 1, name: 'John', cityId: 1 }, { id1: 2, name: 'Jane', cityId: 2 }, { id1: 3, name: 'Jack', cityId: 2 }, ]); const res = await db.select({ population: db.select({ count: count().as('count') }).from(users2Table).where( eq(users2Table.cityId, citiesTable.id1), ).as( 'population', ), name: citiesTable.name, }).from(citiesTable); expectTypeOf(res).toEqualTypeOf<{ population: number; name: string; }[]>(); expect(res).toStrictEqual([{ population: 1, name: 'Paris', }, { population: 2, name: 'London', }]); }); test('select from a one subquery', async (ctx) => { const { db } = ctx.gel; await db.insert(citiesTable) .values([{ id1: 1, name: 'Paris' }, { id1: 2, name: 'London' }]); await db.insert(users2Table).values([ { id1: 1, name: 'John', cityId: 1 }, { id1: 2, name: 'Jane', cityId: 2 }, { id1: 3, name: 'Jack', cityId: 2 }, ]); const res = await db.select({ cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id1)) .as( 'cityName', ), name: users2Table.name, }).from(users2Table); expectTypeOf(res).toEqualTypeOf<{ cityName: string; name: string; }[]>(); expect(res).toStrictEqual([{ cityName: 'Paris', name: 'John', }, { cityName: 'London', name: 'Jane', }, { cityName: 'London', name: 'Jack', }]); }); test('join subquery', async (ctx) => { const { db } = ctx.gel; await db.insert(courseCategoriesTable).values([ { id1: 1, name: 'Category 1' }, { id1: 2, name: 'Category 2' }, { id1: 3, name: 'Category 3', }, { id1: 4, name: 'Category 4' }, ]); await db.insert(coursesTable).values([ { id1: 1, name: 'Development', categoryId: 2 }, { id1: 2, name: 'IT & Software', categoryId: 3 }, { id1: 3, name: 'Marketing', categoryId: 4 }, { id1: 4, name: 'Design', categoryId: 1 }, ]); const sq2 = db .select({ categoryId: courseCategoriesTable.id1, category: courseCategoriesTable.name, total: sql`count(${courseCategoriesTable.id1})`, }) .from(courseCategoriesTable) .groupBy(courseCategoriesTable.id1, courseCategoriesTable.name) .as('sq2'); const res = await db .select({ courseName: coursesTable.name, categoryId: sq2.categoryId, }) .from(coursesTable) .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) .orderBy(coursesTable.name); expect(res).toEqual([ { courseName: 'Design', categoryId: 1 }, { courseName: 'Development', categoryId: 2 }, { courseName: 'IT & Software', categoryId: 3 }, { courseName: 'Marketing', categoryId: 4 }, ]); }); test('with ... select', async (ctx) => { const { db } = ctx.gel; await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const regionalSales = db.$with('regional_sales').as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = db.$with('top_regions').as( db .select({ region: regionalSales.region, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result1 = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`sum(${orders.quantity})::int`, productSales: sql`sum(${orders.amount})::int`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product); const result2 = await db .with(regionalSales, topRegions) .selectDistinct({ region: orders.region, product: orders.product, productUnits: sql`sum(${orders.quantity})::int`, productSales: sql`sum(${orders.amount})::int`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product); const result3 = await db .with(regionalSales, topRegions) .selectDistinctOn([orders.region], { region: orders.region, productUnits: sql`sum(${orders.quantity})::int`, productSales: sql`sum(${orders.amount})::int`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region) .orderBy(orders.region); expect(result1).toEqual([ { region: 'Europe', product: 'A', productUnits: 3, productSales: 30, }, { region: 'Europe', product: 'B', productUnits: 5, productSales: 50, }, { region: 'US', product: 'A', productUnits: 7, productSales: 70, }, { region: 'US', product: 'B', productUnits: 9, productSales: 90, }, ]); expect(result2).toEqual(result1); expect(result3).toEqual([ { region: 'Europe', productUnits: 8, productSales: 80, }, { region: 'US', productUnits: 16, productSales: 160, }, ]); }); test('with ... update', async (ctx) => { const { db } = ctx.gel; const products = gelTable('products', { id1: integer('id1'), price: decimal('price').notNull(), cheap: boolean('cheap').notNull().default(false), }); await db.insert(products).values([ { id1: 1, price: '10.99' }, { id1: 2, price: '25.85' }, { id1: 3, price: '32.99' }, { id1: 4, price: '2.50' }, { id1: 5, price: '4.59' }, ]); const averagePrice = db.$with('average_price').as( db .select({ value: sql`avg(${products.price})`.as('value'), }) .from(products), ); const result = await db .with(averagePrice) .update(products) .set({ cheap: true, }) .where(lt(products.price, sql`(select * from ${averagePrice})`)) .returning({ id1: products.id1, }); expect(result).toEqual([{ id1: 1 }, { id1: 4 }, { id1: 5 }]); }); test('with ... insert', async (ctx) => { const { db } = ctx.gel; const users = gelTable('users_with_insert', { username: text('username').notNull(), admin: boolean('admin').notNull(), }); const userCount = db.$with('user_count').as( db .select({ value: sql`count(*)`.as('value'), }) .from(users), ); const result = await db .with(userCount) .insert(users) .values([{ username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }]) .returning({ admin: users.admin, }); expect(result).toEqual([{ admin: true }]); }); test('with ... delete', async (ctx) => { const { db } = ctx.gel; await db.insert(orders).values([ { id1: 1, region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { id1: 2, region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { id1: 3, region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { id1: 4, region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { id1: 5, region: 'US', product: 'A', amount: 30, quantity: 3 }, { id1: 6, region: 'US', product: 'A', amount: 40, quantity: 4 }, { id1: 7, region: 'US', product: 'B', amount: 40, quantity: 4 }, { id1: 8, region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const averageAmount = db.$with('average_amount').as( db .select({ value: sql`avg(${orders.amount})`.as('value'), }) .from(orders), ); const result = await db .with(averageAmount) .delete(orders) .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) .returning({ id1: orders.id1, }); expect(result).toEqual([{ id1: 6 }, { id1: 7 }, { id1: 8 }]); }); test('select from subquery sql', async (ctx) => { const { db } = ctx.gel; await db.insert(users3Table).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, ]); const sq = db .select({ name: sql`${users3Table.name} || ' modified'`.as('name') }) .from(users3Table) .as('sq'); const res = await db.select({ name: sq.name }).from(sq); expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); }); test('select a field without joining its table', (ctx) => { const { db } = ctx.gel; expect(() => db.select({ name: users3Table.name }).from(usersTable).prepare('query')).toThrowError(); }); test('select all fields from subquery without alias', (ctx) => { const { db } = ctx.gel; const sq = db.$with('sq').as(db.select({ name: sql`upper(${users3Table.name})` }).from(users3Table)); expect(() => db.select().from(sq).prepare('query')).toThrowError(); }); test('select count()', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, ]); const res = await db.select({ count: sql`count(*)` }).from(usersTable); expect(res).toEqual([{ count: 2 }]); }); test('select count w/ custom mapper', async (ctx) => { const { db } = ctx.gel; function count(value: GelColumn | SQLWrapper): SQL; function count(value: GelColumn | SQLWrapper, alias: string): SQL.Aliased; function count(value: GelColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { const result = sql`count(${value})`.mapWith(Number); if (!alias) { return result; } return result.as(alias); } await db.insert(usersTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, ]); const res = await db.select({ count: count(sql`*`) }).from(usersTable); expect(res).toEqual([{ count: 2 }]); }); test('array types', async (ctx) => { const { db } = ctx.gel; const values: (typeof salEmp.$inferSelect)[] = [ { name: 'John', payByQuarter: [10000, 10000, 10000, 10000], }, { name: 'Carol', payByQuarter: [20000, 25000, 25000, 25000], }, ]; await db.insert(salEmp).values(values); const res = await db.select().from(salEmp); expect(res.map((it) => ({ ...it, id: undefined }))).toEqual(values); }); test('select for ...', (ctx) => { const { db } = ctx.gel; { const query = db.select().from(users3Table).for('update').toSQL(); expect(query.sql).toMatch(/ for update$/); } { const query = db .select() .from(users2Table) .for('update', { of: [users3Table, coursesTable] }) .toSQL(); expect(query.sql).toMatch(/ for update of "users3", "courses"$/); } { const query = db.select().from(users3Table).for('no key update', { of: users3Table }).toSQL(); expect(query.sql).toMatch(/for no key update of "users3"$/); } { const query = db.select().from(users3Table).for('no key update', { of: users3Table, skipLocked: true }) .toSQL(); expect(query.sql).toMatch(/ for no key update of "users3" skip locked$/); } { const query = db.select().from(users3Table).for('share', { of: users3Table, noWait: true }).toSQL(); expect(query.sql).toMatch(/for share of "users3" nowait$/); } }); // TODO // column "rel~1.0e3b7152-d977-11ef-a173-530b4c6088b1" must appear in the GROUP BY test.todo('having', async (ctx) => { const { db } = ctx.gel; await db.insert(citiesTable).values([ { id1: 1, name: 'London' }, { id1: 2, name: 'Paris' }, { id1: 3, name: 'New York', }, ]); await db.insert(users2Table).values([ { id1: 1, name: 'John', cityId: 1 }, { id1: 2, name: 'Jane', cityId: 1 }, { id1: 3, name: 'Jack', cityId: 2, }, ]); const result = await db .select({ id1: citiesTable.id1, name: sql`upper(${citiesTable.name})`.as('upper_name'), usersCount: sql`count(${users2Table.id1})::int`.as('users_count'), }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id1)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(citiesTable.id1) .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name); expect(result).toEqual([ { id1: 1, name: 'LONDON', usersCount: 2, }, { id1: 2, name: 'PARIS', usersCount: 1, }, ]); }); test('select from raw sql', async (ctx) => { const { db } = ctx.gel; const result = await db .select({ id: sql`id`, name: sql`name`, }) .from(sql`(select 1 as id, 'John' as name) as users`); Expect>; expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('select from raw sql with joins', async (ctx) => { const { db } = ctx.gel; const result = await db .select({ id: sql`users.id`, name: sql`users.name`, userCity: sql`users.city`, cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); Expect>; expect(result).toEqual([{ id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }]); }); test('join on aliased sql from select', async (ctx) => { const { db } = ctx.gel; const result = await db .select({ userId: sql`users.id`.as('userId'), name: sql`users.name`, userCity: sql`users.city`, cityId: sql`cities.id`.as('cityId'), cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([{ userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }]); }); test('join on aliased sql from with clause', async (ctx) => { const { db } = ctx.gel; const users = db.$with('users').as( db .select({ id: sql`id`.as('userId'), name: sql`name`.as('userName'), city: sql`city`.as('city'), }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`), ); const cities = db.$with('cities').as( db .select({ id: sql`id`.as('cityId'), name: sql`name`.as('cityName'), }) .from(sql`(select 1 as id, 'Paris' as name) as cities`), ); const result = await db .with(users, cities) .select({ userId: users.id, name: users.name, userCity: users.city, cityId: cities.id, cityName: cities.name, }) .from(users) .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([{ userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }]); }); test('prefixed table', async (ctx) => { const { db } = ctx.gel; const gelTable = gelTableCreator((name) => `myprefix_${name}`); const users = gelTable('test_prefixed_table_with_unique_name', { id1: integer('id1').primaryKey(), name: text('name').notNull(), }); await db.insert(users).values({ id1: 1, name: 'John' }); const result = await db.select().from(users); expect(result.map((it) => ({ ...it, id: undefined }))).toEqual([{ id1: 1, name: 'John' }]); }); test('all date and time columns', async (ctx) => { const { db } = ctx.gel; const table = gelTable('dates_column', { datetimeColumn: timestamptz().notNull(), local_datetimeColumn: timestamp().notNull(), local_dateColumn: localDate().notNull(), local_timeColumn: localTime().notNull(), durationColumn: duration().notNull(), relative_durationColumn: relDuration().notNull(), dateDurationColumn: dateDuration().notNull(), }); await db.insert(table).values({ datetimeColumn: new Date('2022-01-01T00:00:00.123Z'), local_datetimeColumn: new LocalDateTime(2014, 2, 1, 4, 1, 6, 2, 0, 0), local_dateColumn: new LocalDate(2013, 2, 1), local_timeColumn: new LocalTime(12, 42, 2, 3, 1, 0), durationColumn: new Duration(0, 0, 0, 0, 12, 3, 0, 0, 1, 3), relative_durationColumn: new RelativeDuration(2014, 2, 1, 4, 1, 6, 2, 0, 0), dateDurationColumn: new DateDuration(2032, 2, 1, 5), }); const result = await db.select().from(table); Expect< Equal< { datetimeColumn: Date; local_datetimeColumn: LocalDateTime; local_dateColumn: LocalDate; local_timeColumn: LocalTime; durationColumn: Duration; relative_durationColumn: RelativeDuration; dateDurationColumn: DateDuration; }[], typeof result > >; Expect< Equal< { datetimeColumn: Date; local_datetimeColumn: LocalDateTime; local_dateColumn: LocalDate; local_timeColumn: LocalTime; durationColumn: Duration; relative_durationColumn: RelativeDuration; dateDurationColumn: DateDuration; }, typeof table.$inferInsert > >; }); test('orderBy with aliased column', (ctx) => { const { db } = ctx.gel; const query = db .select({ test: sql`something`.as('test'), }) .from(users3Table) .orderBy((fields) => fields.test) .toSQL(); expect(query.sql).toBe('select something as "test" from "users3" order by "test"'); }); test('select from sql', async (ctx) => { const { db } = ctx.gel; const metricEntry = gelTable('metric_entry', { id1: gelUuid('id1').notNull(), createdAt: timestamptz('created_at').notNull(), }); const metricId = uuidV4(); const intervals = db.$with('intervals').as( db .select({ startTime: sql`(date'2023-03-01'+ x * '1 day'::interval)`.as('start_time'), endTime: sql`(date'2023-03-01'+ (x+1) *'1 day'::interval)`.as('end_time'), }) .from(sql`generate_series(0, 29, 1) as t(x)`), ); const func = () => db .with(intervals) .select({ startTime: intervals.startTime, endTime: intervals.endTime, count: sql`count(${metricEntry})`, }) .from(metricEntry) .rightJoin( intervals, and( eq(metricEntry.id1, metricId), gte(metricEntry.createdAt, intervals.startTime), lt(metricEntry.createdAt, intervals.endTime), ), ) .groupBy(intervals.startTime, intervals.endTime) .orderBy(asc(intervals.startTime)); await expect( (async () => { func(); })(), ).resolves.not.toThrowError(); }); test('transaction', async (ctx) => { const { db } = ctx.gel; const users = gelTable('users_transactions', { id1: integer('id1').notNull(), balance: integer('balance').notNull(), }); const products = gelTable('products_transactions', { id1: integer('id1').notNull(), price: integer('price').notNull(), stock: integer('stock').notNull(), }); const user = await db .insert(users) .values({ id1: 1, balance: 100 }) .returning() .then((rows) => rows[0]!); const product = await db .insert(products) .values({ id1: 1, price: 10, stock: 10 }) .returning() .then((rows) => rows[0]!); await db.transaction(async (tx) => { await tx .update(users) .set({ balance: user.balance - product.price }) .where(eq(users.id1, user.id1)); await tx .update(products) .set({ stock: product.stock - 1 }) .where(eq(products.id1, product.id1)); }); const result = await db.select().from(users); expect(result).toEqual([{ id1: 1, balance: 90 }]); }); test('transaction rollback', async (ctx) => { const { db } = ctx.gel; const users = gelTable('users_transactions_rollback', { id1: integer('id1').notNull(), balance: integer('balance').notNull(), }); await expect( (async () => { await db.transaction(async (tx) => { await tx.insert(users).values({ id1: 1, balance: 100 }); tx.rollback(); }); })(), ).rejects.toThrowError(Error); const result = await db.select().from(users); expect(result).toEqual([]); }); test('join subquery with join', async (ctx) => { const { db } = ctx.gel; const internalStaff = gelTable('internal_staff', { userId: integer('userId').notNull(), }); const customUser = gelTable('custom_user', { id1: integer('id1').notNull(), }); const ticket = gelTable('ticket', { staffId: integer('staffId').notNull(), }); await db.insert(internalStaff).values({ userId: 1 }); await db.insert(customUser).values({ id1: 1 }); await db.insert(ticket).values({ staffId: 1 }); const subq = db.select().from(internalStaff).leftJoin(customUser, eq(internalStaff.userId, customUser.id1)).as( 'internal_staff', ); const mainQuery = await db.select().from(ticket).leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); expect(mainQuery).toEqual([ { ticket: { staffId: 1 }, internal_staff: { internal_staff: { userId: 1 }, custom_user: { id1: 1 }, }, }, ]); }); test('table selection with single table', async (ctx) => { const { db } = ctx.gel; const users = gelTable('users_with_cities', { id1: integer('id1').notNull(), name: text('name').notNull(), cityId: integer('cityId').notNull(), }); await db.insert(users).values({ id1: 1, name: 'John', cityId: 1 }); const result = await db.select({ users }).from(users); expect(result).toEqual([{ users: { id1: 1, name: 'John', cityId: 1 } }]); }); test('set null to json field', async (ctx) => { const { db } = ctx.gel; const result = await db.insert(usersTable).values({ id1: 1, name: 'Alex', json: null }).returning(); expect(result.map((it) => ({ ...it, verified: undefined, createdAt: undefined }))).toEqual([ { id1: 1, name: 'Alex', json: null, verified: undefined, createdAt: undefined, }, ]); }); test('insert undefined', async (ctx) => { const { db } = ctx.gel; const users = gelTable('users_with_undefined', { id1: integer('id1').notNull(), name: text('name'), }); await expect( (async () => { await db.insert(users).values({ id1: 1, name: undefined }); })(), ).resolves.not.toThrowError(); }); test('update undefined', async (ctx) => { const { db } = ctx.gel; const users = gelTable('users', { id1: integer('id1').notNull(), name: text('name'), }); await expect( (async () => { await db.update(users).set({ name: undefined }); })(), ).rejects.toThrowError(); await expect( (async () => { db.update(users).set({ name: undefined }); })(), ).rejects.toThrowError(); }); test('array operators', async (ctx) => { const { db } = ctx.gel; const posts = gelTable('posts', { id1: integer('id1').notNull(), tags: text('tags').array(), }); await db.insert(posts).values([ { id1: 1, tags: ['ORM'], }, { id1: 2, tags: ['Typescript'], }, { id1: 3, tags: ['Typescript', 'ORM'], }, { id1: 4, tags: ['Typescript', 'Frontend', 'React'] }, { id1: 5, tags: ['Typescript', 'ORM', 'Database', 'Postgres'], }, { id1: 6, tags: ['Java', 'Spring', 'OOP'], }, ]); const contains = await db .select({ id1: posts.id1 }) .from(posts) .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); const contained = await db .select({ id1: posts.id1 }) .from(posts) .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); const overlaps = await db .select({ id1: posts.id1 }) .from(posts) .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); const withSubQuery = await db .select({ id1: posts.id1 }) .from(posts) .where(arrayContains(posts.tags, db.select({ tags: posts.tags }).from(posts).where(eq(posts.id1, 1)))); expect(contains).toEqual([{ id1: 3 }, { id1: 5 }]); expect(contained).toEqual([{ id1: 1 }, { id1: 2 }, { id1: 3 }]); expect(overlaps).toEqual([{ id1: 1 }, { id1: 2 }, { id1: 3 }, { id1: 4 }, { id1: 5 }]); expect(withSubQuery).toEqual([{ id1: 1 }, { id1: 3 }, { id1: 5 }]); }); test('set operations (union) from query builder with subquery', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const sq = db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).as('sq'); const result = await db .select({ id: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).union( db.select().from(sq), ).orderBy(asc(sql`name`)).limit(2).offset(1); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 3, name: 'Jack' }, { id: 2, name: 'Jane' }, ]); await expect((async () => { db .select({ id: cities2Table.id1, name: citiesTable.name, name2: users2Table.name }) .from(cities2Table).union( // @ts-expect-error db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table), ).orderBy(asc(sql`name`)); })()).rejects.toThrowError(); }); test('set operations (union) as function', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await union( db .select({ id: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), ).orderBy(asc(sql`name`)).limit(1).offset(1); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); await expect((async () => { union( db .select({ name: citiesTable.name, id: cities2Table.id1 }) .from(cities2Table).where(eq(citiesTable.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), ).orderBy(asc(sql`name`)); })()).rejects.toThrowError(); }); test('set operations (union all) from query builder', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await db .select({ id1: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).limit(2).unionAll( db .select({ id1: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id1`)); expect(result).toHaveLength(4); expect(result).toEqual([ { id1: 1, name: 'New York' }, { id1: 1, name: 'New York' }, { id1: 2, name: 'London' }, { id1: 2, name: 'London' }, ]); await expect((async () => { db .select({ id1: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).limit(2).unionAll( db .select({ name: citiesTable.name, id1: cities2Table.id1 }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id1`)); })()).rejects.toThrowError(); }); test('set operations (union all) as function', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await unionAll( db .select({ id: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), ); expect(result).toHaveLength(3); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 1, name: 'John' }, { id: 1, name: 'John' }, ]); await expect((async () => { unionAll( db .select({ id: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id1, 1)), db .select({ name: users2Table.name, id: users2Table.id1 }) .from(users2Table).where(eq(users2Table.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), ); })()).rejects.toThrowError(); }); test('set operations (intersect) from query builder', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await db .select({ id: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).intersect( db .select({ id: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).where(gt(citiesTable.id1, 1)), ).orderBy(asc(sql`name`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { db .select({ id: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).intersect( // @ts-expect-error db .select({ id: cities2Table.id1, name: citiesTable.name, id2: cities2Table.id1 }) .from(cities2Table).where(gt(citiesTable.id1, 1)), ).orderBy(asc(sql`name`)); })()).rejects.toThrowError(); }); test('set operations (intersect) as function', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await intersect( db .select({ id: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), ); expect(result).toHaveLength(0); expect(result).toEqual([]); await expect((async () => { intersect( db .select({ id: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), db .select({ name: users2Table.name, id: users2Table.id1 }) .from(users2Table).where(eq(users2Table.id1, 1)), ); })()).rejects.toThrowError(); }); test('set operations (intersect all) from query builder', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await db .select({ id1: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).limit(2).intersectAll( db .select({ id1: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id1`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id1: 1, name: 'New York' }, { id1: 2, name: 'London' }, ]); await expect((async () => { db .select({ id: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).limit(2).intersectAll( db .select({ name: users2Table.name, id: users2Table.id1 }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (intersect all) as function', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await intersectAll( db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'John' }, ]); await expect((async () => { intersectAll( db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), db .select({ name: users2Table.name, id: users2Table.id1 }) .from(users2Table).where(eq(users2Table.id1, 1)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), ); })()).rejects.toThrowError(); }); test('set operations (except) from query builder', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await db .select() .from(cities2Table).except( db .select() .from(cities2Table).where(gt(citiesTable.id1, 1)), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id1: 1, name: 'New York' }, ]); await expect((async () => { db .select() .from(cities2Table).except( db .select({ name: users2Table.name, id1: users2Table.id1 }) .from(cities2Table).where(gt(citiesTable.id1, 1)), ); })()).rejects.toThrowError(); }); test('set operations (except) as function', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await except( db .select({ id1: cities2Table.id1, name: citiesTable.name }) .from(cities2Table), db .select({ id1: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id1, 1)), db .select({ id1: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), ).orderBy(asc(sql`id1`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id1: 2, name: 'London' }, { id1: 3, name: 'Tampa' }, ]); await expect((async () => { except( db .select({ id1: cities2Table.id1, name: citiesTable.name }) .from(cities2Table), db .select({ name: users2Table.name, id1: users2Table.id1 }) .from(cities2Table).where(eq(citiesTable.id1, 1)), db .select({ id1: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), ).orderBy(asc(sql`id1`)); })()).rejects.toThrowError(); }); test('set operations (except all) from query builder', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await db .select() .from(cities2Table).exceptAll( db .select({ id1: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id1, 1)), ).orderBy(asc(sql`id1`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id1: 2, name: 'London' }, { id1: 3, name: 'Tampa' }, ]); await expect((async () => { db .select({ name: cities2Table.name, id1: cities2Table.id1 }) .from(cities2Table).exceptAll( db .select({ id1: cities2Table.id1, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id1, 1)), ).orderBy(asc(sql`id1`)); })()).rejects.toThrowError(); }); test('set operations (except all) as function', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await exceptAll( db .select({ id1: users2Table.id1, name: users2Table.name }) .from(users2Table), db .select({ id1: users2Table.id1, name: users2Table.name }) .from(users2Table).where(gt(users2Table.id1, 7)), db .select({ id1: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), ).orderBy(asc(sql`id1`)).limit(5).offset(2); expect(result).toHaveLength(4); expect(result).toEqual([ { id1: 4, name: 'Peter' }, { id1: 5, name: 'Ben' }, { id1: 6, name: 'Jill' }, { id1: 7, name: 'Mary' }, ]); await expect((async () => { exceptAll( db .select({ name: users2Table.name, id: users2Table.id1 }) .from(users2Table), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(gt(users2Table.id1, 7)), db .select({ id: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (mixed) from query builder with subquery', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const sq = db .select() .from(cities2Table).where(gt(citiesTable.id1, 1)).as('sq'); const result = await db .select() .from(cities2Table).except( ({ unionAll }) => unionAll( db.select().from(sq), db.select().from(cities2Table).where(eq(citiesTable.id1, 2)), ), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id1: 1, name: 'New York' }, ]); await expect((async () => { db .select() .from(cities2Table).except( ({ unionAll }) => unionAll( db .select({ name: cities2Table.name, id1: cities2Table.id1 }) .from(cities2Table).where(gt(citiesTable.id1, 1)), db.select().from(cities2Table).where(eq(citiesTable.id1, 2)), ), ); })()).rejects.toThrowError(); }); test('set operations (mixed all) as function', async (ctx) => { const { db } = ctx.gel; await setupSetOperationTest(db); const result = await union( db .select({ id1: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), except( db .select({ id1: users2Table.id1, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id1, 5)), db .select({ id1: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 7)), ), db .select().from(cities2Table).where(gt(citiesTable.id1, 1)), ).orderBy(asc(sql`id1`)); expect(result).toHaveLength(6); expect(result).toEqual([ { id1: 1, name: 'John' }, { id1: 2, name: 'London' }, { id1: 3, name: 'Tampa' }, { id1: 5, name: 'Ben' }, { id1: 6, name: 'Jill' }, { id1: 8, name: 'Sally' }, ]); await expect((async () => { union( db .select({ id1: users2Table.id1, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id1, 1)), except( db .select({ id1: users2Table.id1, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id1, 5)), db .select({ name: users2Table.name, id1: users2Table.id1 }) .from(users2Table).where(eq(users2Table.id1, 7)), ), db .select().from(cities2Table).where(gt(citiesTable.id1, 1)), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('aggregate function: count', async (ctx) => { const { db } = ctx.gel; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: count() }).from(table); const result2 = await db.select({ value: count(table.a) }).from(table); const result3 = await db.select({ value: countDistinct(table.name) }).from(table); expect(result1[0]?.value).toBe(7); expect(result2[0]?.value).toBe(5); expect(result3[0]?.value).toBe(6); }); test('aggregate function: avg', async (ctx) => { const { db } = ctx.gel; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: avg(table.b) }).from(table); const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('33.3333333333333333'); expect(result2[0]?.value).toBeNull(); expect(result3[0]?.value).toBe('42.5000000000000000'); }); test('aggregate function: sum', async (ctx) => { const { db } = ctx.gel; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: sum(table.b) }).from(table); const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('200'); expect(result2[0]?.value).toBeNull(); expect(result3[0]?.value).toBe('170'); }); test('aggregate function: max', async (ctx) => { const { db } = ctx.gel; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: max(table.b) }).from(table); const result2 = await db.select({ value: max(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(90); expect(result2[0]?.value).toBeNull(); }); test('aggregate function: min', async (ctx) => { const { db } = ctx.gel; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: min(table.b) }).from(table); const result2 = await db.select({ value: min(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(10); expect(result2[0]?.value).toBeNull(); }); test('array mapping and parsing', async (ctx) => { const { db } = ctx.gel; const arrays = gelTable('arrays_tests', { id1: integer('id1').notNull(), tags: text('tags').array(), numbers: integer('numbers').notNull().array(), }); await db.insert(arrays).values({ id1: 1, tags: ['', 'b', 'c'], numbers: [1, 2, 3], }); const result = await db.select().from(arrays); expect(result).toEqual([ { id1: 1, tags: ['', 'b', 'c'], numbers: [1, 2, 3], }, ]); }); test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { const { db } = ctx.gel; await db.insert(usersOnUpdate).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jack', }, { id1: 4, name: 'Jill' }, ]); // const { updatedAt, ..._ } = getTableColumns(usersOnUpdate); // const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id1)); const response = await db.select(getTableColumns(usersOnUpdate)).from(usersOnUpdate).orderBy( asc(usersOnUpdate.id1), ); expect(response.map((it) => ({ ...it, updatedAt: undefined }))).toEqual([ { name: 'John', id1: 1, updateCounter: 1, alwaysNull: null, updatedAt: undefined }, { name: 'Jane', id1: 2, updateCounter: 1, alwaysNull: null, updatedAt: undefined }, { name: 'Jack', id1: 3, updateCounter: 1, alwaysNull: null, updatedAt: undefined }, { name: 'Jill', id1: 4, updateCounter: 1, alwaysNull: null, updatedAt: undefined }, ]); // const msDelay = 250; // for (const eachUser of justDates) { // expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); // } }); test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { const { db } = ctx.gel; await db.insert(usersOnUpdate).values([ { id1: 1, name: 'John', alwaysNull: 'this will be null after updating' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jack' }, { id1: 4, name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id1)); await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id1, 1)); await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id1, 2)); // const justDates = await db.select({ updatedAt: usersOnUpdate.updatedAt }).from(usersOnUpdate).orderBy( // asc(usersOnUpdate.id1), // ); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy( asc(usersOnUpdate.id1), ); expect(response).toEqual([ { name: 'Angel', id1: 1, updateCounter: 2, alwaysNull: null }, { name: 'Jane', id1: 2, updateCounter: null, alwaysNull: null }, { name: 'Jack', id1: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id1: 4, updateCounter: 1, alwaysNull: null }, ]); // const msDelay = 500; // for (const eachUser of justDates) { // expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); // } }); test('test if method with sql operators', async (ctx) => { const { db } = ctx.gel; const users = gelTable('users_with_age', { id1: integer('id1').notNull(), name: text('name').notNull(), age: integer('age').notNull(), city: text('city').notNull(), }); await db.insert(users).values([ { id1: 1, name: 'John', age: 20, city: 'New York' }, { id1: 2, name: 'Alice', age: 21, city: 'New York' }, { id1: 3, name: 'Nick', age: 22, city: 'London' }, { id1: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition1 = true; const [result1] = await db.select().from(users).where(eq(users.id1, 1).if(condition1)); expect({ ...result1, id: undefined }).toEqual({ id1: 1, name: 'John', age: 20, city: 'New York', }); const condition2 = 1; const [result2] = await db .select() .from(users) .where(sql`${users.id1} = 1`.if(condition2)); expect({ ...result2, id: undefined }).toEqual({ id1: 1, name: 'John', age: 20, city: 'New York' }); const condition3 = 'non-empty string'; const result3 = await db .select() .from(users) .where(or(eq(users.id1, 1).if(condition3), eq(users.id1, 2).if(condition3))); expect(result3.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 1, name: 'John', age: 20, city: 'New York' }, { id1: 2, name: 'Alice', age: 21, city: 'New York', }, ]); const condtition4 = false; const result4 = await db.select().from(users).where(eq(users.id1, 1).if(condtition4)); expect(result4.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 1, name: 'John', age: 20, city: 'New York' }, { id1: 2, name: 'Alice', age: 21, city: 'New York' }, { id1: 3, name: 'Nick', age: 22, city: 'London' }, { id1: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition5 = undefined; const result5 = await db .select() .from(users) .where(sql`${users.id1} = 1`.if(condition5)); expect(result5.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 1, name: 'John', age: 20, city: 'New York' }, { id1: 2, name: 'Alice', age: 21, city: 'New York' }, { id1: 3, name: 'Nick', age: 22, city: 'London' }, { id1: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition6 = null; const result6 = await db .select() .from(users) .where(or(eq(users.id1, 1).if(condition6), eq(users.id1, 2).if(condition6))); expect(result6.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 1, name: 'John', age: 20, city: 'New York' }, { id1: 2, name: 'Alice', age: 21, city: 'New York' }, { id1: 3, name: 'Nick', age: 22, city: 'London' }, { id1: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition7 = { term1: 0, term2: 1, }; const result7 = await db .select() .from(users) .where(and(gt(users.age, 20).if(condition7.term1), eq(users.city, 'New York').if(condition7.term2))); expect(result7.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 1, name: 'John', age: 20, city: 'New York' }, { id1: 2, name: 'Alice', age: 21, city: 'New York' }, ]); const condition8 = { term1: '', term2: 'non-empty string', }; const result8 = await db .select() .from(users) .where(or(lt(users.age, 21).if(condition8.term1), eq(users.city, 'London').if(condition8.term2))); expect(result8.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 3, name: 'Nick', age: 22, city: 'London' }, { id1: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition9 = { term1: 1, term2: true, }; const result9 = await db .select() .from(users) .where( and( inArray(users.city, ['New York', 'London']).if(condition9.term1), ilike(users.name, 'a%').if(condition9.term2), ), ); expect(result9.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 2, name: 'Alice', age: 21, city: 'New York', }, ]); const condition10 = { term1: 4, term2: 19, }; const result10 = await db .select() .from(users) .where( and( sql`length(${users.name}) <= ${condition10.term1}`.if(condition10.term1), gt(users.age, condition10.term2).if(condition10.term2 > 20), ), ); expect(result10.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 1, name: 'John', age: 20, city: 'New York' }, { id1: 3, name: 'Nick', age: 22, city: 'London' }, { id1: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition11 = true; const result11 = await db .select() .from(users) .where(or(eq(users.city, 'New York'), gte(users.age, 22))!.if(condition11)); expect(result11.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 1, name: 'John', age: 20, city: 'New York' }, { id1: 2, name: 'Alice', age: 21, city: 'New York' }, { id1: 3, name: 'Nick', age: 22, city: 'London' }, { id1: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition12 = false; const result12 = await db .select() .from(users) .where(and(eq(users.city, 'London'), gte(users.age, 23))!.if(condition12)); expect(result12.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 1, name: 'John', age: 20, city: 'New York' }, { id1: 2, name: 'Alice', age: 21, city: 'New York' }, { id1: 3, name: 'Nick', age: 22, city: 'London' }, { id1: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition13 = true; const result13 = await db .select() .from(users) .where(sql`(city = 'New York' or age >= 22)`.if(condition13)); expect(result13.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 1, name: 'John', age: 20, city: 'New York' }, { id1: 2, name: 'Alice', age: 21, city: 'New York' }, { id1: 3, name: 'Nick', age: 22, city: 'London' }, { id1: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition14 = false; const result14 = await db .select() .from(users) .where(sql`(city = 'London' and age >= 23)`.if(condition14)); expect(result14.map((it) => ({ ...it, id: undefined }))).toEqual([ { id1: 1, name: 'John', age: 20, city: 'New York' }, { id1: 2, name: 'Alice', age: 21, city: 'New York' }, { id1: 3, name: 'Nick', age: 22, city: 'London' }, { id1: 4, name: 'Lina', age: 23, city: 'London' }, ]); }); // MySchema tests test('mySchema :: select all fields', async (ctx) => { const { db } = ctx.gel; const now = Date.now(); await db.insert(usersMySchemaTable).values({ id1: 1, name: 'John' }); const result = await db.select().from(usersMySchemaTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(500); expect(result).toEqual([ { id1: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt, }, ]); }); test('mySchema :: select sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values({ id1: 1, name: 'John' }); const users = await db .select({ name: sql`upper(${usersMySchemaTable.name})`, }) .from(usersMySchemaTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: select typed sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values({ id1: 1, name: 'John' }); const users = await db .select({ name: sql`upper(${usersMySchemaTable.name})`, }) .from(usersMySchemaTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: select distinct', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values([ { id1: 1, name: 'John' }, { id1: 1, name: 'John' }, { id1: 2, name: 'John' }, { id1: 1, name: 'Jane' }, ]); const users1 = await db.selectDistinct().from(usersMySchemaTable).orderBy( usersMySchemaTable.id1, usersMySchemaTable.name, ); const users2 = await db.selectDistinctOn([usersMySchemaTable.id1]).from(usersMySchemaTable).orderBy( usersMySchemaTable.id1, ); const users3 = await db.selectDistinctOn([usersMySchemaTable.name], { name: usersMySchemaTable.name }).from( usersMySchemaTable, ).orderBy(usersMySchemaTable.name); expect(users1.map((it) => ({ ...it, id: undefined, createdAt: undefined }))).toEqual([ { id1: 1, name: 'Jane', id: undefined, verified: false, json: null, createdAt: undefined }, { id1: 1, name: 'John', id: undefined, verified: false, json: null, createdAt: undefined }, { id1: 2, name: 'John', id: undefined, verified: false, json: null, createdAt: undefined }, ]); expect(users2).toHaveLength(2); expect(users2[0]?.id1).toBe(1); expect(users2[1]?.id1).toBe(2); expect(users3).toHaveLength(2); expect(users3[0]?.name).toBe('Jane'); expect(users3[1]?.name).toBe('John'); }); test('mySchema :: insert returning sql', async (ctx) => { const { db } = ctx.gel; const users = await db .insert(usersMySchemaTable) .values({ id1: 1, name: 'John' }) .returning({ name: sql`upper(${usersMySchemaTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: delete returning sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values({ id1: 1, name: 'John' }); const users = await db .delete(usersMySchemaTable) .where(eq(usersMySchemaTable.name, 'John')) .returning({ name: sql`upper(${usersMySchemaTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: update with returning partial', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values({ id1: 1, name: 'John' }); const users = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where( eq(usersMySchemaTable.name, 'John'), ) .returning({ id1: usersMySchemaTable.id1, name: usersMySchemaTable.name, }); expect(users).toEqual([{ id1: 1, name: 'Jane' }]); }); test('mySchema :: delete with returning all fields', async (ctx) => { const { db } = ctx.gel; const now = Date.now(); await db.insert(usersMySchemaTable).values({ id1: 1, name: 'John' }); const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(500); expect(users).toEqual([ { id1: 1, name: 'John', verified: false, json: null, createdAt: users[0]!.createdAt, }, ]); }); test('mySchema :: insert + select', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values({ id1: 1, name: 'John' }); const result = await db.select().from(usersMySchemaTable); expect(result).toEqual([ { id1: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt, }, ]); await db.insert(usersMySchemaTable).values({ id1: 2, name: 'Jane' }); const result2 = await db.select().from(usersMySchemaTable); expect(result2).toEqual([ { id1: 1, name: 'John', verified: false, json: null, createdAt: result2[0]!.createdAt }, { id1: 2, name: 'Jane', verified: false, json: null, createdAt: result2[1]!.createdAt }, ]); }); test('mySchema :: insert with overridden default values', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values({ id1: 1, name: 'John', verified: true }); const result = await db.select().from(usersMySchemaTable); expect(result).toEqual([ { id1: 1, name: 'John', verified: true, json: null, createdAt: result[0]!.createdAt, }, ]); }); test('mySchema :: insert many', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Bruce', json: ['foo', 'bar'], }, { id1: 3, name: 'Jane', }, { id1: 4, name: 'Austin', verified: true }, ]); const result = await db .select({ id1: usersMySchemaTable.id1, name: usersMySchemaTable.name, json: usersMySchemaTable.json, verified: usersMySchemaTable.verified, }) .from(usersMySchemaTable); expect(result).toEqual([ { id1: 1, name: 'John', json: null, verified: false }, { id1: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, { id1: 3, name: 'Jane', json: null, verified: false }, { id1: 4, name: 'Austin', json: null, verified: true }, ]); }); test('mySchema :: select with group by as field', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane', }, ]); const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable).groupBy( usersMySchemaTable.name, ); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('mySchema :: select with group by as column + sql', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Jane', }, ]); const result = await db .select({ name: usersMySchemaTable.name }) .from(usersMySchemaTable) .groupBy(usersMySchemaTable.id1, sql`${usersMySchemaTable.name}`); expect(result.sort((a, b) => a.name.localeCompare(b.name))).toEqual([ { name: 'Jane' }, { name: 'Jane' }, { name: 'John', }, ]); }); test('mySchema :: build query', async (ctx) => { const { db } = ctx.gel; const query = db.select({ id1: usersMySchemaTable.id1, name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.id1, usersMySchemaTable.name).toSQL(); expect(query).toEqual({ sql: 'select "mySchema"."users"."id1", "mySchema"."users"."name" from "mySchema"."users" group by "mySchema"."users"."id1", "mySchema"."users"."name"', params: [], }); }); test('mySchema :: partial join with alias', async (ctx) => { const { db } = ctx.gel; const customerAlias = alias(usersMySchemaTable, 'customer'); await db.insert(usersMySchemaTable).values([ { id1: 10, name: 'Ivan' }, { id1: 11, name: 'Hans' }, ]); const result = await db .select({ user: { id1: usersMySchemaTable.id1, name: usersMySchemaTable.name, }, customer: { id1: customerAlias.id1, name: customerAlias.name, }, }) .from(usersMySchemaTable) .leftJoin(customerAlias, eq(customerAlias.id1, 11)) .where(eq(usersMySchemaTable.id1, 10)); expect(result).toEqual([ { user: { id1: 10, name: 'Ivan' }, customer: { id1: 11, name: 'Hans' }, }, ]); }); test('mySchema :: insert with spaces', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values({ id1: 1, name: sql`'Jo h n'` }); const result = await db.select({ id1: usersMySchemaTable.id1, name: usersMySchemaTable.name }).from( usersMySchemaTable, ); expect(result).toEqual([{ id1: 1, name: 'Jo h n' }]); }); test('mySchema :: prepared statement with placeholder in .limit', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values({ id1: 1, name: 'John' }); const stmt = db .select({ id1: usersMySchemaTable.id1, name: usersMySchemaTable.name, }) .from(usersMySchemaTable) .where(eq(usersMySchemaTable.id1, sql.placeholder('id1'))) .limit(sql.placeholder('limit')) .prepare('mySchema_stmt_limit'); const result = await stmt.execute({ id1: 1, limit: 1 }); expect(result.map((it) => ({ ...it, id: undefined }))).toEqual([{ id1: 1, name: 'John' }]); expect(result).toHaveLength(1); }); // TODO on conflict does not supported in gel test.todo('mySchema :: build query insert with onConflict do update / multiple columns', async (ctx) => { const { db } = ctx.gel; const query = db .insert(usersMySchemaTable) .values({ id1: 1, name: 'John', json: ['foo', 'bar'] }) // .onConflictDoUpdate({ target: [usersMySchemaTable.id1, usersMySchemaTable.name], set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "mySchema"."users" ("id1", "name", "verified", "json", "created_at") values ($1, $2, default, $3, default) on conflict ("id1","name") do update set "name" = $4', params: [1, 'John', ['foo', 'bar'], 'John1'], }); }); // TODO on conflict not supported in gel test.todo('mySchema :: build query insert with onConflict do nothing + target', async (ctx) => { const { db } = ctx.gel; const query = db .insert(usersMySchemaTable) .values({ id1: 1, name: 'John', json: ['foo', 'bar'] }) // .onConflictDoNothing({ target: usersMySchemaTable.id1 }) .toSQL(); expect(query).toEqual({ sql: 'insert into "mySchema"."users" ("id1", "name", "verified", "json", "created_at") values ($1, $2, default, $3, default) on conflict ("id1") do nothing', params: [1, 'John', ['foo', 'bar']], }); }); test('mySchema :: select from tables with same name from different schema using alias', async (ctx) => { const { db } = ctx.gel; await db.insert(usersMySchemaTable).values({ id1: 10, name: 'Ivan' }); await db.insert(usersTable).values({ id1: 11, name: 'Hans' }); const customerAlias = alias(usersTable, 'customer'); const result = await db.select().from(usersMySchemaTable).leftJoin(customerAlias, eq(customerAlias.id1, 11)) .where(eq(customerAlias.id1, 11)); expect(result).toEqual([ { users: { id1: 10, name: 'Ivan', verified: false, json: null, createdAt: result[0]!.users.createdAt, }, customer: { id1: 11, name: 'Hans', verified: false, json: null, createdAt: result[0]!.customer!.createdAt, }, }, ]); }); test('limit 0', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 1, name: 'John' }); const users = await db.select().from(usersTable).limit(0); expect(users).toEqual([]); }); test('limit -1', async (ctx) => { const { db } = ctx.gel; await db.insert(usersTable).values({ id1: 2, name: 'John' }); const users = await db.select().from(usersTable).limit(-1); expect(users.length).toBeGreaterThan(0); }); test('Object keys as column names', async (ctx) => { const { db } = ctx.gel; // Tests the following: // Column with required config // Column with optional config without providing a value // Column with optional config providing a value // Column without config const users = gelTable('users_with_names', { id1: integer().notNull(), firstName: text(), lastName: text(), admin: boolean(), }); await db.insert(users).values([ { id1: 1, firstName: 'John', lastName: 'Doe', admin: true }, { id1: 2, firstName: 'Jane', lastName: 'Smith', admin: false }, ]); const result = await db.select({ id1: users.id1, firstName: users.firstName, lastName: users.lastName }).from( users, ).where(eq(users.admin, true)); expect(result).toEqual([{ id1: 1, firstName: 'John', lastName: 'Doe' }]); }); test('proper json handling', async (ctx) => { const { db } = ctx.gel; const jsonTable = gelTable('json_table', { json: json('json').$type<{ name: string; age: number }>(), }); await db.insert(jsonTable).values({ json: { name: 'Tom', age: 75 } }); const result = await db.select().from(jsonTable); const justNames = await db .select({ name1: sql`${jsonTable.json}->>'name'`.as('name1'), name2: sql`${jsonTable.json}->>'name'`.as('name2'), }) .from(jsonTable); expect(result).toStrictEqual([ { json: { name: 'Tom', age: 75 }, }, ]); expect(justNames).toStrictEqual([ { name1: 'Tom', name2: 'Tom', }, ]); }); test('set json fields with objects and retrieve with the ->> operator', async (ctx) => { const { db } = ctx.gel; const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ id1: 1, json: obj, }); const result = await db .select({ jsonStringField: sql`${jsonTestTable.json}->>'string'`, jsonNumberField: sql`${jsonTestTable.json}->>'number'`, }) .from(jsonTestTable); expect(result).toStrictEqual([ { jsonStringField: testString, jsonNumberField: String(testNumber), }, ]); }); test('set json fields with objects and retrieve with the -> operator', async (ctx) => { const { db } = ctx.gel; const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ id1: 1, json: obj }); const result = await db .select({ jsonStringField: sql`${jsonTestTable.json}->'string'`, jsonNumberField: sql`${jsonTestTable.json}->'number'`, }) .from(jsonTestTable); expect(result).toStrictEqual([ { jsonStringField: testString, jsonNumberField: testNumber, }, ]); }); test('set json fields with strings and retrieve with the -> operator', async (ctx) => { const { db } = ctx.gel; const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ id1: 1, json: sql`${obj}`, }); const result = await db .select({ jsonStringField: sql`${jsonTestTable.json}->'string'`, jsonNumberField: sql`${jsonTestTable.json}->'number'`, }) .from(jsonTestTable); expect(result).toStrictEqual([ { jsonStringField: testString, jsonNumberField: testNumber, }, ]); }); test('cross join', async (ctx) => { const { db } = ctx.gel; await db .insert(usersTable) .values([ { id1: 1, name: 'John' }, { id1: 2, name: 'Jane' }, ]); await db .insert(citiesTable) .values([ { id1: 1, name: 'Seattle' }, { id1: 2, name: 'New York City' }, ]); const result = await db .select({ user: usersTable.name, city: citiesTable.name, }) .from(usersTable) .crossJoin(citiesTable) .orderBy(usersTable.name, citiesTable.name); expect(result).toStrictEqual([ { city: 'New York City', user: 'Jane' }, { city: 'Seattle', user: 'Jane' }, { city: 'New York City', user: 'John' }, { city: 'Seattle', user: 'John' }, ]); }); test('left join (lateral)', async (ctx) => { const { db } = ctx.gel; await db .insert(citiesTable) .values([{ id1: 1, name: 'Paris' }, { id1: 2, name: 'London' }]) .returning({ id: citiesTable.id1 }); await db.insert(users2Table).values([{ id1: 1, name: 'John', cityId: 1 }, { id1: 2, name: 'Jane' }]); const sq = db .select({ userId: users2Table.id1, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(eq(users2Table.cityId, citiesTable.id1)) .as('sq'); const res = await db .select({ cityId: citiesTable.id1, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .leftJoinLateral(sq, sql`true`); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, { cityId: 2, cityName: 'London', userId: null, userName: null }, ]); }); test('inner join (lateral)', async (ctx) => { const { db } = ctx.gel; await db .insert(citiesTable) .values([{ id1: 1, name: 'Paris' }, { id1: 2, name: 'London' }]) .returning({ id: citiesTable.id1 }); await db.insert(users2Table).values([{ id1: 1, name: 'John', cityId: 1 }, { id1: 2, name: 'Jane' }]); const sq = db .select({ userId: users2Table.id1, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(eq(users2Table.cityId, citiesTable.id1)) .as('sq'); const res = await db .select({ cityId: citiesTable.id1, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .innerJoinLateral(sq, sql`true`); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, ]); }); test('cross join (lateral)', async (ctx) => { const { db } = ctx.gel; await db .insert(citiesTable) .values([{ id1: 1, name: 'Paris' }, { id1: 2, name: 'London' }, { id1: 3, name: 'Berlin' }]); await db.insert(users2Table).values([{ id1: 1, name: 'John', cityId: 1 }, { id1: 2, name: 'Jane' }, { id1: 3, name: 'Patrick', cityId: 2, }]); const sq = db .select({ userId: users2Table.id1, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(not(like(citiesTable.name, 'L%'))) .as('sq'); const res = await db .select({ cityId: citiesTable.id1, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .crossJoinLateral(sq) .orderBy(citiesTable.id1, sq.userId); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John', }, { cityId: 1, cityName: 'Paris', userId: 2, userName: 'Jane', }, { cityId: 1, cityName: 'Paris', userId: 3, userName: 'Patrick', }, { cityId: 3, cityName: 'Berlin', userId: 1, userName: 'John', }, { cityId: 3, cityName: 'Berlin', userId: 2, userName: 'Jane', }, { cityId: 3, cityName: 'Berlin', userId: 3, userName: 'Patrick', }, ]); }); // TODO not supported yet test.todo('update ... from', async (ctx) => { const { db } = ctx.gel; await db.insert(cities2Table).values([ { id1: 1, name: 'New York City' }, { id1: 2, name: 'Seattle' }, ]); await db.insert(users2Table).values([ { id1: 1, name: 'John', cityId: 1 }, { id1: 2, name: 'Jane', cityId: 2 }, ]); const result = await db .update(users2Table) .set({ cityId: cities2Table.id1, }) .from(cities2Table) .where(and(eq(cities2Table.name, 'Seattle'), eq(users2Table.name, 'John'))) .returning(); expect(result).toStrictEqual([ { id1: 1, name: 'John', cityId: 2, cities: { id1: 2, name: 'Seattle', }, }, ]); }); // TODO not supported yet test.todo('update ... from with alias', async (ctx) => { const { db } = ctx.gel; await db.insert(cities2Table).values([ { id1: 1, name: 'New York City' }, { id1: 2, name: 'Seattle' }, ]); await db.insert(users2Table).values([ { id1: 1, name: 'John', cityId: 1 }, { id1: 2, name: 'Jane', cityId: 2 }, ]); const users = alias(users2Table, 'u'); const cities = alias(cities2Table, 'c'); const result = await db .update(users) .set({ cityId: cities.id1, }) .from(cities) .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) .returning(); expect(result).toStrictEqual([ { id1: 1, name: 'John', cityId: 2, c: { id1: 2, name: 'Seattle', }, }, ]); }); // TODO not supported yet // test.todo('update ... from with join', async (ctx) => { // const { db } = ctx.gel; // const states = gelTable('states', { // id1: integer('id1').primaryKey(), // name: text('name').notNull(), // }); // const cities = gelTable('cities', { // id1: integer('id1').primaryKey(), // name: text('name').notNull(), // stateId: integer('state_id').references(() => states.id1), // }); // const users = gelTable('users', { // id1: integer('id1').primaryKey(), // name: text('name').notNull(), // cityId: integer('city_id') // .notNull() // .references(() => cities.id1), // }); // await db.execute(sql`drop table if exists "states" cascade`); // await db.execute(sql`drop table if exists "cities" cascade`); // await db.execute(sql`drop table if exists "users" cascade`); // await db.execute(sql` // create table "states" ( // "id" serial primary key, // "name" text not null // ) // `); // await db.execute(sql` // create table "cities" ( // "id" serial primary key, // "name" text not null, // "state_id" integer references "states"("id") // ) // `); // await db.execute(sql` // create table "users" ( // "id" serial primary key, // "name" text not null, // "city_id" integer not null references "cities"("id") // ) // `); // await db.insert(states).values([{ id1: 1, name: 'New York' }, { id1: 2, name: 'Washington' }]); // await db.insert(cities).values([ // { id1: 1, name: 'New York City', stateId: 1 }, // { id1: 2, name: 'Seattle', stateId: 2 }, // { // id1: 2, // name: 'London', // }, // ]); // await db.insert(users).values([ // { id1: 1, name: 'John', cityId: 1 }, // { id1: 2, name: 'Jane', cityId: 2 }, // { id1: 3, name: 'Jack', cityId: 3 }, // ]); // const result1 = await db // .update(users) // .set({ // cityId: cities.id1, // }) // .from(cities) // .leftJoin(states, eq(cities.stateId, states.id1)) // .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) // .returning(); // const result2 = await db // .update(users) // .set({ // cityId: cities.id1, // }) // .from(cities) // .leftJoin(states, eq(cities.stateId, states.id1)) // .where(and(eq(cities.name, 'London'), eq(users.name, 'Jack'))) // .returning(); // expect(result1).toStrictEqual([ // { // id: 1, // name: 'John', // cityId: 2, // cities: { // id: 2, // name: 'Seattle', // stateId: 2, // }, // states: { // id: 2, // name: 'Washington', // }, // }, // ]); // expect(result2).toStrictEqual([ // { // id: 3, // name: 'Jack', // cityId: 3, // cities: { // id: 3, // name: 'London', // stateId: null, // }, // states: null, // }, // ]); // }); test('insert into ... select', async (ctx) => { const { db } = ctx.gel; const notifications = gelTable('notifications', { id1: integer('id1').notNull(), sentAt: timestamp('sentAt').notNull().defaultNow(), message: text('message').notNull(), }); const users = gelTable('users_insert_select', { id1: integer('id1').notNull(), name: text('name').notNull(), }); const userNotications = gelTable('user_notifications', { userId: integer('userId').notNull(), notificationId: integer('notificationId').notNull(), }); const newNotification = await db .insert(notifications) .values({ id1: 1, message: 'You are one of the 3 lucky winners!' }) .returning({ id1: notifications.id1 }) .then((result) => result[0]); await db.insert(users).values([ { id1: 1, name: 'Alice' }, { id1: 2, name: 'Bob' }, { id1: 3, name: 'Charlie' }, { id1: 4, name: 'David', }, { id1: 5, name: 'Eve', }, ]); const sentNotifications = await db .insert(userNotications) .select( db .select({ userId: users.id1, notificationId: sql`${newNotification!.id1}`.as('notification_id'), }) .from(users) .where(inArray(users.name, ['Alice', 'Charlie', 'Eve'])) .orderBy(asc(users.id1)), ) .returning(); expect(sentNotifications).toStrictEqual([ { userId: 1, notificationId: newNotification!.id1 }, { userId: 3, notificationId: newNotification!.id1 }, { userId: 5, notificationId: newNotification!.id1 }, ]); }); test('insert into ... select with keys in different order', async (ctx) => { const { db } = ctx.gel; const users1 = gelTable('users1', { id1: integer('id1').notNull(), name: text('name').notNull(), }); const users2 = gelTable('users2', { id1: integer('id1').notNull(), name: text('name').notNull(), }); expect(() => db.insert(users1).select( db .select({ name: users2.name, id1: users2.id1, }) .from(users2), ) ).toThrowError(); }); test('policy', () => { { const policy = gelPolicy('test policy'); expect(is(policy, GelPolicy)).toBe(true); expect(policy.name).toBe('test policy'); } { const policy = gelPolicy('test policy', { as: 'permissive', for: 'all', to: 'public', using: sql`1=1`, withCheck: sql`1=1`, }); expect(is(policy, GelPolicy)).toBe(true); expect(policy.name).toBe('test policy'); expect(policy.as).toBe('permissive'); expect(policy.for).toBe('all'); expect(policy.to).toBe('public'); const dialect = new GelDialect(); expect(is(policy.using, SQL)).toBe(true); expect(dialect.sqlToQuery(policy.using!).sql).toBe('1=1'); expect(is(policy.withCheck, SQL)).toBe(true); expect(dialect.sqlToQuery(policy.withCheck!).sql).toBe('1=1'); } { const policy = gelPolicy('test policy', { to: 'custom value', }); expect(policy.to).toBe('custom value'); } { const p1 = gelPolicy('test policy'); const p2 = gelPolicy('test policy 2', { as: 'permissive', for: 'all', to: 'public', using: sql`1=1`, withCheck: sql`1=1`, }); const table = gelTable( 'table_with_policy', { id: integer('id').primaryKey(), name: text('name').notNull(), }, () => ({ p1, p2, }), ); const config = getTableConfig(table); expect(config.policies).toHaveLength(2); expect(config.policies[0]).toBe(p1); expect(config.policies[1]).toBe(p2); } }); test('Enable RLS function', () => { const usersWithRLS = gelTable('users', { id: integer(), }).enableRLS(); const config1 = getTableConfig(usersWithRLS); const usersNoRLS = gelTable('users', { id: integer(), }); const config2 = getTableConfig(usersNoRLS); expect(config1.enableRLS).toBeTruthy(); expect(config2.enableRLS).toBeFalsy(); }); test('test $onUpdateFn and $onUpdate works with sql value', async (ctx) => { const { db } = ctx.gel; const users = gelTable('users_on_update_sql', { id: integer('id1').notNull(), name: text('name').notNull(), updatedAt: timestamptz('updated_at').notNull().$onUpdate(() => sql`now()`), }); const insertResp = await db.insert(users).values({ id: 1, name: 'John', }).returning({ updatedAt: users.updatedAt, }); await new Promise((resolve) => setTimeout(resolve, 1000)); const now = Date.now(); await new Promise((resolve) => setTimeout(resolve, 1000)); const updateResp = await db.update(users).set({ name: 'John', }).returning({ updatedAt: users.updatedAt, }); expect(new Date(insertResp[0]?.updatedAt.toISOString() ?? 0).getTime()).lessThan(now); expect(new Date(updateResp[0]?.updatedAt.toISOString() ?? 0).getTime()).greaterThan(now); }); test('$count separate', async (ctx) => { const { db } = ctx.gel; const countTestTable = gelTable('count_test', { id1: integer('id1').notNull(), name: text('name').notNull(), }); await db.insert(countTestTable).values([ { id1: 1, name: 'First' }, { id1: 2, name: 'Second' }, { id1: 3, name: 'Third' }, { id1: 4, name: 'Fourth' }, ]); const count = await db.$count(countTestTable); expect(count).toStrictEqual(4); }); test('$count embedded', async (ctx) => { const { db } = ctx.gel; const countTestTable = gelTable('count_test', { id1: integer('id1').notNull(), name: text('name').notNull(), }); await db.insert(countTestTable).values([ { id1: 1, name: 'First' }, { id1: 2, name: 'Second' }, { id1: 3, name: 'Third' }, { id1: 4, name: 'Fourth' }, ]); const count = await db .select({ count: db.$count(countTestTable), }) .from(countTestTable); expect(count).toStrictEqual([{ count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }]); }); test('$count separate reuse', async (ctx) => { const { db } = ctx.gel; const countTestTable = gelTable('count_test', { id1: integer('id1').notNull(), name: text('name').notNull(), }); await db.insert(countTestTable).values([ { id1: 1, name: 'First' }, { id1: 2, name: 'Second' }, { id1: 3, name: 'Third' }, { id1: 4, name: 'Fourth' }, ]); const count = db.$count(countTestTable); const count1 = await count; await db.insert(countTestTable).values({ id1: 5, name: 'fifth' }); const count2 = await count; await db.insert(countTestTable).values({ id1: 6, name: 'sixth' }); const count3 = await count; expect(count1).toStrictEqual(4); expect(count2).toStrictEqual(5); expect(count3).toStrictEqual(6); }); test('$count embedded reuse', async (ctx) => { const { db } = ctx.gel; const countTestTable = gelTable('count_test', { id1: integer('id1').notNull(), name: text('name').notNull(), }); await db.insert(countTestTable).values([ { id1: 1, name: 'First' }, { id1: 2, name: 'Second' }, { id1: 3, name: 'Third' }, { id1: 4, name: 'Fourth' }, ]); const count = db .select({ count: db.$count(countTestTable), }) .from(countTestTable); const count1 = await count; await db.insert(countTestTable).values({ id1: 5, name: 'fifth' }); const count2 = await count; await db.insert(countTestTable).values({ id1: 6, name: 'sixth' }); const count3 = await count; expect(count1).toStrictEqual([{ count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }]); expect(count2).toStrictEqual([{ count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }]); expect(count3).toStrictEqual([ { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6, }, ]); }); test('$count separate with filters', async (ctx) => { const { db } = ctx.gel; const countTestTable = gelTable('count_test', { id1: integer('id1').notNull(), name: text('name').notNull(), }); await db.insert(countTestTable).values([ { id1: 1, name: 'First' }, { id1: 2, name: 'Second' }, { id1: 3, name: 'Third' }, { id1: 4, name: 'Fourth' }, ]); const count = await db.$count(countTestTable, gt(countTestTable.id1, 1)); expect(count).toStrictEqual(3); }); test('$count embedded with filters', async (ctx) => { const { db } = ctx.gel; const countTestTable = gelTable('count_test', { id1: integer('id1').notNull(), name: text('name').notNull(), }); await db.insert(countTestTable).values([ { id1: 1, name: 'First' }, { id1: 2, name: 'Second' }, { id1: 3, name: 'Third' }, { id1: 4, name: 'Fourth' }, ]); const count = await db .select({ count: db.$count(countTestTable, gt(countTestTable.id1, 1)), }) .from(countTestTable); expect(count).toStrictEqual([{ count: 3 }, { count: 3 }, { count: 3 }, { count: 3 }]); }); // TODO test.todo('insert multiple rows into table with generated identity column', async (ctx) => { const { db } = ctx.gel; const identityColumnsTable = gelTable('identity_columns_table', { id: integer('id').generatedAlwaysAsIdentity(), id1: integer('id1').generatedByDefaultAsIdentity(), name: text('name').notNull(), }); // not passing identity columns await db.execute(sql`drop table if exists ${identityColumnsTable}`); await db.execute( sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, ); let result = await db .insert(identityColumnsTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Bob' }]) .returning(); expect(result).toEqual([ { id: 1, id1: 1, name: 'John' }, { id: 2, id1: 2, name: 'Jane' }, { id: 3, id1: 3, name: 'Bob' }, ]); // passing generated by default as identity column await db.execute(sql`drop table if exists ${identityColumnsTable}`); await db.execute( sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, ); result = await db .insert(identityColumnsTable) .values([ { name: 'John', id1: 3 }, { name: 'Jane', id1: 5 }, { name: 'Bob', id1: 5 }, ]) .returning(); expect(result).toEqual([ { id: 1, id1: 3, name: 'John' }, { id: 2, id1: 5, name: 'Jane' }, { id: 3, id1: 5, name: 'Bob' }, ]); // passing all identity columns await db.execute(sql`drop table if exists ${identityColumnsTable}`); await db.execute( sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, ); result = await db .insert(identityColumnsTable) .overridingSystemValue() .values([ { name: 'John', id: 2, id1: 3 }, { name: 'Jane', id: 4, id1: 5 }, { name: 'Bob', id: 4, id1: 5 }, ]) .returning(); expect(result).toEqual([ { id: 2, id1: 3, name: 'John' }, { id: 4, id1: 5, name: 'Jane' }, { id: 4, id1: 5, name: 'Bob' }, ]); }); test('insert via db.execute + select via db.execute', async (ctx) => { const { db } = ctx.gel; await db.execute( sql`insert into ${usersTable} (${sql.identifier(usersTable.id1.name)},${ sql.identifier(usersTable.name.name) }) values (1, ${'John'})`, ); const result = await db.execute<{ id1: number; name: string }>( sql`select id1, name from "users"`, ); expect(result).toEqual([{ id1: 1, name: 'John' }]); }); test('insert via db.execute + returning', async (ctx) => { const { db } = ctx.gel; const inserted = await db.execute<{ id1: number; name: string }>( sql`insert into ${usersTable} (${ sql.identifier( usersTable.id1.name, ) }, ${ sql.identifier( usersTable.name.name, ) }) values (1, ${'John'}) returning ${usersTable.id1}, ${usersTable.name}`, ); expect(inserted).toEqual([{ id1: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async (ctx) => { const { db } = ctx.gel; const inserted = await db.execute>( db .insert(usersTable) .values({ id1: 1, name: 'John' }) .returning({ id1: usersTable.id1, name: usersTable.name }), ); expect(inserted).toEqual([{ id1: 1, name: 'John' }]); }); test('test force invalidate', async (ctx) => { const { db } = ctx.cachedGel; const spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); await db.$cache?.invalidate({ tables: 'users' }); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('default global config - no cache should be hit', async (ctx) => { const { db } = ctx.cachedGel; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('default global config + enable cache on select: get, put', async (ctx) => { const { db } = ctx.cachedGel; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('default global config + enable cache on select + write: get, put, onMutate', async (ctx) => { const { db } = ctx.cachedGel; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); spyPut.mockClear(); spyGet.mockClear(); spyInvalidate.mockClear(); await db.insert(usersTable).values({ id1: 1, name: 'John' }); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('default global config + enable cache on select + disable invalidate: get, put', async (ctx) => { const { db } = ctx.cachedGel; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ id1: 1, name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); }); test('global: true + disable cache', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedGel; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - cache should be hit', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedGel; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - cache: false on select - no cache hit', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedGel; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - disable invalidate - cache hit + no invalidate', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedGel; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ autoInvalidate: false }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); spyPut.mockClear(); spyGet.mockClear(); spyInvalidate.mockClear(); await db.insert(usersTable).values({ id1: 1, name: 'John' }); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('global: true - with custom tag', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedGel; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ id1: 1, name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); }); // check select used tables test('check simple select used tables', (ctx) => { const { db } = ctx.cachedGel; // @ts-expect-error expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users']); // @ts-expect-error expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users']); }); // check select+join used tables test('select+join', (ctx) => { const { db } = ctx.cachedGel; // @ts-expect-error expect(db.select().from(usersTable).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables()) .toStrictEqual(['users', 'posts']); expect( // @ts-expect-error db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables(), ).toStrictEqual(['users', 'posts']); }); // check select+2join used tables test('select+2joins', (ctx) => { const { db } = ctx.cachedGel; expect( db.select().from(usersTable).leftJoin( postsTable, eq(usersTable.id1, postsTable.userId), ).leftJoin( alias(postsTable, 'post2'), eq(usersTable.id1, postsTable.userId), ) // @ts-expect-error .getUsedTables(), ) .toStrictEqual(['users', 'posts']); expect( db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id1, postsTable.userId)).leftJoin( alias(postsTable, 'post2'), eq(usersTable.id1, postsTable.userId), // @ts-expect-error ).getUsedTables(), ).toStrictEqual(['users', 'posts']); }); // select subquery used tables test('select+join', (ctx) => { const { db } = ctx.cachedGel; const sq = db.select().from(usersTable).where(eq(usersTable.id1, 42)).as('sq'); // @ts-expect-error expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users']); }); }); ================================================ FILE: integration-tests/tests/imports/index.test.ts ================================================ import { afterAll, expect, it } from 'vitest'; import 'zx/globals'; import * as fs from 'fs'; import path from 'path'; $.verbose = false; const IMPORTS_FOLDER = 'tests/imports/files'; const folderPath = '../drizzle-orm/dist/package.json'; const pj = JSON.parse(fs.readFileSync(folderPath, 'utf8')); if (!fs.existsSync(IMPORTS_FOLDER)) { fs.mkdirSync(IMPORTS_FOLDER); } it('dynamic imports check for CommonJS', async () => { const promises: ProcessPromise[] = []; for (const [i, key] of Object.keys(pj['exports']).entries()) { const o1 = path.join('drizzle-orm', key); if ( o1.startsWith('drizzle-orm/bun-sqlite') || o1.startsWith('drizzle-orm/pglite') || o1.startsWith('drizzle-orm/expo-sqlite') || o1.startsWith('drizzle-orm/libsql/wasm') || o1.startsWith('drizzle-orm/bun-sql') ) { continue; } fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.cjs`, 'requ'); fs.appendFileSync(`${IMPORTS_FOLDER}/imports_${i}.cjs`, 'ire("' + o1 + '");\n', {}); // fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'imp'); // fs.appendFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'ort "' + o1 + '"\n', {}); promises.push( $`node ${IMPORTS_FOLDER}/imports_${i}.cjs`.nothrow(), // $`node ${IMPORTS_FOLDER}/imports_${i}.mjs`.nothrow(), ); } const results = await Promise.all(promises); for (const result of results) { expect(result.exitCode, result.message).toBe(0); } }); it('dynamic imports check for ESM', async () => { const promises: ProcessPromise[] = []; for (const [i, key] of Object.keys(pj['exports']).entries()) { const o1 = path.join('drizzle-orm', key); if ( o1.startsWith('drizzle-orm/bun-sqlite') || o1.startsWith('drizzle-orm/expo-sqlite') || o1.startsWith('drizzle-orm/bun-sql') ) { continue; } fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'imp'); fs.appendFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'ort "' + o1 + '"\n', {}); promises.push( $`node ${IMPORTS_FOLDER}/imports_${i}.mjs`.nothrow(), $`node --import import-in-the-middle/hook.mjs ${IMPORTS_FOLDER}/imports_${i}.mjs`.nothrow(), ); } const results = await Promise.all(promises); for (const result of results) { expect(result.exitCode, result.message).toBe(0); } }); afterAll(() => { fs.rmdirSync(IMPORTS_FOLDER, { recursive: true }); }); ================================================ FILE: integration-tests/tests/mysql/mysql-common-cache.ts ================================================ import { eq, getTableName, is, sql, Table } from 'drizzle-orm'; import type { MutationOption } from 'drizzle-orm/cache/core'; import { Cache } from 'drizzle-orm/cache/core'; import type { CacheConfig } from 'drizzle-orm/cache/core/types'; import type { MySqlDatabase } from 'drizzle-orm/mysql-core'; import { alias, boolean, int, json, mysqlTable, serial, text, timestamp } from 'drizzle-orm/mysql-core'; import Keyv from 'keyv'; import { beforeEach, describe, expect, test, vi } from 'vitest'; // eslint-disable-next-line drizzle-internal/require-entity-kind export class TestGlobalCache extends Cache { private globalTtl: number = 1000; private usedTablesPerKey: Record = {}; constructor(private kv: Keyv = new Keyv()) { super(); } override strategy(): 'explicit' | 'all' { return 'all'; } override async get(key: string, _tables: string[], _isTag: boolean): Promise { const res = await this.kv.get(key) ?? undefined; return res; } override async put( key: string, response: any, tables: string[], isTag: boolean, config?: CacheConfig, ): Promise { await this.kv.set(key, response, config ? config.ex : this.globalTtl); for (const table of tables) { const keys = this.usedTablesPerKey[table]; if (keys === undefined) { this.usedTablesPerKey[table] = [key]; } else { keys.push(key); } } } override async onMutate(params: MutationOption): Promise { const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; const keysToDelete = new Set(); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; const keys = this.usedTablesPerKey[tableName] ?? []; for (const key of keys) keysToDelete.add(key); } if (keysToDelete.size > 0 || tagsArray.length > 0) { for (const tag of tagsArray) { await this.kv.delete(tag); } for (const key of keysToDelete) { await this.kv.delete(key); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; this.usedTablesPerKey[tableName] = []; } } } } } // eslint-disable-next-line drizzle-internal/require-entity-kind export class TestCache extends TestGlobalCache { override strategy(): 'explicit' | 'all' { return 'explicit'; } } declare module 'vitest' { interface TestContext { cachedMySQL: { db: MySqlDatabase; dbGlobalCached: MySqlDatabase; }; } } const usersTable = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: json('jsonb').$type(), createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), }); const postsTable = mysqlTable('posts', { id: serial().primaryKey(), description: text().notNull(), userId: int('city_id').references(() => usersTable.id), }); export function tests() { describe('common_cache', () => { beforeEach(async (ctx) => { const { db, dbGlobalCached } = ctx.cachedMySQL; await db.execute(sql`drop table if exists users`); await db.execute(sql`drop table if exists posts`); await db.$cache?.invalidate({ tables: 'users' }); await dbGlobalCached.$cache?.invalidate({ tables: 'users' }); // public users await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb json, created_at timestamp not null default now() ) `, ); await db.execute( sql` create table posts ( id serial primary key, description text not null, user_id int ) `, ); }); test('test force invalidate', async (ctx) => { const { db } = ctx.cachedMySQL; const spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); await db.$cache?.invalidate({ tables: 'users' }); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('default global config - no cache should be hit', async (ctx) => { const { db } = ctx.cachedMySQL; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('default global config + enable cache on select: get, put', async (ctx) => { const { db } = ctx.cachedMySQL; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('default global config + enable cache on select + write: get, put, onMutate', async (ctx) => { const { db } = ctx.cachedMySQL; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); spyPut.mockClear(); spyGet.mockClear(); spyInvalidate.mockClear(); await db.insert(usersTable).values({ name: 'John' }); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('default global config + enable cache on select + disable invalidate: get, put', async (ctx) => { const { db } = ctx.cachedMySQL; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); }); test('global: true + disable cache', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedMySQL; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - cache should be hit', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedMySQL; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - cache: false on select - no cache hit', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedMySQL; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - disable invalidate - cache hit + no invalidate', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedMySQL; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ autoInvalidate: false }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); spyPut.mockClear(); spyGet.mockClear(); spyInvalidate.mockClear(); await db.insert(usersTable).values({ name: 'John' }); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('global: true - with custom tag', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedMySQL; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); }); // check select used tables test('check simple select used tables', (ctx) => { const { db } = ctx.cachedMySQL; // @ts-expect-error expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users']); // @ts-expect-error expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users']); }); // check select+join used tables test('select+join', (ctx) => { const { db } = ctx.cachedMySQL; // @ts-expect-error expect(db.select().from(usersTable).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables()) .toStrictEqual(['users', 'posts']); expect( // @ts-expect-error db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables(), ).toStrictEqual(['users', 'posts']); }); // check select+2join used tables test('select+2joins', (ctx) => { const { db } = ctx.cachedMySQL; expect( db.select().from(usersTable).leftJoin( postsTable, eq(usersTable.id, postsTable.userId), ).leftJoin( alias(postsTable, 'post2'), eq(usersTable.id, postsTable.userId), ) // @ts-expect-error .getUsedTables(), ) .toStrictEqual(['users', 'posts']); expect( db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).leftJoin( alias(postsTable, 'post2'), eq(usersTable.id, postsTable.userId), // @ts-expect-error ).getUsedTables(), ).toStrictEqual(['users', 'posts']); }); // select subquery used tables test('select+join', (ctx) => { const { db } = ctx.cachedMySQL; const sq = db.select().from(usersTable).where(eq(usersTable.id, 42)).as('sq'); db.select().from(sq); // @ts-expect-error expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users']); }); }); } ================================================ FILE: integration-tests/tests/mysql/mysql-common.ts ================================================ /* eslint-disable @typescript-eslint/no-unused-vars */ import 'dotenv/config'; import Docker from 'dockerode'; import { and, asc, avg, avgDistinct, count, countDistinct, eq, exists, getTableColumns, gt, gte, inArray, like, lt, max, min, Name, not, notInArray, sql, sum, sumDistinct, TransactionRollbackError, } from 'drizzle-orm'; import type { MySqlDatabase } from 'drizzle-orm/mysql-core'; import { alias, bigint, binary, boolean, char, date, datetime, decimal, double, except, exceptAll, float, foreignKey, getTableConfig, getViewConfig, index, int, intersect, intersectAll, json, mediumint, mysqlEnum, mysqlSchema, mysqlTable, mysqlTableCreator, mysqlView, primaryKey, real, serial, smallint, text, time, timestamp, tinyint, union, unionAll, unique, uniqueIndex, uniqueKeyName, varbinary, varchar, year, } from 'drizzle-orm/mysql-core'; import type { MySqlRemoteDatabase } from 'drizzle-orm/mysql-proxy'; import { migrate } from 'drizzle-orm/mysql2/migrator'; import getPort from 'get-port'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; import { Expect, toLocalDate } from '~/utils.ts'; import type { Equal } from '~/utils.ts'; type TestMySQLDB = MySqlDatabase; declare module 'vitest' { interface TestContext { mysql: { db: TestMySQLDB; }; mysqlProxy: { db: MySqlRemoteDatabase; }; } } const ENABLE_LOGGING = false; const allTypesTable = mysqlTable('all_types', { serial: serial('serial'), bigint53: bigint('bigint53', { mode: 'number', }), bigint64: bigint('bigint64', { mode: 'bigint', }), binary: binary('binary'), boolean: boolean('boolean'), char: char('char'), date: date('date', { mode: 'date', }), dateStr: date('date_str', { mode: 'string', }), datetime: datetime('datetime', { mode: 'date', }), datetimeStr: datetime('datetime_str', { mode: 'string', }), decimal: decimal('decimal'), decimalNum: decimal('decimal_num', { scale: 30, mode: 'number', }), decimalBig: decimal('decimal_big', { scale: 30, mode: 'bigint', }), double: double('double'), float: float('float'), int: int('int'), json: json('json'), medInt: mediumint('med_int'), smallInt: smallint('small_int'), real: real('real'), text: text('text'), time: time('time'), timestamp: timestamp('timestamp', { mode: 'date', }), timestampStr: timestamp('timestamp_str', { mode: 'string', }), tinyInt: tinyint('tiny_int'), varbin: varbinary('varbin', { length: 16, }), varchar: varchar('varchar', { length: 255, }), year: year('year'), enum: mysqlEnum('enum', ['enV1', 'enV2']), }); const usersTable = mysqlTable('userstest', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: json('jsonb').$type(), createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), }); const users2Table = mysqlTable('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').references(() => citiesTable.id), }); const citiesTable = mysqlTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const usersOnUpdate = mysqlTable('users_on_update', { id: serial('id').primaryKey(), name: text('name').notNull(), updateCounter: int('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), updatedAt: datetime('updated_at', { mode: 'date', fsp: 3 }).$onUpdate(() => new Date()), uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper(name)`), alwaysNull: text('always_null').$type().$onUpdateFn(() => null), // need to add $type because $onUpdate add a default value }); const datesTable = mysqlTable('datestable', { date: date('date'), dateAsString: date('date_as_string', { mode: 'string' }), time: time('time', { fsp: 1 }), datetime: datetime('datetime', { fsp: 2 }), datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), timestamp: timestamp('timestamp', { fsp: 3 }), timestampAsString: timestamp('timestamp_as_string', { fsp: 3, mode: 'string' }), year: year('year'), }); const coursesTable = mysqlTable('courses', { id: serial('id').primaryKey(), name: text('name').notNull(), categoryId: int('category_id').references(() => courseCategoriesTable.id), }); const courseCategoriesTable = mysqlTable('course_categories', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const orders = mysqlTable('orders', { id: serial('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull().$default(() => 'random_string'), amount: int('amount').notNull(), quantity: int('quantity').notNull(), }); const usersMigratorTable = mysqlTable('users12', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }, (table) => { return { name: uniqueIndex('').on(table.name).using('btree'), }; }); // To test aggregate functions const aggregateTable = mysqlTable('aggregate_table', { id: serial('id').notNull(), name: text('name').notNull(), a: int('a'), b: int('b'), c: int('c'), nullOnly: int('null_only'), }); // To test another schema and multischema const mySchema = mysqlSchema(`mySchema`); const usersMySchemaTable = mySchema.table('userstest', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: json('jsonb').$type(), createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), }); const users2MySchemaTable = mySchema.table('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').references(() => citiesTable.id), }); const citiesMySchemaTable = mySchema.table('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), }); let mysqlContainer: Docker.Container; export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await mysqlContainer.start(); await new Promise((resolve) => setTimeout(resolve, 4000)); return { connectionString: `mysql://root:mysql@127.0.0.1:${port}/drizzle`, container: mysqlContainer }; } afterAll(async () => { await mysqlContainer?.stop().catch(console.error); }); export function tests(driver?: string) { describe('common', () => { // afterAll(async () => { // await mysqlContainer?.stop().catch(console.error); // }); beforeEach(async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists userstest`); await db.execute(sql`drop table if exists users2`); await db.execute(sql`drop table if exists cities`); await db.execute(sql`drop table if exists \`all_types\``); if (driver !== 'planetscale') { await db.execute(sql`drop schema if exists \`mySchema\``); await db.execute(sql`create schema if not exists \`mySchema\``); } await db.execute( sql` create table userstest ( id serial primary key, name text not null, verified boolean not null default false, jsonb json, created_at timestamp not null default now() ) `, ); await db.execute( sql` create table users2 ( id serial primary key, name text not null, city_id int references cities(id) ) `, ); await db.execute( sql` create table cities ( id serial primary key, name text not null ) `, ); if (driver !== 'planetscale') { // mySchema await db.execute( sql` create table \`mySchema\`.\`userstest\` ( \`id\` serial primary key, \`name\` text not null, \`verified\` boolean not null default false, \`jsonb\` json, \`created_at\` timestamp not null default now() ) `, ); await db.execute( sql` create table \`mySchema\`.\`cities\` ( \`id\` serial primary key, \`name\` text not null ) `, ); await db.execute( sql` create table \`mySchema\`.\`users2\` ( \`id\` serial primary key, \`name\` text not null, \`city_id\` int references \`mySchema\`.\`cities\`(\`id\`) ) `, ); } }); async function setupReturningFunctionsTest(db: MySqlDatabase) { await db.execute(sql`drop table if exists \`users_default_fn\``); await db.execute( sql` create table \`users_default_fn\` ( \`id\` varchar(256) primary key, \`name\` text not null ); `, ); } async function setupSetOperationTest(db: TestMySQLDB) { await db.execute(sql`drop table if exists \`users2\``); await db.execute(sql`drop table if exists \`cities\``); await db.execute( sql` create table \`users2\` ( \`id\` serial primary key, \`name\` text not null, \`city_id\` int references \`cities\`(\`id\`) ) `, ); await db.execute( sql` create table \`cities\` ( \`id\` serial primary key, \`name\` text not null ) `, ); await db.insert(citiesTable).values([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await db.insert(users2Table).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 2 }, { id: 3, name: 'Jack', cityId: 3 }, { id: 4, name: 'Peter', cityId: 3 }, { id: 5, name: 'Ben', cityId: 2 }, { id: 6, name: 'Jill', cityId: 1 }, { id: 7, name: 'Mary', cityId: 2 }, { id: 8, name: 'Sally', cityId: 1 }, ]); } async function setupAggregateFunctionsTest(db: TestMySQLDB) { await db.execute(sql`drop table if exists \`aggregate_table\``); await db.execute( sql` create table \`aggregate_table\` ( \`id\` integer primary key auto_increment not null, \`name\` text not null, \`a\` integer, \`b\` integer, \`c\` integer, \`null_only\` integer ); `, ); await db.insert(aggregateTable).values([ { name: 'value 1', a: 5, b: 10, c: 20 }, { name: 'value 1', a: 5, b: 20, c: 30 }, { name: 'value 2', a: 10, b: 50, c: 60 }, { name: 'value 3', a: 20, b: 20, c: null }, { name: 'value 4', a: null, b: 90, c: 120 }, { name: 'value 5', a: 80, b: 10, c: null }, { name: 'value 6', a: null, b: null, c: 150 }, ]); } test('table config: unsigned ints', async () => { const unsignedInts = mysqlTable('cities1', { bigint: bigint('bigint', { mode: 'number', unsigned: true }), int: int('int', { unsigned: true }), smallint: smallint('smallint', { unsigned: true }), mediumint: mediumint('mediumint', { unsigned: true }), tinyint: tinyint('tinyint', { unsigned: true }), }); const tableConfig = getTableConfig(unsignedInts); const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; expect(bigintColumn.getSQLType()).toBe('bigint unsigned'); expect(intColumn.getSQLType()).toBe('int unsigned'); expect(smallintColumn.getSQLType()).toBe('smallint unsigned'); expect(mediumintColumn.getSQLType()).toBe('mediumint unsigned'); expect(tinyintColumn.getSQLType()).toBe('tinyint unsigned'); }); test('table config: signed ints', async () => { const unsignedInts = mysqlTable('cities1', { bigint: bigint('bigint', { mode: 'number' }), int: int('int'), smallint: smallint('smallint'), mediumint: mediumint('mediumint'), tinyint: tinyint('tinyint'), }); const tableConfig = getTableConfig(unsignedInts); const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; expect(bigintColumn.getSQLType()).toBe('bigint'); expect(intColumn.getSQLType()).toBe('int'); expect(smallintColumn.getSQLType()).toBe('smallint'); expect(mediumintColumn.getSQLType()).toBe('mediumint'); expect(tinyintColumn.getSQLType()).toBe('tinyint'); }); test('table config: foreign keys name', async () => { const table = mysqlTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), })); const tableConfig = getTableConfig(table); expect(tableConfig.foreignKeys).toHaveLength(1); expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); }); test('table config: primary keys name', async () => { const table = mysqlTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), })); const tableConfig = getTableConfig(table); expect(tableConfig.primaryKeys).toHaveLength(1); expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); }); test('table configs: unique third param', async () => { const cities1Table = mysqlTable('cities1', { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: unique('custom_name').on(t.name, t.state), f1: unique('custom_name1').on(t.name, t.state), })); const tableConfig = getTableConfig(cities1Table); expect(tableConfig.uniqueConstraints).toHaveLength(2); expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); }); test('table configs: unique in column', async () => { const cities1Table = mysqlTable('cities1', { id: serial('id').primaryKey(), name: text('name').notNull().unique(), state: text('state').unique('custom'), field: text('field').unique('custom_field'), }); const tableConfig = getTableConfig(cities1Table); const columnName = tableConfig.columns.find((it) => it.name === 'name'); expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); expect(columnName?.isUnique).toBeTruthy(); const columnState = tableConfig.columns.find((it) => it.name === 'state'); expect(columnState?.uniqueName).toBe('custom'); expect(columnState?.isUnique).toBeTruthy(); const columnField = tableConfig.columns.find((it) => it.name === 'field'); expect(columnField?.uniqueName).toBe('custom_field'); expect(columnField?.isUnique).toBeTruthy(); }); test('select all fields', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('select sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select typed sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select with empty array in inArray', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(inArray(usersTable.id, [])); expect(result).toEqual([]); }); test('select with empty array in notInArray', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(notInArray(usersTable.id, [])); expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); }); test('select distinct', async (ctx) => { const { db } = ctx.mysql; const usersDistinctTable = mysqlTable('users_distinct', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]); const users = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ); await db.execute(sql`drop table ${usersDistinctTable}`); expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); }); test('insert returning sql', async (ctx) => { const { db } = ctx.mysql; const [result, _] = await db.insert(usersTable).values({ name: 'John' }); expect(result.insertId).toBe(1); }); test('delete returning sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(users[0].affectedRows).toBe(1); }); test('update returning sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); expect(users[0].changedRows).toBe(1); }); test('update with returning all fields', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); expect(updatedUsers[0].changedRows).toBe(1); expect(users[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); test('update with returning partial', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(updatedUsers[0].changedRows).toBe(1); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('delete with returning partial', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('insert + select', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersTable); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('json insert', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); test('insert with overridden default values', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('insert many', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }).from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('insert many with returning', async (ctx) => { const { db } = ctx.mysql; const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); expect(result[0].affectedRows).toBe(4); }); test('select with group by as field', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with exists', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const user = alias(usersTable, 'user'); const result = await db.select({ name: usersTable.name }).from(usersTable).where( exists( db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), ), ); expect(result).toEqual([{ name: 'John' }]); }); test('select with group by as sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('$default function', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists \`orders\``); await db.execute( sql` create table \`orders\` ( \`id\` serial primary key, \`region\` text not null, \`product\` text not null, \`amount\` int not null, \`quantity\` int not null ) `, ); await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); const selectedOrder = await db.select().from(orders); expect(selectedOrder).toEqual([{ id: 1, amount: 1, quantity: 1, region: 'Ukraine', product: 'random_string', }]); }); test('$default with empty array', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists \`s_orders\``); await db.execute( sql` create table \`s_orders\` ( \`id\` serial primary key, \`region\` text default ('Ukraine'), \`product\` text not null ) `, ); const users = mysqlTable('s_orders', { id: serial('id').primaryKey(), region: text('region').default('Ukraine'), product: text('product').$defaultFn(() => 'random_string'), }); await db.insert(users).values({}); const selectedOrder = await db.select().from(users); expect(selectedOrder).toEqual([{ id: 1, region: 'Ukraine', product: 'random_string', }]); }); test('select with group by as sql + column', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by as column + sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by complex query', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async (ctx) => { const { db } = ctx.mysql; const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, params: [], }); }); test('Query check: Insert all defaults in 1 row', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); const query = db .insert(users) .values({}) .toSQL(); expect(query).toEqual({ sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default)', params: [], }); }); test('Query check: Insert all defaults in multiple rows', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state').default('UA'), }); const query = db .insert(users) .values([{}, {}]) .toSQL(); expect(query).toEqual({ sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default), (default, default, default)', params: [], }); }); test('Insert all defaults in 1 row', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('empty_insert_single', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial primary key, name text default ('Dan'), state text)`, ); await db.insert(users).values({}); const res = await db.select().from(users); expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); }); test('Insert all defaults in multiple rows', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('empty_insert_multiple', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial primary key, name text default ('Dan'), state text)`, ); await db.insert(users).values([{}, {}]); const res = await db.select().from(users); expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); }); test('build query insert with onDuplicate', async (ctx) => { const { db } = ctx.mysql; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (default, ?, default, ?, default) on duplicate key update `name` = ?', params: ['John', '["foo","bar"]', 'John1'], }); }); test('insert with onDuplicate', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable) .values({ name: 'John' }); await db.insert(usersTable) .values({ id: 1, name: 'John' }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert conflict', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable) .values({ name: 'John' }); await expect((async () => { db.insert(usersTable).values({ id: 1, name: 'John1' }); })()).resolves.not.toThrowError(); }); test('insert conflict with ignore', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable) .values({ name: 'John' }); await db.insert(usersTable) .ignore() .values({ id: 1, name: 'John1' }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async (ctx) => { const { db } = ctx.mysql; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }).from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); test('full join with alias', async (ctx) => { const { db } = ctx.mysql; const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select().from(users) .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('select from alias', async (ctx) => { const { db } = ctx.mysql; const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select() .from(user) .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('insert with spaces', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const statement = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .prepare(); const result = await statement.execute(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('insert: placeholders on columns with encoder', async (ctx) => { const { db } = ctx.mysql; const date = new Date('2024-08-07T15:30:00Z'); const statement = db.insert(usersTable).values({ name: 'John', createdAt: sql.placeholder('createdAt'), }).prepare(); await statement.execute({ createdAt: date }); const result = await db .select({ id: usersTable.id, createdAt: usersTable.createdAt, }) .from(usersTable); expect(result).toEqual([ { id: 1, createdAt: date }, ]); }); test('prepared statement reuse', async (ctx) => { const { db } = ctx.mysql; const stmt = db.insert(usersTable).values({ verified: true, name: sql.placeholder('name'), }).prepare(); for (let i = 0; i < 10; i++) { await stmt.execute({ name: `John ${i}` }); } const result = await db.select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified, }).from(usersTable); expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, { id: 4, name: 'John 3', verified: true }, { id: 5, name: 'John 4', verified: true }, { id: 6, name: 'John 5', verified: true }, { id: 7, name: 'John 6', verified: true }, { id: 8, name: 'John 7', verified: true }, { id: 9, name: 'John 8', verified: true }, { id: 10, name: 'John 9', verified: true }, ]); }); test('prepared statement with placeholder in .where', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const stmt = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .prepare(); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement with placeholder in .limit', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .limit(sql.placeholder('limit')) .prepare(); const result = await stmt.execute({ id: 1, limit: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); expect(result).toHaveLength(1); }); test('prepared statement with placeholder in .offset', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .limit(sql.placeholder('limit')) .offset(sql.placeholder('offset')) .prepare(); const result = await stmt.execute({ limit: 1, offset: 1 }); expect(result).toEqual([{ id: 2, name: 'John1' }]); }); test('prepared statement built using $dynamic', async (ctx) => { const { db } = ctx.mysql; function withLimitOffset(qb: any) { return qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); } await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .$dynamic(); withLimitOffset(stmt).prepare('stmt_limit'); const result = await stmt.execute({ limit: 1, offset: 1 }); expect(result).toEqual([{ id: 2, name: 'John1' }]); expect(result).toHaveLength(1); }); test('migrator', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists cities_migration`); await db.execute(sql`drop table if exists users_migration`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists __drizzle_migrations`); await migrate(db, { migrationsFolder: './drizzle2/mysql' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table cities_migration`); await db.execute(sql`drop table users_migration`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table __drizzle_migrations`); }); test('insert via db.execute + select via db.execute', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); expect(result[0]).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async (ctx) => { const { db } = ctx.mysql; const inserted = await db.execute( db.insert(usersTable).values({ name: 'John' }), ); expect(inserted[0].affectedRows).toBe(1); }); test('insert + select all possible dates', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists \`datestable\``); await db.execute( sql` create table \`datestable\` ( \`date\` date, \`date_as_string\` date, \`time\` time, \`datetime\` datetime, \`datetime_as_string\` datetime, \`timestamp\` timestamp(3), \`timestamp_as_string\` timestamp(3), \`year\` year ) `, ); const date = new Date('2022-11-11'); const dateWithMilliseconds = new Date('2022-11-11 12:12:12.123'); await db.insert(datesTable).values({ date: date, dateAsString: '2022-11-11', time: '12:12:12', datetime: date, year: 22, datetimeAsString: '2022-11-11 12:12:12', timestamp: dateWithMilliseconds, timestampAsString: '2022-11-11 12:12:12.123', }); const res = await db.select().from(datesTable); expect(res[0]?.date).toBeInstanceOf(Date); expect(res[0]?.datetime).toBeInstanceOf(Date); expect(typeof res[0]?.dateAsString).toBe('string'); expect(typeof res[0]?.datetimeAsString).toBe('string'); expect(res).toEqual([{ date: toLocalDate(new Date('2022-11-11')), dateAsString: '2022-11-11', time: '12:12:12', datetime: new Date('2022-11-11'), year: 2022, datetimeAsString: '2022-11-11 12:12:12', timestamp: new Date('2022-11-11 12:12:12.123'), timestampAsString: '2022-11-11 12:12:12.123', }]); await db.execute(sql`drop table if exists \`datestable\``); }); const tableWithEnums = mysqlTable('enums_test_case', { id: serial('id').primaryKey(), enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), }); test('Mysql enum as ts enum', async (ctx) => { enum Test { a = 'a', b = 'b', c = 'c', } const tableWithTsEnums = mysqlTable('enums_test_case', { id: serial('id').primaryKey(), enum1: mysqlEnum('enum1', Test).notNull(), enum2: mysqlEnum('enum2', Test).default(Test.a), enum3: mysqlEnum('enum3', Test).notNull().default(Test.b), }); const { db } = ctx.mysql; await db.execute(sql`drop table if exists \`enums_test_case\``); await db.execute(sql` create table \`enums_test_case\` ( \`id\` serial primary key, \`enum1\` ENUM('a', 'b', 'c') not null, \`enum2\` ENUM('a', 'b', 'c') default 'a', \`enum3\` ENUM('a', 'b', 'c') not null default 'b' ) `); await db.insert(tableWithTsEnums).values([ { id: 1, enum1: Test.a, enum2: Test.b, enum3: Test.c }, { id: 2, enum1: Test.a, enum3: Test.c }, { id: 3, enum1: Test.a }, ]); const res = await db.select().from(tableWithTsEnums); await db.execute(sql`drop table \`enums_test_case\``); expect(res).toEqual([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, ]); }); test('Mysql enum test case #1', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists \`enums_test_case\``); await db.execute(sql` create table \`enums_test_case\` ( \`id\` serial primary key, \`enum1\` ENUM('a', 'b', 'c') not null, \`enum2\` ENUM('a', 'b', 'c') default 'a', \`enum3\` ENUM('a', 'b', 'c') not null default 'b' ) `); await db.insert(tableWithEnums).values([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum3: 'c' }, { id: 3, enum1: 'a' }, ]); const res = await db.select().from(tableWithEnums); await db.execute(sql`drop table \`enums_test_case\``); expect(res).toEqual([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, ]); }); test('left join (flat object fields)', async (ctx) => { const { db } = ctx.mysql; await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const res = await db.select({ userId: users2Table.id, userName: users2Table.name, cityId: citiesTable.id, cityName: citiesTable.name, }).from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, { userId: 2, userName: 'Jane', cityId: null, cityName: null }, ]); }); test('left join (grouped fields)', async (ctx) => { const { db } = ctx.mysql; await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const res = await db.select({ id: users2Table.id, user: { name: users2Table.name, nameUpper: sql`upper(${users2Table.name})`, }, city: { id: citiesTable.id, name: citiesTable.name, nameUpper: sql`upper(${citiesTable.name})`, }, }).from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { id: 1, user: { name: 'John', nameUpper: 'JOHN' }, city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, }, { id: 2, user: { name: 'Jane', nameUpper: 'JANE' }, city: null, }, ]); }); test('left join (all fields)', async (ctx) => { const { db } = ctx.mysql; await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const res = await db.select().from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { users2: { id: 1, name: 'John', cityId: 1, }, cities: { id: 1, name: 'Paris', }, }, { users2: { id: 2, name: 'Jane', cityId: null, }, cities: null, }, ]); }); test('select from a many subquery', async (ctx) => { const { db } = ctx.mysql; await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 2 }, ]); const res = await db.select({ population: db.select({ count: count().as('count') }).from(users2Table).where( eq(users2Table.cityId, citiesTable.id), ).as( 'population', ), name: citiesTable.name, }).from(citiesTable); expectTypeOf(res).toEqualTypeOf< { population: number; name: string; }[] >(); expect(res).toStrictEqual([{ population: 1, name: 'Paris', }, { population: 2, name: 'London', }]); }); test('select from a one subquery', async (ctx) => { const { db } = ctx.mysql; await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 2 }, ]); const res = await db.select({ cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id)) .as( 'cityName', ), name: users2Table.name, }).from(users2Table); expectTypeOf(res).toEqualTypeOf< { cityName: string; name: string; }[] >(); expect(res).toStrictEqual([{ cityName: 'Paris', name: 'John', }, { cityName: 'London', name: 'Jane', }, { cityName: 'London', name: 'Jack', }]); }); test('join subquery', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists \`courses\``); await db.execute(sql`drop table if exists \`course_categories\``); await db.execute( sql` create table \`course_categories\` ( \`id\` serial primary key, \`name\` text not null ) `, ); await db.execute( sql` create table \`courses\` ( \`id\` serial primary key, \`name\` text not null, \`category_id\` int references \`course_categories\`(\`id\`) ) `, ); await db.insert(courseCategoriesTable).values([ { name: 'Category 1' }, { name: 'Category 2' }, { name: 'Category 3' }, { name: 'Category 4' }, ]); await db.insert(coursesTable).values([ { name: 'Development', categoryId: 2 }, { name: 'IT & Software', categoryId: 3 }, { name: 'Marketing', categoryId: 4 }, { name: 'Design', categoryId: 1 }, ]); const sq2 = db .select({ categoryId: courseCategoriesTable.id, category: courseCategoriesTable.name, total: sql`count(${courseCategoriesTable.id})`, }) .from(courseCategoriesTable) .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) .as('sq2'); const res = await db .select({ courseName: coursesTable.name, categoryId: sq2.categoryId, }) .from(coursesTable) .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) .orderBy(coursesTable.name); expect(res).toEqual([ { courseName: 'Design', categoryId: 1 }, { courseName: 'Development', categoryId: 2 }, { courseName: 'IT & Software', categoryId: 3 }, { courseName: 'Marketing', categoryId: 4 }, ]); await db.execute(sql`drop table if exists \`courses\``); await db.execute(sql`drop table if exists \`course_categories\``); }); test('with ... select', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists \`orders\``); await db.execute( sql` create table \`orders\` ( \`id\` serial primary key, \`region\` text not null, \`product\` text not null, \`amount\` int not null, \`quantity\` int not null ) `, ); await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const regionalSales = db .$with('regional_sales') .as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = db .$with('top_regions') .as( db .select({ region: regionalSales.region, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, productSales: sql`cast(sum(${orders.amount}) as unsigned)`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product); expect(result).toEqual([ { region: 'Europe', product: 'A', productUnits: 3, productSales: 30, }, { region: 'Europe', product: 'B', productUnits: 5, productSales: 50, }, { region: 'US', product: 'A', productUnits: 7, productSales: 70, }, { region: 'US', product: 'B', productUnits: 9, productSales: 90, }, ]); }); test('with ... update', async (ctx) => { const { db } = ctx.mysql; const products = mysqlTable('products', { id: serial('id').primaryKey(), price: decimal('price', { precision: 15, scale: 2, }).notNull(), cheap: boolean('cheap').notNull().default(false), }); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql` create table ${products} ( id serial primary key, price decimal(15, 2) not null, cheap boolean not null default false ) `); await db.insert(products).values([ { price: '10.99' }, { price: '25.85' }, { price: '32.99' }, { price: '2.50' }, { price: '4.59' }, ]); const averagePrice = db .$with('average_price') .as( db .select({ value: sql`avg(${products.price})`.as('value'), }) .from(products), ); await db .with(averagePrice) .update(products) .set({ cheap: true, }) .where(lt(products.price, sql`(select * from ${averagePrice})`)); const result = await db .select({ id: products.id, }) .from(products) .where(eq(products.cheap, true)); expect(result).toEqual([ { id: 1 }, { id: 4 }, { id: 5 }, ]); }); test('with ... delete', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists \`orders\``); await db.execute( sql` create table \`orders\` ( \`id\` serial primary key, \`region\` text not null, \`product\` text not null, \`amount\` int not null, \`quantity\` int not null ) `, ); await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const averageAmount = db .$with('average_amount') .as( db .select({ value: sql`avg(${orders.amount})`.as('value'), }) .from(orders), ); await db .with(averageAmount) .delete(orders) .where(gt(orders.amount, sql`(select * from ${averageAmount})`)); const result = await db .select({ id: orders.id, }) .from(orders); expect(result).toEqual([ { id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, ]); }); test('select from subquery sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); const sq = db .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) .from(users2Table) .as('sq'); const res = await db.select({ name: sq.name }).from(sq); expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); }); test('select a field without joining its table', (ctx) => { const { db } = ctx.mysql; expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); }); test('select all fields from subquery without alias', (ctx) => { const { db } = ctx.mysql; const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); expect(() => db.select().from(sq).prepare()).toThrowError(); }); test('select count()', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); const res = await db.select({ count: sql`count(*)` }).from(usersTable); expect(res).toEqual([{ count: 2 }]); }); test('select for ...', (ctx) => { const { db } = ctx.mysql; { const query = db.select().from(users2Table).for('update').toSQL(); expect(query.sql).toMatch(/ for update$/); } { const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); expect(query.sql).toMatch(/ for share skip locked$/); } { const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); expect(query.sql).toMatch(/ for update nowait$/); } }); test('having', async (ctx) => { const { db } = ctx.mysql; await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2, }]); const result = await db .select({ id: citiesTable.id, name: sql`upper(${citiesTable.name})`.as('upper_name'), usersCount: sql`count(${users2Table.id})`.as('users_count'), }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(citiesTable.id) .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name); expect(result).toEqual([ { id: 1, name: 'LONDON', usersCount: 2, }, { id: 2, name: 'PARIS', usersCount: 1, }, ]); }); test('view', async (ctx) => { const { db } = ctx.mysql; const newYorkers1 = mysqlView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); const newYorkers2 = mysqlView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); const newYorkers3 = mysqlView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).existing(); await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop view ${newYorkers1}`); }); test('select from raw sql', async (ctx) => { const { db } = ctx.mysql; const result = await db.select({ id: sql`id`, name: sql`name`, }).from(sql`(select 1 as id, 'John' as name) as users`); Expect>; expect(result).toEqual([ { id: 1, name: 'John' }, ]); }); test('select from raw sql with joins', async (ctx) => { const { db } = ctx.mysql; const result = await db .select({ id: sql`users.id`, name: sql`users.name`, userCity: sql`users.city`, cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); Expect>; expect(result).toEqual([ { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, ]); }); test('join on aliased sql from select', async (ctx) => { const { db } = ctx.mysql; const result = await db .select({ userId: sql`users.id`.as('userId'), name: sql`users.name`, userCity: sql`users.city`, cityId: sql`cities.id`.as('cityId'), cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('join on aliased sql from with clause', async (ctx) => { const { db } = ctx.mysql; const users = db.$with('users').as( db.select({ id: sql`id`.as('userId'), name: sql`name`.as('userName'), city: sql`city`.as('city'), }).from( sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, ), ); const cities = db.$with('cities').as( db.select({ id: sql`id`.as('cityId'), name: sql`name`.as('cityName'), }).from( sql`(select 1 as id, 'Paris' as name) as cities`, ), ); const result = await db .with(users, cities) .select({ userId: users.id, name: users.name, userCity: users.city, cityId: cities.id, cityName: cities.name, }) .from(users) .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('prefixed table', async (ctx) => { const { db } = ctx.mysql; const mysqlTable = mysqlTableCreator((name) => `myprefix_${name}`); const users = mysqlTable('test_prefixed_table_with_unique_name', { id: int('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, ); await db.insert(users).values({ id: 1, name: 'John' }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, name: 'John' }]); await db.execute(sql`drop table ${users}`); }); test('orderBy with aliased column', (ctx) => { const { db } = ctx.mysql; const query = db.select({ test: sql`something`.as('test'), }).from(users2Table).orderBy((fields) => fields.test).toSQL(); expect(query.sql).toBe('select something as `test` from `users2` order by `test`'); }); test('timestamp timezone', async (ctx) => { const { db } = ctx.mysql; const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); await db.insert(usersTable).values({ name: 'With default times' }); await db.insert(usersTable).values({ name: 'Without default times', createdAt: date, }); const users = await db.select().from(usersTable); // check that the timestamps are set correctly for default times expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); // check that the timestamps are set correctly for non default times expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); }); test('transaction', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users_transactions', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); const products = mysqlTable('products_transactions', { id: serial('id').primaryKey(), price: int('price').notNull(), stock: int('stock').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); await db.execute( sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, ); const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); await db.transaction(async (tx) => { await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 90 }]); await db.execute(sql`drop table ${users}`); await db.execute(sql`drop table ${products}`); }); test('transaction with options (set isolationLevel)', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users_transactions', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); const products = mysqlTable('products_transactions', { id: serial('id').primaryKey(), price: int('price').notNull(), stock: int('stock').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); await db.execute( sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, ); const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); await db.transaction(async (tx) => { await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); }, { isolationLevel: 'serializable' }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 90 }]); await db.execute(sql`drop table ${users}`); await db.execute(sql`drop table ${products}`); }); test('transaction rollback', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users_transactions_rollback', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_transactions_rollback (id serial not null primary key, balance int not null)`, ); await expect((async () => { await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); tx.rollback(); }); })()).rejects.toThrowError(TransactionRollbackError); const result = await db.select().from(users); expect(result).toEqual([]); await db.execute(sql`drop table ${users}`); }); test('nested transaction', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users_nested_transactions', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_nested_transactions (id serial not null primary key, balance int not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }); }); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 200 }]); await db.execute(sql`drop table ${users}`); }); test('nested transaction rollback', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users_nested_transactions_rollback', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_nested_transactions_rollback (id serial not null primary key, balance int not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); await expect((async () => { await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }); tx.rollback(); }); })()).rejects.toThrowError(TransactionRollbackError); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 100 }]); await db.execute(sql`drop table ${users}`); }); test('join subquery with join', async (ctx) => { const { db } = ctx.mysql; const internalStaff = mysqlTable('internal_staff', { userId: int('user_id').notNull(), }); const customUser = mysqlTable('custom_user', { id: int('id').notNull(), }); const ticket = mysqlTable('ticket', { staffId: int('staff_id').notNull(), }); await db.execute(sql`drop table if exists ${internalStaff}`); await db.execute(sql`drop table if exists ${customUser}`); await db.execute(sql`drop table if exists ${ticket}`); await db.execute(sql`create table internal_staff (user_id integer not null)`); await db.execute(sql`create table custom_user (id integer not null)`); await db.execute(sql`create table ticket (staff_id integer not null)`); await db.insert(internalStaff).values({ userId: 1 }); await db.insert(customUser).values({ id: 1 }); await db.insert(ticket).values({ staffId: 1 }); const subq = db .select() .from(internalStaff) .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) .as('internal_staff'); const mainQuery = await db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); expect(mainQuery).toEqual([{ ticket: { staffId: 1 }, internal_staff: { internal_staff: { userId: 1 }, custom_user: { id: 1 }, }, }]); await db.execute(sql`drop table ${internalStaff}`); await db.execute(sql`drop table ${customUser}`); await db.execute(sql`drop table ${ticket}`); }); test('subquery with view', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users_subquery_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }); const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 1 }, { name: 'Jill', cityId: 2 }, ]); const sq = db.$with('sq').as(db.select().from(newYorkers)); const result = await db.with(sq).select().from(sq); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 3, name: 'Jack', cityId: 1 }, ]); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); }); test('join view as subquery', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users_join_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }); const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 1 }, { name: 'Jill', cityId: 2 }, ]); const sq = db.select().from(newYorkers).as('new_yorkers_sq'); const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); expect(result).toEqual([ { users_join_view: { id: 1, name: 'John', cityId: 1 }, new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, }, { users_join_view: { id: 2, name: 'Jane', cityId: 2 }, new_yorkers_sq: null, }, { users_join_view: { id: 3, name: 'Jack', cityId: 1 }, new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, }, { users_join_view: { id: 4, name: 'Jill', cityId: 2 }, new_yorkers_sq: null, }, ]); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); }); test('select iterator', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users_iterator', { id: serial('id').primaryKey(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key)`); await db.insert(users).values([{}, {}, {}]); const iter = db.select().from(users).iterator(); const result: typeof users.$inferSelect[] = []; for await (const row of iter) { result.push(row); } expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); test('select iterator w/ prepared statement', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users_iterator', { id: serial('id').primaryKey(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key)`); await db.insert(users).values([{}, {}, {}]); const prepared = db.select().from(users).prepare(); const iter = prepared.iterator(); const result: typeof users.$inferSelect[] = []; for await (const row of iter) { result.push(row); } expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); test('insert undefined', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.insert(users).values({ name: undefined }); })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); test('update undefined', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.update(users).set({ name: undefined }); })()).rejects.toThrowError(); await expect((async () => { await db.update(users).set({ id: 1, name: undefined }); })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); test('utc config for datetime', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists \`datestable\``); await db.execute( sql` create table \`datestable\` ( \`datetime_utc\` datetime(3), \`datetime\` datetime(3), \`datetime_as_string\` datetime ) `, ); const datesTable = mysqlTable('datestable', { datetimeUTC: datetime('datetime_utc', { fsp: 3, mode: 'date' }), datetime: datetime('datetime', { fsp: 3 }), datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), }); const dateObj = new Date('2022-11-11'); const dateUtc = new Date('2022-11-11T12:12:12.122Z'); await db.insert(datesTable).values({ datetimeUTC: dateUtc, datetime: dateObj, datetimeAsString: '2022-11-11 12:12:12', }); const res = await db.select().from(datesTable); const [rawSelect] = await db.execute(sql`select \`datetime_utc\` from \`datestable\``); const selectedRow = (rawSelect as unknown as [{ datetime_utc: string }])[0]; expect(selectedRow.datetime_utc).toBe('2022-11-11 12:12:12.122'); expect(new Date(selectedRow.datetime_utc.replace(' ', 'T') + 'Z')).toEqual(dateUtc); expect(res[0]?.datetime).toBeInstanceOf(Date); expect(res[0]?.datetimeUTC).toBeInstanceOf(Date); expect(typeof res[0]?.datetimeAsString).toBe('string'); expect(res).toEqual([{ datetimeUTC: dateUtc, datetime: new Date('2022-11-11'), datetimeAsString: '2022-11-11 12:12:12', }]); await db.execute(sql`drop table if exists \`datestable\``); }); test('set operations (union) from query builder with subquery', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const sq = db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).as('sq'); const result = await db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).union( db.select().from(sq), ).limit(8); expect(result).toHaveLength(8); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, { id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jack' }, { id: 4, name: 'Peter' }, { id: 5, name: 'Ben' }, ]); // union should throw if selected fields are not in the same order await expect((async () => { db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).union( db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table), ); })()).rejects.toThrowError(); }); test('set operations (union) as function', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await union( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 1, name: 'John' }, ]); await expect((async () => { union( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (union all) from query builder', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).limit(2).unionAll( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).limit(2), ).orderBy(asc(sql`id`)).limit(3); expect(result).toHaveLength(3); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 1, name: 'New York' }, { id: 2, name: 'London' }, ]); await expect((async () => { db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).limit(2).unionAll( db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).limit(2), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (union all) as function', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await unionAll( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).limit(1); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); await expect((async () => { unionAll( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).limit(1); })()).rejects.toThrowError(); }); test('set operations (intersect) from query builder', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(gt(citiesTable.id, 1)), ); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(gt(citiesTable.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (intersect) as function', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).limit(1); expect(result).toHaveLength(0); expect(result).toEqual([]); await expect((async () => { intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), ).limit(1); })()).rejects.toThrowError(); }); test('set operations (intersect all) from query builder', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).limit(2).intersectAll( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).limit(2), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, ]); await expect((async () => { db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).limit(2).intersectAll( db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).limit(2), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (intersect all) as function', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await intersectAll( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'John' }, ]); await expect((async () => { intersectAll( db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (except) from query builder', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await db .select() .from(citiesTable).except( db .select() .from(citiesTable).where(gt(citiesTable.id, 1)), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); }); test('set operations (except) as function', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await except( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable), db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).limit(3); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { except( db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable), db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).limit(3); })()).rejects.toThrowError(); }); test('set operations (except all) from query builder', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await db .select() .from(citiesTable).exceptAll( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { db .select() .from(citiesTable).exceptAll( db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).where(eq(citiesTable.id, 1)), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (except all) as function', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await exceptAll( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gt(users2Table.id, 7)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).limit(6).orderBy(asc(sql.identifier('id'))); expect(result).toHaveLength(6); expect(result).toEqual([ { id: 2, name: 'Jane' }, { id: 3, name: 'Jack' }, { id: 4, name: 'Peter' }, { id: 5, name: 'Ben' }, { id: 6, name: 'Jill' }, { id: 7, name: 'Mary' }, ]); await expect((async () => { exceptAll( db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gt(users2Table.id, 7)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).limit(6); })()).rejects.toThrowError(); }); test('set operations (mixed) from query builder', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const result = await db .select() .from(citiesTable).except( ({ unionAll }) => unionAll( db .select() .from(citiesTable).where(gt(citiesTable.id, 1)), db.select().from(citiesTable).where(eq(citiesTable.id, 2)), ).orderBy(asc(citiesTable.id)).limit(1).offset(1), ); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { db .select() .from(citiesTable).except( ({ unionAll }) => unionAll( db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).where(gt(citiesTable.id, 1)), db.select().from(citiesTable).where(eq(citiesTable.id, 2)), ), ); })()).rejects.toThrowError(); }); test('set operations (mixed all) as function with subquery', async (ctx) => { const { db } = ctx.mysql; await setupSetOperationTest(db); const sq = except( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id, 5)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 7)), ).orderBy(asc(sql.identifier('id'))).as('sq'); const result = await union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db.select().from(sq).limit(1), db .select().from(citiesTable).where(gt(citiesTable.id, 1)), ); expect(result).toHaveLength(4); expect(result).toEqual([ { id: 1, name: 'John' }, { id: 5, name: 'Ben' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), except( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id, 5)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 7)), ).limit(1), db .select().from(citiesTable).where(gt(citiesTable.id, 1)), ); })()).rejects.toThrowError(); }); test('aggregate function: count', async (ctx) => { const { db } = ctx.mysql; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: count() }).from(table); const result2 = await db.select({ value: count(table.a) }).from(table); const result3 = await db.select({ value: countDistinct(table.name) }).from(table); expect(result1[0]?.value).toBe(7); expect(result2[0]?.value).toBe(5); expect(result3[0]?.value).toBe(6); }); test('aggregate function: avg', async (ctx) => { const { db } = ctx.mysql; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: avg(table.b) }).from(table); const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('33.3333'); expect(result2[0]?.value).toBe(null); expect(result3[0]?.value).toBe('42.5000'); }); test('aggregate function: sum', async (ctx) => { const { db } = ctx.mysql; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: sum(table.b) }).from(table); const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('200'); expect(result2[0]?.value).toBe(null); expect(result3[0]?.value).toBe('170'); }); test('aggregate function: max', async (ctx) => { const { db } = ctx.mysql; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: max(table.b) }).from(table); const result2 = await db.select({ value: max(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(90); expect(result2[0]?.value).toBe(null); }); test('aggregate function: min', async (ctx) => { const { db } = ctx.mysql; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: min(table.b) }).from(table); const result2 = await db.select({ value: min(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(10); expect(result2[0]?.value).toBe(null); }); test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists ${usersOnUpdate}`); await db.execute( sql` create table ${usersOnUpdate} ( id serial not null primary key, name text not null, update_counter integer default 1 not null, updated_at datetime(3), uppercase_name text, always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); const justDates = await db.select({ updatedAt }).from(usersOnUpdate); const response = await db.select({ ...rest }).from(usersOnUpdate); expect(response).toEqual([ { name: 'John', id: 1, updateCounter: 1, uppercaseName: 'JOHN', alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: 1, uppercaseName: 'JANE', alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, uppercaseName: 'JACK', alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, uppercaseName: 'JILL', alwaysNull: null }, ]); const msDelay = 750; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists ${usersOnUpdate}`); await db.execute( sql` create table ${usersOnUpdate} ( id serial not null primary key, name text not null, update_counter integer default 1 not null, updated_at datetime(3), uppercase_name text, always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John', alwaysNull: 'this will will be null after updating' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); const initial = await db.select({ updatedAt }).from(usersOnUpdate); await db.update(usersOnUpdate).set({ name: 'Angel', uppercaseName: null }).where(eq(usersOnUpdate.id, 1)); const justDates = await db.select({ updatedAt }).from(usersOnUpdate); const response = await db.select({ ...rest }).from(usersOnUpdate); expect(response).toEqual([ { name: 'Angel', id: 1, updateCounter: 2, uppercaseName: null, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: 1, uppercaseName: 'JANE', alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, uppercaseName: 'JACK', alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, uppercaseName: 'JILL', alwaysNull: null }, ]); const msDelay = 750; expect(initial[0]?.updatedAt?.valueOf()).not.toBe(justDates[0]?.updatedAt?.valueOf()); for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); // mySchema tests test('mySchema :: select all fields', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersMySchemaTable).values({ name: 'John' }); const result = await db.select().from(usersMySchemaTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('mySchema :: select sql', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersMySchemaTable.name})`, }).from(usersMySchemaTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: select typed sql', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersMySchemaTable.name})`, }).from(usersMySchemaTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: select distinct', async (ctx) => { const { db } = ctx.mysql; const usersDistinctTable = mysqlTable('users_distinct', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]); const users = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ); await db.execute(sql`drop table ${usersDistinctTable}`); expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); }); test('mySchema :: insert returning sql', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); const [result, _] = await db.insert(usersMySchemaTable).values({ name: 'John' }); expect(result.insertId).toBe(1); }); test('mySchema :: delete returning sql', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); expect(users[0].affectedRows).toBe(1); }); test('mySchema :: update with returning partial', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ name: 'John' }); const updatedUsers = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where( eq(usersMySchemaTable.name, 'John'), ); const users = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( usersMySchemaTable, ) .where( eq(usersMySchemaTable.id, 1), ); expect(updatedUsers[0].changedRows).toBe(1); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('mySchema :: delete with returning all fields', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersMySchemaTable).values({ name: 'John' }); const deletedUser = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('mySchema :: insert + select', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ name: 'John' }); const result = await db.select().from(usersMySchemaTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersMySchemaTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersMySchemaTable); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('mySchema :: insert with overridden default values', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersMySchemaTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('mySchema :: insert many', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name, jsonb: usersMySchemaTable.jsonb, verified: usersMySchemaTable.verified, }).from(usersMySchemaTable); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('mySchema :: select with group by as field', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.name); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('mySchema :: select with group by as column + sql', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('mySchema :: build query', async (ctx) => { const { db } = ctx.mysql; const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) .toSQL(); expect(query).toEqual({ sql: `select \`id\`, \`name\` from \`mySchema\`.\`userstest\` group by \`mySchema\`.\`userstest\`.\`id\`, \`mySchema\`.\`userstest\`.\`name\``, params: [], }); }); test('mySchema :: insert with spaces', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( usersMySchemaTable, ); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('mySchema :: prepared statement with placeholder in .where', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ name: 'John' }); const stmt = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name, }).from(usersMySchemaTable) .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) .prepare(); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('mySchema :: select from tables with same name from different schema using alias', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.execute(sql`drop table if exists \`userstest\``); await db.execute( sql` create table \`userstest\` ( \`id\` serial primary key, \`name\` text not null, \`verified\` boolean not null default false, \`jsonb\` json, \`created_at\` timestamp not null default now() ) `, ); await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); await db.insert(usersTable).values({ id: 11, name: 'Hans' }); const customerAlias = alias(usersTable, 'customer'); const result = await db .select().from(usersMySchemaTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersMySchemaTable.id, 10)); expect(result).toEqual([{ userstest: { id: 10, name: 'Ivan', verified: false, jsonb: null, createdAt: result[0]!.userstest.createdAt, }, customer: { id: 11, name: 'Hans', verified: false, jsonb: null, createdAt: result[0]!.customer!.createdAt, }, }]); }); test('insert $returningId: serial as id', async (ctx) => { const { db } = ctx.mysql; const result = await db.insert(usersTable).values({ name: 'John' }).$returningId(); expectTypeOf(result).toEqualTypeOf<{ id: number; }[]>(); expect(result).toStrictEqual([{ id: 1 }]); }); test('insert $returningId: serial as id, not first column', async (ctx) => { const { db } = ctx.mysql; const usersTableDefNotFirstColumn = mysqlTable('users2', { name: text('name').notNull(), id: serial('id').primaryKey(), }); const result = await db.insert(usersTableDefNotFirstColumn).values({ name: 'John' }).$returningId(); expectTypeOf(result).toEqualTypeOf<{ id: number; }[]>(); expect(result).toStrictEqual([{ id: 1 }]); }); test('insert $returningId: serial as id, batch insert', async (ctx) => { const { db } = ctx.mysql; const result = await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]).$returningId(); expectTypeOf(result).toEqualTypeOf<{ id: number; }[]>(); expect(result).toStrictEqual([{ id: 1 }, { id: 2 }]); }); test('insert $returningId: $default as primary key', async (ctx) => { const { db } = ctx.mysql; const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; let iterator = 0; const usersTableDefFn = mysqlTable('users_default_fn', { customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { const value = uniqueKeys[iterator]!; iterator++; return value; }), name: text('name').notNull(), }); await setupReturningFunctionsTest(db); const result = await db.insert(usersTableDefFn).values([{ name: 'John' }, { name: 'John1' }]) // ^? .$returningId(); expectTypeOf(result).toEqualTypeOf<{ customId: string; }[]>(); expect(result).toStrictEqual([{ customId: 'ao865jf3mcmkfkk8o5ri495z' }, { customId: 'dyqs529eom0iczo2efxzbcut', }]); }); test('insert $returningId: $default as primary key with value', async (ctx) => { const { db } = ctx.mysql; const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; let iterator = 0; const usersTableDefFn = mysqlTable('users_default_fn', { customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { const value = uniqueKeys[iterator]!; iterator++; return value; }), name: text('name').notNull(), }); await setupReturningFunctionsTest(db); const result = await db.insert(usersTableDefFn).values([{ name: 'John', customId: 'test' }, { name: 'John1' }]) // ^? .$returningId(); expectTypeOf(result).toEqualTypeOf<{ customId: string; }[]>(); expect(result).toStrictEqual([{ customId: 'test' }, { customId: 'ao865jf3mcmkfkk8o5ri495z' }]); }); test('mySchema :: view', async (ctx) => { const { db } = ctx.mysql; const newYorkers1 = mySchema.view('new_yorkers') .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); const newYorkers2 = mySchema.view('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); const newYorkers3 = mySchema.view('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).existing(); await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2MySchemaTable).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop view ${newYorkers1}`); }); test('test $onUpdateFn and $onUpdate works with sql value', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), updatedAt: timestamp('updated_at', { fsp: 6, }) .notNull() .$onUpdate(() => sql`current_timestamp`), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql` create table ${users} ( \`id\` serial primary key, \`name\` text not null, \`updated_at\` timestamp not null ) `, ); await db.insert(users).values({ name: 'John', }); const insertResp = await db.select({ updatedAt: users.updatedAt }).from(users); await new Promise((resolve) => setTimeout(resolve, 1000)); const now = Date.now(); await new Promise((resolve) => setTimeout(resolve, 1000)); await db.update(users).set({ name: 'John', }); const updateResp = await db.select({ updatedAt: users.updatedAt }).from(users); expect(insertResp[0]?.updatedAt.getTime() ?? 0).lessThan(now); expect(updateResp[0]?.updatedAt.getTime() ?? 0).greaterThan(now); }); test('$count separate', async (ctx) => { const { db } = ctx.mysql; const countTestTable = mysqlTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.$count(countTestTable); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual(4); }); test('$count embedded', async (ctx) => { const { db } = ctx.mysql; const countTestTable = mysqlTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.select({ count: db.$count(countTestTable), }).from(countTestTable); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual([ { count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }, ]); }); test('$count separate reuse', async (ctx) => { const { db } = ctx.mysql; const countTestTable = mysqlTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = db.$count(countTestTable); const count1 = await count; await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); const count2 = await count; await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); const count3 = await count; await db.execute(sql`drop table ${countTestTable}`); expect(count1).toStrictEqual(4); expect(count2).toStrictEqual(5); expect(count3).toStrictEqual(6); }); test('$count embedded reuse', async (ctx) => { const { db } = ctx.mysql; const countTestTable = mysqlTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = db.select({ count: db.$count(countTestTable), }).from(countTestTable); const count1 = await count; await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); const count2 = await count; await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); const count3 = await count; await db.execute(sql`drop table ${countTestTable}`); expect(count1).toStrictEqual([ { count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }, ]); expect(count2).toStrictEqual([ { count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }, ]); expect(count3).toStrictEqual([ { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, ]); }); test('$count separate with filters', async (ctx) => { const { db } = ctx.mysql; const countTestTable = mysqlTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.$count(countTestTable, gt(countTestTable.id, 1)); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual(3); }); test('$count embedded with filters', async (ctx) => { const { db } = ctx.mysql; const countTestTable = mysqlTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.select({ count: db.$count(countTestTable, gt(countTestTable.id, 1)), }).from(countTestTable); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual([ { count: 3 }, { count: 3 }, { count: 3 }, { count: 3 }, ]); }); test('limit 0', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() .from(usersTable) .limit(0); expect(users).toEqual([]); }); test('limit -1', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() .from(usersTable) .limit(-1); expect(users.length).toBeGreaterThan(0); }); test('define constraints as array', async (ctx) => { const { db } = ctx.mysql; const table = mysqlTable('name', { id: int(), }, (t) => [ index('name').on(t.id), primaryKey({ columns: [t.id], name: 'custom' }), ]); const { indexes, primaryKeys } = getTableConfig(table); expect(indexes.length).toBe(1); expect(primaryKeys.length).toBe(1); }); test('define constraints as array inside third param', async (ctx) => { const { db } = ctx.mysql; const table = mysqlTable('name', { id: int(), }, (t) => [ [index('name').on(t.id), primaryKey({ columns: [t.id], name: 'custom' })], ]); const { indexes, primaryKeys } = getTableConfig(table); expect(indexes.length).toBe(1); expect(primaryKeys.length).toBe(1); }); test('update with limit and order by', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([ { name: 'Barry', verified: false }, { name: 'Alan', verified: false }, { name: 'Carl', verified: false }, ]); await db.update(usersTable).set({ verified: true }).limit(2).orderBy(asc(usersTable.name)); const result = await db.select({ name: usersTable.name, verified: usersTable.verified }).from(usersTable).orderBy( asc(usersTable.name), ); expect(result).toStrictEqual([ { name: 'Alan', verified: true }, { name: 'Barry', verified: true }, { name: 'Carl', verified: false }, ]); }); test('delete with limit and order by', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([ { name: 'Barry', verified: false }, { name: 'Alan', verified: false }, { name: 'Carl', verified: false }, ]); await db.delete(usersTable).where(eq(usersTable.verified, false)).limit(1).orderBy(asc(usersTable.name)); const result = await db.select({ name: usersTable.name, verified: usersTable.verified }).from(usersTable).orderBy( asc(usersTable.name), ); expect(result).toStrictEqual([ { name: 'Barry', verified: false }, { name: 'Carl', verified: false }, ]); }); test('Object keys as column names', async (ctx) => { const { db } = ctx.mysql; // Tests the following: // Column with required config // Column with optional config without providing a value // Column with optional config providing a value // Column without config const users = mysqlTable('users', { id: bigint({ mode: 'number' }).autoincrement().primaryKey(), createdAt: timestamp(), updatedAt: timestamp({ fsp: 3 }), admin: boolean(), }); await db.execute(sql`drop table if exists users`); await db.execute( sql` create table users ( \`id\` bigint auto_increment primary key, \`createdAt\` timestamp, \`updatedAt\` timestamp(3), \`admin\` boolean ) `, ); await db.insert(users).values([ { createdAt: sql`now() - interval 30 day`, updatedAt: sql`now() - interval 1 day`, admin: true }, { createdAt: sql`now() - interval 1 day`, updatedAt: sql`now() - interval 30 day`, admin: true }, { createdAt: sql`now() - interval 1 day`, updatedAt: sql`now() - interval 1 day`, admin: false }, ]); const result = await db .select({ id: users.id, admin: users.admin }) .from(users) .where( and( gt(users.createdAt, sql`now() - interval 7 day`), gt(users.updatedAt, sql`now() - interval 7 day`), ), ); expect(result).toEqual([ { id: 3, admin: false }, ]); await db.execute(sql`drop table users`); }); test('cross join', async (ctx) => { const { db } = ctx.mysql; await db .insert(usersTable) .values([ { name: 'John' }, { name: 'Jane' }, ]); await db .insert(citiesTable) .values([ { name: 'Seattle' }, { name: 'New York City' }, ]); const result = await db .select({ user: usersTable.name, city: citiesTable.name, }) .from(usersTable) .crossJoin(citiesTable) .orderBy(usersTable.name, citiesTable.name); expect(result).toStrictEqual([ { city: 'New York City', user: 'Jane' }, { city: 'Seattle', user: 'Jane' }, { city: 'New York City', user: 'John' }, { city: 'Seattle', user: 'John' }, ]); }); test('left join (lateral)', async (ctx) => { const { db } = ctx.mysql; await db .insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const sq = db .select({ userId: users2Table.id, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(eq(users2Table.cityId, citiesTable.id)) .as('sq'); const res = await db .select({ cityId: citiesTable.id, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .leftJoinLateral(sq, sql`true`); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, { cityId: 2, cityName: 'London', userId: null, userName: null }, ]); }); test('inner join (lateral)', async (ctx) => { const { db } = ctx.mysql; await db .insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const sq = db .select({ userId: users2Table.id, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(eq(users2Table.cityId, citiesTable.id)) .as('sq'); const res = await db .select({ cityId: citiesTable.id, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .innerJoinLateral(sq, sql`true`); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, ]); }); test('cross join (lateral)', async (ctx) => { const { db } = ctx.mysql; await db .insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }, { id: 3, name: 'Berlin' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }, { name: 'Patrick', cityId: 2, }]); const sq = db .select({ userId: users2Table.id, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(not(like(citiesTable.name, 'L%'))) .as('sq'); const res = await db .select({ cityId: citiesTable.id, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .crossJoinLateral(sq) .orderBy(citiesTable.id, sq.userId); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John', }, { cityId: 1, cityName: 'Paris', userId: 2, userName: 'Jane', }, { cityId: 1, cityName: 'Paris', userId: 3, userName: 'Patrick', }, { cityId: 3, cityName: 'Berlin', userId: 1, userName: 'John', }, { cityId: 3, cityName: 'Berlin', userId: 2, userName: 'Jane', }, { cityId: 3, cityName: 'Berlin', userId: 3, userName: 'Patrick', }, ]); }); test('all types', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql` CREATE TABLE \`all_types\` ( \`serial\` serial AUTO_INCREMENT, \`bigint53\` bigint, \`bigint64\` bigint, \`binary\` binary, \`boolean\` boolean, \`char\` char, \`date\` date, \`date_str\` date, \`datetime\` datetime, \`datetime_str\` datetime, \`decimal\` decimal, \`decimal_num\` decimal(30), \`decimal_big\` decimal(30), \`double\` double, \`float\` float, \`int\` int, \`json\` json, \`med_int\` mediumint, \`small_int\` smallint, \`real\` real, \`text\` text, \`time\` time, \`timestamp\` timestamp, \`timestamp_str\` timestamp, \`tiny_int\` tinyint, \`varbin\` varbinary(16), \`varchar\` varchar(255), \`year\` year, \`enum\` enum('enV1','enV2') ); `); await db.insert(allTypesTable).values({ serial: 1, bigint53: 9007199254740991, bigint64: 5044565289845416380n, binary: '1', boolean: true, char: 'c', date: new Date(1741743161623), dateStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), datetime: new Date(1741743161623), datetimeStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), decimal: '47521', decimalNum: 9007199254740991, decimalBig: 5044565289845416380n, double: 15.35325689124218, enum: 'enV1', float: 1.048596, real: 1.048596, text: 'C4-', int: 621, json: { str: 'strval', arr: ['str', 10], }, medInt: 560, smallInt: 14, time: '04:13:22', timestamp: new Date(1741743161623), timestampStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), tinyInt: 7, varbin: '1010110101001101', varchar: 'VCHAR', year: 2025, }); const rawRes = await db.select().from(allTypesTable); type ExpectedType = { serial: number; bigint53: number | null; bigint64: bigint | null; binary: string | null; boolean: boolean | null; char: string | null; date: Date | null; dateStr: string | null; datetime: Date | null; datetimeStr: string | null; decimal: string | null; decimalNum: number | null; decimalBig: bigint | null; double: number | null; float: number | null; int: number | null; json: unknown; medInt: number | null; smallInt: number | null; real: number | null; text: string | null; time: string | null; timestamp: Date | null; timestampStr: string | null; tinyInt: number | null; varbin: string | null; varchar: string | null; year: number | null; enum: 'enV1' | 'enV2' | null; }[]; const expectedRes: ExpectedType = [ { serial: 1, bigint53: 9007199254740991, bigint64: 5044565289845416380n, binary: '1', boolean: true, char: 'c', date: new Date('2025-03-12T00:00:00.000Z'), dateStr: '2025-03-12', datetime: new Date('2025-03-12T01:32:42.000Z'), datetimeStr: '2025-03-12 01:32:41', decimal: '47521', decimalNum: 9007199254740991, decimalBig: 5044565289845416380n, double: 15.35325689124218, float: 1.0486, int: 621, json: { arr: ['str', 10], str: 'strval' }, medInt: 560, smallInt: 14, real: 1.048596, text: 'C4-', time: '04:13:22', timestamp: new Date('2025-03-12T01:32:42.000Z'), timestampStr: '2025-03-12 01:32:41', tinyInt: 7, varbin: '1010110101001101', varchar: 'VCHAR', year: 2025, enum: 'enV1', }, ]; expectTypeOf(rawRes).toEqualTypeOf(); expect(rawRes).toStrictEqual(expectedRes); }); }); test('insert into ... select', async (ctx) => { const { db } = ctx.mysql; const notifications = mysqlTable('notifications', { id: serial('id').primaryKey(), sentAt: timestamp('sent_at').notNull().defaultNow(), message: text('message').notNull(), }); const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const userNotications = mysqlTable('user_notifications', { userId: int('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), notificationId: int('notification_id').notNull().references(() => notifications.id, { onDelete: 'cascade' }), }, (t) => ({ pk: primaryKey({ columns: [t.userId, t.notificationId] }), })); await db.execute(sql`drop table if exists ${notifications}`); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${userNotications}`); await db.execute(sql` create table ${notifications} ( \`id\` serial primary key, \`sent_at\` timestamp not null default now(), \`message\` text not null ) `); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` text not null ) `); await db.execute(sql` create table ${userNotications} ( \`user_id\` int references users(id) on delete cascade, \`notification_id\` int references notifications(id) on delete cascade, primary key (user_id, notification_id) ) `); await db .insert(notifications) .values({ message: 'You are one of the 3 lucky winners!' }); const newNotification = await db .select({ id: notifications.id }) .from(notifications) .then((result) => result[0]); await db.insert(users).values([ { name: 'Alice' }, { name: 'Bob' }, { name: 'Charlie' }, { name: 'David' }, { name: 'Eve' }, ]); await db .insert(userNotications) .select( db .select({ userId: users.id, notificationId: sql`(${newNotification!.id})`.as('notification_id'), }) .from(users) .where(inArray(users.name, ['Alice', 'Charlie', 'Eve'])) .orderBy(asc(users.id)), ); const sentNotifications = await db.select().from(userNotications); expect(sentNotifications).toStrictEqual([ { userId: 1, notificationId: newNotification!.id }, { userId: 3, notificationId: newNotification!.id }, { userId: 5, notificationId: newNotification!.id }, ]); }); test('insert into ... select with keys in different order', async (ctx) => { const { db } = ctx.mysql; const users1 = mysqlTable('users1', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const users2 = mysqlTable('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users1}`); await db.execute(sql`drop table if exists ${users2}`); await db.execute(sql` create table ${users1} ( \`id\` serial primary key, \`name\` text not null ) `); await db.execute(sql` create table ${users2} ( \`id\` serial primary key, \`name\` text not null ) `); expect( () => db .insert(users1) .select( db .select({ name: users2.name, id: users2.id, }) .from(users2), ), ).toThrowError(); }); test('MySqlTable :: select with `use index` hint', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }, () => [usersTableNameIndex]); const usersTableNameIndex = index('users_name_index').on(users.name); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql`create index users_name_index ON users(name)`); await db.insert(users).values([ { name: 'Alice' }, { name: 'Bob' }, { name: 'Charlie' }, { name: 'David' }, { name: 'Eve' }, ]); const result = await db.select() .from(users, { useIndex: [usersTableNameIndex], }) .where(eq(users.name, 'David')); expect(result).toHaveLength(1); expect(result).toEqual([{ id: 4, name: 'David' }]); }); test('MySqlTable :: select with `use index` hint on 1 index', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }, () => [usersTableNameIndex]); const usersTableNameIndex = index('users_name_index').on(users.name); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql`create index users_name_index ON users(name)`); const query = db.select() .from(users, { useIndex: usersTableNameIndex, }) .where(eq(users.name, 'David')) .toSQL(); expect(query.sql).to.include('USE INDEX (users_name_index)'); }); test('MySqlTable :: select with `use index` hint on multiple indexes', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), age: int('age').notNull(), }, () => [usersTableNameIndex, usersTableAgeIndex]); const usersTableNameIndex = index('users_name_index').on(users.name); const usersTableAgeIndex = index('users_age_index').on(users.age); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null, \`age\` int not null ) `); await db.execute(sql`create index users_name_index ON users(name)`); await db.execute(sql`create index users_age_index ON users(age)`); const query = db.select() .from(users, { useIndex: [usersTableNameIndex, usersTableAgeIndex], }) .where(eq(users.name, 'David')) .toSQL(); expect(query.sql).to.include('USE INDEX (users_name_index, users_age_index)'); }); test('MySqlTable :: select with `use index` hint on not existed index', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }, () => [usersTableNameIndex]); const usersTableNameIndex = index('users_name_index').on(users.name); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql`create index users_name_index ON users(name)`); await db.insert(users).values([ { name: 'Alice' }, { name: 'Bob' }, { name: 'Charlie' }, { name: 'David' }, { name: 'Eve' }, ]); await expect((async () => { return await db.select() .from(users, { useIndex: ['some_other_index'], }) .where(eq(users.name, 'David')); })()).rejects.toThrowError(); }); test('MySqlTable :: select with `use index` + `force index` incompatible hints', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), age: int('age').notNull(), }, () => [usersTableNameIndex, usersTableAgeIndex]); const usersTableNameIndex = index('users_name_index').on(users.name); const usersTableAgeIndex = index('users_age_index').on(users.age); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null, \`age\` int not null ) `); await db.execute(sql`create index users_name_index ON users(name)`); await db.execute(sql`create index users_age_index ON users(age)`); await db.insert(users).values([ { name: 'Alice', age: 18 }, { name: 'Bob', age: 19 }, { name: 'Charlie', age: 20 }, { name: 'David', age: 21 }, { name: 'Eve', age: 22 }, ]); await expect((async () => { return await db.select() .from(users, { useIndex: [usersTableNameIndex], forceIndex: [usersTableAgeIndex], }) .where(eq(users.name, 'David')); })()).rejects.toThrowError(); }); test('MySqlTable :: select with join `use index` hint', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }); const posts = mysqlTable('posts', { id: serial('id').primaryKey(), text: varchar('text', { length: 100 }).notNull(), userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), }, () => [postsTableUserIdIndex]); const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); await db.execute(sql`drop table if exists ${posts}`); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql` create table ${posts} ( \`id\` serial primary key, \`text\` varchar(100) not null, \`user_id\` int not null references users(id) on delete cascade ) `); await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); await db.insert(users).values([ { name: 'Alice' }, { name: 'Bob' }, { name: 'Charlie' }, { name: 'David' }, { name: 'Eve' }, ]); await db.insert(posts).values([ { text: 'Alice post', userId: 1 }, { text: 'Bob post', userId: 2 }, { text: 'Charlie post', userId: 3 }, { text: 'David post', userId: 4 }, { text: 'Eve post', userId: 5 }, ]); const result = await db.select({ userId: users.id, name: users.name, postId: posts.id, text: posts.text, }) .from(users) .leftJoin(posts, eq(users.id, posts.userId), { useIndex: [postsTableUserIdIndex], }) .where(and( eq(users.name, 'David'), eq(posts.text, 'David post'), )); expect(result).toHaveLength(1); expect(result).toEqual([{ userId: 4, name: 'David', postId: 4, text: 'David post' }]); }); test('MySqlTable :: select with join `use index` hint on 1 index', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }); const posts = mysqlTable('posts', { id: serial('id').primaryKey(), text: varchar('text', { length: 100 }).notNull(), userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), }, () => [postsTableUserIdIndex]); const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); await db.execute(sql`drop table if exists ${posts}`); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql` create table ${posts} ( \`id\` serial primary key, \`text\` varchar(100) not null, \`user_id\` int not null references users(id) on delete cascade ) `); await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); const query = db.select({ userId: users.id, name: users.name, postId: posts.id, text: posts.text, }) .from(users) .leftJoin(posts, eq(users.id, posts.userId), { useIndex: postsTableUserIdIndex, }) .where(and( eq(users.name, 'David'), eq(posts.text, 'David post'), )).toSQL(); expect(query.sql).to.include('USE INDEX (posts_user_id_index)'); }); test('MySqlTable :: select with cross join `use index` hint', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }); const posts = mysqlTable('posts', { id: serial('id').primaryKey(), text: varchar('text', { length: 100 }).notNull(), userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), }, () => [postsTableUserIdIndex]); const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); await db.execute(sql`drop table if exists ${posts}`); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql` create table ${posts} ( \`id\` serial primary key, \`text\` varchar(100) not null, \`user_id\` int not null references users(id) on delete cascade ) `); await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); await db.insert(users).values([ { id: 1, name: 'Alice' }, { id: 2, name: 'Bob' }, ]); await db.insert(posts).values([ { id: 1, text: 'Alice post', userId: 1 }, { id: 2, text: 'Bob post', userId: 2 }, ]); const result = await db.select() .from(users) .crossJoin(posts, { useIndex: [postsTableUserIdIndex], }) .orderBy(users.id, posts.id); expect(result).toStrictEqual([{ users: { id: 1, name: 'Alice' }, posts: { id: 1, text: 'Alice post', userId: 1 }, }, { users: { id: 1, name: 'Alice' }, posts: { id: 2, text: 'Bob post', userId: 2 }, }, { users: { id: 2, name: 'Bob' }, posts: { id: 1, text: 'Alice post', userId: 1 }, }, { users: { id: 2, name: 'Bob' }, posts: { id: 2, text: 'Bob post', userId: 2 }, }]); }); test('MySqlTable :: select with cross join `use index` hint on 1 index', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }); const posts = mysqlTable('posts', { id: serial('id').primaryKey(), text: varchar('text', { length: 100 }).notNull(), userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), }, () => [postsTableUserIdIndex]); const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); await db.execute(sql`drop table if exists ${posts}`); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql` create table ${posts} ( \`id\` serial primary key, \`text\` varchar(100) not null, \`user_id\` int not null references users(id) on delete cascade ) `); await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); const query = db.select({ userId: users.id, name: users.name, postId: posts.id, text: posts.text, }) .from(users) .crossJoin(posts, { useIndex: postsTableUserIdIndex, }) .where(and( eq(users.name, 'David'), eq(posts.text, 'David post'), )).toSQL(); expect(query.sql).to.include('USE INDEX (posts_user_id_index)'); }); test('MySqlTable :: select with join `use index` hint on multiple indexes', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }); const posts = mysqlTable('posts', { id: serial('id').primaryKey(), text: varchar('text', { length: 100 }).notNull(), userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), }, () => [postsTableUserIdIndex, postsTableTextIndex]); const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); const postsTableTextIndex = index('posts_text_index').on(posts.text); await db.execute(sql`drop table if exists ${posts}`); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql` create table ${posts} ( \`id\` serial primary key, \`text\` varchar(100) not null, \`user_id\` int not null references users(id) on delete cascade ) `); await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); await db.execute(sql`create index posts_text_index ON posts(text)`); const query = db.select({ userId: users.id, name: users.name, postId: posts.id, text: posts.text, }) .from(users) .leftJoin(posts, eq(users.id, posts.userId), { useIndex: [postsTableUserIdIndex, postsTableTextIndex], }) .where(and( eq(users.name, 'David'), eq(posts.text, 'David post'), )).toSQL(); expect(query.sql).to.include('USE INDEX (posts_user_id_index, posts_text_index)'); }); test('MySqlTable :: select with join `use index` hint on not existed index', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }); const posts = mysqlTable('posts', { id: serial('id').primaryKey(), text: varchar('text', { length: 100 }).notNull(), userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), }, () => [postsTableUserIdIndex]); const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); await db.execute(sql`drop table if exists ${posts}`); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql` create table ${posts} ( \`id\` serial primary key, \`text\` varchar(100) not null, \`user_id\` int not null references users(id) on delete cascade ) `); await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); await db.insert(users).values([ { name: 'Alice' }, { name: 'Bob' }, { name: 'Charlie' }, { name: 'David' }, { name: 'Eve' }, ]); await db.insert(posts).values([ { text: 'Alice post', userId: 1 }, { text: 'Bob post', userId: 2 }, { text: 'Charlie post', userId: 3 }, { text: 'David post', userId: 4 }, { text: 'Eve post', userId: 5 }, ]); await expect((async () => { return await db.select({ userId: users.id, name: users.name, postId: posts.id, text: posts.text, }) .from(users) .leftJoin(posts, eq(users.id, posts.userId), { useIndex: ['some_other_index'], }) .where(and( eq(users.name, 'David'), eq(posts.text, 'David post'), )); })()).rejects.toThrowError(); }); test('MySqlTable :: select with join `use index` + `force index` incompatible hints', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }); const posts = mysqlTable('posts', { id: serial('id').primaryKey(), text: varchar('text', { length: 100 }).notNull(), userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), }, () => [postsTableUserIdIndex, postsTableTextIndex]); const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); const postsTableTextIndex = index('posts_text_index').on(posts.text); await db.execute(sql`drop table if exists ${posts}`); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql` create table ${posts} ( \`id\` serial primary key, \`text\` varchar(100) not null, \`user_id\` int not null references users(id) on delete cascade ) `); await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); await db.execute(sql`create index posts_text_index ON posts(text)`); await db.insert(users).values([ { name: 'Alice' }, { name: 'Bob' }, { name: 'Charlie' }, { name: 'David' }, { name: 'Eve' }, ]); await db.insert(posts).values([ { text: 'Alice post', userId: 1 }, { text: 'Bob post', userId: 2 }, { text: 'Charlie post', userId: 3 }, { text: 'David post', userId: 4 }, { text: 'Eve post', userId: 5 }, ]); await expect((async () => { return await db.select({ userId: users.id, name: users.name, postId: posts.id, text: posts.text, }) .from(users) .leftJoin(posts, eq(users.id, posts.userId), { useIndex: [postsTableUserIdIndex], forceIndex: [postsTableTextIndex], }) .where(and( eq(users.name, 'David'), eq(posts.text, 'David post'), )); })()).rejects.toThrowError(); }); test('MySqlTable :: select with Subquery join `use index`', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }); const posts = mysqlTable('posts', { id: serial('id').primaryKey(), text: varchar('text', { length: 100 }).notNull(), userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), }, () => [postsTableUserIdIndex]); const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); await db.execute(sql`drop table if exists ${posts}`); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql` create table ${posts} ( \`id\` serial primary key, \`text\` varchar(100) not null, \`user_id\` int not null references users(id) on delete cascade ) `); await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); await db.insert(users).values([ { name: 'Alice' }, { name: 'Bob' }, { name: 'Charlie' }, { name: 'David' }, { name: 'Eve' }, ]); await db.insert(posts).values([ { text: 'Alice post', userId: 1 }, { text: 'Bob post', userId: 2 }, { text: 'Charlie post', userId: 3 }, { text: 'David post', userId: 4 }, { text: 'Eve post', userId: 5 }, ]); const sq = db.select().from(posts, { useIndex: [postsTableUserIdIndex] }).where(eq(posts.userId, 1)).as('sq'); const result = await db.select({ userId: users.id, name: users.name, postId: sq.id, text: sq.text, }) .from(users) .leftJoin(sq, eq(users.id, sq.userId)) .where(eq(users.name, 'Alice')); expect(result).toHaveLength(1); expect(result).toEqual([{ userId: 1, name: 'Alice', postId: 1, text: 'Alice post' }]); }); test('MySqlTable :: select with Subquery join with `use index` in join', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }); const posts = mysqlTable('posts', { id: serial('id').primaryKey(), text: varchar('text', { length: 100 }).notNull(), userId: int('user_id').references(() => users.id, { onDelete: 'cascade' }).notNull(), }, () => [postsTableUserIdIndex]); const postsTableUserIdIndex = index('posts_user_id_index').on(posts.userId); await db.execute(sql`drop table if exists ${posts}`); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql` create table ${posts} ( \`id\` serial primary key, \`text\` varchar(100) not null, \`user_id\` int not null references users(id) on delete cascade ) `); await db.execute(sql`create index posts_user_id_index ON posts(user_id)`); const sq = db.select().from(posts).where(eq(posts.userId, 1)).as('sq'); const query = db.select({ userId: users.id, name: users.name, postId: sq.id, text: sq.text, }) .from(users) // @ts-expect-error .leftJoin(sq, eq(users.id, sq.userId, { useIndex: [postsTableUserIdIndex] })) .where(eq(users.name, 'Alice')) .toSQL(); expect(query.sql).not.include('USE INDEX'); }); test('View :: select with `use index` hint', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }, () => [usersTableNameIndex]); const usersTableNameIndex = index('users_name_index').on(users.name); const usersView = mysqlView('users_view').as((qb) => qb.select().from(users)); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql`create index users_name_index ON users(name)`); await db.execute(sql`create view ${usersView} as select * from ${users}`); // @ts-expect-error const query = db.select().from(usersView, { useIndex: [usersTableNameIndex], }).toSQL(); expect(query.sql).not.include('USE INDEX'); await db.execute(sql`drop view ${usersView}`); }); test('Subquery :: select with `use index` hint', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: varchar('name', { length: 100 }).notNull(), }, () => [usersTableNameIndex]); const usersTableNameIndex = index('users_name_index').on(users.name); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( \`id\` serial primary key, \`name\` varchar(100) not null ) `); await db.execute(sql`create index users_name_index ON users(name)`); const sq = db.select().from(users).as('sq'); // @ts-expect-error const query = db.select().from(sq, { useIndex: [usersTableNameIndex], }).toSQL(); expect(query.sql).not.include('USE INDEX'); }); test('sql operator as cte', async (ctx) => { const { db } = ctx.mysql; const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); await db.insert(users).values([ { name: 'John' }, { name: 'Jane' }, ]); const sq1 = db.$with('sq', { userId: users.id, data: { name: users.name, }, }).as(sql`select * from ${users} where ${users.name} = 'John'`); const result1 = await db.with(sq1).select().from(sq1); const sq2 = db.$with('sq', { userId: users.id, data: { name: users.name, }, }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); const result2 = await db.with(sq2).select().from(sq1); expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); }); } ================================================ FILE: integration-tests/tests/mysql/mysql-custom.test.ts ================================================ import retry from 'async-retry'; import type Docker from 'dockerode'; import { asc, eq, Name, sql } from 'drizzle-orm'; import { alias, binary, customType, date, datetime, mysqlEnum, mysqlTable, mysqlTableCreator, serial, text, time, varchar, year, } from 'drizzle-orm/mysql-core'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import { migrate } from 'drizzle-orm/mysql2/migrator'; import * as mysql from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { toLocalDate } from '~/utils'; import { createDockerDB } from './mysql-common'; const ENABLE_LOGGING = false; let db: MySql2Database; let client: mysql.Connection; let container: Docker.Container | undefined; beforeAll(async () => { let connectionString; if (process.env['MYSQL_CONNECTION_STRING']) { connectionString = process.env['MYSQL_CONNECTION_STRING']; } else { const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); connectionString = conStr; container = contrainerObj; } client = await retry(async () => { client = await mysql.createConnection(connectionString); await client.connect(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end(); await container?.stop().catch(console.error); }); beforeEach((ctx) => { ctx.mysql = { db, }; }); const customSerial = customType<{ data: number; notNull: true; default: true }>({ dataType() { return 'serial'; }, }); const customText = customType<{ data: string }>({ dataType() { return 'text'; }, }); const customBoolean = customType<{ data: boolean }>({ dataType() { return 'boolean'; }, fromDriver(value) { if (typeof value === 'boolean') { return value; } return value === 1; }, }); const customJson = (name: string) => customType<{ data: TData; driverData: string }>({ dataType() { return 'json'; }, toDriver(value: TData): string { return JSON.stringify(value); }, })(name); const customTimestamp = customType< { data: Date; driverData: string; config: { fsp: number } } >({ dataType(config) { const precision = config?.fsp === undefined ? '' : ` (${config.fsp})`; return `timestamp${precision}`; }, fromDriver(value: string): Date { return new Date(value); }, }); const customBinary = customType<{ data: string; driverData: Buffer; config: { length: number } }>({ dataType(config) { return config?.length === undefined ? `binary` : `binary(${config.length})`; }, toDriver(value) { return sql`UNHEX(${value})`; }, fromDriver(value) { return value.toString('hex'); }, }); const usersTable = mysqlTable('userstest', { id: customSerial('id').primaryKey(), name: customText('name').notNull(), verified: customBoolean('verified').notNull().default(false), jsonb: customJson('jsonb'), createdAt: customTimestamp('created_at', { fsp: 2 }).notNull().default(sql`now()`), }); const datesTable = mysqlTable('datestable', { date: date('date'), dateAsString: date('date_as_string', { mode: 'string' }), time: time('time', { fsp: 1 }), datetime: datetime('datetime', { fsp: 2 }), datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), year: year('year'), }); export const testTable = mysqlTable('test_table', { id: customBinary('id', { length: 16 }).primaryKey(), sqlId: binary('sql_id', { length: 16 }), rawId: varchar('raw_id', { length: 64 }), }); const usersMigratorTable = mysqlTable('users12', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }); beforeEach(async () => { await db.execute(sql`drop table if exists \`userstest\``); await db.execute(sql`drop table if exists \`datestable\``); await db.execute(sql`drop table if exists \`test_table\``); // await ctx.db.execute(sql`create schema public`); await db.execute( sql` create table \`userstest\` ( \`id\` serial primary key, \`name\` text not null, \`verified\` boolean not null default false, \`jsonb\` json, \`created_at\` timestamp not null default now() ) `, ); await db.execute( sql` create table \`datestable\` ( \`date\` date, \`date_as_string\` date, \`time\` time, \`datetime\` datetime, \`datetime_as_string\` datetime, \`year\` year ) `, ); await db.execute( sql` create table \`test_table\` ( \`id\` binary(16) primary key, \`sql_id\` binary(16), \`raw_id\` varchar(64) ) `, ); }); test('select all fields', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('select sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select typed sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('insert returning sql', async (ctx) => { const { db } = ctx.mysql; const [result, _] = await db.insert(usersTable).values({ name: 'John' }); expect(result.insertId).toBe(1); }); test('delete returning sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(users[0].affectedRows).toBe(1); }); test('update returning sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); expect(users[0].changedRows).toBe(1); }); test('update with returning all fields', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); expect(updatedUsers[0].changedRows).toBe(1); expect(users[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); test('update with returning partial', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(updatedUsers[0].changedRows).toBe(1); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('delete with returning partial', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('insert + select', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersTable); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('json insert', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); test('insert with overridden default values', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('insert many', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }).from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('insert many with returning', async (ctx) => { const { db } = ctx.mysql; const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); expect(result[0].affectedRows).toBe(4); }); test('select with group by as field', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with group by as sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with group by as sql + column', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by as column + sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by complex query', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async (ctx) => { const { db } = ctx.mysql; const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, params: [], }); }); test('build query insert with onDuplicate', async (ctx) => { const { db } = ctx.mysql; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (default, ?, default, ?, default) on duplicate key update `name` = ?', params: ['John', '["foo","bar"]', 'John1'], }); }); test('insert with onDuplicate', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable) .values({ name: 'John' }); await db.insert(usersTable) .values({ id: 1, name: 'John' }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert conflict', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable) .values({ name: 'John' }); await expect((async () => { db.insert(usersTable).values({ id: 1, name: 'John1' }); })()).resolves.not.toThrowError(); }); test('insert conflict with ignore', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable) .values({ name: 'John' }); await db.insert(usersTable) .ignore() .values({ id: 1, name: 'John1' }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert sql', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async (ctx) => { const { db } = ctx.mysql; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }).from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); test('full join with alias', async (ctx) => { const { db } = ctx.mysql; const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select().from(users) .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('select from alias', async (ctx) => { const { db } = ctx.mysql; const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select() .from(user) .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('insert with spaces', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const statement = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .prepare(); const result = await statement.execute(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement reuse', async (ctx) => { const { db } = ctx.mysql; const stmt = db.insert(usersTable).values({ verified: true, name: sql.placeholder('name'), }).prepare(); for (let i = 0; i < 10; i++) { await stmt.execute({ name: `John ${i}` }); } const result = await db.select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified, }).from(usersTable); expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, { id: 4, name: 'John 3', verified: true }, { id: 5, name: 'John 4', verified: true }, { id: 6, name: 'John 5', verified: true }, { id: 7, name: 'John 6', verified: true }, { id: 8, name: 'John 7', verified: true }, { id: 9, name: 'John 8', verified: true }, { id: 10, name: 'John 9', verified: true }, ]); }); test('prepared statement with placeholder in .where', async (ctx) => { const { db } = ctx.mysql; await db.insert(usersTable).values({ name: 'John' }); const stmt = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .prepare(); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('migrator', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists cities_migration`); await db.execute(sql`drop table if exists users_migration`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists __drizzle_migrations`); await migrate(db, { migrationsFolder: './drizzle2/mysql' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table cities_migration`); await db.execute(sql`drop table users_migration`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table __drizzle_migrations`); }); test('insert via db.execute + select via db.execute', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); expect(result[0]).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async (ctx) => { const { db } = ctx.mysql; const inserted = await db.execute( db.insert(usersTable).values({ name: 'John' }), ); expect(inserted[0].affectedRows).toBe(1); }); test('insert + select all possible dates', async (ctx) => { const { db } = ctx.mysql; const date = new Date('2022-11-11'); await db.insert(datesTable).values({ date: date, dateAsString: '2022-11-11', time: '12:12:12', datetime: date, year: 22, datetimeAsString: '2022-11-11 12:12:12', }); const res = await db.select().from(datesTable); expect(res[0]?.date).toBeInstanceOf(Date); expect(res[0]?.datetime).toBeInstanceOf(Date); expect(res[0]?.dateAsString).toBeTypeOf('string'); expect(res[0]?.datetimeAsString).toBeTypeOf('string'); expect(res).toEqual([{ date: toLocalDate(new Date('2022-11-11')), dateAsString: '2022-11-11', time: '12:12:12', datetime: new Date('2022-11-11'), year: 2022, datetimeAsString: '2022-11-11 12:12:12', }]); }); const tableWithEnums = mysqlTable('enums_test_case', { id: serial('id').primaryKey(), enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), }); test('Mysql enum test case #1', async (ctx) => { const { db } = ctx.mysql; await db.execute(sql`drop table if exists \`enums_test_case\``); await db.execute(sql` create table \`enums_test_case\` ( \`id\` serial primary key, \`enum1\` ENUM('a', 'b', 'c') not null, \`enum2\` ENUM('a', 'b', 'c') default 'a', \`enum3\` ENUM('a', 'b', 'c') not null default 'b' ) `); await db.insert(tableWithEnums).values([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum3: 'c' }, { id: 3, enum1: 'a' }, ]); const res = await db.select().from(tableWithEnums); await db.execute(sql`drop table \`enums_test_case\``); expect(res).toEqual([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, ]); }); test('custom binary', async (ctx) => { const { db } = ctx.mysql; const id = uuid().replace(/-/g, ''); await db.insert(testTable).values({ id, sqlId: sql`UNHEX(${id})`, rawId: id, }); const res = await db.select().from(testTable); expect(res).toEqual([{ id, sqlId: Buffer.from(id, 'hex').toString(), rawId: id, }]); }); ================================================ FILE: integration-tests/tests/mysql/mysql-planetscale.test.ts ================================================ import { Client } from '@planetscale/database'; import type { PlanetScaleDatabase } from 'drizzle-orm/planetscale-serverless'; import { drizzle } from 'drizzle-orm/planetscale-serverless'; import { beforeAll, beforeEach } from 'vitest'; import { skipTests } from '~/common'; import { tests } from './mysql-common'; import { TestCache, TestGlobalCache, tests as cacheTests } from './mysql-common-cache'; const ENABLE_LOGGING = false; let db: PlanetScaleDatabase; let dbGlobalCached: PlanetScaleDatabase; let cachedDb: PlanetScaleDatabase; beforeAll(async () => { const client = new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING']! }); db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); beforeEach((ctx) => { ctx.mysql = { db, }; ctx.cachedMySQL = { db: cachedDb, dbGlobalCached, }; }); skipTests([ 'mySchema :: view', 'mySchema :: select from tables with same name from different schema using alias', 'mySchema :: prepared statement with placeholder in .where', 'mySchema :: insert with spaces', 'mySchema :: select with group by as column + sql', 'mySchema :: select with group by as field', 'mySchema :: insert many', 'mySchema :: insert with overridden default values', 'mySchema :: insert + select', 'mySchema :: delete with returning all fields', 'mySchema :: update with returning partial', 'mySchema :: delete returning sql', 'mySchema :: insert returning sql', 'mySchema :: select typed sql', 'mySchema :: select sql', 'mySchema :: select all fields', 'test $onUpdateFn and $onUpdate works updating', 'test $onUpdateFn and $onUpdate works as $default', 'set operations (mixed all) as function with subquery', 'set operations (mixed) from query builder', 'set operations (except all) as function', 'set operations (except all) from query builder', 'set operations (except) as function', 'set operations (except) from query builder', 'set operations (intersect all) as function', 'set operations (intersect all) from query builder', 'set operations (intersect) as function', 'set operations (intersect) from query builder', 'select iterator w/ prepared statement', 'select iterator', 'subquery with view', 'join on aliased sql from with clause', 'with ... delete', 'with ... update', 'with ... select', // to redefine in this file 'utc config for datetime', 'transaction', 'transaction with options (set isolationLevel)', 'having', 'select count()', 'insert via db.execute w/ query builder', 'insert via db.execute + select via db.execute', 'insert many with returning', 'delete with returning partial', 'delete with returning all fields', 'update with returning partial', 'update with returning all fields', 'update returning sql', 'delete returning sql', 'insert returning sql', ]); tests('planetscale'); cacheTests(); ================================================ FILE: integration-tests/tests/mysql/mysql-prefixed.test.ts ================================================ import retry from 'async-retry'; import type Docker from 'dockerode'; import type { Equal } from 'drizzle-orm'; import { asc, eq, getTableName, gt, inArray, Name, sql, TransactionRollbackError } from 'drizzle-orm'; import { alias, boolean, date, datetime, getViewConfig, int, json, mysqlEnum, mysqlTable as mysqlTableRaw, mysqlTableCreator, mysqlView, serial, text, time, timestamp, uniqueIndex, year, } from 'drizzle-orm/mysql-core'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import { migrate } from 'drizzle-orm/mysql2/migrator'; import * as mysql from 'mysql2/promise'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { Expect, toLocalDate } from '~/utils'; import { createDockerDB } from './mysql-common'; const ENABLE_LOGGING = false; let db: MySql2Database; let client: mysql.Connection; let container: Docker.Container | undefined; beforeAll(async () => { let connectionString; if (process.env['MYSQL_CONNECTION_STRING']) { connectionString = process.env['MYSQL_CONNECTION_STRING']; } else { const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); connectionString = conStr; container = contrainerObj; } client = await retry(async () => { client = await mysql.createConnection(connectionString); await client.connect(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end(); await container?.stop().catch(console.error); }); const tablePrefix = 'drizzle_tests_'; const mysqlTable = mysqlTableCreator((name) => `${tablePrefix}${name}`); const usersTable = mysqlTable('userstest', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: json('jsonb').$type(), createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), }); const users2Table = mysqlTable('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').references(() => citiesTable.id), }); const citiesTable = mysqlTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), }); beforeEach(async () => { await db.execute(sql`drop table if exists ${usersTable}`); await db.execute(sql`drop table if exists ${users2Table}`); await db.execute(sql`drop table if exists ${citiesTable}`); await db.execute( sql` create table ${usersTable} ( \`id\` serial primary key, \`name\` text not null, \`verified\` boolean not null default false, \`jsonb\` json, \`created_at\` timestamp not null default now() ) `, ); await db.execute( sql` create table ${users2Table} ( \`id\` serial primary key, \`name\` text not null, \`city_id\` int references ${citiesTable}(\`id\`) ) `, ); await db.execute( sql` create table ${citiesTable} ( \`id\` serial primary key, \`name\` text not null ) `, ); }); test('select all fields', async () => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('select sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select typed sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select distinct', async () => { const usersDistinctTable = mysqlTable('users_distinct', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]); const users = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ); await db.execute(sql`drop table ${usersDistinctTable}`); expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); }); test('insert returning sql', async () => { const [result, _] = await db.insert(usersTable).values({ name: 'John' }); expect(result.insertId).toBe(1); }); test('delete returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(users[0].affectedRows).toBe(1); }); test('update returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); expect(users[0].changedRows).toBe(1); }); test('update with returning all fields', async () => { await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); expect(updatedUsers[0].changedRows).toBe(1); expect(users[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); test('update with returning partial', async () => { await db.insert(usersTable).values({ name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(updatedUsers[0].changedRows).toBe(1); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async () => { await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('delete with returning partial', async () => { await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('insert + select', async () => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersTable); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('json insert', async () => { await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); test('insert with overridden default values', async () => { await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('insert many', async () => { await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }).from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('insert many with returning', async () => { const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); expect(result[0].affectedRows).toBe(4); }); test('select with group by as field', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with group by as sql', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with group by as sql + column', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by as column + sql', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by complex query', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async () => { const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ sql: `select \`id\`, \`name\` from \`${getTableName(usersTable)}\` group by \`${ getTableName(usersTable) }\`.\`id\`, \`${getTableName(usersTable)}\`.\`name\``, params: [], }); }); test('build query insert with onDuplicate', async () => { const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: `insert into \`${ getTableName(usersTable) }\` (\`id\`, \`name\`, \`verified\`, \`jsonb\`, \`created_at\`) values (default, ?, default, ?, default) on duplicate key update \`name\` = ?`, params: ['John', '["foo","bar"]', 'John1'], }); }); test('insert with onDuplicate', async () => { await db.insert(usersTable) .values({ name: 'John' }); await db.insert(usersTable) .values({ id: 1, name: 'John' }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert conflict', async () => { await db.insert(usersTable) .values({ name: 'John' }); await expect((async () => { db.insert(usersTable).values({ id: 1, name: 'John1' }); })()).resolves.not.toThrowError(); }); test('insert conflict with ignore', async () => { await db.insert(usersTable) .values({ name: 'John' }); await db.insert(usersTable) .ignore() .values({ id: 1, name: 'John1' }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert sql', async () => { await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async () => { const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }).from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); test('full join with alias', async () => { const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select().from(users) .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('select from alias', async () => { const mysqlTable = mysqlTableCreator((name) => `prefixed_${name}`); const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select() .from(user) .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('insert with spaces', async () => { await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async () => { await db.insert(usersTable).values({ name: 'John' }); const statement = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .prepare(); const result = await statement.execute(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement reuse', async () => { const stmt = db.insert(usersTable).values({ verified: true, name: sql.placeholder('name'), }).prepare(); for (let i = 0; i < 10; i++) { await stmt.execute({ name: `John ${i}` }); } const result = await db.select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified, }).from(usersTable); expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, { id: 4, name: 'John 3', verified: true }, { id: 5, name: 'John 4', verified: true }, { id: 6, name: 'John 5', verified: true }, { id: 7, name: 'John 6', verified: true }, { id: 8, name: 'John 7', verified: true }, { id: 9, name: 'John 8', verified: true }, { id: 10, name: 'John 9', verified: true }, ]); }); test('prepared statement with placeholder in .where', async () => { await db.insert(usersTable).values({ name: 'John' }); const stmt = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .prepare(); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('migrator', async () => { const usersMigratorTable = mysqlTableRaw('users12', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }, (table) => { return { name: uniqueIndex('').on(table.name).using('btree'), }; }); await db.execute(sql.raw(`drop table if exists cities_migration`)); await db.execute(sql.raw(`drop table if exists users_migration`)); await db.execute(sql.raw(`drop table if exists users12`)); await db.execute(sql.raw(`drop table if exists __drizzle_migrations`)); await migrate(db, { migrationsFolder: './drizzle2/mysql' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql.raw(`drop table cities_migration`)); await db.execute(sql.raw(`drop table users_migration`)); await db.execute(sql.raw(`drop table users12`)); await db.execute(sql.raw(`drop table __drizzle_migrations`)); }); test('insert via db.execute + select via db.execute', async () => { await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); expect(result[0]).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute( db.insert(usersTable).values({ name: 'John' }), ); expect(inserted[0].affectedRows).toBe(1); }); test('insert + select all possible dates', async () => { const datesTable = mysqlTable('datestable', { date: date('date'), dateAsString: date('date_as_string', { mode: 'string' }), time: time('time', { fsp: 1 }), datetime: datetime('datetime', { fsp: 2 }), datetimeAsString: datetime('datetime_as_string', { fsp: 2, mode: 'string' }), year: year('year'), }); await db.execute(sql`drop table if exists ${datesTable}`); await db.execute( sql` create table ${datesTable} ( \`date\` date, \`date_as_string\` date, \`time\` time, \`datetime\` datetime, \`datetime_as_string\` datetime, \`year\` year ) `, ); const d = new Date('2022-11-11'); await db.insert(datesTable).values({ date: d, dateAsString: '2022-11-11', time: '12:12:12', datetime: d, year: 22, datetimeAsString: '2022-11-11 12:12:12', }); const res = await db.select().from(datesTable); expect(res[0]?.date).toBeInstanceOf(Date); expect(res[0]?.datetime).toBeInstanceOf(Date); expect(typeof res[0]?.dateAsString).toBe('string'); expect(typeof res[0]?.datetimeAsString).toBe('string'); expect(res).toEqual([{ date: toLocalDate(new Date('2022-11-11')), dateAsString: '2022-11-11', time: '12:12:12', datetime: new Date('2022-11-11'), year: 2022, datetimeAsString: '2022-11-11 12:12:12', }]); await db.execute(sql`drop table ${datesTable}`); }); test('Mysql enum test case #1', async () => { const tableWithEnums = mysqlTable('enums_test_case', { id: serial('id').primaryKey(), enum1: mysqlEnum('enum1', ['a', 'b', 'c']).notNull(), enum2: mysqlEnum('enum2', ['a', 'b', 'c']).default('a'), enum3: mysqlEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), }); await db.execute(sql`drop table if exists ${tableWithEnums}`); await db.execute(sql` create table ${tableWithEnums} ( \`id\` serial primary key, \`enum1\` ENUM('a', 'b', 'c') not null, \`enum2\` ENUM('a', 'b', 'c') default 'a', \`enum3\` ENUM('a', 'b', 'c') not null default 'b' ) `); await db.insert(tableWithEnums).values([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum3: 'c' }, { id: 3, enum1: 'a' }, ]); const res = await db.select().from(tableWithEnums); await db.execute(sql`drop table ${tableWithEnums}`); expect(res).toEqual([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, ]); }); test('left join (flat object fields)', async () => { await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const res = await db.select({ userId: users2Table.id, userName: users2Table.name, cityId: citiesTable.id, cityName: citiesTable.name, }).from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, { userId: 2, userName: 'Jane', cityId: null, cityName: null }, ]); }); test('left join (grouped fields)', async () => { await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const res = await db.select({ id: users2Table.id, user: { name: users2Table.name, nameUpper: sql`upper(${users2Table.name})`, }, city: { id: citiesTable.id, name: citiesTable.name, nameUpper: sql`upper(${citiesTable.name})`, }, }).from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { id: 1, user: { name: 'John', nameUpper: 'JOHN' }, city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, }, { id: 2, user: { name: 'Jane', nameUpper: 'JANE' }, city: null, }, ]); }); test('left join (all fields)', async () => { await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const res = await db.select().from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { users2: { id: 1, name: 'John', cityId: 1, }, cities: { id: 1, name: 'Paris', }, }, { users2: { id: 2, name: 'Jane', cityId: null, }, cities: null, }, ]); }); test('join subquery', async () => { const coursesTable = mysqlTable('courses', { id: serial('id').primaryKey(), name: text('name').notNull(), categoryId: int('category_id').references(() => courseCategoriesTable.id), }); const courseCategoriesTable = mysqlTable('course_categories', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${coursesTable}`); await db.execute(sql`drop table if exists ${courseCategoriesTable}`); await db.execute( sql` create table ${courseCategoriesTable} ( \`id\` serial primary key, \`name\` text not null ) `, ); await db.execute( sql` create table ${coursesTable} ( \`id\` serial primary key, \`name\` text not null, \`category_id\` int references ${courseCategoriesTable}(\`id\`) ) `, ); await db.insert(courseCategoriesTable).values([ { name: 'Category 1' }, { name: 'Category 2' }, { name: 'Category 3' }, { name: 'Category 4' }, ]); await db.insert(coursesTable).values([ { name: 'Development', categoryId: 2 }, { name: 'IT & Software', categoryId: 3 }, { name: 'Marketing', categoryId: 4 }, { name: 'Design', categoryId: 1 }, ]); const sq2 = db .select({ categoryId: courseCategoriesTable.id, category: courseCategoriesTable.name, total: sql`count(${courseCategoriesTable.id})`, }) .from(courseCategoriesTable) .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) .as('sq2'); const res = await db .select({ courseName: coursesTable.name, categoryId: sq2.categoryId, }) .from(coursesTable) .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) .orderBy(coursesTable.name); await db.execute(sql`drop table ${coursesTable}`); await db.execute(sql`drop table ${courseCategoriesTable}`); expect(res).toEqual([ { courseName: 'Design', categoryId: 1 }, { courseName: 'Development', categoryId: 2 }, { courseName: 'IT & Software', categoryId: 3 }, { courseName: 'Marketing', categoryId: 4 }, ]); }); test('with ... select', async () => { const orders = mysqlTable('orders', { id: serial('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull(), amount: int('amount').notNull(), quantity: int('quantity').notNull(), }); await db.execute(sql`drop table if exists ${orders}`); await db.execute( sql` create table ${orders} ( \`id\` serial primary key, \`region\` text not null, \`product\` text not null, \`amount\` int not null, \`quantity\` int not null ) `, ); await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const regionalSales = db .$with('regional_sales') .as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = db .$with('top_regions') .as( db .select({ region: regionalSales.region, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, productSales: sql`cast(sum(${orders.amount}) as unsigned)`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product); await db.execute(sql`drop table ${orders}`); expect(result).toEqual([ { region: 'Europe', product: 'A', productUnits: 3, productSales: 30, }, { region: 'Europe', product: 'B', productUnits: 5, productSales: 50, }, { region: 'US', product: 'A', productUnits: 7, productSales: 70, }, { region: 'US', product: 'B', productUnits: 9, productSales: 90, }, ]); }); test('select from subquery sql', async () => { await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); const sq = db .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) .from(users2Table) .as('sq'); const res = await db.select({ name: sq.name }).from(sq); expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); }); test('select a field without joining its table', () => { expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); }); test('select all fields from subquery without alias', () => { const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); expect(() => db.select().from(sq).prepare()).toThrowError(); }); test('select count()', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); const res = await db.select({ count: sql`count(*)` }).from(usersTable); expect(res).toEqual([{ count: 2 }]); }); test('select for ...', () => { { const query = db.select().from(users2Table).for('update').toSQL(); expect(query.sql).toMatch(/ for update$/); } { const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); expect(query.sql).toMatch(/ for share skip locked$/); } { const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); expect(query.sql).toMatch(/ for update nowait$/); } }); test('having', async () => { await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2, }]); const result = await db .select({ id: citiesTable.id, name: sql`upper(${citiesTable.name})`.as('upper_name'), usersCount: sql`count(${users2Table.id})`.as('users_count'), }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(citiesTable.id) .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name); expect(result).toEqual([ { id: 1, name: 'LONDON', usersCount: 2, }, { id: 2, name: 'PARIS', usersCount: 1, }, ]); }); test('view', async () => { const newYorkers1 = mysqlView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); const newYorkers2 = mysqlView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); const newYorkers3 = mysqlView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).existing(); await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop view ${newYorkers1}`); }); test('select from raw sql', async () => { const result = await db.select({ id: sql`id`, name: sql`name`, }).from(sql`(select 1 as id, 'John' as name) as users`); Expect>; expect(result).toEqual([ { id: 1, name: 'John' }, ]); }); test('select from raw sql with joins', async () => { const result = await db .select({ id: sql`users.id`, name: sql`users.name`, userCity: sql`users.city`, cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); Expect>; expect(result).toEqual([ { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, ]); }); test('join on aliased sql from select', async () => { const result = await db .select({ userId: sql`users.id`.as('userId'), name: sql`users.name`, userCity: sql`users.city`, cityId: sql`cities.id`.as('cityId'), cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); Expect>; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('join on aliased sql from with clause', async () => { const users = db.$with('users').as( db.select({ id: sql`id`.as('userId'), name: sql`name`.as('userName'), city: sql`city`.as('city'), }).from( sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, ), ); const cities = db.$with('cities').as( db.select({ id: sql`id`.as('cityId'), name: sql`name`.as('cityName'), }).from( sql`(select 1 as id, 'Paris' as name) as cities`, ), ); const result = await db .with(users, cities) .select({ userId: users.id, name: users.name, userCity: users.city, cityId: cities.id, cityName: cities.name, }) .from(users) .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); Expect>; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('prefixed table', async () => { const mysqlTable = mysqlTableCreator((name) => `myprefix_${name}`); const users = mysqlTable('test_prefixed_table_with_unique_name', { id: int('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, ); await db.insert(users).values({ id: 1, name: 'John' }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, name: 'John' }]); await db.execute(sql`drop table ${users}`); }); test('orderBy with aliased column', () => { const query = db.select({ test: sql`something`.as('test'), }).from(users2Table).orderBy((fields) => fields.test).toSQL(); expect(query.sql).toBe(`select something as \`test\` from \`${getTableName(users2Table)}\` order by \`test\``); }); test('timestamp timezone', async () => { const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); await db.insert(usersTable).values({ name: 'With default times' }); await db.insert(usersTable).values({ name: 'Without default times', createdAt: date, }); const users = await db.select().from(usersTable); // check that the timestamps are set correctly for default times expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); // check that the timestamps are set correctly for non default times expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); }); test('transaction', async () => { const users = mysqlTable('users_transactions', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); const products = mysqlTable('products_transactions', { id: serial('id').primaryKey(), price: int('price').notNull(), stock: int('stock').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql`create table ${users} (id serial not null primary key, balance int not null)`); await db.execute( sql`create table ${products} (id serial not null primary key, price int not null, stock int not null)`, ); const [{ insertId: userId }] = await db.insert(users).values({ balance: 100 }); const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); const [{ insertId: productId }] = await db.insert(products).values({ price: 10, stock: 10 }); const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); await db.transaction(async (tx) => { await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); }); const result = await db.select().from(users); await db.execute(sql`drop table ${users}`); await db.execute(sql`drop table ${products}`); expect(result).toEqual([{ id: 1, balance: 90 }]); }); test('transaction rollback', async () => { const users = mysqlTable('users_transactions_rollback', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, balance int not null)`, ); await expect((async () => { await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); tx.rollback(); }); })()).rejects.toThrowError(TransactionRollbackError); const result = await db.select().from(users); await db.execute(sql`drop table ${users}`); expect(result).toEqual([]); }); test('nested transaction', async () => { const users = mysqlTable('users_nested_transactions', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, balance int not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }); }); }); const result = await db.select().from(users); await db.execute(sql`drop table ${users}`); expect(result).toEqual([{ id: 1, balance: 200 }]); }); test('nested transaction rollback', async () => { const users = mysqlTable('users_nested_transactions_rollback', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, balance int not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); await expect((async () => { await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }); tx.rollback(); }); })()).rejects.toThrowError(TransactionRollbackError); }); const result = await db.select().from(users); await db.execute(sql`drop table ${users}`); expect(result).toEqual([{ id: 1, balance: 100 }]); }); test('join subquery with join', async () => { const internalStaff = mysqlTable('internal_staff', { userId: int('user_id').notNull(), }); const customUser = mysqlTable('custom_user', { id: int('id').notNull(), }); const ticket = mysqlTable('ticket', { staffId: int('staff_id').notNull(), }); await db.execute(sql`drop table if exists ${internalStaff}`); await db.execute(sql`drop table if exists ${customUser}`); await db.execute(sql`drop table if exists ${ticket}`); await db.execute(sql`create table ${internalStaff} (user_id integer not null)`); await db.execute(sql`create table ${customUser} (id integer not null)`); await db.execute(sql`create table ${ticket} (staff_id integer not null)`); await db.insert(internalStaff).values({ userId: 1 }); await db.insert(customUser).values({ id: 1 }); await db.insert(ticket).values({ staffId: 1 }); const subq = db .select() .from(internalStaff) .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) .as('internal_staff'); const mainQuery = await db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); await db.execute(sql`drop table ${internalStaff}`); await db.execute(sql`drop table ${customUser}`); await db.execute(sql`drop table ${ticket}`); expect(mainQuery).toEqual([{ ticket: { staffId: 1 }, internal_staff: { internal_staff: { userId: 1 }, custom_user: { id: 1 }, }, }]); }); test('subquery with view', async () => { const users = mysqlTable('users_subquery_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }); const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 1 }, { name: 'Jill', cityId: 2 }, ]); const sq = db.$with('sq').as(db.select().from(newYorkers)); const result = await db.with(sq).select().from(sq); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 3, name: 'Jack', cityId: 1 }, ]); }); test('join view as subquery', async () => { const users = mysqlTable('users_join_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }); const newYorkers = mysqlView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 1 }, { name: 'Jill', cityId: 2 }, ]); const sq = db.select().from(newYorkers).as('new_yorkers_sq'); const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); expect(result).toEqual([ { users_join_view: { id: 1, name: 'John', cityId: 1 }, new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, }, { users_join_view: { id: 2, name: 'Jane', cityId: 2 }, new_yorkers_sq: null, }, { users_join_view: { id: 3, name: 'Jack', cityId: 1 }, new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, }, { users_join_view: { id: 4, name: 'Jill', cityId: 2 }, new_yorkers_sq: null, }, ]); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); }); test('select iterator', async () => { const users = mysqlTable('users_iterator', { id: serial('id').primaryKey(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key)`); await db.insert(users).values([{}, {}, {}]); const iter = db.select().from(users).iterator(); const result: typeof users.$inferSelect[] = []; for await (const row of iter) { result.push(row); } expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); test('select iterator w/ prepared statement', async () => { const users = mysqlTable('users_iterator', { id: serial('id').primaryKey(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key)`); await db.insert(users).values([{}, {}, {}]); const prepared = db.select().from(users).prepare(); const iter = prepared.iterator(); const result: typeof users.$inferSelect[] = []; for await (const row of iter) { result.push(row); } expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); test('insert undefined', async () => { const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.insert(users).values({ name: undefined }); })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); test('update undefined', async () => { const users = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.update(users).set({ name: undefined }); })()).rejects.toThrowError(); await expect((async () => { await db.update(users).set({ id: 1, name: undefined }); })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); ================================================ FILE: integration-tests/tests/mysql/mysql-proxy.test.ts ================================================ import retry from 'async-retry'; import type { MySqlRemoteDatabase } from 'drizzle-orm/mysql-proxy'; import { drizzle as proxyDrizzle } from 'drizzle-orm/mysql-proxy'; import * as mysql from 'mysql2/promise'; import { afterAll, beforeAll, beforeEach } from 'vitest'; import { skipTests } from '~/common'; import { createDockerDB, tests } from './mysql-common'; const ENABLE_LOGGING = false; // eslint-disable-next-line drizzle-internal/require-entity-kind class ServerSimulator { constructor(private db: mysql.Connection) {} async query(sql: string, params: any[], method: 'all' | 'execute') { if (method === 'all') { try { const result = await this.db.query({ sql, values: params, rowsAsArray: true, typeCast: function(field: any, next: any) { if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { return field.string(); } return next(); }, }); return { data: result[0] as any }; } catch (e: any) { return { error: e }; } } else if (method === 'execute') { try { const result = await this.db.query({ sql, values: params, typeCast: function(field: any, next: any) { if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { return field.string(); } return next(); }, }); return { data: result as any }; } catch (e: any) { return { error: e }; } } else { return { error: 'Unknown method value' }; } } async migrations(queries: string[]) { await this.db.query('START TRANSACTION'); try { for (const query of queries) { await this.db.query(query); } await this.db.query('COMMIT'); } catch (e) { await this.db.query('ROLLBACK'); throw e; } return {}; } } let db: MySqlRemoteDatabase; let client: mysql.Connection; let serverSimulator: ServerSimulator; beforeAll(async () => { let connectionString; if (process.env['MYSQL_CONNECTION_STRING']) { connectionString = process.env['MYSQL_CONNECTION_STRING']; } else { const { connectionString: conStr } = await createDockerDB(); connectionString = conStr; } client = await retry(async () => { client = await mysql.createConnection({ uri: connectionString, supportBigNumbers: true, }); await client.connect(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); serverSimulator = new ServerSimulator(client); db = proxyDrizzle(async (sql, params, method) => { try { const response = await serverSimulator.query(sql, params, method); if (response.error !== undefined) { throw response.error; } return { rows: response.data }; } catch (e: any) { console.error('Error from mysql proxy server:', e.message); throw e; } }, { logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end(); }); beforeEach((ctx) => { ctx.mysql = { db, }; }); skipTests([ 'select iterator w/ prepared statement', 'select iterator', 'nested transaction rollback', 'nested transaction', 'transaction rollback', 'transaction', 'transaction with options (set isolationLevel)', 'migrator', ]); tests(); ================================================ FILE: integration-tests/tests/mysql/mysql.test.ts ================================================ import retry from 'async-retry'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import * as mysql from 'mysql2/promise'; import { afterAll, beforeAll, beforeEach } from 'vitest'; import { createDockerDB, tests } from './mysql-common'; import { TestCache, TestGlobalCache, tests as cacheTests } from './mysql-common-cache'; const ENABLE_LOGGING = false; let db: MySql2Database; let dbGlobalCached: MySql2Database; let cachedDb: MySql2Database; let client: mysql.Connection; beforeAll(async () => { let connectionString; if (process.env['MYSQL_CONNECTION_STRING']) { connectionString = process.env['MYSQL_CONNECTION_STRING']; } else { const { connectionString: conStr } = await createDockerDB(); connectionString = conStr; } client = await retry(async () => { client = await mysql.createConnection({ uri: connectionString!, supportBigNumbers: true, }); await client.connect(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); afterAll(async () => { await client?.end(); }); beforeEach((ctx) => { ctx.mysql = { db, }; ctx.cachedMySQL = { db: cachedDb, dbGlobalCached, }; }); cacheTests(); tests(); ================================================ FILE: integration-tests/tests/mysql/tidb-serverless.test.ts ================================================ import 'dotenv/config'; import { connect } from '@tidbcloud/serverless'; import type { TiDBServerlessDatabase } from 'drizzle-orm/tidb-serverless'; import { drizzle } from 'drizzle-orm/tidb-serverless'; import { beforeAll, beforeEach } from 'vitest'; import { skipTests } from '~/common.ts'; import { tests } from './mysql-common.ts'; const ENABLE_LOGGING = false; let db: TiDBServerlessDatabase; beforeAll(async () => { const connectionString = process.env['TIDB_CONNECTION_STRING']; if (!connectionString) { throw new Error('TIDB_CONNECTION_STRING is not set'); } const client = connect({ url: connectionString }); db = drizzle(client!, { logger: ENABLE_LOGGING }); }); beforeEach((ctx) => { ctx.mysql = { db, }; }); skipTests([ 'mySchema :: select with group by as field', 'mySchema :: delete with returning all fields', 'mySchema :: update with returning partial', 'mySchema :: delete returning sql', 'mySchema :: insert returning sql', 'test $onUpdateFn and $onUpdate works updating', 'set operations (mixed all) as function with subquery', 'set operations (union) from query builder with subquery', 'join on aliased sql from with clause', 'join on aliased sql from select', 'select from raw sql with joins', 'select from raw sql', 'having', 'select count()', 'with ... select', 'insert via db.execute w/ query builder', 'insert via db.execute + select via db.execute', 'select with group by as sql', 'select with group by as field', 'insert many with returning', 'delete with returning partial', 'delete with returning all fields', 'update with returning partial', 'update with returning all fields', 'update returning sql', 'delete returning sql', 'insert returning sql', // not supported 'set operations (except all) as function', 'set operations (except all) from query builder', 'set operations (intersect all) as function', 'set operations (intersect all) from query builder', 'set operations (union all) as function', 'tc config for datetime', 'select iterator w/ prepared statement', 'select iterator', 'transaction', 'transaction with options (set isolationLevel)', 'Insert all defaults in multiple rows', 'Insert all defaults in 1 row', '$default with empty array', 'utc config for datetime', ]); tests(); ================================================ FILE: integration-tests/tests/mysql-returning.test.ts ================================================ import 'dotenv/config'; import type { TestFn } from 'ava'; import anyTest from 'ava'; import Docker from 'dockerode'; import { DefaultLogger, sql } from 'drizzle-orm'; import { boolean, json, mysqlTable, serial, text, timestamp, varchar } from 'drizzle-orm/mysql-core'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import getPort from 'get-port'; import * as mysql from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; const ENABLE_LOGGING = false; const usersTable = mysqlTable('userstest', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: json('jsonb').$type(), createdAt: timestamp('created_at', { fsp: 2 }).notNull().defaultNow(), }); interface Context { docker: Docker; mysqlContainer: Docker.Container; db: MySql2Database; client: mysql.Connection; } const test = anyTest as TestFn; async function createDockerDB(ctx: Context): Promise { const docker = (ctx.docker = new Docker()); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); ctx.mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await ctx.mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } test.before(async (t) => { const ctx = t.context; const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? await createDockerDB(ctx); const sleep = 1000; let timeLeft = 20000; let connected = false; let lastError: unknown | undefined; do { try { ctx.client = await mysql.createConnection(connectionString); await ctx.client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await ctx.client?.end().catch(console.error); await ctx.mysqlContainer?.stop().catch(console.error); throw lastError; } ctx.db = drizzle(ctx.client, { logger: ENABLE_LOGGING ? new DefaultLogger() : undefined }); }); test.after.always(async (t) => { const ctx = t.context; await ctx.client?.end().catch(console.error); await ctx.mysqlContainer?.stop().catch(console.error); }); test.beforeEach(async (t) => { const ctx = t.context; await ctx.db.execute(sql`drop table if exists \`userstest\``); await ctx.db.execute(sql`drop table if exists \`users2\``); await ctx.db.execute(sql`drop table if exists \`cities\``); await ctx.db.execute( sql` create table \`userstest\` ( \`id\` serial primary key, \`name\` text not null, \`verified\` boolean not null default false, \`jsonb\` json, \`created_at\` timestamp not null default now() ) `, ); await ctx.db.execute( sql` create table \`users2\` ( \`id\` serial primary key, \`name\` text not null, \`city_id\` int references \`cities\`(\`id\`) ) `, ); await ctx.db.execute( sql` create table \`cities\` ( \`id\` serial primary key, \`name\` text not null ) `, ); }); async function setupReturningFunctionsTest(db: MySql2Database) { await db.execute(sql`drop table if exists \`users_default_fn\``); await db.execute( sql` create table \`users_default_fn\` ( \`id\` varchar(256) primary key, \`name\` text not null ); `, ); } test.serial('insert $returningId: serail as id', async (t) => { const { db } = t.context; const result = await db.insert(usersTable).values({ name: 'John' }).$returningId(); // ^? t.deepEqual(result, [{ id: 1 }]); }); test.serial('insert $returningId: serail as id, batch insert', async (t) => { const { db } = t.context; const result = await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]).$returningId(); // ^? t.deepEqual(result, [{ id: 1 }, { id: 2 }]); }); test.serial('insert $returningId: $default as primary key', async (t) => { const { db } = t.context; const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; let iterator = 0; const usersTableDefFn = mysqlTable('users_default_fn', { customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { const value = uniqueKeys[iterator]!; iterator++; return value; }), name: text('name').notNull(), }); await setupReturningFunctionsTest(db); const result = await db.insert(usersTableDefFn).values([{ name: 'John' }, { name: 'John1' }]) // ^? .$returningId(); t.deepEqual(result, [{ customId: 'ao865jf3mcmkfkk8o5ri495z' }, { customId: 'dyqs529eom0iczo2efxzbcut' }]); }); test.serial('insert $returningId: $default as primary key with value', async (t) => { const { db } = t.context; const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; let iterator = 0; const usersTableDefFn = mysqlTable('users_default_fn', { customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { const value = uniqueKeys[iterator]!; iterator++; return value; }), name: text('name').notNull(), }); await setupReturningFunctionsTest(db); const result = await db.insert(usersTableDefFn).values([{ name: 'John', customId: 'test' }, { name: 'John1' }]) // ^? .$returningId(); t.deepEqual(result, [{ customId: 'test' }, { customId: 'ao865jf3mcmkfkk8o5ri495z' }]); }); ================================================ FILE: integration-tests/tests/pg/awsdatapi.test.ts ================================================ import 'dotenv/config'; import { RDSDataClient } from '@aws-sdk/client-rds-data'; import * as dotenv from 'dotenv'; import { asc, eq, inArray, notInArray, relations, sql, TransactionRollbackError } from 'drizzle-orm'; import type { AwsDataApiPgDatabase } from 'drizzle-orm/aws-data-api/pg'; import { drizzle } from 'drizzle-orm/aws-data-api/pg'; import { migrate } from 'drizzle-orm/aws-data-api/pg/migrator'; import { alias, boolean, date, integer, jsonb, pgTable, pgTableCreator, serial, text, time, timestamp, uuid, } from 'drizzle-orm/pg-core'; import { Resource } from 'sst'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; import type { Equal } from '../utils.ts'; import { Expect, randomString } from '../utils.ts'; dotenv.config(); const ENABLE_LOGGING = false; const usersTable = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: jsonb('jsonb').$type(), bestTexts: text('best_texts') .array() .default(sql`'{}'`) .notNull(), createdAt: timestamp('created_at', { withTimezone: true }) .notNull() .defaultNow(), }); const usersMigratorTable = pgTable('users12', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }); const todo = pgTable('todo', { id: uuid('id').primaryKey(), title: text('title').notNull(), description: text('description'), }); const todoRelations = relations(todo, (ctx) => ({ user: ctx.many(todoUser), })); const user = pgTable('user', { id: uuid('id').primaryKey(), email: text('email').notNull(), }); const userRelations = relations(user, (ctx) => ({ todos: ctx.many(todoUser), })); const todoUser = pgTable('todo_user', { todoId: uuid('todo_id').references(() => todo.id), userId: uuid('user_id').references(() => user.id), }); const todoToGroupRelations = relations(todoUser, (ctx) => ({ todo: ctx.one(todo, { fields: [todoUser.todoId], references: [todo.id], }), user: ctx.one(user, { fields: [todoUser.userId], references: [user.id], }), })); const schema = { todo, todoRelations, user, userRelations, todoUser, todoToGroupRelations, }; let db: AwsDataApiPgDatabase; beforeAll(async () => { const rdsClient = new RDSDataClient(); db = drizzle(rdsClient, { // @ts-ignore database: Resource.Postgres.database, // @ts-ignore secretArn: Resource.Postgres.secretArn, // @ts-ignore resourceArn: Resource.Postgres.clusterArn, logger: ENABLE_LOGGING, schema, }); }); beforeEach(async () => { await db.execute(sql`drop schema public cascade`); await db.execute(sql`create schema public`); await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, best_texts text[] not null default '{}', created_at timestamptz not null default now() ) `, ); await db.execute( sql` create table todo ( id uuid primary key, title text not null, description text ) `, ); await db.execute( sql` create table "user" ( id uuid primary key, email text not null ) `, ); await db.execute( sql` create table todo_user ( todo_id uuid references todo(id), user_id uuid references "user"(id) ) `, ); }); test('select all fields', async () => { const insertResult = await db.insert(usersTable).values({ name: 'John' }); expect(insertResult.numberOfRecordsUpdated).toBe(1); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 100); expect(result).toEqual([ { bestTexts: [], id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt, }, ]); }); test('select sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select with empty array in inArray', async () => { await db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const users = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(inArray(usersTable.id, [])); expect(users).toEqual([]); }); test('select with empty array in notInArray', async () => { await db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(notInArray(usersTable.id, [])); expect(result).toEqual([ { name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }, ]); }); test('select typed sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select distinct', async () => { const usersDistinctTable = pgTable('users_distinct', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); await db.execute( sql`create table ${usersDistinctTable} (id integer, name text)`, ); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]); const users1 = await db .selectDistinct() .from(usersDistinctTable) .orderBy(usersDistinctTable.id, usersDistinctTable.name); const users2 = await db .selectDistinctOn([usersDistinctTable.id]) .from(usersDistinctTable) .orderBy(usersDistinctTable.id); const users3 = await db .selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name, }) .from(usersDistinctTable) .orderBy(usersDistinctTable.name); await db.execute(sql`drop table ${usersDistinctTable}`); expect(users1).toEqual([ { id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, ]); expect(users2.length).toEqual(2); expect(users2[0]?.id).toEqual(1); expect(users2[1]?.id).toEqual(2); expect(users3.length).toEqual(2); expect(users3[0]?.name).toEqual('Jane'); expect(users3[1]?.name).toEqual('John'); }); test('insert returning sql', async () => { const users = await db .insert(usersTable) .values({ name: 'John' }) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('delete returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .delete(usersTable) .where(eq(usersTable.name, 'John')) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('update returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JANE' }]); }); test('update with returning all fields', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); expect(users).toEqual([ { id: 1, bestTexts: [], name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt, }, ]); }); test('update with returning partial', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning({ id: usersTable.id, name: usersTable.name, }); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .delete(usersTable) .where(eq(usersTable.name, 'John')) .returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 100); expect(users).toEqual([ { bestTexts: [], id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt, }, ]); }); test('delete with returning partial', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db .delete(usersTable) .where(eq(usersTable.name, 'John')) .returning({ id: usersTable.id, name: usersTable.name, }); expect(users).toEqual([{ id: 1, name: 'John' }]); }); test('insert + select', async () => { await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([ { bestTexts: [], id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt, }, ]); await db.insert(usersTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersTable); expect(result2).toEqual([ { bestTexts: [], id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt, }, { bestTexts: [], id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt, }, ]); }); test('json insert', async () => { await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db .select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, }) .from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); test('insert with overridden default values', async () => { await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([ { bestTexts: [], id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt, }, ]); }); test('insert many', async () => { await db .insert(usersTable) .values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); const result = await db .select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }) .from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('insert many with returning', async () => { const result = await db .insert(usersTable) .values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]) .returning({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('select with group by as field', async () => { await db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.name); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with group by as sql', async () => { await db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(sql`${usersTable.name}`); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with group by as sql + column', async () => { await db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); expect(result).toEqual([ { name: 'Jane' }, { name: 'Jane' }, { name: 'John' }, ]); }); test('select with group by as column + sql', async () => { await db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); expect(result).toEqual([ { name: 'Jane' }, { name: 'Jane' }, { name: 'John' }, ]); }); test('select with group by complex query', async () => { await db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async () => { const query = db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', params: [], // typings: [] }); }); test('insert sql', async () => { await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async () => { const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([ { id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }, ]); const result = await db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }) .from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); expect(result).toEqual([ { user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }, ]); }); test('full join with alias', async () => { const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([ { id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }, ]); const result = await db .select() .from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); expect(result).toEqual([ { users: { id: 10, bestTexts: [], name: 'Ivan', verified: false, jsonb: null, createdAt: result[0]!.users.createdAt, }, customer: { bestTexts: [], id: 11, name: 'Hans', verified: false, jsonb: null, createdAt: result[0]!.customer!.createdAt, }, }, ]); }); test('select from alias', async () => { const pgTable = pgTableCreator((name) => `prefixed_${name}`); const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial primary key, name text not null)`, ); const user = alias(users, 'user'); const customers = alias(users, 'customer'); await db.insert(users).values([ { id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }, ]); const result = await db .select() .from(user) .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)); expect(result).toEqual([ { user: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }, ]); await db.execute(sql`drop table ${users}`); }); test('insert with spaces', async () => { await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async () => { await db.insert(usersTable).values({ name: 'John' }); const statement = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .prepare('statement1'); const result = await statement.execute(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement reuse', async () => { const stmt = db .insert(usersTable) .values({ verified: true, name: sql.placeholder('name'), }) .prepare('stmt2'); for (let i = 0; i < 10; i++) { await stmt.execute({ name: `John ${i}` }); } const result = await db .select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified, }) .from(usersTable); expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, { id: 4, name: 'John 3', verified: true }, { id: 5, name: 'John 4', verified: true }, { id: 6, name: 'John 5', verified: true }, { id: 7, name: 'John 6', verified: true }, { id: 8, name: 'John 7', verified: true }, { id: 9, name: 'John 8', verified: true }, { id: 10, name: 'John 9', verified: true }, ]); }); test('prepared statement with placeholder in .where', async () => { await db.insert(usersTable).values({ name: 'John' }); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .prepare('stmt3'); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('migrator : default migration strategy', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); test('migrator : migrate with custom schema', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: 'custom_migrations', }); // test if the custom migrations table was created const { rows } = await db.execute( sql`select * from custom_migrations."__drizzle_migrations";`, ); expect(rows).toBeTruthy(); expect(rows!.length).toBeGreaterThan(0); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute( sql`drop table custom_migrations."__drizzle_migrations"`, ); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable, }); // test if the custom migrations table was created const { rows } = await db.execute( sql`select * from "drizzle".${sql.identifier(customTable)};`, ); expect(rows).toBeTruthy(); expect(rows!.length).toBeGreaterThan(0); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); test('migrator : migrate with custom table and custom schema', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable, migrationsSchema: 'custom_migrations', }); // test if the custom migrations table was created const { rows } = await db.execute( sql`select * from custom_migrations.${ sql.identifier( customTable, ) };`, ); expect(rows).toBeTruthy(); expect(rows!.length).toBeGreaterThan(0); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute( sql`drop table custom_migrations.${ sql.identifier( customTable, ) }`, ); }); test('insert via db.execute + select via db.execute', async () => { await db.execute( sql`insert into ${usersTable} (${ sql.identifier( usersTable.name.name, ) }) values (${'John'})`, ); const result = await db.execute<{ id: number; name: string }>( sql`select id, name from "users"`, ); expectTypeOf(result.rows).toEqualTypeOf<{ id: number; name: string }[]>(); expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute + returning', async () => { const inserted = await db.execute( sql`insert into ${usersTable} (${ sql.identifier( usersTable.name.name, ) }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute( db .insert(usersTable) .values({ name: 'John' }) .returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); test('build query insert with onConflict do update', async () => { const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict ("id") do update set "name" = :3', params: ['John', '["foo","bar"]', 'John1'], // typings: ['none', 'json', 'none'] }); }); test('build query insert with onConflict do update / multiple columns', async () => { const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' }, }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict ("id","name") do update set "name" = :3', params: ['John', '["foo","bar"]', 'John1'], // typings: ['none', 'json', 'none'] }); }); test('build query insert with onConflict do nothing', async () => { const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing() .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict do nothing', params: ['John', '["foo","bar"]'], // typings: ['none', 'json'] }); }); test('build query insert with onConflict do nothing + target', async () => { const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing({ target: usersTable.id }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "best_texts", "created_at") values (default, :1, default, :2, default, default) on conflict ("id") do nothing', params: ['John', '["foo","bar"]'], // typings: ['none', 'json'] }); }); test('insert with onConflict do update', async () => { await db.insert(usersTable).values({ name: 'John' }); await db .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert with onConflict do nothing', async () => { await db.insert(usersTable).values({ name: 'John' }); await db .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoNothing(); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert with onConflict do nothing + target', async () => { await db.insert(usersTable).values({ name: 'John' }); await db .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoNothing({ target: usersTable.id }); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('transaction', async () => { const users = pgTable('users_transactions', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); const products = pgTable('products_transactions', { id: serial('id').primaryKey(), price: integer('price').notNull(), stock: integer('stock').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${products}`); await db.execute( sql`create table users_transactions (id serial not null primary key, balance integer not null)`, ); await db.execute( sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, ); const user = await db .insert(users) .values({ balance: 100 }) .returning() .then((rows) => rows[0]!); const product = await db .insert(products) .values({ price: 10, stock: 10 }) .returning() .then((rows) => rows[0]!); await db.transaction(async (tx) => { await tx .update(users) .set({ balance: user.balance - product.price }) .where(eq(users.id, user.id)); await tx .update(products) .set({ stock: product.stock - 1 }) .where(eq(products.id, product.id)); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 90 }]); await db.execute(sql`drop table ${users}`); await db.execute(sql`drop table ${products}`); }); test('transaction rollback', async () => { const users = pgTable('users_transactions_rollback', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, ); await expect( db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); tx.rollback(); }), ).rejects.toThrowError(TransactionRollbackError); const result = await db.select().from(users); expect(result).toEqual([]); await db.execute(sql`drop table ${users}`); }); test('nested transaction', async () => { const users = pgTable('users_nested_transactions', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }); }); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 200 }]); await db.execute(sql`drop table ${users}`); }); test('nested transaction rollback', async () => { const users = pgTable('users_nested_transactions_rollback', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); await expect( tx.transaction(async (tx2) => { await tx2.update(users).set({ balance: 200 }); tx2.rollback(); }), ).rejects.toThrowError(TransactionRollbackError); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 100 }]); await db.execute(sql`drop table ${users}`); }); test('select from raw sql', async () => { const result = await db.execute(sql`select 1 as id, 'John' as name`); expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); test('select from raw sql with mapped values', async () => { const result = await db .select({ id: sql`id`, name: sql`name`, }) .from(sql`(select 1 as id, 'John' as name) as users`); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('insert with array values works', async () => { const bestTexts = ['text1', 'text2', 'text3']; const [insertResult] = await db .insert(usersTable) .values({ name: 'John', bestTexts, }) .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); test('update with array values works', async () => { const [newUser] = await db .insert(usersTable) .values({ name: 'John' }) .returning(); const bestTexts = ['text4', 'text5', 'text6']; const [insertResult] = await db .update(usersTable) .set({ bestTexts, }) .where(eq(usersTable.id, newUser!.id)) .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); test('insert with array values works', async () => { const bestTexts = ['text1', 'text2', 'text3']; const [insertResult] = await db .insert(usersTable) .values({ name: 'John', bestTexts, }) .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); test('update with array values works', async () => { const [newUser] = await db .insert(usersTable) .values({ name: 'John' }) .returning(); const bestTexts = ['text4', 'text5', 'text6']; const [insertResult] = await db .update(usersTable) .set({ bestTexts, }) .where(eq(usersTable.id, newUser!.id)) .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); test('insert with array values works', async () => { const bestTexts = ['text1', 'text2', 'text3']; const [insertResult] = await db .insert(usersTable) .values({ name: 'John', bestTexts, }) .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); test('update with array values works', async () => { const [newUser] = await db .insert(usersTable) .values({ name: 'John' }) .returning(); const bestTexts = ['text4', 'text5', 'text6']; const [insertResult] = await db .update(usersTable) .set({ bestTexts, }) .where(eq(usersTable.id, newUser!.id)) .returning(); expect(insertResult?.bestTexts).toEqual(bestTexts); }); test('all date and time columns', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), dateString: date('date_string', { mode: 'string' }).notNull(), time: time('time', { precision: 3 }).notNull(), datetime: timestamp('datetime').notNull(), // datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), datetimeFullPrecision: timestamp('datetime_full_precision', { precision: 6, mode: 'string', }).notNull(), // datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, date_string date not null, time time(3) not null, datetime timestamp not null, -- datetime_wtz timestamp with time zone not null, datetime_string timestamp not null, datetime_full_precision timestamp(6) not null -- datetime_wtz_string timestamp with time zone not null ) `); const someDatetime = new Date('2022-01-01T00:00:00.123Z'); const fullPrecision = '2022-01-01T00:00:00.123456'; const someTime = '23:23:12.432'; await db.insert(table).values({ dateString: '2022-01-01', time: someTime, datetime: someDatetime, // datetimeWTZ: someDatetime, datetimeString: '2022-01-01T00:00:00.123Z', datetimeFullPrecision: fullPrecision, // datetimeWTZString: '2022-01-01T00:00:00.123Z', }); const result = await db.select().from(table); Expect< Equal< { id: number; dateString: string; time: string; datetime: Date; // datetimeWTZ: Date; datetimeString: string; datetimeFullPrecision: string; // datetimeWTZString: string; }[], typeof result > >; Expect< Equal< { dateString: string; time: string; datetime: Date; // datetimeWTZ: Date; datetimeString: string; datetimeFullPrecision: string; // datetimeWTZString: string; id?: number | undefined; }, typeof table.$inferInsert > >; expect(result).toEqual([ { id: 1, dateString: '2022-01-01', time: someTime, datetime: someDatetime, // datetimeWTZ: someDatetime, datetimeString: '2022-01-01 00:00:00.123', datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), // datetimeWTZString: '2022-01-01 00:00:00.123+00', }, ]); await db.execute(sql`drop table if exists ${table}`); }); test.skip('all date and time columns with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6, }).notNull(), timestampAsDate: timestamp('timestamp_date', { withTimezone: true, precision: 3, }).notNull(), timestampTimeZones: timestamp('timestamp_date_2', { withTimezone: true, precision: 3, }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null, timestamp_date timestamp(3) with time zone not null, timestamp_date_2 timestamp(3) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; const timestampDate = new Date(); const timestampDateWTZ = new Date('2022-01-01 00:00:00.123 +0500'); const timestampString2 = '2022-01-01 00:00:00.123456-0400'; const timestampDate2 = new Date(); const timestampDateWTZ2 = new Date('2022-01-01 00:00:00.123 +0200'); await db.insert(table).values([ { timestamp: timestampString, timestampAsDate: timestampDate, timestampTimeZones: timestampDateWTZ, }, { timestamp: timestampString2, timestampAsDate: timestampDate2, timestampTimeZones: timestampDateWTZ2, }, ]); const result = await db.select().from(table); const result2 = await db.execute<{ id: number; timestamp_string: string; timestamp_date: string; timestamp_date_2: string; }>(sql`select * from ${table}`); // Whatever you put in, you get back when you're using the date mode // But when using the string mode, postgres returns a string transformed into UTC expect(result).toEqual([ { id: 1, timestamp: '2022-01-01 02:00:00.123456+00', timestampAsDate: timestampDate, timestampTimeZones: timestampDateWTZ, }, { id: 2, timestamp: '2022-01-01 04:00:00.123456+00', timestampAsDate: timestampDate2, timestampTimeZones: timestampDateWTZ2, }, ]); expect(result2.rows).toEqual([ { id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00', timestamp_date: timestampDate.toISOString().replace('T', ' ').replace('Z', '') + '+00', timestamp_date_2: timestampDateWTZ.toISOString().replace('T', ' ').replace('Z', '') + '+00', }, { id: 2, timestamp_string: '2022-01-01 04:00:00.123456+00', timestamp_date: timestampDate2.toISOString().replace('T', ' ').replace('Z', '') + '+00', timestamp_date_2: timestampDateWTZ2.toISOString().replace('T', ' ').replace('Z', '') + '+00', }, ]); expect(result[0]?.timestampTimeZones.getTime()).toEqual( new Date((result2.rows?.[0] as any).timestamp_date_2 as any).getTime(), ); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestampString: timestamp('timestamp_string', { mode: 'string', precision: 6, }).notNull(), timestampString2: timestamp('timestamp_string2', { precision: 3, mode: 'string', }).notNull(), timestampDate: timestamp('timestamp_date', { precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null, timestamp_string2 timestamp(3) not null, timestamp_date timestamp(3) not null ) `); const timestampString = '2022-01-01 00:00:00.123456'; // const timestampString2 = '2022-01-02 00:00:00.123 -0300'; const timestampString2 = '2022-01-02 00:00:00.123'; const timestampDate = new Date('2022-01-01 00:00:00.123Z'); const timestampString_2 = '2022-01-01 00:00:00.123456'; // const timestampString2_2 = '2022-01-01 00:00:00.123 -0300'; const timestampString2_2 = '2022-01-01 00:00:00.123'; // const timestampDate2 = new Date('2022-01-01 00:00:00.123 +0200'); const timestampDate2 = new Date('2022-01-01 00:00:00.123'); await db.insert(table).values([ { timestampString, timestampString2, timestampDate }, { timestampString: timestampString_2, timestampString2: timestampString2_2, timestampDate: timestampDate2, }, ]); const result = await db.select().from(table); const result2 = await db.execute<{ id: number; timestamp_string: string; timestamp_string2: string; timestamp_date: string; }>(sql`select * from ${table}`); // Whatever you put in, you get back when you're using the date mode // But when using the string mode, postgres returns a string transformed into UTC expect(result).toEqual([ { id: 1, timestampString: timestampString, timestampString2: '2022-01-02 00:00:00.123', timestampDate: timestampDate, }, { id: 2, timestampString: timestampString_2, timestampString2: '2022-01-01 00:00:00.123', timestampDate: timestampDate2, }, ]); expect(result2.rows).toEqual([ { id: 1, timestamp_string: timestampString, timestamp_string2: '2022-01-02 00:00:00.123', timestamp_date: timestampDate .toISOString() .replace('T', ' ') .replace('Z', ''), }, { id: 2, timestamp_string: timestampString_2, timestamp_string2: '2022-01-01 00:00:00.123', timestamp_date: timestampDate2 .toISOString() .replace('T', ' ') .replace('Z', ''), }, ]); expect((result2.rows?.[0] as any).timestamp_string).toEqual( '2022-01-01 00:00:00.123456', ); // need to add the 'Z', otherwise javascript assumes it's in local time expect( new Date(((result2.rows?.[0] as any).timestamp_date + 'Z') as any).getTime(), ).toEqual(timestampDate.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('Typehints mix for RQB', async () => { const uuid = 'd997d46d-5769-4c78-9a35-93acadbe6076'; const res = await db.query.user.findMany({ where: eq(user.id, uuid), with: { todos: { with: { todo: true, }, }, }, }); expect(res).toStrictEqual([]); }); test('Typehints mix for findFirst', async () => { const uuid = 'd997d46d-5769-4c78-9a35-93acadbe6076'; await db.insert(user).values({ id: uuid, email: 'd' }); const res = await db.query.user.findFirst({ where: eq(user.id, uuid), }); expect(res).toStrictEqual({ id: 'd997d46d-5769-4c78-9a35-93acadbe6076', email: 'd' }); }); afterAll(async () => { await db.execute(sql`drop table if exists "users"`); await db.execute(sql`drop table if exists "todo_user"`); await db.execute(sql`drop table if exists "user"`); await db.execute(sql`drop table if exists "todo"`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); }); ================================================ FILE: integration-tests/tests/pg/neon-http-batch.test.ts ================================================ import { neon, type NeonQueryFunction } from '@neondatabase/serverless'; import { drizzle, type NeonHttpDatabase } from 'drizzle-orm/neon-http'; import { beforeAll, beforeEach, expect, test } from 'vitest'; import { commentLikesConfig, commentsConfig, commentsTable, groupsConfig, groupsTable, postsConfig, postsTable, usersConfig, usersTable, usersToGroupsConfig, usersToGroupsTable, } from './neon-http-batch'; import { TestCache, TestGlobalCache } from './pg-common-cache'; const ENABLE_LOGGING = false; export const schema = { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable, commentLikesConfig, commentsConfig, postsConfig, usersToGroupsConfig, groupsConfig, usersConfig, }; let db: NeonHttpDatabase; let client: NeonQueryFunction; let dbGlobalCached: NeonHttpDatabase; let cachedDb: NeonHttpDatabase; beforeAll(async () => { const connectionString = process.env['NEON_HTTP_CONNECTION_STRING']; if (!connectionString) { throw new Error('NEON_HTTP_CONNECTION_STRING is not defined'); } client = neon(connectionString); db = drizzle(client, { schema, logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache(), }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache(), }); }); beforeEach((ctx) => { ctx.neonPg = { db, }; ctx.cachedPg = { db: cachedDb, dbGlobalCached, }; }); test('skip', async () => { expect(1).toBe(1); }); ================================================ FILE: integration-tests/tests/pg/neon-http-batch.ts ================================================ import Docker from 'dockerode'; import type { InferSelectModel } from 'drizzle-orm'; import { eq, relations, sql } from 'drizzle-orm'; import type { NeonHttpQueryResult } from 'drizzle-orm/neon-http'; import { integer, pgTable, primaryKey, serial, text, timestamp } from 'drizzle-orm/pg-core'; import type { AnyPgColumn } from 'drizzle-orm/pg-core'; import getPort from 'get-port'; import { v4 as uuidV4 } from 'uuid'; import { afterAll, beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; export const usersTable = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: integer('verified').notNull().default(0), invitedBy: integer('invited_by').references((): AnyPgColumn => usersTable.id), }); export const usersConfig = relations(usersTable, ({ one, many }) => ({ invitee: one(usersTable, { fields: [usersTable.invitedBy], references: [usersTable.id], }), usersToGroups: many(usersToGroupsTable), posts: many(postsTable), })); export const groupsTable = pgTable('groups', { id: serial('id').primaryKey(), name: text('name').notNull(), description: text('description'), }); export const groupsConfig = relations(groupsTable, ({ many }) => ({ usersToGroups: many(usersToGroupsTable), })); export const usersToGroupsTable = pgTable( 'users_to_groups', { id: serial('id'), userId: integer('user_id').notNull().references(() => usersTable.id), groupId: integer('group_id').notNull().references(() => groupsTable.id), }, (t) => ({ pk: primaryKey({ columns: [t.userId, t.groupId] }), }), ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { fields: [usersToGroupsTable.groupId], references: [groupsTable.id], }), user: one(usersTable, { fields: [usersToGroupsTable.userId], references: [usersTable.id], }), })); export const postsTable = pgTable('posts', { id: serial('id').primaryKey(), content: text('content').notNull(), ownerId: integer('owner_id').references(() => usersTable.id), createdAt: timestamp('created_at').notNull().defaultNow(), }); export const postsConfig = relations(postsTable, ({ one, many }) => ({ author: one(usersTable, { fields: [postsTable.ownerId], references: [usersTable.id], }), comments: many(commentsTable), })); export const commentsTable = pgTable('comments', { id: serial('id').primaryKey(), content: text('content').notNull(), creator: integer('creator').references(() => usersTable.id), postId: integer('post_id').references(() => postsTable.id), createdAt: timestamp('created_at').notNull().defaultNow(), }); export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ post: one(postsTable, { fields: [commentsTable.postId], references: [postsTable.id], }), author: one(usersTable, { fields: [commentsTable.creator], references: [usersTable.id], }), likes: many(commentLikesTable), })); export const commentLikesTable = pgTable('comment_likes', { id: serial('id').primaryKey(), creator: integer('creator').references(() => usersTable.id), commentId: integer('comment_id').references(() => commentsTable.id), createdAt: timestamp('created_at').notNull().defaultNow(), }); export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ comment: one(commentsTable, { fields: [commentLikesTable.commentId], references: [commentsTable.id], }), author: one(usersTable, { fields: [commentLikesTable.creator], references: [usersTable.id], }), })); let pgContainer: Docker.Container; export async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 5432 }); const image = 'postgres:14'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); pgContainer = await docker.createContainer({ Image: image, Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], name: `drizzle-integration-tests-${uuidV4()}`, HostConfig: { AutoRemove: true, PortBindings: { '5432/tcp': [{ HostPort: `${port}` }], }, }, }); await pgContainer.start(); return `postgres://postgres:postgres@localhost:${port}/postgres`; } afterAll(async () => { await pgContainer?.stop().catch(console.error); }); export function tests() { describe('common', () => { beforeEach(async (ctx) => { const { db } = ctx.pg; await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`drop schema if exists mySchema cascade`); await db.execute( sql` create table users ( id serial primary key, name text not null, verified int not null default 0, invited_by int references users(id) ) `, ); await db.execute( sql` create table groups ( id serial primary key, name text not null, description text ) `, ); await db.execute( sql` create table users_to_groups ( id serial, user_id int not null references users(id), group_id int not null references groups(id), primary key (user_id, group_id) ) `, ); await db.execute( sql` create table posts ( id serial primary key, content text not null, owner_id int references users(id), created_at timestamp not null default now() ) `, ); await db.execute( sql` create table comments ( id serial primary key, content text not null, creator int references users(id), post_id int references posts(id), created_at timestamp not null default now() ) `, ); await db.execute( sql` create table comment_likes ( id serial primary key, creator int references users(id), comment_id int references comments(id), created_at timestamp not null default now() ) `, ); }); test('batch api example', async (ctx) => { const { db } = ctx.neonPg; const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id, invitedBy: usersTable.invitedBy, }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.select().from(usersTable), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; invitedBy: number | null; }[], NeonHttpQueryResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(3); expect(batchResponse[0]).toEqual([{ id: 1, invitedBy: null, }]); expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); }); // batch api only relational many test('insert + findMany', async (ctx) => { const { db } = ctx.neonPg; const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], NeonHttpQueryResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(3); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); }); // batch api relational many + one test('insert + findMany + findFirst', async (ctx) => { const { db } = ctx.neonPg; const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), db.query.usersTable.findFirst({}), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], NeonHttpQueryResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; } | undefined, ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toEqual( { id: 1, name: 'John', verified: 0, invitedBy: null }, ); }); test('insert + db.execute', async (ctx) => { const { db } = ctx.neonPg; const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.execute(sql`insert into users (id, name) values (2, 'Dan')`), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], NeonHttpQueryResult>, ]>(); expect(batchResponse.length).eq(2); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toMatchObject({ rowAsArray: false, rows: [], rowCount: 1 }); }); // batch api combined rqb + raw call test('insert + findManyWith + db.all', async (ctx) => { const { db } = ctx.neonPg; const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), db.execute(sql`select * from users`), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], NeonHttpQueryResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], NeonHttpQueryResult<{ id: number; name: string; verified: number; invitedBy: number | null; }>, ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toMatchObject({ rowAsArray: true, rows: [], rowCount: 1 }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toMatchObject({ rows: [ { id: 1, name: 'John', verified: 0, invited_by: null }, { id: 2, name: 'Dan', verified: 0, invited_by: null }, ], }); }); // batch api for insert + update + select test('insert + update + select + select partial', async (ctx) => { const { db } = ctx.neonPg; const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)), db.query.usersTable.findMany({}), db.select().from(usersTable).where(eq(usersTable.id, 1)), db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from(usersTable), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], NeonHttpQueryResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(5); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toEqual([ { id: 1, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[4]).toEqual([ { id: 1, invitedBy: null }, ]); }); // batch api for insert + delete + select test('insert + delete + select + select partial', async (ctx) => { const { db } = ctx.neonPg; const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.delete(usersTable).where(eq(usersTable.id, 1)).returning({ id: usersTable.id, invitedBy: usersTable.invitedBy, }), db.query.usersTable.findFirst({ columns: { id: true, invitedBy: true, }, }), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], NeonHttpQueryResult, { id: number; invitedBy: number | null; }[], { id: number; invitedBy: number | null; } | undefined, ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toMatchObject({ rows: [], rowCount: 1 }); expect(batchResponse[2]).toEqual([ { id: 1, invitedBy: null }, ]); expect(batchResponse[3]).toEqual( { id: 2, invitedBy: null }, ); }); test('select raw', async (ctx) => { const { db } = ctx.neonPg; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Dan' }]); const batchResponse = await db.batch([ db.execute>(sql`select * from users`), db.execute>(sql`select * from users where id = 1`), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ NeonHttpQueryResult<{ id: number; name: string; verified: number; invited_by: number | null; }>, NeonHttpQueryResult<{ id: number; name: string; verified: number; invited_by: number | null; }>, ]>(); expect(batchResponse.length).eq(2); expect(batchResponse[0]).toMatchObject({ rows: [ { id: 1, name: 'John', verified: 0, invited_by: null }, { id: 2, name: 'Dan', verified: 0, invited_by: null }, ], }); expect(batchResponse[1]).toMatchObject({ rows: [ { id: 1, name: 'John', verified: 0, invited_by: null }, ], }); }); }); } ================================================ FILE: integration-tests/tests/pg/neon-http.test.ts ================================================ import { neon, neonConfig, type NeonQueryFunction } from '@neondatabase/serverless'; import { eq, sql } from 'drizzle-orm'; import { drizzle, type NeonHttpDatabase } from 'drizzle-orm/neon-http'; import { migrate } from 'drizzle-orm/neon-http/migrator'; import { pgMaterializedView, pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; import { beforeAll, beforeEach, describe, expect, test, vi } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { tests, usersMigratorTable, usersTable } from './pg-common'; import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; const ENABLE_LOGGING = false; let db: NeonHttpDatabase; let dbGlobalCached: NeonHttpDatabase; let cachedDb: NeonHttpDatabase; beforeAll(async () => { const connectionString = process.env['NEON_HTTP_CONNECTION_STRING']; if (!connectionString) { throw new Error('NEON_CONNECTION_STRING is not defined'); } neonConfig.fetchEndpoint = (host) => { const [protocol, port] = host === 'db.localtest.me' ? ['http', 4444] : ['https', 443]; return `${protocol}://${host}:${port}/sql`; }; const client = neon(connectionString); db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache(), }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache(), }); }); beforeEach((ctx) => { ctx.pg = { db, }; ctx.cachedPg = { db: cachedDb, dbGlobalCached, }; }); test('migrator : default migration strategy', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); test('migrator : migrate with custom schema', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: 'custom_migrations' }); // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from custom_migrations."__drizzle_migrations";`); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table custom_migrations."__drizzle_migrations"`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); test('migrator : migrate with custom table and custom schema', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable, migrationsSchema: 'custom_migrations', }); // test if the custom migrations table was created const { rowCount } = await db.execute( sql`select * from custom_migrations.${sql.identifier(customTable)};`, ); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table custom_migrations.${sql.identifier(customTable)}`); }); test('all date and time columns without timezone first case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format without timezone in it await db.insert(table).values([ { timestamp: '2022-01-01 02:00:00.123456' }, ]); // 2, Select in string format and check that values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); // 3. Select as raw query and check that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone second case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: '2022-01-01T02:00:00.123456-02' }, ]); // 2, Select as raw query and check that values are the same const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone third case mode date', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) not null ) `); const insertedDate = new Date('2022-01-01 20:00:00.123+04'); // 1. Insert date as new date await db.insert(table).values([ { timestamp: insertedDate }, ]); // 2, Select as raw query as string const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode date for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); const timestampString = new Date('2022-01-01 00:00:00.456-0200'); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: timestampString }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in UTC timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to UTC await db.execute(sql`set time zone 'UTC'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); test.skip('test mode string for timestamp with timezone in different timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to HST (UTC - 10) await db.execute(sql`set time zone 'HST'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-1000'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456+00' }]); await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); skipTests([ 'migrator : default migration strategy', 'migrator : migrate with custom schema', 'migrator : migrate with custom table', 'migrator : migrate with custom table and custom schema', 'insert via db.execute + select via db.execute', 'insert via db.execute + returning', 'insert via db.execute w/ query builder', 'all date and time columns without timezone first case mode string', 'all date and time columns without timezone third case mode date', 'test mode string for timestamp with timezone', 'test mode date for timestamp with timezone', 'test mode string for timestamp with timezone in UTC timezone', 'nested transaction rollback', 'transaction rollback', 'nested transaction', 'transaction', 'timestamp timezone', 'test $onUpdateFn and $onUpdate works as $default', ]); tests(); cacheTests(); beforeEach(async () => { await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`create schema public`); await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); }); test('insert via db.execute + select via db.execute', async () => { await db.execute( sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, ); const result = await db.execute<{ id: number; name: string }>( sql`select id, name from "users"`, ); expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute + returning', async () => { const inserted = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${ sql.identifier( usersTable.name.name, ) }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute>( db .insert(usersTable) .values({ name: 'John' }) .returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); describe('$withAuth tests', (it) => { const client = vi.fn(); const db = drizzle({ client: client as any as NeonQueryFunction, schema: { usersTable, }, }); it('$count', async () => { await db.$withAuth('$count').$count(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: '$count' }); }); it('delete', async () => { await db.$withAuth('delete').delete(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: 'delete' }); }); it('select', async () => { await db.$withAuth('select').select().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: true, fullResults: true, authToken: 'select' }); }); it('selectDistinct', async () => { await db.$withAuth('selectDistinct').selectDistinct().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: true, fullResults: true, authToken: 'selectDistinct', }); }); it('selectDistinctOn', async () => { await db.$withAuth('selectDistinctOn').selectDistinctOn([usersTable.name]).from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: true, fullResults: true, authToken: 'selectDistinctOn', }); }); it('update', async () => { await db.$withAuth('update').update(usersTable).set({ name: 'CHANGED', }).where(eq(usersTable.name, 'TARGET')).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: 'update' }); }); it('insert', async () => { await db.$withAuth('insert').insert(usersTable).values({ name: 'WITHAUTHUSER', }).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: 'insert' }); }); it('with', async () => { await db.$withAuth('with').with(db.$with('WITH').as((qb) => qb.select().from(usersTable))).select().from(usersTable) .catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: true, fullResults: true, authToken: 'with' }); }); it('rqb', async () => { await db.$withAuth('rqb').query.usersTable.findFirst().catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: true, fullResults: true, authToken: 'rqb' }); }); it('exec', async () => { await db.$withAuth('exec').execute(`SELECT 1`).catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: 'exec' }); }); it('prepared', async () => { const prep = db.$withAuth('prepared').select().from(usersTable).prepare('withAuthPrepared'); await prep.execute().catch(() => null); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: true, fullResults: true, authToken: 'prepared' }); }); it('refreshMaterializedView', async () => { const johns = pgMaterializedView('johns') .as((qb) => qb.select().from(usersTable).where(eq(usersTable.name, 'John'))); await db.$withAuth('refreshMaterializedView').refreshMaterializedView(johns); expect(client.mock.lastCall?.[2]).toStrictEqual({ arrayMode: false, fullResults: true, authToken: 'refreshMaterializedView', }); }); }); describe('$withAuth callback tests', (it) => { const client = vi.fn(); const db = drizzle({ client: client as any as NeonQueryFunction, schema: { usersTable, }, }); const auth = (token: string) => () => token; it('$count', async () => { await db.$withAuth(auth('$count')).$count(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('$count'); }); it('delete', async () => { await db.$withAuth(auth('delete')).delete(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('delete'); }); it('select', async () => { await db.$withAuth(auth('select')).select().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('select'); }); it('selectDistinct', async () => { await db.$withAuth(auth('selectDistinct')).selectDistinct().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('selectDistinct'); }); it('selectDistinctOn', async () => { await db.$withAuth(auth('selectDistinctOn')).selectDistinctOn([usersTable.name]).from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('selectDistinctOn'); }); it('update', async () => { await db.$withAuth(auth('update')).update(usersTable).set({ name: 'CHANGED', }).where(eq(usersTable.name, 'TARGET')).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('update'); }); it('insert', async () => { await db.$withAuth(auth('insert')).insert(usersTable).values({ name: 'WITHAUTHUSER', }).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('insert'); }); it('with', async () => { await db.$withAuth(auth('with')).with(db.$with('WITH').as((qb) => qb.select().from(usersTable))).select().from( usersTable, ) .catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('with'); }); it('rqb', async () => { await db.$withAuth(auth('rqb')).query.usersTable.findFirst().catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('rqb'); }); it('exec', async () => { await db.$withAuth(auth('exec')).execute(`SELECT 1`).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('exec'); }); it('prepared', async () => { const prep = db.$withAuth(auth('prepared')).select().from(usersTable).prepare('withAuthPrepared'); await prep.execute().catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('prepared'); }); it('refreshMaterializedView', async () => { const johns = pgMaterializedView('johns') .as((qb) => qb.select().from(usersTable).where(eq(usersTable.name, 'John'))); await db.$withAuth(auth('refreshMaterializedView')).refreshMaterializedView(johns); expect(client.mock.lastCall?.[2]['authToken']()).toStrictEqual('refreshMaterializedView'); }); }); describe('$withAuth async callback tests', (it) => { const client = vi.fn(); const db = drizzle({ client: client as any as NeonQueryFunction, schema: { usersTable, }, }); const auth = (token: string) => async () => token; it('$count', async () => { await db.$withAuth(auth('$count')).$count(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('$count'); }); it('delete', async () => { await db.$withAuth(auth('delete')).delete(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('delete'); }); it('select', async () => { await db.$withAuth(auth('select')).select().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('select'); }); it('selectDistinct', async () => { await db.$withAuth(auth('selectDistinct')).selectDistinct().from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('selectDistinct'); }); it('selectDistinctOn', async () => { await db.$withAuth(auth('selectDistinctOn')).selectDistinctOn([usersTable.name]).from(usersTable).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('selectDistinctOn'); }); it('update', async () => { await db.$withAuth(auth('update')).update(usersTable).set({ name: 'CHANGED', }).where(eq(usersTable.name, 'TARGET')).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('update'); }); it('insert', async () => { await db.$withAuth(auth('insert')).insert(usersTable).values({ name: 'WITHAUTHUSER', }).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('insert'); }); it('with', async () => { await db.$withAuth(auth('with')).with(db.$with('WITH').as((qb) => qb.select().from(usersTable))).select().from( usersTable, ) .catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('with'); }); it('rqb', async () => { await db.$withAuth(auth('rqb')).query.usersTable.findFirst().catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('rqb'); }); it('exec', async () => { await db.$withAuth(auth('exec')).execute(`SELECT 1`).catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('exec'); }); it('prepared', async () => { const prep = db.$withAuth(auth('prepared')).select().from(usersTable).prepare('withAuthPrepared'); await prep.execute().catch(() => null); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('prepared'); }); it('refreshMaterializedView', async () => { const johns = pgMaterializedView('johns') .as((qb) => qb.select().from(usersTable).where(eq(usersTable.name, 'John'))); await db.$withAuth(auth('refreshMaterializedView')).refreshMaterializedView(johns); expect(client.mock.lastCall?.[2]['authToken']()).toBeInstanceOf(Promise); expect(await client.mock.lastCall?.[2]['authToken']()).toStrictEqual('refreshMaterializedView'); }); }); ================================================ FILE: integration-tests/tests/pg/neon-serverless.test.ts ================================================ import { neonConfig, Pool } from '@neondatabase/serverless'; import { eq, sql } from 'drizzle-orm'; import { drizzle, type NeonDatabase } from 'drizzle-orm/neon-serverless'; import { migrate } from 'drizzle-orm/neon-serverless/migrator'; import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import ws from 'ws'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { mySchema, tests, usersMigratorTable, usersMySchemaTable, usersTable } from './pg-common'; import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; const ENABLE_LOGGING = false; let db: NeonDatabase; let dbGlobalCached: NeonDatabase; let cachedDb: NeonDatabase; let client: Pool; neonConfig.wsProxy = (host) => `${host}:5446/v1`; neonConfig.useSecureWebSocket = false; neonConfig.pipelineTLS = false; neonConfig.pipelineConnect = false; neonConfig.webSocketConstructor = ws; beforeAll(async () => { const connectionString = process.env['NEON_SERVERLESS_CONNECTION_STRING']; if (!connectionString) { throw new Error('NEON_SERVERLESS_CONNECTION_STRING is not defined'); } client = new Pool({ connectionString }); db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache(), }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache(), }); }); afterAll(async () => { await client?.end(); }); beforeEach((ctx) => { ctx.pg = { db, }; ctx.cachedPg = { db: cachedDb, dbGlobalCached, }; }); test('migrator : default migration strategy', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); test('migrator : migrate with custom schema', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: 'custom_migrations' }); // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from custom_migrations."__drizzle_migrations";`); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table custom_migrations."__drizzle_migrations"`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); test('migrator : migrate with custom table and custom schema', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable, migrationsSchema: 'custom_migrations', }); // test if the custom migrations table was created const { rowCount } = await db.execute( sql`select * from custom_migrations.${sql.identifier(customTable)};`, ); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table custom_migrations.${sql.identifier(customTable)}`); }); test('all date and time columns without timezone first case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format without timezone in it await db.insert(table).values([ { timestamp: '2022-01-01 02:00:00.123456' }, ]); // 2, Select in string format and check that values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); // 3. Select as raw query and check that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone second case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: '2022-01-01T02:00:00.123456-02' }, ]); // 2, Select as raw query and check that values are the same const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone third case mode date', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) not null ) `); const insertedDate = new Date('2022-01-01 20:00:00.123+04'); // 1. Insert date as new date await db.insert(table).values([ { timestamp: insertedDate }, ]); // 2, Select as raw query as string const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode date for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); const timestampString = new Date('2022-01-01 00:00:00.456-0200'); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: timestampString }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in UTC timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to UTC await db.execute(sql`set time zone 'UTC'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); test.skip('test mode string for timestamp with timezone in different timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to HST (UTC - 10) await db.execute(sql`set time zone 'HST'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-1000'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456+00' }]); await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); test('select all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(3000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('update with returning all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const users = await db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); expect(users).toEqual([ { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, ]); }); test('delete with returning all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); expect(users).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, ]); }); test('mySchema :: select all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersMySchemaTable).values({ name: 'John' }); const result = await db.select().from(usersMySchemaTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(3000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('mySchema :: delete with returning all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(3000); expect(users).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); skipTests([ 'migrator : default migration strategy', 'migrator : migrate with custom schema', 'migrator : migrate with custom table', 'migrator : migrate with custom table and custom schema', 'insert via db.execute + select via db.execute', 'insert via db.execute + returning', 'insert via db.execute w/ query builder', 'all date and time columns without timezone first case mode string', 'all date and time columns without timezone third case mode date', 'test mode string for timestamp with timezone', 'test mode date for timestamp with timezone', 'test mode string for timestamp with timezone in UTC timezone', 'nested transaction rollback', 'transaction rollback', 'nested transaction', 'transaction', 'timestamp timezone', 'test $onUpdateFn and $onUpdate works as $default', 'select all fields', 'update with returning all fields', 'delete with returning all fields', 'mySchema :: select all fields', 'mySchema :: delete with returning all fields', ]); tests(); cacheTests(); beforeEach(async () => { await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`drop schema if exists ${mySchema} cascade`); await db.execute(sql`create schema public`); await db.execute(sql`create schema ${mySchema}`); await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); await db.execute( sql` create table ${usersMySchemaTable} ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); }); test('insert via db.execute + select via db.execute', async () => { await db.execute( sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, ); const result = await db.execute<{ id: number; name: string }>( sql`select id, name from "users"`, ); expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute + returning', async () => { const inserted = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${ sql.identifier( usersTable.name.name, ) }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute>( db .insert(usersTable) .values({ name: 'John' }) .returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); ================================================ FILE: integration-tests/tests/pg/node-postgres.test.ts ================================================ import retry from 'async-retry'; import { sql } from 'drizzle-orm'; import type { NodePgDatabase } from 'drizzle-orm/node-postgres'; import { drizzle } from 'drizzle-orm/node-postgres'; import { migrate } from 'drizzle-orm/node-postgres/migrator'; import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; import { Client } from 'pg'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { createDockerDB, tests, usersMigratorTable, usersTable } from './pg-common'; import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; const ENABLE_LOGGING = false; let db: NodePgDatabase; let client: Client; let dbGlobalCached: NodePgDatabase; let cachedDb: NodePgDatabase; beforeAll(async () => { let connectionString; if (process.env['PG_CONNECTION_STRING']) { connectionString = process.env['PG_CONNECTION_STRING']; } else { const { connectionString: conStr } = await createDockerDB(); connectionString = conStr; } client = await retry(async () => { client = new Client(connectionString); await client.connect(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); afterAll(async () => { await client?.end(); }); beforeEach((ctx) => { ctx.pg = { db, }; ctx.cachedPg = { db: cachedDb, dbGlobalCached, }; }); test('migrator : default migration strategy', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); test('migrator : migrate with custom schema', async () => { const customSchema = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); test('migrator : migrate with custom table and custom schema', async () => { const customTable = randomString(); const customSchema = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable, migrationsSchema: customSchema, }); // test if the custom migrations table was created const { rowCount } = await db.execute( sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, ); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); }); test('all date and time columns without timezone first case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format without timezone in it await db.insert(table).values([ { timestamp: '2022-01-01 02:00:00.123456' }, ]); // 2, Select in string format and check that values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); // 3. Select as raw query and check that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone second case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: '2022-01-01T02:00:00.123456-02' }, ]); // 2, Select as raw query and check that values are the same const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone third case mode date', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) not null ) `); const insertedDate = new Date('2022-01-01 20:00:00.123+04'); // 1. Insert date as new date await db.insert(table).values([ { timestamp: insertedDate }, ]); // 2, Select as raw query as string const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode date for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); const timestampString = new Date('2022-01-01 00:00:00.456-0200'); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: timestampString }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in UTC timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to UTC await db.execute(sql`set time zone 'UTC'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in different timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to HST (UTC - 10) await db.execute(sql`set time zone '-10'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-1000'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); skipTests([ 'migrator : default migration strategy', 'migrator : migrate with custom schema', 'migrator : migrate with custom table', 'migrator : migrate with custom table and custom schema', 'insert via db.execute + select via db.execute', 'insert via db.execute + returning', 'insert via db.execute w/ query builder', 'all date and time columns without timezone first case mode string', 'all date and time columns without timezone third case mode date', 'test mode string for timestamp with timezone', 'test mode date for timestamp with timezone', 'test mode string for timestamp with timezone in UTC timezone', 'test mode string for timestamp with timezone in different timezone', ]); tests(); cacheTests(); beforeEach(async () => { await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`create schema public`); await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); }); test('insert via db.execute + select via db.execute', async () => { await db.execute( sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, ); const result = await db.execute<{ id: number; name: string }>( sql`select id, name from "users"`, ); expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute + returning', async () => { const inserted = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${ sql.identifier( usersTable.name.name, ) }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute>( db .insert(usersTable) .values({ name: 'John' }) .returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); ================================================ FILE: integration-tests/tests/pg/pg-common-cache.ts ================================================ import type Docker from 'dockerode'; import { eq, getTableName, is, sql, Table } from 'drizzle-orm'; import type { MutationOption } from 'drizzle-orm/cache/core'; import { Cache } from 'drizzle-orm/cache/core'; import type { CacheConfig } from 'drizzle-orm/cache/core/types'; import type { PgDatabase, PgQueryResultHKT } from 'drizzle-orm/pg-core'; import { alias, boolean, integer, jsonb, pgTable, serial, text, timestamp } from 'drizzle-orm/pg-core'; import Keyv from 'keyv'; import { afterAll, beforeEach, describe, expect, test, vi } from 'vitest'; // eslint-disable-next-line drizzle-internal/require-entity-kind export class TestGlobalCache extends Cache { private globalTtl: number = 1000; private usedTablesPerKey: Record = {}; constructor(private kv: Keyv = new Keyv()) { super(); } override strategy(): 'explicit' | 'all' { return 'all'; } override async get(key: string, _tables: string[], _isTag: boolean): Promise { const res = await this.kv.get(key) ?? undefined; return res; } override async put( key: string, response: any, tables: string[], isTag: boolean, config?: CacheConfig, ): Promise { await this.kv.set(key, response, config ? config.ex : this.globalTtl); for (const table of tables) { const keys = this.usedTablesPerKey[table]; if (keys === undefined) { this.usedTablesPerKey[table] = [key]; } else { keys.push(key); } } } override async onMutate(params: MutationOption): Promise { const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; const keysToDelete = new Set(); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; const keys = this.usedTablesPerKey[tableName] ?? []; for (const key of keys) keysToDelete.add(key); } if (keysToDelete.size > 0 || tagsArray.length > 0) { for (const tag of tagsArray) { await this.kv.delete(tag); } for (const key of keysToDelete) { await this.kv.delete(key); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; this.usedTablesPerKey[tableName] = []; } } } } } // eslint-disable-next-line drizzle-internal/require-entity-kind export class TestCache extends TestGlobalCache { override strategy(): 'explicit' | 'all' { return 'explicit'; } } declare module 'vitest' { interface TestContext { cachedPg: { db: PgDatabase; dbGlobalCached: PgDatabase; }; } } const usersTable = pgTable('users', { id: serial().primaryKey(), name: text().notNull(), verified: boolean().notNull().default(false), jsonb: jsonb().$type(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); const postsTable = pgTable('posts', { id: serial().primaryKey(), description: text().notNull(), userId: integer('city_id').references(() => usersTable.id), }); let pgContainer: Docker.Container; afterAll(async () => { await pgContainer?.stop().catch(console.error); }); export function tests() { describe('common', () => { beforeEach(async (ctx) => { const { db, dbGlobalCached } = ctx.cachedPg; await db.execute(sql`drop schema if exists public cascade`); await db.$cache?.invalidate({ tables: 'users' }); await dbGlobalCached.$cache?.invalidate({ tables: 'users' }); await db.execute(sql`create schema public`); // public users await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); }); test('test force invalidate', async (ctx) => { const { db } = ctx.cachedPg; const spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); await db.$cache?.invalidate({ tables: 'users' }); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('default global config - no cache should be hit', async (ctx) => { const { db } = ctx.cachedPg; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('default global config + enable cache on select: get, put', async (ctx) => { const { db } = ctx.cachedPg; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('default global config + enable cache on select + write: get, put, onMutate', async (ctx) => { const { db } = ctx.cachedPg; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); spyPut.mockClear(); spyGet.mockClear(); spyInvalidate.mockClear(); await db.insert(usersTable).values({ name: 'John' }); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('default global config + enable cache on select + disable invalidate: get, put', async (ctx) => { const { db } = ctx.cachedPg; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); }); test('global: true + disable cache', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedPg; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - cache should be hit', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedPg; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - cache: false on select - no cache hit', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedPg; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - disable invalidate - cache hit + no invalidate', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedPg; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ autoInvalidate: false }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); spyPut.mockClear(); spyGet.mockClear(); spyInvalidate.mockClear(); await db.insert(usersTable).values({ name: 'John' }); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('global: true - with custom tag', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedPg; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); }); test('global: true - with custom tag + with autoinvalidate', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedPg; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom' }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); await db.insert(usersTable).values({ name: 'John' }); expect(spyInvalidate).toHaveBeenCalledTimes(1); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); }); // check select used tables test('check simple select used tables', (ctx) => { const { db } = ctx.cachedPg; // @ts-expect-error expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users']); // @ts-expect-error expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users']); }); // check select+join used tables test('select+join', (ctx) => { const { db } = ctx.cachedPg; // @ts-expect-error expect(db.select().from(usersTable).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables()) .toStrictEqual(['users', 'posts']); expect( // @ts-expect-error db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables(), ).toStrictEqual(['users', 'posts']); }); // check select+2join used tables test('select+2joins', (ctx) => { const { db } = ctx.cachedPg; expect( db.select().from(usersTable).leftJoin( postsTable, eq(usersTable.id, postsTable.userId), ).leftJoin( alias(postsTable, 'post2'), eq(usersTable.id, postsTable.userId), ) // @ts-expect-error .getUsedTables(), ) .toStrictEqual(['users', 'posts']); expect( db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).leftJoin( alias(postsTable, 'post2'), eq(usersTable.id, postsTable.userId), // @ts-expect-error ).getUsedTables(), ).toStrictEqual(['users', 'posts']); }); // select subquery used tables test('select+join', (ctx) => { const { db } = ctx.cachedPg; const sq = db.select().from(usersTable).where(eq(usersTable.id, 42)).as('sq'); db.select().from(sq); // @ts-expect-error expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users']); }); }); } ================================================ FILE: integration-tests/tests/pg/pg-common.ts ================================================ import Docker from 'dockerode'; // eslint-disable-next-line @typescript-eslint/consistent-type-imports import { and, arrayContained, arrayContains, arrayOverlaps, asc, avg, avgDistinct, count, countDistinct, eq, Equal, exists, getTableColumns, gt, gte, ilike, inArray, is, like, lt, max, min, not, notInArray, or, SQL, sql, SQLWrapper, sum, sumDistinct, TransactionRollbackError, } from 'drizzle-orm'; import { authenticatedRole, crudPolicy, usersSync } from 'drizzle-orm/neon'; import type { NeonHttpDatabase } from 'drizzle-orm/neon-http'; import type { PgColumn, PgDatabase, PgQueryResultHKT } from 'drizzle-orm/pg-core'; import { alias, bigint, bigserial, boolean, char, cidr, date, doublePrecision, except, exceptAll, foreignKey, getMaterializedViewConfig, getTableConfig, getViewConfig, index, inet, integer, intersect, intersectAll, interval, json, jsonb, line, macaddr, macaddr8, numeric, PgDialect, pgEnum, pgMaterializedView, PgPolicy, pgPolicy, pgSchema, pgTable, pgTableCreator, pgView, point, primaryKey, real, serial, smallint, smallserial, text, time, timestamp, union, unionAll, unique, uniqueKeyName, uuid, uuid as pgUuid, varchar, } from 'drizzle-orm/pg-core'; import getPort from 'get-port'; import { v4 as uuidV4 } from 'uuid'; import { afterAll, afterEach, beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; import { Expect } from '~/utils'; import type { schema } from './neon-http-batch.test'; // eslint-disable-next-line @typescript-eslint/no-import-type-side-effects // import { type NodePgDatabase } from 'drizzle-orm/node-postgres'; declare module 'vitest' { interface TestContext { pg: { db: PgDatabase; }; neonPg: { db: NeonHttpDatabase; }; } } const en = pgEnum('en', ['enVal1', 'enVal2']); const allTypesTable = pgTable('all_types', { serial: serial('serial'), bigserial53: bigserial('bigserial53', { mode: 'number', }), bigserial64: bigserial('bigserial64', { mode: 'bigint', }), int: integer('int'), bigint53: bigint('bigint53', { mode: 'number', }), bigint64: bigint('bigint64', { mode: 'bigint', }), bool: boolean('bool'), char: char('char'), cidr: cidr('cidr'), date: date('date', { mode: 'date', }), dateStr: date('date_str', { mode: 'string', }), double: doublePrecision('double'), enum: en('enum'), inet: inet('inet'), interval: interval('interval'), json: json('json'), jsonb: jsonb('jsonb'), line: line('line', { mode: 'abc', }), lineTuple: line('line_tuple', { mode: 'tuple', }), macaddr: macaddr('macaddr'), macaddr8: macaddr8('macaddr8'), numeric: numeric('numeric'), numericNum: numeric('numeric_num', { mode: 'number', }), numericBig: numeric('numeric_big', { mode: 'bigint', }), point: point('point', { mode: 'xy', }), pointTuple: point('point_tuple', { mode: 'tuple', }), real: real('real'), smallint: smallint('smallint'), smallserial: smallserial('smallserial'), text: text('text'), time: time('time'), timestamp: timestamp('timestamp', { mode: 'date', }), timestampTz: timestamp('timestamp_tz', { mode: 'date', withTimezone: true, }), timestampStr: timestamp('timestamp_str', { mode: 'string', }), timestampTzStr: timestamp('timestamp_tz_str', { mode: 'string', withTimezone: true, }), uuid: uuid('uuid'), varchar: varchar('varchar'), arrint: integer('arrint').array(), arrbigint53: bigint('arrbigint53', { mode: 'number', }).array(), arrbigint64: bigint('arrbigint64', { mode: 'bigint', }).array(), arrbool: boolean('arrbool').array(), arrchar: char('arrchar').array(), arrcidr: cidr('arrcidr').array(), arrdate: date('arrdate', { mode: 'date', }).array(), arrdateStr: date('arrdate_str', { mode: 'string', }).array(), arrdouble: doublePrecision('arrdouble').array(), arrenum: en('arrenum').array(), arrinet: inet('arrinet').array(), arrinterval: interval('arrinterval').array(), arrjson: json('arrjson').array(), arrjsonb: jsonb('arrjsonb').array(), arrline: line('arrline', { mode: 'abc', }).array(), arrlineTuple: line('arrline_tuple', { mode: 'tuple', }).array(), arrmacaddr: macaddr('arrmacaddr').array(), arrmacaddr8: macaddr8('arrmacaddr8').array(), arrnumeric: numeric('arrnumeric').array(), arrnumericNum: numeric('arrnumeric_num', { mode: 'number', }).array(), arrnumericBig: numeric('arrnumeric_big', { mode: 'bigint', }).array(), arrpoint: point('arrpoint', { mode: 'xy', }).array(), arrpointTuple: point('arrpoint_tuple', { mode: 'tuple', }).array(), arrreal: real('arrreal').array(), arrsmallint: smallint('arrsmallint').array(), arrtext: text('arrtext').array(), arrtime: time('arrtime').array(), arrtimestamp: timestamp('arrtimestamp', { mode: 'date', }).array(), arrtimestampTz: timestamp('arrtimestamp_tz', { mode: 'date', withTimezone: true, }).array(), arrtimestampStr: timestamp('arrtimestamp_str', { mode: 'string', }).array(), arrtimestampTzStr: timestamp('arrtimestamp_tz_str', { mode: 'string', withTimezone: true, }).array(), arruuid: uuid('arruuid').array(), arrvarchar: varchar('arrvarchar').array(), }); export const usersTable = pgTable('users', { id: serial('id' as string).primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: jsonb('jsonb').$type(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); const usersOnUpdate = pgTable('users_on_update', { id: serial('id').primaryKey(), name: text('name').notNull(), updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), updatedAt: timestamp('updated_at', { mode: 'date', precision: 3 }).$onUpdate(() => new Date()), alwaysNull: text('always_null').$type().$onUpdate(() => null), // uppercaseName: text('uppercase_name').$onUpdateFn(() => sql`upper(name)`), looks like this is not supported in pg }); const citiesTable = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), state: char('state', { length: 2 }), }); const cities2Table = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const users2Table = pgTable('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').references(() => citiesTable.id), }); const coursesTable = pgTable('courses', { id: serial('id').primaryKey(), name: text('name').notNull(), categoryId: integer('category_id').references(() => courseCategoriesTable.id), }); const courseCategoriesTable = pgTable('course_categories', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const orders = pgTable('orders', { id: serial('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull().$default(() => 'random_string'), amount: integer('amount').notNull(), quantity: integer('quantity').notNull(), }); const network = pgTable('network_table', { inet: inet('inet').notNull(), cidr: cidr('cidr').notNull(), macaddr: macaddr('macaddr').notNull(), macaddr8: macaddr8('macaddr8').notNull(), }); const salEmp = pgTable('sal_emp', { name: text('name'), payByQuarter: integer('pay_by_quarter').array(), schedule: text('schedule').array().array(), }); const _tictactoe = pgTable('tictactoe', { squares: integer('squares').array(3).array(3), }); export const usersMigratorTable = pgTable('users12', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }); // To test aggregate functions const aggregateTable = pgTable('aggregate_table', { id: serial('id').notNull(), name: text('name').notNull(), a: integer('a'), b: integer('b'), c: integer('c'), nullOnly: integer('null_only'), }); // To test another schema and multischema export const mySchema = pgSchema('mySchema'); export const usersMySchemaTable = mySchema.table('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: jsonb('jsonb').$type(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); const citiesMySchemaTable = mySchema.table('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), state: char('state', { length: 2 }), }); const users2MySchemaTable = mySchema.table('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').references(() => citiesTable.id), }); const jsonTestTable = pgTable('jsontest', { id: serial('id').primaryKey(), json: json('json').$type<{ string: string; number: number }>(), jsonb: jsonb('jsonb').$type<{ string: string; number: number }>(), }); let pgContainer: Docker.Container; export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { const docker = new Docker(); const port = await getPort({ port: 5432 }); const image = 'postgres:14'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); pgContainer = await docker.createContainer({ Image: image, Env: ['POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres'], name: `drizzle-integration-tests-${uuidV4()}`, HostConfig: { AutoRemove: true, PortBindings: { '5432/tcp': [{ HostPort: `${port}` }], }, }, }); await pgContainer.start(); return { connectionString: `postgres://postgres:postgres@localhost:${port}/postgres`, container: pgContainer }; } afterAll(async () => { await pgContainer?.stop().catch(console.error); }); export function tests() { describe('common', () => { beforeEach(async (ctx) => { const { db } = ctx.pg; await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`drop schema if exists ${mySchema} cascade`); await db.execute(sql`create schema public`); await db.execute(sql`create schema if not exists custom_migrations`); await db.execute(sql`create schema ${mySchema}`); // public users await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); // public cities await db.execute( sql` create table cities ( id serial primary key, name text not null, state char(2) ) `, ); // public users2 await db.execute( sql` create table users2 ( id serial primary key, name text not null, city_id integer references cities(id) ) `, ); await db.execute( sql` create table course_categories ( id serial primary key, name text not null ) `, ); await db.execute( sql` create table courses ( id serial primary key, name text not null, category_id integer references course_categories(id) ) `, ); await db.execute( sql` create table orders ( id serial primary key, region text not null, product text not null, amount integer not null, quantity integer not null ) `, ); await db.execute( sql` create table network_table ( inet inet not null, cidr cidr not null, macaddr macaddr not null, macaddr8 macaddr8 not null ) `, ); await db.execute( sql` create table sal_emp ( name text not null, pay_by_quarter integer[] not null, schedule text[][] not null ) `, ); await db.execute( sql` create table tictactoe ( squares integer[3][3] not null ) `, ); // // mySchema users await db.execute( sql` create table ${usersMySchemaTable} ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); // mySchema cities await db.execute( sql` create table ${citiesMySchemaTable} ( id serial primary key, name text not null, state char(2) ) `, ); // mySchema users2 await db.execute( sql` create table ${users2MySchemaTable} ( id serial primary key, name text not null, city_id integer references "mySchema".cities(id) ) `, ); await db.execute( sql` create table jsontest ( id serial primary key, json json, jsonb jsonb ) `, ); }); afterEach(async (ctx) => { const { db } = ctx.pg; await db.execute(sql`drop schema if exists custom_migrations cascade`); }); async function setupSetOperationTest(db: PgDatabase) { await db.execute(sql`drop table if exists users2`); await db.execute(sql`drop table if exists cities`); await db.execute( sql` create table cities ( id serial primary key, name text not null ) `, ); await db.execute( sql` create table users2 ( id serial primary key, name text not null, city_id integer references cities(id) ) `, ); await db.insert(cities2Table).values([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await db.insert(users2Table).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 2 }, { id: 3, name: 'Jack', cityId: 3 }, { id: 4, name: 'Peter', cityId: 3 }, { id: 5, name: 'Ben', cityId: 2 }, { id: 6, name: 'Jill', cityId: 1 }, { id: 7, name: 'Mary', cityId: 2 }, { id: 8, name: 'Sally', cityId: 1 }, ]); } async function setupAggregateFunctionsTest(db: PgDatabase) { await db.execute(sql`drop table if exists "aggregate_table"`); await db.execute( sql` create table "aggregate_table" ( "id" serial not null, "name" text not null, "a" integer, "b" integer, "c" integer, "null_only" integer ); `, ); await db.insert(aggregateTable).values([ { name: 'value 1', a: 5, b: 10, c: 20 }, { name: 'value 1', a: 5, b: 20, c: 30 }, { name: 'value 2', a: 10, b: 50, c: 60 }, { name: 'value 3', a: 20, b: 20, c: null }, { name: 'value 4', a: null, b: 90, c: 120 }, { name: 'value 5', a: 80, b: 10, c: null }, { name: 'value 6', a: null, b: null, c: 150 }, ]); } test('table configs: unique third param', async () => { const cities1Table = pgTable('cities1', { id: serial('id').primaryKey(), name: text('name').notNull(), state: char('state', { length: 2 }), }, (t) => ({ f: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), f1: unique('custom_name1').on(t.name, t.state), })); const tableConfig = getTableConfig(cities1Table); expect(tableConfig.uniqueConstraints).toHaveLength(2); expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); expect(tableConfig.uniqueConstraints[0]?.nullsNotDistinct).toBe(true); expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); expect(tableConfig.uniqueConstraints[1]?.nullsNotDistinct).toBe(false); expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); }); test('table configs: unique in column', async () => { const cities1Table = pgTable('cities1', { id: serial('id').primaryKey(), name: text('name').notNull().unique(), state: char('state', { length: 2 }).unique('custom'), field: char('field', { length: 2 }).unique('custom_field', { nulls: 'not distinct' }), }); const tableConfig = getTableConfig(cities1Table); const columnName = tableConfig.columns.find((it) => it.name === 'name'); expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); expect(columnName?.isUnique).toBe(true); const columnState = tableConfig.columns.find((it) => it.name === 'state'); expect(columnState?.uniqueName).toBe('custom'); expect(columnState?.isUnique).toBe(true); const columnField = tableConfig.columns.find((it) => it.name === 'field'); expect(columnField?.uniqueName).toBe('custom_field'); expect(columnField?.isUnique).toBe(true); expect(columnField?.uniqueType).toBe('not distinct'); }); test('table config: foreign keys name', async () => { const table = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), })); const tableConfig = getTableConfig(table); expect(tableConfig.foreignKeys).toHaveLength(1); expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); }); test('table config: primary keys name', async () => { const table = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), })); const tableConfig = getTableConfig(table); expect(tableConfig.primaryKeys).toHaveLength(1); expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); }); test('select all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(300); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('select sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select typed sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select with empty array in inArray', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(inArray(usersTable.id, [])); expect(result).toEqual([]); }); test('select with empty array in notInArray', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(notInArray(usersTable.id, [])); expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); }); test('$default function', async (ctx) => { const { db } = ctx.pg; const insertedOrder = await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }) .returning(); const selectedOrder = await db.select().from(orders); expect(insertedOrder).toEqual([{ id: 1, amount: 1, quantity: 1, region: 'Ukraine', product: 'random_string', }]); expect(selectedOrder).toEqual([{ id: 1, amount: 1, quantity: 1, region: 'Ukraine', product: 'random_string', }]); }); test('select distinct', async (ctx) => { const { db } = ctx.pg; const usersDistinctTable = pgTable('users_distinct', { id: integer('id').notNull(), name: text('name').notNull(), age: integer('age').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); await db.execute(sql`create table ${usersDistinctTable} (id integer, name text, age integer)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John', age: 24 }, { id: 1, name: 'John', age: 24 }, { id: 2, name: 'John', age: 25 }, { id: 1, name: 'Jane', age: 24 }, { id: 1, name: 'Jane', age: 26 }, ]); const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ); const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( usersDistinctTable.id, ); const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( usersDistinctTable, ).orderBy(usersDistinctTable.name); const users4 = await db.selectDistinctOn([usersDistinctTable.id, usersDistinctTable.age]).from( usersDistinctTable, ).orderBy(usersDistinctTable.id, usersDistinctTable.age); await db.execute(sql`drop table ${usersDistinctTable}`); expect(users1).toEqual([ { id: 1, name: 'Jane', age: 24 }, { id: 1, name: 'Jane', age: 26 }, { id: 1, name: 'John', age: 24 }, { id: 2, name: 'John', age: 25 }, ]); expect(users2).toHaveLength(2); expect(users2[0]?.id).toBe(1); expect(users2[1]?.id).toBe(2); expect(users3).toHaveLength(2); expect(users3[0]?.name).toBe('Jane'); expect(users3[1]?.name).toBe('John'); expect(users4).toEqual([ { id: 1, name: 'John', age: 24 }, { id: 1, name: 'Jane', age: 26 }, { id: 2, name: 'John', age: 25 }, ]); }); test('insert returning sql', async (ctx) => { const { db } = ctx.pg; const users = await db .insert(usersTable) .values({ name: 'John' }) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('delete returning sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db .delete(usersTable) .where(eq(usersTable.name, 'John')) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('update returning sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JANE' }]); }); test('update with returning all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const users = await db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(300); expect(users).toEqual([ { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, ]); }); test('update with returning partial', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning({ id: usersTable.id, name: usersTable.name, }); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(300); expect(users).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, ]); }); test('delete with returning partial', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ id: usersTable.id, name: usersTable.name, }); expect(users).toEqual([{ id: 1, name: 'John' }]); }); test('insert + select', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }, ]); await db.insert(usersTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersTable); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('json insert', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db .select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, }) .from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); test('char insert', async (ctx) => { const { db } = ctx.pg; await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); const result = await db .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) .from(citiesTable); expect(result).toEqual([{ id: 1, name: 'Austin', state: 'TX' }]); }); test('char update', async (ctx) => { const { db } = ctx.pg; await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); await db.update(citiesTable).set({ name: 'Atlanta', state: 'GA' }).where(eq(citiesTable.id, 1)); const result = await db .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) .from(citiesTable); expect(result).toEqual([{ id: 1, name: 'Atlanta', state: 'GA' }]); }); test('char delete', async (ctx) => { const { db } = ctx.pg; await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); await db.delete(citiesTable).where(eq(citiesTable.state, 'TX')); const result = await db .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) .from(citiesTable); expect(result).toEqual([]); }); test('insert with overridden default values', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }, ]); }); test('insert many', async (ctx) => { const { db } = ctx.pg; await db .insert(usersTable) .values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); const result = await db .select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }) .from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('insert many with returning', async (ctx) => { const { db } = ctx.pg; const result = await db .insert(usersTable) .values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]) .returning({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('select with group by as field', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.name); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with exists', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const user = alias(usersTable, 'user'); const result = await db.select({ name: usersTable.name }).from(usersTable).where( exists( db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), ), ); expect(result).toEqual([{ name: 'John' }]); }); test('select with group by as sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(sql`${usersTable.name}`); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with group by as sql + column', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); test('select with group by as column + sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); test('select with group by complex query', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async (ctx) => { const { db } = ctx.pg; const query = db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', params: [], }); }); test('insert sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async (ctx) => { const { db } = ctx.pg; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }) .from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); expect(result).toEqual([ { user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }, ]); }); test('full join with alias', async (ctx) => { const { db } = ctx.pg; const pgTable = pgTableCreator((name) => `prefixed_${name}`); const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select() .from(users) .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('select from alias', async (ctx) => { const { db } = ctx.pg; const pgTable = pgTableCreator((name) => `prefixed_${name}`); const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select() .from(user) .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('insert with spaces', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const statement = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .prepare('statement1'); const result = await statement.execute(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('insert: placeholders on columns with encoder', async (ctx) => { const { db } = ctx.pg; const statement = db.insert(usersTable).values({ name: 'John', jsonb: sql.placeholder('jsonb'), }).prepare('encoder_statement'); await statement.execute({ jsonb: ['foo', 'bar'] }); const result = await db .select({ id: usersTable.id, jsonb: usersTable.jsonb, }) .from(usersTable); expect(result).toEqual([ { id: 1, jsonb: ['foo', 'bar'] }, ]); }); test('prepared statement reuse', async (ctx) => { const { db } = ctx.pg; const stmt = db .insert(usersTable) .values({ verified: true, name: sql.placeholder('name'), }) .prepare('stmt2'); for (let i = 0; i < 10; i++) { await stmt.execute({ name: `John ${i}` }); } const result = await db .select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified, }) .from(usersTable); expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, { id: 4, name: 'John 3', verified: true }, { id: 5, name: 'John 4', verified: true }, { id: 6, name: 'John 5', verified: true }, { id: 7, name: 'John 6', verified: true }, { id: 8, name: 'John 7', verified: true }, { id: 9, name: 'John 8', verified: true }, { id: 10, name: 'John 9', verified: true }, ]); }); test('prepared statement with placeholder in .where', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .prepare('stmt3'); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement with placeholder in .limit', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .limit(sql.placeholder('limit')) .prepare('stmt_limit'); const result = await stmt.execute({ id: 1, limit: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); expect(result).toHaveLength(1); }); test('prepared statement with placeholder in .offset', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .offset(sql.placeholder('offset')) .prepare('stmt_offset'); const result = await stmt.execute({ offset: 1 }); expect(result).toEqual([{ id: 2, name: 'John1' }]); }); test('prepared statement built using $dynamic', async (ctx) => { const { db } = ctx.pg; function withLimitOffset(qb: any) { return qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); } await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .$dynamic(); withLimitOffset(stmt).prepare('stmt_limit'); const result = await stmt.execute({ limit: 1, offset: 1 }); expect(result).toEqual([{ id: 2, name: 'John1' }]); expect(result).toHaveLength(1); }); // TODO change tests to new structure test('Query check: Insert all defaults in 1 row', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); const query = db .insert(users) .values({}) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "state") values (default, default, default)', params: [], }); }); test('Query check: Insert all defaults in multiple rows', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state').default('UA'), }); const query = db .insert(users) .values([{}, {}]) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "state") values (default, default, default), (default, default, default)', params: [], }); }); test('Insert all defaults in 1 row', async (ctx) => { const { db } = ctx.pg; const users = pgTable('empty_insert_single', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, ); await db.insert(users).values({}); const res = await db.select().from(users); expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); }); test('Insert all defaults in multiple rows', async (ctx) => { const { db } = ctx.pg; const users = pgTable('empty_insert_multiple', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, ); await db.insert(users).values([{}, {}]); const res = await db.select().from(users); expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); }); test('build query insert with onConflict do update', async (ctx) => { const { db } = ctx.pg; const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', params: ['John', '["foo","bar"]', 'John1'], }); }); test('build query insert with onConflict do update / multiple columns', async (ctx) => { const { db } = ctx.pg; const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', params: ['John', '["foo","bar"]', 'John1'], }); }); test('build query insert with onConflict do nothing', async (ctx) => { const { db } = ctx.pg; const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing() .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', params: ['John', '["foo","bar"]'], }); }); test('build query insert with onConflict do nothing + target', async (ctx) => { const { db } = ctx.pg; const query = db .insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing({ target: usersTable.id }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', params: ['John', '["foo","bar"]'], }); }); test('insert with onConflict do update', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); await db .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert with onConflict do nothing', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); await db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing(); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert with onConflict do nothing + target', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); await db .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoNothing({ target: usersTable.id }); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('left join (flat object fields)', async (ctx) => { const { db } = ctx.pg; const { id: cityId } = await db .insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]) .returning({ id: citiesTable.id }) .then((rows) => rows[0]!); await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); const res = await db .select({ userId: users2Table.id, userName: users2Table.name, cityId: citiesTable.id, cityName: citiesTable.name, }) .from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, { userId: 2, userName: 'Jane', cityId: null, cityName: null }, ]); }); test('left join (grouped fields)', async (ctx) => { const { db } = ctx.pg; const { id: cityId } = await db .insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]) .returning({ id: citiesTable.id }) .then((rows) => rows[0]!); await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); const res = await db .select({ id: users2Table.id, user: { name: users2Table.name, nameUpper: sql`upper(${users2Table.name})`, }, city: { id: citiesTable.id, name: citiesTable.name, nameUpper: sql`upper(${citiesTable.name})`, }, }) .from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { id: 1, user: { name: 'John', nameUpper: 'JOHN' }, city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, }, { id: 2, user: { name: 'Jane', nameUpper: 'JANE' }, city: null, }, ]); }); test('left join (all fields)', async (ctx) => { const { db } = ctx.pg; const { id: cityId } = await db .insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]) .returning({ id: citiesTable.id }) .then((rows) => rows[0]!); await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); const res = await db .select() .from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); expect(res).toEqual([ { users2: { id: 1, name: 'John', cityId, }, cities: { id: cityId, name: 'Paris', state: null, }, }, { users2: { id: 2, name: 'Jane', cityId: null, }, cities: null, }, ]); }); test('select from a many subquery', async (ctx) => { const { db } = ctx.pg; await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 2 }, ]); const res = await db.select({ population: db.select({ count: count().as('count') }).from(users2Table).where( eq(users2Table.cityId, citiesTable.id), ).as( 'population', ), name: citiesTable.name, }).from(citiesTable); expectTypeOf(res).toEqualTypeOf<{ population: number; name: string; }[]>(); expect(res).toStrictEqual([{ population: 1, name: 'Paris', }, { population: 2, name: 'London', }]); }); test('select from a one subquery', async (ctx) => { const { db } = ctx.pg; await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 2 }, ]); const res = await db.select({ cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id)) .as( 'cityName', ), name: users2Table.name, }).from(users2Table); expectTypeOf(res).toEqualTypeOf<{ cityName: string; name: string; }[]>(); expect(res).toStrictEqual([{ cityName: 'Paris', name: 'John', }, { cityName: 'London', name: 'Jane', }, { cityName: 'London', name: 'Jack', }]); }); test('join subquery', async (ctx) => { const { db } = ctx.pg; await db .insert(courseCategoriesTable) .values([ { name: 'Category 1' }, { name: 'Category 2' }, { name: 'Category 3' }, { name: 'Category 4' }, ]); await db .insert(coursesTable) .values([ { name: 'Development', categoryId: 2 }, { name: 'IT & Software', categoryId: 3 }, { name: 'Marketing', categoryId: 4 }, { name: 'Design', categoryId: 1 }, ]); const sq2 = db .select({ categoryId: courseCategoriesTable.id, category: courseCategoriesTable.name, total: sql`count(${courseCategoriesTable.id})`, }) .from(courseCategoriesTable) .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) .as('sq2'); const res = await db .select({ courseName: coursesTable.name, categoryId: sq2.categoryId, }) .from(coursesTable) .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) .orderBy(coursesTable.name); expect(res).toEqual([ { courseName: 'Design', categoryId: 1 }, { courseName: 'Development', categoryId: 2 }, { courseName: 'IT & Software', categoryId: 3 }, { courseName: 'Marketing', categoryId: 4 }, ]); }); test('with ... select', async (ctx) => { const { db } = ctx.pg; await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const regionalSales = db .$with('regional_sales') .as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = db .$with('top_regions') .as( db .select({ region: regionalSales.region, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result1 = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`sum(${orders.quantity})::int`, productSales: sql`sum(${orders.amount})::int`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product); const result2 = await db .with(regionalSales, topRegions) .selectDistinct({ region: orders.region, product: orders.product, productUnits: sql`sum(${orders.quantity})::int`, productSales: sql`sum(${orders.amount})::int`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product); const result3 = await db .with(regionalSales, topRegions) .selectDistinctOn([orders.region], { region: orders.region, productUnits: sql`sum(${orders.quantity})::int`, productSales: sql`sum(${orders.amount})::int`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region) .orderBy(orders.region); expect(result1).toEqual([ { region: 'Europe', product: 'A', productUnits: 3, productSales: 30, }, { region: 'Europe', product: 'B', productUnits: 5, productSales: 50, }, { region: 'US', product: 'A', productUnits: 7, productSales: 70, }, { region: 'US', product: 'B', productUnits: 9, productSales: 90, }, ]); expect(result2).toEqual(result1); expect(result3).toEqual([ { region: 'Europe', productUnits: 8, productSales: 80, }, { region: 'US', productUnits: 16, productSales: 160, }, ]); }); test('with ... update', async (ctx) => { const { db } = ctx.pg; const products = pgTable('products', { id: serial('id').primaryKey(), price: numeric('price').notNull(), cheap: boolean('cheap').notNull().default(false), }); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql` create table ${products} ( id serial primary key, price numeric not null, cheap boolean not null default false ) `); await db.insert(products).values([ { price: '10.99' }, { price: '25.85' }, { price: '32.99' }, { price: '2.50' }, { price: '4.59' }, ]); const averagePrice = db .$with('average_price') .as( db .select({ value: sql`avg(${products.price})`.as('value'), }) .from(products), ); const result = await db .with(averagePrice) .update(products) .set({ cheap: true, }) .where(lt(products.price, sql`(select * from ${averagePrice})`)) .returning({ id: products.id, }); expect(result).toEqual([ { id: 1 }, { id: 4 }, { id: 5 }, ]); }); test('with ... insert', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { username: text('username').notNull(), admin: boolean('admin').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (username text not null, admin boolean not null default false)`); const userCount = db .$with('user_count') .as( db .select({ value: sql`count(*)`.as('value'), }) .from(users), ); const result = await db .with(userCount) .insert(users) .values([ { username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }, ]) .returning({ admin: users.admin, }); expect(result).toEqual([{ admin: true }]); }); test('with ... delete', async (ctx) => { const { db } = ctx.pg; await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const averageAmount = db .$with('average_amount') .as( db .select({ value: sql`avg(${orders.amount})`.as('value'), }) .from(orders), ); const result = await db .with(averageAmount) .delete(orders) .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) .returning({ id: orders.id, }); expect(result).toEqual([ { id: 6 }, { id: 7 }, { id: 8 }, ]); }); test('select from subquery sql', async (ctx) => { const { db } = ctx.pg; await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); const sq = db .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) .from(users2Table) .as('sq'); const res = await db.select({ name: sq.name }).from(sq); expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); }); test('select a field without joining its table', (ctx) => { const { db } = ctx.pg; expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')).toThrowError(); }); test('select all fields from subquery without alias', (ctx) => { const { db } = ctx.pg; const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); expect(() => db.select().from(sq).prepare('query')).toThrowError(); }); test('select count()', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); const res = await db.select({ count: sql`count(*)` }).from(usersTable); expect(res).toEqual([{ count: '2' }]); }); test('select count w/ custom mapper', async (ctx) => { const { db } = ctx.pg; function count(value: PgColumn | SQLWrapper): SQL; function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { const result = sql`count(${value})`.mapWith(Number); if (!alias) { return result; } return result.as(alias); } await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); const res = await db.select({ count: count(sql`*`) }).from(usersTable); expect(res).toEqual([{ count: 2 }]); }); test('network types', async (ctx) => { const { db } = ctx.pg; const value: typeof network.$inferSelect = { inet: '127.0.0.1', cidr: '192.168.100.128/25', macaddr: '08:00:2b:01:02:03', macaddr8: '08:00:2b:01:02:03:04:05', }; await db.insert(network).values(value); const res = await db.select().from(network); expect(res).toEqual([value]); }); test('array types', async (ctx) => { const { db } = ctx.pg; const values: typeof salEmp.$inferSelect[] = [ { name: 'John', payByQuarter: [10000, 10000, 10000, 10000], schedule: [['meeting', 'lunch'], ['training', 'presentation']], }, { name: 'Carol', payByQuarter: [20000, 25000, 25000, 25000], schedule: [['breakfast', 'consulting'], ['meeting', 'lunch']], }, ]; await db.insert(salEmp).values(values); const res = await db.select().from(salEmp); expect(res).toEqual(values); }); test('select for ...', (ctx) => { const { db } = ctx.pg; { const query = db .select() .from(users2Table) .for('update') .toSQL(); expect(query.sql).toMatch(/ for update$/); } { const query = db .select() .from(users2Table) .for('update', { of: [users2Table, coursesTable] }) .toSQL(); expect(query.sql).toMatch(/ for update of "users2", "courses"$/); } { const query = db .select() .from(users2Table) .for('no key update', { of: users2Table }) .toSQL(); expect(query.sql).toMatch(/for no key update of "users2"$/); } { const query = db .select() .from(users2Table) .for('no key update', { of: users2Table, skipLocked: true }) .toSQL(); expect(query.sql).toMatch(/ for no key update of "users2" skip locked$/); } { const query = db .select() .from(users2Table) .for('share', { of: users2Table, noWait: true }) .toSQL(); expect(query.sql).toMatch(/for share of "users2" nowait$/); } }); test('having', async (ctx) => { const { db } = ctx.pg; await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2, }]); const result = await db .select({ id: citiesTable.id, name: sql`upper(${citiesTable.name})`.as('upper_name'), usersCount: sql`count(${users2Table.id})::int`.as('users_count'), }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(citiesTable.id) .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name); expect(result).toEqual([ { id: 1, name: 'LONDON', usersCount: 2, }, { id: 2, name: 'PARIS', usersCount: 1, }, ]); }); test('view', async (ctx) => { const { db } = ctx.pg; const newYorkers1 = pgView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); const newYorkers2 = pgView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); const newYorkers3 = pgView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).existing(); await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop view ${newYorkers1}`); }); // NEXT test('materialized view', async (ctx) => { const { db } = ctx.pg; const newYorkers1 = pgMaterializedView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); const newYorkers2 = pgMaterializedView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); const newYorkers3 = pgMaterializedView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).existing(); await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1); expect(result).toEqual([]); } await db.refreshMaterializedView(newYorkers1); { const result = await db.select().from(newYorkers1); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop materialized view ${newYorkers1}`); }); test('select from existing view', async (ctx) => { const { db } = ctx.pg; const schema = pgSchema('test_schema'); const newYorkers = schema.view('new_yorkers', { id: integer('id').notNull(), }).existing(); await db.execute(sql`drop schema if exists ${schema} cascade`); await db.execute(sql`create schema ${schema}`); await db.execute(sql`create view ${newYorkers} as select id from ${usersTable}`); await db.insert(usersTable).values({ id: 100, name: 'John' }); const result = await db.select({ id: usersTable.id, }).from(usersTable).innerJoin(newYorkers, eq(newYorkers.id, usersTable.id)); expect(result).toEqual([{ id: 100 }]); }); // TODO: copy to SQLite and MySQL, add to docs test('select from raw sql', async (ctx) => { const { db } = ctx.pg; const result = await db.select({ id: sql`id`, name: sql`name`, }).from(sql`(select 1 as id, 'John' as name) as users`); Expect>; expect(result).toEqual([ { id: 1, name: 'John' }, ]); }); test('select from raw sql with joins', async (ctx) => { const { db } = ctx.pg; const result = await db .select({ id: sql`users.id`, name: sql`users.name`, userCity: sql`users.city`, cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); Expect>; expect(result).toEqual([ { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, ]); }); test('join on aliased sql from select', async (ctx) => { const { db } = ctx.pg; const result = await db .select({ userId: sql`users.id`.as('userId'), name: sql`users.name`, userCity: sql`users.city`, cityId: sql`cities.id`.as('cityId'), cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('join on aliased sql from with clause', async (ctx) => { const { db } = ctx.pg; const users = db.$with('users').as( db.select({ id: sql`id`.as('userId'), name: sql`name`.as('userName'), city: sql`city`.as('city'), }).from( sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, ), ); const cities = db.$with('cities').as( db.select({ id: sql`id`.as('cityId'), name: sql`name`.as('cityName'), }).from( sql`(select 1 as id, 'Paris' as name) as cities`, ), ); const result = await db .with(users, cities) .select({ userId: users.id, name: users.name, userCity: users.city, cityId: cities.id, cityName: cities.name, }) .from(users) .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('prefixed table', async (ctx) => { const { db } = ctx.pg; const pgTable = pgTableCreator((name) => `myprefix_${name}`); const users = pgTable('test_prefixed_table_with_unique_name', { id: integer('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, ); await db.insert(users).values({ id: 1, name: 'John' }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, name: 'John' }]); await db.execute(sql`drop table ${users}`); }); test('select from enum as ts enum', async (ctx) => { const { db } = ctx.pg; enum Muscle { abdominals = 'abdominals', hamstrings = 'hamstrings', adductors = 'adductors', quadriceps = 'quadriceps', biceps = 'biceps', shoulders = 'shoulders', chest = 'chest', middle_back = 'middle_back', calves = 'calves', glutes = 'glutes', lower_back = 'lower_back', lats = 'lats', triceps = 'triceps', traps = 'traps', forearms = 'forearms', neck = 'neck', abductors = 'abductors', } enum Force { isometric = 'isometric', isotonic = 'isotonic', isokinetic = 'isokinetic', } enum Level { beginner = 'beginner', intermediate = 'intermediate', advanced = 'advanced', } enum Mechanic { compound = 'compound', isolation = 'isolation', } enum Equipment { barbell = 'barbell', dumbbell = 'dumbbell', bodyweight = 'bodyweight', machine = 'machine', cable = 'cable', kettlebell = 'kettlebell', } enum Category { upper_body = 'upper_body', lower_body = 'lower_body', full_body = 'full_body', } const muscleEnum = pgEnum('muscle', Muscle); const forceEnum = pgEnum('force', Force); const levelEnum = pgEnum('level', Level); const mechanicEnum = pgEnum('mechanic', Mechanic); const equipmentEnum = pgEnum('equipment', Equipment); const categoryEnum = pgEnum('category', Category); const exercises = pgTable('exercises', { id: serial('id').primaryKey(), name: varchar('name').notNull(), force: forceEnum('force'), level: levelEnum('level'), mechanic: mechanicEnum('mechanic'), equipment: equipmentEnum('equipment'), instructions: text('instructions'), category: categoryEnum('category'), primaryMuscles: muscleEnum('primary_muscles').array(), secondaryMuscles: muscleEnum('secondary_muscles').array(), createdAt: timestamp('created_at').notNull().default(sql`now()`), updatedAt: timestamp('updated_at').notNull().default(sql`now()`), }); await db.execute(sql`drop table if exists ${exercises}`); await db.execute(sql`drop type if exists ${sql.identifier(muscleEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(forceEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(levelEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(mechanicEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(equipmentEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(categoryEnum.enumName)}`); await db.execute( sql`create type ${ sql.identifier(muscleEnum.enumName) } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, ); await db.execute( sql`create type ${sql.identifier(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`, ); await db.execute( sql`create type ${sql.identifier(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`, ); await db.execute(sql`create type ${sql.identifier(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); await db.execute( sql`create type ${ sql.identifier(equipmentEnum.enumName) } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, ); await db.execute( sql`create type ${sql.identifier(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`, ); await db.execute(sql` create table ${exercises} ( id serial primary key, name varchar not null, force force, level level, mechanic mechanic, equipment equipment, instructions text, category category, primary_muscles muscle[], secondary_muscles muscle[], created_at timestamp not null default now(), updated_at timestamp not null default now() ) `); await db.insert(exercises).values({ name: 'Bench Press', force: Force.isotonic, level: Level.beginner, mechanic: Mechanic.compound, equipment: Equipment.barbell, instructions: 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', category: Category.upper_body, primaryMuscles: [Muscle.chest, Muscle.triceps], secondaryMuscles: [Muscle.shoulders, Muscle.traps], }); const result = await db.select().from(exercises); expect(result).toEqual([ { id: 1, name: 'Bench Press', force: 'isotonic', level: 'beginner', mechanic: 'compound', equipment: 'barbell', instructions: 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', category: 'upper_body', primaryMuscles: ['chest', 'triceps'], secondaryMuscles: ['shoulders', 'traps'], createdAt: result[0]!.createdAt, updatedAt: result[0]!.updatedAt, }, ]); await db.execute(sql`drop table ${exercises}`); await db.execute(sql`drop type ${sql.identifier(muscleEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(forceEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(levelEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(mechanicEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(equipmentEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(categoryEnum.enumName)}`); }); test('select from enum', async (ctx) => { const { db } = ctx.pg; const muscleEnum = pgEnum('muscle', [ 'abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors', ]); const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); const equipmentEnum = pgEnum('equipment', [ 'barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell', ]); const categoryEnum = pgEnum('category', ['upper_body', 'lower_body', 'full_body']); const exercises = pgTable('exercises', { id: serial('id').primaryKey(), name: varchar('name').notNull(), force: forceEnum('force'), level: levelEnum('level'), mechanic: mechanicEnum('mechanic'), equipment: equipmentEnum('equipment'), instructions: text('instructions'), category: categoryEnum('category'), primaryMuscles: muscleEnum('primary_muscles').array(), secondaryMuscles: muscleEnum('secondary_muscles').array(), createdAt: timestamp('created_at').notNull().default(sql`now()`), updatedAt: timestamp('updated_at').notNull().default(sql`now()`), }); await db.execute(sql`drop table if exists ${exercises}`); await db.execute(sql`drop type if exists ${sql.identifier(muscleEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(forceEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(levelEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(mechanicEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(equipmentEnum.enumName)}`); await db.execute(sql`drop type if exists ${sql.identifier(categoryEnum.enumName)}`); await db.execute( sql`create type ${ sql.identifier(muscleEnum.enumName) } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, ); await db.execute( sql`create type ${sql.identifier(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`, ); await db.execute( sql`create type ${sql.identifier(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`, ); await db.execute(sql`create type ${sql.identifier(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); await db.execute( sql`create type ${ sql.identifier(equipmentEnum.enumName) } as enum ('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, ); await db.execute( sql`create type ${sql.identifier(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`, ); await db.execute(sql` create table ${exercises} ( id serial primary key, name varchar not null, force force, level level, mechanic mechanic, equipment equipment, instructions text, category category, primary_muscles muscle[], secondary_muscles muscle[], created_at timestamp not null default now(), updated_at timestamp not null default now() ) `); await db.insert(exercises).values({ name: 'Bench Press', force: 'isotonic', level: 'beginner', mechanic: 'compound', equipment: 'barbell', instructions: 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', category: 'upper_body', primaryMuscles: ['chest', 'triceps'], secondaryMuscles: ['shoulders', 'traps'], }); const result = await db.select().from(exercises); expect(result).toEqual([ { id: 1, name: 'Bench Press', force: 'isotonic', level: 'beginner', mechanic: 'compound', equipment: 'barbell', instructions: 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', category: 'upper_body', primaryMuscles: ['chest', 'triceps'], secondaryMuscles: ['shoulders', 'traps'], createdAt: result[0]!.createdAt, updatedAt: result[0]!.updatedAt, }, ]); await db.execute(sql`drop table ${exercises}`); await db.execute(sql`drop type ${sql.identifier(muscleEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(forceEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(levelEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(mechanicEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(equipmentEnum.enumName)}`); await db.execute(sql`drop type ${sql.identifier(categoryEnum.enumName)}`); }); test('all date and time columns', async (ctx) => { const { db } = ctx.pg; const table = pgTable('all_columns', { id: serial('id').primaryKey(), dateString: date('date_string', { mode: 'string' }).notNull(), time: time('time', { precision: 3 }).notNull(), datetime: timestamp('datetime').notNull(), datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), datetimeFullPrecision: timestamp('datetime_full_precision', { precision: 6, mode: 'string' }).notNull(), datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), interval: interval('interval').notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, date_string date not null, time time(3) not null, datetime timestamp not null, datetime_wtz timestamp with time zone not null, datetime_string timestamp not null, datetime_full_precision timestamp(6) not null, datetime_wtz_string timestamp with time zone not null, interval interval not null ) `); const someDatetime = new Date('2022-01-01T00:00:00.123Z'); const fullPrecision = '2022-01-01T00:00:00.123456Z'; const someTime = '23:23:12.432'; await db.insert(table).values({ dateString: '2022-01-01', time: someTime, datetime: someDatetime, datetimeWTZ: someDatetime, datetimeString: '2022-01-01T00:00:00.123Z', datetimeFullPrecision: fullPrecision, datetimeWTZString: '2022-01-01T00:00:00.123Z', interval: '1 day', }); const result = await db.select().from(table); Expect< Equal<{ id: number; dateString: string; time: string; datetime: Date; datetimeWTZ: Date; datetimeString: string; datetimeFullPrecision: string; datetimeWTZString: string; interval: string; }[], typeof result> >; Expect< Equal<{ dateString: string; time: string; datetime: Date; datetimeWTZ: Date; datetimeString: string; datetimeFullPrecision: string; datetimeWTZString: string; interval: string; id?: number | undefined; }, typeof table.$inferInsert> >; expect(result).toEqual([ { id: 1, dateString: '2022-01-01', time: someTime, datetime: someDatetime, datetimeWTZ: someDatetime, datetimeString: '2022-01-01 00:00:00.123', datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), datetimeWTZString: '2022-01-01 00:00:00.123+00', interval: '1 day', }, ]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns with timezone second case mode date', async (ctx) => { const { db } = ctx.pg; const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); const insertedDate = new Date(); // 1. Insert date as new date await db.insert(table).values([ { timestamp: insertedDate }, ]); // 2, Select as date and check that timezones are the same // There is no way to check timezone in Date object, as it is always represented internally in UTC const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: insertedDate }]); // 3. Compare both dates expect(insertedDate.getTime()).toBe(result[0]?.timestamp.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns with timezone third case mode date', async (ctx) => { const { db } = ctx.pg; const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); const insertedDate = new Date('2022-01-01 20:00:00.123-04'); // used different time zones, internally is still UTC const insertedDate2 = new Date('2022-01-02 04:00:00.123+04'); // They are both the same date in different time zones // 1. Insert date as new dates with different time zones await db.insert(table).values([ { timestamp: insertedDate }, { timestamp: insertedDate2 }, ]); // 2, Select and compare both dates const result = await db.select().from(table); expect(result[0]?.timestamp.getTime()).toBe(result[1]?.timestamp.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('orderBy with aliased column', (ctx) => { const { db } = ctx.pg; const query = db.select({ test: sql`something`.as('test'), }).from(users2Table).orderBy((fields) => fields.test).toSQL(); expect(query.sql).toBe('select something as "test" from "users2" order by "test"'); }); test('select from sql', async (ctx) => { const { db } = ctx.pg; const metricEntry = pgTable('metric_entry', { id: pgUuid('id').notNull(), createdAt: timestamp('created_at').notNull(), }); await db.execute(sql`drop table if exists ${metricEntry}`); await db.execute(sql`create table ${metricEntry} (id uuid not null, created_at timestamp not null)`); const metricId = uuidV4(); const intervals = db.$with('intervals').as( db .select({ startTime: sql`(date'2023-03-01'+ x * '1 day'::interval)`.as('start_time'), endTime: sql`(date'2023-03-01'+ (x+1) *'1 day'::interval)`.as('end_time'), }) .from(sql`generate_series(0, 29, 1) as t(x)`), ); const func = () => db .with(intervals) .select({ startTime: intervals.startTime, endTime: intervals.endTime, count: sql`count(${metricEntry})`, }) .from(metricEntry) .rightJoin( intervals, and( eq(metricEntry.id, metricId), gte(metricEntry.createdAt, intervals.startTime), lt(metricEntry.createdAt, intervals.endTime), ), ) .groupBy(intervals.startTime, intervals.endTime) .orderBy(asc(intervals.startTime)); await expect((async () => { func(); })()).resolves.not.toThrowError(); }); test('timestamp timezone', async (ctx) => { const { db } = ctx.pg; const usersTableWithAndWithoutTimezone = pgTable('users_test_with_and_without_timezone', { id: serial('id').primaryKey(), name: text('name').notNull(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), }); await db.execute(sql`drop table if exists ${usersTableWithAndWithoutTimezone}`); await db.execute( sql` create table users_test_with_and_without_timezone ( id serial not null primary key, name text not null, created_at timestamptz not null default now(), updated_at timestamp not null default now() ) `, ); const date = new Date(Date.parse('2020-01-01T00:00:00+04:00')); await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'With default times' }); await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'Without default times', createdAt: date, updatedAt: date, }); const users = await db.select().from(usersTableWithAndWithoutTimezone); // check that the timestamps are set correctly for default times expect(Math.abs(users[0]!.updatedAt.getTime() - Date.now())).toBeLessThan(2000); expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); // check that the timestamps are set correctly for non default times expect(Math.abs(users[1]!.updatedAt.getTime() - date.getTime())).toBeLessThan(2000); expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); }); test('transaction', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users_transactions', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); const products = pgTable('products_transactions', { id: serial('id').primaryKey(), price: integer('price').notNull(), stock: integer('stock').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql`create table users_transactions (id serial not null primary key, balance integer not null)`); await db.execute( sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, ); const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); await db.transaction(async (tx) => { await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 90 }]); await db.execute(sql`drop table ${users}`); await db.execute(sql`drop table ${products}`); }); test('transaction rollback', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users_transactions_rollback', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, ); await expect((async () => { await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); tx.rollback(); }); })()).rejects.toThrowError(TransactionRollbackError); const result = await db.select().from(users); expect(result).toEqual([]); await db.execute(sql`drop table ${users}`); }); test('nested transaction', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users_nested_transactions', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }); }); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 200 }]); await db.execute(sql`drop table ${users}`); }); test('nested transaction rollback', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users_nested_transactions_rollback', { id: serial('id').primaryKey(), balance: integer('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); await expect((async () => { await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }); tx.rollback(); }); })()).rejects.toThrowError(TransactionRollbackError); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 100 }]); await db.execute(sql`drop table ${users}`); }); test('join subquery with join', async (ctx) => { const { db } = ctx.pg; const internalStaff = pgTable('internal_staff', { userId: integer('user_id').notNull(), }); const customUser = pgTable('custom_user', { id: integer('id').notNull(), }); const ticket = pgTable('ticket', { staffId: integer('staff_id').notNull(), }); await db.execute(sql`drop table if exists ${internalStaff}`); await db.execute(sql`drop table if exists ${customUser}`); await db.execute(sql`drop table if exists ${ticket}`); await db.execute(sql`create table internal_staff (user_id integer not null)`); await db.execute(sql`create table custom_user (id integer not null)`); await db.execute(sql`create table ticket (staff_id integer not null)`); await db.insert(internalStaff).values({ userId: 1 }); await db.insert(customUser).values({ id: 1 }); await db.insert(ticket).values({ staffId: 1 }); const subq = db .select() .from(internalStaff) .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) .as('internal_staff'); const mainQuery = await db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); expect(mainQuery).toEqual([{ ticket: { staffId: 1 }, internal_staff: { internal_staff: { userId: 1 }, custom_user: { id: 1 }, }, }]); await db.execute(sql`drop table ${internalStaff}`); await db.execute(sql`drop table ${customUser}`); await db.execute(sql`drop table ${ticket}`); }); test('subquery with view', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users_subquery_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }); const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 1 }, { name: 'Jill', cityId: 2 }, ]); const sq = db.$with('sq').as(db.select().from(newYorkers)); const result = await db.with(sq).select().from(sq); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 3, name: 'Jack', cityId: 1 }, ]); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); }); test('join view as subquery', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users_join_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }); const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 1 }, { name: 'Jill', cityId: 2 }, ]); const sq = db.select().from(newYorkers).as('new_yorkers_sq'); const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); expect(result).toEqual([ { users_join_view: { id: 1, name: 'John', cityId: 1 }, new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, }, { users_join_view: { id: 2, name: 'Jane', cityId: 2 }, new_yorkers_sq: null, }, { users_join_view: { id: 3, name: 'Jack', cityId: 1 }, new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, }, { users_join_view: { id: 4, name: 'Jill', cityId: 2 }, new_yorkers_sq: null, }, ]); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); }); test('table selection with single table', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.insert(users).values({ name: 'John', cityId: 1 }); const result = await db.select({ users }).from(users); expect(result).toEqual([{ users: { id: 1, name: 'John', cityId: 1 } }]); await db.execute(sql`drop table ${users}`); }); test('set null to jsonb field', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { id: serial('id').primaryKey(), jsonb: jsonb('jsonb'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, jsonb jsonb)`, ); const result = await db.insert(users).values({ jsonb: null }).returning(); expect(result).toEqual([{ id: 1, jsonb: null }]); await db.execute(sql`drop table ${users}`); }); test('insert undefined', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.insert(users).values({ name: undefined }); })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); test('update undefined', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.update(users).set({ name: undefined }); })()).rejects.toThrowError(); await expect((async () => { db.update(users).set({ name: undefined }); })()).rejects.toThrowError(); await db.execute(sql`drop table ${users}`); }); test('array operators', async (ctx) => { const { db } = ctx.pg; const posts = pgTable('posts', { id: serial('id').primaryKey(), tags: text('tags').array(), }); await db.execute(sql`drop table if exists ${posts}`); await db.execute( sql`create table ${posts} (id serial primary key, tags text[])`, ); await db.insert(posts).values([{ tags: ['ORM'], }, { tags: ['Typescript'], }, { tags: ['Typescript', 'ORM'], }, { tags: ['Typescript', 'Frontend', 'React'], }, { tags: ['Typescript', 'ORM', 'Database', 'Postgres'], }, { tags: ['Java', 'Spring', 'OOP'], }]); const contains = await db.select({ id: posts.id }).from(posts) .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); const contained = await db.select({ id: posts.id }).from(posts) .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); const overlaps = await db.select({ id: posts.id }).from(posts) .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); const withSubQuery = await db.select({ id: posts.id }).from(posts) .where(arrayContains( posts.tags, db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), )); expect(contains).toEqual([{ id: 3 }, { id: 5 }]); expect(contained).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); expect(overlaps).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]); expect(withSubQuery).toEqual([{ id: 1 }, { id: 3 }, { id: 5 }]); }); test('set operations (union) from query builder with subquery', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const sq = db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).as('sq'); const result = await db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).union( db.select().from(sq), ).orderBy(asc(sql`name`)).limit(2).offset(1); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 3, name: 'Jack' }, { id: 2, name: 'Jane' }, ]); await expect((async () => { db .select({ id: cities2Table.id, name: citiesTable.name, name2: users2Table.name }) .from(cities2Table).union( // @ts-expect-error db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table), ).orderBy(asc(sql`name`)); })()).rejects.toThrowError(); }); test('set operations (union) as function', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await union( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`name`)).limit(1).offset(1); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); await expect((async () => { union( db .select({ name: citiesTable.name, id: cities2Table.id }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`name`)); })()).rejects.toThrowError(); }); test('set operations (union all) from query builder', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2).unionAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(4); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 2, name: 'London' }, ]); await expect((async () => { db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2).unionAll( db .select({ name: citiesTable.name, id: cities2Table.id }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (union all) as function', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await unionAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); expect(result).toHaveLength(3); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 1, name: 'John' }, { id: 1, name: 'John' }, ]); await expect((async () => { unionAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (intersect) from query builder', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).intersect( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`name`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).intersect( // @ts-expect-error db .select({ id: cities2Table.id, name: citiesTable.name, id2: cities2Table.id }) .from(cities2Table).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`name`)); })()).rejects.toThrowError(); }); test('set operations (intersect) as function', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await intersect( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); expect(result).toHaveLength(0); expect(result).toEqual([]); await expect((async () => { intersect( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (intersect all) from query builder', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2).intersectAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, ]); await expect((async () => { db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).limit(2).intersectAll( db .select({ name: users2Table.name, id: users2Table.id }) .from(cities2Table).limit(2), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (intersect all) as function', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await intersectAll( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'John' }, ]); await expect((async () => { intersectAll( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (except) from query builder', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await db .select() .from(cities2Table).except( db .select() .from(cities2Table).where(gt(citiesTable.id, 1)), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); await expect((async () => { db .select() .from(cities2Table).except( db .select({ name: users2Table.name, id: users2Table.id }) .from(cities2Table).where(gt(citiesTable.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (except) as function', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await except( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table), db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { except( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table), db .select({ name: users2Table.name, id: users2Table.id }) .from(cities2Table).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (except all) from query builder', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await db .select() .from(cities2Table).exceptAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { db .select({ name: cities2Table.name, id: cities2Table.id }) .from(cities2Table).exceptAll( db .select({ id: cities2Table.id, name: citiesTable.name }) .from(cities2Table).where(eq(citiesTable.id, 1)), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (except all) as function', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await exceptAll( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gt(users2Table.id, 7)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)).limit(5).offset(2); expect(result).toHaveLength(4); expect(result).toEqual([ { id: 4, name: 'Peter' }, { id: 5, name: 'Ben' }, { id: 6, name: 'Jill' }, { id: 7, name: 'Mary' }, ]); await expect((async () => { exceptAll( db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gt(users2Table.id, 7)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (mixed) from query builder with subquery', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const sq = db .select() .from(cities2Table).where(gt(citiesTable.id, 1)).as('sq'); const result = await db .select() .from(cities2Table).except( ({ unionAll }) => unionAll( db.select().from(sq), db.select().from(cities2Table).where(eq(citiesTable.id, 2)), ), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); await expect((async () => { db .select() .from(cities2Table).except( ({ unionAll }) => unionAll( db .select({ name: cities2Table.name, id: cities2Table.id }) .from(cities2Table).where(gt(citiesTable.id, 1)), db.select().from(cities2Table).where(eq(citiesTable.id, 2)), ), ); })()).rejects.toThrowError(); }); test('set operations (mixed all) as function', async (ctx) => { const { db } = ctx.pg; await setupSetOperationTest(db); const result = await union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), except( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id, 5)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 7)), ), db .select().from(cities2Table).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(6); expect(result).toEqual([ { id: 1, name: 'John' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, { id: 5, name: 'Ben' }, { id: 6, name: 'Jill' }, { id: 8, name: 'Sally' }, ]); await expect((async () => { union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), except( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id, 5)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 7)), ), db .select().from(cities2Table).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('aggregate function: count', async (ctx) => { const { db } = ctx.pg; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: count() }).from(table); const result2 = await db.select({ value: count(table.a) }).from(table); const result3 = await db.select({ value: countDistinct(table.name) }).from(table); expect(result1[0]?.value).toBe(7); expect(result2[0]?.value).toBe(5); expect(result3[0]?.value).toBe(6); }); test('aggregate function: avg', async (ctx) => { const { db } = ctx.pg; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: avg(table.b) }).from(table); const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('33.3333333333333333'); expect(result2[0]?.value).toBeNull(); expect(result3[0]?.value).toBe('42.5000000000000000'); }); test('aggregate function: sum', async (ctx) => { const { db } = ctx.pg; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: sum(table.b) }).from(table); const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('200'); expect(result2[0]?.value).toBeNull(); expect(result3[0]?.value).toBe('170'); }); test('aggregate function: max', async (ctx) => { const { db } = ctx.pg; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: max(table.b) }).from(table); const result2 = await db.select({ value: max(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(90); expect(result2[0]?.value).toBeNull(); }); test('aggregate function: min', async (ctx) => { const { db } = ctx.pg; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: min(table.b) }).from(table); const result2 = await db.select({ value: min(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(10); expect(result2[0]?.value).toBeNull(); }); test('array mapping and parsing', async (ctx) => { const { db } = ctx.pg; const arrays = pgTable('arrays_tests', { id: serial('id').primaryKey(), tags: text('tags').array(), nested: text('nested').array().array(), numbers: integer('numbers').notNull().array(), }); await db.execute(sql`drop table if exists ${arrays}`); await db.execute(sql` create table ${arrays} ( id serial primary key, tags text[], nested text[][], numbers integer[] ) `); await db.insert(arrays).values({ tags: ['', 'b', 'c'], nested: [['1', ''], ['3', '\\a']], numbers: [1, 2, 3], }); const result = await db.select().from(arrays); expect(result).toEqual([{ id: 1, tags: ['', 'b', 'c'], nested: [['1', ''], ['3', '\\a']], numbers: [1, 2, 3], }]); await db.execute(sql`drop table ${arrays}`); }); test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { const { db } = ctx.pg; await db.execute(sql`drop table if exists ${usersOnUpdate}`); await db.execute( sql` create table ${usersOnUpdate} ( id serial primary key, name text not null, update_counter integer default 1 not null, updated_at timestamp(3), always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 250; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { const { db } = ctx.pg; await db.execute(sql`drop table if exists ${usersOnUpdate}`); await db.execute( sql` create table ${usersOnUpdate} ( id serial primary key, name text not null, update_counter integer default 1, updated_at timestamp(3), always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John', alwaysNull: 'this will be null after updating' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 15000; // expect(initial[0]?.updatedAt?.valueOf()).not.toBe(justDates[0]?.updatedAt?.valueOf()); for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); test('test if method with sql operators', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), age: integer('age').notNull(), city: text('city').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql` create table ${users} ( id serial primary key, name text not null, age integer not null, city text not null ) `); await db.insert(users).values([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition1 = true; const [result1] = await db.select().from(users).where(eq(users.id, 1).if(condition1)); expect(result1).toEqual({ id: 1, name: 'John', age: 20, city: 'New York' }); const condition2 = 1; const [result2] = await db.select().from(users).where(sql`${users.id} = 1`.if(condition2)); expect(result2).toEqual({ id: 1, name: 'John', age: 20, city: 'New York' }); const condition3 = 'non-empty string'; const result3 = await db.select().from(users).where( or(eq(users.id, 1).if(condition3), eq(users.id, 2).if(condition3)), ); expect(result3).toEqual([{ id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York', }]); const condtition4 = false; const result4 = await db.select().from(users).where(eq(users.id, 1).if(condtition4)); expect(result4).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition5 = undefined; const result5 = await db.select().from(users).where(sql`${users.id} = 1`.if(condition5)); expect(result5).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition6 = null; const result6 = await db.select().from(users).where( or(eq(users.id, 1).if(condition6), eq(users.id, 2).if(condition6)), ); expect(result6).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition7 = { term1: 0, term2: 1, }; const result7 = await db.select().from(users).where( and(gt(users.age, 20).if(condition7.term1), eq(users.city, 'New York').if(condition7.term2)), ); expect(result7).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, ]); const condition8 = { term1: '', term2: 'non-empty string', }; const result8 = await db.select().from(users).where( or(lt(users.age, 21).if(condition8.term1), eq(users.city, 'London').if(condition8.term2)), ); expect(result8).toEqual([ { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition9 = { term1: 1, term2: true, }; const result9 = await db.select().from(users).where( and( inArray(users.city, ['New York', 'London']).if(condition9.term1), ilike(users.name, 'a%').if(condition9.term2), ), ); expect(result9).toEqual([ { id: 2, name: 'Alice', age: 21, city: 'New York' }, ]); const condition10 = { term1: 4, term2: 19, }; const result10 = await db.select().from(users).where( and( sql`length(${users.name}) <= ${condition10.term1}`.if(condition10.term1), gt(users.age, condition10.term2).if(condition10.term2 > 20), ), ); expect(result10).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition11 = true; const result11 = await db.select().from(users).where( or(eq(users.city, 'New York'), gte(users.age, 22))!.if(condition11), ); expect(result11).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition12 = false; const result12 = await db.select().from(users).where( and(eq(users.city, 'London'), gte(users.age, 23))!.if(condition12), ); expect(result12).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition13 = true; const result13 = await db.select().from(users).where(sql`(city = 'New York' or age >= 22)`.if(condition13)); expect(result13).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); const condition14 = false; const result14 = await db.select().from(users).where(sql`(city = 'London' and age >= 23)`.if(condition14)); expect(result14).toEqual([ { id: 1, name: 'John', age: 20, city: 'New York' }, { id: 2, name: 'Alice', age: 21, city: 'New York' }, { id: 3, name: 'Nick', age: 22, city: 'London' }, { id: 4, name: 'Lina', age: 23, city: 'London' }, ]); await db.execute(sql`drop table ${users}`); }); // MySchema tests test('mySchema :: select all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersMySchemaTable).values({ name: 'John' }); const result = await db.select().from(usersMySchemaTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(300); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('mySchema :: select sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersMySchemaTable.name})`, }).from(usersMySchemaTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: select typed sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersMySchemaTable.name})`, }).from(usersMySchemaTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: select distinct', async (ctx) => { const { db } = ctx.pg; const usersDistinctTable = pgTable('users_distinct', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]); const users1 = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ); const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( usersDistinctTable.id, ); const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( usersDistinctTable, ).orderBy(usersDistinctTable.name); await db.execute(sql`drop table ${usersDistinctTable}`); expect(users1).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); expect(users2).toHaveLength(2); expect(users2[0]?.id).toBe(1); expect(users2[1]?.id).toBe(2); expect(users3).toHaveLength(2); expect(users3[0]?.name).toBe('Jane'); expect(users3[1]?.name).toBe('John'); }); test('mySchema :: insert returning sql', async (ctx) => { const { db } = ctx.pg; const users = await db.insert(usersMySchemaTable).values({ name: 'John' }).returning({ name: sql`upper(${usersMySchemaTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: delete returning sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning({ name: sql`upper(${usersMySchemaTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: update with returning partial', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where(eq(usersMySchemaTable.name, 'John')) .returning({ id: usersMySchemaTable.id, name: usersMySchemaTable.name, }); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('mySchema :: delete with returning all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(300); expect(users).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); test('mySchema :: insert + select', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values({ name: 'John' }); const result = await db.select().from(usersMySchemaTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersMySchemaTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersMySchemaTable); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('mySchema :: insert with overridden default values', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersMySchemaTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('mySchema :: insert many', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name, jsonb: usersMySchemaTable.jsonb, verified: usersMySchemaTable.verified, }).from(usersMySchemaTable); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('mySchema :: select with group by as field', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.name); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('mySchema :: select with group by as column + sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`); expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); test('mySchema :: build query', async (ctx) => { const { db } = ctx.pg; const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) .toSQL(); expect(query).toEqual({ sql: 'select "id", "name" from "mySchema"."users" group by "mySchema"."users"."id", "mySchema"."users"."name"', params: [], }); }); test('mySchema :: partial join with alias', async (ctx) => { const { db } = ctx.pg; const customerAlias = alias(usersMySchemaTable, 'customer'); await db.insert(usersMySchemaTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersMySchemaTable.id, name: usersMySchemaTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }).from(usersMySchemaTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersMySchemaTable.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); test('mySchema :: insert with spaces', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( usersMySchemaTable, ); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('mySchema :: prepared statement with placeholder in .limit', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values({ name: 'John' }); const stmt = db .select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name, }) .from(usersMySchemaTable) .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) .limit(sql.placeholder('limit')) .prepare('mySchema_stmt_limit'); const result = await stmt.execute({ id: 1, limit: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); expect(result).toHaveLength(1); }); test('mySchema :: build query insert with onConflict do update / multiple columns', async (ctx) => { const { db } = ctx.pg; const query = db.insert(usersMySchemaTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: [usersMySchemaTable.id, usersMySchemaTable.name], set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', params: ['John', '["foo","bar"]', 'John1'], }); }); test('mySchema :: build query insert with onConflict do nothing + target', async (ctx) => { const { db } = ctx.pg; const query = db.insert(usersMySchemaTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing({ target: usersMySchemaTable.id }) .toSQL(); expect(query).toEqual({ sql: 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', params: ['John', '["foo","bar"]'], }); }); test('mySchema :: select from tables with same name from different schema using alias', async (ctx) => { const { db } = ctx.pg; await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); await db.insert(usersTable).values({ id: 11, name: 'Hans' }); const customerAlias = alias(usersTable, 'customer'); const result = await db .select().from(usersMySchemaTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(customerAlias.id, 11)); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', verified: false, jsonb: null, createdAt: result[0]!.users.createdAt, }, customer: { id: 11, name: 'Hans', verified: false, jsonb: null, createdAt: result[0]!.customer!.createdAt, }, }]); }); test('mySchema :: view', async (ctx) => { const { db } = ctx.pg; const newYorkers1 = mySchema.view('new_yorkers') .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); const newYorkers2 = mySchema.view('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); const newYorkers3 = mySchema.view('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).existing(); await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2MySchemaTable).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop view ${newYorkers1}`); }); test('mySchema :: materialized view', async (ctx) => { const { db } = ctx.pg; const newYorkers1 = mySchema.materializedView('new_yorkers') .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); const newYorkers2 = mySchema.materializedView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); const newYorkers3 = mySchema.materializedView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).existing(); await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2MySchemaTable).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1); expect(result).toEqual([]); } await db.refreshMaterializedView(newYorkers1); { const result = await db.select().from(newYorkers1); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop materialized view ${newYorkers1}`); }); test('limit 0', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() .from(usersTable) .limit(0); expect(users).toEqual([]); }); test('limit -1', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() .from(usersTable) .limit(-1); expect(users.length).toBeGreaterThan(0); }); test('Object keys as column names', async (ctx) => { const { db } = ctx.pg; // Tests the following: // Column with required config // Column with optional config without providing a value // Column with optional config providing a value // Column without config const users = pgTable('users', { id: bigserial({ mode: 'number' }).primaryKey(), firstName: varchar(), lastName: varchar({ length: 50 }), admin: boolean(), }); await db.execute(sql`drop table if exists users`); await db.execute( sql` create table users ( "id" bigserial primary key, "firstName" varchar, "lastName" varchar(50), "admin" boolean ) `, ); await db.insert(users).values([ { firstName: 'John', lastName: 'Doe', admin: true }, { firstName: 'Jane', lastName: 'Smith', admin: false }, ]); const result = await db .select({ id: users.id, firstName: users.firstName, lastName: users.lastName }) .from(users) .where(eq(users.admin, true)); expect(result).toEqual([ { id: 1, firstName: 'John', lastName: 'Doe' }, ]); await db.execute(sql`drop table users`); }); test('proper json and jsonb handling', async (ctx) => { const { db } = ctx.pg; const jsonTable = pgTable('json_table', { json: json('json').$type<{ name: string; age: number }>(), jsonb: jsonb('jsonb').$type<{ name: string; age: number }>(), }); await db.execute(sql`drop table if exists ${jsonTable}`); await db.execute(sql`create table ${jsonTable} (json json, jsonb jsonb)`); await db.insert(jsonTable).values({ json: { name: 'Tom', age: 75 }, jsonb: { name: 'Pete', age: 23 } }); const result = await db.select().from(jsonTable); const justNames = await db.select({ name1: sql`${jsonTable.json}->>'name'`.as('name1'), name2: sql`${jsonTable.jsonb}->>'name'`.as('name2'), }).from(jsonTable); expect(result).toStrictEqual([ { json: { name: 'Tom', age: 75 }, jsonb: { name: 'Pete', age: 23 }, }, ]); expect(justNames).toStrictEqual([ { name1: 'Tom', name2: 'Pete', }, ]); }); test('set json/jsonb fields with objects and retrieve with the ->> operator', async (ctx) => { const { db } = ctx.pg; const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ json: obj, jsonb: obj, }); const result = await db.select({ jsonStringField: sql`${jsonTestTable.json}->>'string'`, jsonNumberField: sql`${jsonTestTable.json}->>'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->>'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->>'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ jsonStringField: testString, jsonNumberField: String(testNumber), jsonbStringField: testString, jsonbNumberField: String(testNumber), }]); }); test('set json/jsonb fields with strings and retrieve with the ->> operator', async (ctx) => { const { db } = ctx.pg; const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ json: sql`${JSON.stringify(obj)}`, jsonb: sql`${JSON.stringify(obj)}`, }); const result = await db.select({ jsonStringField: sql`${jsonTestTable.json}->>'string'`, jsonNumberField: sql`${jsonTestTable.json}->>'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->>'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->>'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ jsonStringField: testString, jsonNumberField: String(testNumber), jsonbStringField: testString, jsonbNumberField: String(testNumber), }]); }); test('set json/jsonb fields with objects and retrieve with the -> operator', async (ctx) => { const { db } = ctx.pg; const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ json: obj, jsonb: obj, }); const result = await db.select({ jsonStringField: sql`${jsonTestTable.json}->'string'`, jsonNumberField: sql`${jsonTestTable.json}->'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ jsonStringField: testString, jsonNumberField: testNumber, jsonbStringField: testString, jsonbNumberField: testNumber, }]); }); test('set json/jsonb fields with strings and retrieve with the -> operator', async (ctx) => { const { db } = ctx.pg; const obj = { string: 'test', number: 123 }; const { string: testString, number: testNumber } = obj; await db.insert(jsonTestTable).values({ json: sql`${JSON.stringify(obj)}`, jsonb: sql`${JSON.stringify(obj)}`, }); const result = await db.select({ jsonStringField: sql`${jsonTestTable.json}->'string'`, jsonNumberField: sql`${jsonTestTable.json}->'number'`, jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, }).from(jsonTestTable); expect(result).toStrictEqual([{ jsonStringField: testString, jsonNumberField: testNumber, jsonbStringField: testString, jsonbNumberField: testNumber, }]); }); test('update ... from', async (ctx) => { const { db } = ctx.pg; await db.insert(cities2Table).values([ { name: 'New York City' }, { name: 'Seattle' }, ]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, ]); const result = await db .update(users2Table) .set({ cityId: cities2Table.id, }) .from(cities2Table) .where(and(eq(cities2Table.name, 'Seattle'), eq(users2Table.name, 'John'))) .returning(); expect(result).toStrictEqual([{ id: 1, name: 'John', cityId: 2, cities: { id: 2, name: 'Seattle', }, }]); }); test('update ... from with alias', async (ctx) => { const { db } = ctx.pg; await db.insert(cities2Table).values([ { name: 'New York City' }, { name: 'Seattle' }, ]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, ]); const users = alias(users2Table, 'u'); const cities = alias(cities2Table, 'c'); const result = await db .update(users) .set({ cityId: cities.id, }) .from(cities) .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) .returning(); expect(result).toStrictEqual([{ id: 1, name: 'John', cityId: 2, c: { id: 2, name: 'Seattle', }, }]); }); test('update ... from with join', async (ctx) => { const { db } = ctx.pg; const states = pgTable('states', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const cities = pgTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), stateId: integer('state_id').references(() => states.id), }); const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull().references(() => cities.id), }); await db.execute(sql`drop table if exists "states" cascade`); await db.execute(sql`drop table if exists "cities" cascade`); await db.execute(sql`drop table if exists "users" cascade`); await db.execute(sql` create table "states" ( "id" serial primary key, "name" text not null ) `); await db.execute(sql` create table "cities" ( "id" serial primary key, "name" text not null, "state_id" integer references "states"("id") ) `); await db.execute(sql` create table "users" ( "id" serial primary key, "name" text not null, "city_id" integer not null references "cities"("id") ) `); await db.insert(states).values([ { name: 'New York' }, { name: 'Washington' }, ]); await db.insert(cities).values([ { name: 'New York City', stateId: 1 }, { name: 'Seattle', stateId: 2 }, { name: 'London' }, ]); await db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 3 }, ]); const result1 = await db .update(users) .set({ cityId: cities.id, }) .from(cities) .leftJoin(states, eq(cities.stateId, states.id)) .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) .returning(); const result2 = await db .update(users) .set({ cityId: cities.id, }) .from(cities) .leftJoin(states, eq(cities.stateId, states.id)) .where(and(eq(cities.name, 'London'), eq(users.name, 'Jack'))) .returning(); expect(result1).toStrictEqual([{ id: 1, name: 'John', cityId: 2, cities: { id: 2, name: 'Seattle', stateId: 2, }, states: { id: 2, name: 'Washington', }, }]); expect(result2).toStrictEqual([{ id: 3, name: 'Jack', cityId: 3, cities: { id: 3, name: 'London', stateId: null, }, states: null, }]); }); test('insert into ... select', async (ctx) => { const { db } = ctx.pg; const notifications = pgTable('notifications', { id: serial('id').primaryKey(), sentAt: timestamp('sent_at').notNull().defaultNow(), message: text('message').notNull(), }); const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const userNotications = pgTable('user_notifications', { userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), notificationId: integer('notification_id').notNull().references(() => notifications.id, { onDelete: 'cascade', }), }, (t) => ({ pk: primaryKey({ columns: [t.userId, t.notificationId] }), })); await db.execute(sql`drop table if exists notifications`); await db.execute(sql`drop table if exists users`); await db.execute(sql`drop table if exists user_notifications`); await db.execute(sql` create table notifications ( id serial primary key, sent_at timestamp not null default now(), message text not null ) `); await db.execute(sql` create table users ( id serial primary key, name text not null ) `); await db.execute(sql` create table user_notifications ( user_id int references users(id) on delete cascade, notification_id int references notifications(id) on delete cascade, primary key (user_id, notification_id) ) `); const newNotification = await db .insert(notifications) .values({ message: 'You are one of the 3 lucky winners!' }) .returning({ id: notifications.id }) .then((result) => result[0]); await db.insert(users).values([ { name: 'Alice' }, { name: 'Bob' }, { name: 'Charlie' }, { name: 'David' }, { name: 'Eve' }, ]); const sentNotifications = await db .insert(userNotications) .select( db .select({ userId: users.id, notificationId: sql`${newNotification!.id}`.as('notification_id'), }) .from(users) .where(inArray(users.name, ['Alice', 'Charlie', 'Eve'])) .orderBy(asc(users.id)), ) .returning(); expect(sentNotifications).toStrictEqual([ { userId: 1, notificationId: newNotification!.id }, { userId: 3, notificationId: newNotification!.id }, { userId: 5, notificationId: newNotification!.id }, ]); }); test('insert into ... select with keys in different order', async (ctx) => { const { db } = ctx.pg; const users1 = pgTable('users1', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const users2 = pgTable('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists users1`); await db.execute(sql`drop table if exists users2`); await db.execute(sql` create table users1 ( id serial primary key, name text not null ) `); await db.execute(sql` create table users2 ( id serial primary key, name text not null ) `); expect( () => db .insert(users1) .select( db .select({ name: users2.name, id: users2.id, }) .from(users2), ), ).toThrowError(); }); test('policy', () => { { const policy = pgPolicy('test policy'); expect(is(policy, PgPolicy)).toBe(true); expect(policy.name).toBe('test policy'); } { const policy = pgPolicy('test policy', { as: 'permissive', for: 'all', to: 'public', using: sql`1=1`, withCheck: sql`1=1`, }); expect(is(policy, PgPolicy)).toBe(true); expect(policy.name).toBe('test policy'); expect(policy.as).toBe('permissive'); expect(policy.for).toBe('all'); expect(policy.to).toBe('public'); const dialect = new PgDialect(); expect(is(policy.using, SQL)).toBe(true); expect(dialect.sqlToQuery(policy.using!).sql).toBe('1=1'); expect(is(policy.withCheck, SQL)).toBe(true); expect(dialect.sqlToQuery(policy.withCheck!).sql).toBe('1=1'); } { const policy = pgPolicy('test policy', { to: 'custom value', }); expect(policy.to).toBe('custom value'); } { const p1 = pgPolicy('test policy'); const p2 = pgPolicy('test policy 2', { as: 'permissive', for: 'all', to: 'public', using: sql`1=1`, withCheck: sql`1=1`, }); const table = pgTable('table_with_policy', { id: serial('id').primaryKey(), name: text('name').notNull(), }, () => ({ p1, p2, })); const config = getTableConfig(table); expect(config.policies).toHaveLength(2); expect(config.policies[0]).toBe(p1); expect(config.policies[1]).toBe(p2); } }); test('neon: policy', () => { { const policy = crudPolicy({ read: true, modify: true, role: authenticatedRole, }); for (const it of Object.values(policy)) { expect(is(it, PgPolicy)).toBe(true); expect(it?.to).toStrictEqual(authenticatedRole); it?.using ? expect(it.using).toStrictEqual(sql`true`) : ''; it?.withCheck ? expect(it.withCheck).toStrictEqual(sql`true`) : ''; } } { const table = pgTable('name', { id: integer('id'), }, (t) => [ index('name').on(t.id), crudPolicy({ read: true, modify: true, role: authenticatedRole, }), primaryKey({ columns: [t.id], name: 'custom' }), ]); const { policies, indexes, primaryKeys } = getTableConfig(table); expect(policies.length).toBe(4); expect(indexes.length).toBe(1); expect(primaryKeys.length).toBe(1); expect(policies[0]?.name === 'crud-custom-policy-modify'); expect(policies[1]?.name === 'crud-custom-policy-read'); } }); test('neon: neon_auth', () => { const usersSyncTable = usersSync; const { columns, schema, name } = getTableConfig(usersSyncTable); expect(name).toBe('users_sync'); expect(schema).toBe('neon_auth'); expect(columns).toHaveLength(7); }); test('Enable RLS function', () => { const usersWithRLS = pgTable('users', { id: integer(), }).enableRLS(); const config1 = getTableConfig(usersWithRLS); const usersNoRLS = pgTable('users', { id: integer(), }); const config2 = getTableConfig(usersNoRLS); expect(config1.enableRLS).toBeTruthy(); expect(config2.enableRLS).toBeFalsy(); }); test('test $onUpdateFn and $onUpdate works with sql value', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users_on_update', { id: serial('id').primaryKey(), name: text('name').notNull(), updatedAt: timestamp('updated_at', { mode: 'date' }).notNull().$onUpdate(() => sql`now()`), }); await db.execute( sql` create table ${users} ( "id" serial primary key, "name" text not null, "updated_at" timestamp(3) ) `, ); const insertResp = await db.insert(users).values({ name: 'John', }).returning({ updatedAt: users.updatedAt, }); await new Promise((resolve) => setTimeout(resolve, 1000)); const now = Date.now(); await new Promise((resolve) => setTimeout(resolve, 1000)); const updateResp = await db.update(users).set({ name: 'John', }).returning({ updatedAt: users.updatedAt, }); expect(insertResp[0]?.updatedAt.getTime() ?? 0).lessThan(now); expect(updateResp[0]?.updatedAt.getTime() ?? 0).greaterThan(now); }); test('$count separate', async (ctx) => { const { db } = ctx.pg; const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.$count(countTestTable); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual(4); }); test('$count embedded', async (ctx) => { const { db } = ctx.pg; const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.select({ count: db.$count(countTestTable), }).from(countTestTable); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual([ { count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }, ]); }); test('$count separate reuse', async (ctx) => { const { db } = ctx.pg; const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = db.$count(countTestTable); const count1 = await count; await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); const count2 = await count; await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); const count3 = await count; await db.execute(sql`drop table ${countTestTable}`); expect(count1).toStrictEqual(4); expect(count2).toStrictEqual(5); expect(count3).toStrictEqual(6); }); test('$count embedded reuse', async (ctx) => { const { db } = ctx.pg; const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = db.select({ count: db.$count(countTestTable), }).from(countTestTable); const count1 = await count; await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); const count2 = await count; await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); const count3 = await count; await db.execute(sql`drop table ${countTestTable}`); expect(count1).toStrictEqual([ { count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }, ]); expect(count2).toStrictEqual([ { count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }, ]); expect(count3).toStrictEqual([ { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, ]); }); test('$count separate with filters', async (ctx) => { const { db } = ctx.pg; const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.$count(countTestTable, gt(countTestTable.id, 1)); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual(3); }); test('$count embedded with filters', async (ctx) => { const { db } = ctx.pg; const countTestTable = pgTable('count_test', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${countTestTable}`); await db.execute(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.select({ count: db.$count(countTestTable, gt(countTestTable.id, 1)), }).from(countTestTable); await db.execute(sql`drop table ${countTestTable}`); expect(count).toStrictEqual([ { count: 3 }, { count: 3 }, { count: 3 }, { count: 3 }, ]); }); test('insert multiple rows into table with generated identity column', async (ctx) => { const { db } = ctx.pg; const identityColumnsTable = pgTable('identity_columns_table', { id: integer('id').generatedAlwaysAsIdentity(), id1: integer('id1').generatedByDefaultAsIdentity(), name: text('name').notNull(), }); // not passing identity columns await db.execute(sql`drop table if exists ${identityColumnsTable}`); await db.execute( sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, ); let result = await db.insert(identityColumnsTable).values([ { name: 'John' }, { name: 'Jane' }, { name: 'Bob' }, ]).returning(); expect(result).toEqual([ { id: 1, id1: 1, name: 'John' }, { id: 2, id1: 2, name: 'Jane' }, { id: 3, id1: 3, name: 'Bob' }, ]); // passing generated by default as identity column await db.execute(sql`drop table if exists ${identityColumnsTable}`); await db.execute( sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, ); result = await db.insert(identityColumnsTable).values([ { name: 'John', id1: 3 }, { name: 'Jane', id1: 5 }, { name: 'Bob', id1: 5 }, ]).returning(); expect(result).toEqual([ { id: 1, id1: 3, name: 'John' }, { id: 2, id1: 5, name: 'Jane' }, { id: 3, id1: 5, name: 'Bob' }, ]); // passing all identity columns await db.execute(sql`drop table if exists ${identityColumnsTable}`); await db.execute( sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, ); result = await db.insert(identityColumnsTable).overridingSystemValue().values([ { name: 'John', id: 2, id1: 3 }, { name: 'Jane', id: 4, id1: 5 }, { name: 'Bob', id: 4, id1: 5 }, ]).returning(); expect(result).toEqual([ { id: 2, id1: 3, name: 'John' }, { id: 4, id1: 5, name: 'Jane' }, { id: 4, id1: 5, name: 'Bob' }, ]); }); test('insert as cte', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); const sq1 = db.$with('sq').as( db.insert(users).values({ name: 'John' }).returning(), ); const result1 = await db.with(sq1).select().from(sq1); const result2 = await db.with(sq1).select({ id: sq1.id }).from(sq1); const sq2 = db.$with('sq').as( db.insert(users).values({ name: 'Jane' }).returning({ id: users.id, name: users.name }), ); const result3 = await db.with(sq2).select().from(sq2); const result4 = await db.with(sq2).select({ name: sq2.name }).from(sq2); expect(result1).toEqual([{ id: 1, name: 'John' }]); expect(result2).toEqual([{ id: 2 }]); expect(result3).toEqual([{ id: 3, name: 'Jane' }]); expect(result4).toEqual([{ name: 'Jane' }]); }); test('update as cte', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), age: integer('age').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, age integer not null)`, ); await db.insert(users).values([ { name: 'John', age: 30 }, { name: 'Jane', age: 30 }, ]); const sq1 = db.$with('sq').as( db.update(users).set({ age: 25 }).where(eq(users.name, 'John')).returning(), ); const result1 = await db.with(sq1).select().from(sq1); await db.update(users).set({ age: 30 }); const result2 = await db.with(sq1).select({ age: sq1.age }).from(sq1); const sq2 = db.$with('sq').as( db.update(users).set({ age: 20 }).where(eq(users.name, 'Jane')).returning({ name: users.name, age: users.age }), ); const result3 = await db.with(sq2).select().from(sq2); await db.update(users).set({ age: 30 }); const result4 = await db.with(sq2).select({ age: sq2.age }).from(sq2); expect(result1).toEqual([{ id: 1, name: 'John', age: 25 }]); expect(result2).toEqual([{ age: 25 }]); expect(result3).toEqual([{ name: 'Jane', age: 20 }]); expect(result4).toEqual([{ age: 20 }]); }); test('delete as cte', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); await db.insert(users).values([ { name: 'John' }, { name: 'Jane' }, ]); const sq1 = db.$with('sq').as( db.delete(users).where(eq(users.name, 'John')).returning(), ); const result1 = await db.with(sq1).select().from(sq1); await db.insert(users).values({ name: 'John' }); const result2 = await db.with(sq1).select({ name: sq1.name }).from(sq1); const sq2 = db.$with('sq').as( db.delete(users).where(eq(users.name, 'Jane')).returning({ id: users.id, name: users.name }), ); const result3 = await db.with(sq2).select().from(sq2); await db.insert(users).values({ name: 'Jane' }); const result4 = await db.with(sq2).select({ name: sq2.name }).from(sq2); expect(result1).toEqual([{ id: 1, name: 'John' }]); expect(result2).toEqual([{ name: 'John' }]); expect(result3).toEqual([{ id: 2, name: 'Jane' }]); expect(result4).toEqual([{ name: 'Jane' }]); }); test('sql operator as cte', async (ctx) => { const { db } = ctx.pg; const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); await db.insert(users).values([ { name: 'John' }, { name: 'Jane' }, ]); const sq1 = db.$with('sq', { userId: users.id, data: { name: users.name, }, }).as(sql`select * from ${users} where ${users.name} = 'John'`); const result1 = await db.with(sq1).select().from(sq1); const sq2 = db.$with('sq', { userId: users.id, data: { name: users.name, }, }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); const result2 = await db.with(sq2).select().from(sq1); expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); }); test('cross join', async (ctx) => { const { db } = ctx.pg; await db .insert(usersTable) .values([ { name: 'John' }, { name: 'Jane' }, ]); await db .insert(citiesTable) .values([ { name: 'Seattle' }, { name: 'New York City' }, ]); const result = await db .select({ user: usersTable.name, city: citiesTable.name, }) .from(usersTable) .crossJoin(citiesTable) .orderBy(usersTable.name, citiesTable.name); expect(result).toStrictEqual([ { city: 'New York City', user: 'Jane' }, { city: 'Seattle', user: 'Jane' }, { city: 'New York City', user: 'John' }, { city: 'Seattle', user: 'John' }, ]); }); test('left join (lateral)', async (ctx) => { const { db } = ctx.pg; await db .insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const sq = db .select({ userId: users2Table.id, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(eq(users2Table.cityId, citiesTable.id)) .as('sq'); const res = await db .select({ cityId: citiesTable.id, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .leftJoinLateral(sq, sql`true`); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, { cityId: 2, cityName: 'London', userId: null, userName: null }, ]); }); test('inner join (lateral)', async (ctx) => { const { db } = ctx.pg; await db .insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const sq = db .select({ userId: users2Table.id, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(eq(users2Table.cityId, citiesTable.id)) .as('sq'); const res = await db .select({ cityId: citiesTable.id, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .innerJoinLateral(sq, sql`true`); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, ]); }); test('cross join (lateral)', async (ctx) => { const { db } = ctx.pg; await db .insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }, { id: 3, name: 'Berlin' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }, { name: 'Patrick', cityId: 2, }]); const sq = db .select({ userId: users2Table.id, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(not(like(citiesTable.name, 'L%'))) .as('sq'); const res = await db .select({ cityId: citiesTable.id, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .crossJoinLateral(sq) .orderBy(citiesTable.id, sq.userId); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John', }, { cityId: 1, cityName: 'Paris', userId: 2, userName: 'Jane', }, { cityId: 1, cityName: 'Paris', userId: 3, userName: 'Patrick', }, { cityId: 3, cityName: 'Berlin', userId: 1, userName: 'John', }, { cityId: 3, cityName: 'Berlin', userId: 2, userName: 'Jane', }, { cityId: 3, cityName: 'Berlin', userId: 3, userName: 'Patrick', }, ]); }); test('all types', async (ctx) => { const { db } = ctx.pg; await db.execute(sql`CREATE TYPE "public"."en" AS ENUM('enVal1', 'enVal2');`); await db.execute(sql` CREATE TABLE "all_types" ( "serial" serial NOT NULL, "bigserial53" bigserial NOT NULL, "bigserial64" bigserial, "int" integer, "bigint53" bigint, "bigint64" bigint, "bool" boolean, "char" char, "cidr" "cidr", "date" date, "date_str" date, "double" double precision, "enum" "en", "inet" "inet", "interval" interval, "json" json, "jsonb" jsonb, "line" "line", "line_tuple" "line", "macaddr" "macaddr", "macaddr8" "macaddr8", "numeric" numeric, "numeric_num" numeric, "numeric_big" numeric, "point" "point", "point_tuple" "point", "real" real, "smallint" smallint, "smallserial" "smallserial" NOT NULL, "text" text, "time" time, "timestamp" timestamp, "timestamp_tz" timestamp with time zone, "timestamp_str" timestamp, "timestamp_tz_str" timestamp with time zone, "uuid" uuid, "varchar" varchar, "arrint" integer[], "arrbigint53" bigint[], "arrbigint64" bigint[], "arrbool" boolean[], "arrchar" char[], "arrcidr" "cidr"[], "arrdate" date[], "arrdate_str" date[], "arrdouble" double precision[], "arrenum" "en"[], "arrinet" "inet"[], "arrinterval" interval[], "arrjson" json[], "arrjsonb" jsonb[], "arrline" "line"[], "arrline_tuple" "line"[], "arrmacaddr" "macaddr"[], "arrmacaddr8" "macaddr8"[], "arrnumeric" numeric[], "arrnumeric_num" numeric[], "arrnumeric_big" numeric[], "arrpoint" "point"[], "arrpoint_tuple" "point"[], "arrreal" real[], "arrsmallint" smallint[], "arrtext" text[], "arrtime" time[], "arrtimestamp" timestamp[], "arrtimestamp_tz" timestamp with time zone[], "arrtimestamp_str" timestamp[], "arrtimestamp_tz_str" timestamp with time zone[], "arruuid" uuid[], "arrvarchar" varchar[] ); `); await db.insert(allTypesTable).values({ serial: 1, smallserial: 15, bigint53: 9007199254740991, bigint64: 5044565289845416380n, bigserial53: 9007199254740991, bigserial64: 5044565289845416380n, bool: true, char: 'c', cidr: '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128', inet: '192.168.0.1/24', macaddr: '08:00:2b:01:02:03', macaddr8: '08:00:2b:01:02:03:04:05', date: new Date(1741743161623), dateStr: new Date(1741743161623).toISOString(), double: 15.35325689124218, enum: 'enVal1', int: 621, interval: '2 months ago', json: { str: 'strval', arr: ['str', 10], }, jsonb: { str: 'strvalb', arr: ['strb', 11], }, line: { a: 1, b: 2, c: 3, }, lineTuple: [1, 2, 3], numeric: '475452353476', numericNum: 9007199254740991, numericBig: 5044565289845416380n, point: { x: 24.5, y: 49.6, }, pointTuple: [57.2, 94.3], real: 1.048596, smallint: 10, text: 'TEXT STRING', time: '13:59:28', timestamp: new Date(1741743161623), timestampTz: new Date(1741743161623), timestampStr: new Date(1741743161623).toISOString(), timestampTzStr: new Date(1741743161623).toISOString(), uuid: 'b77c9eef-8e28-4654-88a1-7221b46d2a1c', varchar: 'C4-', arrbigint53: [9007199254740991], arrbigint64: [5044565289845416380n], arrbool: [true], arrchar: ['c'], arrcidr: ['2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'], arrinet: ['192.168.0.1/24'], arrmacaddr: ['08:00:2b:01:02:03'], arrmacaddr8: ['08:00:2b:01:02:03:04:05'], arrdate: [new Date(1741743161623)], arrdateStr: [new Date(1741743161623).toISOString()], arrdouble: [15.35325689124218], arrenum: ['enVal1'], arrint: [621], arrinterval: ['2 months ago'], arrjson: [{ str: 'strval', arr: ['str', 10], }], arrjsonb: [{ str: 'strvalb', arr: ['strb', 11], }], arrline: [{ a: 1, b: 2, c: 3, }], arrlineTuple: [[1, 2, 3]], arrnumeric: ['475452353476'], arrnumericNum: [9007199254740991], arrnumericBig: [5044565289845416380n], arrpoint: [{ x: 24.5, y: 49.6, }], arrpointTuple: [[57.2, 94.3]], arrreal: [1.048596], arrsmallint: [10], arrtext: ['TEXT STRING'], arrtime: ['13:59:28'], arrtimestamp: [new Date(1741743161623)], arrtimestampTz: [new Date(1741743161623)], arrtimestampStr: [new Date(1741743161623).toISOString()], arrtimestampTzStr: [new Date(1741743161623).toISOString()], arruuid: ['b77c9eef-8e28-4654-88a1-7221b46d2a1c'], arrvarchar: ['C4-'], }); const rawRes = await db.select().from(allTypesTable); type ExpectedType = { serial: number; bigserial53: number; bigserial64: bigint; int: number | null; bigint53: number | null; bigint64: bigint | null; bool: boolean | null; char: string | null; cidr: string | null; date: Date | null; dateStr: string | null; double: number | null; enum: 'enVal1' | 'enVal2' | null; inet: string | null; interval: string | null; json: unknown; jsonb: unknown; line: { a: number; b: number; c: number; } | null; lineTuple: [number, number, number] | null; macaddr: string | null; macaddr8: string | null; numeric: string | null; numericNum: number | null; numericBig: bigint | null; point: { x: number; y: number; } | null; pointTuple: [number, number] | null; real: number | null; smallint: number | null; smallserial: number; text: string | null; time: string | null; timestamp: Date | null; timestampTz: Date | null; timestampStr: string | null; timestampTzStr: string | null; uuid: string | null; varchar: string | null; arrint: number[] | null; arrbigint53: number[] | null; arrbigint64: bigint[] | null; arrbool: boolean[] | null; arrchar: string[] | null; arrcidr: string[] | null; arrdate: Date[] | null; arrdateStr: string[] | null; arrdouble: number[] | null; arrenum: ('enVal1' | 'enVal2')[] | null; arrinet: string[] | null; arrinterval: string[] | null; arrjson: unknown[] | null; arrjsonb: unknown[] | null; arrline: { a: number; b: number; c: number; }[] | null; arrlineTuple: [number, number, number][] | null; arrmacaddr: string[] | null; arrmacaddr8: string[] | null; arrnumeric: string[] | null; arrnumericNum: number[] | null; arrnumericBig: bigint[] | null; arrpoint: { x: number; y: number }[] | null; arrpointTuple: [number, number][] | null; arrreal: number[] | null; arrsmallint: number[] | null; arrtext: string[] | null; arrtime: string[] | null; arrtimestamp: Date[] | null; arrtimestampTz: Date[] | null; arrtimestampStr: string[] | null; arrtimestampTzStr: string[] | null; arruuid: string[] | null; arrvarchar: string[] | null; }[]; const expectedRes: ExpectedType = [ { serial: 1, bigserial53: 9007199254740991, bigserial64: 5044565289845416380n, int: 621, bigint53: 9007199254740991, bigint64: 5044565289845416380n, bool: true, char: 'c', cidr: '2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128', date: new Date('2025-03-12T00:00:00.000Z'), dateStr: '2025-03-12', double: 15.35325689124218, enum: 'enVal1', inet: '192.168.0.1/24', interval: '-2 mons', json: { str: 'strval', arr: ['str', 10] }, jsonb: { arr: ['strb', 11], str: 'strvalb' }, line: { a: 1, b: 2, c: 3 }, lineTuple: [1, 2, 3], macaddr: '08:00:2b:01:02:03', macaddr8: '08:00:2b:01:02:03:04:05', numeric: '475452353476', numericNum: 9007199254740991, numericBig: 5044565289845416380n, point: { x: 24.5, y: 49.6 }, pointTuple: [57.2, 94.3], real: 1.048596, smallint: 10, smallserial: 15, text: 'TEXT STRING', time: '13:59:28', timestamp: new Date('2025-03-12T01:32:41.623Z'), timestampTz: new Date('2025-03-12T01:32:41.623Z'), timestampStr: '2025-03-12 01:32:41.623', timestampTzStr: '2025-03-12 01:32:41.623+00', uuid: 'b77c9eef-8e28-4654-88a1-7221b46d2a1c', varchar: 'C4-', arrint: [621], arrbigint53: [9007199254740991], arrbigint64: [5044565289845416380n], arrbool: [true], arrchar: ['c'], arrcidr: ['2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'], arrdate: [new Date('2025-03-12T00:00:00.000Z')], arrdateStr: ['2025-03-12'], arrdouble: [15.35325689124218], arrenum: ['enVal1'], arrinet: ['192.168.0.1/24'], arrinterval: ['-2 mons'], arrjson: [{ str: 'strval', arr: ['str', 10] }], arrjsonb: [{ arr: ['strb', 11], str: 'strvalb' }], arrline: [{ a: 1, b: 2, c: 3 }], arrlineTuple: [[1, 2, 3]], arrmacaddr: ['08:00:2b:01:02:03'], arrmacaddr8: ['08:00:2b:01:02:03:04:05'], arrnumeric: ['475452353476'], arrnumericNum: [9007199254740991], arrnumericBig: [5044565289845416380n], arrpoint: [{ x: 24.5, y: 49.6 }], arrpointTuple: [[57.2, 94.3]], arrreal: [1.048596], arrsmallint: [10], arrtext: ['TEXT STRING'], arrtime: ['13:59:28'], arrtimestamp: [new Date('2025-03-12T01:32:41.623Z')], arrtimestampTz: [new Date('2025-03-12T01:32:41.623Z')], arrtimestampStr: ['2025-03-12 01:32:41.623'], arrtimestampTzStr: ['2025-03-12 01:32:41.623+00'], arruuid: ['b77c9eef-8e28-4654-88a1-7221b46d2a1c'], arrvarchar: ['C4-'], }, ]; expectTypeOf(rawRes).toEqualTypeOf(); expect(rawRes).toStrictEqual(expectedRes); }); }); } ================================================ FILE: integration-tests/tests/pg/pg-custom.test.ts ================================================ import retry from 'async-retry'; import type Docker from 'dockerode'; import { asc, eq, sql } from 'drizzle-orm'; import type { NodePgDatabase } from 'drizzle-orm/node-postgres'; import { drizzle } from 'drizzle-orm/node-postgres'; import { migrate } from 'drizzle-orm/node-postgres/migrator'; import { alias, customType, pgTable, pgTableCreator, serial, text } from 'drizzle-orm/pg-core'; import { Client } from 'pg'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { randomString } from '~/utils'; import { createDockerDB } from './pg-common'; const ENABLE_LOGGING = false; let db: NodePgDatabase; let client: Client; let container: Docker.Container | undefined; beforeAll(async () => { let connectionString; if (process.env['PG_CONNECTION_STRING']) { connectionString = process.env['PG_CONNECTION_STRING']; } else { const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); connectionString = conStr; container = contrainerObj; } client = await retry(async () => { client = new Client(connectionString); await client.connect(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end(); await container?.stop().catch(console.error); }); beforeEach((ctx) => { ctx.pg = { db, }; }); const customSerial = customType<{ data: number; notNull: true; default: true }>({ dataType() { return 'serial'; }, }); const customText = customType<{ data: string }>({ dataType() { return 'text'; }, }); const customBoolean = customType<{ data: boolean }>({ dataType() { return 'boolean'; }, }); const customJsonb = (name: string) => customType<{ data: TData; driverData: string }>({ dataType() { return 'jsonb'; }, toDriver(value: TData): string { return JSON.stringify(value); }, })(name); const customTimestamp = customType< { data: Date; driverData: string; config: { withTimezone: boolean; precision?: number } } >({ dataType(config) { const precision = config?.precision === undefined ? '' : ` (${config.precision})`; return `timestamp${precision}${config?.withTimezone ? ' with time zone' : ''}`; }, fromDriver(value: string): Date { return new Date(value); }, }); const usersTable = pgTable('users', { id: customSerial('id').primaryKey(), name: customText('name').notNull(), verified: customBoolean('verified').notNull().default(false), jsonb: customJsonb('jsonb'), createdAt: customTimestamp('created_at', { withTimezone: true }).notNull().default(sql`now()`), }); const usersMigratorTable = pgTable('users12', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }); beforeEach(async (ctx) => { const { db } = ctx.pg; await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`create schema public`); await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); }); test('select all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(100); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('select sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select typed sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('insert returning sql', async (ctx) => { const { db } = ctx.pg; const users = await db.insert(usersTable).values({ name: 'John' }).returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('delete returning sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JOHN' }]); }); test('update returning sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ name: sql`upper(${usersTable.name})`, }); expect(users).toEqual([{ name: 'JANE' }]); }); test('update with returning all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); test('update with returning partial', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ id: usersTable.id, name: usersTable.name, }); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async (ctx) => { const { db } = ctx.pg; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); expect(users).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); test('delete with returning partial', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ id: usersTable.id, name: usersTable.name, }); expect(users).toEqual([{ id: 1, name: 'John' }]); }); test('insert + select', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersTable).values({ name: 'Jane' }); const result2 = await db.select().from(usersTable); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('json insert', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); test('insert with overridden default values', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('insert many', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }).from(usersTable); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('insert many with returning', async (ctx) => { const { db } = ctx.pg; const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]) .returning({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('select with group by as field', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with group by as sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with group by as sql + column', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id); expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); test('select with group by as column + sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`); expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); }); test('select with group by complex query', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async (ctx) => { const { db } = ctx.pg; const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', params: [], }); }); test('insert sql', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async (ctx) => { const { db } = ctx.pg; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }).from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); test('full join with alias', async (ctx) => { const { db } = ctx.pg; const pgTable = pgTableCreator((name) => `prefixed_${name}`); const users = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select().from(users) .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('insert with spaces', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const statement = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .prepare('statement1'); const result = await statement.execute(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement reuse', async (ctx) => { const { db } = ctx.pg; const stmt = db.insert(usersTable).values({ verified: true, name: sql.placeholder('name'), }).prepare('stmt2'); for (let i = 0; i < 10; i++) { await stmt.execute({ name: `John ${i}` }); } const result = await db.select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified, }).from(usersTable); expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, { id: 4, name: 'John 3', verified: true }, { id: 5, name: 'John 4', verified: true }, { id: 6, name: 'John 5', verified: true }, { id: 7, name: 'John 6', verified: true }, { id: 8, name: 'John 7', verified: true }, { id: 9, name: 'John 8', verified: true }, { id: 10, name: 'John 9', verified: true }, ]); }); test('prepared statement with placeholder in .where', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const stmt = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .prepare('stmt3'); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement with placeholder in .limit', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values({ name: 'John' }); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .limit(sql.placeholder('limit')) .prepare('stmt_limit'); const result = await stmt.execute({ id: 1, limit: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); expect(result).toHaveLength(1); }); test('prepared statement with placeholder in .offset', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .offset(sql.placeholder('offset')) .prepare('stmt_offset'); const result = await stmt.execute({ offset: 1 }); expect(result).toEqual([{ id: 2, name: 'John1' }]); }); test('migrator : default migration strategy', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); test('migrator : migrate with custom schema', async () => { const customSchema = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); expect(rowCount! > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); expect(rowCount! > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); test('migrator : migrate with custom table and custom schema', async () => { const customTable = randomString(); const customSchema = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable, migrationsSchema: customSchema, }); // test if the custom migrations table was created const { rowCount } = await db.execute( sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, ); expect(rowCount! > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); }); test('insert via db.execute + select via db.execute', async () => { await db.execute(sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute + returning', async () => { const inserted = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${ sql.identifier(usersTable.name.name) }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute>( db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); test('build query insert with onConflict do update', async (ctx) => { const { db } = ctx.pg; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', params: ['John', '["foo","bar"]', 'John1'], }); }); test('build query insert with onConflict do update / multiple columns', async (ctx) => { const { db } = ctx.pg; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', params: ['John', '["foo","bar"]', 'John1'], }); }); test('build query insert with onConflict do nothing', async (ctx) => { const { db } = ctx.pg; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing() .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', params: ['John', '["foo","bar"]'], }); }); test('build query insert with onConflict do nothing + target', async (ctx) => { const { db } = ctx.pg; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onConflictDoNothing({ target: usersTable.id }) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', params: ['John', '["foo","bar"]'], }); }); test('insert with onConflict do update', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable) .values({ name: 'John' }); await db.insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert with onConflict do nothing', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable) .values({ name: 'John' }); await db.insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoNothing(); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert with onConflict do nothing + target', async (ctx) => { const { db } = ctx.pg; await db.insert(usersTable) .values({ name: 'John' }); await db.insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoNothing({ target: usersTable.id }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John' }]); }); ================================================ FILE: integration-tests/tests/pg/pg-proxy.test.ts ================================================ import retry from 'async-retry'; import { sql } from 'drizzle-orm'; import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; import type { PgRemoteDatabase } from 'drizzle-orm/pg-proxy'; import { drizzle as proxyDrizzle } from 'drizzle-orm/pg-proxy'; import { migrate } from 'drizzle-orm/pg-proxy/migrator'; import * as pg from 'pg'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { createDockerDB, tests, usersMigratorTable, usersTable } from './pg-common'; import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; // eslint-disable-next-line drizzle-internal/require-entity-kind class ServerSimulator { constructor(private db: pg.Client) { const { types } = pg; types.setTypeParser(types.builtins.TIMESTAMPTZ, (val) => val); types.setTypeParser(types.builtins.TIMESTAMP, (val) => val); types.setTypeParser(types.builtins.DATE, (val) => val); types.setTypeParser(types.builtins.INTERVAL, (val) => val); types.setTypeParser(1231, (val) => val); types.setTypeParser(1115, (val) => val); types.setTypeParser(1185, (val) => val); types.setTypeParser(1187, (val) => val); types.setTypeParser(1182, (val) => val); } async query(sql: string, params: any[], method: 'all' | 'execute') { if (method === 'all') { try { const result = await this.db.query({ text: sql, values: params, rowMode: 'array', }); return { data: result.rows as any }; } catch (e: any) { return { error: e }; } } else if (method === 'execute') { try { const result = await this.db.query({ text: sql, values: params, }); return { data: result.rows as any }; } catch (e: any) { return { error: e }; } } else { return { error: 'Unknown method value' }; } } async migrations(queries: string[]) { await this.db.query('BEGIN'); try { for (const query of queries) { await this.db.query(query); } await this.db.query('COMMIT'); } catch (e) { await this.db.query('ROLLBACK'); throw e; } return {}; } } const ENABLE_LOGGING = false; let db: PgRemoteDatabase; let dbGlobalCached: PgRemoteDatabase; let cachedDb: PgRemoteDatabase; let client: pg.Client; let serverSimulator: ServerSimulator; beforeAll(async () => { let connectionString; if (process.env['PG_CONNECTION_STRING']) { connectionString = process.env['PG_CONNECTION_STRING']; } else { const { connectionString: conStr } = await createDockerDB(); connectionString = conStr; } client = await retry(async () => { client = new pg.Client(connectionString); await client.connect(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); serverSimulator = new ServerSimulator(client); const proxyHandler = async (sql: string, params: any[], method: any) => { try { const response = await serverSimulator.query(sql, params, method); if (response.error !== undefined) { throw response.error; } return { rows: response.data }; } catch (e: any) { console.error('Error from pg proxy server:', e.message); throw e; } }; db = proxyDrizzle(proxyHandler, { logger: ENABLE_LOGGING, }); cachedDb = proxyDrizzle(proxyHandler, { logger: ENABLE_LOGGING, cache: new TestCache() }); dbGlobalCached = proxyDrizzle(proxyHandler, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); afterAll(async () => { await client?.end(); }); beforeEach((ctx) => { ctx.pg = { db, }; ctx.cachedPg = { db: cachedDb, dbGlobalCached, }; }); test('migrator : default migration strategy', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); // './drizzle2/pg-proxy/first' ?? await migrate(db, async (queries) => { try { await serverSimulator.migrations(queries); } catch (e) { console.error(e); throw new Error('Proxy server cannot run migrations'); } }, { migrationsFolder: './drizzle2/pg' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); test('all date and time columns without timezone first case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format without timezone in it await db.insert(table).values([ { timestamp: '2022-01-01 02:00:00.123456' }, ]); // 2, Select in string format and check that values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); // 3. Select as raw query and check that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone second case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: '2022-01-01T02:00:00.123456-02' }, ]); // 2, Select as raw query and check that values are the same const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone third case mode date', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) not null ) `); const insertedDate = new Date('2022-01-01 20:00:00.123+04'); // 1. Insert date as new date await db.insert(table).values([ { timestamp: insertedDate }, ]); // 2, Select as raw query as string const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC expect(new Date(result[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode date for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); const timestampString = new Date('2022-01-01 00:00:00.456-0200'); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: timestampString }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in UTC timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to UTC await db.execute(sql`set time zone 'UTC'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`set time zone '${sql.raw(timezone[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in different timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to HST (UTC - 10) await db.execute(sql`set time zone '-10'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-1000'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); await db.execute(sql`set time zone '${sql.raw(timezone[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); skipTests([ 'migrator : default migration strategy', 'migrator : migrate with custom schema', 'migrator : migrate with custom table', 'migrator : migrate with custom table and custom schema', 'insert via db.execute + select via db.execute', 'insert via db.execute + returning', 'insert via db.execute w/ query builder', 'all date and time columns without timezone first case mode string', 'all date and time columns without timezone third case mode date', 'test mode string for timestamp with timezone', 'test mode date for timestamp with timezone', 'test mode string for timestamp with timezone in UTC timezone', 'test mode string for timestamp with timezone in different timezone', 'transaction', 'transaction rollback', 'nested transaction', 'nested transaction rollback', 'test $onUpdateFn and $onUpdate works updating', ]); beforeEach(async () => { await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`create schema public`); await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); }); test('insert via db.execute + select via db.execute', async () => { await db.execute( sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, ); const result = await db.execute<{ id: number; name: string }>( sql`select id, name from "users"`, ); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute + returning', async () => { const inserted = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${ sql.identifier( usersTable.name.name, ) }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute>( db .insert(usersTable) .values({ name: 'John' }) .returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted).toEqual([{ id: 1, name: 'John' }]); }); tests(); cacheTests(); ================================================ FILE: integration-tests/tests/pg/pglite.test.ts ================================================ import { PGlite } from '@electric-sql/pglite'; import { Name, sql } from 'drizzle-orm'; import { drizzle, type PgliteDatabase } from 'drizzle-orm/pglite'; import { migrate } from 'drizzle-orm/pglite/migrator'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { tests, usersMigratorTable, usersTable } from './pg-common'; import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; const ENABLE_LOGGING = false; let db: PgliteDatabase; let dbGlobalCached: PgliteDatabase; let cachedDb: PgliteDatabase; let client: PGlite; beforeAll(async () => { client = new PGlite(); db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache(), }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache(), }); }); afterAll(async () => { await client?.close(); }); beforeEach((ctx) => { ctx.pg = { db, }; ctx.cachedPg = { db: cachedDb, dbGlobalCached, }; }); test('migrator : default migration strategy', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute( sql`drop table if exists users12`, ); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); test('insert via db.execute + select via db.execute', async () => { await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute + returning', async () => { const result = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${new Name( usersTable.name.name, )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const result = await db.execute>( db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), ); expect(Array.prototype.slice.call(result.rows)).toEqual([{ id: 1, name: 'John' }]); }); skipTests([ 'migrator : default migration strategy', 'migrator : migrate with custom schema', 'migrator : migrate with custom table', 'migrator : migrate with custom table and custom schema', 'insert via db.execute + select via db.execute', 'insert via db.execute + returning', 'insert via db.execute w/ query builder', 'all date and time columns without timezone first case mode string', 'all date and time columns without timezone third case mode date', 'test mode string for timestamp with timezone', 'test mode date for timestamp with timezone', 'test mode string for timestamp with timezone in UTC timezone', 'test mode string for timestamp with timezone in different timezone', 'view', 'materialized view', 'subquery with view', 'mySchema :: materialized view', 'select count()', // not working in 0.2.12 'select with group by as sql + column', 'select with group by as column + sql', 'mySchema :: select with group by as column + sql', ]); tests(); cacheTests(); beforeEach(async () => { await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`create schema public`); await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); }); ================================================ FILE: integration-tests/tests/pg/postgres-js.test.ts ================================================ import retry from 'async-retry'; import type { PostgresJsDatabase } from 'drizzle-orm/postgres-js'; import { drizzle } from 'drizzle-orm/postgres-js'; import postgres, { type Sql } from 'postgres'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { Name, sql } from 'drizzle-orm'; import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; import { migrate } from 'drizzle-orm/postgres-js/migrator'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { createDockerDB, tests, usersMigratorTable, usersTable } from './pg-common'; import { TestCache, TestGlobalCache, tests as cacheTests } from './pg-common-cache'; const ENABLE_LOGGING = false; let db: PostgresJsDatabase; let dbGlobalCached: PostgresJsDatabase; let cachedDb: PostgresJsDatabase; let client: Sql; beforeAll(async () => { let connectionString; if (process.env['PG_CONNECTION_STRING']) { connectionString = process.env['PG_CONNECTION_STRING']; } else { const { connectionString: conStr } = await createDockerDB(); connectionString = conStr; } client = await retry(async () => { client = postgres(connectionString, { max: 1, onnotice: () => { // disable notices }, }); await client`select 1`; return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); afterAll(async () => { await client?.end(); }); beforeEach((ctx) => { ctx.pg = { db, }; ctx.cachedPg = { db: cachedDb, dbGlobalCached, }; }); test('migrator : default migration strategy', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); test('migrator : migrate with custom schema', async () => { const customSchema = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: customSchema }); // test if the custom migrations table was created const { count } = await db.execute(sql`select * from ${sql.identifier(customSchema)}."__drizzle_migrations";`); expect(count > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table ${sql.identifier(customSchema)}."__drizzle_migrations"`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); // test if the custom migrations table was created const { count } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); expect(count > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); test('migrator : migrate with custom table and custom schema', async () => { const customTable = randomString(); const customSchema = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable, migrationsSchema: customSchema, }); // test if the custom migrations table was created const { count } = await db.execute( sql`select * from ${sql.identifier(customSchema)}.${sql.identifier(customTable)};`, ); expect(count > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table ${sql.identifier(customSchema)}.${sql.identifier(customTable)}`); }); test('all date and time columns without timezone first case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format without timezone in it await db.insert(table).values([ { timestamp: '2022-01-01 02:00:00.123456' }, ]); // 2, Select in string format and check that values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); // 3. Select as raw query and check that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone second case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: '2022-01-01T02:00:00.123456-02' }, ]); // 2, Select as raw query and check that values are the same const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect([...result]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone third case mode date', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) not null ) `); const insertedDate = new Date('2022-01-01 20:00:00.123+04'); // 1. Insert date as new date await db.insert(table).values([ { timestamp: insertedDate }, ]); // 2, Select as raw query as string const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC expect(new Date(result[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode date for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); const timestampString = new Date('2022-01-01 00:00:00.456-0200'); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: timestampString }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in UTC timezone', async () => { // get current timezone from db const [timezone] = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to UTC await db.execute(sql`set time zone 'UTC'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`set time zone '${sql.raw(timezone!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in different timezone', async () => { // get current timezone from db const [timezone] = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to HST (UTC - 10) await db.execute(sql`set time zone '-10'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-1000'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect([...result2]).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); await db.execute(sql`set time zone '${sql.raw(timezone!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); skipTests([ 'migrator : default migration strategy', 'migrator : migrate with custom schema', 'migrator : migrate with custom table', 'migrator : migrate with custom table and custom schema', 'insert via db.execute + select via db.execute', 'insert via db.execute + returning', 'insert via db.execute w/ query builder', 'all date and time columns without timezone first case mode string', 'all date and time columns without timezone third case mode date', 'test mode string for timestamp with timezone', 'test mode date for timestamp with timezone', 'test mode string for timestamp with timezone in UTC timezone', 'test mode string for timestamp with timezone in different timezone', ]); tests(); cacheTests(); beforeEach(async () => { await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`create schema public`); await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); }); test('insert via db.execute + select via db.execute', async () => { await db.execute(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from "users"`); expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute + returning', async () => { const result = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${new Name( usersTable.name.name, )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const result = await db.execute>( db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), ); expect(Array.prototype.slice.call(result)).toEqual([{ id: 1, name: 'John' }]); }); ================================================ FILE: integration-tests/tests/pg/rls/rls.definition.test.ts ================================================ import { crudPolicy } from 'drizzle-orm/neon'; import { getTableConfig, integer, pgPolicy, pgRole, pgTable } from 'drizzle-orm/pg-core'; import { test } from 'vitest'; test.skip('getTableConfig: policies', async () => { const schema = pgTable('hhh', { id: integer(), }, () => [ pgPolicy('name'), crudPolicy({ role: pgRole('users'), read: true, modify: true }), ]); const tc = getTableConfig(schema); console.log(tc.policies); }); ================================================ FILE: integration-tests/tests/pg/vercel-pg.test.ts ================================================ import { createClient, type VercelClient } from '@vercel/postgres'; import { sql } from 'drizzle-orm'; import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; import { drizzle, type VercelPgDatabase } from 'drizzle-orm/vercel-postgres'; import { migrate } from 'drizzle-orm/vercel-postgres/migrator'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { createDockerDB, tests, tests as cacheTests, usersMigratorTable, usersTable } from './pg-common'; import { TestCache, TestGlobalCache } from './pg-common-cache'; const ENABLE_LOGGING = false; let db: VercelPgDatabase; let dbGlobalCached: VercelPgDatabase; let cachedDb: VercelPgDatabase; let client: VercelClient; beforeAll(async () => { let connectionString; if (process.env['PG_CONNECTION_STRING']) { connectionString = process.env['PG_CONNECTION_STRING']; } else { const { connectionString: conStr } = await createDockerDB(); connectionString = conStr; } const sleep = 250; let timeLeft = 5000; let connected = false; let lastError: unknown | undefined; do { try { client = createClient({ connectionString }); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.log(connectionString); console.error('Cannot connect to Postgres'); await client?.end().catch(console.error); // await pgContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); afterAll(async () => { await client?.end(); }); beforeEach((ctx) => { ctx.pg = { db, }; ctx.cachedPg = { db: cachedDb, dbGlobalCached, }; }); test('migrator : default migration strategy', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); test('migrator : migrate with custom schema', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsSchema: 'custom_migrations' }); // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from custom_migrations."__drizzle_migrations";`); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table custom_migrations."__drizzle_migrations"`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); // test if the custom migrations table was created const { rowCount } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); test('migrator : migrate with custom table and custom schema', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable, migrationsSchema: 'custom_migrations', }); // test if the custom migrations table was created const { rowCount } = await db.execute( sql`select * from custom_migrations.${sql.identifier(customTable)};`, ); expect(rowCount && rowCount > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table custom_migrations.${sql.identifier(customTable)}`); }); test('all date and time columns without timezone first case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format without timezone in it await db.insert(table).values([ { timestamp: '2022-01-01 02:00:00.123456' }, ]); // 2, Select in string format and check that values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); // 3. Select as raw query and check that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone second case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: '2022-01-01T02:00:00.123456-02' }, ]); // 2, Select as raw query and check that values are the same const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone third case mode date', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) not null ) `); const insertedDate = new Date('2022-01-01 20:00:00.123+04'); // 1. Insert date as new date await db.insert(table).values([ { timestamp: insertedDate }, ]); // 2, Select as raw query as string const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC expect(new Date(result.rows[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode date for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); const timestampString = new Date('2022-01-01 00:00:00.456-0200'); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: timestampString }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in UTC timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to UTC await db.execute(sql`set time zone 'UTC'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in different timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to HST (UTC - 10) await db.execute(sql`set time zone 'HST'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-1000'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2.rows).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); await db.execute(sql`set time zone '${sql.raw(timezone.rows[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); skipTests([ 'migrator : default migration strategy', 'migrator : migrate with custom schema', 'migrator : migrate with custom table', 'migrator : migrate with custom table and custom schema', 'insert via db.execute + select via db.execute', 'insert via db.execute + returning', 'insert via db.execute w/ query builder', 'all date and time columns without timezone first case mode string', 'all date and time columns without timezone third case mode date', 'test mode string for timestamp with timezone', 'test mode date for timestamp with timezone', 'test mode string for timestamp with timezone in UTC timezone', 'test mode string for timestamp with timezone in different timezone', 'build query insert with onConflict do nothing + target', // 'select from tables with same name from different schema using alias', // ]); tests(); cacheTests(); beforeEach(async () => { await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`create schema public`); await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); }); test('insert via db.execute + select via db.execute', async () => { await db.execute( sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, ); const result = await db.execute<{ id: number; name: string }>( sql`select id, name from "users"`, ); expect(result.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute + returning', async () => { const inserted = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${ sql.identifier( usersTable.name.name, ) }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute>( db .insert(usersTable) .values({ name: 'John' }) .returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted.rows).toEqual([{ id: 1, name: 'John' }]); }); ================================================ FILE: integration-tests/tests/pg/xata-http.test.ts ================================================ import retry from 'async-retry'; import { sql } from 'drizzle-orm'; import { pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; import { drizzle } from 'drizzle-orm/xata-http'; import type { XataHttpClient, XataHttpDatabase } from 'drizzle-orm/xata-http'; import { migrate } from 'drizzle-orm/xata-http/migrator'; import { beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { getXataClient } from '../xata/xata.ts'; import { tests, tests as cacheTests, usersMigratorTable, usersTable } from './pg-common'; import { TestCache, TestGlobalCache } from './pg-common-cache.ts'; const ENABLE_LOGGING = false; let db: XataHttpDatabase; let dbGlobalCached: XataHttpDatabase; let cachedDb: XataHttpDatabase; let client: XataHttpClient; beforeAll(async () => { const apiKey = process.env['XATA_API_KEY']; if (!apiKey) { throw new Error('XATA_API_KEY is not defined'); } client = await retry(async () => { client = getXataClient(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, }); db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); beforeEach((ctx) => { ctx.pg = { db, }; ctx.cachedPg = { db: cachedDb, dbGlobalCached, }; }); test('migrator : default migration strategy', async () => { await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle"."__drizzle_migrations"`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.execute(sql`drop table if exists all_columns`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists "drizzle"."__drizzle_migrations"`); await migrate(db, { migrationsFolder: './drizzle2/pg', migrationsTable: customTable }); // test if the custom migrations table was created const { records } = await db.execute(sql`select * from "drizzle".${sql.identifier(customTable)};`); expect(records && records.length > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table all_columns`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table "drizzle".${sql.identifier(customTable)}`); }); test('all date and time columns without timezone first case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format without timezone in it await db.insert(table).values([ { timestamp: '2022-01-01 02:00:00.123456' }, ]); // 2, Select in string format and check that values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456' }]); // 3. Select as raw query and check that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone second case mode string', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) not null ) `); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: '2022-01-01T02:00:00.123456-02' }, ]); // 2, Select as raw query and check that values are the same const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456' }]); await db.execute(sql`drop table if exists ${table}`); }); test('all date and time columns without timezone third case mode date', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) not null ) `); const insertedDate = new Date('2022-01-01 20:00:00.123+04'); // 1. Insert date as new date await db.insert(table).values([ { timestamp: insertedDate }, ]); // 2, Select as raw query as string const result = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3. Compare both dates using orm mapping - Need to add 'Z' to tell JS that it is UTC expect(new Date(result.records[0]!.timestamp_string + 'Z').getTime()).toBe(insertedDate.getTime()); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode date for timestamp with timezone', async () => { const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(3) with time zone not null ) `); const timestampString = new Date('2022-01-01 00:00:00.456-0200'); // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: timestampString }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.456+00' }]); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in UTC timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to UTC await db.execute(sql`set time zone 'UTC'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-0200'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); // 2.1 Notice that postgres will return the date in UTC, but it is exactly the same expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 02:00:00.123456+00' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); // 3.1 Notice that postgres will return the date in UTC, but it is exactlt the same expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 02:00:00.123456+00' }]); await db.execute(sql`set time zone '${sql.raw(timezone.records[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); test('test mode string for timestamp with timezone in different timezone', async () => { // get current timezone from db const timezone = await db.execute<{ TimeZone: string }>(sql`show timezone`); // set timezone to HST (UTC - 10) await db.execute(sql`set time zone 'HST'`); const table = pgTable('all_columns', { id: serial('id').primaryKey(), timestamp: timestamp('timestamp_string', { mode: 'string', withTimezone: true, precision: 6 }).notNull(), }); await db.execute(sql`drop table if exists ${table}`); await db.execute(sql` create table ${table} ( id serial primary key, timestamp_string timestamp(6) with time zone not null ) `); const timestampString = '2022-01-01 00:00:00.123456-1000'; // 1. Insert date in string format with timezone in it await db.insert(table).values([ { timestamp: timestampString }, ]); // 2. Select date in string format and check that the values are the same const result = await db.select().from(table); expect(result).toEqual([{ id: 1, timestamp: '2022-01-01 00:00:00.123456-10' }]); // 3. Select as raw query and checke that values are the same const result2 = await db.execute<{ id: number; timestamp_string: string; }>(sql`select * from ${table}`); expect(result2.records).toEqual([{ id: 1, timestamp_string: '2022-01-01 00:00:00.123456-10' }]); await db.execute(sql`set time zone '${sql.raw(timezone.records[0]!.TimeZone)}'`); await db.execute(sql`drop table if exists ${table}`); }); skipTests([ 'migrator : default migration strategy', 'migrator : migrate with custom schema', 'migrator : migrate with custom table', 'migrator : migrate with custom table and custom schema', 'insert via db.execute + select via db.execute', 'insert via db.execute + returning', 'insert via db.execute w/ query builder', 'all date and time columns without timezone first case mode string', 'all date and time columns without timezone third case mode date', 'test mode string for timestamp with timezone', 'test mode date for timestamp with timezone', 'test mode string for timestamp with timezone in UTC timezone', 'test mode string for timestamp with timezone in different timezone', 'view', 'materialized view', 'select from enum', 'subquery with view', ]); tests(); cacheTests(); beforeEach(async () => { await db.execute(sql`drop schema if exists public cascade`); await db.execute(sql`create schema public`); await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb jsonb, created_at timestamptz not null default now() ) `, ); }); test('insert via db.execute + select via db.execute', async () => { await db.execute( sql`insert into ${usersTable} (${sql.identifier(usersTable.name.name)}) values (${'John'})`, ); const result = await db.execute<{ id: number; name: string }>( sql`select id, name from "users"`, ); expect(result.records).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute + returning', async () => { const inserted = await db.execute<{ id: number; name: string }>( sql`insert into ${usersTable} (${ sql.identifier( usersTable.name.name, ) }) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted.records).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute>( db .insert(usersTable) .values({ name: 'John' }) .returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted.records).toEqual([{ id: 1, name: 'John' }]); }); ================================================ FILE: integration-tests/tests/relational/bettersqlite.test.ts ================================================ import 'dotenv/config'; import Database from 'better-sqlite3'; import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; import { type BetterSQLite3Database, drizzle } from 'drizzle-orm/better-sqlite3'; import { beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; import * as schema from './sqlite.schema.ts'; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable } = schema; const ENABLE_LOGGING = false; /* Test cases: - querying nested relation without PK with additional fields */ let db: BetterSQLite3Database; beforeAll(() => { const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; db = drizzle(new Database(dbPath), { schema, logger: ENABLE_LOGGING }); }); beforeEach(() => { db.run(sql`drop table if exists \`groups\``); db.run(sql`drop table if exists \`users\``); db.run(sql`drop table if exists \`users_to_groups\``); db.run(sql`drop table if exists \`posts\``); db.run(sql`drop table if exists \`comments\``); db.run(sql`drop table if exists \`comment_likes\``); db.run( sql` CREATE TABLE \`users\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`verified\` integer DEFAULT 0 NOT NULL, \`invited_by\` integer ); `, ); db.run( sql` CREATE TABLE \`groups\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`description\` text ); `, ); db.run( sql` CREATE TABLE \`users_to_groups\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`user_id\` integer NOT NULL, \`group_id\` integer NOT NULL ); `, ); db.run( sql` CREATE TABLE \`posts\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`content\` text NOT NULL, \`owner_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); db.run( sql` CREATE TABLE \`comments\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`content\` text NOT NULL, \`creator\` integer, \`post_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); db.run( sql` CREATE TABLE \`comment_likes\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`creator\` integer, \`comment_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); }); /* [Find Many] One relation users+posts */ test('[Find Many] Get users with posts', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ with: { posts: true, }, }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ with: { posts: { limit: 1, }, }, }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts and users', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ limit: 2, with: { posts: { limit: 1, }, }, }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + custom fields', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, lowerName: 'dan', posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', lowerName: 'andrew', verified: 0, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { id: 5, ownerId: 2, content: 'Post2.1', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', lowerName: 'alex', verified: 0, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { id: 7, ownerId: 3, content: 'Post3.1', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }], }); }); test('[Find Many] Get users with posts + custom fields + limits', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ limit: 1, with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + orderBy', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts[0]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: expect.anything(), }); expect(usersWithPosts[0]!.posts).toEqual([ { id: 7, ownerId: 3, content: '7', createdAt: expect.any(Date) }, { id: 6, ownerId: 3, content: '6', createdAt: expect.any(Date) }, ]); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: expect.anything(), }); expect(usersWithPosts[1]!.posts).toEqual([ { id: 5, ownerId: 2, content: '5', createdAt: expect.any(Date) }, { id: 4, ownerId: 2, content: '4', createdAt: expect.any(Date) }, ]); expect(usersWithPosts[2]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: expect.anything(), }); expect(usersWithPosts[2]!.posts).toEqual([ { id: 3, ownerId: 1, content: '3', createdAt: expect.any(Date) }, { id: 2, ownerId: 1, content: '2', createdAt: expect.any(Date) }, { id: 1, ownerId: 1, content: '1', createdAt: expect.any(Date) }, ]); }); test('[Find Many] Get users with posts + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where + partial', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial(true + false)', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; posts: { id: number; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find Many] Get users with posts + where + partial(false)', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in transaction', () => { let usersWithPosts: { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; db.transaction((tx) => { tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); usersWithPosts = tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }).sync(); }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in rollbacked transaction', () => { let usersWithPosts: { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; expect(() => db.transaction((tx) => { tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); tx.rollback(); usersWithPosts = tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }).sync(); }) ).toThrow(TransactionRollbackError); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(0); }); // select only custom test('[Find Many] Get only custom fields', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 1, content: 'Post1.2' }, { id: 3, ownerId: 1, content: 'Post1.3' }, { id: 4, ownerId: 2, content: 'Post2' }, { id: 5, ownerId: 2, content: 'Post2.1' }, { id: 6, ownerId: 3, content: 'Post3' }, { id: 7, ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts[0]?.lowerName).toEqual('dan'); expect(usersWithPosts[1]?.lowerName).toEqual('andrew'); expect(usersWithPosts[2]?.lowerName).toEqual('alex'); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1', }); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1.2', }); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1.3', }); expect(usersWithPosts[1]?.posts).toContainEqual({ lowerName: 'post2', }); expect(usersWithPosts[1]?.posts).toContainEqual({ lowerName: 'post2.1', }); expect(usersWithPosts[2]?.posts).toContainEqual({ lowerName: 'post3', }); expect(usersWithPosts[2]?.posts).toContainEqual({ lowerName: 'post3.1', }); }); test('[Find Many] Get only custom fields + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find Many] Get only custom fields + where + limit', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find Many] Get only custom fields + where + orderBy', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // select only custom find one test('[Find One] Get only custom fields', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(3); expect(usersWithPosts?.lowerName).toEqual('dan'); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1', }); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1.2', }); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1.3', }); }); test('[Find One] Get only custom fields + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find One] Get only custom fields + where + limit', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find One] Get only custom fields + where + orderBy', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // columns {} test('[Find Many] Get select {}', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); expect(() => db.query.usersTable.findMany({ columns: {}, }).sync() ).toThrow(DrizzleError); }); // columns {} test('[Find One] Get select {}', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); expect(() => db.query.usersTable.findFirst({ columns: {}, }).sync() ).toThrow(DrizzleError); }); // deep select {} test('[Find Many] Get deep select {}', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); expect(() => db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, }, }, }).sync() ).toThrow(DrizzleError); }); // deep select {} test('[Find One] Get deep select {}', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); expect(() => db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, }, }, }).sync() ).toThrow(DrizzleError); }); /* Prepared statements for users+posts */ test('[Find Many] Get users with posts + prepared limit', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const prepared = db.query.usersTable.findMany({ with: { posts: { limit: placeholder('limit'), }, }, }).prepare(); const usersWithPosts = prepared.execute({ limit: 1 }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared limit + offset', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), with: { posts: { limit: placeholder('pLimit'), }, }, }).prepare(); const usersWithPosts = prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const prepared = db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, placeholder('id'))), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }).prepare(); const usersWithPosts = prepared.execute({ id: 1 }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared + limit + offset + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), where: (({ id }, { eq, or }) => or(eq(id, placeholder('id')), eq(id, 3))), with: { posts: { where: (({ id }, { eq }) => eq(id, placeholder('pid'))), limit: placeholder('pLimit'), }, }, }).prepare(); const usersWithPosts = prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); /* [Find One] One relation users+posts */ test('[Find One] Get users with posts', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ with: { posts: true, }, }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + limit posts', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts no results found', () => { const usersWithPosts = db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts).toBeUndefined(); }); test('[Find One] Get users with posts + limit posts and users', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + custom fields', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(3); expect(usersWithPosts?.lowerName).toEqual('dan'); expect(usersWithPosts?.id).toEqual(1); expect(usersWithPosts?.verified).toEqual(0); expect(usersWithPosts?.invitedBy).toEqual(null); expect(usersWithPosts?.name).toEqual('Dan'); expect(usersWithPosts?.posts).toContainEqual({ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt, }); expect(usersWithPosts?.posts).toContainEqual({ id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts?.posts[1]?.createdAt, }); expect(usersWithPosts?.posts).toContainEqual({ id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts?.posts[2]?.createdAt, }); }); test('[Find One] Get users with posts + custom fields + limits', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test.skip('[Find One] Get users with posts + orderBy', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.id)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(2); expect(usersWithPosts).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where + partial', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial(true + false)', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; posts: { id: number; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find One] Get users with posts + where + partial(false)', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = db.query.usersTable.findFirst({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }).sync(); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); /* One relation users+users. Self referencing */ test('Get user with invitee', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = db.query.usersTable.findMany({ with: { invitee: true, }, }).sync(); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, }); }); test('Get user + limit with invitee', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew', invitedBy: 1 }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = db.query.usersTable.findMany({ with: { invitee: true, }, limit: 2, }).sync(); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, }); }); test('Get user with invitee and custom fields', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }).sync(); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: number; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', lower: 'john', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null }, }); }); test('Get user with invitee and custom fields + limits', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), limit: 3, with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }).sync(); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: number; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(3); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null }, }); }); test('Get user with invitee + order by', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, }, }).sync(); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[3]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, }); expect(usersWithInvitee[0]).toEqual({ id: 4, name: 'John', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, }); }); test('Get user with invitee + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), with: { invitee: true, }, }).sync(); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, }); }); test('Get user with invitee + where + partial', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }).sync(); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial. Did not select users id, but used it in where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }).sync(); expectTypeOf(usersWithInvitee).toEqualTypeOf< { name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(true+false)', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, verified: false, }, with: { invitee: { columns: { id: true, name: true, verified: false, }, }, }, }).sync(); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(false)', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { verified: false, }, with: { invitee: { columns: { name: false, }, }, }, }).sync(); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitedBy: number | null; invitee: { id: number; verified: number; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitedBy: 1, invitee: { id: 1, verified: 0, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitedBy: 2, invitee: { id: 2, verified: 0, invitedBy: null }, }); }); /* Two first-level relations users+users and users+posts */ test('Get user with invitee and posts', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const response = db.query.usersTable.findMany({ with: { invitee: true, posts: true, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 4, name: 'John', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const response = db.query.usersTable.findMany({ limit: 3, with: { invitee: true, posts: { limit: 1, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + limits + custom fields in each', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const response = db.query.usersTable.findMany({ limit: 3, extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_invitee_name') }), }, posts: { limit: 1, extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_content') }), }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: number; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + custom fields in each', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const response = db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), }, posts: { extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_name') }), }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: number; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); response[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); response[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); response[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(2); expect(response[1]?.posts.length).eq(2); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(0); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.1', lower: 'post1.1', createdAt: response[0]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { id: 4, ownerId: 2, content: 'Post2.1', lower: 'post2.1', createdAt: response[1]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { id: 6, ownerId: 3, content: 'Post3.1', lower: 'post3.1', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 4, name: 'John', lower: 'john', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null }, posts: [], }); }); // TODO Check order test.skip('Get user with invitee and posts + orderBy', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]).run(); const response = db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, posts: { orderBy: (posts, { desc }) => [desc(posts.id)], }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(4); expect(response[3]?.invitee).toBeNull(); expect(response[2]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(2); expect(response[3]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { id: 1, ownerId: 1, content: 'Post1', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[2]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { id: 3, ownerId: 2, content: 'Post2', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response[1]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const response = db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 2), eq(users.id, 3))), with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 2)), }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(2); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(0); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const response = db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), limit: 1, with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 3)), limit: 1, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(1); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + orderBy + where + custom', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]).run(); const response = db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { invitee: true, posts: { where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; lower: string; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[1]?.posts[0]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy + where + partial + custom', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]).run(); const response = db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, }, posts: { columns: { id: true, content: true, }, where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; lower: string; posts: { id: number; lower: string; content: string }[]; invitee: { id: number; name: string; lower: string; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', invitee: { id: 1, name: 'Dan', lower: 'dan' }, posts: [{ id: 5, content: 'Post3', lower: 'post3', }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, posts: [], }); }); /* One two-level relation users+posts+comments */ test('Get user with posts and posts with comments', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]).run(); db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]).run(); const response = db.query.usersTable.findMany({ with: { posts: { with: { comments: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; }[]; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); // expect(response[2]).toEqual({ // id: 3, // name: 'Alex', // verified: 0, // invitedBy: null, // posts: [{ // id: 3, // ownerId: 3, // content: 'Post3', // createdAt: response[2]?.posts[0]?.createdAt, // comments: [ // { // id: , // content: 'Comment3', // creator: 3, // postId: 3, // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); }); // Get user with limit posts and limit comments // Get user with custom field + post + comment with custom field // Get user with limit + posts orderBy + comment orderBy // Get user with where + posts where + comment where // Get user with where + posts partial where + comment where // Get user with where + posts partial where + comment partial(false) where // Get user with where partial(false) + posts partial where partial(false) + comment partial(false+true) where // Get user with where + posts partial where + comment where. Didn't select field from where in posts // Get user with where + posts partial where + comment where. Didn't select field from where for all // Get with limit+offset in each /* One two-level + One first-level relation users+posts+comments and users+users */ /* One three-level relation users+posts+comments+comment_owner */ test('Get user with posts and posts with comments and comments with owner', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]).run(); db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]).run(); const response = db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[]; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, author: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, author: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); test('Get user with posts and posts with comments and comments with owner where exists', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]).run(); db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]).run(); const response = db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, where: (table, { exists, eq }) => exists(db.select({ one: sql`1` }).from(usersTable).where(eq(sql`1`, table.id))), }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[]; }[]; }[]>(); expect(response.length).eq(1); expect(response[0]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, author: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); /* One three-level relation + 1 first-level relatioon 1. users+posts+comments+comment_owner 2. users+users */ /* One four-level relation users+posts+comments+coment_likes */ /* [Find Many] Many-to-many cases Users+users_to_groups+groups */ test('[Find Many] Get users with groups', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findMany({ with: { usersToGroups: { columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, usersToGroups: expect.arrayContaining([{ group: { id: 2, name: 'Group2', description: null, }, }, { group: { id: 3, name: 'Group3', description: null, }, }]), }); }); test('[Find Many] Get groups with users', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findMany({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findMany({ limit: 1, where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findMany({ limit: 1, where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findMany({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(0); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findMany({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(0); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [], }); }); test('[Find Many] Get users with groups + orderBy', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(2); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + orderBy', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: 0, invitedBy: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }, { user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + orderBy + limit', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); /* [Find One] Many-to-many cases Users+users_to_groups+groups */ test('[Find One] Get users with groups', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findFirst({ with: { usersToGroups: { columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findFirst({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: 0, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: 0, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(0); expect(response).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [], }); }); test('[Find One] Get groups with users + where', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(2); expect(response).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find One] Get groups with users + orderBy', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findFirst({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy + limit', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); test('Get groups with users + orderBy + limit', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[] >(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); }); test('Get users with groups + custom', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.usersTable.findMany({ extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { usersToGroups: { columns: {}, with: { group: { extras: { lower: sql`lower(${groupsTable.name})`.as('lower_name'), }, }, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; lower: string; usersToGroups: { group: { id: number; name: string; description: string | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); response[0]?.usersToGroups.sort((a, b) => (a.group.id > b.group.id) ? 1 : -1); response[1]?.usersToGroups.sort((a, b) => (a.group.id > b.group.id) ? 1 : -1); response[2]?.usersToGroups.sort((a, b) => (a.group.id > b.group.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', lower: 'group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }, { group: { id: 3, name: 'Group3', lower: 'group3', description: null, }, }], }); }); test('Get groups with users + custom', () => { db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = db.query.groupsTable.findMany({ extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), with: { usersToGroups: { columns: {}, with: { user: { extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), }, }, }, }, }).sync(); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; lower: string; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', lower: 'group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', lower: 'group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', lower: 'group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: null, }, }], }); }); test('async api', async () => { await db.insert(usersTable).values([{ id: 1, name: 'Dan' }]); const users = await db.query.usersTable.findMany(); expect(users).toEqual([{ id: 1, name: 'Dan', verified: 0, invitedBy: null }]); }); test('async api - sync()', () => { db.insert(usersTable).values([{ id: 1, name: 'Dan' }]).run(); const users = db.query.usersTable.findMany().sync(); expect(users).toEqual([{ id: 1, name: 'Dan', verified: 0, invitedBy: null }]); }); test('async api - prepare', async () => { const insertStmt = db.insert(usersTable).values([{ id: 1, name: 'Dan' }]).prepare(); await insertStmt.execute(); const queryStmt = db.query.usersTable.findMany().prepare(); const users = await queryStmt.execute(); expect(users).toEqual([{ id: 1, name: 'Dan', verified: 0, invitedBy: null }]); }); test('async api - sync() + prepare', () => { const insertStmt = db.insert(usersTable).values([{ id: 1, name: 'Dan' }]).prepare(); insertStmt.execute().sync(); const queryStmt = db.query.usersTable.findMany().prepare(); const users = queryStmt.execute().sync(); expect(users).toEqual([{ id: 1, name: 'Dan', verified: 0, invitedBy: null }]); }); test('.toSQL()', () => { const query = db.query.usersTable.findFirst().toSQL(); expect(query).toHaveProperty('sql', expect.any(String)); expect(query).toHaveProperty('params', expect.any(Array)); }); // + custom + where + orderby // + custom + where + orderby + limit // + partial // + partial(false) // + partial + orderBy + where (all not selected) /* One four-level relation users+posts+comments+coment_likes + users+users_to_groups+groups */ /* Really hard case 1. users+posts+comments+coment_likes 2. users+users_to_groups+groups 3. users+users */ ================================================ FILE: integration-tests/tests/relational/db.ts ================================================ import 'dotenv/config'; import Database from 'better-sqlite3'; import { sql } from 'drizzle-orm'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import util from 'node:util'; import * as schema from './tables.ts'; async function main() { const bdb = new Database(process.env['SQLITE_DB_PATH']!); const db = drizzle(bdb, { schema, logger: true }); const result = db.query.users.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { authorId: true, }, with: { comments: true, }, extras: { lower: sql`lower(${schema.posts.title})`.as('lower_name'), }, }, }, }); console.log(util.inspect(result, false, null, true)); bdb.close(); } main(); ================================================ FILE: integration-tests/tests/relational/issues-schemas/duplicates/mysql/mysql.duplicates.test.ts ================================================ import 'dotenv/config'; import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import { drizzle, type MySql2Database } from 'drizzle-orm/mysql2'; import getPort from 'get-port'; import * as mysql from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; import * as schema from './mysql.duplicates.ts'; const ENABLE_LOGGING = false; /* Test cases: - querying nested relation without PK with additional fields */ let mysqlContainer: Docker.Container; let db: MySql2Database; let client: mysql.Connection; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } beforeAll(async () => { const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? await createDockerDB(); const sleep = 1000; let timeLeft = 30000; let connected = false; let lastError: unknown | undefined; do { try { client = await mysql.createConnection(connectionString); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { schema, logger: ENABLE_LOGGING, mode: 'default' }); }); afterAll(async () => { await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); }); beforeEach(async () => { await db.execute(sql`drop table if exists \`members\``); await db.execute(sql`drop table if exists \`artist_to_member\``); await db.execute(sql`drop table if exists \`artists\``); await db.execute(sql`drop table if exists \`albums\``); await db.execute( sql` CREATE TABLE \`members\` ( \`id\` serial AUTO_INCREMENT PRIMARY KEY NOT NULL, \`created_at\` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, \`updated_at\` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, \`name_en\` varchar(50) NOT NULL, \`name_kr\` varchar(50) NOT NULL, \`stage_name_en\` varchar(50) NOT NULL, \`stage_name_kr\` varchar(50) NOT NULL, \`image\` varchar(255) NOT NULL, \`instagram\` varchar(255) NOT NULL); `, ); await db.execute( sql` CREATE TABLE \`artist_to_member\` ( \`id\` serial AUTO_INCREMENT PRIMARY KEY NOT NULL, \`member_id\` int NOT NULL, \`artist_id\` int NOT NULL); `, ); await db.execute( sql` CREATE TABLE \`artists\` ( \`id\` serial AUTO_INCREMENT PRIMARY KEY NOT NULL, \`created_at\` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, \`updated_at\` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, \`name_en\` varchar(50) NOT NULL, \`name_kr\` varchar(50) NOT NULL, \`debut\` date NOT NULL, \`company_id\` int NOT NULL, \`is_group\` boolean NOT NULL DEFAULT true, \`image\` varchar(255) NOT NULL, \`twitter\` varchar(255) NOT NULL, \`instagram\` varchar(255) NOT NULL, \`youtube\` varchar(255) NOT NULL, \`website\` varchar(255) NOT NULL, \`spotify_id\` varchar(32)); `, ); await db.execute( sql` CREATE TABLE \`albums\` ( \`id\` serial AUTO_INCREMENT PRIMARY KEY NOT NULL, \`created_at\` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, \`updated_at\` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, \`artist_id\` int NOT NULL, \`name\` varchar(50) NOT NULL, \`region\` enum('en','kr','jp','other') NOT NULL, \`release_date\` date NOT NULL, \`image\` varchar(255) NOT NULL, \`spotify_id\` varchar(32)); `, ); }); test('Simple case from GH', async () => { await db.insert(schema.artists).values([ { id: 1, nameEn: 'Dan', nameKr: '', debut: new Date(), companyId: 1, image: '', twitter: '', instagram: '', youtube: '', website: '', }, { id: 2, nameEn: 'Andrew', nameKr: '', debut: new Date(), companyId: 1, image: '', twitter: '', instagram: '', youtube: '', website: '', }, { id: 3, nameEn: 'Alex', nameKr: '', debut: new Date(), companyId: 1, image: '', twitter: '', instagram: '', youtube: '', website: '', }, ]); await db.insert(schema.albums).values([ { id: 1, artistId: 1, name: 'Album1', region: 'en', releaseDate: new Date(), image: '' }, { id: 2, artistId: 2, name: 'Album2', region: 'en', releaseDate: new Date(), image: '' }, { id: 3, artistId: 3, name: 'Album3', region: 'en', releaseDate: new Date(), image: '' }, ]); await db.insert(schema.members).values([ { id: 1, nameEn: 'MemberA', nameKr: '', stageNameEn: '', stageNameKr: '', image: '', instagram: '' }, { id: 2, nameEn: 'MemberB', nameKr: '', stageNameEn: '', stageNameKr: '', image: '', instagram: '' }, { id: 3, nameEn: 'MemberC', nameKr: '', stageNameEn: '', stageNameKr: '', image: '', instagram: '' }, ]); await db.insert(schema.artistsToMembers).values([ { memberId: 1, artistId: 1 }, { memberId: 2, artistId: 1 }, { memberId: 2, artistId: 2 }, { memberId: 3, artistId: 3 }, ]); const response = await db.query.artists.findFirst({ where: (artists, { eq }) => eq(artists.id, 1), with: { albums: true, members: { columns: {}, with: { member: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; createdAt: Date; updatedAt: Date; nameEn: string; nameKr: string; debut: Date; companyId: number; isGroup: boolean; image: string; twitter: string; instagram: string; youtube: string; website: string; spotifyId: string | null; members: { member: { id: number; createdAt: Date; updatedAt: Date; nameEn: string; nameKr: string; image: string; instagram: string; stageNameEn: string; stageNameKr: string; }; }[]; albums: { id: number; name: string; createdAt: Date; updatedAt: Date; image: string; spotifyId: string | null; artistId: number; region: 'en' | 'kr' | 'jp' | 'other'; releaseDate: Date; }[]; } | undefined >(); expect(response?.members.length).eq(2); expect(response?.albums.length).eq(1); expect(response?.albums[0]).toEqual({ id: 1, createdAt: response?.albums[0]?.createdAt, updatedAt: response?.albums[0]?.updatedAt, artistId: 1, name: 'Album1', region: 'en', releaseDate: response?.albums[0]?.releaseDate, image: '', spotifyId: null, }); }); ================================================ FILE: integration-tests/tests/relational/issues-schemas/duplicates/mysql/mysql.duplicates.ts ================================================ import { relations, sql } from 'drizzle-orm'; import { boolean, date, index, int, mysqlEnum, mysqlTable, serial, timestamp, varchar } from 'drizzle-orm/mysql-core'; export const artists = mysqlTable( 'artists', { id: serial('id').primaryKey(), createdAt: timestamp('created_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), updatedAt: timestamp('updated_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), nameEn: varchar('name_en', { length: 50 }).notNull(), nameKr: varchar('name_kr', { length: 50 }).notNull(), debut: date('debut').notNull(), companyId: int('company_id').notNull(), isGroup: boolean('is_group').notNull().default(true), image: varchar('image', { length: 255 }).notNull(), twitter: varchar('twitter', { length: 255 }).notNull(), instagram: varchar('instagram', { length: 255 }).notNull(), youtube: varchar('youtube', { length: 255 }).notNull(), website: varchar('website', { length: 255 }).notNull(), spotifyId: varchar('spotify_id', { length: 32 }), }, (table) => ({ nameEnIndex: index('artists__name_en__idx').on(table.nameEn), }), ); export const members = mysqlTable('members', { id: serial('id').primaryKey(), createdAt: timestamp('created_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), updatedAt: timestamp('updated_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), nameEn: varchar('name_en', { length: 50 }).notNull(), nameKr: varchar('name_kr', { length: 50 }).notNull(), stageNameEn: varchar('stage_name_en', { length: 50 }).notNull(), stageNameKr: varchar('stage_name_kr', { length: 50 }).notNull(), image: varchar('image', { length: 255 }).notNull(), instagram: varchar('instagram', { length: 255 }).notNull(), }); export const artistsToMembers = mysqlTable( 'artist_to_member', { id: serial('id').primaryKey(), memberId: int('member_id').notNull(), artistId: int('artist_id').notNull(), }, (table) => ({ memberArtistIndex: index('artist_to_member__artist_id__member_id__idx').on( table.memberId, table.artistId, ), }), ); export const albums = mysqlTable( 'albums', { id: serial('id').primaryKey(), createdAt: timestamp('created_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), updatedAt: timestamp('updated_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), artistId: int('artist_id').notNull(), name: varchar('name', { length: 50 }).notNull(), region: mysqlEnum('region', ['en', 'kr', 'jp', 'other']).notNull(), releaseDate: date('release_date').notNull(), image: varchar('image', { length: 255 }).notNull(), spotifyId: varchar('spotify_id', { length: 32 }), }, (table) => ({ artistIndex: index('albums__artist_id__idx').on(table.artistId), nameIndex: index('albums__name__idx').on(table.name), }), ); // relations export const artistRelations = relations(artists, ({ many }) => ({ albums: many(albums), members: many(artistsToMembers), })); export const albumRelations = relations(albums, ({ one }) => ({ artist: one(artists, { fields: [albums.artistId], references: [artists.id], }), })); export const memberRelations = relations(members, ({ many }) => ({ artists: many(artistsToMembers), })); export const artistsToMembersRelations = relations(artistsToMembers, ({ one }) => ({ artist: one(artists, { fields: [artistsToMembers.artistId], references: [artists.id], }), member: one(members, { fields: [artistsToMembers.memberId], references: [members.id], }), })); ================================================ FILE: integration-tests/tests/relational/issues-schemas/duplicates/pg/pg.duplicates.test.ts ================================================ import 'dotenv/config'; import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import { drizzle, type NodePgDatabase } from 'drizzle-orm/node-postgres'; import getPort from 'get-port'; import pg from 'pg'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; import * as schema from './pg.duplicates.ts'; const { Client } = pg; const ENABLE_LOGGING = false; /* Test cases: - querying nested relation without PK with additional fields */ let pgContainer: Docker.Container; let db: NodePgDatabase; let client: pg.Client; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 5432 }); const image = 'postgres:14'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); pgContainer = await docker.createContainer({ Image: image, Env: [ 'POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres', ], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '5432/tcp': [{ HostPort: `${port}` }], }, }, }); await pgContainer.start(); return `postgres://postgres:postgres@localhost:${port}/postgres`; } beforeAll(async () => { const connectionString = process.env['PG_CONNECTION_STRING'] ?? (await createDockerDB()); const sleep = 250; let timeLeft = 5000; let connected = false; let lastError: unknown | undefined; do { try { client = new Client(connectionString); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to Postgres'); await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { schema, logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); }); beforeEach(async () => { await db.execute(sql`drop table if exists "members"`); await db.execute(sql`drop table if exists "artist_to_member"`); await db.execute(sql`drop table if exists "artists"`); await db.execute(sql`drop table if exists "albums"`); await db.execute( sql` CREATE TABLE "members" ( "id" serial PRIMARY KEY NOT NULL, "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ); `, ); await db.execute( sql` CREATE TABLE "artist_to_member" ( "id" serial PRIMARY KEY NOT NULL, "member_id" int NOT NULL, "artist_id" int NOT NULL); `, ); await db.execute( sql` CREATE TABLE "artists" ( "id" serial PRIMARY KEY NOT NULL, "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "company_id" int NOT NULL); `, ); await db.execute( sql` CREATE TABLE "albums" ( "id" serial PRIMARY KEY NOT NULL, "created_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "updated_at" timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, "artist_id" int NOT NULL); `, ); }); test('Simple case from GH', async () => { await db.insert(schema.artists).values([ { id: 1, companyId: 1, }, { id: 2, companyId: 1, }, { id: 3, companyId: 1, }, ]); await db.insert(schema.albums).values([ { id: 1, artistId: 1 }, { id: 2, artistId: 2 }, { id: 3, artistId: 3 }, ]); await db.insert(schema.members).values([ { id: 1 }, { id: 2 }, { id: 3 }, ]); await db.insert(schema.artistsToMembers).values([ { memberId: 1, artistId: 1 }, { memberId: 2, artistId: 1 }, { memberId: 2, artistId: 2 }, { memberId: 3, artistId: 3 }, ]); const response = await db.query.artists.findFirst({ where: (artists, { eq }) => eq(artists.id, 1), with: { albums: true, members: { columns: {}, with: { member: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; createdAt: Date; updatedAt: Date; companyId: number; albums: { id: number; createdAt: Date; updatedAt: Date; artistId: number; }[]; members: { member: { id: number; createdAt: Date; updatedAt: Date; }; }[]; } | undefined >(); expect(response?.members.length).eq(2); expect(response?.albums.length).eq(1); expect(response?.albums[0]).toEqual({ id: 1, createdAt: response?.albums[0]?.createdAt, updatedAt: response?.albums[0]?.updatedAt, artistId: 1, }); }); ================================================ FILE: integration-tests/tests/relational/issues-schemas/duplicates/pg/pg.duplicates.ts ================================================ import { relations, sql } from 'drizzle-orm'; import { index, integer, pgTable, serial, timestamp } from 'drizzle-orm/pg-core'; export const artists = pgTable( 'artists', { id: serial('id').primaryKey(), createdAt: timestamp('created_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), updatedAt: timestamp('updated_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), companyId: integer('company_id').notNull(), }, ); export const members = pgTable('members', { id: serial('id').primaryKey(), createdAt: timestamp('created_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), updatedAt: timestamp('updated_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), }); export const artistsToMembers = pgTable( 'artist_to_member', { id: serial('id').primaryKey(), memberId: integer('member_id').notNull(), artistId: integer('artist_id').notNull(), }, (table) => ({ memberArtistIndex: index('artist_to_member__artist_id__member_id__idx').on( table.memberId, table.artistId, ), }), ); export const albums = pgTable( 'albums', { id: serial('id').primaryKey(), createdAt: timestamp('created_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), updatedAt: timestamp('updated_at') .notNull() .default(sql`CURRENT_TIMESTAMP`), artistId: integer('artist_id').notNull(), }, (table) => ({ artistIndex: index('albums__artist_id__idx').on(table.artistId), }), ); // relations export const artistRelations = relations(artists, ({ many }) => ({ albums: many(albums), members: many(artistsToMembers), })); export const albumRelations = relations(albums, ({ one }) => ({ artist: one(artists, { fields: [albums.artistId], references: [artists.id], }), })); export const memberRelations = relations(members, ({ many }) => ({ artists: many(artistsToMembers), })); export const artistsToMembersRelations = relations(artistsToMembers, ({ one }) => ({ artist: one(artists, { fields: [artistsToMembers.artistId], references: [artists.id], }), member: one(members, { fields: [artistsToMembers.memberId], references: [members.id], }), })); ================================================ FILE: integration-tests/tests/relational/issues-schemas/wrong-mapping/pg.schema.ts ================================================ import { relations } from 'drizzle-orm'; import { boolean, integer, pgTable, primaryKey, text, uuid } from 'drizzle-orm/pg-core'; export const menuItems = pgTable('menu_items', { id: uuid('id').defaultRandom().primaryKey(), }); export const modifierGroups = pgTable('modifier_groups', { id: uuid('id').defaultRandom().primaryKey(), }); export const menuItemModifierGroups = pgTable( 'menu_item_modifier_groups', { menuItemId: uuid('menu_item_id') .notNull() .references(() => menuItems.id), modifierGroupId: uuid('modifier_group_id') .notNull() .references(() => modifierGroups.id), order: integer('order').default(0), }, (table) => ({ menuItemIdModifierGroupIdOrderPk: primaryKey( table.menuItemId, table.modifierGroupId, table.order, ), }), ); export const ingredients = pgTable('ingredients', { id: uuid('id').defaultRandom().primaryKey(), name: text('name').notNull(), description: text('description'), imageUrl: text('image_url'), inStock: boolean('in_stock').default(true), }); export const modifiers = pgTable('modifiers', { id: uuid('id').defaultRandom().primaryKey(), ingredientId: uuid('ingredient_id').references(() => ingredients.id), itemId: uuid('item_id').references(() => menuItems.id), }); export const menuItemIngredients = pgTable( 'menu_item_ingredients', { menuItemId: uuid('menu_item_id') .notNull() .references(() => menuItems.id), ingredientId: uuid('ingredient_id') .notNull() .references(() => ingredients.id), order: integer('order').default(0), }, (table) => ({ menuItemIdIngredientIdOrderPk: primaryKey( table.menuItemId, table.ingredientId, table.order, ), }), ); export const modifierGroupModifiers = pgTable( 'modifier_group_modifiers', { modifierGroupId: uuid('modifier_group_id') .notNull() .references(() => modifierGroups.id), modifierId: uuid('modifier_id') .notNull() .references(() => modifiers.id), order: integer('order').default(0), }, (table) => ({ modifierGroupIdModifierIdOrderPk: primaryKey( table.modifierGroupId, table.modifierId, table.order, ), }), ); export const menuItemRelations = relations(menuItems, ({ many }) => ({ ingredients: many(menuItemIngredients), modifierGroups: many(menuItemModifierGroups), // category: one(menuCategories, { // fields: [menuItems.categoryId], // references: [menuCategories.id], // }), })); export const menuItemIngredientRelations = relations( menuItemIngredients, ({ one }) => ({ menuItem: one(menuItems, { fields: [menuItemIngredients.menuItemId], references: [menuItems.id], }), ingredient: one(ingredients, { fields: [menuItemIngredients.ingredientId], references: [ingredients.id], }), }), ); export const ingredientRelations = relations(ingredients, ({ many }) => ({ menuItems: many(menuItemIngredients), })); export const modifierGroupRelations = relations(modifierGroups, ({ many }) => ({ menuItems: many(menuItemModifierGroups), modifiers: many(modifierGroupModifiers), })); export const modifierRelations = relations(modifiers, ({ one, many }) => ({ modifierGroups: many(modifierGroupModifiers), ingredient: one(ingredients, { fields: [modifiers.ingredientId], references: [ingredients.id], }), item: one(menuItems, { fields: [modifiers.itemId], references: [menuItems.id], }), })); export const menuItemModifierGroupRelations = relations( menuItemModifierGroups, ({ one }) => ({ menuItem: one(menuItems, { fields: [menuItemModifierGroups.menuItemId], references: [menuItems.id], }), modifierGroup: one(modifierGroups, { fields: [menuItemModifierGroups.modifierGroupId], references: [modifierGroups.id], }), }), ); export const modifierGroupModifierRelations = relations( modifierGroupModifiers, ({ one }) => ({ modifierGroup: one(modifierGroups, { fields: [modifierGroupModifiers.modifierGroupId], references: [modifierGroups.id], }), modifier: one(modifiers, { fields: [modifierGroupModifiers.modifierId], references: [modifiers.id], }), }), ); ================================================ FILE: integration-tests/tests/relational/issues-schemas/wrong-mapping/pg.test.ts ================================================ import 'dotenv/config'; import Docker from 'dockerode'; import { desc, sql } from 'drizzle-orm'; import { drizzle, type NodePgDatabase } from 'drizzle-orm/node-postgres'; import getPort from 'get-port'; import pg from 'pg'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expectTypeOf, test } from 'vitest'; import * as schema from './pg.schema.ts'; const { Client } = pg; const ENABLE_LOGGING = false; /* Test cases: - querying nested relation without PK with additional fields */ let pgContainer: Docker.Container; let db: NodePgDatabase; let client: pg.Client; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 5432 }); const image = 'postgres:14'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); pgContainer = await docker.createContainer({ Image: image, Env: [ 'POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres', ], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '5432/tcp': [{ HostPort: `${port}` }], }, }, }); await pgContainer.start(); return `postgres://postgres:postgres@localhost:${port}/postgres`; } beforeAll(async () => { const connectionString = process.env['PG_CONNECTION_STRING'] ?? (await createDockerDB()); const sleep = 250; let timeLeft = 5000; let connected = false; let lastError: unknown | undefined; do { try { client = new Client(connectionString); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to Postgres'); await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { schema, logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); }); beforeEach(async () => { await db.execute(sql`drop schema public cascade`); await db.execute(sql`create schema public`); await db.execute( sql` CREATE TABLE IF NOT EXISTS "ingredients" ( "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, "name" text NOT NULL, "description" text, "image_url" text, "in_stock" boolean DEFAULT true ); CREATE TABLE IF NOT EXISTS "menu_item_ingredients" ( "menu_item_id" uuid NOT NULL, "ingredient_id" uuid NOT NULL, "order" integer DEFAULT 0 ); ALTER TABLE "menu_item_ingredients" ADD CONSTRAINT "menu_item_ingredients_menu_item_id_ingredient_id_order" PRIMARY KEY("menu_item_id","ingredient_id","order"); CREATE TABLE IF NOT EXISTS "menu_item_modifier_groups" ( "menu_item_id" uuid NOT NULL, "modifier_group_id" uuid NOT NULL, "order" integer DEFAULT 0 ); ALTER TABLE "menu_item_modifier_groups" ADD CONSTRAINT "menu_item_modifier_groups_menu_item_id_modifier_group_id_order" PRIMARY KEY("menu_item_id","modifier_group_id","order"); CREATE TABLE IF NOT EXISTS "menu_items" ( "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL ); CREATE TABLE IF NOT EXISTS "modifier_group_modifiers" ( "modifier_group_id" uuid NOT NULL, "modifier_id" uuid NOT NULL, "order" integer DEFAULT 0 ); ALTER TABLE "modifier_group_modifiers" ADD CONSTRAINT "modifier_group_modifiers_modifier_group_id_modifier_id_order" PRIMARY KEY("modifier_group_id","modifier_id","order"); CREATE TABLE IF NOT EXISTS "modifier_groups" ( "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL ); CREATE TABLE IF NOT EXISTS "modifiers" ( "id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL, "ingredient_id" uuid, "item_id" uuid ); DO $$ BEGIN ALTER TABLE "menu_item_ingredients" ADD CONSTRAINT "menu_item_ingredients_menu_item_id_menu_items_id_fk" FOREIGN KEY ("menu_item_id") REFERENCES "menu_items"("id") ON DELETE no action ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; DO $$ BEGIN ALTER TABLE "menu_item_ingredients" ADD CONSTRAINT "menu_item_ingredients_ingredient_id_ingredients_id_fk" FOREIGN KEY ("ingredient_id") REFERENCES "ingredients"("id") ON DELETE no action ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; DO $$ BEGIN ALTER TABLE "menu_item_modifier_groups" ADD CONSTRAINT "menu_item_modifier_groups_menu_item_id_menu_items_id_fk" FOREIGN KEY ("menu_item_id") REFERENCES "menu_items"("id") ON DELETE no action ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; DO $$ BEGIN ALTER TABLE "menu_item_modifier_groups" ADD CONSTRAINT "menu_item_modifier_groups_modifier_group_id_modifier_groups_id_fk" FOREIGN KEY ("modifier_group_id") REFERENCES "modifier_groups"("id") ON DELETE no action ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; DO $$ BEGIN ALTER TABLE "modifier_group_modifiers" ADD CONSTRAINT "modifier_group_modifiers_modifier_group_id_modifier_groups_id_fk" FOREIGN KEY ("modifier_group_id") REFERENCES "modifier_groups"("id") ON DELETE no action ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; DO $$ BEGIN ALTER TABLE "modifier_group_modifiers" ADD CONSTRAINT "modifier_group_modifiers_modifier_id_modifiers_id_fk" FOREIGN KEY ("modifier_id") REFERENCES "modifiers"("id") ON DELETE no action ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; DO $$ BEGIN ALTER TABLE "modifiers" ADD CONSTRAINT "modifiers_ingredient_id_ingredients_id_fk" FOREIGN KEY ("ingredient_id") REFERENCES "ingredients"("id") ON DELETE no action ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; DO $$ BEGIN ALTER TABLE "modifiers" ADD CONSTRAINT "modifiers_item_id_menu_items_id_fk" FOREIGN KEY ("item_id") REFERENCES "menu_items"("id") ON DELETE no action ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); }); test('Simple case from GH', async () => { const firstMenuItemId = uuid(); const secondMenuItemId = uuid(); const firstModGroupsId = uuid(); const secondModGroupsId = uuid(); await db.insert(schema.menuItems).values([{ id: firstMenuItemId }, { id: secondMenuItemId }]); await db.insert(schema.modifierGroups).values([{ id: firstModGroupsId }, { id: secondModGroupsId }]); await db.insert(schema.menuItemModifierGroups).values([{ modifierGroupId: firstModGroupsId, menuItemId: firstMenuItemId, }, { modifierGroupId: firstModGroupsId, menuItemId: secondMenuItemId, }, { modifierGroupId: secondModGroupsId, menuItemId: firstMenuItemId, }]); const firstIngredientId = uuid(); const secondIngredientId = uuid(); await db.insert(schema.ingredients).values([{ id: firstIngredientId, name: 'first', }, { id: secondIngredientId, name: 'second', }]); const firstModifierId = uuid(); const secondModifierId = uuid(); await db.insert(schema.modifiers).values([{ id: firstModifierId, ingredientId: firstIngredientId, itemId: firstMenuItemId, }, { id: secondModifierId, ingredientId: secondIngredientId, itemId: secondMenuItemId, }]); await db.insert(schema.modifierGroupModifiers).values([ { modifierGroupId: firstModGroupsId, modifierId: firstModifierId, }, { modifierGroupId: secondModGroupsId, modifierId: secondModifierId, }, ]); const response = await db.query.menuItems .findMany({ with: { modifierGroups: { with: { modifierGroup: { with: { modifiers: { with: { modifier: { with: { ingredient: true, item: true, }, }, }, orderBy: desc(schema.modifierGroupModifiers.order), }, }, }, }, orderBy: schema.menuItemModifierGroups.order, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: string; modifierGroups: { menuItemId: string; modifierGroupId: string; order: number | null; modifierGroup: { id: string; modifiers: { modifierGroupId: string; order: number | null; modifierId: string; modifier: { id: string; ingredientId: string | null; itemId: string | null; ingredient: { id: string; name: string; description: string | null; imageUrl: string | null; inStock: boolean | null; } | null; item: { id: string; } | null; }; }[]; }; }[]; }[] >(); // TODO: don't rely on items order // expect(response.length).eq(2); // expect(response[0]?.modifierGroups.length).eq(1); // expect(response[0]?.modifierGroups[0]?.modifierGroup.modifiers.length).eq(1); // TODO: add correct IDs // expect(response[0]?.modifierGroups[0]?.modifierGroup.modifiers[0]?.modifier.ingredient?.id).eq( // '0b2b9abc-5975-4a1d-ba3d-6fc3b3149902', // ); // expect(response[0]?.modifierGroups[0]?.modifierGroup.modifiers[0]?.modifier.item?.id).eq( // 'a867133e-60b7-4003-aaa0-deeefad7e518', // ); }); ================================================ FILE: integration-tests/tests/relational/mysql.planetscale.test.ts ================================================ import 'dotenv/config'; import { Client } from '@planetscale/database'; import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; import { drizzle, type PlanetScaleDatabase } from 'drizzle-orm/planetscale-serverless'; import { beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; import * as schema from './mysql.schema.ts'; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable } = schema; const ENABLE_LOGGING = false; /* Test cases: - querying nested relation without PK with additional fields */ let db: PlanetScaleDatabase; beforeAll(async () => { db = drizzle( new Client({ url: process.env['PLANETSCALE_CONNECTION_STRING']!, // host: process.env['DATABASE_HOST']!, // username: process.env['DATABASE_USERNAME']!, // password: process.env['DATABASE_PASSWORD']!, }), { schema, logger: ENABLE_LOGGING }, ); await Promise.all([ db.execute(sql`drop table if exists \`users\``), db.execute(sql`drop table if exists \`groups\``), db.execute(sql`drop table if exists \`users_to_groups\``), db.execute(sql`drop table if exists \`posts\``), db.execute(sql`drop table if exists \`comments\``), db.execute(sql`drop table if exists \`comment_likes\``), ]); await Promise.all([ db.execute( sql` CREATE TABLE IF NOT EXISTS \`users\` ( \`id\` bigint PRIMARY KEY AUTO_INCREMENT NOT NULL, \`name\` text NOT NULL, \`verified\` boolean DEFAULT false NOT NULL, \`invited_by\` bigint ); `, ), db.execute( sql` CREATE TABLE IF NOT EXISTS \`groups\` ( \`id\` bigint PRIMARY KEY AUTO_INCREMENT NOT NULL, \`name\` text NOT NULL, \`description\` text ); `, ), db.execute( sql` CREATE TABLE IF NOT EXISTS \`users_to_groups\` ( \`id\` serial PRIMARY KEY AUTO_INCREMENT NOT NULL, \`user_id\` bigint, \`group_id\` bigint ); `, ), db.execute( sql` CREATE TABLE IF NOT EXISTS \`posts\` ( \`id\` bigint PRIMARY KEY AUTO_INCREMENT NOT NULL, \`content\` text NOT NULL, \`owner_id\` bigint, \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL ); `, ), db.execute( sql` CREATE TABLE IF NOT EXISTS \`comments\` ( \`id\` bigint PRIMARY KEY AUTO_INCREMENT NOT NULL, \`content\` text NOT NULL, \`creator\` bigint, \`post_id\` bigint, \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL ); `, ), db.execute( sql` CREATE TABLE IF NOT EXISTS \`comment_likes\` ( \`id\` bigint PRIMARY KEY AUTO_INCREMENT NOT NULL, \`creator\` bigint, \`comment_id\` bigint, \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL ); `, ), ]); }); beforeEach(async () => { await Promise.all([ db.delete(usersTable), db.delete(postsTable), db.delete(commentsTable), db.delete(groupsTable), db.delete(usersToGroupsTable), db.execute(sql`delete from \`comment_likes\``), ]); await Promise.all([ db.execute(sql`ALTER TABLE \`users\` AUTO_INCREMENT = 1`), db.execute(sql`ALTER TABLE \`groups\` AUTO_INCREMENT = 1`), db.execute(sql`ALTER TABLE \`users_to_groups\` AUTO_INCREMENT = 1`), db.execute(sql`ALTER TABLE \`posts\` AUTO_INCREMENT = 1`), db.execute(sql`ALTER TABLE \`comments\` AUTO_INCREMENT = 1`), db.execute(sql`ALTER TABLE \`comment_likes\` AUTO_INCREMENT = 1`), ]); }); /* [Find Many] One relation users+posts */ test('[Find Many] Get users with posts', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts and users', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ limit: 2, with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, lowerName: 'dan', posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', lowerName: 'andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { id: 5, ownerId: 2, content: 'Post2.1', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', lowerName: 'alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { id: 7, ownerId: 3, content: 'Post3.1', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }], }); }); test('[Find Many] Get users with posts + custom fields + limits', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ limit: 1, with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(2); expect(usersWithPosts[1]?.posts.length).eq(2); expect(usersWithPosts[2]?.posts.length).eq(3); expect(usersWithPosts[2]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [ { id: 3, ownerId: 1, content: '3', createdAt: usersWithPosts[2]?.posts[2]?.createdAt, }, { id: 2, ownerId: 1, content: '2', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }, { id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[0]?.createdAt, }, ], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 5, ownerId: 2, content: '5', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where + partial', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial(true + false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; posts: { id: number; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find Many] Get users with posts + where + partial(false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in transaction', async () => { let usersWithPosts: { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; await db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in rollbacked transaction', async () => { let usersWithPosts: { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; await expect(db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); tx.rollback(); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); })).rejects.toThrowError(new TransactionRollbackError()); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(0); }); // select only custom test('[Find Many] Get only custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 1, content: 'Post1.2' }, { id: 3, ownerId: 1, content: 'Post1.3' }, { id: 4, ownerId: 2, content: 'Post2' }, { id: 5, ownerId: 2, content: 'Post2.1' }, { id: 6, ownerId: 3, content: 'Post3' }, { id: 7, ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts[0]?.lowerName).toEqual('dan'); expect(usersWithPosts[1]?.lowerName).toEqual('andrew'); expect(usersWithPosts[2]?.lowerName).toEqual('alex'); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1', }); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1.2', }); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1.3', }); expect(usersWithPosts[1]?.posts).toContainEqual({ lowerName: 'post2', }); expect(usersWithPosts[1]?.posts).toContainEqual({ lowerName: 'post2.1', }); expect(usersWithPosts[2]?.posts).toContainEqual({ lowerName: 'post3', }); expect(usersWithPosts[2]?.posts).toContainEqual({ lowerName: 'post3.1', }); }); test('[Find Many] Get only custom fields + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find Many] Get only custom fields + where + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find Many] Get only custom fields + where + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // select only custom find one test('[Find One] Get only custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(3); expect(usersWithPosts?.lowerName).toEqual('dan'); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1', }); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1.2', }); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1.3', }); }); test('[Find One] Get only custom fields + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find One] Get only custom fields + where + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find One] Get only custom fields + where + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // columns {} test('[Find Many] Get select {}', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await expect(async () => await db.query.usersTable.findMany({ columns: {}, }) ).rejects.toThrow(DrizzleError); }); // columns {} test('[Find One] Get select {}', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await expect(async () => await db.query.usersTable.findFirst({ columns: {}, }) ).rejects.toThrow(DrizzleError); }); // deep select {} test('[Find Many] Get deep select {}', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); await expect(async () => await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, }, }, }) ).rejects.toThrow(DrizzleError); }); // deep select {} test('[Find One] Get deep select {}', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); await expect(async () => await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, }, }, }) ).rejects.toThrow(DrizzleError); }); /* Prepared statements for users+posts */ test('[Find Many] Get users with posts + prepared limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ with: { posts: { limit: placeholder('limit'), }, }, }).prepare(); const usersWithPosts = await prepared.execute({ limit: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared limit + offset', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), with: { posts: { limit: placeholder('pLimit'), }, }, }).prepare(); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const prepared = db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, placeholder('id'))), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }).prepare(); const usersWithPosts = await prepared.execute({ id: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared + limit + offset + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), where: (({ id }, { eq, or }) => or(eq(id, placeholder('id')), eq(id, 3))), with: { posts: { where: (({ id }, { eq }) => eq(id, placeholder('pid'))), limit: placeholder('pLimit'), }, }, }).prepare(); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); /* [Find One] One relation users+posts */ test('[Find One] Get users with posts', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + limit posts', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts no results found', async () => { const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts).toBeUndefined(); }); test('[Find One] Get users with posts + limit posts and users', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(3); expect(usersWithPosts?.lowerName).toEqual('dan'); expect(usersWithPosts?.id).toEqual(1); expect(usersWithPosts?.verified).toEqual(false); expect(usersWithPosts?.invitedBy).toEqual(null); expect(usersWithPosts?.name).toEqual('Dan'); expect(usersWithPosts?.posts).toContainEqual({ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt, }); expect(usersWithPosts?.posts).toContainEqual({ id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts?.posts[1]?.createdAt, }); expect(usersWithPosts?.posts).toContainEqual({ id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts?.posts[2]?.createdAt, }); }); test('[Find One] Get users with posts + custom fields + limits', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(2); expect(usersWithPosts).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where + partial', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial(true + false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; posts: { id: number; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find One] Get users with posts + where + partial(false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); /* One relation users+users. Self referencing */ test('Get user with invitee', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user + limit with invitee', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew', invitedBy: 1 }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, limit: 2, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); }); test('Get user with invitee and custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee and custom fields + limits', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), limit: 3, with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(3); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, }); }); test('Get user with invitee + order by', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[3]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[0]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee + where + partial', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial. Did not select users id, but used it in where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(true+false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, verified: false, }, with: { invitee: { columns: { id: true, name: true, verified: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { verified: false, }, with: { invitee: { columns: { name: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitedBy: number | null; invitee: { id: number; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitedBy: 1, invitee: { id: 1, verified: false, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitedBy: 2, invitee: { id: 2, verified: false, invitedBy: null }, }); }); /* Two first-level relations users+users and users+posts */ test('Get user with invitee and posts', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ with: { invitee: true, posts: true, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ limit: 3, with: { invitee: true, posts: { limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + limits + custom fields in each', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ limit: 3, extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_invitee_name') }), }, posts: { limit: 1, extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_content') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + custom fields in each', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), }, posts: { extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_name') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); response[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); response[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); response[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(2); expect(response[1]?.posts.length).eq(2); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(0); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.1', lower: 'post1.1', createdAt: response[0]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { id: 4, ownerId: 2, content: 'Post2.1', lower: 'post2.1', createdAt: response[1]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { id: 6, ownerId: 3, content: 'Post3.1', lower: 'post3.1', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, posts: { orderBy: (posts, { desc }) => [desc(posts.id)], }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(4); expect(response[3]?.invitee).toBeNull(); expect(response[2]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(2); expect(response[3]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { id: 1, ownerId: 1, content: 'Post1', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[2]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { id: 3, ownerId: 2, content: 'Post2', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response[1]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 2), eq(users.id, 3))), with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 2)), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(2); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(0); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), limit: 1, with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 3)), limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(1); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + orderBy + where + custom', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { invitee: true, posts: { where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[1]?.posts[0]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy + where + partial + custom', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, }, posts: { columns: { id: true, content: true, }, where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; lower: string; posts: { id: number; lower: string; content: string }[]; invitee: { id: number; name: string; lower: string; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', invitee: { id: 1, name: 'Dan', lower: 'dan' }, posts: [{ id: 5, content: 'Post3', lower: 'post3', }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, posts: [], }); }); /* One two-level relation users+posts+comments */ test('Get user with posts and posts with comments', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; }[]; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); // expect(response[2]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ // id: 3, // ownerId: 3, // content: 'Post3', // createdAt: response[2]?.posts[0]?.createdAt, // comments: [ // { // id: , // content: 'Comment3', // creator: 3, // postId: 3, // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); }); // Get user with limit posts and limit comments // Get user with custom field + post + comment with custom field // Get user with limit + posts orderBy + comment orderBy // Get user with where + posts where + comment where // Get user with where + posts partial where + comment where // Get user with where + posts partial where + comment partial(false) where // Get user with where partial(false) + posts partial where partial(false) + comment partial(false+true) where // Get user with where + posts partial where + comment where. Didn't select field from where in posts // Get user with where + posts partial where + comment where. Didn't select field from where for all // Get with limit+offset in each /* One two-level + One first-level relation users+posts+comments and users+users */ /* One three-level relation users+posts+comments+comment_owner */ test('Get user with posts and posts with comments and comments with owner', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[]; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); test('Get user with posts and posts with comments and comments with owner where exists', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, where: (table, { exists, eq }) => exists(db.select({ one: sql`1` }).from(usersTable).where(eq(sql`1`, table.id))), }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[]; }[]; }[]>(); expect(response.length).eq(1); expect(response[0]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); /* One three-level relation + 1 first-level relatioon 1. users+posts+comments+comment_owner 2. users+users */ /* One four-level relation users+posts+comments+coment_likes */ /* [Find Many] Many-to-many cases Users+users_to_groups+groups */ test('[Find Many] Get users with groups', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ with: { usersToGroups: { columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ limit: 1, where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ limit: 1, where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(0); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(0); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [], }); }); test('[Find Many] Get users with groups + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(2); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }, { user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + orderBy + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); /* [Find One] Many-to-many cases Users+users_to_groups+groups */ test('[Find One] Get users with groups', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(0); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [], }); }); test('[Find One] Get groups with users + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(2); expect(response).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find One] Get groups with users + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); test('Get groups with users + orderBy + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[] >(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('Get users with groups + custom', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { usersToGroups: { columns: {}, with: { group: { extras: { lower: sql`lower(${groupsTable.name})`.as('lower_name'), }, }, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; usersToGroups: { group: { id: number; name: string; description: string | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', lower: 'group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', lower: 'group3', description: null, }, }, { group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }], }); }); test('Get groups with users + custom', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), with: { usersToGroups: { columns: {}, with: { user: { extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), }, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; lower: string; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', lower: 'group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', lower: 'group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', lower: 'group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, }, }], }); }); test('.toSQL()', () => { const query = db.query.usersTable.findFirst().toSQL(); expect(query).toHaveProperty('sql', expect.any(String)); expect(query).toHaveProperty('params', expect.any(Array)); }); // + custom + where + orderby // + custom + where + orderby + limit // + partial // + partial(false) // + partial + orderBy + where (all not selected) /* One four-level relation users+posts+comments+coment_likes + users+users_to_groups+groups */ /* Really hard case 1. users+posts+comments+coment_likes 2. users+users_to_groups+groups 3. users+users */ ================================================ FILE: integration-tests/tests/relational/mysql.schema.ts ================================================ import { type AnyMySqlColumn, bigint, boolean, mysqlSchema, mysqlTable, primaryKey, serial, text, timestamp, } from 'drizzle-orm/mysql-core'; import { relations } from 'drizzle-orm'; export const usersTable = mysqlTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), invitedBy: bigint('invited_by', { mode: 'number' }).references( (): AnyMySqlColumn => usersTable.id, ), }); const schemaV1 = mysqlSchema('schemaV1'); export const usersV1 = schemaV1.table('usersV1', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), invitedBy: bigint('invited_by', { mode: 'number' }), }); export const usersTableV1 = schemaV1.table('users_table_V1', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), invitedBy: bigint('invited_by', { mode: 'number' }), }); export const usersConfig = relations(usersTable, ({ one, many }) => ({ invitee: one(usersTable, { fields: [usersTable.invitedBy], references: [usersTable.id], }), usersToGroups: many(usersToGroupsTable), posts: many(postsTable), comments: many(commentsTable), })); export const groupsTable = mysqlTable('groups', { id: serial('id').primaryKey(), name: text('name').notNull(), description: text('description'), }); export const groupsConfig = relations(groupsTable, ({ many }) => ({ usersToGroups: many(usersToGroupsTable), })); export const usersToGroupsTable = mysqlTable( 'users_to_groups', { id: serial('id').primaryKey(), userId: bigint('user_id', { mode: 'number' }).notNull().references( () => usersTable.id, ), groupId: bigint('group_id', { mode: 'number' }).notNull().references( () => groupsTable.id, ), }, (t) => ({ pk: primaryKey(t.userId, t.groupId), }), ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { fields: [usersToGroupsTable.groupId], references: [groupsTable.id], }), user: one(usersTable, { fields: [usersToGroupsTable.userId], references: [usersTable.id], }), })); export const postsTable = mysqlTable('posts', { id: serial('id').primaryKey(), content: text('content').notNull(), ownerId: bigint('owner_id', { mode: 'number' }).references( () => usersTable.id, ), createdAt: timestamp('created_at') .notNull() .defaultNow(), }); export const postsConfig = relations(postsTable, ({ one, many }) => ({ author: one(usersTable, { fields: [postsTable.ownerId], references: [usersTable.id], }), comments: many(commentsTable), })); export const commentsTable = mysqlTable('comments', { id: serial('id').primaryKey(), content: text('content').notNull(), creator: bigint('creator', { mode: 'number' }).references( () => usersTable.id, ), postId: bigint('post_id', { mode: 'number' }).references(() => postsTable.id), createdAt: timestamp('created_at') .notNull() .defaultNow(), }); export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ post: one(postsTable, { fields: [commentsTable.postId], references: [postsTable.id], }), author: one(usersTable, { fields: [commentsTable.creator], references: [usersTable.id], }), likes: many(commentLikesTable), })); export const commentLikesTable = mysqlTable('comment_likes', { id: serial('id').primaryKey(), creator: bigint('creator', { mode: 'number' }).references( () => usersTable.id, ), commentId: bigint('comment_id', { mode: 'number' }).references( () => commentsTable.id, ), createdAt: timestamp('created_at') .notNull() .defaultNow(), }); export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ comment: one(commentsTable, { fields: [commentLikesTable.commentId], references: [commentsTable.id], }), author: one(usersTable, { fields: [commentLikesTable.creator], references: [usersTable.id], }), })); ================================================ FILE: integration-tests/tests/relational/mysql.test.ts ================================================ import 'dotenv/config'; import Docker from 'dockerode'; import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; import { drizzle, type MySql2Database } from 'drizzle-orm/mysql2'; import getPort from 'get-port'; import * as mysql from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; import * as schema from './mysql.schema.ts'; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable, usersV1, usersTableV1 } = schema; const ENABLE_LOGGING = false; /* Test cases: - querying nested relation without PK with additional fields */ declare module 'vitest' { export interface TestContext { docker: Docker; mysqlContainer: Docker.Container; mysqlDb: MySql2Database; mysqlClient: mysql.Connection; } } let globalDocker: Docker; let mysqlContainer: Docker.Container; let db: MySql2Database; let client: mysql.Connection; async function createDockerDB(): Promise { const docker = (globalDocker = new Docker()); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } beforeAll(async () => { const connectionString = process.env['MYSQL_CONNECTION_STRING'] ?? await createDockerDB(); const sleep = 1000; let timeLeft = 30000; let connected = false; let lastError: unknown | undefined; do { try { client = await mysql.createConnection(connectionString); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { schema, logger: ENABLE_LOGGING, mode: 'default' }); }); afterAll(async () => { await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); }); beforeEach(async (ctx) => { ctx.mysqlDb = db; ctx.mysqlClient = client; ctx.docker = globalDocker; ctx.mysqlContainer = mysqlContainer; await ctx.mysqlDb.execute(sql`drop table if exists \`users\``); await ctx.mysqlDb.execute(sql`drop table if exists \`schemaV1\`.\`usersV1\``); await ctx.mysqlDb.execute(sql`drop table if exists \`schemaV1\`.\`users_table_V1\``); await ctx.mysqlDb.execute(sql`drop table if exists \`groups\``); await ctx.mysqlDb.execute(sql`drop table if exists \`users_to_groups\``); await ctx.mysqlDb.execute(sql`drop table if exists \`posts\``); await ctx.mysqlDb.execute(sql`drop table if exists \`comments\``); await ctx.mysqlDb.execute(sql`drop table if exists \`comment_likes\``); await ctx.mysqlDb.execute(sql`create schema if not exists \`schemaV1\``); await ctx.mysqlDb.execute( sql` CREATE TABLE \`users\` ( \`id\` serial PRIMARY KEY NOT NULL, \`name\` text NOT NULL, \`verified\` boolean DEFAULT false NOT NULL, \`invited_by\` bigint REFERENCES \`users\`(\`id\`) ); `, ); await ctx.mysqlDb.execute( sql` CREATE TABLE \`schemaV1\`.\`usersV1\` ( \`id\` serial PRIMARY KEY NOT NULL, \`name\` text NOT NULL, \`verified\` boolean DEFAULT false NOT NULL, \`invited_by\` bigint REFERENCES \`users\`(\`id\`) ); `, ); await ctx.mysqlDb.execute( sql` CREATE TABLE \`schemaV1\`.\`users_table_V1\` ( \`id\` serial PRIMARY KEY NOT NULL, \`name\` text NOT NULL, \`verified\` boolean DEFAULT false NOT NULL, \`invited_by\` bigint REFERENCES \`users\`(\`id\`) ); `, ); await ctx.mysqlDb.execute( sql` CREATE TABLE \`groups\` ( \`id\` serial PRIMARY KEY NOT NULL, \`name\` text NOT NULL, \`description\` text ); `, ); await ctx.mysqlDb.execute( sql` CREATE TABLE \`users_to_groups\` ( \`id\` serial PRIMARY KEY NOT NULL, \`user_id\` bigint REFERENCES \`users\`(\`id\`), \`group_id\` bigint REFERENCES \`groups\`(\`id\`) ); `, ); await ctx.mysqlDb.execute( sql` CREATE TABLE \`posts\` ( \`id\` serial PRIMARY KEY NOT NULL, \`content\` text NOT NULL, \`owner_id\` bigint REFERENCES \`users\`(\`id\`), \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL ); `, ); await ctx.mysqlDb.execute( sql` CREATE TABLE \`comments\` ( \`id\` serial PRIMARY KEY NOT NULL, \`content\` text NOT NULL, \`creator\` bigint REFERENCES \`users\`(\`id\`), \`post_id\` bigint REFERENCES \`posts\`(\`id\`), \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL ); `, ); await ctx.mysqlDb.execute( sql` CREATE TABLE \`comment_likes\` ( \`id\` serial PRIMARY KEY NOT NULL, \`creator\` bigint REFERENCES \`users\`(\`id\`), \`comment_id\` bigint REFERENCES \`comments\`(\`id\`), \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL ); `, ); }); /* [Find Many] One relation users+posts */ test('[Find Many] Get users with posts', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts and users', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ limit: 2, with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + custom fields', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, lowerName: 'dan', posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', lowerName: 'andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { id: 5, ownerId: 2, content: 'Post2.1', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', lowerName: 'alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { id: 7, ownerId: 3, content: 'Post3.1', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }], }); }); test('[Find Many] Get users with posts + custom fields + limits', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ limit: 1, with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + orderBy', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(2); expect(usersWithPosts[1]?.posts.length).eq(2); expect(usersWithPosts[2]?.posts.length).eq(3); expect(usersWithPosts[2]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 3, ownerId: 1, content: '3', createdAt: usersWithPosts[2]?.posts[2]?.createdAt }, { id: 2, ownerId: 1, content: '2', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }, { id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 5, ownerId: 2, content: '5', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where + partial', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial(true + false)', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; posts: { id: number; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find Many] Get users with posts + where + partial(false)', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in transaction', async (t) => { const { mysqlDb: db } = t; let usersWithPosts: { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; await db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in rollbacked transaction', async (t) => { const { mysqlDb: db } = t; let usersWithPosts: { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; await expect(db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); tx.rollback(); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); })).rejects.toThrowError(new TransactionRollbackError()); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(0); }); // select only custom test('[Find Many] Get only custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 1, content: 'Post1.2' }, { id: 3, ownerId: 1, content: 'Post1.3' }, { id: 4, ownerId: 2, content: 'Post2' }, { id: 5, ownerId: 2, content: 'Post2.1' }, { id: 6, ownerId: 3, content: 'Post3' }, { id: 7, ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts[0]?.lowerName).toEqual('dan'); expect(usersWithPosts[1]?.lowerName).toEqual('andrew'); expect(usersWithPosts[2]?.lowerName).toEqual('alex'); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1', }); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1.2', }); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1.3', }); expect(usersWithPosts[1]?.posts).toContainEqual({ lowerName: 'post2', }); expect(usersWithPosts[1]?.posts).toContainEqual({ lowerName: 'post2.1', }); expect(usersWithPosts[2]?.posts).toContainEqual({ lowerName: 'post3', }); expect(usersWithPosts[2]?.posts).toContainEqual({ lowerName: 'post3.1', }); }); test('[Find Many] Get only custom fields + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find Many] Get only custom fields + where + limit', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find Many] Get only custom fields + where + orderBy', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // select only custom find one test('[Find One] Get only custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(3); expect(usersWithPosts?.lowerName).toEqual('dan'); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1', }); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1.2', }); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1.3', }); }); test('[Find One] Get only custom fields + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find One] Get only custom fields + where + limit', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find One] Get only custom fields + where + orderBy', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // columns {} test('[Find Many] Get select {}', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await expect( async () => await db.query.usersTable.findMany({ columns: {}, }), ).rejects.toThrow(DrizzleError); }); // columns {} test('[Find One] Get select {}', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await expect(async () => await db.query.usersTable.findFirst({ columns: {}, }) ).rejects.toThrow(DrizzleError); }); // deep select {} test('[Find Many] Get deep select {}', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); await expect(async () => await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, }, }, }) ).rejects.toThrow(DrizzleError); }); // deep select {} test('[Find One] Get deep select {}', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); await expect(async () => await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, }, }, }) ).rejects.toThrow(DrizzleError); }); /* Prepared statements for users+posts */ test('[Find Many] Get users with posts + prepared limit', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ with: { posts: { limit: placeholder('limit'), }, }, }).prepare(); const usersWithPosts = await prepared.execute({ limit: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared limit + offset', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), with: { posts: { limit: placeholder('pLimit'), }, }, }).prepare(); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const prepared = db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, placeholder('id'))), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }).prepare(); const usersWithPosts = await prepared.execute({ id: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared + limit + offset + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), where: (({ id }, { eq, or }) => or(eq(id, placeholder('id')), eq(id, 3))), with: { posts: { where: (({ id }, { eq }) => eq(id, placeholder('pid'))), limit: placeholder('pLimit'), }, }, }).prepare(); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); /* [Find One] One relation users+posts */ test('[Find One] Get users with posts', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + limit posts', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts no results found', async (t) => { const { mysqlDb: db } = t; const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts).toBeUndefined(); }); test('[Find One] Get users with posts + limit posts and users', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(3); expect(usersWithPosts?.lowerName).toEqual('dan'); expect(usersWithPosts?.id).toEqual(1); expect(usersWithPosts?.verified).toEqual(false); expect(usersWithPosts?.invitedBy).toEqual(null); expect(usersWithPosts?.name).toEqual('Dan'); expect(usersWithPosts?.posts).toContainEqual({ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt, }); expect(usersWithPosts?.posts).toContainEqual({ id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts?.posts[1]?.createdAt, }); expect(usersWithPosts?.posts).toContainEqual({ id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts?.posts[2]?.createdAt, }); }); test('[Find One] Get users with posts + custom fields + limits', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + orderBy', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(2); expect(usersWithPosts).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where + partial', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial(true + false)', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; posts: { id: number; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find One] Get users with posts + where + partial(false)', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); /* One relation users+users. Self referencing */ test('Get user with invitee', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user + limit with invitee', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew', invitedBy: 1 }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, limit: 2, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); }); test('Get user with invitee and custom fields', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee and custom fields + limits', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), limit: 3, with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(3); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, }); }); test('Get user with invitee + order by', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[3]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[0]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee + where + partial', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial. Did not select users id, but used it in where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(true+false)', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, verified: false, }, with: { invitee: { columns: { id: true, name: true, verified: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(false)', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { verified: false, }, with: { invitee: { columns: { name: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitedBy: number | null; invitee: { id: number; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitedBy: 1, invitee: { id: 1, verified: false, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitedBy: 2, invitee: { id: 2, verified: false, invitedBy: null }, }); }); /* Two first-level relations users+users and users+posts */ test('Get user with invitee and posts', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ with: { invitee: true, posts: true, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ limit: 3, with: { invitee: true, posts: { limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + limits + custom fields in each', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ limit: 3, extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_invitee_name') }), }, posts: { limit: 1, extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_content') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + custom fields in each', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), }, posts: { extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_name') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); response[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); response[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); response[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(2); expect(response[1]?.posts.length).eq(2); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(0); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.1', lower: 'post1.1', createdAt: response[0]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { id: 4, ownerId: 2, content: 'Post2.1', lower: 'post2.1', createdAt: response[1]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { id: 6, ownerId: 3, content: 'Post3.1', lower: 'post3.1', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, posts: { orderBy: (posts, { desc }) => [desc(posts.id)], }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(4); expect(response[3]?.invitee).toBeNull(); expect(response[2]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(2); expect(response[3]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { id: 1, ownerId: 1, content: 'Post1', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[2]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { id: 3, ownerId: 2, content: 'Post2', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response[1]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 2), eq(users.id, 3))), with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 2)), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(2); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(0); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), limit: 1, with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 3)), limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(1); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + orderBy + where + custom', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { invitee: true, posts: { where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[1]?.posts[0]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy + where + partial + custom', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, }, posts: { columns: { id: true, content: true, }, where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; lower: string; posts: { id: number; lower: string; content: string }[]; invitee: { id: number; name: string; lower: string; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', invitee: { id: 1, name: 'Dan', lower: 'dan' }, posts: [{ id: 5, content: 'Post3', lower: 'post3', }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, posts: [], }); }); /* One two-level relation users+posts+comments */ test('Get user with posts and posts with comments', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; }[]; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); // expect(response[2]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ // id: 3, // ownerId: 3, // content: 'Post3', // createdAt: response[2]?.posts[0]?.createdAt, // comments: [ // { // id: , // content: 'Comment3', // creator: 3, // postId: 3, // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); }); // Get user with limit posts and limit comments // Get user with custom field + post + comment with custom field // Get user with limit + posts orderBy + comment orderBy // Get user with where + posts where + comment where // Get user with where + posts partial where + comment where // Get user with where + posts partial where + comment partial(false) where // Get user with where partial(false) + posts partial where partial(false) + comment partial(false+true) where // Get user with where + posts partial where + comment where. Didn't select field from where in posts // Get user with where + posts partial where + comment where. Didn't select field from where for all // Get with limit+offset in each /* One two-level + One first-level relation users+posts+comments and users+users */ /* One three-level relation users+posts+comments+comment_owner */ test('Get user with posts and posts with comments and comments with owner', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[]; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); test('Get user with posts and posts with comments and comments with owner where exists', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, where: (table, { exists, eq }) => exists(db.select({ one: sql`1` }).from(usersTable).where(eq(sql`1`, table.id))), }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[]; }[]; }[]>(); expect(response.length).eq(1); expect(response[0]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); /* One three-level relation + 1 first-level relatioon 1. users+posts+comments+comment_owner 2. users+users */ /* One four-level relation users+posts+comments+coment_likes */ /* [Find Many] Many-to-many cases Users+users_to_groups+groups */ test('[Find Many] Get users with groups', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ with: { usersToGroups: { columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ limit: 1, where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ limit: 1, where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(0); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(0); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [], }); }); test('[Find Many] Get users with groups + orderBy', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(2); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + orderBy', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }, { user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + orderBy + limit', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); /* [Find One] Many-to-many cases Users+users_to_groups+groups */ test('[Find One] Get users with groups', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(0); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [], }); }); test('[Find One] Get groups with users + where', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(2); expect(response).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find One] Get groups with users + orderBy', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy + limit', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); test('Get groups with users + orderBy + limit', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[] >(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('Get users with groups + custom', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { usersToGroups: { columns: {}, with: { group: { extras: { lower: sql`lower(${groupsTable.name})`.as('lower_name'), }, }, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; usersToGroups: { group: { id: number; name: string; description: string | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', lower: 'group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', lower: 'group3', description: null, }, }, { group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }], }); }); test('Get groups with users + custom', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), with: { usersToGroups: { columns: {}, with: { user: { extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), }, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; lower: string; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', lower: 'group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', lower: 'group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', lower: 'group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get schema users - dbName & tsName match', async (t) => { const { mysqlDb: db } = t; await db.insert(usersV1).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); const schemaUsers = await db.query.usersV1.findMany(); expectTypeOf(schemaUsers).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; }[]>(); schemaUsers.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(schemaUsers.length).eq(3); expect(schemaUsers[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, }); expect(schemaUsers[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, }); expect(schemaUsers[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, }); }); test('[Find Many] Get schema users - dbName & tsName mismatch', async (t) => { const { mysqlDb: db } = t; await db.insert(usersTableV1).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); const schemaUsers = await db.query.usersTableV1.findMany(); expectTypeOf(schemaUsers).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; }[]>(); schemaUsers.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(schemaUsers.length).eq(3); expect(schemaUsers[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, }); expect(schemaUsers[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, }); expect(schemaUsers[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, }); }); test('.toSQL()', () => { const query = db.query.usersTable.findFirst().toSQL(); expect(query).toHaveProperty('sql', expect.any(String)); expect(query).toHaveProperty('params', expect.any(Array)); }); // + custom + where + orderby // + custom + where + orderby + limit // + partial // + partial(false) // + partial + orderBy + where (all not selected) /* One four-level relation users+posts+comments+coment_likes + users+users_to_groups+groups */ /* Really hard case 1. users+posts+comments+coment_likes 2. users+users_to_groups+groups 3. users+users */ ================================================ FILE: integration-tests/tests/relational/pg.postgresjs.test.ts ================================================ import 'dotenv/config'; import Docker from 'dockerode'; import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; import { drizzle, type PostgresJsDatabase } from 'drizzle-orm/postgres-js'; import getPort from 'get-port'; import postgres from 'postgres'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; import * as schema from './pg.schema.ts'; const ENABLE_LOGGING = false; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable, usersV1, usersTableV1 } = schema; /* Test cases: - querying nested relation without PK with additional fields */ declare module 'vitest' { export interface TestContext { docker: Docker; pgContainer: Docker.Container; pgjsDb: PostgresJsDatabase; pgjsClient: postgres.Sql<{}>; } } let globalDocker: Docker; let pgContainer: Docker.Container; let db: PostgresJsDatabase; let client: postgres.Sql<{}>; async function createDockerDB(): Promise { const docker = (globalDocker = new Docker()); const port = await getPort({ port: 5432 }); const image = 'postgres:14'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); pgContainer = await docker.createContainer({ Image: image, Env: [ 'POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres', ], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '5432/tcp': [{ HostPort: `${port}` }], }, }, }); await pgContainer.start(); return `postgres://postgres:postgres@localhost:${port}/postgres`; } beforeAll(async () => { const connectionString = process.env['PG_CONNECTION_STRING'] ?? (await createDockerDB()); const sleep = 250; let timeLeft = 5000; let connected = false; let lastError: unknown | undefined; do { try { client = postgres(connectionString, { max: 1, onnotice: () => { // disable notices }, }); await client`select 1`; connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to Postgres'); await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { schema, logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); }); beforeEach(async (ctx) => { ctx.pgjsDb = db; ctx.pgjsClient = client; ctx.docker = globalDocker; ctx.pgContainer = pgContainer; await ctx.pgjsDb.execute(sql`drop schema public cascade`); await ctx.pgjsDb.execute(sql`drop schema if exists "schemaV1" cascade`); await ctx.pgjsDb.execute(sql`create schema public`); await ctx.pgjsDb.execute(sql`create schema "schemaV1"`); await ctx.pgjsDb.execute( sql` CREATE TABLE "users" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "verified" boolean DEFAULT false NOT NULL, "invited_by" int REFERENCES "users"("id") ); `, ); await ctx.pgjsDb.execute( sql` CREATE TABLE "schemaV1"."usersV1" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "verified" boolean DEFAULT false NOT NULL, "invited_by" int ); `, ); await ctx.pgjsDb.execute( sql` CREATE TABLE "schemaV1"."users_table_V1" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "verified" boolean DEFAULT false NOT NULL, "invited_by" int ); `, ); await ctx.pgjsDb.execute( sql` CREATE TABLE IF NOT EXISTS "groups" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "description" text ); `, ); await ctx.pgjsDb.execute( sql` CREATE TABLE IF NOT EXISTS "users_to_groups" ( "id" serial PRIMARY KEY NOT NULL, "user_id" int REFERENCES "users"("id"), "group_id" int REFERENCES "groups"("id") ); `, ); await ctx.pgjsDb.execute( sql` CREATE TABLE IF NOT EXISTS "posts" ( "id" serial PRIMARY KEY NOT NULL, "content" text NOT NULL, "owner_id" int REFERENCES "users"("id"), "created_at" timestamp with time zone DEFAULT now() NOT NULL ); `, ); await ctx.pgjsDb.execute( sql` CREATE TABLE IF NOT EXISTS "comments" ( "id" serial PRIMARY KEY NOT NULL, "content" text NOT NULL, "creator" int REFERENCES "users"("id"), "post_id" int REFERENCES "posts"("id"), "created_at" timestamp with time zone DEFAULT now() NOT NULL ); `, ); await ctx.pgjsDb.execute( sql` CREATE TABLE IF NOT EXISTS "comment_likes" ( "id" serial PRIMARY KEY NOT NULL, "creator" int REFERENCES "users"("id"), "comment_id" int REFERENCES "comments"("id"), "created_at" timestamp with time zone DEFAULT now() NOT NULL ); `, ); }); /* [Find Many] One relation users+posts */ test('[Find Many] Get users with posts', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts and users', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ limit: 2, with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + custom fields', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, lowerName: 'dan', posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', lowerName: 'andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { id: 5, ownerId: 2, content: 'Post2.1', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', lowerName: 'alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { id: 7, ownerId: 3, content: 'Post3.1', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }], }); }); test('[Find Many] Get users with posts + custom fields + limits', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ limit: 1, with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + orderBy', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(2); expect(usersWithPosts[1]?.posts.length).eq(2); expect(usersWithPosts[2]?.posts.length).eq(3); expect(usersWithPosts[2]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 3, ownerId: 1, content: '3', createdAt: usersWithPosts[2]?.posts[2]?.createdAt }, { id: 2, ownerId: 1, content: '2', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }, { id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 5, ownerId: 2, content: '5', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where + partial', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial(true + false)', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; posts: { id: number; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find Many] Get users with posts + where + partial(false)', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in transaction', async (t) => { const { pgjsDb: db } = t; let usersWithPosts: { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; await db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in rollbacked transaction', async (t) => { const { pgjsDb: db } = t; let usersWithPosts: { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; await expect(db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); tx.rollback(); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); })).rejects.toThrowError(new TransactionRollbackError()); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(0); }); // select only custom test('[Find Many] Get only custom fields', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1' }, { lowerName: 'post1.2', }, { lowerName: 'post1.3' }], }); expect(usersWithPosts).toContainEqual({ lowerName: 'andrew', posts: [{ lowerName: 'post2' }, { lowerName: 'post2.1', }], }); expect(usersWithPosts).toContainEqual({ lowerName: 'alex', posts: [{ lowerName: 'post3' }, { lowerName: 'post3.1', }], }); }); test('[Find Many] Get only custom fields + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find Many] Get only custom fields + where + limit', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find Many] Get only custom fields + where + orderBy', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // select only custom find one test('[Find One] Get only custom fields', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(3); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1' }, { lowerName: 'post1.2', }, { lowerName: 'post1.3' }], }); }); test('[Find One] Get only custom fields + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find One] Get only custom fields + where + limit', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find One] Get only custom fields + where + orderBy', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // columns {} test('[Find Many] Get select {}', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await expect(async () => await db.query.usersTable.findMany({ columns: {}, }) ).rejects.toThrow(DrizzleError); }); // columns {} test('[Find One] Get select {}', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await expect(async () => await db.query.usersTable.findFirst({ columns: {}, }) ).rejects.toThrow(DrizzleError); }); // deep select {} test('[Find Many] Get deep select {}', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); await expect(async () => await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, }, }, }) ).rejects.toThrow(DrizzleError); }); // deep select {} test('[Find One] Get deep select {}', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); await expect(async () => await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, }, }, }) ).rejects.toThrow(DrizzleError); }); /* Prepared statements for users+posts */ test('[Find Many] Get users with posts + prepared limit', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ with: { posts: { limit: placeholder('limit'), }, }, }).prepare('query1'); const usersWithPosts = await prepared.execute({ limit: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared limit + offset', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), with: { posts: { limit: placeholder('pLimit'), }, }, }).prepare('query2'); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const prepared = db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, placeholder('id'))), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }).prepare('query3'); const usersWithPosts = await prepared.execute({ id: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared + limit + offset + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), where: (({ id }, { eq, or }) => or(eq(id, placeholder('id')), eq(id, 3))), with: { posts: { where: (({ id }, { eq }) => eq(id, placeholder('pid'))), limit: placeholder('pLimit'), }, }, }).prepare('query4'); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); /* [Find One] One relation users+posts */ test('[Find One] Get users with posts', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + limit posts', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts no results found', async (t) => { const { pgjsDb: db } = t; const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts).toBeUndefined(); }); test('[Find One] Get users with posts + limit posts and users', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + custom fields', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(3); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, lowerName: 'dan', posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts?.posts[1]?.createdAt, }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts?.posts[2]?.createdAt }], }); }); test('[Find One] Get users with posts + custom fields + limits', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + orderBy', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(2); expect(usersWithPosts).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where + partial', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial(true + false)', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; posts: { id: number; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find One] Get users with posts + where + partial(false)', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); /* One relation users+users. Self referencing */ test('Get user with invitee', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user + limit with invitee', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew', invitedBy: 1 }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, limit: 2, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); }); test('Get user with invitee and custom fields', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee and custom fields + limits', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), limit: 3, with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(3); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, }); }); test('Get user with invitee + order by', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[3]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[0]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee + where + partial', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial. Did not select users id, but used it in where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(true+false)', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, verified: false, }, with: { invitee: { columns: { id: true, name: true, verified: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(false)', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { verified: false, }, with: { invitee: { columns: { name: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitedBy: number | null; invitee: { id: number; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitedBy: 1, invitee: { id: 1, verified: false, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitedBy: 2, invitee: { id: 2, verified: false, invitedBy: null }, }); }); /* Two first-level relations users+users and users+posts */ test('Get user with invitee and posts', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ with: { invitee: true, posts: true, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ limit: 3, with: { invitee: true, posts: { limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + limits + custom fields in each', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ limit: 3, extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_invitee_name') }), }, posts: { limit: 1, extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_content') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + custom fields in each', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), }, posts: { extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_name') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(2); expect(response[1]?.posts.length).eq(2); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(0); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.1', lower: 'post1.1', createdAt: response[0]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { id: 4, ownerId: 2, content: 'Post2.1', lower: 'post2.1', createdAt: response[1]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { id: 6, ownerId: 3, content: 'Post3.1', lower: 'post3.1', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, posts: { orderBy: (posts, { desc }) => [desc(posts.id)], }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(4); expect(response[3]?.invitee).toBeNull(); expect(response[2]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(2); expect(response[3]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { id: 1, ownerId: 1, content: 'Post1', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[2]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { id: 3, ownerId: 2, content: 'Post2', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response[1]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 2), eq(users.id, 3))), with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 2)), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(2); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(0); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), limit: 1, with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 3)), limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(1); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + orderBy + where + custom', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { invitee: true, posts: { where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[1]?.posts[0]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy + where + partial + custom', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, }, posts: { columns: { id: true, content: true, }, where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; lower: string; posts: { id: number; lower: string; content: string }[]; invitee: { id: number; name: string; lower: string; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', invitee: { id: 1, name: 'Dan', lower: 'dan' }, posts: [{ id: 5, content: 'Post3', lower: 'post3', }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, posts: [], }); }); /* One two-level relation users+posts+comments */ test('Get user with posts and posts with comments', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; }[]; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); // expect(response[2]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ // id: 3, // ownerId: 3, // content: 'Post3', // createdAt: response[2]?.posts[0]?.createdAt, // comments: [ // { // id: , // content: 'Comment3', // creator: 3, // postId: 3, // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); }); // Get user with limit posts and limit comments // Get user with custom field + post + comment with custom field // Get user with limit + posts orderBy + comment orderBy // Get user with where + posts where + comment where // Get user with where + posts partial where + comment where // Get user with where + posts partial where + comment partial(false) where // Get user with where partial(false) + posts partial where partial(false) + comment partial(false+true) where // Get user with where + posts partial where + comment where. Didn't select field from where in posts // Get user with where + posts partial where + comment where. Didn't select field from where for all // Get with limit+offset in each /* One two-level + One first-level relation users+posts+comments and users+users */ /* One three-level relation users+posts+comments+comment_owner */ test('Get user with posts and posts with comments and comments with owner', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[]; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); test('Get user with posts and posts with comments and comments with owner where exists', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, where: (table, { notExists, eq }) => notExists(db.select({ one: sql`1` }).from(usersTable).where(eq(sql`1`, table.id))), }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[]; }[]; }[]>(); expect(response.length).eq(2); expect(response[0]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 2, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); /* One three-level relation + 1 first-level relatioon 1. users+posts+comments+comment_owner 2. users+users */ /* One four-level relation users+posts+comments+coment_likes */ /* [Find Many] Many-to-many cases Users+users_to_groups+groups */ test('[Find Many] Get users with groups', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ with: { usersToGroups: { columns: {}, with: { group: true, }, orderBy: usersToGroupsTable.groupId, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: expect.arrayContaining([ { group: { id: 2, name: 'Group2', description: null, }, }, { group: { id: 3, name: 'Group3', description: null, }, }, ]), }); }); test('[Find Many] Get groups with users', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ limit: 1, where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ limit: 1, where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(0); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(0); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [], }); }); test('[Find Many] Get users with groups + orderBy', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(2); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + orderBy', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }, { user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + orderBy + limit', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); /* [Find One] Many-to-many cases Users+users_to_groups+groups */ test('[Find One] Get users with groups', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(0); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [], }); }); test('[Find One] Get groups with users + where', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(2); expect(response).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find One] Get groups with users + orderBy', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy + limit', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); test('Get groups with users + orderBy + limit', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[] >(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('Get users with groups + custom', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { usersToGroups: { columns: {}, with: { group: { extras: { lower: sql`lower(${groupsTable.name})`.as('lower_name'), }, }, }, orderBy: usersToGroupsTable.groupId, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; usersToGroups: { group: { id: number; name: string; description: string | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', lower: 'group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, usersToGroups: [ { group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }, { group: { id: 3, name: 'Group3', lower: 'group3', description: null, }, }, ], }); }); test('Get groups with users + custom', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), with: { usersToGroups: { columns: {}, with: { user: { extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), }, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; lower: string; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', lower: 'group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', lower: 'group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', lower: 'group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get schema users - dbName & tsName match', async (t) => { const { pgjsDb: db } = t; await db.insert(usersV1).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); const schemaUsers = await db.query.usersV1.findMany(); expectTypeOf(schemaUsers).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; }[]>(); schemaUsers.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(schemaUsers.length).eq(3); expect(schemaUsers[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, }); expect(schemaUsers[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, }); expect(schemaUsers[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, }); }); test('[Find Many] Get schema users - dbName & tsName mismatch', async (t) => { const { pgjsDb: db } = t; await db.insert(usersTableV1).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); const schemaUsers = await db.query.usersTableV1.findMany(); expectTypeOf(schemaUsers).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; }[]>(); schemaUsers.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(schemaUsers.length).eq(3); expect(schemaUsers[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, }); expect(schemaUsers[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, }); expect(schemaUsers[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, }); }); test('.toSQL()', () => { const query = db.query.usersTable.findFirst().toSQL(); expect(query).toHaveProperty('sql', expect.any(String)); expect(query).toHaveProperty('params', expect.any(Array)); }); // + custom + where + orderby // + custom + where + orderby + limit // + partial // + partial(false) // + partial + orderBy + where (all not selected) /* One four-level relation users+posts+comments+coment_likes + users+users_to_groups+groups */ /* Really hard case 1. users+posts+comments+coment_likes 2. users+users_to_groups+groups 3. users+users */ ================================================ FILE: integration-tests/tests/relational/pg.schema.ts ================================================ import { boolean, integer, type PgColumn, pgSchema, pgTable, primaryKey, serial, text, timestamp, } from 'drizzle-orm/pg-core'; import { relations } from 'drizzle-orm'; export const usersTable = pgTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), invitedBy: integer('invited_by').references((): PgColumn => usersTable.id), }); export const schemaV1 = pgSchema('schemaV1'); export const usersV1 = schemaV1.table('usersV1', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), invitedBy: integer('invited_by'), }); export const usersTableV1 = schemaV1.table('users_table_V1', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), invitedBy: integer('invited_by'), }); export const usersConfig = relations(usersTable, ({ one, many }) => ({ invitee: one(usersTable, { fields: [usersTable.invitedBy], references: [usersTable.id] }), usersToGroups: many(usersToGroupsTable), posts: many(postsTable), })); export const groupsTable = pgTable('groups', { id: serial('id').primaryKey(), name: text('name').notNull(), description: text('description'), }); export const groupsConfig = relations(groupsTable, ({ many }) => ({ usersToGroups: many(usersToGroupsTable), })); export const usersToGroupsTable = pgTable('users_to_groups', { id: serial('id').primaryKey(), userId: integer('user_id').notNull().references(() => usersTable.id), groupId: integer('group_id').notNull().references(() => groupsTable.id), }, (t) => ({ pk: primaryKey(t.groupId, t.userId), })); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { fields: [usersToGroupsTable.groupId], references: [groupsTable.id] }), user: one(usersTable, { fields: [usersToGroupsTable.userId], references: [usersTable.id] }), })); export const postsTable = pgTable('posts', { id: serial('id').primaryKey(), content: text('content').notNull(), ownerId: integer('owner_id').references(() => usersTable.id), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); export const postsConfig = relations(postsTable, ({ one, many }) => ({ author: one(usersTable, { fields: [postsTable.ownerId], references: [usersTable.id] }), comments: many(commentsTable), })); export const commentsTable = pgTable('comments', { id: serial('id').primaryKey(), content: text('content').notNull(), creator: integer('creator').references(() => usersTable.id), postId: integer('post_id').references(() => postsTable.id), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ post: one(postsTable, { fields: [commentsTable.postId], references: [postsTable.id] }), author: one(usersTable, { fields: [commentsTable.creator], references: [usersTable.id] }), likes: many(commentLikesTable), })); export const commentLikesTable = pgTable('comment_likes', { id: serial('id').primaryKey(), creator: integer('creator').references(() => usersTable.id), commentId: integer('comment_id').references(() => commentsTable.id), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ comment: one(commentsTable, { fields: [commentLikesTable.commentId], references: [commentsTable.id] }), author: one(usersTable, { fields: [commentLikesTable.creator], references: [usersTable.id] }), })); ================================================ FILE: integration-tests/tests/relational/pg.test.ts ================================================ import 'dotenv/config'; import Docker from 'dockerode'; import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; import { drizzle, type NodePgDatabase } from 'drizzle-orm/node-postgres'; import getPort from 'get-port'; import pg from 'pg'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; import * as schema from './pg.schema.ts'; const { Client } = pg; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable, usersV1, usersTableV1 } = schema; const ENABLE_LOGGING = false; /* Test cases: - querying nested relation without PK with additional fields */ declare module 'vitest' { export interface TestContext { docker: Docker; pgContainer: Docker.Container; pgDb: NodePgDatabase; pgClient: pg.Client; } } let globalDocker: Docker; let pgContainer: Docker.Container; let db: NodePgDatabase; let client: pg.Client; async function createDockerDB(): Promise { const docker = (globalDocker = new Docker()); const port = await getPort({ port: 5432 }); const image = 'postgres:14'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); pgContainer = await docker.createContainer({ Image: image, Env: [ 'POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres', ], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '5432/tcp': [{ HostPort: `${port}` }], }, }, }); await pgContainer.start(); return `postgres://postgres:postgres@localhost:${port}/postgres`; } beforeAll(async () => { const connectionString = process.env['PG_CONNECTION_STRING'] ?? (await createDockerDB()); const sleep = 250; let timeLeft = 5000; let connected = false; let lastError: unknown | undefined; do { try { client = new Client(connectionString); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to Postgres'); await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { schema, logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); }); beforeEach(async (ctx) => { ctx.pgDb = db; ctx.pgClient = client; ctx.docker = globalDocker; ctx.pgContainer = pgContainer; await ctx.pgDb.execute(sql`drop schema public cascade`); await ctx.pgDb.execute(sql`drop schema if exists "schemaV1" cascade`); await ctx.pgDb.execute(sql`create schema public`); await ctx.pgDb.execute(sql`create schema "schemaV1"`); await ctx.pgDb.execute( sql` CREATE TABLE "users" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "verified" boolean DEFAULT false NOT NULL, "invited_by" int REFERENCES "users"("id") ); `, ); await ctx.pgDb.execute( sql` CREATE TABLE "schemaV1"."usersV1" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "verified" boolean DEFAULT false NOT NULL, "invited_by" int ); `, ); await ctx.pgDb.execute( sql` CREATE TABLE "schemaV1"."users_table_V1" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "verified" boolean DEFAULT false NOT NULL, "invited_by" int ); `, ); await ctx.pgDb.execute( sql` CREATE TABLE IF NOT EXISTS "groups" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "description" text ); `, ); await ctx.pgDb.execute( sql` CREATE TABLE IF NOT EXISTS "users_to_groups" ( "id" serial PRIMARY KEY NOT NULL, "user_id" int REFERENCES "users"("id"), "group_id" int REFERENCES "groups"("id") ); `, ); await ctx.pgDb.execute( sql` CREATE TABLE IF NOT EXISTS "posts" ( "id" serial PRIMARY KEY NOT NULL, "content" text NOT NULL, "owner_id" int REFERENCES "users"("id"), "created_at" timestamp with time zone DEFAULT now() NOT NULL ); `, ); await ctx.pgDb.execute( sql` CREATE TABLE IF NOT EXISTS "comments" ( "id" serial PRIMARY KEY NOT NULL, "content" text NOT NULL, "creator" int REFERENCES "users"("id"), "post_id" int REFERENCES "posts"("id"), "created_at" timestamp with time zone DEFAULT now() NOT NULL ); `, ); await ctx.pgDb.execute( sql` CREATE TABLE IF NOT EXISTS "comment_likes" ( "id" serial PRIMARY KEY NOT NULL, "creator" int REFERENCES "users"("id"), "comment_id" int REFERENCES "comments"("id"), "created_at" timestamp with time zone DEFAULT now() NOT NULL ); `, ); }); /* [Find Many] One relation users+posts */ test('[Find Many] Get users with posts', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts and users', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ limit: 2, with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + custom fields', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, lowerName: 'dan', posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', lowerName: 'andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { id: 5, ownerId: 2, content: 'Post2.1', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', lowerName: 'alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { id: 7, ownerId: 3, content: 'Post3.1', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }], }); }); test('[Find Many] Get users with posts + custom fields + limits', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ limit: 1, with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + orderBy', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(2); expect(usersWithPosts[1]?.posts.length).eq(2); expect(usersWithPosts[2]?.posts.length).eq(3); expect(usersWithPosts[2]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 3, ownerId: 1, content: '3', createdAt: usersWithPosts[2]?.posts[2]?.createdAt }, { id: 2, ownerId: 1, content: '2', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }, { id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 5, ownerId: 2, content: '5', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where + partial', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial(true + false)', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; posts: { id: number; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find Many] Get users with posts + where + partial(false)', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in transaction', async (t) => { const { pgDb: db } = t; let usersWithPosts: { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; await db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in rollbacked transaction', async (t) => { const { pgDb: db } = t; let usersWithPosts: { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; await expect(db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); tx.rollback(); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); })).rejects.toThrowError(new TransactionRollbackError()); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(0); }); // select only custom test('[Find Many] Get only custom fields', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1' }, { lowerName: 'post1.2', }, { lowerName: 'post1.3' }], }); expect(usersWithPosts).toContainEqual({ lowerName: 'andrew', posts: [{ lowerName: 'post2' }, { lowerName: 'post2.1', }], }); expect(usersWithPosts).toContainEqual({ lowerName: 'alex', posts: [{ lowerName: 'post3' }, { lowerName: 'post3.1', }], }); }); test('[Find Many] Get only custom fields + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find Many] Get only custom fields + where + limit', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find Many] Get only custom fields + where + orderBy', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // select only custom find one test('[Find One] Get only custom fields', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(3); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1' }, { lowerName: 'post1.2', }, { lowerName: 'post1.3' }], }); }); test('[Find One] Get only custom fields + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ contentLower: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { contentLower: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ contentLower: 'post1.2' }, { contentLower: 'post1.3' }], }); }); test('[Find One] Get only custom fields + where + limit', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find One] Get only custom fields + where + orderBy', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // columns {} test('[Find Many] Get select {}', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await expect(async () => await db.query.usersTable.findMany({ columns: {}, }) ).rejects.toThrow(DrizzleError); }); // columns {} test('[Find One] Get select {}', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await expect(async () => await db.query.usersTable.findFirst({ columns: {}, }) ).rejects.toThrow(DrizzleError); }); // deep select {} test('[Find Many] Get deep select {}', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); await expect(async () => await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, }, }, }) ).rejects.toThrow(DrizzleError); }); // deep select {} test('[Find One] Get deep select {}', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); await expect(async () => await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, }, }, }) ).rejects.toThrow(DrizzleError); }); /* Prepared statements for users+posts */ test('[Find Many] Get users with posts + prepared limit', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ with: { posts: { limit: placeholder('limit'), }, }, }).prepare('query1'); const usersWithPosts = await prepared.execute({ limit: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared limit + offset', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), with: { posts: { limit: placeholder('pLimit'), }, }, }).prepare('query2'); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const prepared = db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, placeholder('id'))), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }).prepare('query3'); const usersWithPosts = await prepared.execute({ id: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared + limit + offset + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), where: (({ id }, { eq, or }) => or(eq(id, placeholder('id')), eq(id, 3))), with: { posts: { where: (({ id }, { eq }) => eq(id, placeholder('pid'))), limit: placeholder('pLimit'), }, }, }).prepare('query4'); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); /* [Find One] One relation users+posts */ test('[Find One] Get users with posts', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + limit posts', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts no results found', async (t) => { const { pgDb: db } = t; const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts).toBeUndefined(); }); test('[Find One] Get users with posts + limit posts and users', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + custom fields', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(3); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, lowerName: 'dan', posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts?.posts[1]?.createdAt, }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts?.posts[2]?.createdAt }], }); }); test('[Find One] Get users with posts + custom fields + limits', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + orderBy', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(2); expect(usersWithPosts).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where + partial', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial(true + false)', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; posts: { id: number; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find One] Get users with posts + where + partial(false)', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); /* One relation users+users. Self referencing */ test('Get user with invitee', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user + limit with invitee', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew', invitedBy: 1 }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, limit: 2, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); }); test('Get user with invitee and custom fields', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee and custom fields + limits', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), limit: 3, with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(3); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, }); }); test('Get user with invitee + order by', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[3]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[0]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee + where + partial', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial. Did not select users id, but used it in where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(true+false)', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, verified: false, }, with: { invitee: { columns: { id: true, name: true, verified: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(false)', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { verified: false, }, with: { invitee: { columns: { name: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitedBy: number | null; invitee: { id: number; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitedBy: 1, invitee: { id: 1, verified: false, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitedBy: 2, invitee: { id: 2, verified: false, invitedBy: null }, }); }); /* Two first-level relations users+users and users+posts */ test('Get user with invitee and posts', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ with: { invitee: true, posts: true, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ limit: 3, with: { invitee: true, posts: { limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + limits + custom fields in each', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ limit: 3, extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_invitee_name') }), }, posts: { limit: 1, extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_content') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + custom fields in each', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), }, posts: { extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_name') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(2); expect(response[1]?.posts.length).eq(2); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(0); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.1', lower: 'post1.1', createdAt: response[0]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { id: 4, ownerId: 2, content: 'Post2.1', lower: 'post2.1', createdAt: response[1]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { id: 6, ownerId: 3, content: 'Post3.1', lower: 'post3.1', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, posts: { orderBy: (posts, { desc }) => [desc(posts.id)], }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(4); expect(response[3]?.invitee).toBeNull(); expect(response[2]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(2); expect(response[3]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { id: 1, ownerId: 1, content: 'Post1', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[2]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { id: 3, ownerId: 2, content: 'Post2', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response[1]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 2), eq(users.id, 3))), with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 2)), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(2); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(0); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), limit: 1, with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 3)), limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(1); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + orderBy + where + custom', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { invitee: true, posts: { where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[1]?.posts[0]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy + where + partial + custom', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, }, posts: { columns: { id: true, content: true, }, where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; lower: string; posts: { id: number; lower: string; content: string }[]; invitee: { id: number; name: string; lower: string; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', invitee: { id: 1, name: 'Dan', lower: 'dan' }, posts: [{ id: 5, content: 'Post3', lower: 'post3', }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, posts: [], }); }); /* One two-level relation users+posts+comments */ test('Get user with posts and posts with comments', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; }[]; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); // expect(response[2]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ // id: 3, // ownerId: 3, // content: 'Post3', // createdAt: response[2]?.posts[0]?.createdAt, // comments: [ // { // id: , // content: 'Comment3', // creator: 3, // postId: 3, // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); }); // Get user with limit posts and limit comments // Get user with custom field + post + comment with custom field // Get user with limit + posts orderBy + comment orderBy // Get user with where + posts where + comment where // Get user with where + posts partial where + comment where // Get user with where + posts partial where + comment partial(false) where // Get user with where partial(false) + posts partial where partial(false) + comment partial(false+true) where // Get user with where + posts partial where + comment where. Didn't select field from where in posts // Get user with where + posts partial where + comment where. Didn't select field from where for all // Get with limit+offset in each /* One two-level + One first-level relation users+posts+comments and users+users */ /* One three-level relation users+posts+comments+comment_owner */ test('Get user with posts and posts with comments and comments with owner', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[]; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); test('Get user with posts and posts with comments and comments with owner where exists', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, where: (table, { exists, eq }) => exists(db.select({ one: sql`1` }).from(usersTable).where(eq(sql`1`, table.id))), }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[]; }[]; }[]>(); expect(response.length).eq(1); expect(response[0]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); /* One three-level relation + 1 first-level relation 1. users+posts+comments+comment_owner 2. users+users */ /* One four-level relation users+posts+comments+comment_likes */ /* [Find Many] Many-to-many cases Users+users_to_groups+groups */ test('[Find Many] Get users with groups', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ with: { usersToGroups: { columns: {}, with: { group: true, }, orderBy: usersToGroupsTable.userId, }, }, orderBy: usersTable.id, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: expect.arrayContaining([ { group: { id: 2, name: 'Group2', description: null, }, }, { group: { id: 3, name: 'Group3', description: null, }, }, ]), }); }); test('[Find Many] Get groups with users', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ limit: 1, where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ limit: 1, where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(0); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(0); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [], }); }); test('[Find Many] Get users with groups + orderBy', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(2); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + orderBy', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }, { user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + orderBy + limit', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); /* [Find One] Many-to-many cases Users+users_to_groups+groups */ test('[Find One] Get users with groups', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(0); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [], }); }); test('[Find One] Get groups with users + where', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(2); expect(response).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find One] Get groups with users + orderBy', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy + limit', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); test('Get groups with users + orderBy + limit', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[] >(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('Get users with groups + custom', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { usersToGroups: { columns: {}, with: { group: { extras: { lower: sql`lower(${groupsTable.name})`.as('lower_name'), }, }, }, orderBy: usersToGroupsTable.groupId, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; usersToGroups: { group: { id: number; name: string; description: string | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', lower: 'group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, usersToGroups: [ { group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }, { group: { id: 3, name: 'Group3', lower: 'group3', description: null, }, }, ], }); }); test('Get groups with users + custom', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), with: { usersToGroups: { columns: {}, with: { user: { extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), }, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; lower: string; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', lower: 'group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', lower: 'group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', lower: 'group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, }, }], }); }); test('Filter by columns not present in select', async (t) => { const { pgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); const response = await db.query.usersTable.findFirst({ columns: { id: true, }, where: eq(usersTable.name, 'Dan'), }); expect(response).toEqual({ id: 1 }); }); test('[Find Many] Get schema users - dbName & tsName match', async (t) => { const { pgDb: db } = t; await db.insert(usersV1).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); const schemaUsers = await db.query.usersV1.findMany(); expectTypeOf(schemaUsers).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; }[]>(); schemaUsers.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(schemaUsers.length).eq(3); expect(schemaUsers[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, }); expect(schemaUsers[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, }); expect(schemaUsers[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, }); }); test('[Find Many] Get schema users - dbName & tsName mismatch', async (t) => { const { pgDb: db } = t; await db.insert(usersTableV1).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); const schemaUsers = await db.query.usersTableV1.findMany(); expectTypeOf(schemaUsers).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; }[]>(); schemaUsers.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(schemaUsers.length).eq(3); expect(schemaUsers[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, }); expect(schemaUsers[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, }); expect(schemaUsers[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, }); }); test('.toSQL()', () => { const query = db.query.usersTable.findFirst().toSQL(); expect(query).toHaveProperty('sql', expect.any(String)); expect(query).toHaveProperty('params', expect.any(Array)); }); // test('Filter by relational column', async (t) => { // const { pgDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { id: 1, ownerId: 1, content: 'Content1' }, // { id: 2, ownerId: 2, content: 'Post2' }, // ]); // const response = await db.query.usersTable.findMany({ // with: { // posts: true, // }, // where: (users) => sql`json_array_length(${users.posts}) > 0`, // orderBy: usersTable.id, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // expect(response.length).toEqual(2); // expect(response[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ // id: 1, // content: 'Content1', // ownerId: 1, // createdAt: expect.any(Date), // }], // }); // expect(response[1]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // posts: [{ // id: 2, // content: 'Post2', // ownerId: 2, // createdAt: expect.any(Date), // }], // }); // }); // + custom + where + orderby // + custom + where + orderby + limit // + partial // + partial(false) // + partial + orderBy + where (all not selected) /* One four-level relation users+posts+comments+coment_likes + users+users_to_groups+groups */ /* Really hard case 1. users+posts+comments+coment_likes 2. users+users_to_groups+groups 3. users+users */ ================================================ FILE: integration-tests/tests/relational/singlestore.schema.ts ================================================ import { bigint, boolean, primaryKey, serial, singlestoreTable, text, timestamp } from 'drizzle-orm/singlestore-core'; import { relations } from 'drizzle-orm'; export const usersTable = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), invitedBy: bigint('invited_by', { mode: 'number' }), }); export const usersConfig = relations(usersTable, ({ one, many }) => ({ invitee: one(usersTable, { fields: [usersTable.invitedBy], references: [usersTable.id], }), usersToGroups: many(usersToGroupsTable), posts: many(postsTable), comments: many(commentsTable), })); export const groupsTable = singlestoreTable('groups', { id: serial('id').primaryKey(), name: text('name').notNull(), description: text('description'), }); export const groupsConfig = relations(groupsTable, ({ many }) => ({ usersToGroups: many(usersToGroupsTable), })); export const usersToGroupsTable = singlestoreTable( 'users_to_groups', { id: serial('id').primaryKey(), userId: bigint('user_id', { mode: 'number' }).notNull(), groupId: bigint('group_id', { mode: 'number' }).notNull(), }, (t) => ({ pk: primaryKey(t.userId, t.groupId), }), ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { fields: [usersToGroupsTable.groupId], references: [groupsTable.id], }), user: one(usersTable, { fields: [usersToGroupsTable.userId], references: [usersTable.id], }), })); export const postsTable = singlestoreTable('posts', { id: serial('id').primaryKey(), content: text('content').notNull(), ownerId: bigint('owner_id', { mode: 'number' }), createdAt: timestamp('created_at') .notNull() .defaultNow(), }); export const postsConfig = relations(postsTable, ({ one, many }) => ({ author: one(usersTable, { fields: [postsTable.ownerId], references: [usersTable.id], }), comments: many(commentsTable), })); export const commentsTable = singlestoreTable('comments', { id: serial('id').primaryKey(), content: text('content').notNull(), creator: bigint('creator', { mode: 'number' }), postId: bigint('post_id', { mode: 'number' }), createdAt: timestamp('created_at') .notNull() .defaultNow(), }); export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ post: one(postsTable, { fields: [commentsTable.postId], references: [postsTable.id], }), author: one(usersTable, { fields: [commentsTable.creator], references: [usersTable.id], }), likes: many(commentLikesTable), })); export const commentLikesTable = singlestoreTable('comment_likes', { id: serial('id').primaryKey(), creator: bigint('creator', { mode: 'number' }), commentId: bigint('comment_id', { mode: 'number' }), createdAt: timestamp('created_at') .notNull() .defaultNow(), }); export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ comment: one(commentsTable, { fields: [commentLikesTable.commentId], references: [commentsTable.id], }), author: one(usersTable, { fields: [commentLikesTable.creator], references: [usersTable.id], }), })); ================================================ FILE: integration-tests/tests/relational/singlestore.test.ts ================================================ // import retry from 'async-retry'; // import Docker from 'dockerode'; // import 'dotenv/config'; // import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; // import { drizzle, type SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; // import getPort from 'get-port'; // import * as mysql from 'mysql2/promise'; // import { v4 as uuid } from 'uuid'; // import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; // import * as schema from './singlestore.schema.ts'; // const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable } = schema; // const ENABLE_LOGGING = false; // /* // Test cases: // - querying nested relation without PK with additional fields // */ // declare module 'vitest' { // export interface TestContext { // docker: Docker; // singlestoreContainer: Docker.Container; // singlestoreDb: SingleStoreDriverDatabase; // singlestoreClient: mysql.Connection; // } // } // let globalDocker: Docker; // let singlestoreContainer: Docker.Container; // let db: SingleStoreDriverDatabase; // let client: mysql.Connection; // async function createDockerDB(): Promise { // const docker = new Docker(); // const port = await getPort({ port: 3306 }); // const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; // const pullStream = await docker.pull(image); // await new Promise((resolve, reject) => // docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) // ); // singlestoreContainer = await docker.createContainer({ // Image: image, // Env: ['ROOT_PASSWORD=singlestore'], // name: `drizzle-integration-tests-${uuid()}`, // HostConfig: { // AutoRemove: true, // PortBindings: { // '3306/tcp': [{ HostPort: `${port}` }], // }, // }, // }); // await singlestoreContainer.start(); // await new Promise((resolve) => setTimeout(resolve, 4000)); // return `singlestore://root:singlestore@localhost:${port}/`; // } // beforeAll(async () => { // const connectionString = process.env['SINGLESTORE_CONNECTION_STRING'] ?? (await createDockerDB()); // client = await retry(async () => { // client = await mysql.createConnection(connectionString); // await client.connect(); // return client; // }, { // retries: 20, // factor: 1, // minTimeout: 250, // maxTimeout: 250, // randomize: false, // onRetry() { // client?.end(); // }, // }); // await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); // await client.changeUser({ database: 'drizzle' }); // db = drizzle(client, { schema, logger: ENABLE_LOGGING }); // }); // afterAll(async () => { // await client?.end().catch(console.error); // await singlestoreContainer?.stop().catch(console.error); // }); // beforeEach(async (ctx) => { // ctx.singlestoreDb = db; // ctx.singlestoreClient = client; // ctx.docker = globalDocker; // ctx.singlestoreContainer = singlestoreContainer; // await ctx.singlestoreDb.execute(sql`drop table if exists \`users\``); // await ctx.singlestoreDb.execute(sql`drop table if exists \`groups\``); // await ctx.singlestoreDb.execute(sql`drop table if exists \`users_to_groups\``); // await ctx.singlestoreDb.execute(sql`drop table if exists \`posts\``); // await ctx.singlestoreDb.execute(sql`drop table if exists \`comments\``); // await ctx.singlestoreDb.execute(sql`drop table if exists \`comment_likes\``); // await ctx.singlestoreDb.execute( // sql` // CREATE TABLE \`users\` ( // \`id\` serial PRIMARY KEY NOT NULL, // \`name\` text NOT NULL, // \`verified\` boolean DEFAULT false NOT NULL, // \`invited_by\` bigint // ); // `, // ); // await ctx.singlestoreDb.execute( // sql` // CREATE TABLE \`groups\` ( // \`id\` serial PRIMARY KEY NOT NULL, // \`name\` text NOT NULL, // \`description\` text // ); // `, // ); // await ctx.singlestoreDb.execute( // sql` // CREATE TABLE \`users_to_groups\` ( // \`id\` serial PRIMARY KEY NOT NULL, // \`user_id\` bigint, // \`group_id\` bigint // ); // `, // ); // await ctx.singlestoreDb.execute( // sql` // CREATE TABLE \`posts\` ( // \`id\` serial PRIMARY KEY NOT NULL, // \`content\` text NOT NULL, // \`owner_id\` bigint, // \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL // ); // `, // ); // await ctx.singlestoreDb.execute( // sql` // CREATE TABLE \`comments\` ( // \`id\` serial PRIMARY KEY NOT NULL, // \`content\` text NOT NULL, // \`creator\` bigint, // \`post_id\` bigint, // \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL // ); // `, // ); // await ctx.singlestoreDb.execute( // sql` // CREATE TABLE \`comment_likes\` ( // \`id\` serial PRIMARY KEY NOT NULL, // \`creator\` bigint, // \`comment_id\` bigint, // \`created_at\` timestamp DEFAULT CURRENT_TIMESTAMP NOT NULL // ); // `, // ); // }); // /* // [Find Many] One relation users+posts // */ // test('[Find Many] Get users with posts', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // with: { // posts: true, // }, // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(usersWithPosts.length).eq(3); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[1]?.posts.length).eq(1); // expect(usersWithPosts[2]?.posts.length).eq(1); // expect(usersWithPosts[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // expect(usersWithPosts[1]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], // }); // expect(usersWithPosts[2]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], // }); // }); // test.skip('[Find Many] Get users with posts + limit posts', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // with: { // posts: { // limit: 1, // }, // }, // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); // usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); // usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); // usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(usersWithPosts.length).eq(3); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[1]?.posts.length).eq(1); // expect(usersWithPosts[2]?.posts.length).eq(1); // expect(usersWithPosts[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // expect(usersWithPosts[1]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], // }); // expect(usersWithPosts[2]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], // }); // }); // test.skip('[Find Many] Get users with posts + limit posts and users', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // limit: 2, // with: { // posts: { // limit: 1, // }, // }, // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); // usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); // usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(usersWithPosts.length).eq(2); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[1]?.posts.length).eq(1); // expect(usersWithPosts[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // expect(usersWithPosts[1]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], // }); // }); // test('[Find Many] Get users with posts + custom fields', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // with: { // posts: true, // }, // extras: ({ name }) => ({ // lowerName: sql`lower(${name})`.as('name_lower'), // }), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // lowerName: string; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); // usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); // usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); // usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(usersWithPosts.length).toEqual(3); // expect(usersWithPosts[0]?.posts.length).toEqual(3); // expect(usersWithPosts[1]?.posts.length).toEqual(2); // expect(usersWithPosts[2]?.posts.length).toEqual(2); // expect(usersWithPosts[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // lowerName: 'dan', // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { // id: 2, // ownerId: 1, // content: 'Post1.2', // createdAt: usersWithPosts[0]?.posts[1]?.createdAt, // }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], // }); // expect(usersWithPosts[1]).toEqual({ // id: 2, // name: 'Andrew', // lowerName: 'andrew', // verified: false, // invitedBy: null, // posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { // id: 5, // ownerId: 2, // content: 'Post2.1', // createdAt: usersWithPosts[1]?.posts[1]?.createdAt, // }], // }); // expect(usersWithPosts[2]).toEqual({ // id: 3, // name: 'Alex', // lowerName: 'alex', // verified: false, // invitedBy: null, // posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { // id: 7, // ownerId: 3, // content: 'Post3.1', // createdAt: usersWithPosts[2]?.posts[1]?.createdAt, // }], // }); // }); // test.skip('[Find Many] Get users with posts + custom fields + limits', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // limit: 1, // with: { // posts: { // limit: 1, // }, // }, // extras: (usersTable, { sql }) => ({ // lowerName: sql`lower(${usersTable.name})`.as('name_lower'), // }), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // lowerName: string; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // expect(usersWithPosts.length).toEqual(1); // expect(usersWithPosts[0]?.posts.length).toEqual(1); // expect(usersWithPosts[0]).toEqual({ // id: 1, // name: 'Dan', // lowerName: 'dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // }); // test.skip('[Find Many] Get users with posts + orderBy', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: '1' }, // { ownerId: 1, content: '2' }, // { ownerId: 1, content: '3' }, // { ownerId: 2, content: '4' }, // { ownerId: 2, content: '5' }, // { ownerId: 3, content: '6' }, // { ownerId: 3, content: '7' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // with: { // posts: { // orderBy: (postsTable, { desc }) => [desc(postsTable.content)], // }, // }, // orderBy: (usersTable, { desc }) => [desc(usersTable.id)], // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(3); // expect(usersWithPosts[0]?.posts.length).eq(2); // expect(usersWithPosts[1]?.posts.length).eq(2); // expect(usersWithPosts[2]?.posts.length).eq(3); // expect(usersWithPosts[2]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ id: 3, ownerId: 1, content: '3', createdAt: usersWithPosts[2]?.posts[2]?.createdAt }, { // id: 2, // ownerId: 1, // content: '2', // createdAt: usersWithPosts[2]?.posts[1]?.createdAt, // }, { id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], // }); // expect(usersWithPosts[1]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // posts: [{ // id: 5, // ownerId: 2, // content: '5', // createdAt: usersWithPosts[1]?.posts[1]?.createdAt, // }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], // }); // expect(usersWithPosts[0]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ // id: 7, // ownerId: 3, // content: '7', // createdAt: usersWithPosts[0]?.posts[1]?.createdAt, // }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // }); // test('[Find Many] Get users with posts + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // where: (({ id }, { eq }) => eq(id, 1)), // with: { // posts: { // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(1); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // }); // test('[Find Many] Get users with posts + where + partial', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // columns: { // id: true, // name: true, // }, // with: { // posts: { // columns: { // id: true, // content: true, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // posts: { // id: number; // content: string; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(1); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[0]).toEqual({ // id: 1, // name: 'Dan', // posts: [{ id: 1, content: 'Post1' }], // }); // }); // test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // columns: { // id: true, // name: true, // }, // with: { // posts: { // columns: { // id: true, // content: true, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // posts: { // id: number; // content: string; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(1); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[0]).toEqual({ // id: 1, // name: 'Dan', // posts: [{ id: 1, content: 'Post1' }], // }); // }); // test('[Find Many] Get users with posts + where + partial(true + false)', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // columns: { // id: true, // name: false, // }, // with: { // posts: { // columns: { // id: true, // content: false, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // posts: { // id: number; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(1); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[0]).toEqual({ // id: 1, // posts: [{ id: 1 }], // }); // }); // test('[Find Many] Get users with posts + where + partial(false)', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // columns: { // name: false, // }, // with: { // posts: { // columns: { // content: false, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(1); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[0]).toEqual({ // id: 1, // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // }); // test('[Find Many] Get users with posts in transaction', async (t) => { // const { singlestoreDb: db } = t; // let usersWithPosts: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[] = []; // await db.transaction(async (tx) => { // await tx.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await tx.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // usersWithPosts = await tx.query.usersTable.findMany({ // where: (({ id }, { eq }) => eq(id, 1)), // with: { // posts: { // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // }); // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(1); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // }); // test('[Find Many] Get users with posts in rollbacked transaction', async (t) => { // const { singlestoreDb: db } = t; // let usersWithPosts: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[] = []; // await expect(db.transaction(async (tx) => { // await tx.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await tx.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // tx.rollback(); // usersWithPosts = await tx.query.usersTable.findMany({ // where: (({ id }, { eq }) => eq(id, 1)), // with: { // posts: { // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // }); // })).rejects.toThrowError(new TransactionRollbackError()); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(0); // }); // // select only custom // test('[Find Many] Get only custom fields', async () => { // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { id: 1, ownerId: 1, content: 'Post1' }, // { id: 2, ownerId: 1, content: 'Post1.2' }, // { id: 3, ownerId: 1, content: 'Post1.3' }, // { id: 4, ownerId: 2, content: 'Post2' }, // { id: 5, ownerId: 2, content: 'Post2.1' }, // { id: 6, ownerId: 3, content: 'Post3' }, // { id: 7, ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // columns: {}, // with: { // posts: { // columns: {}, // extras: ({ content }) => ({ // lowerName: sql`lower(${content})`.as('content_lower'), // }), // }, // }, // extras: ({ name }) => ({ // lowerName: sql`lower(${name})`.as('name_lower'), // }), // }); // // Type Assertion // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // lowerName: string; // posts: { // lowerName: string; // }[]; // }[]>(); // // General Assertions // expect(usersWithPosts).toHaveLength(3); // // Helper function to find user by lowerName // const findUser = (lowerName: string) => usersWithPosts.find((user) => user.lowerName === lowerName); // // Assertions for each user // const dan = findUser('dan'); // const andrew = findUser('andrew'); // const alex = findUser('alex'); // expect(dan).toBeDefined(); // expect(andrew).toBeDefined(); // expect(alex).toBeDefined(); // // Verify the number of posts for each user // expect(dan?.posts).toHaveLength(3); // expect(andrew?.posts).toHaveLength(2); // expect(alex?.posts).toHaveLength(2); // // Define expected posts for each user // const expectedDanPosts = ['post1', 'post1.2', 'post1.3']; // const expectedAndrewPosts = ['post2', 'post2.1']; // const expectedAlexPosts = ['post3', 'post3.1']; // // Helper function to extract lowerNames from posts // const getPostLowerNames = (posts: { lowerName: string }[]) => posts.map((post) => post.lowerName); // // Assertions for Dan's posts // expect(getPostLowerNames(dan!.posts)).toEqual(expect.arrayContaining(expectedDanPosts)); // expect(getPostLowerNames(dan!.posts)).toHaveLength(expectedDanPosts.length); // // Assertions for Andrew's posts // expect(getPostLowerNames(andrew!.posts)).toEqual(expect.arrayContaining(expectedAndrewPosts)); // expect(getPostLowerNames(andrew!.posts)).toHaveLength(expectedAndrewPosts.length); // // Assertions for Alex's posts // expect(getPostLowerNames(alex!.posts)).toEqual(expect.arrayContaining(expectedAlexPosts)); // expect(getPostLowerNames(alex!.posts)).toHaveLength(expectedAlexPosts.length); // }); // // select only custom with where clause (Order Agnostic) // test('[Find Many] Get only custom fields + where', async (t) => { // const { singlestoreDb: db } = t; // // Insert Users // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // // Insert Posts // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // // Query Users with Posts where users.id = 1 and posts.id >= 2 // const usersWithPosts = await db.query.usersTable.findMany({ // columns: {}, // with: { // posts: { // columns: {}, // where: gte(postsTable.id, 2), // extras: ({ content }) => ({ // lowerName: sql`lower(${content})`.as('content_lower'), // }), // }, // }, // where: eq(usersTable.id, 1), // extras: ({ name }) => ({ // lowerName: sql`lower(${name})`.as('name_lower'), // }), // }); // // Type Assertion // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // lowerName: string; // posts: { // lowerName: string; // }[]; // }[]>(); // // General Assertions // expect(usersWithPosts).toHaveLength(1); // // Since we expect only one user, we can extract it directly // const danWithPosts = usersWithPosts[0]; // // Assert that the user exists and has the correct lowerName // expect(danWithPosts).toBeDefined(); // expect(danWithPosts?.lowerName).toBe('dan'); // // Assert that the user has the expected number of posts // expect(danWithPosts?.posts).toHaveLength(2); // // Define the expected posts // const expectedPosts = ['post1.2', 'post1.3']; // // Extract the lowerName of each post // const actualPostLowerNames = danWithPosts?.posts.map((post) => post.lowerName); // // Assert that all expected posts are present, regardless of order // for (const expectedPost of expectedPosts) { // expect(actualPostLowerNames).toContain(expectedPost); // } // // Additionally, ensure no unexpected posts are present // expect(actualPostLowerNames).toHaveLength(expectedPosts.length); // }); // test.skip('[Find Many] Get only custom fields + where + limit', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // columns: {}, // with: { // posts: { // columns: {}, // where: gte(postsTable.id, 2), // limit: 1, // extras: ({ content }) => ({ // lowerName: sql`lower(${content})`.as('content_lower'), // }), // }, // }, // where: eq(usersTable.id, 1), // extras: ({ name }) => ({ // lowerName: sql`lower(${name})`.as('name_lower'), // }), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // lowerName: string; // posts: { // lowerName: string; // }[]; // }[]>(); // expect(usersWithPosts.length).toEqual(1); // expect(usersWithPosts[0]?.posts.length).toEqual(1); // expect(usersWithPosts).toContainEqual({ // lowerName: 'dan', // posts: [{ lowerName: 'post1.2' }], // }); // }); // test.skip('[Find Many] Get only custom fields + where + orderBy', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findMany({ // columns: {}, // with: { // posts: { // columns: {}, // where: gte(postsTable.id, 2), // orderBy: [desc(postsTable.id)], // extras: ({ content }) => ({ // lowerName: sql`lower(${content})`.as('content_lower'), // }), // }, // }, // where: eq(usersTable.id, 1), // extras: ({ name }) => ({ // lowerName: sql`lower(${name})`.as('name_lower'), // }), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // lowerName: string; // posts: { // lowerName: string; // }[]; // }[]>(); // expect(usersWithPosts.length).toEqual(1); // expect(usersWithPosts[0]?.posts.length).toEqual(2); // expect(usersWithPosts).toContainEqual({ // lowerName: 'dan', // posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], // }); // }); // // select only custom find one (Order Agnostic) // test('[Find One] Get only custom fields (Order Agnostic)', async () => { // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // // Query to find the first user without any specific order // const usersWithPosts = await db.query.usersTable.findFirst({ // columns: {}, // with: { // posts: { // columns: {}, // extras: ({ content }) => ({ // lowerName: sql`lower(${content})`.as('content_lower'), // }), // }, // }, // extras: ({ name }) => ({ // lowerName: sql`lower(${name})`.as('name_lower'), // }), // }); // // Type Assertion // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // lowerName: string; // posts: { // lowerName: string; // }[]; // } | undefined // >(); // // General Assertions // expect(usersWithPosts).toBeDefined(); // // Since findFirst without orderBy can return any user, we'll verify the returned user and their posts // if (usersWithPosts) { // // Define expected users and their corresponding posts // const expectedUsers: { [key: string]: string[] } = { // dan: ['post1', 'post1.2', 'post1.3'], // andrew: ['post2', 'post2.1'], // alex: ['post3', 'post3.1'], // }; // // Verify that the returned user is one of the expected users // expect(Object.keys(expectedUsers)).toContain(usersWithPosts.lowerName); // // Get the expected posts for the returned user // const expectedPosts = expectedUsers[usersWithPosts.lowerName] as string[]; // // Verify the number of posts // expect(usersWithPosts.posts).toHaveLength(expectedPosts.length); // // Extract the lowerName of each post // const actualPostLowerNames = usersWithPosts.posts.map((post) => post.lowerName); // // Assert that all expected posts are present, regardless of order // for (const expectedPost of expectedPosts) { // expect(actualPostLowerNames).toContain(expectedPost.toLowerCase()); // } // } // }); // // select only custom find one with where clause (Order Agnostic) // test('[Find One] Get only custom fields + where (Order Agnostic)', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // // Query to find the first user with id = 1 and posts with id >= 2 // const usersWithPosts = await db.query.usersTable.findFirst({ // columns: {}, // with: { // posts: { // columns: {}, // where: gte(postsTable.id, 2), // extras: ({ content }) => ({ // lowerName: sql`lower(${content})`.as('content_lower'), // }), // }, // }, // where: eq(usersTable.id, 1), // extras: ({ name }) => ({ // lowerName: sql`lower(${name})`.as('name_lower'), // }), // }); // // Type Assertion // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // lowerName: string; // posts: { // lowerName: string; // }[]; // } | undefined // >(); // // General Assertions // expect(usersWithPosts).toBeDefined(); // if (usersWithPosts) { // // Assert that the returned user has the expected lowerName // expect(usersWithPosts.lowerName).toBe('dan'); // // Assert that the user has exactly two posts // expect(usersWithPosts.posts).toHaveLength(2); // // Define the expected posts // const expectedPosts = ['post1.2', 'post1.3']; // // Extract the lowerName of each post // const actualPostLowerNames = usersWithPosts.posts.map((post) => post.lowerName); // // Assert that all expected posts are present, regardless of order // for (const expectedPost of expectedPosts) { // expect(actualPostLowerNames).toContain(expectedPost.toLowerCase()); // } // // Additionally, ensure no unexpected posts are present // expect(actualPostLowerNames).toHaveLength(expectedPosts.length); // } // }); // test.skip('[Find One] Get only custom fields + where + limit', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // columns: {}, // with: { // posts: { // columns: {}, // where: gte(postsTable.id, 2), // limit: 1, // extras: ({ content }) => ({ // lowerName: sql`lower(${content})`.as('content_lower'), // }), // }, // }, // where: eq(usersTable.id, 1), // extras: ({ name }) => ({ // lowerName: sql`lower(${name})`.as('name_lower'), // }), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // lowerName: string; // posts: { // lowerName: string; // }[]; // } | undefined // >(); // expect(usersWithPosts?.posts.length).toEqual(1); // expect(usersWithPosts).toEqual({ // lowerName: 'dan', // posts: [{ lowerName: 'post1.2' }], // }); // }); // test.skip('[Find One] Get only custom fields + where + orderBy', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // columns: {}, // with: { // posts: { // columns: {}, // where: gte(postsTable.id, 2), // orderBy: [desc(postsTable.id)], // extras: ({ content }) => ({ // lowerName: sql`lower(${content})`.as('content_lower'), // }), // }, // }, // where: eq(usersTable.id, 1), // extras: ({ name }) => ({ // lowerName: sql`lower(${name})`.as('name_lower'), // }), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // lowerName: string; // posts: { // lowerName: string; // }[]; // } | undefined // >(); // expect(usersWithPosts?.posts.length).toEqual(2); // expect(usersWithPosts).toEqual({ // lowerName: 'dan', // posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], // }); // }); // // columns {} // test('[Find Many] Get select {}', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await expect( // async () => // await db.query.usersTable.findMany({ // columns: {}, // }), // ).rejects.toThrow(DrizzleError); // }); // // columns {} // test('[Find One] Get select {}', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await expect(async () => // await db.query.usersTable.findFirst({ // columns: {}, // }) // ).rejects.toThrow(DrizzleError); // }); // // deep select {} // test('[Find Many] Get deep select {}', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // await expect(async () => // await db.query.usersTable.findMany({ // columns: {}, // with: { // posts: { // columns: {}, // }, // }, // }) // ).rejects.toThrow(DrizzleError); // }); // // deep select {} // test('[Find One] Get deep select {}', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // await expect(async () => // await db.query.usersTable.findFirst({ // columns: {}, // with: { // posts: { // columns: {}, // }, // }, // }) // ).rejects.toThrow(DrizzleError); // }); // /* // Prepared statements for users+posts // */ // test.skip('[Find Many] Get users with posts + prepared limit', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const prepared = db.query.usersTable.findMany({ // with: { // posts: { // limit: placeholder('limit'), // }, // }, // }).prepare(); // const usersWithPosts = await prepared.execute({ limit: 1 }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(3); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[1]?.posts.length).eq(1); // expect(usersWithPosts[2]?.posts.length).eq(1); // expect(usersWithPosts).toContainEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // expect(usersWithPosts).toContainEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], // }); // expect(usersWithPosts).toContainEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], // }); // }); // test.skip('[Find Many] Get users with posts + prepared limit + offset', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const prepared = db.query.usersTable.findMany({ // limit: placeholder('uLimit'), // offset: placeholder('uOffset'), // with: { // posts: { // limit: placeholder('pLimit'), // }, // }, // }).prepare(); // const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(2); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[1]?.posts.length).eq(1); // expect(usersWithPosts).toContainEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // expect(usersWithPosts).toContainEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], // }); // }); // test('[Find Many] Get users with posts + prepared where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const prepared = db.query.usersTable.findMany({ // where: (({ id }, { eq }) => eq(id, placeholder('id'))), // with: { // posts: { // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // }).prepare(); // const usersWithPosts = await prepared.execute({ id: 1 }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(1); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // }); // test.skip('[Find Many] Get users with posts + prepared + limit + offset + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const prepared = db.query.usersTable.findMany({ // limit: placeholder('uLimit'), // offset: placeholder('uOffset'), // where: (({ id }, { eq, or }) => or(eq(id, placeholder('id')), eq(id, 3))), // with: { // posts: { // where: (({ id }, { eq }) => eq(id, placeholder('pid'))), // limit: placeholder('pLimit'), // }, // }, // }).prepare(); // const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); // expectTypeOf(usersWithPosts).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // }[]>(); // expect(usersWithPosts.length).eq(1); // expect(usersWithPosts[0]?.posts.length).eq(1); // expect(usersWithPosts).toContainEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], // }); // }); // /* // [Find One] One relation users+posts // */ // test.only('[Find One] Get users with posts', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // with: { // posts: true, // }, // }); // // Type Assertion // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // } | undefined // >(); // // General Assertions // expect(usersWithPosts).toBeDefined(); // if (usersWithPosts) { // const { id, name, posts } = usersWithPosts; // // Verify that the user is one of the inserted users // const validUsers: { [key: number]: string } = { // 1: 'dan', // 2: 'andrew', // 3: 'alex', // }; // expect(validUsers[id]).toBe(name.toLowerCase()); // // Assert that the user has exactly one post // expect(posts).toHaveLength(1); // const post = posts[0]; // // Verify that the post belongs to the user // expect(post?.ownerId).toBe(id); // // Verify that the post content matches the user // const expectedPostContent = `Post${id}`; // expect(post?.content.toLowerCase()).toBe(expectedPostContent.toLowerCase()); // // Optionally, verify the presence of `createdAt` // expect(post?.createdAt).toBeInstanceOf(Date); // } // }); // test.skip('[Find One] Get users with posts + limit posts', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // with: { // posts: { // limit: 1, // }, // }, // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // } | undefined // >(); // expect(usersWithPosts!.posts.length).eq(1); // expect(usersWithPosts).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], // }); // }); // test.skip('[Find One] Get users with posts no results found', async (t) => { // const { singlestoreDb: db } = t; // const usersWithPosts = await db.query.usersTable.findFirst({ // with: { // posts: { // limit: 1, // }, // }, // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // } | undefined // >(); // expect(usersWithPosts).toBeUndefined(); // }); // test.skip('[Find One] Get users with posts + limit posts and users', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // with: { // posts: { // limit: 1, // }, // }, // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // } | undefined // >(); // expect(usersWithPosts!.posts.length).eq(1); // expect(usersWithPosts).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], // }); // }); // test('[Find One] Get users with posts + custom fields', async () => { // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // with: { // posts: true, // }, // extras: ({ name }) => ({ // lowerName: sql`lower(${name})`.as('name_lower'), // }), // }); // // Type Assertion // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // lowerName: string; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // } | undefined // >(); // // General Assertions // expect(usersWithPosts).toBeDefined(); // if (usersWithPosts) { // const { id, lowerName, posts } = usersWithPosts; // // Define valid users and their expected lower names // const validUsers: { [key: number]: string } = { // 1: 'dan', // 2: 'andrew', // 3: 'alex', // }; // // Verify that the returned user's lowerName matches the expected value // expect(validUsers[id]).toBe(lowerName); // // Define the expected posts based on the user ID // const expectedPostsByUser: Record = { // 1: ['post1', 'post1.2', 'post1.3'], // 2: ['post2', 'post2.1'], // 3: ['post3', 'post3.1'], // }; // // Get the expected posts for the returned user // const expectedPosts = expectedPostsByUser[id] || []; // // Extract the lowerName of each post // const actualPostContents = posts.map((post) => post.content.toLowerCase()); // // Assert that all expected posts are present, regardless of order // for (const expectedPost of expectedPosts) { // expect(actualPostContents).toContain(expectedPost.toLowerCase()); // } // // Optionally, ensure that no unexpected posts are present // expect(actualPostContents).toHaveLength(expectedPosts.length); // } // }); // test.skip('[Find One] Get users with posts + custom fields + limits', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.2' }, // { ownerId: 1, content: 'Post1.3' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // with: { // posts: { // limit: 1, // }, // }, // extras: (usersTable, { sql }) => ({ // lowerName: sql`lower(${usersTable.name})`.as('name_lower'), // }), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // lowerName: string; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // } | undefined // >(); // expect(usersWithPosts!.posts.length).toEqual(1); // expect(usersWithPosts).toEqual({ // id: 1, // name: 'Dan', // lowerName: 'dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], // }); // }); // test.skip('[Find One] Get users with posts + orderBy', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: '1' }, // { ownerId: 1, content: '2' }, // { ownerId: 1, content: '3' }, // { ownerId: 2, content: '4' }, // { ownerId: 2, content: '5' }, // { ownerId: 3, content: '6' }, // { ownerId: 3, content: '7' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // with: { // posts: { // orderBy: (postsTable, { desc }) => [desc(postsTable.content)], // }, // }, // orderBy: (usersTable, { desc }) => [desc(usersTable.id)], // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // } | undefined // >(); // expect(usersWithPosts!.posts.length).eq(2); // expect(usersWithPosts).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ // id: 7, // ownerId: 3, // content: '7', // createdAt: usersWithPosts?.posts[1]?.createdAt, // }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], // }); // }); // test('[Find One] Get users with posts + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // where: (({ id }, { eq }) => eq(id, 1)), // with: { // posts: { // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // }[]; // } | undefined // >(); // expect(usersWithPosts!.posts.length).eq(1); // expect(usersWithPosts).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], // }); // }); // test('[Find One] Get users with posts + where + partial', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // columns: { // id: true, // name: true, // }, // with: { // posts: { // columns: { // id: true, // content: true, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // name: string; // posts: { // id: number; // content: string; // }[]; // } | undefined // >(); // expect(usersWithPosts!.posts.length).eq(1); // expect(usersWithPosts).toEqual({ // id: 1, // name: 'Dan', // posts: [{ id: 1, content: 'Post1' }], // }); // }); // test.skip('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // columns: { // id: true, // name: true, // }, // with: { // posts: { // columns: { // id: true, // content: true, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // name: string; // posts: { // id: number; // content: string; // }[]; // } | undefined // >(); // expect(usersWithPosts!.posts.length).eq(1); // expect(usersWithPosts).toEqual({ // id: 1, // name: 'Dan', // posts: [{ id: 1, content: 'Post1' }], // }); // }); // test.skip('[Find One] Get users with posts + where + partial(true + false)', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // columns: { // id: true, // name: false, // }, // with: { // posts: { // columns: { // id: true, // content: false, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // posts: { // id: number; // }[]; // } | undefined // >(); // expect(usersWithPosts!.posts.length).eq(1); // expect(usersWithPosts).toEqual({ // id: 1, // posts: [{ id: 1 }], // }); // }); // test.skip('[Find One] Get users with posts + where + partial(false)', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const usersWithPosts = await db.query.usersTable.findFirst({ // columns: { // name: false, // }, // with: { // posts: { // columns: { // content: false, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }, // }, // where: (({ id }, { eq }) => eq(id, 1)), // }); // expectTypeOf(usersWithPosts).toEqualTypeOf< // { // id: number; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // ownerId: number | null; // createdAt: Date; // }[]; // } | undefined // >(); // expect(usersWithPosts!.posts.length).eq(1); // expect(usersWithPosts).toEqual({ // id: 1, // verified: false, // invitedBy: null, // posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], // }); // }); // /* // One relation users+users. Self referencing // */ // test.skip('Get user with invitee', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // const usersWithInvitee = await db.query.usersTable.findMany({ // with: { // invitee: true, // }, // }); // expectTypeOf(usersWithInvitee).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // invitee: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(usersWithInvitee.length).eq(4); // expect(usersWithInvitee[0]?.invitee).toBeNull(); // expect(usersWithInvitee[1]?.invitee).toBeNull(); // expect(usersWithInvitee[2]?.invitee).not.toBeNull(); // expect(usersWithInvitee[3]?.invitee).not.toBeNull(); // expect(usersWithInvitee[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // invitee: null, // }); // expect(usersWithInvitee[1]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // invitee: null, // }); // expect(usersWithInvitee[2]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, // }); // expect(usersWithInvitee[3]).toEqual({ // id: 4, // name: 'John', // verified: false, // invitedBy: 2, // invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, // }); // }); // test.skip('Get user + limit with invitee', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew', invitedBy: 1 }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // const usersWithInvitee = await db.query.usersTable.findMany({ // with: { // invitee: true, // }, // limit: 2, // }); // expectTypeOf(usersWithInvitee).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // invitee: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(usersWithInvitee.length).eq(2); // expect(usersWithInvitee[0]?.invitee).toBeNull(); // expect(usersWithInvitee[1]?.invitee).not.toBeNull(); // expect(usersWithInvitee[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // invitee: null, // }); // expect(usersWithInvitee[1]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, // }); // }); // test.skip('Get user with invitee and custom fields', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // const usersWithInvitee = await db.query.usersTable.findMany({ // extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), // with: { // invitee: { // extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), // }, // }, // }); // expectTypeOf(usersWithInvitee).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // lower: string; // invitedBy: number | null; // invitee: { // id: number; // name: string; // verified: boolean; // lower: string; // invitedBy: number | null; // } | null; // }[] // >(); // usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(usersWithInvitee.length).eq(4); // expect(usersWithInvitee[0]?.invitee).toBeNull(); // expect(usersWithInvitee[1]?.invitee).toBeNull(); // expect(usersWithInvitee[2]?.invitee).not.toBeNull(); // expect(usersWithInvitee[3]?.invitee).not.toBeNull(); // expect(usersWithInvitee[0]).toEqual({ // id: 1, // name: 'Dan', // lower: 'dan', // verified: false, // invitedBy: null, // invitee: null, // }); // expect(usersWithInvitee[1]).toEqual({ // id: 2, // name: 'Andrew', // lower: 'andrew', // verified: false, // invitedBy: null, // invitee: null, // }); // expect(usersWithInvitee[2]).toEqual({ // id: 3, // name: 'Alex', // lower: 'alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, // }); // expect(usersWithInvitee[3]).toEqual({ // id: 4, // name: 'John', // lower: 'john', // verified: false, // invitedBy: 2, // invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, // }); // }); // test.skip('Get user with invitee and custom fields + limits', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // const usersWithInvitee = await db.query.usersTable.findMany({ // extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), // limit: 3, // with: { // invitee: { // extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), // }, // }, // }); // expectTypeOf(usersWithInvitee).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // lower: string; // invitedBy: number | null; // invitee: { // id: number; // name: string; // verified: boolean; // lower: string; // invitedBy: number | null; // } | null; // }[] // >(); // usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(usersWithInvitee.length).eq(3); // expect(usersWithInvitee[0]?.invitee).toBeNull(); // expect(usersWithInvitee[1]?.invitee).toBeNull(); // expect(usersWithInvitee[2]?.invitee).not.toBeNull(); // expect(usersWithInvitee[0]).toEqual({ // id: 1, // name: 'Dan', // lower: 'dan', // verified: false, // invitedBy: null, // invitee: null, // }); // expect(usersWithInvitee[1]).toEqual({ // id: 2, // name: 'Andrew', // lower: 'andrew', // verified: false, // invitedBy: null, // invitee: null, // }); // expect(usersWithInvitee[2]).toEqual({ // id: 3, // name: 'Alex', // lower: 'alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, // }); // }); // test.skip('Get user with invitee + order by', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // const usersWithInvitee = await db.query.usersTable.findMany({ // orderBy: (users, { desc }) => [desc(users.id)], // with: { // invitee: true, // }, // }); // expectTypeOf(usersWithInvitee).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // invitee: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // expect(usersWithInvitee.length).eq(4); // expect(usersWithInvitee[3]?.invitee).toBeNull(); // expect(usersWithInvitee[2]?.invitee).toBeNull(); // expect(usersWithInvitee[1]?.invitee).not.toBeNull(); // expect(usersWithInvitee[0]?.invitee).not.toBeNull(); // expect(usersWithInvitee[3]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // invitee: null, // }); // expect(usersWithInvitee[2]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // invitee: null, // }); // expect(usersWithInvitee[1]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, // }); // expect(usersWithInvitee[0]).toEqual({ // id: 4, // name: 'John', // verified: false, // invitedBy: 2, // invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, // }); // }); // test.skip('Get user with invitee + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // const usersWithInvitee = await db.query.usersTable.findMany({ // where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), // with: { // invitee: true, // }, // }); // expectTypeOf(usersWithInvitee).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // invitee: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // expect(usersWithInvitee.length).eq(2); // expect(usersWithInvitee[0]?.invitee).not.toBeNull(); // expect(usersWithInvitee[1]?.invitee).not.toBeNull(); // expect(usersWithInvitee).toContainEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, // }); // expect(usersWithInvitee).toContainEqual({ // id: 4, // name: 'John', // verified: false, // invitedBy: 2, // invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, // }); // }); // test.skip('Get user with invitee + where + partial', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // const usersWithInvitee = await db.query.usersTable.findMany({ // where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), // columns: { // id: true, // name: true, // }, // with: { // invitee: { // columns: { // id: true, // name: true, // }, // }, // }, // }); // expectTypeOf(usersWithInvitee).toEqualTypeOf< // { // id: number; // name: string; // invitee: { // id: number; // name: string; // } | null; // }[] // >(); // expect(usersWithInvitee.length).eq(2); // expect(usersWithInvitee[0]?.invitee).not.toBeNull(); // expect(usersWithInvitee[1]?.invitee).not.toBeNull(); // expect(usersWithInvitee).toContainEqual({ // id: 3, // name: 'Alex', // invitee: { id: 1, name: 'Dan' }, // }); // expect(usersWithInvitee).toContainEqual({ // id: 4, // name: 'John', // invitee: { id: 2, name: 'Andrew' }, // }); // }); // test.skip('Get user with invitee + where + partial. Did not select users id, but used it in where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // const usersWithInvitee = await db.query.usersTable.findMany({ // where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), // columns: { // name: true, // }, // with: { // invitee: { // columns: { // id: true, // name: true, // }, // }, // }, // }); // expectTypeOf(usersWithInvitee).toEqualTypeOf< // { // name: string; // invitee: { // id: number; // name: string; // } | null; // }[] // >(); // expect(usersWithInvitee.length).eq(2); // expect(usersWithInvitee[0]?.invitee).not.toBeNull(); // expect(usersWithInvitee[1]?.invitee).not.toBeNull(); // expect(usersWithInvitee).toContainEqual({ // name: 'Alex', // invitee: { id: 1, name: 'Dan' }, // }); // expect(usersWithInvitee).toContainEqual({ // name: 'John', // invitee: { id: 2, name: 'Andrew' }, // }); // }); // test.skip('Get user with invitee + where + partial(true+false)', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // const usersWithInvitee = await db.query.usersTable.findMany({ // where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), // columns: { // id: true, // name: true, // verified: false, // }, // with: { // invitee: { // columns: { // id: true, // name: true, // verified: false, // }, // }, // }, // }); // expectTypeOf(usersWithInvitee).toEqualTypeOf< // { // id: number; // name: string; // invitee: { // id: number; // name: string; // } | null; // }[] // >(); // expect(usersWithInvitee.length).eq(2); // expect(usersWithInvitee[0]?.invitee).not.toBeNull(); // expect(usersWithInvitee[1]?.invitee).not.toBeNull(); // expect(usersWithInvitee).toContainEqual({ // id: 3, // name: 'Alex', // invitee: { id: 1, name: 'Dan' }, // }); // expect(usersWithInvitee).toContainEqual({ // id: 4, // name: 'John', // invitee: { id: 2, name: 'Andrew' }, // }); // }); // test.skip('Get user with invitee + where + partial(false)', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // const usersWithInvitee = await db.query.usersTable.findMany({ // where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), // columns: { // verified: false, // }, // with: { // invitee: { // columns: { // name: false, // }, // }, // }, // }); // expectTypeOf(usersWithInvitee).toEqualTypeOf< // { // id: number; // name: string; // invitedBy: number | null; // invitee: { // id: number; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // expect(usersWithInvitee.length).eq(2); // expect(usersWithInvitee[0]?.invitee).not.toBeNull(); // expect(usersWithInvitee[1]?.invitee).not.toBeNull(); // expect(usersWithInvitee).toContainEqual({ // id: 3, // name: 'Alex', // invitedBy: 1, // invitee: { id: 1, verified: false, invitedBy: null }, // }); // expect(usersWithInvitee).toContainEqual({ // id: 4, // name: 'John', // invitedBy: 2, // invitee: { id: 2, verified: false, invitedBy: null }, // }); // }); // /* // Two first-level relations users+users and users+posts // */ // test.skip('Get user with invitee and posts', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const response = await db.query.usersTable.findMany({ // with: { // invitee: true, // posts: true, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; // invitee: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).eq(4); // expect(response[0]?.invitee).toBeNull(); // expect(response[1]?.invitee).toBeNull(); // expect(response[2]?.invitee).not.toBeNull(); // expect(response[3]?.invitee).not.toBeNull(); // expect(response[0]?.posts.length).eq(1); // expect(response[1]?.posts.length).eq(1); // expect(response[2]?.posts.length).eq(1); // expect(response).toContainEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // invitee: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], // }); // expect(response).toContainEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // invitee: null, // posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], // }); // expect(response).toContainEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, // posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], // }); // expect(response).toContainEqual({ // id: 4, // name: 'John', // verified: false, // invitedBy: 2, // invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, // posts: [], // }); // }); // test.skip('Get user with invitee and posts + limit posts and users', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const response = await db.query.usersTable.findMany({ // limit: 3, // with: { // invitee: true, // posts: { // limit: 1, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; // invitee: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).eq(3); // expect(response[0]?.invitee).toBeNull(); // expect(response[1]?.invitee).toBeNull(); // expect(response[2]?.invitee).not.toBeNull(); // expect(response[0]?.posts.length).eq(1); // expect(response[1]?.posts.length).eq(1); // expect(response[2]?.posts.length).eq(1); // expect(response).toContainEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // invitee: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], // }); // expect(response).toContainEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // invitee: null, // posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], // }); // expect(response).toContainEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, // posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], // }); // }); // test.skip('Get user with invitee and posts + limits + custom fields in each', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const response = await db.query.usersTable.findMany({ // limit: 3, // extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), // with: { // invitee: { // extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_invitee_name') }), // }, // posts: { // limit: 1, // extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_content') }), // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // lower: string; // invitedBy: number | null; // posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; // invitee: { // id: number; // name: string; // lower: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).eq(3); // expect(response[0]?.invitee).toBeNull(); // expect(response[1]?.invitee).toBeNull(); // expect(response[2]?.invitee).not.toBeNull(); // expect(response[0]?.posts.length).eq(1); // expect(response[1]?.posts.length).eq(1); // expect(response[2]?.posts.length).eq(1); // expect(response).toContainEqual({ // id: 1, // name: 'Dan', // lower: 'dan', // verified: false, // invitedBy: null, // invitee: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], // }); // expect(response).toContainEqual({ // id: 2, // name: 'Andrew', // lower: 'andrew', // verified: false, // invitedBy: null, // invitee: null, // posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], // }); // expect(response).toContainEqual({ // id: 3, // name: 'Alex', // lower: 'alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, // posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], // }); // }); // test.skip('Get user with invitee and posts + custom fields in each', async () => { // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const response = await db.query.usersTable.findMany({ // extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), // with: { // invitee: { // extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), // }, // posts: { // extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_name') }), // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // lower: string; // invitedBy: number | null; // posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; // invitee: { // id: number; // name: string; // lower: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // response[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); // response[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); // response[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).eq(4); // expect(response[0]?.invitee).toBeNull(); // expect(response[1]?.invitee).toBeNull(); // expect(response[2]?.invitee).not.toBeNull(); // expect(response[3]?.invitee).not.toBeNull(); // expect(response[0]?.posts.length).eq(2); // expect(response[1]?.posts.length).eq(2); // expect(response[2]?.posts.length).eq(2); // expect(response[3]?.posts.length).eq(0); // expect(response).toContainEqual({ // id: 1, // name: 'Dan', // lower: 'dan', // verified: false, // invitedBy: null, // invitee: null, // posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { // id: 2, // ownerId: 1, // content: 'Post1.1', // lower: 'post1.1', // createdAt: response[0]?.posts[1]?.createdAt, // }], // }); // expect(response).toContainEqual({ // id: 2, // name: 'Andrew', // lower: 'andrew', // verified: false, // invitedBy: null, // invitee: null, // posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { // id: 4, // ownerId: 2, // content: 'Post2.1', // lower: 'post2.1', // createdAt: response[1]?.posts[1]?.createdAt, // }], // }); // expect(response).toContainEqual({ // id: 3, // name: 'Alex', // lower: 'alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, // posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { // id: 6, // ownerId: 3, // content: 'Post3.1', // lower: 'post3.1', // createdAt: response[2]?.posts[1]?.createdAt, // }], // }); // expect(response).toContainEqual({ // id: 4, // name: 'John', // lower: 'john', // verified: false, // invitedBy: 2, // invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, // posts: [], // }); // }); // test.skip('Get user with invitee and posts + orderBy', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // ]); // const response = await db.query.usersTable.findMany({ // orderBy: (users, { desc }) => [desc(users.id)], // with: { // invitee: true, // posts: { // orderBy: (posts, { desc }) => [desc(posts.id)], // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; // invitee: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // expect(response.length).eq(4); // expect(response[3]?.invitee).toBeNull(); // expect(response[2]?.invitee).toBeNull(); // expect(response[1]?.invitee).not.toBeNull(); // expect(response[0]?.invitee).not.toBeNull(); // expect(response[0]?.posts.length).eq(0); // expect(response[1]?.posts.length).eq(1); // expect(response[2]?.posts.length).eq(2); // expect(response[3]?.posts.length).eq(2); // expect(response[3]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // invitee: null, // posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { // id: 1, // ownerId: 1, // content: 'Post1', // createdAt: response[3]?.posts[1]?.createdAt, // }], // }); // expect(response[2]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // invitee: null, // posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { // id: 3, // ownerId: 2, // content: 'Post2', // createdAt: response[2]?.posts[1]?.createdAt, // }], // }); // expect(response[1]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, // posts: [{ // id: 5, // ownerId: 3, // content: 'Post3', // createdAt: response[3]?.posts[1]?.createdAt, // }], // }); // expect(response[0]).toEqual({ // id: 4, // name: 'John', // verified: false, // invitedBy: 2, // invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, // posts: [], // }); // }); // test.skip('Get user with invitee and posts + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 3, content: 'Post3' }, // ]); // const response = await db.query.usersTable.findMany({ // where: (users, { eq, or }) => (or(eq(users.id, 2), eq(users.id, 3))), // with: { // invitee: true, // posts: { // where: (posts, { eq }) => (eq(posts.ownerId, 2)), // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; // invitee: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).eq(2); // expect(response[0]?.invitee).toBeNull(); // expect(response[1]?.invitee).not.toBeNull(); // expect(response[0]?.posts.length).eq(1); // expect(response[1]?.posts.length).eq(0); // expect(response).toContainEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // invitee: null, // posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], // }); // expect(response).toContainEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, // posts: [], // }); // }); // test.skip('Get user with invitee and posts + limit posts and users + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // { ownerId: 3, content: 'Post3.1' }, // ]); // const response = await db.query.usersTable.findMany({ // where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), // limit: 1, // with: { // invitee: true, // posts: { // where: (posts, { eq }) => (eq(posts.ownerId, 3)), // limit: 1, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; // invitee: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // expect(response.length).eq(1); // expect(response[0]?.invitee).not.toBeNull(); // expect(response[0]?.posts.length).eq(1); // expect(response).toContainEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, // posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], // }); // }); // test.skip('Get user with invitee and posts + orderBy + where + custom', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // ]); // const response = await db.query.usersTable.findMany({ // orderBy: [desc(usersTable.id)], // where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), // extras: { // lower: sql`lower(${usersTable.name})`.as('lower_name'), // }, // with: { // invitee: true, // posts: { // where: eq(postsTable.ownerId, 3), // orderBy: [desc(postsTable.id)], // extras: { // lower: sql`lower(${postsTable.content})`.as('lower_name'), // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // lower: string; // posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; // invitee: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[] // >(); // expect(response.length).eq(2); // expect(response[1]?.invitee).not.toBeNull(); // expect(response[0]?.invitee).not.toBeNull(); // expect(response[0]?.posts.length).eq(0); // expect(response[1]?.posts.length).eq(1); // expect(response[1]).toEqual({ // id: 3, // name: 'Alex', // lower: 'alex', // verified: false, // invitedBy: 1, // invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, // posts: [{ // id: 5, // ownerId: 3, // content: 'Post3', // lower: 'post3', // createdAt: response[1]?.posts[0]?.createdAt, // }], // }); // expect(response[0]).toEqual({ // id: 4, // name: 'John', // lower: 'john', // verified: false, // invitedBy: 2, // invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, // posts: [], // }); // }); // test.skip('Get user with invitee and posts + orderBy + where + partial + custom', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex', invitedBy: 1 }, // { id: 4, name: 'John', invitedBy: 2 }, // ]); // await db.insert(postsTable).values([ // { ownerId: 1, content: 'Post1' }, // { ownerId: 1, content: 'Post1.1' }, // { ownerId: 2, content: 'Post2' }, // { ownerId: 2, content: 'Post2.1' }, // { ownerId: 3, content: 'Post3' }, // ]); // const response = await db.query.usersTable.findMany({ // orderBy: [desc(usersTable.id)], // where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), // extras: { // lower: sql`lower(${usersTable.name})`.as('lower_name'), // }, // columns: { // id: true, // name: true, // }, // with: { // invitee: { // columns: { // id: true, // name: true, // }, // extras: { // lower: sql`lower(${usersTable.name})`.as('lower_name'), // }, // }, // posts: { // columns: { // id: true, // content: true, // }, // where: eq(postsTable.ownerId, 3), // orderBy: [desc(postsTable.id)], // extras: { // lower: sql`lower(${postsTable.content})`.as('lower_name'), // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // lower: string; // posts: { id: number; lower: string; content: string }[]; // invitee: { // id: number; // name: string; // lower: string; // } | null; // }[] // >(); // expect(response.length).eq(2); // expect(response[1]?.invitee).not.toBeNull(); // expect(response[0]?.invitee).not.toBeNull(); // expect(response[0]?.posts.length).eq(0); // expect(response[1]?.posts.length).eq(1); // expect(response[1]).toEqual({ // id: 3, // name: 'Alex', // lower: 'alex', // invitee: { id: 1, name: 'Dan', lower: 'dan' }, // posts: [{ // id: 5, // content: 'Post3', // lower: 'post3', // }], // }); // expect(response[0]).toEqual({ // id: 4, // name: 'John', // lower: 'john', // invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, // posts: [], // }); // }); // /* // One two-level relation users+posts+comments // */ // test.skip('Get user with posts and posts with comments', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { id: 1, ownerId: 1, content: 'Post1' }, // { id: 2, ownerId: 2, content: 'Post2' }, // { id: 3, ownerId: 3, content: 'Post3' }, // ]); // await db.insert(commentsTable).values([ // { postId: 1, content: 'Comment1', creator: 2 }, // { postId: 2, content: 'Comment2', creator: 2 }, // { postId: 3, content: 'Comment3', creator: 3 }, // ]); // const response = await db.query.usersTable.findMany({ // with: { // posts: { // with: { // comments: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // comments: { // id: number; // content: string; // createdAt: Date; // creator: number | null; // postId: number | null; // }[]; // }[]; // }[] // >(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).eq(3); // expect(response[0]?.posts.length).eq(1); // expect(response[1]?.posts.length).eq(1); // expect(response[2]?.posts.length).eq(1); // expect(response[0]?.posts[0]?.comments.length).eq(1); // expect(response[1]?.posts[0]?.comments.length).eq(1); // expect(response[2]?.posts[0]?.comments.length).eq(1); // expect(response[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ // id: 1, // ownerId: 1, // content: 'Post1', // createdAt: response[0]?.posts[0]?.createdAt, // comments: [ // { // id: 1, // content: 'Comment1', // creator: 2, // postId: 1, // createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); // expect(response[1]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // posts: [{ // id: 2, // ownerId: 2, // content: 'Post2', // createdAt: response[1]?.posts[0]?.createdAt, // comments: [ // { // id: 2, // content: 'Comment2', // creator: 2, // postId: 2, // createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); // // expect(response[2]).toEqual({ // // id: 3, // // name: 'Alex', // // verified: false, // // invitedBy: null, // // posts: [{ // // id: 3, // // ownerId: 3, // // content: 'Post3', // // createdAt: response[2]?.posts[0]?.createdAt, // // comments: [ // // { // // id: , // // content: 'Comment3', // // creator: 3, // // postId: 3, // // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, // // }, // // ], // // }], // // }); // }); // // Get user with limit posts and limit comments // // Get user with custom field + post + comment with custom field // // Get user with limit + posts orderBy + comment orderBy // // Get user with where + posts where + comment where // // Get user with where + posts partial where + comment where // // Get user with where + posts partial where + comment partial(false) where // // Get user with where partial(false) + posts partial where partial(false) + comment partial(false+true) where // // Get user with where + posts partial where + comment where. Didn't select field from where in posts // // Get user with where + posts partial where + comment where. Didn't select field from where for all // // Get with limit+offset in each // /* // One two-level + One first-level relation users+posts+comments and users+users // */ // /* // One three-level relation users+posts+comments+comment_owner // */ // test.skip('Get user with posts and posts with comments and comments with owner', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { id: 1, ownerId: 1, content: 'Post1' }, // { id: 2, ownerId: 2, content: 'Post2' }, // { id: 3, ownerId: 3, content: 'Post3' }, // ]); // await db.insert(commentsTable).values([ // { postId: 1, content: 'Comment1', creator: 2 }, // { postId: 2, content: 'Comment2', creator: 2 }, // { postId: 3, content: 'Comment3', creator: 3 }, // ]); // const response = await db.query.usersTable.findMany({ // with: { // posts: { // with: { // comments: { // with: { // author: true, // }, // }, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // comments: { // id: number; // content: string; // createdAt: Date; // creator: number | null; // postId: number | null; // author: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[]; // }[]; // }[]>(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).eq(3); // expect(response[0]?.posts.length).eq(1); // expect(response[1]?.posts.length).eq(1); // expect(response[2]?.posts.length).eq(1); // expect(response[0]?.posts[0]?.comments.length).eq(1); // expect(response[1]?.posts[0]?.comments.length).eq(1); // expect(response[2]?.posts[0]?.comments.length).eq(1); // expect(response[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ // id: 1, // ownerId: 1, // content: 'Post1', // createdAt: response[0]?.posts[0]?.createdAt, // comments: [ // { // id: 1, // content: 'Comment1', // creator: 2, // author: { // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // }, // postId: 1, // createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); // expect(response[1]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // posts: [{ // id: 2, // ownerId: 2, // content: 'Post2', // createdAt: response[1]?.posts[0]?.createdAt, // comments: [ // { // id: 2, // content: 'Comment2', // creator: 2, // author: { // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // }, // postId: 2, // createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); // }); // test.skip('Get user with posts and posts with comments and comments with owner where exists', async () => { // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(postsTable).values([ // { id: 1, ownerId: 1, content: 'Post1' }, // { id: 2, ownerId: 2, content: 'Post2' }, // { id: 3, ownerId: 3, content: 'Post3' }, // ]); // await db.insert(commentsTable).values([ // { postId: 1, content: 'Comment1', creator: 2 }, // { postId: 2, content: 'Comment2', creator: 2 }, // { postId: 3, content: 'Comment3', creator: 3 }, // ]); // const response = await db.query.usersTable.findMany({ // with: { // posts: { // with: { // comments: { // with: { // author: true, // }, // }, // }, // }, // }, // where: (table, { exists, eq }) => exists(db.select({ one: sql`1` }).from(usersTable).where(eq(sql`1`, table.id))), // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // posts: { // id: number; // content: string; // ownerId: number | null; // createdAt: Date; // comments: { // id: number; // content: string; // createdAt: Date; // creator: number | null; // postId: number | null; // author: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // } | null; // }[]; // }[]; // }[]>(); // expect(response.length).eq(1); // expect(response[0]?.posts.length).eq(1); // expect(response[0]?.posts[0]?.comments.length).eq(1); // expect(response[0]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // posts: [{ // id: 1, // ownerId: 1, // content: 'Post1', // createdAt: response[0]?.posts[0]?.createdAt, // comments: [ // { // id: 1, // content: 'Comment1', // creator: 2, // author: { // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // }, // postId: 1, // createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); // }); // /* // One three-level relation + 1 first-level relatioon // 1. users+posts+comments+comment_owner // 2. users+users // */ // /* // One four-level relation users+posts+comments+coment_likes // */ // /* // [Find Many] Many-to-many cases // Users+users_to_groups+groups // */ // test.skip('[Find Many] Get users with groups', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findMany({ // with: { // usersToGroups: { // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // }[]>(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).toEqual(3); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response[1]?.usersToGroups.length).toEqual(1); // expect(response[2]?.usersToGroups.length).toEqual(2); // expect(response).toContainEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 1, // name: 'Group1', // description: null, // }, // }], // }); // expect(response).toContainEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 2, // name: 'Group2', // description: null, // }, // }], // }); // expect(response).toContainEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 3, // name: 'Group3', // description: null, // }, // }, { // group: { // id: 2, // name: 'Group2', // description: null, // }, // }], // }); // }); // test.skip('[Find Many] Get groups with users', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findMany({ // with: { // usersToGroups: { // columns: {}, // with: { // user: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // description: string | null; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // }; // }[]; // }[]>(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).toEqual(3); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response[1]?.usersToGroups.length).toEqual(2); // expect(response[2]?.usersToGroups.length).toEqual(1); // expect(response).toContainEqual({ // id: 1, // name: 'Group1', // description: null, // usersToGroups: [{ // user: { // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // }, // }], // }); // expect(response).toContainEqual({ // id: 2, // name: 'Group2', // description: null, // usersToGroups: [{ // user: { // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // }, // }, { // user: { // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // }, // }], // }); // expect(response).toContainEqual({ // id: 3, // name: 'Group3', // description: null, // usersToGroups: [{ // user: { // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // }, // }], // }); // }); // test.skip('[Find Many] Get users with groups + limit', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 2, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findMany({ // limit: 2, // with: { // usersToGroups: { // limit: 1, // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // }[]>(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).toEqual(2); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response[1]?.usersToGroups.length).toEqual(1); // expect(response).toContainEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 1, // name: 'Group1', // description: null, // }, // }], // }); // expect(response).toContainEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 2, // name: 'Group2', // description: null, // }, // }], // }); // }); // test.skip('[Find Many] Get groups with users + limit', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findMany({ // limit: 2, // with: { // usersToGroups: { // limit: 1, // columns: {}, // with: { // user: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // description: string | null; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // }; // }[]; // }[]>(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).toEqual(2); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response[1]?.usersToGroups.length).toEqual(1); // expect(response).toContainEqual({ // id: 1, // name: 'Group1', // description: null, // usersToGroups: [{ // user: { // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // }, // }], // }); // expect(response).toContainEqual({ // id: 2, // name: 'Group2', // description: null, // usersToGroups: [{ // user: { // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // }, // }], // }); // }); // test.skip('[Find Many] Get users with groups + limit + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 2, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findMany({ // limit: 1, // where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), // with: { // usersToGroups: { // where: eq(usersToGroupsTable.groupId, 1), // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // }[]>(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).toEqual(1); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response).toContainEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 1, // name: 'Group1', // description: null, // }, // }], // }); // }); // test.skip('[Find Many] Get groups with users + limit + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findMany({ // limit: 1, // where: gt(groupsTable.id, 1), // with: { // usersToGroups: { // where: eq(usersToGroupsTable.userId, 2), // limit: 1, // columns: {}, // with: { // user: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // description: string | null; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // }; // }[]; // }[]>(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).toEqual(1); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response).toContainEqual({ // id: 2, // name: 'Group2', // description: null, // usersToGroups: [{ // user: { // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // }, // }], // }); // }); // test.skip('[Find Many] Get users with groups + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 2, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findMany({ // where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), // with: { // usersToGroups: { // where: eq(usersToGroupsTable.groupId, 2), // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // }[]>(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).toEqual(2); // expect(response[0]?.usersToGroups.length).toEqual(0); // expect(response[1]?.usersToGroups.length).toEqual(1); // expect(response).toContainEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // usersToGroups: [], // }); // expect(response).toContainEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 2, // name: 'Group2', // description: null, // }, // }], // }); // }); // test.skip('[Find Many] Get groups with users + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findMany({ // where: gt(groupsTable.id, 1), // with: { // usersToGroups: { // where: eq(usersToGroupsTable.userId, 2), // columns: {}, // with: { // user: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // description: string | null; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // }; // }[]; // }[]>(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).toEqual(2); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response[1]?.usersToGroups.length).toEqual(0); // expect(response).toContainEqual({ // id: 2, // name: 'Group2', // description: null, // usersToGroups: [{ // user: { // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // }, // }], // }); // expect(response).toContainEqual({ // id: 3, // name: 'Group3', // description: null, // usersToGroups: [], // }); // }); // test.skip('[Find Many] Get users with groups + orderBy', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findMany({ // orderBy: (users, { desc }) => [desc(users.id)], // with: { // usersToGroups: { // orderBy: [desc(usersToGroupsTable.groupId)], // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // }[]>(); // expect(response.length).toEqual(3); // expect(response[0]?.usersToGroups.length).toEqual(2); // expect(response[1]?.usersToGroups.length).toEqual(1); // expect(response[2]?.usersToGroups.length).toEqual(1); // expect(response[2]).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 1, // name: 'Group1', // description: null, // }, // }], // }); // expect(response[1]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 2, // name: 'Group2', // description: null, // }, // }], // }); // expect(response[0]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 3, // name: 'Group3', // description: null, // }, // }, { // group: { // id: 2, // name: 'Group2', // description: null, // }, // }], // }); // }); // test.skip('[Find Many] Get groups with users + orderBy', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findMany({ // orderBy: [desc(groupsTable.id)], // with: { // usersToGroups: { // orderBy: (utg, { desc }) => [desc(utg.userId)], // columns: {}, // with: { // user: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // description: string | null; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // }; // }[]; // }[]>(); // expect(response.length).toEqual(3); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response[1]?.usersToGroups.length).toEqual(2); // expect(response[2]?.usersToGroups.length).toEqual(1); // expect(response[2]).toEqual({ // id: 1, // name: 'Group1', // description: null, // usersToGroups: [{ // user: { // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // }, // }], // }); // expect(response[1]).toEqual({ // id: 2, // name: 'Group2', // description: null, // usersToGroups: [{ // user: { // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // }, // }, { // user: { // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // }, // }], // }); // expect(response[0]).toEqual({ // id: 3, // name: 'Group3', // description: null, // usersToGroups: [{ // user: { // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // }, // }], // }); // }); // test.skip('[Find Many] Get users with groups + orderBy + limit', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findMany({ // orderBy: (users, { desc }) => [desc(users.id)], // limit: 2, // with: { // usersToGroups: { // limit: 1, // orderBy: [desc(usersToGroupsTable.groupId)], // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf<{ // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // }[]>(); // expect(response.length).toEqual(2); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response[1]?.usersToGroups.length).toEqual(1); // expect(response[1]).toEqual({ // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 2, // name: 'Group2', // description: null, // }, // }], // }); // expect(response[0]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 3, // name: 'Group3', // description: null, // }, // }], // }); // }); // /* // [Find One] Many-to-many cases // Users+users_to_groups+groups // */ // test.skip('[Find One] Get users with groups', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findFirst({ // with: { // usersToGroups: { // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // } | undefined // >(); // expect(response?.usersToGroups.length).toEqual(1); // expect(response).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 1, // name: 'Group1', // description: null, // }, // }], // }); // }); // test.skip('[Find One] Get groups with users', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findFirst({ // with: { // usersToGroups: { // columns: {}, // with: { // user: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // description: string | null; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // }; // }[]; // } | undefined // >(); // expect(response?.usersToGroups.length).toEqual(1); // expect(response).toEqual({ // id: 1, // name: 'Group1', // description: null, // usersToGroups: [{ // user: { // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // }, // }], // }); // }); // test.skip('[Find One] Get users with groups + limit', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 2, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findFirst({ // with: { // usersToGroups: { // limit: 1, // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // } | undefined // >(); // expect(response?.usersToGroups.length).toEqual(1); // expect(response).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 1, // name: 'Group1', // description: null, // }, // }], // }); // }); // test.skip('[Find One] Get groups with users + limit', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findFirst({ // with: { // usersToGroups: { // limit: 1, // columns: {}, // with: { // user: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // description: string | null; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // }; // }[]; // } | undefined // >(); // expect(response?.usersToGroups.length).toEqual(1); // expect(response).toEqual({ // id: 1, // name: 'Group1', // description: null, // usersToGroups: [{ // user: { // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // }, // }], // }); // }); // test.skip('[Find One] Get users with groups + limit + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 2, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findFirst({ // where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), // with: { // usersToGroups: { // where: eq(usersToGroupsTable.groupId, 1), // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // } | undefined // >(); // expect(response?.usersToGroups.length).toEqual(1); // expect(response).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 1, // name: 'Group1', // description: null, // }, // }], // }); // }); // test.skip('[Find One] Get groups with users + limit + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findFirst({ // where: gt(groupsTable.id, 1), // with: { // usersToGroups: { // where: eq(usersToGroupsTable.userId, 2), // limit: 1, // columns: {}, // with: { // user: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // description: string | null; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // }; // }[]; // } | undefined // >(); // expect(response?.usersToGroups.length).toEqual(1); // expect(response).toEqual({ // id: 2, // name: 'Group2', // description: null, // usersToGroups: [{ // user: { // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // }, // }], // }); // }); // test.skip('[Find One] Get users with groups + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 2, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findFirst({ // where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), // with: { // usersToGroups: { // where: eq(usersToGroupsTable.groupId, 2), // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // } | undefined // >(); // expect(response?.usersToGroups.length).toEqual(0); // expect(response).toEqual({ // id: 1, // name: 'Dan', // verified: false, // invitedBy: null, // usersToGroups: [], // }); // }); // test.skip('[Find One] Get groups with users + where', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findFirst({ // where: gt(groupsTable.id, 1), // with: { // usersToGroups: { // where: eq(usersToGroupsTable.userId, 2), // columns: {}, // with: { // user: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // description: string | null; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // }; // }[]; // } | undefined // >(); // expect(response?.usersToGroups.length).toEqual(1); // expect(response).toEqual({ // id: 2, // name: 'Group2', // description: null, // usersToGroups: [{ // user: { // id: 2, // name: 'Andrew', // verified: false, // invitedBy: null, // }, // }], // }); // }); // test.skip('[Find One] Get users with groups + orderBy', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findFirst({ // orderBy: (users, { desc }) => [desc(users.id)], // with: { // usersToGroups: { // orderBy: [desc(usersToGroupsTable.groupId)], // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // } | undefined // >(); // expect(response?.usersToGroups.length).toEqual(2); // expect(response).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 3, // name: 'Group3', // description: null, // }, // }, { // group: { // id: 2, // name: 'Group2', // description: null, // }, // }], // }); // }); // test.skip('[Find One] Get groups with users + orderBy', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findFirst({ // orderBy: [desc(groupsTable.id)], // with: { // usersToGroups: { // orderBy: (utg, { desc }) => [desc(utg.userId)], // columns: {}, // with: { // user: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // description: string | null; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // }; // }[]; // } | undefined // >(); // expect(response?.usersToGroups.length).toEqual(1); // expect(response).toEqual({ // id: 3, // name: 'Group3', // description: null, // usersToGroups: [{ // user: { // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // }, // }], // }); // }); // test.skip('[Find One] Get users with groups + orderBy + limit', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findFirst({ // orderBy: (users, { desc }) => [desc(users.id)], // with: { // usersToGroups: { // limit: 1, // orderBy: [desc(usersToGroupsTable.groupId)], // columns: {}, // with: { // group: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // }; // }[]; // } | undefined // >(); // expect(response?.usersToGroups.length).toEqual(1); // expect(response).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 3, // name: 'Group3', // description: null, // }, // }], // }); // }); // test.skip('Get groups with users + orderBy + limit', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findMany({ // orderBy: [desc(groupsTable.id)], // limit: 2, // with: { // usersToGroups: { // limit: 1, // orderBy: (utg, { desc }) => [desc(utg.userId)], // columns: {}, // with: { // user: true, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // description: string | null; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // }; // }[]; // }[] // >(); // expect(response.length).toEqual(2); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response[1]?.usersToGroups.length).toEqual(1); // expect(response[1]).toEqual({ // id: 2, // name: 'Group2', // description: null, // usersToGroups: [{ // user: { // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // }, // }], // }); // expect(response[0]).toEqual({ // id: 3, // name: 'Group3', // description: null, // usersToGroups: [{ // user: { // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // }, // }], // }); // }); // test.skip('Get users with groups + custom', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.usersTable.findMany({ // extras: { // lower: sql`lower(${usersTable.name})`.as('lower_name'), // }, // with: { // usersToGroups: { // columns: {}, // with: { // group: { // extras: { // lower: sql`lower(${groupsTable.name})`.as('lower_name'), // }, // }, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // lower: string; // usersToGroups: { // group: { // id: number; // name: string; // description: string | null; // lower: string; // }; // }[]; // }[] // >(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).toEqual(3); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response[1]?.usersToGroups.length).toEqual(1); // expect(response[2]?.usersToGroups.length).toEqual(2); // expect(response).toContainEqual({ // id: 1, // name: 'Dan', // lower: 'dan', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 1, // name: 'Group1', // lower: 'group1', // description: null, // }, // }], // }); // expect(response).toContainEqual({ // id: 2, // name: 'Andrew', // lower: 'andrew', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 2, // name: 'Group2', // lower: 'group2', // description: null, // }, // }], // }); // expect(response).toContainEqual({ // id: 3, // name: 'Alex', // lower: 'alex', // verified: false, // invitedBy: null, // usersToGroups: [{ // group: { // id: 3, // name: 'Group3', // lower: 'group3', // description: null, // }, // }, { // group: { // id: 2, // name: 'Group2', // lower: 'group2', // description: null, // }, // }], // }); // }); // test.skip('Get groups with users + custom', async (t) => { // const { singlestoreDb: db } = t; // await db.insert(usersTable).values([ // { id: 1, name: 'Dan' }, // { id: 2, name: 'Andrew' }, // { id: 3, name: 'Alex' }, // ]); // await db.insert(groupsTable).values([ // { id: 1, name: 'Group1' }, // { id: 2, name: 'Group2' }, // { id: 3, name: 'Group3' }, // ]); // await db.insert(usersToGroupsTable).values([ // { userId: 1, groupId: 1 }, // { userId: 2, groupId: 2 }, // { userId: 3, groupId: 3 }, // { userId: 3, groupId: 2 }, // ]); // const response = await db.query.groupsTable.findMany({ // extras: (table, { sql }) => ({ // lower: sql`lower(${table.name})`.as('lower_name'), // }), // with: { // usersToGroups: { // columns: {}, // with: { // user: { // extras: (table, { sql }) => ({ // lower: sql`lower(${table.name})`.as('lower_name'), // }), // }, // }, // }, // }, // }); // expectTypeOf(response).toEqualTypeOf< // { // id: number; // name: string; // description: string | null; // lower: string; // usersToGroups: { // user: { // id: number; // name: string; // verified: boolean; // invitedBy: number | null; // lower: string; // }; // }[]; // }[] // >(); // response.sort((a, b) => (a.id > b.id) ? 1 : -1); // expect(response.length).toEqual(3); // expect(response[0]?.usersToGroups.length).toEqual(1); // expect(response[1]?.usersToGroups.length).toEqual(2); // expect(response[2]?.usersToGroups.length).toEqual(1); // expect(response).toContainEqual({ // id: 1, // name: 'Group1', // lower: 'group1', // description: null, // usersToGroups: [{ // user: { // id: 1, // name: 'Dan', // lower: 'dan', // verified: false, // invitedBy: null, // }, // }], // }); // expect(response).toContainEqual({ // id: 2, // name: 'Group2', // lower: 'group2', // description: null, // usersToGroups: [{ // user: { // id: 2, // name: 'Andrew', // lower: 'andrew', // verified: false, // invitedBy: null, // }, // }, { // user: { // id: 3, // name: 'Alex', // lower: 'alex', // verified: false, // invitedBy: null, // }, // }], // }); // expect(response).toContainEqual({ // id: 3, // name: 'Group3', // lower: 'group3', // description: null, // usersToGroups: [{ // user: { // id: 3, // name: 'Alex', // lower: 'alex', // verified: false, // invitedBy: null, // }, // }], // }); // }); // test('.toSQL()', () => { // const query = db.query.usersTable.findFirst().toSQL(); // expect(query).toHaveProperty('sql', expect.any(String)); // expect(query).toHaveProperty('params', expect.any(Array)); // }); // // + custom + where + orderby // // + custom + where + orderby + limit // // + partial // // + partial(false) // // + partial + orderBy + where (all not selected) // /* // One four-level relation users+posts+comments+coment_likes // + users+users_to_groups+groups // */ // /* // Really hard case // 1. users+posts+comments+coment_likes // 2. users+users_to_groups+groups // 3. users+users // */ // eslint-disable-next-line unicorn/no-empty-file ================================================ FILE: integration-tests/tests/relational/sqlite.schema.ts ================================================ import { type AnySQLiteColumn, integer, primaryKey, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { relations, sql } from 'drizzle-orm'; export const usersTable = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), verified: integer('verified').notNull().default(0), invitedBy: integer('invited_by').references((): AnySQLiteColumn => usersTable.id), }); export const usersConfig = relations(usersTable, ({ one, many }) => ({ invitee: one(usersTable, { fields: [usersTable.invitedBy], references: [usersTable.id], }), usersToGroups: many(usersToGroupsTable), posts: many(postsTable), })); export const groupsTable = sqliteTable('groups', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), description: text('description'), }); export const groupsConfig = relations(groupsTable, ({ many }) => ({ usersToGroups: many(usersToGroupsTable), })); export const usersToGroupsTable = sqliteTable( 'users_to_groups', { id: integer('id').primaryKey({ autoIncrement: true }), userId: integer('user_id', { mode: 'number' }).notNull().references( () => usersTable.id, ), groupId: integer('group_id', { mode: 'number' }).notNull().references( () => groupsTable.id, ), }, (t) => ({ pk: primaryKey(t.userId, t.groupId), }), ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { fields: [usersToGroupsTable.groupId], references: [groupsTable.id], }), user: one(usersTable, { fields: [usersToGroupsTable.userId], references: [usersTable.id], }), })); export const postsTable = sqliteTable('posts', { id: integer('id').primaryKey({ autoIncrement: true }), content: text('content').notNull(), ownerId: integer('owner_id', { mode: 'number' }).references( () => usersTable.id, ), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const postsConfig = relations(postsTable, ({ one, many }) => ({ author: one(usersTable, { fields: [postsTable.ownerId], references: [usersTable.id], }), comments: many(commentsTable), })); export const commentsTable = sqliteTable('comments', { id: integer('id').primaryKey({ autoIncrement: true }), content: text('content').notNull(), creator: integer('creator', { mode: 'number' }).references( () => usersTable.id, ), postId: integer('post_id', { mode: 'number' }).references(() => postsTable.id), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ post: one(postsTable, { fields: [commentsTable.postId], references: [postsTable.id], }), author: one(usersTable, { fields: [commentsTable.creator], references: [usersTable.id], }), likes: many(commentLikesTable), })); export const commentLikesTable = sqliteTable('comment_likes', { id: integer('id').primaryKey({ autoIncrement: true }), creator: integer('creator', { mode: 'number' }).references( () => usersTable.id, ), commentId: integer('comment_id', { mode: 'number' }).references( () => commentsTable.id, ), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ comment: one(commentsTable, { fields: [commentLikesTable.commentId], references: [commentsTable.id], }), author: one(usersTable, { fields: [commentLikesTable.creator], references: [usersTable.id], }), })); ================================================ FILE: integration-tests/tests/relational/tables.ts ================================================ import { relations } from 'drizzle-orm'; import { foreignKey, int, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; export const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').references(() => cities.id).notNull(), homeCityId: int('home_city_id').references(() => cities.id), createdAt: integer('created_at', { mode: 'timestamp' }).notNull(), }); export const usersConfig = relations(users, ({ one, many }) => ({ city: one(cities, { relationName: 'UsersInCity', fields: [users.cityId], references: [cities.id] }), homeCity: one(cities, { fields: [users.homeCityId], references: [cities.id] }), posts: many(posts), comments: many(comments), })); export const cities = sqliteTable('cities', { id: integer('id').primaryKey(), name: text('name').notNull(), }); export const citiesConfig = relations(cities, ({ many }) => ({ users: many(users, { relationName: 'UsersInCity' }), })); export const posts = sqliteTable('posts', { id: integer('id').primaryKey(), title: text('title').notNull(), authorId: int('author_id').references(() => users.id), }); export const postsConfig = relations(posts, ({ one, many }) => ({ author: one(users, { fields: [posts.authorId], references: [users.id] }), comments: many(comments), })); export const comments = sqliteTable('comments', { id: integer('id').primaryKey(), postId: int('post_id').references(() => posts.id).notNull(), authorId: int('author_id').references(() => users.id), text: text('text').notNull(), }); export const commentsConfig = relations(comments, ({ one }) => ({ post: one(posts, { fields: [comments.postId], references: [posts.id] }), author: one(users, { fields: [comments.authorId], references: [users.id] }), })); export const books = sqliteTable('books', { id: integer('id').primaryKey(), name: text('name').notNull(), }); export const booksConfig = relations(books, ({ many }) => ({ authors: many(bookAuthors), })); export const bookAuthors = sqliteTable('book_authors', { bookId: int('book_id').references(() => books.id).notNull(), authorId: int('author_id').references(() => users.id).notNull(), role: text('role').notNull(), }); export const bookAuthorsConfig = relations(bookAuthors, ({ one }) => ({ book: one(books, { fields: [bookAuthors.bookId], references: [books.id] }), author: one(users, { fields: [bookAuthors.authorId], references: [users.id] }), })); export const node = sqliteTable('node', { id: integer('id').primaryKey(), parentId: int('parent_id'), leftId: int('left_id'), rightId: int('right_id'), }, (node) => ({ fk1: foreignKey(() => ({ columns: [node.parentId], foreignColumns: [node.id] })), fk2: foreignKey(() => ({ columns: [node.leftId], foreignColumns: [node.id] })), fk3: foreignKey(() => ({ columns: [node.rightId], foreignColumns: [node.id] })), })); export const nodeRelations = relations(node, ({ one }) => ({ parent: one(node, { fields: [node.parentId], references: [node.id] }), left: one(node, { fields: [node.leftId], references: [node.id] }), right: one(node, { fields: [node.rightId], references: [node.id] }), })); ================================================ FILE: integration-tests/tests/relational/turso.test.ts ================================================ import 'dotenv/config'; import { type Client, createClient } from '@libsql/client'; import { desc, DrizzleError, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'; import { beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; import * as schema from './sqlite.schema.ts'; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable } = schema; const ENABLE_LOGGING = false; /* Test cases: - querying nested relation without PK with additional fields */ let db: LibSQLDatabase; beforeAll(async () => { const url = process.env['LIBSQL_URL']; const authToken = process.env['LIBSQL_AUTH_TOKEN']; if (!url) { throw new Error('LIBSQL_URL is not set'); } const sleep = 250; let timeLeft = 5000; let connected = false; let lastError: unknown | undefined; let client: Client; do { try { client = createClient({ url, authToken }); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to libsql'); throw lastError; } db = drizzle(client!, { logger: ENABLE_LOGGING, schema }); }); beforeEach(async () => { await db.run(sql`drop table if exists \`groups\``); await db.run(sql`drop table if exists \`users\``); await db.run(sql`drop table if exists \`users_to_groups\``); await db.run(sql`drop table if exists \`posts\``); await db.run(sql`drop table if exists \`comments\``); await db.run(sql`drop table if exists \`comment_likes\``); await db.run( sql` CREATE TABLE \`users\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`verified\` integer DEFAULT 0 NOT NULL, \`invited_by\` integer ); `, ); await db.run( sql` CREATE TABLE \`groups\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`description\` text ); `, ); await db.run( sql` CREATE TABLE \`users_to_groups\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`user_id\` integer NOT NULL, \`group_id\` integer NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`posts\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`content\` text NOT NULL, \`owner_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`comments\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`content\` text NOT NULL, \`creator\` integer, \`post_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`comment_likes\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`creator\` integer, \`comment_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); }); /* [Find Many] One relation users+posts */ test('[Find Many] Get users with posts', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts and users', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ limit: 2, with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, lowerName: 'dan', posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', lowerName: 'andrew', verified: 0, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { id: 5, ownerId: 2, content: 'Post2.1', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', lowerName: 'alex', verified: 0, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { id: 7, ownerId: 3, content: 'Post3.1', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }], }); }); test('[Find Many] Get users with posts + custom fields + limits', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ limit: 1, with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); // TODO check order test.skip('[Find Many] Get users with posts + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(2); expect(usersWithPosts[1]?.posts.length).eq(2); expect(usersWithPosts[2]?.posts.length).eq(3); expect(usersWithPosts[2]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[2]?.createdAt }, { id: 2, ownerId: 1, content: '2', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }, { id: 3, ownerId: 1, content: '3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 5, ownerId: 2, content: '5', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[0]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where + partial', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial(true + false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; posts: { id: number; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find Many] Get users with posts + where + partial(false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in transaction', async () => { let usersWithPosts: { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; await db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in rollbacked transaction', async () => { let usersWithPosts: { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; expect(db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); await tx.rollback(); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); })).rejects.toThrowError(new TransactionRollbackError()); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(0); }); // select only custom test('[Find Many] Get only custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 1, content: 'Post1.2' }, { id: 3, ownerId: 1, content: 'Post1.3' }, { id: 4, ownerId: 2, content: 'Post2' }, { id: 5, ownerId: 2, content: 'Post2.1' }, { id: 6, ownerId: 3, content: 'Post3' }, { id: 7, ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts[0]?.lowerName).toEqual('dan'); expect(usersWithPosts[1]?.lowerName).toEqual('andrew'); expect(usersWithPosts[2]?.lowerName).toEqual('alex'); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1', }); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1.2', }); expect(usersWithPosts[0]?.posts).toContainEqual({ lowerName: 'post1.3', }); expect(usersWithPosts[1]?.posts).toContainEqual({ lowerName: 'post2', }); expect(usersWithPosts[1]?.posts).toContainEqual({ lowerName: 'post2.1', }); expect(usersWithPosts[2]?.posts).toContainEqual({ lowerName: 'post3', }); expect(usersWithPosts[2]?.posts).toContainEqual({ lowerName: 'post3.1', }); }); test('[Find Many] Get only custom fields + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find Many] Get only custom fields + where + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find Many] Get only custom fields + where + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // select only custom find one test('[Find One] Get only custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(3); expect(usersWithPosts?.lowerName).toEqual('dan'); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1', }); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1.2', }); expect(usersWithPosts?.posts).toContainEqual({ lowerName: 'post1.3', }); }); test('[Find One] Get only custom fields + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find One] Get only custom fields + where + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find One] Get only custom fields + where + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // columns {} test('[Find Many] Get select {}', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await expect(async () => await db.query.usersTable.findMany({ columns: {}, }) ).rejects.toThrow(DrizzleError); }); // columns {} test('[Find One] Get select {}', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await expect(async () => await db.query.usersTable.findFirst({ columns: {}, }) ).rejects.toThrow(DrizzleError); }); // deep select {} test('[Find Many] Get deep select {}', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); await expect(async () => await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, }, }, }) ).rejects.toThrow(DrizzleError); }); // deep select {} test('[Find One] Get deep select {}', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); await expect(async () => await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, }, }, }) ).rejects.toThrow(DrizzleError); }); /* Prepared statements for users+posts */ test('[Find Many] Get users with posts + prepared limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const prepared = db.query.usersTable.findMany({ with: { posts: { limit: placeholder('limit'), }, }, }).prepare(); const usersWithPosts = await prepared.all({ limit: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared limit + offset', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), with: { posts: { limit: placeholder('pLimit'), }, }, }).prepare(); const usersWithPosts = await prepared.all({ pLimit: 1, uLimit: 3, uOffset: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const prepared = db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, placeholder('id'))), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }).prepare(); const usersWithPosts = await prepared.all({ id: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared + limit + offset + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), where: (({ id }, { eq, or }) => or(eq(id, placeholder('id')), eq(id, 3))), with: { posts: { where: (({ id }, { eq }) => eq(id, placeholder('pid'))), limit: placeholder('pLimit'), }, }, }).prepare(); const usersWithPosts = await prepared.all({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); /* [Find One] One relation users+posts */ test('[Find One] Get users with posts', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + limit posts', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts no results found', async () => { const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts).toBeUndefined(); }); test('[Find One] Get users with posts + limit posts and users', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(3); expect(usersWithPosts?.lowerName).toEqual('dan'); expect(usersWithPosts?.id).toEqual(1); expect(usersWithPosts?.verified).toEqual(0); expect(usersWithPosts?.invitedBy).toEqual(null); expect(usersWithPosts?.name).toEqual('Dan'); expect(usersWithPosts?.posts).toContainEqual({ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt, }); expect(usersWithPosts?.posts).toContainEqual({ id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts?.posts[1]?.createdAt, }); expect(usersWithPosts?.posts).toContainEqual({ id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts?.posts[2]?.createdAt, }); }); test('[Find One] Get users with posts + custom fields + limits', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); // TODO. Check order test.skip('[Find One] Get users with posts + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.id)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(2); expect(usersWithPosts).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where + partial', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial(true + false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; posts: { id: number; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find One] Get users with posts + where + partial(false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); /* One relation users+users. Self referencing */ test('Get user with invitee', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, }); }); test('Get user + limit with invitee', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew', invitedBy: 1 }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, limit: 2, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, }); }); test('Get user with invitee and custom fields', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: number; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', lower: 'john', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null }, }); }); test('Get user with invitee and custom fields + limits', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), limit: 3, with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: number; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(3); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null }, }); }); test('Get user with invitee + order by', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[3]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, }); expect(usersWithInvitee[0]).toEqual({ id: 4, name: 'John', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, }); }); test('Get user with invitee + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, }); }); test('Get user with invitee + where + partial', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial. Did not select users id, but used it in where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(true+false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, verified: false, }, with: { invitee: { columns: { id: true, name: true, verified: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(false)', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { verified: false, }, with: { invitee: { columns: { name: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitedBy: number | null; invitee: { id: number; verified: number; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitedBy: 1, invitee: { id: 1, verified: 0, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitedBy: 2, invitee: { id: 2, verified: 0, invitedBy: null }, }); }); /* Two first-level relations users+users and users+posts */ test('Get user with invitee and posts', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const response = await db.query.usersTable.findMany({ with: { invitee: true, posts: true, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 4, name: 'John', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const response = await db.query.usersTable.findMany({ limit: 3, with: { invitee: true, posts: { limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + limits + custom fields in each', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const response = await db.query.usersTable.findMany({ limit: 3, extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_invitee_name') }), }, posts: { limit: 1, extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_content') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: number; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + custom fields in each', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const response = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), }, posts: { extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_name') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: number; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); response[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); response[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); response[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(2); expect(response[1]?.posts.length).eq(2); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(0); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.1', lower: 'post1.1', createdAt: response[0]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { id: 4, ownerId: 2, content: 'Post2.1', lower: 'post2.1', createdAt: response[1]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { id: 6, ownerId: 3, content: 'Post3.1', lower: 'post3.1', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 4, name: 'John', lower: 'john', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null }, posts: [], }); }); // TODO Check order test.skip('Get user with invitee and posts + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]).run(); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, posts: { orderBy: (posts, { desc }) => [desc(posts.id)], }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(4); expect(response[3]?.invitee).toBeNull(); expect(response[2]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(2); expect(response[3]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { id: 1, ownerId: 1, content: 'Post1', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[2]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { id: 3, ownerId: 2, content: 'Post2', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response[1]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]).run(); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 2), eq(users.id, 3))), with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 2)), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(2); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(0); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]).run(); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), limit: 1, with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 3)), limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(1); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + orderBy + where + custom', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]).run(); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { invitee: true, posts: { where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; lower: string; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: 0, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[1]?.posts[0]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', verified: 0, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: 0, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy + where + partial + custom', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]).run(); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]).run(); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, }, posts: { columns: { id: true, content: true, }, where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; lower: string; posts: { id: number; lower: string; content: string }[]; invitee: { id: number; name: string; lower: string; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', invitee: { id: 1, name: 'Dan', lower: 'dan' }, posts: [{ id: 5, content: 'Post3', lower: 'post3', }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, posts: [], }); }); /* One two-level relation users+posts+comments */ test('Get user with posts and posts with comments', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]).run(); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]).run(); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; }[]; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); // expect(response[2]).toEqual({ // id: 3, // name: 'Alex', // verified: 0, // invitedBy: null, // posts: [{ // id: 3, // ownerId: 3, // content: 'Post3', // createdAt: response[2]?.posts[0]?.createdAt, // comments: [ // { // id: , // content: 'Comment3', // creator: 3, // postId: 3, // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); }); // Get user with limit posts and limit comments // Get user with custom field + post + comment with custom field // Get user with limit + posts orderBy + comment orderBy // Get user with where + posts where + comment where // Get user with where + posts partial where + comment where // Get user with where + posts partial where + comment partial(false) where // Get user with where partial(false) + posts partial where partial(false) + comment partial(false+true) where // Get user with where + posts partial where + comment where. Didn't select field from where in posts // Get user with where + posts partial where + comment where. Didn't select field from where for all // Get with limit+offset in each /* One two-level + One first-level relation users+posts+comments and users+users */ /* One three-level relation users+posts+comments+comment_owner */ test('Get user with posts and posts with comments and comments with owner', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]).run(); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]).run(); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: number; invitedBy: number | null; } | null; }[]; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, author: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, author: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); /* One three-level relation + 1 first-level relatioon 1. users+posts+comments+comment_owner 2. users+users */ /* One four-level relation users+posts+comments+coment_likes */ /* [Find Many] Many-to-many cases Users+users_to_groups+groups */ test('[Find Many] Get users with groups', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findMany({ with: { usersToGroups: { columns: {}, orderBy: usersToGroupsTable.groupId, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }, { group: { id: 3, name: 'Group3', description: null, }, }], }); }); test('[Find Many] Get groups with users', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findMany({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findMany({ limit: 1, where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findMany({ limit: 1, where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findMany({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(0); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findMany({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(0); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [], }); }); test('[Find Many] Get users with groups + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(2); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: 0, invitedBy: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }, { user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + orderBy + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); /* [Find One] Many-to-many cases Users+users_to_groups+groups */ test('[Find One] Get users with groups', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: 0, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: 0, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(0); expect(response).toEqual({ id: 1, name: 'Dan', verified: 0, invitedBy: null, usersToGroups: [], }); }); test('[Find One] Get groups with users + where', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: 0, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(2); expect(response).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find One] Get groups with users + orderBy', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findFirst({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Alex', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); test('Get groups with users + orderBy + limit', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; }; }[]; }[] >(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: 0, invitedBy: null, }, }], }); }); test('Get users with groups + custom', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.usersTable.findMany({ extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { usersToGroups: { columns: {}, with: { group: { extras: { lower: sql`lower(${groupsTable.name})`.as('lower_name'), }, }, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; lower: string; usersToGroups: { group: { id: number; name: string; description: string | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); response[0]?.usersToGroups.sort((a, b) => (a.group.id > b.group.id) ? 1 : -1); response[1]?.usersToGroups.sort((a, b) => (a.group.id > b.group.id) ? 1 : -1); response[2]?.usersToGroups.sort((a, b) => (a.group.id > b.group.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', lower: 'group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }, { group: { id: 3, name: 'Group3', lower: 'group3', description: null, }, }], }); }); test('Get groups with users + custom', async () => { await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]).run(); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]).run(); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]).run(); const response = await db.query.groupsTable.findMany({ extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), with: { usersToGroups: { columns: {}, with: { user: { extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), }, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; lower: string; usersToGroups: { user: { id: number; name: string; verified: number; invitedBy: number | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', lower: 'group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', lower: 'dan', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', lower: 'group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', lower: 'andrew', verified: 0, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', lower: 'group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', lower: 'alex', verified: 0, invitedBy: null, }, }], }); }); test('async api', async () => { await db.insert(usersTable).values([{ id: 1, name: 'Dan' }]); const users = await db.query.usersTable.findMany(); expect(users).toEqual([{ id: 1, name: 'Dan', verified: 0, invitedBy: null }]); }); test('async api - prepare', async () => { const insertStmt = db.insert(usersTable).values([{ id: 1, name: 'Dan' }]).prepare(); await insertStmt.execute(); const queryStmt = db.query.usersTable.findMany().prepare(); const users = await queryStmt.execute(); expect(users).toEqual([{ id: 1, name: 'Dan', verified: 0, invitedBy: null }]); }); test('.toSQL()', () => { const query = db.query.usersTable.findFirst().toSQL(); expect(query).toHaveProperty('sql', expect.any(String)); expect(query).toHaveProperty('params', expect.any(Array)); }); // + custom + where + orderby // + custom + where + orderby + limit // + partial // + partial(false) // + partial + orderBy + where (all not selected) /* One four-level relation users+posts+comments+coment_likes + users+users_to_groups+groups */ /* Really hard case 1. users+posts+comments+coment_likes 2. users+users_to_groups+groups 3. users+users */ ================================================ FILE: integration-tests/tests/relational/vercel.test.ts ================================================ import 'dotenv/config'; import { createClient, type VercelClient } from '@vercel/postgres'; import Docker from 'dockerode'; import { desc, eq, gt, gte, or, placeholder, sql, TransactionRollbackError } from 'drizzle-orm'; import { drizzle, type VercelPgDatabase } from 'drizzle-orm/vercel-postgres'; import getPort from 'get-port'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; import * as schema from './pg.schema.ts'; const { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable } = schema; const ENABLE_LOGGING = false; /* Test cases: - querying nested relation without PK with additional fields */ declare module 'vitest' { export interface TestContext { docker: Docker; vpgContainer: Docker.Container; vpgDb: VercelPgDatabase; vpgClient: VercelClient; } } let globalDocker: Docker; let pgContainer: Docker.Container; let db: VercelPgDatabase; let client: VercelClient; async function createDockerDB(): Promise { const docker = (globalDocker = new Docker()); const port = await getPort({ port: 5432 }); const image = 'postgres:14'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); pgContainer = await docker.createContainer({ Image: image, Env: [ 'POSTGRES_PASSWORD=postgres', 'POSTGRES_USER=postgres', 'POSTGRES_DB=postgres', ], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '5432/tcp': [{ HostPort: `${port}` }], }, }, }); await pgContainer.start(); return `postgres://postgres:postgres@localhost:${port}/postgres`; } beforeAll(async () => { const connectionString = process.env['PG_CONNECTION_STRING'] ?? (await createDockerDB()); const sleep = 250; let timeLeft = 5000; let connected = false; let lastError: unknown | undefined; do { try { client = createClient({ connectionString }); await client.connect(); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to Postgres'); await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); throw lastError; } db = drizzle(client, { schema, logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end().catch(console.error); await pgContainer?.stop().catch(console.error); }); beforeEach(async (ctx) => { ctx.vpgDb = db; ctx.vpgClient = client; ctx.docker = globalDocker; ctx.vpgContainer = pgContainer; await ctx.vpgDb.execute(sql`drop schema public cascade`); await ctx.vpgDb.execute(sql`create schema public`); await ctx.vpgDb.execute( sql` CREATE TABLE "users" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "verified" boolean DEFAULT false NOT NULL, "invited_by" int REFERENCES "users"("id") ); `, ); await ctx.vpgDb.execute( sql` CREATE TABLE IF NOT EXISTS "groups" ( "id" serial PRIMARY KEY NOT NULL, "name" text NOT NULL, "description" text ); `, ); await ctx.vpgDb.execute( sql` CREATE TABLE IF NOT EXISTS "users_to_groups" ( "id" serial PRIMARY KEY NOT NULL, "user_id" int REFERENCES "users"("id"), "group_id" int REFERENCES "groups"("id") ); `, ); await ctx.vpgDb.execute( sql` CREATE TABLE IF NOT EXISTS "posts" ( "id" serial PRIMARY KEY NOT NULL, "content" text NOT NULL, "owner_id" int REFERENCES "users"("id"), "created_at" timestamp with time zone DEFAULT now() NOT NULL ); `, ); await ctx.vpgDb.execute( sql` CREATE TABLE IF NOT EXISTS "comments" ( "id" serial PRIMARY KEY NOT NULL, "content" text NOT NULL, "creator" int REFERENCES "users"("id"), "post_id" int REFERENCES "posts"("id"), "created_at" timestamp with time zone DEFAULT now() NOT NULL ); `, ); await ctx.vpgDb.execute( sql` CREATE TABLE IF NOT EXISTS "comment_likes" ( "id" serial PRIMARY KEY NOT NULL, "creator" int REFERENCES "users"("id"), "comment_id" int REFERENCES "comments"("id"), "created_at" timestamp with time zone DEFAULT now() NOT NULL ); `, ); }); /* [Find Many] One relation users+posts */ test('[Find Many] Get users with posts', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + limit posts and users', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ limit: 2, with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + custom fields', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); usersWithPosts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[0]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[1]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); usersWithPosts[2]?.posts.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, lowerName: 'dan', posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts[0]?.posts[2]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', lowerName: 'andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }, { id: 5, ownerId: 2, content: 'Post2.1', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }], }); expect(usersWithPosts[2]).toEqual({ id: 3, name: 'Alex', lowerName: 'alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }, { id: 7, ownerId: 3, content: 'Post3.1', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }], }); }); test('[Find Many] Get users with posts + custom fields + limits', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ limit: 1, with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + orderBy', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(2); expect(usersWithPosts[1]?.posts.length).eq(2); expect(usersWithPosts[2]?.posts.length).eq(3); expect(usersWithPosts[2]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 3, ownerId: 1, content: '3', createdAt: usersWithPosts[2]?.posts[2]?.createdAt }, { id: 2, ownerId: 1, content: '2', createdAt: usersWithPosts[2]?.posts[1]?.createdAt, }, { id: 1, ownerId: 1, content: '1', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); expect(usersWithPosts[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 5, ownerId: 2, content: '5', createdAt: usersWithPosts[1]?.posts[1]?.createdAt, }, { id: 4, ownerId: 2, content: '4', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts[0]?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + where + partial', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; posts: { id: number; content: string; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find Many] Get users with posts + where + partial(true + false)', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; posts: { id: number; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find Many] Get users with posts + where + partial(false)', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in transaction', async (t) => { const { vpgDb: db } = t; let usersWithPosts: { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; await db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts in rollbacked transaction', async (t) => { const { vpgDb: db } = t; let usersWithPosts: { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[] = []; await expect(db.transaction(async (tx) => { await tx.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await tx.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); tx.rollback(); usersWithPosts = await tx.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); })).rejects.toThrowError(new TransactionRollbackError()); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(0); }); // select only custom test('[Find Many] Get only custom fields', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(3); expect(usersWithPosts[0]?.posts.length).toEqual(3); expect(usersWithPosts[1]?.posts.length).toEqual(2); expect(usersWithPosts[2]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1' }, { lowerName: 'post1.2', }, { lowerName: 'post1.3' }], }); expect(usersWithPosts).toContainEqual({ lowerName: 'andrew', posts: [{ lowerName: 'post2' }, { lowerName: 'post2.1', }], }); expect(usersWithPosts).toContainEqual({ lowerName: 'alex', posts: [{ lowerName: 'post3' }, { lowerName: 'post3.1', }], }); }); test('[Find Many] Get only custom fields + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find Many] Get only custom fields + where + limit', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(1); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find Many] Get only custom fields + where + orderBy', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ lowerName: string; posts: { lowerName: string; }[]; }[]>(); expect(usersWithPosts.length).toEqual(1); expect(usersWithPosts[0]?.posts.length).toEqual(2); expect(usersWithPosts).toContainEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // select only custom find one test('[Find One] Get only custom fields', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(3); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1' }, { lowerName: 'post1.2', }, { lowerName: 'post1.3' }], }); }); test('[Find One] Get only custom fields + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }, { lowerName: 'post1.3' }], }); }); test('[Find One] Get only custom fields + where + limit', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), limit: 1, extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.2' }], }); }); test('[Find One] Get only custom fields + where + orderBy', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, where: gte(postsTable.id, 2), orderBy: [desc(postsTable.id)], extras: ({ content }) => ({ lowerName: sql`lower(${content})`.as('content_lower'), }), }, }, where: eq(usersTable.id, 1), extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { lowerName: string; posts: { lowerName: string; }[]; } | undefined >(); expect(usersWithPosts?.posts.length).toEqual(2); expect(usersWithPosts).toEqual({ lowerName: 'dan', posts: [{ lowerName: 'post1.3' }, { lowerName: 'post1.2' }], }); }); // columns {} test('[Find Many] Get select {}', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); const users = await db.query.usersTable.findMany({ columns: {}, }); expectTypeOf(users).toEqualTypeOf<{}[]>(); expect(users.length).toBe(3); expect(users[0]).toEqual({}); expect(users[1]).toEqual({}); expect(users[2]).toEqual({}); }); // columns {} test('[Find One] Get select {}', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); const users = await db.query.usersTable.findFirst({ columns: {}, }); expectTypeOf(users).toEqualTypeOf<{} | undefined>(); expect(users).toEqual({}); }); // deep select {} test('[Find Many] Get deep select {}', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const users = await db.query.usersTable.findMany({ columns: {}, with: { posts: { columns: {}, }, }, }); expectTypeOf(users).toEqualTypeOf<{ posts: {}[] }[]>(); expect(users.length).toBe(3); expect(users[0]).toEqual({ posts: [{}] }); expect(users[1]).toEqual({ posts: [{}] }); expect(users[2]).toEqual({ posts: [{}] }); }); // deep select {} test('[Find One] Get deep select {}', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const users = await db.query.usersTable.findFirst({ columns: {}, with: { posts: { columns: {}, }, }, }); expectTypeOf(users).toEqualTypeOf<{ posts: {}[] } | undefined>(); expect(users).toEqual({ posts: [{}] }); }); /* Prepared statements for users+posts */ test('[Find Many] Get users with posts + prepared limit', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ with: { posts: { limit: placeholder('limit'), }, }, }).prepare('query1'); const usersWithPosts = await prepared.execute({ limit: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(3); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts[2]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[2]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared limit + offset', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), with: { posts: { limit: placeholder('pLimit'), }, }, }).prepare('query2'); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(2); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[1]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 4, ownerId: 2, content: 'Post2', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[1]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const prepared = db.query.usersTable.findMany({ where: (({ id }, { eq }) => eq(id, placeholder('id'))), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }).prepare('query3'); const usersWithPosts = await prepared.execute({ id: 1 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); test('[Find Many] Get users with posts + prepared + limit + offset + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const prepared = db.query.usersTable.findMany({ limit: placeholder('uLimit'), offset: placeholder('uOffset'), where: (({ id }, { eq, or }) => or(eq(id, placeholder('id')), eq(id, 3))), with: { posts: { where: (({ id }, { eq }) => eq(id, placeholder('pid'))), limit: placeholder('pLimit'), }, }, }).prepare('query4'); const usersWithPosts = await prepared.execute({ pLimit: 1, uLimit: 3, uOffset: 1, id: 2, pid: 6 }); expectTypeOf(usersWithPosts).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; }[]>(); expect(usersWithPosts.length).eq(1); expect(usersWithPosts[0]?.posts.length).eq(1); expect(usersWithPosts).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 6, ownerId: 3, content: 'Post3', createdAt: usersWithPosts[0]?.posts[0]?.createdAt }], }); }); /* [Find One] One relation users+posts */ test('[Find One] Get users with posts', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + limit posts', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts no results found', async (t) => { const { vpgDb: db } = t; const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts).toBeUndefined(); }); test('[Find One] Get users with posts + limit posts and users', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + custom fields', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: true, }, extras: ({ name }) => ({ lowerName: sql`lower(${name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(3); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, lowerName: 'dan', posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.2', createdAt: usersWithPosts?.posts[1]?.createdAt, }, { id: 3, ownerId: 1, content: 'Post1.3', createdAt: usersWithPosts?.posts[2]?.createdAt }], }); }); test('[Find One] Get users with posts + custom fields + limits', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.2' }, { ownerId: 1, content: 'Post1.3' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { limit: 1, }, }, extras: (usersTable, { sql }) => ({ lowerName: sql`lower(${usersTable.name})`.as('name_lower'), }), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lowerName: string; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).toEqual(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', lowerName: 'dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + orderBy', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: '1' }, { ownerId: 1, content: '2' }, { ownerId: 1, content: '3' }, { ownerId: 2, content: '4' }, { ownerId: 2, content: '5' }, { ownerId: 3, content: '6' }, { ownerId: 3, content: '7' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ with: { posts: { orderBy: (postsTable, { desc }) => [desc(postsTable.content)], }, }, orderBy: (usersTable, { desc }) => [desc(usersTable.id)], }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(2); expect(usersWithPosts).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, posts: [{ id: 7, ownerId: 3, content: '7', createdAt: usersWithPosts?.posts[1]?.createdAt, }, { id: 6, ownerId: 3, content: '6', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ where: (({ id }, { eq }) => eq(id, 1)), with: { posts: { where: (({ id }, { eq }) => eq(id, 1)), }, }, }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); test('[Find One] Get users with posts + where + partial', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial. Did not select posts id, but used it in where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: true, }, with: { posts: { columns: { id: true, content: true, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; name: string; posts: { id: number; content: string; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, name: 'Dan', posts: [{ id: 1, content: 'Post1' }], }); }); test('[Find One] Get users with posts + where + partial(true + false)', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { id: true, name: false, }, with: { posts: { columns: { id: true, content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; posts: { id: number; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, posts: [{ id: 1 }], }); }); test('[Find One] Get users with posts + where + partial(false)', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const usersWithPosts = await db.query.usersTable.findFirst({ columns: { name: false, }, with: { posts: { columns: { content: false, }, where: (({ id }, { eq }) => eq(id, 1)), }, }, where: (({ id }, { eq }) => eq(id, 1)), }); expectTypeOf(usersWithPosts).toEqualTypeOf< { id: number; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; createdAt: Date; }[]; } | undefined >(); expect(usersWithPosts!.posts.length).eq(1); expect(usersWithPosts).toEqual({ id: 1, verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, createdAt: usersWithPosts?.posts[0]?.createdAt }], }); }); /* One relation users+users. Self referencing */ test('Get user with invitee', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user + limit with invitee', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew', invitedBy: 1 }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ with: { invitee: true, }, limit: 2, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); }); test('Get user with invitee and custom fields', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[3]).toEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee and custom fields + limits', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), limit: 3, with: { invitee: { extras: (invitee, { sql }) => ({ lower: sql`lower(${invitee.name})`.as('lower_name') }), }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; } | null; }[] >(); usersWithInvitee.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(usersWithInvitee.length).eq(3); expect(usersWithInvitee[0]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]).toEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, }); }); test('Get user with invitee + order by', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(4); expect(usersWithInvitee[3]?.invitee).toBeNull(); expect(usersWithInvitee[2]?.invitee).toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[3]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[2]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, }); expect(usersWithInvitee[1]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee[0]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), with: { invitee: true, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, }); }); test('Get user with invitee + where + partial', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial. Did not select users id, but used it in where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { name: true, }, with: { invitee: { columns: { id: true, name: true, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(true+false)', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { id: true, name: true, verified: false, }, with: { invitee: { columns: { id: true, name: true, verified: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitee: { id: number; name: string; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitee: { id: 1, name: 'Dan' }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitee: { id: 2, name: 'Andrew' }, }); }); test('Get user with invitee + where + partial(false)', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); const usersWithInvitee = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), columns: { verified: false, }, with: { invitee: { columns: { name: false, }, }, }, }); expectTypeOf(usersWithInvitee).toEqualTypeOf< { id: number; name: string; invitedBy: number | null; invitee: { id: number; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(usersWithInvitee.length).eq(2); expect(usersWithInvitee[0]?.invitee).not.toBeNull(); expect(usersWithInvitee[1]?.invitee).not.toBeNull(); expect(usersWithInvitee).toContainEqual({ id: 3, name: 'Alex', invitedBy: 1, invitee: { id: 1, verified: false, invitedBy: null }, }); expect(usersWithInvitee).toContainEqual({ id: 4, name: 'John', invitedBy: 2, invitee: { id: 2, verified: false, invitedBy: null }, }); }); /* Two first-level relations users+users and users+posts */ test('Get user with invitee and posts', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ with: { invitee: true, posts: true, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 3, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ limit: 3, with: { invitee: true, posts: { limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + limits + custom fields in each', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ limit: 3, extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_invitee_name') }), }, posts: { limit: 1, extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_content') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + custom fields in each', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), with: { invitee: { extras: (users, { sql }) => ({ lower: sql`lower(${users.name})`.as('lower_name') }), }, posts: { extras: (posts, { sql }) => ({ lower: sql`lower(${posts.content})`.as('lower_name') }), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; lower: string; invitedBy: number | null; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; lower: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(4); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).toBeNull(); expect(response[2]?.invitee).not.toBeNull(); expect(response[3]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(2); expect(response[1]?.posts.length).eq(2); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(0); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', lower: 'post1', createdAt: response[0]?.posts[0]?.createdAt }, { id: 2, ownerId: 1, content: 'Post1.1', lower: 'post1.1', createdAt: response[0]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 3, ownerId: 2, content: 'Post2', lower: 'post2', createdAt: response[1]?.posts[0]?.createdAt }, { id: 4, ownerId: 2, content: 'Post2.1', lower: 'post2.1', createdAt: response[1]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[2]?.posts[0]?.createdAt }, { id: 6, ownerId: 3, content: 'Post3.1', lower: 'post3.1', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response).toContainEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { invitee: true, posts: { orderBy: (posts, { desc }) => [desc(posts.id)], }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(4); expect(response[3]?.invitee).toBeNull(); expect(response[2]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(2); expect(response[3]?.posts.length).eq(2); expect(response[3]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 1, content: 'Post1.1', createdAt: response[3]?.posts[0]?.createdAt }, { id: 1, ownerId: 1, content: 'Post1', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[2]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 4, ownerId: 2, content: 'Post2.1', createdAt: response[2]?.posts[0]?.createdAt }, { id: 3, ownerId: 2, content: 'Post2', createdAt: response[2]?.posts[1]?.createdAt, }], }); expect(response[1]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[3]?.posts[1]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 2), eq(users.id, 3))), with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 2)), }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(2); expect(response[0]?.invitee).toBeNull(); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(0); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, invitee: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[0]?.posts[0]?.createdAt }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + limit posts and users + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, { ownerId: 3, content: 'Post3.1' }, ]); const response = await db.query.usersTable.findMany({ where: (users, { eq, or }) => (or(eq(users.id, 3), eq(users.id, 4))), limit: 1, with: { invitee: true, posts: { where: (posts, { eq }) => (eq(posts.ownerId, 3)), limit: 1, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(1); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(1); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', createdAt: response[0]?.posts[0]?.createdAt }], }); }); test('Get user with invitee and posts + orderBy + where + custom', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { invitee: true, posts: { where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; posts: { id: number; lower: string; ownerId: number | null; content: string; createdAt: Date }[]; invitee: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: 1, invitee: { id: 1, name: 'Dan', verified: false, invitedBy: null }, posts: [{ id: 5, ownerId: 3, content: 'Post3', lower: 'post3', createdAt: response[1]?.posts[0]?.createdAt, }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', verified: false, invitedBy: 2, invitee: { id: 2, name: 'Andrew', verified: false, invitedBy: null }, posts: [], }); }); test('Get user with invitee and posts + orderBy + where + partial + custom', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex', invitedBy: 1 }, { id: 4, name: 'John', invitedBy: 2 }, ]); await db.insert(postsTable).values([ { ownerId: 1, content: 'Post1' }, { ownerId: 1, content: 'Post1.1' }, { ownerId: 2, content: 'Post2' }, { ownerId: 2, content: 'Post2.1' }, { ownerId: 3, content: 'Post3' }, ]); const response = await db.query.usersTable.findMany({ orderBy: [desc(usersTable.id)], where: or(eq(usersTable.id, 3), eq(usersTable.id, 4)), extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, columns: { id: true, name: true, }, with: { invitee: { columns: { id: true, name: true, }, extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, }, posts: { columns: { id: true, content: true, }, where: eq(postsTable.ownerId, 3), orderBy: [desc(postsTable.id)], extras: { lower: sql`lower(${postsTable.content})`.as('lower_name'), }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; lower: string; posts: { id: number; lower: string; content: string }[]; invitee: { id: number; name: string; lower: string; } | null; }[] >(); expect(response.length).eq(2); expect(response[1]?.invitee).not.toBeNull(); expect(response[0]?.invitee).not.toBeNull(); expect(response[0]?.posts.length).eq(0); expect(response[1]?.posts.length).eq(1); expect(response[1]).toEqual({ id: 3, name: 'Alex', lower: 'alex', invitee: { id: 1, name: 'Dan', lower: 'dan' }, posts: [{ id: 5, content: 'Post3', lower: 'post3', }], }); expect(response[0]).toEqual({ id: 4, name: 'John', lower: 'john', invitee: { id: 2, name: 'Andrew', lower: 'andrew' }, posts: [], }); }); /* One two-level relation users+posts+comments */ test('Get user with posts and posts with comments', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; }[]; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); // expect(response[2]).toEqual({ // id: 3, // name: 'Alex', // verified: false, // invitedBy: null, // posts: [{ // id: 3, // ownerId: 3, // content: 'Post3', // createdAt: response[2]?.posts[0]?.createdAt, // comments: [ // { // id: , // content: 'Comment3', // creator: 3, // postId: 3, // createdAt: response[2]?.posts[0]?.comments[0]?.createdAt, // }, // ], // }], // }); }); // Get user with limit posts and limit comments // Get user with custom field + post + comment with custom field // Get user with limit + posts orderBy + comment orderBy // Get user with where + posts where + comment where // Get user with where + posts partial where + comment where // Get user with where + posts partial where + comment partial(false) where // Get user with where partial(false) + posts partial where partial(false) + comment partial(false+true) where // Get user with where + posts partial where + comment where. Didn't select field from where in posts // Get user with where + posts partial where + comment where. Didn't select field from where for all // Get with limit+offset in each /* One two-level + One first-level relation users+posts+comments and users+users */ /* One three-level relation users+posts+comments+comment_owner */ test('Get user with posts and posts with comments and comments with owner', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(postsTable).values([ { id: 1, ownerId: 1, content: 'Post1' }, { id: 2, ownerId: 2, content: 'Post2' }, { id: 3, ownerId: 3, content: 'Post3' }, ]); await db.insert(commentsTable).values([ { postId: 1, content: 'Comment1', creator: 2 }, { postId: 2, content: 'Comment2', creator: 2 }, { postId: 3, content: 'Comment3', creator: 3 }, ]); const response = await db.query.usersTable.findMany({ with: { posts: { with: { comments: { with: { author: true, }, }, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; posts: { id: number; content: string; ownerId: number | null; createdAt: Date; comments: { id: number; content: string; createdAt: Date; creator: number | null; postId: number | null; author: { id: number; name: string; verified: boolean; invitedBy: number | null; } | null; }[]; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).eq(3); expect(response[0]?.posts.length).eq(1); expect(response[1]?.posts.length).eq(1); expect(response[2]?.posts.length).eq(1); expect(response[0]?.posts[0]?.comments.length).eq(1); expect(response[1]?.posts[0]?.comments.length).eq(1); expect(response[2]?.posts[0]?.comments.length).eq(1); expect(response[0]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, posts: [{ id: 1, ownerId: 1, content: 'Post1', createdAt: response[0]?.posts[0]?.createdAt, comments: [ { id: 1, content: 'Comment1', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 1, createdAt: response[0]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, posts: [{ id: 2, ownerId: 2, content: 'Post2', createdAt: response[1]?.posts[0]?.createdAt, comments: [ { id: 2, content: 'Comment2', creator: 2, author: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, postId: 2, createdAt: response[1]?.posts[0]?.comments[0]?.createdAt, }, ], }], }); }); /* One three-level relation + 1 first-level relatioon 1. users+posts+comments+comment_owner 2. users+users */ /* One four-level relation users+posts+comments+coment_likes */ /* [Find Many] Many-to-many cases Users+users_to_groups+groups */ test('[Find Many] Get users with groups', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ with: { usersToGroups: { columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ limit: 2, with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + limit + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ limit: 1, where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find Many] Get groups with users + limit + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ limit: 1, where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(1); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(0); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(0); expect(response).toContainEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [], }); }); test('[Find Many] Get users with groups + orderBy', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(2); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find Many] Get groups with users + orderBy', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[]>(); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response[2]).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }, { user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find Many] Get users with groups + orderBy + limit', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ orderBy: (users, { desc }) => [desc(users.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf<{ id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; }[]>(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', description: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); /* [Find One] Many-to-many cases Users+users_to_groups+groups */ test('[Find One] Get users with groups', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ with: { usersToGroups: { limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + limit + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 1), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', description: null, }, }], }); }); test('[Find One] Get groups with users + limit + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), limit: 1, columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 2, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ where: (_, { eq, or }) => or(eq(usersTable.id, 1), eq(usersTable.id, 2)), with: { usersToGroups: { where: eq(usersToGroupsTable.groupId, 2), columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(0); expect(response).toEqual({ id: 1, name: 'Dan', verified: false, invitedBy: null, usersToGroups: [], }); }); test('[Find One] Get groups with users + where', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ where: gt(groupsTable.id, 1), with: { usersToGroups: { where: eq(usersToGroupsTable.userId, 2), columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(2); expect(response).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }, { group: { id: 2, name: 'Group2', description: null, }, }], }); }); test('[Find One] Get groups with users + orderBy', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findFirst({ orderBy: [desc(groupsTable.id)], with: { usersToGroups: { orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('[Find One] Get users with groups + orderBy + limit', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findFirst({ orderBy: (users, { desc }) => [desc(users.id)], with: { usersToGroups: { limit: 1, orderBy: [desc(usersToGroupsTable.groupId)], columns: {}, with: { group: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; usersToGroups: { group: { id: number; name: string; description: string | null; }; }[]; } | undefined >(); expect(response?.usersToGroups.length).toEqual(1); expect(response).toEqual({ id: 3, name: 'Alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', description: null, }, }], }); }); test('Get groups with users + orderBy + limit', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ orderBy: [desc(groupsTable.id)], limit: 2, with: { usersToGroups: { limit: 1, orderBy: (utg, { desc }) => [desc(utg.userId)], columns: {}, with: { user: true, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; }; }[]; }[] >(); expect(response.length).toEqual(2); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[1]).toEqual({ id: 2, name: 'Group2', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); expect(response[0]).toEqual({ id: 3, name: 'Group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', verified: false, invitedBy: null, }, }], }); }); test('Get users with groups + custom', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.usersTable.findMany({ extras: { lower: sql`lower(${usersTable.name})`.as('lower_name'), }, with: { usersToGroups: { columns: {}, with: { group: { extras: { lower: sql`lower(${groupsTable.name})`.as('lower_name'), }, }, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; usersToGroups: { group: { id: number; name: string; description: string | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(1); expect(response[2]?.usersToGroups.length).toEqual(2); expect(response).toContainEqual({ id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 1, name: 'Group1', lower: 'group1', description: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, usersToGroups: [{ group: { id: 3, name: 'Group3', lower: 'group3', description: null, }, }, { group: { id: 2, name: 'Group2', lower: 'group2', description: null, }, }], }); }); test('Get groups with users + custom', async (t) => { const { vpgDb: db } = t; await db.insert(usersTable).values([ { id: 1, name: 'Dan' }, { id: 2, name: 'Andrew' }, { id: 3, name: 'Alex' }, ]); await db.insert(groupsTable).values([ { id: 1, name: 'Group1' }, { id: 2, name: 'Group2' }, { id: 3, name: 'Group3' }, ]); await db.insert(usersToGroupsTable).values([ { userId: 1, groupId: 1 }, { userId: 2, groupId: 2 }, { userId: 3, groupId: 3 }, { userId: 3, groupId: 2 }, ]); const response = await db.query.groupsTable.findMany({ extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), with: { usersToGroups: { columns: {}, with: { user: { extras: (table, { sql }) => ({ lower: sql`lower(${table.name})`.as('lower_name'), }), }, }, }, }, }); expectTypeOf(response).toEqualTypeOf< { id: number; name: string; description: string | null; lower: string; usersToGroups: { user: { id: number; name: string; verified: boolean; invitedBy: number | null; lower: string; }; }[]; }[] >(); response.sort((a, b) => (a.id > b.id) ? 1 : -1); expect(response.length).toEqual(3); expect(response[0]?.usersToGroups.length).toEqual(1); expect(response[1]?.usersToGroups.length).toEqual(2); expect(response[2]?.usersToGroups.length).toEqual(1); expect(response).toContainEqual({ id: 1, name: 'Group1', lower: 'group1', description: null, usersToGroups: [{ user: { id: 1, name: 'Dan', lower: 'dan', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 2, name: 'Group2', lower: 'group2', description: null, usersToGroups: [{ user: { id: 2, name: 'Andrew', lower: 'andrew', verified: false, invitedBy: null, }, }, { user: { id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, }, }], }); expect(response).toContainEqual({ id: 3, name: 'Group3', lower: 'group3', description: null, usersToGroups: [{ user: { id: 3, name: 'Alex', lower: 'alex', verified: false, invitedBy: null, }, }], }); }); test('.toSQL()', () => { const query = db.query.usersTable.findFirst().toSQL(); expect(query).toHaveProperty('sql', expect.any(String)); expect(query).toHaveProperty('params', expect.any(Array)); }); // + custom + where + orderby // + custom + where + orderby + limit // + partial // + partial(false) // + partial + orderBy + where (all not selected) /* One four-level relation users+posts+comments+coment_likes + users+users_to_groups+groups */ /* Really hard case 1. users+posts+comments+coment_likes 2. users+users_to_groups+groups 3. users+users */ ================================================ FILE: integration-tests/tests/replicas/mysql.test.ts ================================================ import { sql } from 'drizzle-orm'; import { boolean, mysqlTable, serial, text, withReplicas } from 'drizzle-orm/mysql-core'; import { drizzle } from 'drizzle-orm/mysql2'; import { describe, expect, it, vi } from 'vitest'; const usersTable = mysqlTable('users', { id: serial('id' as string).primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), }); const users = mysqlTable('users', { id: serial('id' as string).primaryKey(), }); describe('[select] read replicas mysql', () => { it('primary select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query = db.$primary.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(query.toSQL().sql).toEqual('select `id` from `users`'); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); it('random replica select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query1 = db.select({ count: sql`count(*)`.as('count') }).from(users).limit(1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select count(*) as `count` from `users` limit ?'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select `id` from `users`'); }); it('single read replica select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const query1 = db.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select `id` from `users`'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(query2.toSQL().sql).toEqual('select `id` from `users`'); }); it('single read replica select + primary select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const query1 = db.select({ id: users.id }).from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select `id` from `users`'); const query2 = db.$primary.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select `id` from `users`'); }); it('always first read select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query1 = db.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select `id` from `users`'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('select `id` from `users`'); }); }); describe('[selectDistinct] read replicas mysql', () => { it('primary selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query = db.$primary.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query.toSQL().sql).toEqual('select distinct `id` from `users`'); }); it('random replica selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); }); it('single read replica selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); }); it('single read replica selectDistinct + primary selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); const query2 = db.$primary.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); }); it('always first read selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); }); }); describe('[with] read replicas mysql', () => { it('primary with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); const obj1 = {} as any; const obj2 = {} as any; const obj3 = {} as any; const obj4 = {} as any; db.$primary.with(obj1, obj2, obj3, obj4); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(obj1, obj2, obj3, obj4); }); it('random replica with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.with(); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); }); it('single read replica with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.with(); expect(spyRead1).toHaveBeenCalledTimes(2); }); it('single read replica with + primary with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$primary.with(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); }); it('always first read with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); const obj1 = {} as any; const obj2 = {} as any; const obj3 = {} as any; db.with(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(obj1); db.with(obj2, obj3); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(obj2, obj3); }); }); describe('[update] replicas mysql', () => { it('primary update', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'update'); const spyRead1 = vi.spyOn(read1, 'update'); const spyRead2 = vi.spyOn(read2, 'update'); const query1 = db.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('update `users` set `id` = ?'); const query2 = db.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('update `users` set `id` = ?'); const query3 = db.$primary.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query3.toSQL().sql).toEqual('update `users` set `id` = ?'); }); }); describe('[delete] replicas mysql', () => { it('primary delete', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'delete'); const spyRead1 = vi.spyOn(read1, 'delete'); const spyRead2 = vi.spyOn(read2, 'delete'); const query1 = db.delete(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(users); expect(query1.toSQL().sql).toEqual('delete from `users`'); const query2 = db.delete(users); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, users); expect(query2.toSQL().sql).toEqual('delete from `users`'); db.$primary.delete({} as any); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[insert] replicas mysql', () => { it('primary insert', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'insert'); const spyRead1 = vi.spyOn(read1, 'insert'); const spyRead2 = vi.spyOn(read2, 'insert'); const query = db.insert(users).values({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(users); expect(query.toSQL().sql).toEqual('insert into `users` (`id`) values (?)'); db.insert(users); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, users); db.$primary.insert({} as any); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[execute] replicas mysql', () => { it('primary execute', async () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'execute'); const spyRead1 = vi.spyOn(read1, 'execute'); const spyRead2 = vi.spyOn(read2, 'execute'); expect(db.execute(sql``)).rejects.toThrow(); // try { // db.execute(sql``); // } catch { /* empty */ } expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(db.execute(sql``)).rejects.toThrow(); // try { // db.execute(sql``); // } catch { /* empty */ } expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(db.execute(sql``)).rejects.toThrow(); // try { // db.execute(sql``); // } catch { /* empty */ } expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[transaction] replicas mysql', () => { it('primary transaction', async () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'transaction'); const spyRead1 = vi.spyOn(read1, 'transaction'); const spyRead2 = vi.spyOn(read2, 'transaction'); const txFn1 = async (tx: any) => { tx.select().from({} as any); }; expect(db.transaction(txFn1)).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(txFn1); const txFn2 = async (tx: any) => { tx.select().from({} as any); }; expect(db.transaction(txFn2)).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, txFn2); expect(db.transaction(async (tx) => { tx.select().from({} as any); })).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[findFirst] read replicas mysql', () => { it('primary findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); const obj = {} as any; db.$primary.query.usersTable.findFirst(obj); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(obj); }); it('random replica findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); const par1 = {} as any; db.query.usersTable.findFirst(par1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(par1); const query = db.query.usersTable.findFirst(); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable` limit ?'); }); it('single read replica findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); db.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.query.usersTable.findFirst(); expect(spyRead1).toHaveBeenCalledTimes(2); }); it('single read replica findFirst + primary findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); db.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$primary.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); }); it('always first read findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); db.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.query.usersTable.findFirst(); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[findMany] read replicas mysql', () => { it('primary findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); const obj = {} as any; const query = db.$primary.query.usersTable.findMany(obj); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(obj); expect(query.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); }); it('random replica findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); expect(spyRead1).toHaveBeenCalledWith(obj1); const query2 = db.query.usersTable.findMany(obj2); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); expect(spyRead2).toHaveBeenCalledWith(obj2); }); it('single read replica findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledWith(obj1); expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); const query2 = db.query.usersTable.findMany(obj2); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); }); it('single read replica findMany + primary findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledWith(obj1); expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); const query2 = db.$primary.query.usersTable.findMany(obj2); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyPrimary).toHaveBeenNthCalledWith(1, obj2); expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); }); it('always first read findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read1 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const read2 = drizzle.mock({ schema: { usersTable }, mode: 'default' }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(obj1); expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); const query2 = db.query.usersTable.findMany(obj2); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); }); }); describe('[$count] read replicas postgres', () => { it('primary $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$primary.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); it('random replica $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.$count(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); }); it('single read replica $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$count(users); expect(spyRead1).toHaveBeenCalledTimes(2); }); it('single read replica $count + primary $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$primary.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); }); it('always first read $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.$count(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); ================================================ FILE: integration-tests/tests/replicas/postgres.test.ts ================================================ import { sql } from 'drizzle-orm'; import { drizzle } from 'drizzle-orm/node-postgres'; import { boolean, jsonb, pgTable, serial, text, timestamp, withReplicas } from 'drizzle-orm/pg-core'; import { describe, expect, it, vi } from 'vitest'; const usersTable = pgTable('users', { id: serial('id' as string).primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: jsonb('jsonb').$type(), createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), }); const users = pgTable('users', { id: serial('id' as string).primaryKey(), }); describe('[select] read replicas postgres', () => { it('primary select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query = db.$primary.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(query.toSQL().sql).toEqual('select "id" from "users"'); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); it('random replica select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query1 = db.select({ count: sql`count(*)`.as('count') }).from(users).limit(1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select count(*) as "count" from "users" limit $1'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select "id" from "users"'); }); it('single read replica select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const query1 = db.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select "id" from "users"'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(query2.toSQL().sql).toEqual('select "id" from "users"'); }); it('single read replica select + primary select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const query1 = db.select({ id: users.id }).from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select "id" from "users"'); const query2 = db.$primary.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select "id" from "users"'); }); it('always first read select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query1 = db.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select "id" from "users"'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('select "id" from "users"'); }); }); describe('[selectDistinct] read replicas postgres', () => { it('primary selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query = db.$primary.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query.toSQL().sql).toEqual('select distinct "id" from "users"'); }); it('random replica selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select distinct "id" from "users"'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select distinct "id" from "users"'); }); it('single read replica selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select distinct "id" from "users"'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(query2.toSQL().sql).toEqual('select distinct "id" from "users"'); }); it('single read replica selectDistinct + primary selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select distinct "id" from "users"'); const query2 = db.$primary.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select distinct "id" from "users"'); }); it('always first read selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select distinct "id" from "users"'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('select distinct "id" from "users"'); }); }); describe('[with] read replicas postgres', () => { it('primary with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); const obj1 = {} as any; const obj2 = {} as any; const obj3 = {} as any; const obj4 = {} as any; db.$primary.with(obj1, obj2, obj3, obj4); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(obj1, obj2, obj3, obj4); }); it('random replica with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.with(); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); }); it('single read replica with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.with(); expect(spyRead1).toHaveBeenCalledTimes(2); }); it('single read replica with + primary with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$primary.with(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); }); it('always first read with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); const obj1 = {} as any; const obj2 = {} as any; const obj3 = {} as any; db.with(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(obj1); db.with(obj2, obj3); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(obj2, obj3); }); }); describe('[update] replicas postgres', () => { it('primary update', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'update'); const spyRead1 = vi.spyOn(read1, 'update'); const spyRead2 = vi.spyOn(read2, 'update'); const query1 = db.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('update "users" set "id" = $1'); const query2 = db.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('update "users" set "id" = $1'); const query3 = db.$primary.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query3.toSQL().sql).toEqual('update "users" set "id" = $1'); }); }); describe('[delete] replicas postgres', () => { it('primary delete', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'delete'); const spyRead1 = vi.spyOn(read1, 'delete'); const spyRead2 = vi.spyOn(read2, 'delete'); const query1 = db.delete(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(users); expect(query1.toSQL().sql).toEqual('delete from "users"'); const query2 = db.delete(users); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, users); expect(query2.toSQL().sql).toEqual('delete from "users"'); db.$primary.delete({} as any); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[insert] replicas postgres', () => { it('primary insert', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'insert'); const spyRead1 = vi.spyOn(read1, 'insert'); const spyRead2 = vi.spyOn(read2, 'insert'); const query = db.insert(users).values({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(users); expect(query.toSQL().sql).toEqual('insert into "users" ("id") values ($1)'); db.insert(users); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, users); db.$primary.insert({} as any); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[execute] replicas postgres', () => { it('primary execute', async () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'execute'); const spyRead1 = vi.spyOn(read1, 'execute'); const spyRead2 = vi.spyOn(read2, 'execute'); expect(db.execute(sql``)).rejects.toThrow(); // try { // db.execute(sql``); // } catch { /* empty */ } expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(db.execute(sql``)).rejects.toThrow(); // try { // db.execute(sql``); // } catch { /* empty */ } expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(db.execute(sql``)).rejects.toThrow(); // try { // db.execute(sql``); // } catch { /* empty */ } expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[transaction] replicas postgres', () => { it('primary transaction', async () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'transaction'); const spyRead1 = vi.spyOn(read1, 'transaction'); const spyRead2 = vi.spyOn(read2, 'transaction'); const txFn1 = async (tx: any) => { tx.select().from({} as any); }; expect(db.transaction(txFn1)).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(txFn1); const txFn2 = async (tx: any) => { tx.select().from({} as any); }; expect(db.transaction(txFn2)).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, txFn2); expect(db.transaction(async (tx) => { tx.select().from({} as any); })).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[findFirst] read replicas postgres', () => { it('primary findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); const obj = {} as any; db.$primary.query.usersTable.findFirst(obj); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(obj); }); it('random replica findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); const par1 = {} as any; db.query.usersTable.findFirst(par1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(par1); const query = db.query.usersTable.findFirst(); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query.toSQL().sql).toEqual( 'select "id", "name", "verified", "jsonb", "created_at" from "users" "usersTable" limit $1', ); }); it('single read replica findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); db.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.query.usersTable.findFirst(); expect(spyRead1).toHaveBeenCalledTimes(2); }); it('single read replica findFirst + primary findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); db.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$primary.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); }); it('always first read findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); db.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.query.usersTable.findFirst(); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[findMany] read replicas postgres', () => { it('primary findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); const obj = {} as any; const query = db.$primary.query.usersTable.findMany(obj); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(obj); expect(query.toSQL().sql).toEqual( 'select "id", "name", "verified", "jsonb", "created_at" from "users" "usersTable"', ); }); it('random replica findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual( 'select "id", "name", "verified", "jsonb", "created_at" from "users" "usersTable"', ); expect(spyRead1).toHaveBeenCalledWith(obj1); const query2 = db.query.usersTable.findMany(obj2); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual( 'select "id", "name", "verified", "jsonb", "created_at" from "users" "usersTable"', ); expect(spyRead2).toHaveBeenCalledWith(obj2); }); it('single read replica findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledWith(obj1); expect(query1.toSQL().sql).toEqual( 'select "id", "name", "verified", "jsonb", "created_at" from "users" "usersTable"', ); const query2 = db.query.usersTable.findMany(obj2); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); expect(query2.toSQL().sql).toEqual( 'select "id", "name", "verified", "jsonb", "created_at" from "users" "usersTable"', ); }); it('single read replica findMany + primary findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledWith(obj1); expect(query1.toSQL().sql).toEqual( 'select "id", "name", "verified", "jsonb", "created_at" from "users" "usersTable"', ); const query2 = db.$primary.query.usersTable.findMany(obj2); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyPrimary).toHaveBeenNthCalledWith(1, obj2); expect(query2.toSQL().sql).toEqual( 'select "id", "name", "verified", "jsonb", "created_at" from "users" "usersTable"', ); }); it('always first read findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(obj1); expect(query1.toSQL().sql).toEqual( 'select "id", "name", "verified", "jsonb", "created_at" from "users" "usersTable"', ); const query2 = db.query.usersTable.findMany(obj2); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); expect(query2.toSQL().sql).toEqual( 'select "id", "name", "verified", "jsonb", "created_at" from "users" "usersTable"', ); }); }); describe('[$count] read replicas postgres', () => { it('primary $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$primary.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); it('random replica $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); }); it('single read replica $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$count(users); expect(spyRead1).toHaveBeenCalledTimes(2); }); it('single read replica $count + primary $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$primary.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); }); it('always first read $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.$count(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); ================================================ FILE: integration-tests/tests/replicas/singlestore.test.ts ================================================ import { sql } from 'drizzle-orm'; import { drizzle } from 'drizzle-orm/singlestore'; import { serial, singlestoreTable, withReplicas } from 'drizzle-orm/singlestore-core'; import { describe, expect, it, vi } from 'vitest'; // const usersTable = singlestoreTable('users', { // id: serial('id' as string).primaryKey(), // name: text('name').notNull(), // verified: boolean('verified').notNull().default(false), // }); const users = singlestoreTable('users', { id: serial('id' as string).primaryKey(), }); describe('[select] read replicas singlestore', () => { it('primary select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query = db.$primary.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(query.toSQL().sql).toEqual('select `id` from `users`'); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); it('random replica select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query1 = db.select({ count: sql`count(*)`.as('count') }).from(users).limit(1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select count(*) as `count` from `users` limit ?'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select `id` from `users`'); }); it('single read replica select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const query1 = db.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select `id` from `users`'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(query2.toSQL().sql).toEqual('select `id` from `users`'); }); it('single read replica select + primary select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const query1 = db.select({ id: users.id }).from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select `id` from `users`'); const query2 = db.$primary.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select `id` from `users`'); }); it('always first read select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query1 = db.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select `id` from `users`'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('select `id` from `users`'); }); }); describe('[selectDistinct] read replicas singlestore', () => { it('primary selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query = db.$primary.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query.toSQL().sql).toEqual('select distinct `id` from `users`'); }); it('random replica selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); }); it('single read replica selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); }); it('single read replica selectDistinct + primary selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); const query2 = db.$primary.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); }); it('always first read selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select distinct `id` from `users`'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('select distinct `id` from `users`'); }); }); describe('[with] read replicas singlestore', () => { it('primary with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); const obj1 = {} as any; const obj2 = {} as any; const obj3 = {} as any; const obj4 = {} as any; db.$primary.with(obj1, obj2, obj3, obj4); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(obj1, obj2, obj3, obj4); }); it('random replica with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.with(); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); }); it('single read replica with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.with(); expect(spyRead1).toHaveBeenCalledTimes(2); }); it('single read replica with + primary with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$primary.with(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); }); it('always first read with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); const obj1 = {} as any; const obj2 = {} as any; const obj3 = {} as any; db.with(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(obj1); db.with(obj2, obj3); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(obj2, obj3); }); }); describe('[update] replicas singlestore', () => { it('primary update', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'update'); const spyRead1 = vi.spyOn(read1, 'update'); const spyRead2 = vi.spyOn(read2, 'update'); const query1 = db.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('update `users` set `id` = ?'); const query2 = db.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('update `users` set `id` = ?'); const query3 = db.$primary.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query3.toSQL().sql).toEqual('update `users` set `id` = ?'); }); }); describe('[delete] replicas singlestore', () => { it('primary delete', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'delete'); const spyRead1 = vi.spyOn(read1, 'delete'); const spyRead2 = vi.spyOn(read2, 'delete'); const query1 = db.delete(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(users); expect(query1.toSQL().sql).toEqual('delete from `users`'); const query2 = db.delete(users); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, users); expect(query2.toSQL().sql).toEqual('delete from `users`'); db.$primary.delete({} as any); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[insert] replicas singlestore', () => { it('primary insert', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'insert'); const spyRead1 = vi.spyOn(read1, 'insert'); const spyRead2 = vi.spyOn(read2, 'insert'); const query = db.insert(users).values({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(users); expect(query.toSQL().sql).toEqual('insert into `users` (`id`) values (?)'); db.insert(users); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, users); db.$primary.insert({} as any); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[execute] replicas singlestore', () => { it('primary execute', async () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'execute'); const spyRead1 = vi.spyOn(read1, 'execute'); const spyRead2 = vi.spyOn(read2, 'execute'); expect(db.execute(sql``)).rejects.toThrow(); // try { // db.execute(sql``); // } catch { /* empty */ } expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(db.execute(sql``)).rejects.toThrow(); // try { // db.execute(sql``); // } catch { /* empty */ } expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(db.execute(sql``)).rejects.toThrow(); // try { // db.execute(sql``); // } catch { /* empty */ } expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[transaction] replicas singlestore', () => { it('primary transaction', async () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'transaction'); const spyRead1 = vi.spyOn(read1, 'transaction'); const spyRead2 = vi.spyOn(read2, 'transaction'); const txFn1 = async (tx: any) => { tx.select().from({} as any); }; expect(db.transaction(txFn1)).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(txFn1); const txFn2 = async (tx: any) => { tx.select().from({} as any); }; expect(db.transaction(txFn2)).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, txFn2); expect(db.transaction(async (tx) => { tx.select().from({} as any); })).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); // We are waiting for SingleStore support for `json_array` function // describe('[findFirst] read replicas singlestore', () => { // // it('primary findFirst', () => { // // const primaryDb = drizzle.mock({ schema: { usersTable } }); // // const read1 = drizzle.mock({ schema: { usersTable } }); // // const read2 = drizzle.mock({ schema: { usersTable } }); // // const db = withReplicas(primaryDb, [read1, read2]); // // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); // // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); // // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); // // const obj = {} as any; // // db.$primary.query.usersTable.findFirst(obj); // // expect(spyPrimary).toHaveBeenCalledTimes(1); // // expect(spyRead1).toHaveBeenCalledTimes(0); // // expect(spyRead2).toHaveBeenCalledTimes(0); // // expect(spyPrimary).toHaveBeenCalledWith(obj); // // }); // // it('random replica findFirst', () => { // // const primaryDb = drizzle.mock({ schema: { usersTable } }); // // const read1 = drizzle.mock({ schema: { usersTable } }); // // const read2 = drizzle.mock({ schema: { usersTable } }); // // const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); // // const db = withReplicas(primaryDb, [read1, read2], () => { // // return randomMockReplica(); // // }); // // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); // // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); // // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); // // const par1 = {} as any; // // db.query.usersTable.findFirst(par1); // // expect(spyPrimary).toHaveBeenCalledTimes(0); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // expect(spyRead2).toHaveBeenCalledTimes(0); // // expect(spyRead1).toHaveBeenCalledWith(par1); // // const query = db.query.usersTable.findFirst(); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // expect(spyRead2).toHaveBeenCalledTimes(1); // // expect(query.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable` limit ?'); // // }); // // We are waiting for SingleStore support for `json_array` function // // it('single read replica findFirst', () => { // // const primaryDb = drizzle.mock({ schema: { usersTable } }); // // const read1 = drizzle.mock({ schema: { usersTable } }); // // const db = withReplicas(primaryDb, [read1]); // // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); // // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); // // db.query.usersTable.findFirst(); // // expect(spyPrimary).toHaveBeenCalledTimes(0); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // db.query.usersTable.findFirst(); // // expect(spyRead1).toHaveBeenCalledTimes(2); // // }); // // We are waiting for SingleStore support for `json_array` function // // it('single read replica findFirst + primary findFirst', () => { // // const primaryDb = drizzle.mock({ schema: { usersTable } }); // // const read1 = drizzle.mock({ schema: { usersTable } }); // // const db = withReplicas(primaryDb, [read1]); // // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); // // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); // // db.query.usersTable.findFirst(); // // expect(spyPrimary).toHaveBeenCalledTimes(0); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // db.$primary.query.usersTable.findFirst(); // // expect(spyPrimary).toHaveBeenCalledTimes(1); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // }); // // We are waiting for SingleStore support for `json_array` function // // it('always first read findFirst', () => { // // const primaryDb = drizzle.mock({ schema: { usersTable } }); // // const read1 = drizzle.mock({ schema: { usersTable } }); // // const read2 = drizzle.mock({ schema: { usersTable } }); // // const db = withReplicas(primaryDb, [read1, read2], (replicas) => { // // return replicas[0]!; // // }); // // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); // // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); // // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); // // db.query.usersTable.findFirst(); // // expect(spyPrimary).toHaveBeenCalledTimes(0); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // expect(spyRead2).toHaveBeenCalledTimes(0); // // db.query.usersTable.findFirst(); // // expect(spyRead1).toHaveBeenCalledTimes(2); // // expect(spyRead2).toHaveBeenCalledTimes(0); // // }); // }); // describe('[findMany] read replicas singlestore', () => { // // We are waiting for SingleStore support for `json_array` function // // it('primary findMany', () => { // // const primaryDb = drizzle.mock({ schema: { usersTable } }); // // const read1 = drizzle.mock({ schema: { usersTable } }); // // const read2 = drizzle.mock({ schema: { usersTable } }); // // const db = withReplicas(primaryDb, [read1, read2]); // // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); // // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); // // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); // // const obj = {} as any; // // const query = db.$primary.query.usersTable.findMany(obj); // // expect(spyPrimary).toHaveBeenCalledTimes(1); // // expect(spyRead1).toHaveBeenCalledTimes(0); // // expect(spyRead2).toHaveBeenCalledTimes(0); // // expect(spyPrimary).toHaveBeenCalledWith(obj); // // expect(query.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); // // }); // // We are waiting for SingleStore support for `json_array` function // // it('random replica findMany', () => { // // const primaryDb = drizzle.mock({ schema: { usersTable } }); // // const read1 = drizzle.mock({ schema: { usersTable } }); // // const read2 = drizzle.mock({ schema: { usersTable } }); // // const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); // // const db = withReplicas(primaryDb, [read1, read2], () => { // // return randomMockReplica(); // // }); // // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); // // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); // // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); // // const obj1 = {} as any; // // const obj2 = {} as any; // // const query1 = db.query.usersTable.findMany(obj1); // // expect(spyPrimary).toHaveBeenCalledTimes(0); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // expect(spyRead2).toHaveBeenCalledTimes(0); // // expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); // // expect(spyRead1).toHaveBeenCalledWith(obj1); // // const query2 = db.query.usersTable.findMany(obj2); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // expect(spyRead2).toHaveBeenCalledTimes(1); // // expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); // // expect(spyRead2).toHaveBeenCalledWith(obj2); // // }); // // We are waiting for SingleStore support for `json_array` function // // it('single read replica findMany', () => { // // const primaryDb = drizzle.mock({ schema: { usersTable } }); // // const read1 = drizzle.mock({ schema: { usersTable } }); // // const db = withReplicas(primaryDb, [read1]); // // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); // // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); // // const obj1 = {} as any; // // const obj2 = {} as any; // // const query1 = db.query.usersTable.findMany(obj1); // // expect(spyPrimary).toHaveBeenCalledTimes(0); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // expect(spyRead1).toHaveBeenCalledWith(obj1); // // expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); // // const query2 = db.query.usersTable.findMany(obj2); // // expect(spyRead1).toHaveBeenCalledTimes(2); // // expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); // // expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); // // }); // // We are waiting for SingleStore support for `json_array` function // // it('single read replica findMany + primary findMany', () => { // // const primaryDb = drizzle.mock({ schema: { usersTable } }); // // const read1 = drizzle.mock({ schema: { usersTable } }); // // const db = withReplicas(primaryDb, [read1]); // // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); // // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); // // const obj1 = {} as any; // // const obj2 = {} as any; // // const query1 = db.query.usersTable.findMany(obj1); // // expect(spyPrimary).toHaveBeenCalledTimes(0); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // expect(spyRead1).toHaveBeenCalledWith(obj1); // // expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); // // const query2 = db.$primary.query.usersTable.findMany(obj2); // // expect(spyPrimary).toHaveBeenCalledTimes(1); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // expect(spyPrimary).toHaveBeenNthCalledWith(1, obj2); // // expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); // // }); // // We are waiting for SingleStore support for `json_array` function // // it('always first read findMany', () => { // // const primaryDb = drizzle.mock({ schema: { usersTable } }); // // const read1 = drizzle.mock({ schema: { usersTable } }); // // const read2 = drizzle.mock({ schema: { usersTable } }); // // const db = withReplicas(primaryDb, [read1, read2], (replicas) => { // // return replicas[0]!; // // }); // // const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); // // const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); // // const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); // // const obj1 = {} as any; // // const obj2 = {} as any; // // const query1 = db.query.usersTable.findMany(obj1); // // expect(spyPrimary).toHaveBeenCalledTimes(0); // // expect(spyRead1).toHaveBeenCalledTimes(1); // // expect(spyRead2).toHaveBeenCalledTimes(0); // // expect(spyRead1).toHaveBeenCalledWith(obj1); // // expect(query1.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); // // const query2 = db.query.usersTable.findMany(obj2); // // expect(spyRead1).toHaveBeenCalledTimes(2); // // expect(spyRead2).toHaveBeenCalledTimes(0); // // expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); // // expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); // // }); // }); describe('[$count] read replicas postgres', () => { it('primary $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$primary.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); it('random replica $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); }); it('single read replica $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$count(users); expect(spyRead1).toHaveBeenCalledTimes(2); }); it('single read replica $count + primary $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$primary.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); }); it('always first read $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.$count(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); ================================================ FILE: integration-tests/tests/replicas/sqlite.test.ts ================================================ import { sql } from 'drizzle-orm'; import { drizzle } from 'drizzle-orm/libsql'; import { int, sqliteTable, text, withReplicas } from 'drizzle-orm/sqlite-core'; import { describe, expect, it, vi } from 'vitest'; const usersTable = sqliteTable('users', { id: int('id' as string).primaryKey(), name: text('name').notNull(), verified: text('verified').notNull().default('true'), }); const users = sqliteTable('users', { id: int('id' as string).primaryKey(), }); describe('[select] read replicas sqlite', () => { it('primary select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query = db.$primary.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(query.toSQL().sql).toEqual('select "id" from "users"'); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); it('random replica select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query1 = db.select({ count: sql`count(*)`.as('count') }).from(users).limit(1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select count(*) as "count" from "users" limit ?'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select "id" from "users"'); }); it('single read replica select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const query1 = db.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select "id" from "users"'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(query2.toSQL().sql).toEqual('select "id" from "users"'); }); it('single read replica select + primary select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const query1 = db.select({ id: users.id }).from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select "id" from "users"'); const query2 = db.$primary.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select "id" from "users"'); }); it('always first read select', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'select'); const spyRead1 = vi.spyOn(read1, 'select'); const spyRead2 = vi.spyOn(read2, 'select'); const query1 = db.select().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select "id" from "users"'); const query2 = db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('select "id" from "users"'); }); }); describe('[selectDistinct] read replicas sqlite', () => { it('primary selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query = db.$primary.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query.toSQL().sql).toEqual('select distinct "id" from "users"'); }); it('random replica selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select distinct "id" from "users"'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select distinct "id" from "users"'); }); it('single read replica selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select distinct "id" from "users"'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(query2.toSQL().sql).toEqual('select distinct "id" from "users"'); }); it('single read replica selectDistinct + primary selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query1.toSQL().sql).toEqual('select distinct "id" from "users"'); const query2 = db.$primary.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select distinct "id" from "users"'); }); it('always first read selectDistinct', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'selectDistinct'); const spyRead1 = vi.spyOn(read1, 'selectDistinct'); const spyRead2 = vi.spyOn(read2, 'selectDistinct'); const query1 = db.selectDistinct().from(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select distinct "id" from "users"'); const query2 = db.selectDistinct().from(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('select distinct "id" from "users"'); }); }); describe('[with] read replicas sqlite', () => { it('primary with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); const obj1 = {} as any; const obj2 = {} as any; const obj3 = {} as any; const obj4 = {} as any; db.$primary.with(obj1, obj2, obj3, obj4); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(obj1, obj2, obj3, obj4); }); it('random replica with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.with(); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); }); it('single read replica with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.with(); expect(spyRead1).toHaveBeenCalledTimes(2); }); it('single read replica with + primary with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); db.with(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$primary.with(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); }); it('always first read with', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, 'with'); const spyRead1 = vi.spyOn(read1, 'with'); const spyRead2 = vi.spyOn(read2, 'with'); const obj1 = {} as any; const obj2 = {} as any; const obj3 = {} as any; db.with(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(obj1); db.with(obj2, obj3); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(obj2, obj3); }); }); describe('[update] replicas sqlite', () => { it('primary update', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'update'); const spyRead1 = vi.spyOn(read1, 'update'); const spyRead2 = vi.spyOn(read2, 'update'); const query1 = db.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('update "users" set "id" = ?'); const query2 = db.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query2.toSQL().sql).toEqual('update "users" set "id" = ?'); const query3 = db.$primary.update(users).set({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query3.toSQL().sql).toEqual('update "users" set "id" = ?'); }); }); describe('[delete] replicas sqlite', () => { it('primary delete', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'delete'); const spyRead1 = vi.spyOn(read1, 'delete'); const spyRead2 = vi.spyOn(read2, 'delete'); const query1 = db.delete(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(users); expect(query1.toSQL().sql).toEqual('delete from "users"'); const query2 = db.delete(users); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, users); expect(query2.toSQL().sql).toEqual('delete from "users"'); db.$primary.delete({} as any); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[insert] replicas sqlite', () => { it('primary insert', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'insert'); const spyRead1 = vi.spyOn(read1, 'insert'); const spyRead2 = vi.spyOn(read2, 'insert'); const query = db.insert(users).values({ id: 1 }); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(users); expect(query.toSQL().sql).toEqual('insert into "users" ("id") values (?)'); db.insert(users); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, users); db.$primary.insert({} as any); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[execute] replicas sqlite', () => { it('primary execute', async () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'all'); const spyRead1 = vi.spyOn(read1, 'all'); const spyRead2 = vi.spyOn(read2, 'all'); expect(db.all(sql``)).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(db.all(sql``)).rejects.toThrow(); // try { // db.execute(sql``); // } catch { /* empty */ } expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(db.all(sql``)).rejects.toThrow(); // try { // db.execute(sql``); // } catch { /* empty */ } expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[transaction] replicas sqlite', () => { it('primary transaction', async () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, 'transaction'); const spyRead1 = vi.spyOn(read1, 'transaction'); const spyRead2 = vi.spyOn(read2, 'transaction'); const txFn1 = async (tx: any) => { tx.select().from({} as any); }; expect(db.transaction(txFn1)).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(txFn1); const txFn2 = async (tx: any) => { tx.select().from({} as any); }; expect(db.transaction(txFn2)).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenNthCalledWith(2, txFn2); expect(db.transaction(async (tx) => { tx.select().from({} as any); })).rejects.toThrow(); expect(spyPrimary).toHaveBeenCalledTimes(3); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[findFirst] read replicas sqlite', () => { it('primary findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); const obj = {} as any; db.$primary.query.usersTable.findFirst(obj); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(obj); }); it('random replica findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); const par1 = {} as any; db.query.usersTable.findFirst(par1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(par1); const query = db.query.usersTable.findFirst(); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query.toSQL().sql).toEqual('select "id", "name", "verified" from "users" "usersTable" limit ?'); }); it('single read replica findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); db.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.query.usersTable.findFirst(); expect(spyRead1).toHaveBeenCalledTimes(2); }); it('single read replica findFirst + primary findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); db.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$primary.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); }); it('always first read findFirst', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findFirst'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findFirst'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findFirst'); db.query.usersTable.findFirst(); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.query.usersTable.findFirst(); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); describe('[findMany] read replicas sqlite', () => { it('primary findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); const obj = {} as any; const query = db.$primary.query.usersTable.findMany(obj); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyPrimary).toHaveBeenCalledWith(obj); expect(query.toSQL().sql).toEqual('select "id", "name", "verified" from "users" "usersTable"'); }); it('random replica findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(query1.toSQL().sql).toEqual('select "id", "name", "verified" from "users" "usersTable"'); expect(spyRead1).toHaveBeenCalledWith(obj1); const query2 = db.query.usersTable.findMany(obj2); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(1); expect(query2.toSQL().sql).toEqual('select "id", "name", "verified" from "users" "usersTable"'); expect(spyRead2).toHaveBeenCalledWith(obj2); }); it('single read replica findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledWith(obj1); expect(query1.toSQL().sql).toEqual('select "id", "name", "verified" from "users" "usersTable"'); const query2 = db.query.usersTable.findMany(obj2); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); expect(query2.toSQL().sql).toEqual('select "id", "name", "verified" from "users" "usersTable"'); }); it('single read replica findMany + primary findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledWith(obj1); expect(query1.toSQL().sql).toEqual('select "id", "name", "verified" from "users" "usersTable"'); const query2 = db.$primary.query.usersTable.findMany(obj2); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyPrimary).toHaveBeenNthCalledWith(1, obj2); expect(query2.toSQL().sql).toEqual('select "id", "name", "verified" from "users" "usersTable"'); }); it('always first read findMany', () => { const primaryDb = drizzle.mock({ schema: { usersTable } }); const read1 = drizzle.mock({ schema: { usersTable } }); const read2 = drizzle.mock({ schema: { usersTable } }); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb['query']['usersTable'], 'findMany'); const spyRead1 = vi.spyOn(read1['query']['usersTable'], 'findMany'); const spyRead2 = vi.spyOn(read2['query']['usersTable'], 'findMany'); const obj1 = {} as any; const obj2 = {} as any; const query1 = db.query.usersTable.findMany(obj1); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledWith(obj1); expect(query1.toSQL().sql).toEqual('select "id", "name", "verified" from "users" "usersTable"'); const query2 = db.query.usersTable.findMany(obj2); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenNthCalledWith(2, obj2); expect(query2.toSQL().sql).toEqual('select "id", "name", "verified" from "users" "usersTable"'); }); }); describe('[$count] read replicas postgres', () => { it('primary $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$primary.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(0); expect(spyRead2).toHaveBeenCalledTimes(0); }); it('random replica $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); const db = withReplicas(primaryDb, [read1, read2], () => { return randomMockReplica(); }); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.select().from(users); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); }); it('single read replica $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$count(users); expect(spyRead1).toHaveBeenCalledTimes(2); }); it('single read replica $count + primary $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const db = withReplicas(primaryDb, [read1]); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); db.$primary.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(1); expect(spyRead1).toHaveBeenCalledTimes(1); }); it('always first read $count', () => { const primaryDb = drizzle.mock(); const read1 = drizzle.mock(); const read2 = drizzle.mock(); const db = withReplicas(primaryDb, [read1, read2], (replicas) => { return replicas[0]!; }); const spyPrimary = vi.spyOn(primaryDb, '$count'); const spyRead1 = vi.spyOn(read1, '$count'); const spyRead2 = vi.spyOn(read2, '$count'); db.$count(users); expect(spyPrimary).toHaveBeenCalledTimes(0); expect(spyRead1).toHaveBeenCalledTimes(1); expect(spyRead2).toHaveBeenCalledTimes(0); db.$count(users); expect(spyRead1).toHaveBeenCalledTimes(2); expect(spyRead2).toHaveBeenCalledTimes(0); }); }); ================================================ FILE: integration-tests/tests/seeder/mysql.test.ts ================================================ import Docker from 'dockerode'; import { sql } from 'drizzle-orm'; import type { MySql2Database } from 'drizzle-orm/mysql2'; import { drizzle } from 'drizzle-orm/mysql2'; import { reset, seed } from 'drizzle-seed'; import getPort from 'get-port'; import type { Connection } from 'mysql2/promise'; import { createConnection } from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; import * as schema from './mysqlSchema.ts'; let mysqlContainer: Docker.Container; let client: Connection; let db: MySql2Database; async function createDockerDB(): Promise { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'mysql:8'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => // eslint-disable-next-line @typescript-eslint/no-unsafe-argument docker.modem.followProgress(pullStream, (err) => err ? reject(err) : resolve(err)) ); mysqlContainer = await docker.createContainer({ Image: image, Env: ['MYSQL_ROOT_PASSWORD=mysql', 'MYSQL_DATABASE=drizzle'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await mysqlContainer.start(); return `mysql://root:mysql@127.0.0.1:${port}/drizzle`; } const createNorthwindTables = async () => { await db.execute( sql` CREATE TABLE \`customer\` ( \`id\` varchar(256) NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text, \`region\` text, \`country\` text NOT NULL, \`phone\` text NOT NULL, \`fax\` text, CONSTRAINT \`customer_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`order_detail\` ( \`unit_price\` float NOT NULL, \`quantity\` int NOT NULL, \`discount\` float NOT NULL, \`order_id\` int NOT NULL, \`product_id\` int NOT NULL ); `, ); await db.execute( sql` CREATE TABLE \`employee\` ( \`id\` int NOT NULL, \`last_name\` text NOT NULL, \`first_name\` text, \`title\` text NOT NULL, \`title_of_courtesy\` text NOT NULL, \`birth_date\` timestamp NOT NULL, \`hire_date\` timestamp NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`home_phone\` text NOT NULL, \`extension\` int NOT NULL, \`notes\` text NOT NULL, \`reports_to\` int, \`photo_path\` text, CONSTRAINT \`employee_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`order\` ( \`id\` int NOT NULL, \`order_date\` timestamp NOT NULL, \`required_date\` timestamp NOT NULL, \`shipped_date\` timestamp, \`ship_via\` int NOT NULL, \`freight\` float NOT NULL, \`ship_name\` text NOT NULL, \`ship_city\` text NOT NULL, \`ship_region\` text, \`ship_postal_code\` text, \`ship_country\` text NOT NULL, \`customer_id\` varchar(256) NOT NULL, \`employee_id\` int NOT NULL, CONSTRAINT \`order_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`product\` ( \`id\` int NOT NULL, \`name\` text NOT NULL, \`quantity_per_unit\` text NOT NULL, \`unit_price\` float NOT NULL, \`units_in_stock\` int NOT NULL, \`units_on_order\` int NOT NULL, \`reorder_level\` int NOT NULL, \`discontinued\` int NOT NULL, \`supplier_id\` int NOT NULL, CONSTRAINT \`product_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` CREATE TABLE \`supplier\` ( \`id\` int NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`region\` text, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`phone\` text NOT NULL, CONSTRAINT \`supplier_id\` PRIMARY KEY(\`id\`) ); `, ); await db.execute( sql` ALTER TABLE \`order_detail\` ADD CONSTRAINT \`order_detail_order_id_order_id_fk\` FOREIGN KEY (\`order_id\`) REFERENCES \`order\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`order_detail\` ADD CONSTRAINT \`order_detail_product_id_product_id_fk\` FOREIGN KEY (\`product_id\`) REFERENCES \`product\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`employee\` ADD CONSTRAINT \`employee_reports_to_employee_id_fk\` FOREIGN KEY (\`reports_to\`) REFERENCES \`employee\`(\`id\`) ON DELETE no action ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`order\` ADD CONSTRAINT \`order_customer_id_customer_id_fk\` FOREIGN KEY (\`customer_id\`) REFERENCES \`customer\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`order\` ADD CONSTRAINT \`order_employee_id_employee_id_fk\` FOREIGN KEY (\`employee_id\`) REFERENCES \`employee\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); await db.execute( sql` ALTER TABLE \`product\` ADD CONSTRAINT \`product_supplier_id_supplier_id_fk\` FOREIGN KEY (\`supplier_id\`) REFERENCES \`supplier\`(\`id\`) ON DELETE cascade ON UPDATE no action; `, ); }; const createAllDataTypesTable = async () => { await db.execute( sql` CREATE TABLE \`all_data_types\` ( \`integer\` int, \`tinyint\` tinyint, \`smallint\` smallint, \`mediumint\` mediumint, \`bigint\` bigint, \`bigint_number\` bigint, \`real\` real, \`decimal\` decimal, \`double\` double, \`float\` float, \`serial\` serial AUTO_INCREMENT, \`binary\` binary(255), \`varbinary\` varbinary(256), \`char\` char(255), \`varchar\` varchar(256), \`text\` text, \`boolean\` boolean, \`date_string\` date, \`date\` date, \`datetime\` datetime, \`datetimeString\` datetime, \`time\` time, \`year\` year, \`timestamp_date\` timestamp, \`timestamp_string\` timestamp, \`json\` json, \`popularity\` enum('unknown','known','popular') ); `, ); }; const createAllGeneratorsTables = async () => { await db.execute( sql` CREATE TABLE \`datetime_table\` ( \`datetime\` datetime ); `, ); await db.execute( sql` CREATE TABLE \`year_table\` ( \`year\` year ); `, ); }; beforeAll(async () => { const connectionString = await createDockerDB(); const sleep = 1000; let timeLeft = 40000; let connected = false; let lastError: unknown | undefined; do { try { client = await createConnection(connectionString); await client.connect(); db = drizzle(client); connected = true; break; } catch (e) { lastError = e; await new Promise((resolve) => setTimeout(resolve, sleep)); timeLeft -= sleep; } } while (timeLeft > 0); if (!connected) { console.error('Cannot connect to MySQL'); await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); throw lastError; } await createNorthwindTables(); await createAllDataTypesTable(); await createAllGeneratorsTables(); }); afterAll(async () => { await client?.end().catch(console.error); await mysqlContainer?.stop().catch(console.error); }); afterEach(async () => { await reset(db, schema); }); test('basic seed test', async () => { await seed(db, schema); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(10); expect(details.length).toBe(10); expect(employees.length).toBe(10); expect(orders.length).toBe(10); expect(products.length).toBe(10); expect(suppliers.length).toBe(10); }); test('seed with options.count:11 test', async () => { await seed(db, schema, { count: 11 }); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(11); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) customers count', async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) all tables count', async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, }, details: { count: 13, }, employees: { count: 14, }, orders: { count: 15, }, products: { count: 16, }, suppliers: { count: 17, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(13); expect(employees.length).toBe(14); expect(orders.length).toBe(15); expect(products.length).toBe(16); expect(suppliers.length).toBe(17); }); test("redefine(refine) orders count using 'with' in customers", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 13, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test("sequential using of 'with'", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 12, with: { details: 3, }, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(24); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); // All data types test ------------------------------- test('basic seed test for all mysql data types', async () => { await seed(db, schema, { count: 1000 }); const allDataTypes = await db.select().from(schema.allDataTypes); // every value in each 10 rows does not equal undefined. const predicate = allDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); // All generators test------------------------------- const count = 10000; test('datetime generator test', async () => { await seed(db, { datetimeTable: schema.datetimeTable }).refine((funcs) => ({ datetimeTable: { count, columns: { datetime: funcs.datetime(), }, }, })); const data = await db.select().from(schema.datetimeTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('year generator test', async () => { await seed(db, { yearTable: schema.yearTable }).refine((funcs) => ({ yearTable: { count, columns: { year: funcs.year(), }, }, })); const data = await db.select().from(schema.yearTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: integration-tests/tests/seeder/mysqlSchema.ts ================================================ import type { AnyMySqlColumn } from 'drizzle-orm/mysql-core'; import { bigint, binary, boolean, char, date, datetime, decimal, double, float, int, json, mediumint, mysqlEnum, mysqlTable, real, serial, smallint, text, time, timestamp, tinyint, varbinary, varchar, year, } from 'drizzle-orm/mysql-core'; export const customers = mysqlTable('customer', { id: varchar('id', { length: 256 }).primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = mysqlTable( 'employee', { id: int('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: timestamp('birth_date').notNull(), hireDate: timestamp('hire_date').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: int('extension').notNull(), notes: text('notes').notNull(), reportsTo: int('reports_to').references((): AnyMySqlColumn => employees.id), photoPath: text('photo_path'), }, ); export const orders = mysqlTable('order', { id: int('id').primaryKey(), orderDate: timestamp('order_date').notNull(), requiredDate: timestamp('required_date').notNull(), shippedDate: timestamp('shipped_date'), shipVia: int('ship_via').notNull(), freight: float('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: varchar('customer_id', { length: 256 }) .notNull() .references(() => customers.id, { onDelete: 'cascade' }), employeeId: int('employee_id') .notNull() .references(() => employees.id, { onDelete: 'cascade' }), }); export const suppliers = mysqlTable('supplier', { id: int('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }); export const products = mysqlTable('product', { id: int('id').primaryKey(), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: float('unit_price').notNull(), unitsInStock: int('units_in_stock').notNull(), unitsOnOrder: int('units_on_order').notNull(), reorderLevel: int('reorder_level').notNull(), discontinued: int('discontinued').notNull(), supplierId: int('supplier_id') .notNull() .references(() => suppliers.id, { onDelete: 'cascade' }), }); export const details = mysqlTable('order_detail', { unitPrice: float('unit_price').notNull(), quantity: int('quantity').notNull(), discount: float('discount').notNull(), orderId: int('order_id') .notNull() .references(() => orders.id, { onDelete: 'cascade' }), productId: int('product_id') .notNull() .references(() => products.id, { onDelete: 'cascade' }), }); // All data types table ------------------------------- export const allDataTypes = mysqlTable('all_data_types', { int: int('integer'), tinyint: tinyint('tinyint'), smallint: smallint('smallint'), mediumint: mediumint('mediumint'), biginteger: bigint('bigint', { mode: 'bigint' }), bigintNumber: bigint('bigint_number', { mode: 'number' }), real: real('real'), decimal: decimal('decimal'), double: double('double'), float: float('float'), serial: serial('serial'), binary: binary('binary', { length: 255 }), varbinary: varbinary('varbinary', { length: 256 }), char: char('char', { length: 255 }), varchar: varchar('varchar', { length: 256 }), text: text('text'), boolean: boolean('boolean'), dateString: date('date_string', { mode: 'string' }), date: date('date', { mode: 'date' }), datetime: datetime('datetime', { mode: 'date' }), datetimeString: datetime('datetimeString', { mode: 'string' }), time: time('time'), year: year('year'), timestampDate: timestamp('timestamp_date', { mode: 'date' }), timestampString: timestamp('timestamp_string', { mode: 'string' }), json: json('json'), mysqlEnum: mysqlEnum('popularity', ['unknown', 'known', 'popular']), }); // All generators tables ------------------------------- export const datetimeTable = mysqlTable('datetime_table', { datetime: datetime('datetime'), }); export const yearTable = mysqlTable('year_table', { year: year('year'), }); ================================================ FILE: integration-tests/tests/seeder/pg.test.ts ================================================ import { PGlite } from '@electric-sql/pglite'; import { sql } from 'drizzle-orm'; import type { PgliteDatabase } from 'drizzle-orm/pglite'; import { drizzle } from 'drizzle-orm/pglite'; import { cities, countries, firstNames, lastNames, reset, seed } from 'drizzle-seed'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; import * as schema from './pgSchema.ts'; let client: PGlite; let db: PgliteDatabase; const createNorthwindTables = async () => { await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."customer" ( "id" varchar(256) PRIMARY KEY NOT NULL, "company_name" text NOT NULL, "contact_name" text NOT NULL, "contact_title" text NOT NULL, "address" text NOT NULL, "city" text NOT NULL, "postal_code" text, "region" text, "country" text NOT NULL, "phone" text NOT NULL, "fax" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."order_detail" ( "unit_price" numeric NOT NULL, "quantity" integer NOT NULL, "discount" numeric NOT NULL, "order_id" integer NOT NULL, "product_id" integer NOT NULL ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."employee" ( "id" integer PRIMARY KEY NOT NULL, "last_name" text NOT NULL, "first_name" text, "title" text NOT NULL, "title_of_courtesy" text NOT NULL, "birth_date" timestamp NOT NULL, "hire_date" timestamp NOT NULL, "address" text NOT NULL, "city" text NOT NULL, "postal_code" text NOT NULL, "country" text NOT NULL, "home_phone" text NOT NULL, "extension" integer NOT NULL, "notes" text NOT NULL, "reports_to" integer, "photo_path" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."order" ( "id" integer PRIMARY KEY NOT NULL, "order_date" timestamp NOT NULL, "required_date" timestamp NOT NULL, "shipped_date" timestamp, "ship_via" integer NOT NULL, "freight" numeric NOT NULL, "ship_name" text NOT NULL, "ship_city" text NOT NULL, "ship_region" text, "ship_postal_code" text, "ship_country" text NOT NULL, "customer_id" text NOT NULL, "employee_id" integer NOT NULL ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."product" ( "id" integer PRIMARY KEY NOT NULL, "name" text NOT NULL, "quantity_per_unit" text NOT NULL, "unit_price" numeric NOT NULL, "units_in_stock" integer NOT NULL, "units_on_order" integer NOT NULL, "reorder_level" integer NOT NULL, "discontinued" integer NOT NULL, "supplier_id" integer NOT NULL ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."supplier" ( "id" integer PRIMARY KEY NOT NULL, "company_name" text NOT NULL, "contact_name" text NOT NULL, "contact_title" text NOT NULL, "address" text NOT NULL, "city" text NOT NULL, "region" text, "postal_code" text NOT NULL, "country" text NOT NULL, "phone" text NOT NULL ); `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."order_detail" ADD CONSTRAINT "order_detail_order_id_order_id_fk" FOREIGN KEY ("order_id") REFERENCES "seeder_lib_pg"."order"("id") ON DELETE cascade ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."order_detail" ADD CONSTRAINT "order_detail_product_id_product_id_fk" FOREIGN KEY ("product_id") REFERENCES "seeder_lib_pg"."product"("id") ON DELETE cascade ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."employee" ADD CONSTRAINT "employee_reports_to_employee_id_fk" FOREIGN KEY ("reports_to") REFERENCES "seeder_lib_pg"."employee"("id") ON DELETE no action ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."order" ADD CONSTRAINT "order_customer_id_customer_id_fk" FOREIGN KEY ("customer_id") REFERENCES "seeder_lib_pg"."customer"("id") ON DELETE cascade ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."order" ADD CONSTRAINT "order_employee_id_employee_id_fk" FOREIGN KEY ("employee_id") REFERENCES "seeder_lib_pg"."employee"("id") ON DELETE cascade ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` DO $$ BEGIN ALTER TABLE "seeder_lib_pg"."product" ADD CONSTRAINT "product_supplier_id_supplier_id_fk" FOREIGN KEY ("supplier_id") REFERENCES "seeder_lib_pg"."supplier"("id") ON DELETE cascade ON UPDATE no action; EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); }; const createAllDataTypesTable = async () => { await db.execute( sql` DO $$ BEGIN CREATE TYPE "seeder_lib_pg"."mood_enum" AS ENUM('sad', 'ok', 'happy'); EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."all_data_types" ( "integer" integer, "smallint" smallint, "bigint" bigint, "bigint_number" bigint, "serial" serial, "smallserial" smallserial, "bigserial" bigserial, "bigserial_number" bigserial, "boolean" boolean, "text" text, "varchar" varchar(256), "char" char(256), "numeric" numeric, "decimal" numeric, "real" real, "double_precision" double precision, "json" json, "jsonb" jsonb, "time" time, "timestamp_date" timestamp, "timestamp_string" timestamp, "date_string" date, "date" date, "interval" interval, "point" "point", "point_tuple" "point", "line" "line", "line_tuple" "line", "mood_enum" "seeder_lib_pg"."mood_enum", "uuid" "uuid" ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."all_array_data_types" ( "integer_array" integer[], "smallint_array" smallint[], "bigint_array" bigint[], "bigint_number_array" bigint[], "boolean_array" boolean[], "text_array" text[], "varchar_array" varchar(256)[], "char_array" char(256)[], "numeric_array" numeric[], "decimal_array" numeric[], "real_array" real[], "double_precision_array" double precision[], "json_array" json[], "jsonb_array" jsonb[], "time_array" time[], "timestamp_date_array" timestamp[], "timestamp_string_array" timestamp[], "date_string_array" date[], "date_array" date[], "interval_array" interval[], "point_array" "point"[], "point_tuple_array" "point"[], "line_array" "line"[], "line_tuple_array" "line"[], "mood_enum_array" "seeder_lib_pg"."mood_enum"[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."nd_arrays" ( "integer_1d_array" integer[3], "integer_2d_array" integer[3][4], "integer_3d_array" integer[3][4][5], "integer_4d_array" integer[3][4][5][6] ); `, ); }; const createAllGeneratorsTables = async () => { await db.execute( sql` DO $$ BEGIN CREATE TYPE "seeder_lib_pg"."enum" AS ENUM('sad', 'ok', 'happy'); EXCEPTION WHEN duplicate_object THEN null; END $$; `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."default_table" ( "default_string" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."default_array_table" ( "default_string" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."boolean_table" ( "boolean" boolean ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."boolean_array_table" ( "boolean" boolean[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."city_table" ( "city" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."city_unique_table" ( "city_unique" varchar(256), CONSTRAINT "city_unique_table_city_unique_unique" UNIQUE("city_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."city_array_table" ( "city" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."company_name_table" ( "company_name" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."company_name_unique_table" ( "company_name_unique" varchar(256), CONSTRAINT "company_name_unique_table_company_name_unique_unique" UNIQUE("company_name_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."company_name_array_table" ( "company_name" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."country_table" ( "country" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."country_unique_table" ( "country_unique" varchar(256), CONSTRAINT "country_unique_table_country_unique_unique" UNIQUE("country_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."country_array_table" ( "country" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."date_table" ( "date" date ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."date_array_table" ( "date" date[], "date_string" date[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."email_table" ( "email" varchar(256), CONSTRAINT "email_table_email_unique" UNIQUE("email") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."email_array_table" ( "email" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."enum_table" ( "mood_enum" "seeder_lib_pg"."enum" ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."first_name_table" ( "first_name" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."first_name_unique_table" ( "first_name_unique" varchar(256), CONSTRAINT "first_name_unique_table_first_name_unique_unique" UNIQUE("first_name_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."first_name_array_table" ( "first_name" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."full_name__table" ( "full_name_" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."full_name_unique_table" ( "full_name_unique" varchar(256), CONSTRAINT "full_name_unique_table_full_name_unique_unique" UNIQUE("full_name_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."full_name_array_table" ( "full_name" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."int_primary_key_table" ( "int_primary_key" integer, CONSTRAINT "int_primary_key_table_int_primary_key_unique" UNIQUE("int_primary_key") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."int_table" ( "int" integer ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."int_unique_table" ( "int_unique" integer, CONSTRAINT "int_unique_table_int_unique_unique" UNIQUE("int_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."int_array_table" ( "int" integer[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."interval_table" ( "interval" interval ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."interval_unique_table" ( "interval_unique" interval, CONSTRAINT "interval_unique_table_interval_unique_unique" UNIQUE("interval_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."interval_array_table" ( "interval" interval[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."job_title_table" ( "job_title" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."job_title_array_table" ( "job_title" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."json_table" ( "json" json ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."json_array_table" ( "json" json[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."last_name_table" ( "last_name" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."last_name_unique_table" ( "last_name_unique" varchar(256), CONSTRAINT "last_name_unique_table_last_name_unique_unique" UNIQUE("last_name_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."last_name_array_table" ( "last_name" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."line_table" ( "line" "line" ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."line_array_table" ( "line" "line"[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."lorem_ipsum_table" ( "lorem_ipsum" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."lorem_ipsum_array_table" ( "lorem_ipsum" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."number_table" ( "number" real ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."number_unique_table" ( "number_unique" real, CONSTRAINT "number_unique_table_number_unique_unique" UNIQUE("number_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."number_array_table" ( "number" real[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."phone_number_table" ( "phoneNumber" varchar(256), "phone_number_template" varchar(256), "phone_number_prefixes" varchar(256), CONSTRAINT "phone_number_table_phoneNumber_unique" UNIQUE("phoneNumber"), CONSTRAINT "phone_number_table_phone_number_template_unique" UNIQUE("phone_number_template"), CONSTRAINT "phone_number_table_phone_number_prefixes_unique" UNIQUE("phone_number_prefixes") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."phone_number_array_table" ( "phoneNumber" varchar(256)[], "phone_number_template" varchar(256)[], "phone_number_prefixes" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."point_table" ( "point" "point" ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."point_array_table" ( "point" "point"[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."postcode_table" ( "postcode" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."postcode_unique_table" ( "postcode_unique" varchar(256), CONSTRAINT "postcode_unique_table_postcode_unique_unique" UNIQUE("postcode_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."postcode_array_table" ( "postcode" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."state_table" ( "state" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."state_array_table" ( "state" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."street_address_table" ( "street_address" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."street_address_unique_table" ( "street_address_unique" varchar(256), CONSTRAINT "street_address_unique_table_street_address_unique_unique" UNIQUE("street_address_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."street_address_array_table" ( "street_address" varchar(256)[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."string_table" ( "string" text ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."string_unique_table" ( "string_unique" varchar(256), CONSTRAINT "string_unique_table_string_unique_unique" UNIQUE("string_unique") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."string_array_table" ( "string" text[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."time_table" ( "time" time ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."time_array_table" ( "time" time[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."timestamp_table" ( "timestamp" timestamp ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."timestamp_array_table" ( "timestamp" timestamp[] ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."values_from_array_table" ( "values_from_array_not_null" varchar(256) NOT NULL, "values_from_array_weighted_not_null" varchar(256) NOT NULL ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."values_from_array_unique_table" ( "values_from_array" varchar(256), "values_from_array_not_null" varchar(256) NOT NULL, "values_from_array_weighted" varchar(256), "values_from_array_weighted_not_null" varchar(256) NOT NULL, CONSTRAINT "values_from_array_unique_table_values_from_array_unique" UNIQUE("values_from_array"), CONSTRAINT "values_from_array_unique_table_values_from_array_not_null_unique" UNIQUE("values_from_array_not_null"), CONSTRAINT "values_from_array_unique_table_values_from_array_weighted_unique" UNIQUE("values_from_array_weighted"), CONSTRAINT "values_from_array_unique_table_values_from_array_weighted_not_null_unique" UNIQUE("values_from_array_weighted_not_null") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."values_from_array_array_table" ( "values_from_array" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."weighted_random_table" ( "weighted_random" varchar(256) ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."weighted_random_with_unique_gens_table" ( "weighted_random_with_unique_gens" varchar(256), CONSTRAINT "weighted_random_with_unique_gens_table_weighted_random_with_unique_gens_unique" UNIQUE("weighted_random_with_unique_gens") ); `, ); await db.execute( sql` CREATE TABLE IF NOT EXISTS "seeder_lib_pg"."identity_columns_table" ( "id" integer GENERATED ALWAYS AS IDENTITY, "id1" integer, "name" text ); `, ); }; beforeAll(async () => { client = new PGlite(); db = drizzle(client); await db.execute(sql`CREATE SCHEMA IF NOT EXISTS "seeder_lib_pg";`); await createNorthwindTables(); await createAllDataTypesTable(); await createAllGeneratorsTables(); }); afterEach(async () => { await reset(db, schema); }); afterAll(async () => { await client.close(); }); test('basic seed test', async () => { const currSchema = { customers: schema.customers, details: schema.details, employees: schema.employees, orders: schema.orders, products: schema.products, suppliers: schema.suppliers, }; await seed(db, currSchema); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(10); expect(details.length).toBe(10); expect(employees.length).toBe(10); expect(orders.length).toBe(10); expect(products.length).toBe(10); expect(suppliers.length).toBe(10); }); test('seed with options.count:11 test', async () => { const currSchema = { customers: schema.customers, details: schema.details, employees: schema.employees, orders: schema.orders, products: schema.products, suppliers: schema.suppliers, }; await seed(db, currSchema, { count: 11 }); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(11); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) customers count', async () => { const currSchema = { customers: schema.customers, details: schema.details, employees: schema.employees, orders: schema.orders, products: schema.products, suppliers: schema.suppliers, }; await seed(db, currSchema, { count: 11 }).refine(() => ({ customers: { count: 12, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) all tables count', async () => { const currSchema = { customers: schema.customers, details: schema.details, employees: schema.employees, orders: schema.orders, products: schema.products, suppliers: schema.suppliers, }; await seed(db, currSchema, { count: 11 }).refine(() => ({ customers: { count: 12, }, details: { count: 13, }, employees: { count: 14, }, orders: { count: 15, }, products: { count: 16, }, suppliers: { count: 17, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(13); expect(employees.length).toBe(14); expect(orders.length).toBe(15); expect(products.length).toBe(16); expect(suppliers.length).toBe(17); }); test("redefine(refine) orders count using 'with' in customers", async () => { const currSchema = { customers: schema.customers, details: schema.details, employees: schema.employees, orders: schema.orders, products: schema.products, suppliers: schema.suppliers, }; await seed(db, currSchema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 13, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test("sequential using of 'with'", async () => { const currSchema = { customers: schema.customers, details: schema.details, employees: schema.employees, orders: schema.orders, products: schema.products, suppliers: schema.suppliers, }; await seed(db, currSchema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 12, with: { details: 3, }, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(24); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('seeding with identity columns', async () => { await seed(db, { identityColumnsTable: schema.identityColumnsTable }); const result = await db.select().from(schema.identityColumnsTable); expect(result.length).toBe(10); }); // All data types test ------------------------------- test('basic seed test for all postgres data types', async () => { await seed(db, { allDataTypes: schema.allDataTypes }, { count: 10000 }); const allDataTypes = await db.select().from(schema.allDataTypes); // every value in each 10 rows does not equal undefined. const predicate = allDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('all array data types test', async () => { await seed(db, { allArrayDataTypes: schema.allArrayDataTypes }, { count: 1000 }); const allArrayDataTypes = await db.select().from(schema.allArrayDataTypes); // every value in each rows does not equal undefined. const predicate = allArrayDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 10) ); expect(predicate).toBe(true); }); test('nd arrays', async () => { await seed(db, { ndArrays: schema.ndArrays }, { count: 1000 }); const ndArrays = await db.select().from(schema.ndArrays); // every value in each rows does not equal undefined. const predicate0 = ndArrays.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length !== 0) ); let predicate1 = true, predicate2 = true, predicate3 = true, predicate4 = true; for (const row of ndArrays) { predicate1 = predicate1 && (row.integer1DArray?.length === 3); predicate2 = predicate2 && (row.integer2DArray?.length === 4) && (row.integer2DArray[0]?.length === 3); predicate3 = predicate3 && (row.integer3DArray?.length === 5) && (row.integer3DArray[0]?.length === 4) && (row.integer3DArray[0][0]?.length === 3); predicate4 = predicate4 && (row.integer4DArray?.length === 6) && (row.integer4DArray[0]?.length === 5) && (row.integer4DArray[0][0]?.length === 4) && (row.integer4DArray[0][0][0]?.length === 3); } expect(predicate0 && predicate1 && predicate2 && predicate3 && predicate4).toBe(true); }); // All generators test------------------------------- const count = 1000; test('enum generator test', async () => { await seed(db, { enumTable: schema.enumTable }).refine(() => ({ enumTable: { count, }, })); const data = await db.select().from(schema.enumTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('default generator test', async () => { await seed(db, { defaultTable: schema.defaultTable }).refine((funcs) => ({ defaultTable: { count, columns: { defaultString: funcs.default({ defaultValue: 'default string' }), }, }, })); const data = await db.select().from(schema.defaultTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('default array generator test', async () => { await seed(db, { defaultTable: schema.defaultArrayTable }).refine((funcs) => ({ defaultTable: { count, columns: { defaultString: funcs.default({ defaultValue: 'default string', arraySize: 3 }), }, }, })); const data = await db.select().from(schema.defaultArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('valuesFromArray generator test', async () => { await seed(db, { valuesFromArrayTable: schema.valuesFromArrayTable }).refine((funcs) => ({ valuesFromArrayTable: { count, columns: { valuesFromArrayNotNull: funcs.valuesFromArray({ values: lastNames }), valuesFromArrayWeightedNotNull: funcs.valuesFromArray({ values: [ { values: lastNames, weight: 0.3 }, { values: firstNames, weight: 0.7 }, ], }), }, }, })); const data = await db.select().from(schema.valuesFromArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('valuesFromArray unique generator test', async () => { // valuesFromArrayUniqueTable----------------------------------------------------------------------------------- await seed(db, { valuesFromArrayUniqueTable: schema.valuesFromArrayUniqueTable }, { seed: 1 }).refine((funcs) => ({ valuesFromArrayUniqueTable: { count: 49998, columns: { valuesFromArray: funcs.valuesFromArray({ values: lastNames.slice(0, 20), isUnique: true }), valuesFromArrayNotNull: funcs.valuesFromArray({ values: lastNames, isUnique: true }), valuesFromArrayWeighted: funcs.valuesFromArray({ values: [ { values: lastNames.slice(0, 20000), weight: 0.3 }, { values: lastNames.slice(20000), weight: 0.7 }, ], isUnique: true, }), valuesFromArrayWeightedNotNull: funcs.valuesFromArray({ values: [ { values: lastNames.slice(0, 14920), weight: 0.3 }, { values: lastNames.slice(14920), weight: 0.7 }, ], isUnique: true, }), }, }, })); const data = await db.select().from(schema.valuesFromArrayUniqueTable); // console.log(valuesFromArrayUniqueTableData); const predicate = data.length !== 0 && data.every((row) => row['valuesFromArrayWeightedNotNull'] !== null && row['valuesFromArrayNotNull'] !== null ); expect(predicate).toBe(true); await expect( seed(db, { valuesFromArrayUniqueTable: schema.valuesFromArrayUniqueTable }).refine((funcs) => ({ valuesFromArrayUniqueTable: { count: 49998, columns: { valuesFromArrayWeightedNotNull: funcs.valuesFromArray({ values: [ { values: lastNames.slice(0, 20000), weight: 0.3 }, { values: lastNames.slice(20000), weight: 0.7 }, ], isUnique: true, }), }, }, })), ).rejects.toThrow( /^weighted values arrays is too small to generate values with specified probability for unique not null column\..+/, ); await expect( seed(db, { valuesFromArrayUniqueTable: schema.valuesFromArrayUniqueTable }).refine((funcs) => ({ valuesFromArrayUniqueTable: { count: 49998, columns: { valuesFromArrayNotNull: funcs.valuesFromArray({ values: lastNames.slice(20), isUnique: true, }), }, }, })), ).rejects.toThrow('There are no enough values to fill unique column.'); await expect( seed(db, { valuesFromArrayUniqueTable: schema.valuesFromArrayUniqueTable }, { seed: 1 }).refine((funcs) => ({ valuesFromArrayUniqueTable: { count: 49999, columns: { valuesFromArrayNotNull: funcs.valuesFromArray({ values: lastNames, isUnique: true, }), valuesFromArrayWeightedNotNull: funcs.valuesFromArray({ values: [ { values: lastNames.slice(0, 14854), weight: 0.3 }, { values: lastNames.slice(14854), weight: 0.7 }, ], isUnique: true, }), }, }, })), ).rejects.toThrow('There are no enough values to fill unique column.'); }); test('valuesFromArray array generator test', async () => { await seed(db, { valuesFromArrayTable: schema.valuesFromArrayArrayTable }).refine((funcs) => ({ valuesFromArrayTable: { count, columns: { valuesFromArray: funcs.valuesFromArray({ values: lastNames, arraySize: 3 }), }, }, })); const data = await db.select().from(schema.valuesFromArrayArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('intPrimaryKey generator test', async () => { await seed(db, { intPrimaryKeyTable: schema.intPrimaryKeyTable }).refine((funcs) => ({ intPrimaryKeyTable: { count, columns: { intPrimaryKey: funcs.intPrimaryKey(), }, }, })); const data = await db.select().from(schema.intPrimaryKeyTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('number generator test', async () => { await seed(db, { numberTable: schema.numberTable }).refine((funcs) => ({ numberTable: { count, columns: { number: funcs.number(), }, }, })); const data = await db.select().from(schema.numberTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('number unique generator test', async () => { // numberUniqueTable----------------------------------------------------------------------------------- await seed(db, { numberUniqueTable: schema.numberUniqueTable }).refine((funcs) => ({ numberUniqueTable: { count: 20070, columns: { numberUnique: funcs.number({ isUnique: true, minValue: -100.23, maxValue: 100.46 }), }, }, })); const data = await db.select().from(schema.numberUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val >= -100.23 && val <= 100.46) ); expect(predicate).toBe(true); await expect( seed(db, { numberUniqueTable: schema.numberUniqueTable }).refine((funcs) => ({ numberUniqueTable: { count: 20071, columns: { numberUnique: funcs.number({ isUnique: true, minValue: -100.23, maxValue: 100.46 }), }, }, })), ).rejects.toThrow('count exceeds max number of unique integers in given range(min, max), try to make range wider.'); }); test('number array generator test', async () => { await seed(db, { numberTable: schema.numberArrayTable }).refine((funcs) => ({ numberTable: { count, columns: { number: funcs.number({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.numberArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('int generator test', async () => { await seed(db, { intTable: schema.intTable }).refine((funcs) => ({ intTable: { count, columns: { int: funcs.int(), }, }, })); const data = await db.select().from(schema.intTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('int unique generator test', async () => { // intUniqueTable----------------------------------------------------------------------------------- await seed(db, { intUniqueTable: schema.intUniqueTable }).refine((funcs) => ({ intUniqueTable: { count: 201, columns: { intUnique: funcs.int({ isUnique: true, minValue: -100, maxValue: 100 }), }, }, })); const data = await db.select().from(schema.intUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { intUniqueTable: schema.intUniqueTable }).refine((funcs) => ({ intUniqueTable: { count: 202, columns: { intUnique: funcs.int({ isUnique: true, minValue: -100, maxValue: 100 }), }, }, })), ).rejects.toThrow('count exceeds max number of unique integers in given range(min, max), try to make range wider.'); }); test('int array generator test', async () => { await seed(db, { intTable: schema.intArrayTable }).refine((funcs) => ({ intTable: { count, columns: { int: funcs.int({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.intArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('boolean generator test', async () => { await seed(db, { booleanTable: schema.booleanTable }).refine((funcs) => ({ booleanTable: { count, columns: { boolean: funcs.boolean(), }, }, })); const data = await db.select().from(schema.booleanTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('boolean array generator test', async () => { await seed(db, { booleanTable: schema.booleanArrayTable }).refine((funcs) => ({ booleanTable: { count, columns: { boolean: funcs.boolean({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.booleanArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('date generator test', async () => { await seed(db, { dateTable: schema.dateTable }).refine((funcs) => ({ dateTable: { count, columns: { date: funcs.date(), }, }, })); const data = await db.select().from(schema.dateTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('date array generator test', async () => { await seed(db, { dateTable: schema.dateArrayTable }).refine((funcs) => ({ dateTable: { count, columns: { date: funcs.date({ arraySize: 3 }), dateString: funcs.date({ arraySize: 4 }), }, }, })); const data = await db.select().from(schema.dateArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && [3, 4].includes(val.length)) ); expect(predicate).toBe(true); }); test('time generator test', async () => { await seed(db, { timeTable: schema.timeTable }).refine((funcs) => ({ timeTable: { count, columns: { time: funcs.time(), }, }, })); const data = await db.select().from(schema.timeTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('time array generator test', async () => { await seed(db, { timeTable: schema.timeArrayTable }).refine((funcs) => ({ timeTable: { count, columns: { time: funcs.time({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.timeArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('timestamp generator test', async () => { await seed(db, { timestampTable: schema.timestampTable }).refine((funcs) => ({ timestampTable: { count, columns: { timestamp: funcs.timestamp(), }, }, })); const data = await db.select().from(schema.timestampTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('timestamp array generator test', async () => { await seed(db, { timestampTable: schema.timestampArrayTable }).refine((funcs) => ({ timestampTable: { count, columns: { timestamp: funcs.timestamp({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.timestampArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('json generator test', async () => { await seed(db, { jsonTable: schema.jsonTable }).refine((funcs) => ({ jsonTable: { count, columns: { json: funcs.json(), }, }, })); const data = await db.select().from(schema.jsonTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('json array generator test', async () => { await seed(db, { jsonTable: schema.jsonArrayTable }).refine((funcs) => ({ jsonTable: { count, columns: { json: funcs.json({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.jsonArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('interval generator test', async () => { await seed(db, { intervalTable: schema.intervalTable }).refine((funcs) => ({ intervalTable: { count, columns: { interval: funcs.interval(), }, }, })); const data = await db.select().from(schema.intervalTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('interval unique generator test', async () => { // intervalUniqueTable----------------------------------------------------------------------------------- await seed(db, { intervalUniqueTable: schema.intervalUniqueTable }).refine((funcs) => ({ intervalUniqueTable: { count, columns: { intervalUnique: funcs.interval({ isUnique: true }), }, }, })); const data = await db.select().from(schema.intervalUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('interval array generator test', async () => { await seed(db, { intervalTable: schema.intervalArrayTable }).refine((funcs) => ({ intervalTable: { count, columns: { interval: funcs.interval({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.intervalArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('string generator test', async () => { await seed(db, { stringTable: schema.stringTable }).refine((funcs) => ({ stringTable: { count, columns: { string: funcs.string(), }, }, })); const data = await db.select().from(schema.stringTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('string unique generator test', async () => { await seed(db, { stringUniqueTable: schema.stringUniqueTable }).refine((funcs) => ({ stringUniqueTable: { count, columns: { stringUnique: funcs.string({ isUnique: true }), }, }, })); const data = await db.select().from(schema.stringUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('string array generator test', async () => { await seed(db, { stringTable: schema.stringArrayTable }).refine((funcs) => ({ stringTable: { count, columns: { string: funcs.string({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.stringArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('email generator test', async () => { await seed(db, { emailTable: schema.emailTable }).refine((funcs) => ({ emailTable: { count, columns: { email: funcs.email(), }, }, })); const data = await db.select().from(schema.emailTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('email array generator test', async () => { await seed(db, { emailTable: schema.emailArrayTable }).refine((funcs) => ({ emailTable: { count, columns: { email: funcs.email({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.emailArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('firstName generator test', async () => { await seed(db, { firstNameTable: schema.firstNameTable }).refine((funcs) => ({ firstNameTable: { count, columns: { firstName: funcs.firstName(), }, }, })); const data = await db.select().from(schema.firstNameTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('firstName unique generator test', async () => { // firstNameUniqueTable----------------------------------------------------------------------------------- await seed(db, { firstNameUniqueTable: schema.firstNameUniqueTable }).refine((funcs) => ({ firstNameUniqueTable: { count: 30274, columns: { firstNameUnique: funcs.firstName({ isUnique: true }), }, }, })); const data = await db.select().from(schema.firstNameUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { firstNameUniqueTable: schema.firstNameUniqueTable }, { count: 30275 }).refine((funcs) => ({ firstNameUniqueTable: { count: 30275, columns: { firstNameUnique: funcs.firstName({ isUnique: true }), }, }, })), ).rejects.toThrow('count exceeds max number of unique first names.'); }); test('firstName array generator test', async () => { await seed(db, { firstNameTable: schema.firstNameArrayTable }).refine((funcs) => ({ firstNameTable: { count, columns: { firstName: funcs.firstName({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.firstNameArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('lastName generator test', async () => { await seed(db, { lastNameTable: schema.lastNameTable }).refine((funcs) => ({ lastNameTable: { count, columns: { lastName: funcs.lastName(), }, }, })); const data = await db.select().from(schema.lastNameTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('lastName unique generator test', async () => { // lastNameUniqueTable----------------------------------------------------------------------------------- await seed(db, { lastNameUniqueTable: schema.lastNameUniqueTable }).refine((funcs) => ({ lastNameUniqueTable: { count: 49998, columns: { lastNameUnique: funcs.lastName({ isUnique: true }), }, }, })); const data = await db.select().from(schema.lastNameUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { lastNameUniqueTable: schema.lastNameUniqueTable }).refine((funcs) => ({ lastNameUniqueTable: { count: 49999, columns: { lastNameUnique: funcs.lastName({ isUnique: true }), }, }, })), ).rejects.toThrow('count exceeds max number of unique last names.'); }); test('lastName array generator test', async () => { await seed(db, { lastNameTable: schema.lastNameArrayTable }).refine((funcs) => ({ lastNameTable: { count, columns: { lastName: funcs.lastName({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.lastNameArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('fullName generator test', async () => { await seed(db, { fullNameTable: schema.fullNameTable }).refine((funcs) => ({ fullNameTable: { count, columns: { fullName: funcs.fullName(), }, }, })); const data = await db.select().from(schema.fullNameTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('fullName unique generator test', async () => { // fullNameUniqueTable----------------------------------------------------------------------------------- await seed(db, { fullNameUniqueTable: schema.fullNameUniqueTable }).refine((funcs) => ({ fullNameUniqueTable: { count, columns: { fullNameUnique: funcs.fullName({ isUnique: true }), }, }, })); const data = await db.select().from(schema.fullNameUniqueTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('fullName array generator test', async () => { await seed(db, { fullNameTable: schema.fullNameArrayTable }).refine((funcs) => ({ fullNameTable: { count, columns: { fullName: funcs.fullName({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.fullNameArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('country generator test', async () => { await seed(db, { countryTable: schema.countryTable }).refine((funcs) => ({ countryTable: { count, columns: { country: funcs.country(), }, }, })); const data = await db.select().from(schema.countryTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('country unique generator test', async () => { // countryUniqueTable----------------------------------------------------------------------------------- await seed(db, { countryUniqueTable: schema.countryUniqueTable }).refine((funcs) => ({ countryUniqueTable: { count: countries.length, columns: { countryUnique: funcs.country({ isUnique: true }), }, }, })); const data = await db.select().from(schema.countryUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { countryUniqueTable: schema.countryUniqueTable }).refine((funcs) => ({ countryUniqueTable: { count: countries.length + 1, columns: { countryUnique: funcs.country({ isUnique: true }), }, }, })), ).rejects.toThrow('count exceeds max number of unique countries.'); }); test('country array generator test', async () => { await seed(db, { countryTable: schema.countryArrayTable }).refine((funcs) => ({ countryTable: { count, columns: { country: funcs.country({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.countryArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('city generator test', async () => { await seed(db, { cityTable: schema.cityTable }).refine((funcs) => ({ cityTable: { count, columns: { city: funcs.city(), }, }, })); const data = await db.select().from(schema.cityTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('city unique generator test', async () => { // cityUniqueTable----------------------------------------------------------------------------------- await reset(db, { cityUniqueTable: schema.cityUniqueTable }); await seed(db, { cityUniqueTable: schema.cityUniqueTable }).refine((funcs) => ({ cityUniqueTable: { count: cities.length, columns: { cityUnique: funcs.city({ isUnique: true }), }, }, })); const data = await db.select().from(schema.cityUniqueTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { cityUniqueTable: schema.cityUniqueTable }).refine((funcs) => ({ cityUniqueTable: { count: cities.length + 1, columns: { cityUnique: funcs.city({ isUnique: true }), }, }, })), ).rejects.toThrow('count exceeds max number of unique cities.'); }); test('city array generator test', async () => { await seed(db, { cityTable: schema.cityArrayTable }).refine((funcs) => ({ cityTable: { count, columns: { city: funcs.city({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.cityArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('streetAddress generator test', async () => { await seed(db, { streetAddressTable: schema.streetAddressTable }).refine((funcs) => ({ streetAddressTable: { count, columns: { streetAddress: funcs.streetAddress(), }, }, })); const data = await db.select().from(schema.streetAddressTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('streetAddress unique generator test', async () => { await seed(db, { streetAddressUniqueTable: schema.streetAddressUniqueTable }).refine((funcs) => ({ streetAddressUniqueTable: { count, columns: { streetAddressUnique: funcs.streetAddress({ isUnique: true }), }, }, })); const data = await db.select().from(schema.streetAddressUniqueTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('streetAddress array generator test', async () => { await seed(db, { streetAddressTable: schema.streetAddressArrayTable }).refine((funcs) => ({ streetAddressTable: { count, columns: { streetAddress: funcs.streetAddress({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.streetAddressArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('jobTitle generator test', async () => { await seed(db, { jobTitleTable: schema.jobTitleTable }).refine((funcs) => ({ jobTitleTable: { count, columns: { jobTitle: funcs.jobTitle(), }, }, })); const data = await db.select().from(schema.jobTitleTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('jobTitle array generator test', async () => { await seed(db, { jobTitleTable: schema.jobTitleArrayTable }).refine((funcs) => ({ jobTitleTable: { count, columns: { jobTitle: funcs.jobTitle({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.jobTitleArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('postcode generator test', async () => { await seed(db, { postcodeTable: schema.postcodeTable }).refine((funcs) => ({ postcodeTable: { count, columns: { postcode: funcs.postcode(), }, }, })); const data = await db.select().from(schema.postcodeTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('postcode unique generator test', async () => { await seed(db, { postcodeUniqueTable: schema.postcodeUniqueTable }).refine((funcs) => ({ postcodeUniqueTable: { count, columns: { postcodeUnique: funcs.postcode({ isUnique: true }), }, }, })); const data = await db.select().from(schema.postcodeUniqueTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('postcode array generator test', async () => { await seed(db, { postcodeTable: schema.postcodeArrayTable }).refine((funcs) => ({ postcodeTable: { count, columns: { postcode: funcs.postcode({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.postcodeArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('state generator test', async () => { await seed(db, { stateTable: schema.stateTable }).refine((funcs) => ({ stateTable: { count, columns: { state: funcs.state(), }, }, })); const data = await db.select().from(schema.stateTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('state array generator test', async () => { await seed(db, { stateTable: schema.stateArrayTable }).refine((funcs) => ({ stateTable: { count, columns: { state: funcs.state({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.stateArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('companyName generator test', async () => { await seed(db, { companyNameTable: schema.companyNameTable }).refine((funcs) => ({ companyNameTable: { count, columns: { companyName: funcs.companyName(), }, }, })); const data = await db.select().from(schema.companyNameTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('companyName unique generator test', async () => { await seed(db, { companyNameUniqueTable: schema.companyNameUniqueTable }).refine((funcs) => ({ companyNameUniqueTable: { count, columns: { companyNameUnique: funcs.companyName({ isUnique: true }), }, }, })); const data = await db.select().from(schema.companyNameUniqueTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('companyName array generator test', async () => { await seed(db, { companyNameTable: schema.companyNameArrayTable }).refine((funcs) => ({ companyNameTable: { count, columns: { companyName: funcs.companyName({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.companyNameArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('loremIpsum generator test', async () => { await seed(db, { loremIpsumTable: schema.loremIpsumTable }).refine((funcs) => ({ loremIpsumTable: { count, columns: { loremIpsum: funcs.loremIpsum(), }, }, })); const data = await db.select().from(schema.loremIpsumTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('loremIpsum array generator test', async () => { await seed(db, { loremIpsumTable: schema.loremIpsumArrayTable }).refine((funcs) => ({ loremIpsumTable: { count, columns: { loremIpsum: funcs.loremIpsum({ arraySize: 3 }), }, }, })); const data = await db.select().from(schema.loremIpsumArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 3)); expect(predicate).toBe(true); }); test('point generator test', async () => { await seed(db, { pointTable: schema.pointTable }).refine((funcs) => ({ pointTable: { count, columns: { point: funcs.point(), }, }, })); const data = await db.select().from(schema.pointTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('point unique generator test', async () => { await reset(db, { pointTable: schema.pointTable }); await seed(db, { pointTable: schema.pointTable }).refine((funcs) => ({ pointTable: { count, columns: { point: funcs.point({ isUnique: true }), }, }, })); const data = await db.select().from(schema.pointTable); // every value in each row does not equal undefined. let predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); // using Set because PGlite does not support unique point const pointStrsSet = new Set(data.map((row) => row.point!.map(String).join(','))); predicate = pointStrsSet.size === data.length; expect(predicate).toBe(true); }); test('point array generator test', async () => { await seed(db, { pointTable: schema.pointArrayTable }).refine((funcs) => ({ pointTable: { count, columns: { point: funcs.point({ arraySize: 2 }), }, }, })); const data = await db.select().from(schema.pointArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 2)); expect(predicate).toBe(true); }); test('line generator test', async () => { await seed(db, { lineTable: schema.lineTable }).refine((funcs) => ({ lineTable: { count, columns: { line: funcs.line(), }, }, })); const data = await db.select().from(schema.lineTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('line unique generator test', async () => { await reset(db, { lineTable: schema.lineTable }); await seed(db, { lineTable: schema.lineTable }).refine((funcs) => ({ lineTable: { count, columns: { line: funcs.line({ isUnique: true }), }, }, })); const data = await db.select().from(schema.lineTable); // every value in each row does not equal undefined. let predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); // using Set because PGlite does not support unique point const lineStrsSet = new Set(data.map((row) => row.line!.map(String).join(','))); predicate = lineStrsSet.size === data.length; expect(predicate).toBe(true); }); test('line array generator test', async () => { await seed(db, { lineTable: schema.lineArrayTable }).refine((funcs) => ({ lineTable: { count, columns: { line: funcs.line({ arraySize: 2 }), }, }, })); const data = await db.select().from(schema.lineArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && val.length === 2)); expect(predicate).toBe(true); }); test('phoneNumber generator test', async () => { await seed(db, { phoneNumberTable: schema.phoneNumberTable }).refine((funcs) => ({ phoneNumberTable: { count, columns: { phoneNumber: funcs.phoneNumber(), phoneNumberPrefixes: funcs.phoneNumber({ prefixes: ['+380 99', '+380 67', '+1'], generatedDigitsNumbers: [7, 7, 10], }), phoneNumberTemplate: funcs.phoneNumber({ template: '+380 ## ## ### ##' }), }, }, })); const data = await db.select().from(schema.phoneNumberTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('phoneNumber array generator test', async () => { await seed(db, { phoneNumberTable: schema.phoneNumberArrayTable }).refine((funcs) => ({ phoneNumberTable: { count, columns: { phoneNumber: funcs.phoneNumber({ arraySize: 3 }), phoneNumberPrefixes: funcs.phoneNumber({ prefixes: ['+380 99', '+380 67', '+1'], generatedDigitsNumbers: [7, 7, 10], arraySize: 4, }), phoneNumberTemplate: funcs.phoneNumber({ template: '+380 ## ## ### ##', arraySize: 5, }), }, }, })); const data = await db.select().from(schema.phoneNumberArrayTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null && [3, 4, 5].includes(val.length)) ); expect(predicate).toBe(true); }); test('weightedRandom generator test', async () => { await seed(db, { weightedRandomTable: schema.weightedRandomTable }).refine((funcs) => ({ weightedRandomTable: { count, columns: { weightedRandom: funcs.weightedRandom([ { value: funcs.default({ defaultValue: 'default value' }), weight: 0.3 }, { value: funcs.loremIpsum(), weight: 0.7 }, ]), }, }, })); const data = await db.select().from(schema.weightedRandomTable); // every value in each row does not equal undefined. const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); test('weightedRandom with unique gens generator test', async () => { await seed(db, { weightedRandomWithUniqueGensTable: schema.weightedRandomWithUniqueGensTable }).refine((funcs) => ({ weightedRandomWithUniqueGensTable: { count: 10000, columns: { weightedRandomWithUniqueGens: funcs.weightedRandom([ { weight: 0.3, value: funcs.email() }, { weight: 0.7, value: funcs.firstName({ isUnique: true }) }, ]), }, }, })); const data = await db.select().from(schema.weightedRandomWithUniqueGensTable); const predicate = data.length !== 0 && data.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); await expect( seed(db, { weightedRandomWithUniqueGensTable: schema.weightedRandomWithUniqueGensTable }).refine((funcs) => ({ weightedRandomWithUniqueGensTable: { count: 40000, columns: { weightedRandomWithUniqueGens: funcs.weightedRandom([ { weight: 0.1, value: funcs.email() }, { weight: 0.9, value: funcs.firstName({ isUnique: true }) }, ]), }, }, })), ).rejects.toThrow('count exceeds max number of unique first names.'); await expect( seed(db, { weightedRandomWithUniqueGensTable: schema.weightedRandomWithUniqueGensTable }).refine((funcs) => ({ weightedRandomWithUniqueGensTable: { count: 10000, columns: { weightedRandomWithUniqueGens: funcs.weightedRandom([ { weight: 0.2, value: funcs.email() }, { weight: 0.9, value: funcs.firstName({ isUnique: true }) }, ]), }, }, })), ).rejects.toThrow( 'The weights for the Weighted Random feature must add up to exactly 1. Please review your weights to ensure they total 1 before proceeding', ); }); ================================================ FILE: integration-tests/tests/seeder/pgSchema.ts ================================================ import type { AnyPgColumn } from 'drizzle-orm/pg-core'; import { bigint, bigserial, boolean, char, date, decimal, doublePrecision, integer, interval, json, jsonb, line, numeric, pgEnum, pgSchema, point, real, serial, smallint, smallserial, text, time, timestamp, uuid, varchar, } from 'drizzle-orm/pg-core'; export const schema = pgSchema('seeder_lib_pg'); export const customers = schema.table('customer', { id: varchar('id', { length: 256 }).primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = schema.table( 'employee', { id: integer('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: timestamp('birth_date').notNull(), hireDate: timestamp('hire_date').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: integer('extension').notNull(), notes: text('notes').notNull(), reportsTo: integer('reports_to').references((): AnyPgColumn => employees.id), photoPath: text('photo_path'), }, ); export const orders = schema.table('order', { id: integer('id').primaryKey(), orderDate: timestamp('order_date').notNull(), requiredDate: timestamp('required_date').notNull(), shippedDate: timestamp('shipped_date'), shipVia: integer('ship_via').notNull(), freight: numeric('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: text('customer_id') .notNull() .references(() => customers.id, { onDelete: 'cascade' }), employeeId: integer('employee_id') .notNull() .references(() => employees.id, { onDelete: 'cascade' }), }); export const suppliers = schema.table('supplier', { id: integer('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }); export const products = schema.table('product', { id: integer('id').primaryKey(), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: numeric('unit_price').notNull(), unitsInStock: integer('units_in_stock').notNull(), unitsOnOrder: integer('units_on_order').notNull(), reorderLevel: integer('reorder_level').notNull(), discontinued: integer('discontinued').notNull(), supplierId: integer('supplier_id') .notNull() .references(() => suppliers.id, { onDelete: 'cascade' }), }); export const details = schema.table('order_detail', { unitPrice: numeric('unit_price').notNull(), quantity: integer('quantity').notNull(), discount: numeric('discount').notNull(), orderId: integer('order_id') .notNull() .references(() => orders.id, { onDelete: 'cascade' }), productId: integer('product_id') .notNull() .references(() => products.id, { onDelete: 'cascade' }), }); // All data types table ------------------------------- export const moodEnum = pgEnum('mood_enum', ['sad', 'ok', 'happy']); export const allDataTypes = schema.table('all_data_types', { integer: integer('integer'), smallint: smallint('smallint'), biginteger: bigint('bigint', { mode: 'bigint' }), bigintNumber: bigint('bigint_number', { mode: 'number' }), serial: serial('serial'), smallserial: smallserial('smallserial'), bigserial: bigserial('bigserial', { mode: 'bigint' }), bigserialNumber: bigserial('bigserial_number', { mode: 'number' }), boolean: boolean('boolean'), text: text('text'), varchar: varchar('varchar', { length: 256 }), char: char('char', { length: 256 }), numeric: numeric('numeric'), decimal: decimal('decimal'), real: real('real'), doublePrecision: doublePrecision('double_precision'), json: json('json'), jsonb: jsonb('jsonb'), time: time('time'), timestampDate: timestamp('timestamp_date', { mode: 'date' }), timestampString: timestamp('timestamp_string', { mode: 'string' }), dateString: date('date_string', { mode: 'string' }), date: date('date', { mode: 'date' }), interval: interval('interval'), point: point('point', { mode: 'xy' }), pointTuple: point('point_tuple', { mode: 'tuple' }), line: line('line', { mode: 'abc' }), lineTuple: line('line_tuple', { mode: 'tuple' }), moodEnum: moodEnum('mood_enum'), uuid: uuid('uuid'), }); export const allArrayDataTypes = schema.table('all_array_data_types', { integerArray: integer('integer_array').array(), smallintArray: smallint('smallint_array').array(), bigintegerArray: bigint('bigint_array', { mode: 'bigint' }).array(), bigintNumberArray: bigint('bigint_number_array', { mode: 'number' }).array(), booleanArray: boolean('boolean_array').array(), textArray: text('text_array').array(), varcharArray: varchar('varchar_array', { length: 256 }).array(), charArray: char('char_array', { length: 256 }).array(), numericArray: numeric('numeric_array').array(), decimalArray: decimal('decimal_array').array(), realArray: real('real_array').array(), doublePrecisionArray: doublePrecision('double_precision_array').array(), jsonArray: json('json_array').array(), jsonbArray: jsonb('jsonb_array').array(), timeArray: time('time_array').array(), timestampDateArray: timestamp('timestamp_date_array', { mode: 'date' }).array(), timestampStringArray: timestamp('timestamp_string_array', { mode: 'string' }).array(), dateStringArray: date('date_string_array', { mode: 'string' }).array(), dateArray: date('date_array', { mode: 'date' }).array(), intervalArray: interval('interval_array').array(), pointArray: point('point_array', { mode: 'xy' }).array(), pointTupleArray: point('point_tuple_array', { mode: 'tuple' }).array(), lineArray: line('line_array', { mode: 'abc' }).array(), lineTupleArray: line('line_tuple_array', { mode: 'tuple' }).array(), moodEnumArray: moodEnum('mood_enum_array').array(), }); export const ndArrays = schema.table('nd_arrays', { integer1DArray: integer('integer_1d_array').array(3), integer2DArray: integer('integer_2d_array').array(3).array(4), integer3DArray: integer('integer_3d_array').array(3).array(4).array(5), integer4DArray: integer('integer_4d_array').array(3).array(4).array(5).array(6), }); // All generators tables ------------------------------- export const enumTable = schema.table('enum_table', { mood: moodEnum('mood_enum'), }); export const defaultTable = schema.table('default_table', { defaultString: text('default_string'), }); export const defaultArrayTable = schema.table('default_array_table', { defaultString: text('default_string').array(), }); export const valuesFromArrayTable = schema.table('values_from_array_table', { valuesFromArrayNotNull: varchar('values_from_array_not_null', { length: 256 }).notNull(), valuesFromArrayWeightedNotNull: varchar('values_from_array_weighted_not_null', { length: 256 }).notNull(), }); export const valuesFromArrayUniqueTable = schema.table('values_from_array_unique_table', { valuesFromArray: varchar('values_from_array', { length: 256 }).unique(), valuesFromArrayNotNull: varchar('values_from_array_not_null', { length: 256 }).unique().notNull(), valuesFromArrayWeighted: varchar('values_from_array_weighted', { length: 256 }).unique(), valuesFromArrayWeightedNotNull: varchar('values_from_array_weighted_not_null', { length: 256 }).unique().notNull(), }); export const valuesFromArrayArrayTable = schema.table('values_from_array_array_table', { valuesFromArray: varchar('values_from_array', { length: 256 }).array(), }); export const intPrimaryKeyTable = schema.table('int_primary_key_table', { intPrimaryKey: integer('int_primary_key').unique(), }); export const numberTable = schema.table('number_table', { number: real('number'), }); export const numberUniqueTable = schema.table('number_unique_table', { numberUnique: real('number_unique').unique(), }); export const numberArrayTable = schema.table('number_array_table', { number: real('number').array(), }); export const intTable = schema.table('int_table', { int: integer('int'), }); export const intUniqueTable = schema.table('int_unique_table', { intUnique: integer('int_unique').unique(), }); export const intArrayTable = schema.table('int_array_table', { int: integer('int').array(), }); export const booleanTable = schema.table('boolean_table', { boolean: boolean('boolean'), }); export const booleanArrayTable = schema.table('boolean_array_table', { boolean: boolean('boolean').array(), }); export const dateTable = schema.table('date_table', { date: date('date'), }); // TODO: add tests for data type with different modes export const dateArrayTable = schema.table('date_array_table', { date: date('date', { mode: 'date' }).array(), dateString: date('date_string', { mode: 'string' }).array(), }); export const timeTable = schema.table('time_table', { time: time('time'), }); export const timeArrayTable = schema.table('time_array_table', { time: time('time').array(), }); export const timestampTable = schema.table('timestamp_table', { timestamp: timestamp('timestamp'), }); export const timestampArrayTable = schema.table('timestamp_array_table', { timestamp: timestamp('timestamp').array(), }); export const jsonTable = schema.table('json_table', { json: json('json'), }); export const jsonArrayTable = schema.table('json_array_table', { json: json('json').array(), }); export const intervalTable = schema.table('interval_table', { interval: interval('interval'), }); export const intervalUniqueTable = schema.table('interval_unique_table', { intervalUnique: interval('interval_unique').unique(), }); export const intervalArrayTable = schema.table('interval_array_table', { interval: interval('interval').array(), }); export const stringTable = schema.table('string_table', { string: text('string'), }); export const stringUniqueTable = schema.table('string_unique_table', { stringUnique: varchar('string_unique', { length: 256 }).unique(), }); export const stringArrayTable = schema.table('string_array_table', { string: text('string').array(), }); export const emailTable = schema.table('email_table', { email: varchar('email', { length: 256 }).unique(), }); export const emailArrayTable = schema.table('email_array_table', { email: varchar('email', { length: 256 }).array(), }); export const firstNameTable = schema.table('first_name_table', { firstName: varchar('first_name', { length: 256 }), }); export const firstNameUniqueTable = schema.table('first_name_unique_table', { firstNameUnique: varchar('first_name_unique', { length: 256 }).unique(), }); export const firstNameArrayTable = schema.table('first_name_array_table', { firstName: varchar('first_name', { length: 256 }).array(), }); export const lastNameTable = schema.table('last_name_table', { lastName: varchar('last_name', { length: 256 }), }); export const lastNameUniqueTable = schema.table('last_name_unique_table', { lastNameUnique: varchar('last_name_unique', { length: 256 }).unique(), }); export const lastNameArrayTable = schema.table('last_name_array_table', { lastName: varchar('last_name', { length: 256 }).array(), }); export const fullNameTable = schema.table('full_name__table', { fullName: varchar('full_name_', { length: 256 }), }); export const fullNameUniqueTable = schema.table('full_name_unique_table', { fullNameUnique: varchar('full_name_unique', { length: 256 }).unique(), }); export const fullNameArrayTable = schema.table('full_name_array_table', { fullName: varchar('full_name', { length: 256 }).array(), }); export const countryTable = schema.table('country_table', { country: varchar('country', { length: 256 }), }); export const countryUniqueTable = schema.table('country_unique_table', { countryUnique: varchar('country_unique', { length: 256 }).unique(), }); export const countryArrayTable = schema.table('country_array_table', { country: varchar('country', { length: 256 }).array(), }); export const cityTable = schema.table('city_table', { city: varchar('city', { length: 256 }), }); export const cityUniqueTable = schema.table('city_unique_table', { cityUnique: varchar('city_unique', { length: 256 }).unique(), }); export const cityArrayTable = schema.table('city_array_table', { city: varchar('city', { length: 256 }).array(), }); export const streetAddressTable = schema.table('street_address_table', { streetAddress: varchar('street_address', { length: 256 }), }); export const streetAddressUniqueTable = schema.table('street_address_unique_table', { streetAddressUnique: varchar('street_address_unique', { length: 256 }).unique(), }); export const streetAddressArrayTable = schema.table('street_address_array_table', { streetAddress: varchar('street_address', { length: 256 }).array(), }); export const jobTitleTable = schema.table('job_title_table', { jobTitle: text('job_title'), }); export const jobTitleArrayTable = schema.table('job_title_array_table', { jobTitle: text('job_title').array(), }); export const postcodeTable = schema.table('postcode_table', { postcode: varchar('postcode', { length: 256 }), }); export const postcodeUniqueTable = schema.table('postcode_unique_table', { postcodeUnique: varchar('postcode_unique', { length: 256 }).unique(), }); export const postcodeArrayTable = schema.table('postcode_array_table', { postcode: varchar('postcode', { length: 256 }).array(), }); export const stateTable = schema.table('state_table', { state: text('state'), }); export const stateArrayTable = schema.table('state_array_table', { state: text('state').array(), }); export const companyNameTable = schema.table('company_name_table', { companyName: text('company_name'), }); export const companyNameUniqueTable = schema.table('company_name_unique_table', { companyNameUnique: varchar('company_name_unique', { length: 256 }).unique(), }); export const companyNameArrayTable = schema.table('company_name_array_table', { companyName: text('company_name').array(), }); export const loremIpsumTable = schema.table('lorem_ipsum_table', { loremIpsum: text('lorem_ipsum'), }); export const loremIpsumArrayTable = schema.table('lorem_ipsum_array_table', { loremIpsum: text('lorem_ipsum').array(), }); export const pointTable = schema.table('point_table', { point: point('point'), }); export const pointArrayTable = schema.table('point_array_table', { point: point('point').array(), }); export const lineTable = schema.table('line_table', { line: line('line'), }); export const lineArrayTable = schema.table('line_array_table', { line: line('line').array(), }); // export const pointUniqueTable = schema.table("point_unique_table", { // pointUnique: point("point_unique").unique(), // }); // export const lineUniqueTable = schema.table("line_unique_table", { // lineUnique: line("line_unique").unique(), // }); export const phoneNumberTable = schema.table('phone_number_table', { phoneNumber: varchar('phoneNumber', { length: 256 }).unique(), phoneNumberTemplate: varchar('phone_number_template', { length: 256 }).unique(), phoneNumberPrefixes: varchar('phone_number_prefixes', { length: 256 }).unique(), }); export const phoneNumberArrayTable = schema.table('phone_number_array_table', { phoneNumber: varchar('phoneNumber', { length: 256 }).array(), phoneNumberTemplate: varchar('phone_number_template', { length: 256 }).array(), phoneNumberPrefixes: varchar('phone_number_prefixes', { length: 256 }).array(), }); export const weightedRandomTable = schema.table('weighted_random_table', { weightedRandom: varchar('weighted_random', { length: 256 }), }); export const weightedRandomWithUniqueGensTable = schema.table('weighted_random_with_unique_gens_table', { weightedRandomWithUniqueGens: varchar('weighted_random_with_unique_gens', { length: 256 }).unique(), }); export const identityColumnsTable = schema.table('identity_columns_table', { id: integer('id').generatedAlwaysAsIdentity(), id1: integer('id1'), name: text('name'), }); ================================================ FILE: integration-tests/tests/seeder/sqlite.test.ts ================================================ import BetterSqlite3 from 'better-sqlite3'; import { sql } from 'drizzle-orm'; import type { BetterSQLite3Database } from 'drizzle-orm/better-sqlite3'; import { drizzle } from 'drizzle-orm/better-sqlite3'; import { reset, seed } from 'drizzle-seed'; import { afterAll, afterEach, beforeAll, expect, test } from 'vitest'; import * as schema from './sqliteSchema.ts'; let client: BetterSqlite3.Database; let db: BetterSQLite3Database; beforeAll(async () => { client = new BetterSqlite3(':memory:'); db = drizzle(client); db.run( sql.raw(` CREATE TABLE \`customer\` ( \`id\` text PRIMARY KEY NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text, \`region\` text, \`country\` text NOT NULL, \`phone\` text NOT NULL, \`fax\` text ); `), ); db.run( sql.raw(` CREATE TABLE \`order_detail\` ( \`unit_price\` numeric NOT NULL, \`quantity\` integer NOT NULL, \`discount\` numeric NOT NULL, \`order_id\` integer NOT NULL, \`product_id\` integer NOT NULL, FOREIGN KEY (\`order_id\`) REFERENCES \`order\`(\`id\`) ON UPDATE no action ON DELETE cascade, FOREIGN KEY (\`product_id\`) REFERENCES \`product\`(\`id\`) ON UPDATE no action ON DELETE cascade ); `), ); db.run( sql.raw(` CREATE TABLE \`employee\` ( \`id\` integer PRIMARY KEY NOT NULL, \`last_name\` text NOT NULL, \`first_name\` text, \`title\` text NOT NULL, \`title_of_courtesy\` text NOT NULL, \`birth_date\` integer NOT NULL, \`hire_date\` integer NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`home_phone\` text NOT NULL, \`extension\` integer NOT NULL, \`notes\` text NOT NULL, \`reports_to\` integer, \`photo_path\` text, FOREIGN KEY (\`reports_to\`) REFERENCES \`employee\`(\`id\`) ON UPDATE no action ON DELETE no action ); `), ); db.run( sql.raw(` CREATE TABLE \`order\` ( \`id\` integer PRIMARY KEY NOT NULL, \`order_date\` integer NOT NULL, \`required_date\` integer NOT NULL, \`shipped_date\` integer, \`ship_via\` integer NOT NULL, \`freight\` numeric NOT NULL, \`ship_name\` text NOT NULL, \`ship_city\` text NOT NULL, \`ship_region\` text, \`ship_postal_code\` text, \`ship_country\` text NOT NULL, \`customer_id\` text NOT NULL, \`employee_id\` integer NOT NULL, FOREIGN KEY (\`customer_id\`) REFERENCES \`customer\`(\`id\`) ON UPDATE no action ON DELETE cascade, FOREIGN KEY (\`employee_id\`) REFERENCES \`employee\`(\`id\`) ON UPDATE no action ON DELETE cascade ); `), ); db.run( sql.raw(` CREATE TABLE \`product\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`quantity_per_unit\` text NOT NULL, \`unit_price\` numeric NOT NULL, \`units_in_stock\` integer NOT NULL, \`units_on_order\` integer NOT NULL, \`reorder_level\` integer NOT NULL, \`discontinued\` integer NOT NULL, \`supplier_id\` integer NOT NULL, FOREIGN KEY (\`supplier_id\`) REFERENCES \`supplier\`(\`id\`) ON UPDATE no action ON DELETE cascade ); `), ); db.run( sql.raw(` CREATE TABLE \`supplier\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`company_name\` text NOT NULL, \`contact_name\` text NOT NULL, \`contact_title\` text NOT NULL, \`address\` text NOT NULL, \`city\` text NOT NULL, \`region\` text, \`postal_code\` text NOT NULL, \`country\` text NOT NULL, \`phone\` text NOT NULL ); `), ); // All data types test ------------------------------- db.run( sql.raw(` CREATE TABLE \`all_data_types\` ( \`integer_number\` integer, \`integer_boolean\` integer, \`integer_timestamp\` integer, \`integer_timestampms\` integer, \`real\` real, \`text\` text, \`text_json\` text, \`blob_bigint\` blob, \`blob_buffer\` blob, \`blob_json\` blob, \`numeric\` numeric ); `), ); }); afterAll(async () => { client.close(); }); afterEach(async () => { await reset(db, schema); }); test('basic seed test', async () => { await seed(db, schema); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(10); expect(details.length).toBe(10); expect(employees.length).toBe(10); expect(orders.length).toBe(10); expect(products.length).toBe(10); expect(suppliers.length).toBe(10); }); test('seed with options.count:11 test', async () => { await seed(db, schema, { count: 11 }); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(11); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) customers count', async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(11); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test('redefine(refine) all tables count', async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 12, }, details: { count: 13, }, employees: { count: 14, }, orders: { count: 15, }, products: { count: 16, }, suppliers: { count: 17, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(12); expect(details.length).toBe(13); expect(employees.length).toBe(14); expect(orders.length).toBe(15); expect(products.length).toBe(16); expect(suppliers.length).toBe(17); }); test("redefine(refine) orders count using 'with' in customers", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 13, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(11); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); test("sequential using of 'with'", async () => { await seed(db, schema, { count: 11 }).refine(() => ({ customers: { count: 4, with: { orders: 2, }, }, orders: { count: 12, with: { details: 3, }, }, })); const customers = await db.select().from(schema.customers); const details = await db.select().from(schema.details); const employees = await db.select().from(schema.employees); const orders = await db.select().from(schema.orders); const products = await db.select().from(schema.products); const suppliers = await db.select().from(schema.suppliers); expect(customers.length).toBe(4); expect(details.length).toBe(24); expect(employees.length).toBe(11); expect(orders.length).toBe(8); expect(products.length).toBe(11); expect(suppliers.length).toBe(11); }); // All data types test ------------------------------- test('basic seed test for all sqlite data types', async () => { // migrate(db, { migrationsFolder: path.join(__dirname, "sqliteMigrations") }); await seed(db, { allDataTypes: schema.allDataTypes }, { count: 10000 }); const allDataTypes = await db.select().from(schema.allDataTypes); // every value in each 10 rows does not equal undefined. const predicate = allDataTypes.every((row) => Object.values(row).every((val) => val !== undefined && val !== null)); expect(predicate).toBe(true); }); ================================================ FILE: integration-tests/tests/seeder/sqliteSchema.ts ================================================ import { blob, foreignKey, integer, numeric, real, sqliteTable, text } from 'drizzle-orm/sqlite-core'; export const customers = sqliteTable('customer', { id: text('id').primaryKey(), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code'), region: text('region'), country: text('country').notNull(), phone: text('phone').notNull(), fax: text('fax'), }); export const employees = sqliteTable( 'employee', { id: integer('id').primaryKey(), lastName: text('last_name').notNull(), firstName: text('first_name'), title: text('title').notNull(), titleOfCourtesy: text('title_of_courtesy').notNull(), birthDate: integer('birth_date', { mode: 'timestamp' }).notNull(), hireDate: integer('hire_date', { mode: 'timestamp' }).notNull(), address: text('address').notNull(), city: text('city').notNull(), postalCode: text('postal_code').notNull(), country: text('country').notNull(), homePhone: text('home_phone').notNull(), extension: integer('extension').notNull(), notes: text('notes').notNull(), reportsTo: integer('reports_to'), photoPath: text('photo_path'), }, (table) => ({ reportsToFk: foreignKey(() => ({ columns: [table.reportsTo], foreignColumns: [table.id], })), }), ); export const orders = sqliteTable('order', { id: integer('id').primaryKey(), orderDate: integer('order_date', { mode: 'timestamp' }).notNull(), requiredDate: integer('required_date', { mode: 'timestamp' }).notNull(), shippedDate: integer('shipped_date', { mode: 'timestamp' }), shipVia: integer('ship_via').notNull(), freight: numeric('freight').notNull(), shipName: text('ship_name').notNull(), shipCity: text('ship_city').notNull(), shipRegion: text('ship_region'), shipPostalCode: text('ship_postal_code'), shipCountry: text('ship_country').notNull(), customerId: text('customer_id') .notNull() .references(() => customers.id, { onDelete: 'cascade' }), employeeId: integer('employee_id') .notNull() .references(() => employees.id, { onDelete: 'cascade' }), }); export const suppliers = sqliteTable('supplier', { id: integer('id').primaryKey({ autoIncrement: true }), companyName: text('company_name').notNull(), contactName: text('contact_name').notNull(), contactTitle: text('contact_title').notNull(), address: text('address').notNull(), city: text('city').notNull(), region: text('region'), postalCode: text('postal_code').notNull(), country: text('country').notNull(), phone: text('phone').notNull(), }); export const products = sqliteTable('product', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), quantityPerUnit: text('quantity_per_unit').notNull(), unitPrice: numeric('unit_price').notNull(), unitsInStock: integer('units_in_stock').notNull(), unitsOnOrder: integer('units_on_order').notNull(), reorderLevel: integer('reorder_level').notNull(), discontinued: integer('discontinued').notNull(), supplierId: integer('supplier_id') .notNull() .references(() => suppliers.id, { onDelete: 'cascade' }), }); export const details = sqliteTable('order_detail', { unitPrice: numeric('unit_price').notNull(), quantity: integer('quantity').notNull(), discount: numeric('discount').notNull(), orderId: integer('order_id') .notNull() .references(() => orders.id, { onDelete: 'cascade' }), productId: integer('product_id') .notNull() .references(() => products.id, { onDelete: 'cascade' }), }); // All data types table ------------------------------- export const allDataTypes = sqliteTable('all_data_types', { integerNumber: integer('integer_number', { mode: 'number' }), integerBoolean: integer('integer_boolean', { mode: 'boolean' }), integerTimestamp: integer('integer_timestamp', { mode: 'timestamp' }), integerTimestampms: integer('integer_timestampms', { mode: 'timestamp_ms' }), real: real('real'), text: text('text', { mode: 'text' }), textJson: text('text_json', { mode: 'json' }), blobBigint: blob('blob_bigint', { mode: 'bigint' }), blobBuffer: blob('blob_buffer', { mode: 'buffer' }), blobJson: blob('blob_json', { mode: 'json' }), numeric: numeric('numeric'), }); ================================================ FILE: integration-tests/tests/singlestore/singlestore-cache.ts ================================================ import { eq, getTableName, is, sql, Table } from 'drizzle-orm'; import type { MutationOption } from 'drizzle-orm/cache/core'; import { Cache } from 'drizzle-orm/cache/core'; import type { CacheConfig } from 'drizzle-orm/cache/core/types'; import { alias, boolean, int, json, serial, type SingleStoreDatabase, singlestoreTable, text, timestamp, } from 'drizzle-orm/singlestore-core'; import Keyv from 'keyv'; import { beforeEach, describe, expect, test, vi } from 'vitest'; // eslint-disable-next-line drizzle-internal/require-entity-kind export class TestGlobalCache extends Cache { private globalTtl: number = 1000; private usedTablesPerKey: Record = {}; constructor(private kv: Keyv = new Keyv()) { super(); } override strategy(): 'explicit' | 'all' { return 'all'; } override async get(key: string, _tables: string[], _isTag: boolean): Promise { const res = await this.kv.get(key) ?? undefined; return res; } override async put( key: string, response: any, tables: string[], isTag: boolean, config?: CacheConfig, ): Promise { await this.kv.set(key, response, config ? config.ex : this.globalTtl); for (const table of tables) { const keys = this.usedTablesPerKey[table]; if (keys === undefined) { this.usedTablesPerKey[table] = [key]; } else { keys.push(key); } } } override async onMutate(params: MutationOption): Promise { const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; const keysToDelete = new Set(); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; const keys = this.usedTablesPerKey[tableName] ?? []; for (const key of keys) keysToDelete.add(key); } if (keysToDelete.size > 0 || tagsArray.length > 0) { for (const tag of tagsArray) { await this.kv.delete(tag); } for (const key of keysToDelete) { await this.kv.delete(key); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; this.usedTablesPerKey[tableName] = []; } } } } } // eslint-disable-next-line drizzle-internal/require-entity-kind export class TestCache extends TestGlobalCache { override strategy(): 'explicit' | 'all' { return 'explicit'; } } type TestSingleStoreDB = SingleStoreDatabase; declare module 'vitest' { interface TestContext { cachedSingleStore: { db: TestSingleStoreDB; dbGlobalCached: TestSingleStoreDB; }; } } const usersTable = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: json('jsonb').$type(), createdAt: timestamp('created_at').notNull().defaultNow(), }); const postsTable = singlestoreTable('posts', { id: serial().primaryKey(), description: text().notNull(), userId: int('city_id'), }); export function tests() { describe('common_cache', () => { beforeEach(async (ctx) => { const { db, dbGlobalCached } = ctx.cachedSingleStore; await db.execute(sql`drop table if exists users`); await db.execute(sql`drop table if exists posts`); await db.$cache?.invalidate({ tables: 'users' }); await dbGlobalCached.$cache?.invalidate({ tables: 'users' }); // public users await db.execute( sql` create table users ( id serial primary key, name text not null, verified boolean not null default false, jsonb json, created_at timestamp not null default now() ) `, ); await db.execute( sql` create table posts ( id serial primary key, description text not null, user_id int ) `, ); }); test('test force invalidate', async (ctx) => { const { db } = ctx.cachedSingleStore; const spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); await db.$cache?.invalidate({ tables: 'users' }); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('default global config - no cache should be hit', async (ctx) => { const { db } = ctx.cachedSingleStore; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('default global config + enable cache on select: get, put', async (ctx) => { const { db } = ctx.cachedSingleStore; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('default global config + enable cache on select + write: get, put, onMutate', async (ctx) => { const { db } = ctx.cachedSingleStore; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); spyPut.mockClear(); spyGet.mockClear(); spyInvalidate.mockClear(); await db.insert(usersTable).values({ name: 'John' }); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('default global config + enable cache on select + disable invalidate: get, put', async (ctx) => { const { db } = ctx.cachedSingleStore; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); }); test('global: true + disable cache', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedSingleStore; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - cache should be hit', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedSingleStore; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - cache: false on select - no cache hit', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedSingleStore; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - disable invalidate - cache hit + no invalidate', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedSingleStore; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ autoInvalidate: false }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); spyPut.mockClear(); spyGet.mockClear(); spyInvalidate.mockClear(); await db.insert(usersTable).values({ name: 'John' }); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('global: true - with custom tag', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedSingleStore; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); }); // check select used tables test('check simple select used tables', (ctx) => { const { db } = ctx.cachedSingleStore; // @ts-expect-error expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users']); // @ts-expect-error expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users']); }); // check select+join used tables test('select+join', (ctx) => { const { db } = ctx.cachedSingleStore; // @ts-expect-error expect(db.select().from(usersTable).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables()) .toStrictEqual(['users', 'posts']); expect( // @ts-expect-error db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables(), ).toStrictEqual(['users', 'posts']); }); // check select+2join used tables test('select+2joins', (ctx) => { const { db } = ctx.cachedSingleStore; expect( db.select().from(usersTable).leftJoin( postsTable, eq(usersTable.id, postsTable.userId), ).leftJoin( alias(postsTable, 'post2'), eq(usersTable.id, postsTable.userId), ) // @ts-expect-error .getUsedTables(), ) .toStrictEqual(['users', 'posts']); expect( db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).leftJoin( alias(postsTable, 'post2'), eq(usersTable.id, postsTable.userId), // @ts-expect-error ).getUsedTables(), ).toStrictEqual(['users', 'posts']); }); // select subquery used tables test('select+join', (ctx) => { const { db } = ctx.cachedSingleStore; const sq = db.select().from(usersTable).where(eq(usersTable.id, 42)).as('sq'); db.select().from(sq); // @ts-expect-error expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users']); }); }); } ================================================ FILE: integration-tests/tests/singlestore/singlestore-common.ts ================================================ /* eslint-disable @typescript-eslint/no-unused-vars */ import 'dotenv/config'; import Docker from 'dockerode'; import { and, asc, avg, avgDistinct, count, countDistinct, eq, exists, getTableColumns, gt, gte, inArray, lt, max, min, Name, notInArray, placeholder, sql, sum, sumDistinct, TransactionRollbackError, } from 'drizzle-orm'; import type { SingleStoreDatabase } from 'drizzle-orm/singlestore-core'; import { alias, bigint, binary, boolean, char, date, datetime, decimal, double, except, float, getTableConfig, index, int, intersect, json, mediumint, primaryKey, real, serial, singlestoreEnum, singlestoreSchema, singlestoreTable, singlestoreTableCreator, /* singlestoreView, */ smallint, text, time, timestamp, tinyint, union, unionAll, unique, uniqueIndex, uniqueKeyName, varbinary, varchar, vector, year, } from 'drizzle-orm/singlestore-core'; import { dotProduct, euclideanDistance } from 'drizzle-orm/singlestore-core/expressions'; import { migrate } from 'drizzle-orm/singlestore/migrator'; import getPort from 'get-port'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; import { Expect, toLocalDate } from '~/utils.ts'; import type { Equal } from '~/utils.ts'; type TestSingleStoreDB = SingleStoreDatabase; declare module 'vitest' { interface TestContext { singlestore: { db: TestSingleStoreDB; }; } } const ENABLE_LOGGING = false; const allTypesTable = singlestoreTable('all_types', { serial: serial('scol'), bigint53: bigint('bigint53', { mode: 'number', }), bigint64: bigint('bigint64', { mode: 'bigint', }), binary: binary('binary'), boolean: boolean('boolean'), char: char('char'), date: date('date', { mode: 'date', }), dateStr: date('date_str', { mode: 'string', }), datetime: datetime('datetime', { mode: 'date', }), datetimeStr: datetime('datetime_str', { mode: 'string', }), decimal: decimal('decimal'), decimalNum: decimal('decimal_num', { scale: 30, mode: 'number', }), decimalBig: decimal('decimal_big', { scale: 30, mode: 'bigint', }), double: double('double'), float: float('float'), int: int('int'), json: json('json'), medInt: mediumint('med_int'), smallInt: smallint('small_int'), real: real('real'), text: text('text'), time: time('time'), timestamp: timestamp('timestamp', { mode: 'date', }), timestampStr: timestamp('timestamp_str', { mode: 'string', }), tinyInt: tinyint('tiny_int'), varbin: varbinary('varbin', { length: 16, }), varchar: varchar('varchar', { length: 255, }), year: year('year'), enum: singlestoreEnum('enum', ['enV1', 'enV2']), }); const usersTable = singlestoreTable('userstest', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: json('jsonb').$type(), createdAt: timestamp('created_at').notNull().defaultNow(), }); const users2Table = singlestoreTable('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id'), }); const citiesTable = singlestoreTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const usersOnUpdate = singlestoreTable('users_on_update', { id: serial('id').primaryKey(), name: text('name').notNull(), updateCounter: int('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), updatedAt: datetime('updated_at', { mode: 'date' }).$onUpdateFn(() => new Date()), alwaysNull: text('always_null').$type().$onUpdateFn(() => null), // need to add $type because $onUpdate add a default value }); const datesTable = singlestoreTable('datestable', { date: date('date'), dateAsString: date('date_as_string', { mode: 'string' }), time: time('time'), datetime: datetime('datetime'), datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), timestamp: timestamp('timestamp'), timestampAsString: timestamp('timestamp_as_string', { mode: 'string' }), year: year('year'), }); const coursesTable = singlestoreTable('courses', { id: serial('id').primaryKey(), name: text('name').notNull(), categoryId: int('category_id'), }); const courseCategoriesTable = singlestoreTable('course_categories', { id: serial('id').primaryKey(), name: text('name').notNull(), }); const orders = singlestoreTable('orders', { id: serial('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull().$default(() => 'random_string'), amount: int('amount').notNull(), quantity: int('quantity').notNull(), }); const usersMigratorTable = singlestoreTable('users12', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }, (table) => { return { name: uniqueIndex('').on(table.name).using('btree'), }; }); // To test aggregate functions const aggregateTable = singlestoreTable('aggregate_table', { id: serial('id').notNull(), name: text('name').notNull(), a: int('a'), b: int('b'), c: int('c'), nullOnly: int('null_only'), }); const vectorSearchTable = singlestoreTable('vector_search', { id: serial('id').notNull(), text: text('text').notNull(), embedding: vector('embedding', { dimensions: 10 }), }); // To test another schema and multischema const mySchema = singlestoreSchema(`mySchema`); const usersMySchemaTable = mySchema.table('userstest', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: json('jsonb').$type(), createdAt: timestamp('created_at').notNull().defaultNow(), }); const users2MySchemaTable = mySchema.table('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id'), }); const citiesMySchemaTable = mySchema.table('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), }); let singlestoreContainer: Docker.Container; export async function createDockerDB(): Promise<{ connectionString: string; container: Docker.Container }> { const docker = new Docker(); const port = await getPort({ port: 3306 }); const image = 'ghcr.io/singlestore-labs/singlestoredb-dev:latest'; const pullStream = await docker.pull(image); await new Promise((resolve, reject) => docker.modem.followProgress(pullStream, (err) => (err ? reject(err) : resolve(err))) ); singlestoreContainer = await docker.createContainer({ Image: image, Env: ['ROOT_PASSWORD=singlestore'], name: `drizzle-integration-tests-${uuid()}`, HostConfig: { AutoRemove: true, PortBindings: { '3306/tcp': [{ HostPort: `${port}` }], }, }, }); await singlestoreContainer.start(); await new Promise((resolve) => setTimeout(resolve, 4000)); return { connectionString: `singlestore://root:singlestore@localhost:${port}/`, container: singlestoreContainer, }; } export function tests(driver?: string) { describe('common', () => { afterAll(async () => { await singlestoreContainer?.stop().catch(console.error); }); beforeEach(async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists userstest`); await db.execute(sql`drop table if exists users2`); await db.execute(sql`drop table if exists cities`); await db.execute(sql`drop table if exists ${allTypesTable}`); await db.execute(sql`drop schema if exists \`mySchema\``); await db.execute(sql`create schema if not exists \`mySchema\``); await db.execute( sql` create table userstest ( id serial primary key, name text not null, verified boolean not null default false, jsonb json, created_at timestamp not null default now() ) `, ); await db.execute( sql` create table users2 ( id serial primary key, name text not null, city_id int ) `, ); await db.execute( sql` create table cities ( id serial primary key, name text not null ) `, ); // mySchema await db.execute( sql` create table \`mySchema\`.\`userstest\` ( \`id\` serial primary key, \`name\` text not null, \`verified\` boolean not null default false, \`jsonb\` json, \`created_at\` timestamp not null default now() ) `, ); await db.execute( sql` create table \`mySchema\`.\`cities\` ( \`id\` serial primary key, \`name\` text not null ) `, ); await db.execute( sql` create table \`mySchema\`.\`users2\` ( \`id\` serial primary key, \`name\` text not null, \`city_id\` int ) `, ); }); async function setupReturningFunctionsTest(db: SingleStoreDatabase) { await db.execute(sql`drop table if exists \`users_default_fn\``); await db.execute( sql` create table \`users_default_fn\` ( \`id\` varchar(256) primary key, \`name\` text not null ); `, ); } async function setupSetOperationTest(db: TestSingleStoreDB) { await db.execute(sql`drop table if exists \`users2\``); await db.execute(sql`drop table if exists \`cities\``); await db.execute( sql` create table \`users2\` ( \`id\` serial primary key, \`name\` text not null, \`city_id\` int ) `, ); await db.execute( sql` create table \`cities\` ( \`id\` serial primary key, \`name\` text not null ) `, ); await db.insert(citiesTable).values([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await db.insert(users2Table).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 2 }, { id: 3, name: 'Jack', cityId: 3 }, { id: 4, name: 'Peter', cityId: 3 }, { id: 5, name: 'Ben', cityId: 2 }, { id: 6, name: 'Jill', cityId: 1 }, { id: 7, name: 'Mary', cityId: 2 }, { id: 8, name: 'Sally', cityId: 1 }, ]); } async function setupAggregateFunctionsTest(db: TestSingleStoreDB) { await db.execute(sql`drop table if exists \`aggregate_table\``); await db.execute( sql` create table \`aggregate_table\` ( \`id\` integer primary key auto_increment not null, \`name\` text not null, \`a\` integer, \`b\` integer, \`c\` integer, \`null_only\` integer ); `, ); await db.insert(aggregateTable).values([ { id: 1, name: 'value 1', a: 5, b: 10, c: 20 }, { id: 2, name: 'value 1', a: 5, b: 20, c: 30 }, { id: 3, name: 'value 2', a: 10, b: 50, c: 60 }, { id: 4, name: 'value 3', a: 20, b: 20, c: null }, { id: 5, name: 'value 4', a: null, b: 90, c: 120 }, { id: 6, name: 'value 5', a: 80, b: 10, c: null }, { id: 7, name: 'value 6', a: null, b: null, c: 150 }, ]); } async function setupVectorSearchTest(db: TestSingleStoreDB) { await db.execute(sql`drop table if exists \`vector_search\``); await db.execute( sql` create table \`vector_search\` ( \`id\` integer primary key auto_increment not null, \`text\` text not null, \`embedding\` vector(10) not null ) `, ); await db.insert(vectorSearchTable).values([ { id: 1, text: 'I like dogs', embedding: [0.6119, 0.1395, 0.2921, 0.3664, 0.4561, 0.7852, 0.1997, 0.5142, 0.5924, 0.0465], }, { id: 2, text: 'I like cats', embedding: [0.6075, 0.1705, 0.0651, 0.9489, 0.9656, 0.8084, 0.3046, 0.0977, 0.6842, 0.4402], }, ]); } test('table config: unsigned ints', async () => { const unsignedInts = singlestoreTable('cities1', { bigint: bigint('bigint', { mode: 'number', unsigned: true }), int: int('int', { unsigned: true }), smallint: smallint('smallint', { unsigned: true }), mediumint: mediumint('mediumint', { unsigned: true }), tinyint: tinyint('tinyint', { unsigned: true }), }); const tableConfig = getTableConfig(unsignedInts); const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; expect(bigintColumn.getSQLType()).toBe('bigint unsigned'); expect(intColumn.getSQLType()).toBe('int unsigned'); expect(smallintColumn.getSQLType()).toBe('smallint unsigned'); expect(mediumintColumn.getSQLType()).toBe('mediumint unsigned'); expect(tinyintColumn.getSQLType()).toBe('tinyint unsigned'); }); test('table config: signed ints', async () => { const unsignedInts = singlestoreTable('cities1', { bigint: bigint('bigint', { mode: 'number' }), int: int('int'), smallint: smallint('smallint'), mediumint: mediumint('mediumint'), tinyint: tinyint('tinyint'), }); const tableConfig = getTableConfig(unsignedInts); const bigintColumn = tableConfig.columns.find((c) => c.name === 'bigint')!; const intColumn = tableConfig.columns.find((c) => c.name === 'int')!; const smallintColumn = tableConfig.columns.find((c) => c.name === 'smallint')!; const mediumintColumn = tableConfig.columns.find((c) => c.name === 'mediumint')!; const tinyintColumn = tableConfig.columns.find((c) => c.name === 'tinyint')!; expect(bigintColumn.getSQLType()).toBe('bigint'); expect(intColumn.getSQLType()).toBe('int'); expect(smallintColumn.getSQLType()).toBe('smallint'); expect(mediumintColumn.getSQLType()).toBe('mediumint'); expect(tinyintColumn.getSQLType()).toBe('tinyint'); }); test('table config: primary keys name', async () => { const table = singlestoreTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), })); const tableConfig = getTableConfig(table); expect(tableConfig.primaryKeys).toHaveLength(1); expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); }); test('table configs: unique third param', async () => { const cities1Table = singlestoreTable('cities1', { id: serial('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: unique('custom_name').on(t.name, t.state), f1: unique('custom_name1').on(t.name, t.state), })); const tableConfig = getTableConfig(cities1Table); expect(tableConfig.uniqueConstraints).toHaveLength(2); expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); }); test('table configs: unique in column', async () => { const cities1Table = singlestoreTable('cities1', { id: serial('id').primaryKey(), name: text('name').notNull().unique(), state: text('state').unique('custom'), field: text('field').unique('custom_field'), }); const tableConfig = getTableConfig(cities1Table); const columnName = tableConfig.columns.find((it) => it.name === 'name'); expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); expect(columnName?.isUnique).toBeTruthy(); const columnState = tableConfig.columns.find((it) => it.name === 'state'); expect(columnState?.uniqueName).toBe('custom'); expect(columnState?.isUnique).toBeTruthy(); const columnField = tableConfig.columns.find((it) => it.name === 'field'); expect(columnField?.uniqueName).toBe('custom_field'); expect(columnField?.isUnique).toBeTruthy(); }); test('select all fields', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('select sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select typed sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select with empty array in inArray', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(inArray(usersTable.id, [])) .orderBy(asc(usersTable.id)); expect(result).toEqual([]); }); test('select with empty array in notInArray', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(notInArray(usersTable.id, [])) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); }); test('select distinct', async (ctx) => { const { db } = ctx.singlestore; const usersDistinctTable = singlestoreTable('users_distinct', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]); const users = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ); await db.execute(sql`drop table ${usersDistinctTable}`); expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); }); test('insert returning sql', async (ctx) => { const { db } = ctx.singlestore; const [result, _] = await db.insert(usersTable).values({ id: 1, name: 'John' }); expect(result.insertId).toBe(1); }); test('delete returning sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(users[0].affectedRows).toBe(1); }); test('update returning sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); expect(users[0].changedRows).toBe(1); }); test('update with returning all fields', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); expect(updatedUsers[0].changedRows).toBe(1); expect(users[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); test('update with returning partial', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(updatedUsers[0].changedRows).toBe(1); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('delete with returning partial', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('insert + select', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersTable).values({ id: 2, name: 'Jane' }); const result2 = await db.select().from(usersTable).orderBy(asc(usersTable.id)); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('json insert', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); test('insert with overridden default values', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('insert many', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([ { id: 1, name: 'John' }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, { id: 3, name: 'Jane' }, { id: 4, name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }).from(usersTable) .orderBy(asc(usersTable.id)); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('insert many with returning', async (ctx) => { const { db } = ctx.singlestore; const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); expect(result[0].affectedRows).toBe(4); }); test('select with group by as field', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with exists', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const user = alias(usersTable, 'user'); const result = await db.select({ name: usersTable.name }).from(usersTable).where( exists( db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), ), ) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }]); }); test('select with group by as sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('$default function', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists \`orders\``); await db.execute( sql` create table \`orders\` ( \`id\` serial primary key, \`region\` text not null, \`product\` text not null, \`amount\` int not null, \`quantity\` int not null ) `, ); await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); const selectedOrder = await db.select().from(orders); expect(selectedOrder).toEqual([{ id: 1, amount: 1, quantity: 1, region: 'Ukraine', product: 'random_string', }]); }); test('$default with empty array', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists \`s_orders\``); await db.execute( sql` create table \`s_orders\` ( \`id\` serial primary key, \`region\` text default 'Ukraine', \`product\` text not null ) `, ); const users = singlestoreTable('s_orders', { id: serial('id').primaryKey(), region: text('region').default('Ukraine'), product: text('product').$defaultFn(() => 'random_string'), }); await db.insert(users).values({ id: 1 }); const selectedOrder = await db.select().from(users); expect(selectedOrder).toEqual([{ id: 1, region: 'Ukraine', product: 'random_string', }]); }); test('select with group by as sql + column', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by as column + sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by complex query', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async (ctx) => { const { db } = ctx.singlestore; const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, params: [], }); }); test('Query check: Insert all defaults in 1 row', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); const query = db .insert(users) .values({}) .toSQL(); expect(query).toEqual({ sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default)', params: [], }); }); test('Query check: Insert all defaults in multiple rows', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state').default('UA'), }); const query = db .insert(users) .values([{}, {}]) .toSQL(); expect(query).toEqual({ sql: 'insert into `users` (`id`, `name`, `state`) values (default, default, default), (default, default, default)', params: [], }); }); test('Insert all defaults in 1 row', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('empty_insert_single', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, ); await db.insert(users).values({ id: 1 }); const res = await db.select().from(users); expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); }); test('Insert all defaults in multiple rows', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('empty_insert_multiple', { id: serial('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, ); await db.insert(users).values([{ id: 1 }, { id: 2 }]); const res = await db.select().from(users).orderBy(asc(users.id)); expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); }); test('build query insert with onDuplicate', async (ctx) => { const { db } = ctx.singlestore; const query = db.insert(usersTable) .values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }) .onDuplicateKeyUpdate({ set: { id: 1, name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (?, ?, default, ?, default) on duplicate key update `id` = ?, `name` = ?', params: [1, 'John', '["foo","bar"]', 1, 'John1'], }); }); test('insert with onDuplicate', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable) .values({ id: 1, name: 'John' }); await db.insert(usersTable) .values({ id: 1, name: 'John' }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert conflict', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable) .values({ id: 1, name: 'John' }); await expect((async () => { db.insert(usersTable).values({ id: 1, name: 'John1' }); })()).resolves.not.toThrowError(); }); test('insert conflict with ignore', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable) .values({ id: 1, name: 'John' }); await db.insert(usersTable) .ignore() .values({ id: 1, name: 'John1' }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async (ctx) => { const { db } = ctx.singlestore; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }).from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); test('full join with alias', async (ctx) => { const { db } = ctx.singlestore; const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select().from(users) .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)) .orderBy(asc(users.id)); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('select from alias', async (ctx) => { const { db } = ctx.singlestore; const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select() .from(user) .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)) .orderBy(asc(user.id)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('insert with spaces', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const statement = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .prepare(); const result = await statement.execute(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('insert: placeholders on columns with encoder', async (ctx) => { const { db } = ctx.singlestore; const date = new Date('2024-08-07T15:30:00Z'); const statement = db.insert(usersTable).values({ id: 1, name: 'John', createdAt: sql.placeholder('createdAt'), }).prepare(); await statement.execute({ createdAt: date }); const result = await db .select({ id: usersTable.id, createdAt: usersTable.createdAt, }) .from(usersTable); expect(result).toEqual([ { id: 1, createdAt: date }, ]); }); test('prepared statement reuse', async (ctx) => { const { db } = ctx.singlestore; const stmt = db.insert(usersTable).values({ verified: true, id: placeholder('id'), name: placeholder('name'), }).prepare(); for (let i = 0; i < 10; i++) { await stmt.execute({ id: i + 1, name: `John ${i}` }); } const result = await db.select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified, }).from(usersTable) .orderBy(asc(usersTable.id)); expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, { id: 4, name: 'John 3', verified: true }, { id: 5, name: 'John 4', verified: true }, { id: 6, name: 'John 5', verified: true }, { id: 7, name: 'John 6', verified: true }, { id: 8, name: 'John 7', verified: true }, { id: 9, name: 'John 8', verified: true }, { id: 10, name: 'John 9', verified: true }, ]); }); test('prepared statement with placeholder in .where', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const stmt = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .where(eq(usersTable.id, placeholder('id'))) .prepare(); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('migrator', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists cities_migration`); await db.execute(sql`drop table if exists users_migration`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists __drizzle_migrations`); await migrate(db, { migrationsFolder: './drizzle2/singlestore' }); await db.insert(usersMigratorTable).values({ id: 1, name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table cities_migration`); await db.execute(sql`drop table users_migration`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table __drizzle_migrations`); }); test('insert via db.execute + select via db.execute', async (ctx) => { const { db } = ctx.singlestore; await db.execute( sql`insert into ${usersTable} (${new Name(usersTable.id.name)},${new Name( usersTable.name.name, )}) values (1,${'John'})`, ); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); expect(result[0]).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async (ctx) => { const { db } = ctx.singlestore; const inserted = await db.execute( db.insert(usersTable).values({ id: 1, name: 'John' }), ); expect(inserted[0].affectedRows).toBe(1); }); test('insert + select all possible dates', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists \`datestable\``); await db.execute( sql` create table \`datestable\` ( \`date\` date, \`date_as_string\` date, \`time\` time, \`datetime\` datetime, \`datetime_as_string\` datetime, \`timestamp\` timestamp(6), \`timestamp_as_string\` timestamp(6), \`year\` year ) `, ); const date = new Date('2022-11-11'); const dateWithMilliseconds = new Date('2022-11-11 12:12:12.123'); await db.insert(datesTable).values({ date: date, dateAsString: '2022-11-11', time: '12:12:12', datetime: date, year: 22, datetimeAsString: '2022-11-11 12:12:12', timestamp: dateWithMilliseconds, timestampAsString: '2022-11-11 12:12:12.123', }); const res = await db.select().from(datesTable); expect(res[0]?.date).toBeInstanceOf(Date); expect(res[0]?.datetime).toBeInstanceOf(Date); expect(typeof res[0]?.dateAsString).toBe('string'); expect(typeof res[0]?.datetimeAsString).toBe('string'); expect(res).toEqual([{ date: toLocalDate(new Date('2022-11-11')), dateAsString: '2022-11-11', time: '12:12:12', datetime: new Date('2022-11-11'), year: 2022, datetimeAsString: '2022-11-11 12:12:12', timestamp: new Date('2022-11-11 12:12:12.123'), timestampAsString: '2022-11-11 12:12:12.123000', }]); await db.execute(sql`drop table if exists \`datestable\``); }); const tableWithEnums = singlestoreTable('enums_test_case', { id: serial('id').primaryKey(), enum1: singlestoreEnum('enum1', ['a', 'b', 'c']).notNull(), enum2: singlestoreEnum('enum2', ['a', 'b', 'c']).default('a'), enum3: singlestoreEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), }); test('SingleStore enum test case #1', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists \`enums_test_case\``); await db.execute(sql` create table \`enums_test_case\` ( \`id\` serial primary key, \`enum1\` ENUM('a', 'b', 'c') not null, \`enum2\` ENUM('a', 'b', 'c') default 'a', \`enum3\` ENUM('a', 'b', 'c') not null default 'b' ) `); await db.insert(tableWithEnums).values([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum3: 'c' }, { id: 3, enum1: 'a' }, ]); const res = await db.select().from(tableWithEnums).orderBy(asc(tableWithEnums.id)); await db.execute(sql`drop table \`enums_test_case\``); expect(res).toEqual([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, ]); }); test('left join (flat object fields)', async (ctx) => { const { db } = ctx.singlestore; await db.insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); const res = await db.select({ userId: users2Table.id, userName: users2Table.name, cityId: citiesTable.id, cityName: citiesTable.name, }).from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) .orderBy(users2Table.id); expect(res).toEqual([ { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, { userId: 2, userName: 'Jane', cityId: null, cityName: null }, ]); }); test('left join (grouped fields)', async (ctx) => { const { db } = ctx.singlestore; await db.insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); const res = await db.select({ id: users2Table.id, user: { name: users2Table.name, nameUpper: sql`upper(${users2Table.name})`, }, city: { id: citiesTable.id, name: citiesTable.name, nameUpper: sql`upper(${citiesTable.name})`, }, }).from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) .orderBy(asc(users2Table.id)); expect(res).toEqual([ { id: 1, user: { name: 'John', nameUpper: 'JOHN' }, city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, }, { id: 2, user: { name: 'Jane', nameUpper: 'JANE' }, city: null, }, ]); }); test('select from a many subquery', async (ctx) => { const { db } = ctx.singlestore; await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 2 }, ]); const res = await db.select({ population: db.select({ count: count().as('count') }).from(users2Table).where( eq(users2Table.cityId, citiesTable.id), ).as( 'population', ), name: citiesTable.name, }).from(citiesTable); expectTypeOf(res).toEqualTypeOf< { population: number; name: string; }[] >(); expect(res).toStrictEqual(expect.arrayContaining([{ population: 1, name: 'Paris', }, { population: 2, name: 'London', }])); }); test('select from a one subquery', async (ctx) => { const { db } = ctx.singlestore; await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 2 }, ]); const res = await db.select({ cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id)) .as( 'cityName', ), name: users2Table.name, }).from(users2Table); expectTypeOf(res).toEqualTypeOf< { cityName: string; name: string; }[] >(); expect(res).toStrictEqual(expect.arrayContaining([{ cityName: 'Paris', name: 'John', }, { cityName: 'London', name: 'Jane', }, { cityName: 'London', name: 'Jack', }])); }); test('left join (all fields)', async (ctx) => { const { db } = ctx.singlestore; await db.insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); const res = await db.select().from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) .orderBy(asc(users2Table.id)); expect(res).toEqual([ { users2: { id: 1, name: 'John', cityId: 1, }, cities: { id: 1, name: 'Paris', }, }, { users2: { id: 2, name: 'Jane', cityId: null, }, cities: null, }, ]); }); test('join subquery', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists \`courses\``); await db.execute(sql`drop table if exists \`course_categories\``); await db.execute( sql` create table \`course_categories\` ( \`id\` serial primary key, \`name\` text not null ) `, ); await db.execute( sql` create table \`courses\` ( \`id\` serial primary key, \`name\` text not null, \`category_id\` int ) `, ); await db.insert(courseCategoriesTable).values([ { id: 1, name: 'Category 1' }, { id: 2, name: 'Category 2' }, { id: 3, name: 'Category 3' }, { id: 4, name: 'Category 4' }, ]); await db.insert(coursesTable).values([ { id: 1, name: 'Development', categoryId: 2 }, { id: 2, name: 'IT & Software', categoryId: 3 }, { id: 3, name: 'Marketing', categoryId: 4 }, { id: 4, name: 'Design', categoryId: 1 }, ]); const sq2 = db .select({ categoryId: courseCategoriesTable.id, category: courseCategoriesTable.name, total: sql`count(${courseCategoriesTable.id})`, }) .from(courseCategoriesTable) .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) .as('sq2'); const res = await db .select({ courseName: coursesTable.name, categoryId: sq2.categoryId, }) .from(coursesTable) .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) .orderBy(coursesTable.name); expect(res).toEqual([ { courseName: 'Design', categoryId: 1 }, { courseName: 'Development', categoryId: 2 }, { courseName: 'IT & Software', categoryId: 3 }, { courseName: 'Marketing', categoryId: 4 }, ]); await db.execute(sql`drop table if exists \`courses\``); await db.execute(sql`drop table if exists \`course_categories\``); }); test('with ... select', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists \`orders\``); await db.execute( sql` create table \`orders\` ( \`id\` serial primary key, \`region\` text not null, \`product\` text not null, \`amount\` int not null, \`quantity\` int not null ) `, ); await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const regionalSales = db .$with('regional_sales') .as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = db .$with('top_regions') .as( db .select({ region: regionalSales.region, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, productSales: sql`cast(sum(${orders.amount}) as unsigned)`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product); expect(result).toEqual([ { region: 'Europe', product: 'A', productUnits: 3, productSales: 30, }, { region: 'Europe', product: 'B', productUnits: 5, productSales: 50, }, { region: 'US', product: 'A', productUnits: 7, productSales: 70, }, { region: 'US', product: 'B', productUnits: 9, productSales: 90, }, ]); }); test('with ... update', async (ctx) => { const { db } = ctx.singlestore; const products = singlestoreTable('products', { id: serial('id').primaryKey(), price: decimal('price', { precision: 15, scale: 2, }).notNull(), cheap: boolean('cheap').notNull().default(false), }); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql` create table ${products} ( id serial primary key, price decimal(15, 2) not null, cheap boolean not null default false ) `); await db.insert(products).values([ { id: 1, price: '10.99' }, { id: 2, price: '25.85' }, { id: 3, price: '32.99' }, { id: 4, price: '2.50' }, { id: 5, price: '4.59' }, ]); const averagePrice = db .$with('average_price') .as( db .select({ value: sql`avg(${products.price})`.as('value'), }) .from(products), ); await db .with(averagePrice) .update(products) .set({ cheap: true, }) .where(lt(products.price, sql`(select * from ${averagePrice})`)); const result = await db .select({ id: products.id, }) .from(products) .where(eq(products.cheap, true)) .orderBy(asc(products.id)); expect(result).toEqual([ { id: 1 }, { id: 4 }, { id: 5 }, ]); }); test('with ... delete', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists \`orders\``); await db.execute( sql` create table \`orders\` ( \`id\` serial primary key, \`region\` text not null, \`product\` text not null, \`amount\` int not null, \`quantity\` int not null ) `, ); await db.insert(orders).values([ { id: 1, region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { id: 2, region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { id: 3, region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { id: 4, region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { id: 5, region: 'US', product: 'A', amount: 30, quantity: 3 }, { id: 6, region: 'US', product: 'A', amount: 40, quantity: 4 }, { id: 7, region: 'US', product: 'B', amount: 40, quantity: 4 }, { id: 8, region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const averageAmount = db .$with('average_amount') .as( db .select({ value: sql`avg(${orders.amount})`.as('value'), }) .from(orders), ); await db .with(averageAmount) .delete(orders) .where(gt(orders.amount, sql`(select * from ${averageAmount})`)); const result = await db .select({ id: orders.id, }) .from(orders) .orderBy(asc(orders.id)); expect(result).toEqual([ { id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, ]); }); test('select from subquery sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(users2Table).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); const sq = db .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) .from(users2Table) .orderBy(asc(users2Table.id)) .as('sq'); const res = await db.select({ name: sq.name }).from(sq); expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); }); test('select a field without joining its table', (ctx) => { const { db } = ctx.singlestore; expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); }); test('select all fields from subquery without alias', (ctx) => { const { db } = ctx.singlestore; const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); expect(() => db.select().from(sq).prepare()).toThrowError(); }); test('select count()', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); const res = await db.select({ count: sql`count(*)` }).from(usersTable); expect(res).toEqual([{ count: 2 }]); }); test('select for ...', (ctx) => { const { db } = ctx.singlestore; { const query = db.select().from(users2Table).for('update').toSQL(); expect(query.sql).toMatch(/ for update$/); } { const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); expect(query.sql).toMatch(/ for share skip locked$/); } { const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); expect(query.sql).toMatch(/ for update nowait$/); } }); test('having', async (ctx) => { const { db } = ctx.singlestore; await db.insert(citiesTable).values([{ id: 1, name: 'London' }, { id: 2, name: 'Paris' }, { id: 3, name: 'New York', }]); await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { id: 3, name: 'Jack', cityId: 2, }]); const result = await db .select({ id: citiesTable.id, name: sql`upper(${citiesTable.name})`.as('upper_name'), usersCount: sql`count(${users2Table.id})`.as('users_count'), }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(citiesTable.id) .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name); expect(result).toEqual([ { id: 1, name: 'LONDON', usersCount: 2, }, { id: 2, name: 'PARIS', usersCount: 1, }, ]); }); // TODO: Unskip when views are supported /* test.skip('view', async (ctx) => { const { db } = ctx.singlestore; const newYorkers1 = singlestoreView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); const newYorkers2 = singlestoreView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); const newYorkers3 = singlestoreView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).existing(); await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); await db.insert(citiesTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); await db.insert(users2Table).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { id: 3, name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop view ${newYorkers1}`); }); */ test('select from raw sql', async (ctx) => { const { db } = ctx.singlestore; const result = await db.select({ id: sql`id`, name: sql`name`, }).from(sql`(select 1 as id, 'John' as name) as users`); Expect>; expect(result).toEqual([ { id: 1, name: 'John' }, ]); }); test('select from raw sql with joins', async (ctx) => { const { db } = ctx.singlestore; const result = await db .select({ id: sql`users.id`, name: sql`users.name`, userCity: sql`users.city`, cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); Expect>; expect(result).toEqual([ { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, ]); }); test('join on aliased sql from select', async (ctx) => { const { db } = ctx.singlestore; const result = await db .select({ userId: sql`users.id`.as('userId'), name: sql`users.name`, userCity: sql`users.city`, cityId: sql`cities.id`.as('cityId'), cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('join on aliased sql from with clause', async (ctx) => { const { db } = ctx.singlestore; const users = db.$with('users').as( db.select({ id: sql`id`.as('userId'), name: sql`name`.as('userName'), city: sql`city`.as('city'), }).from( sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, ), ); const cities = db.$with('cities').as( db.select({ id: sql`id`.as('cityId'), name: sql`name`.as('cityName'), }).from( sql`(select 1 as id, 'Paris' as name) as cities`, ), ); const result = await db .with(users, cities) .select({ userId: users.id, name: users.name, userCity: users.city, cityId: cities.id, cityName: cities.name, }) .from(users) .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('prefixed table', async (ctx) => { const { db } = ctx.singlestore; const singlestoreTable = singlestoreTableCreator((name) => `myprefix_${name}`); const users = singlestoreTable('test_prefixed_table_with_unique_name', { id: int('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, ); await db.insert(users).values({ id: 1, name: 'John' }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, name: 'John' }]); await db.execute(sql`drop table ${users}`); }); test('orderBy with aliased column', (ctx) => { const { db } = ctx.singlestore; const query = db.select({ test: sql`something`.as('test'), }).from(users2Table).orderBy((fields) => fields.test).toSQL(); expect(query.sql).toBe('select something as `test` from `users2` order by `test`'); }); test('timestamp timezone', async (ctx) => { const { db } = ctx.singlestore; const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); await db.insert(usersTable).values({ id: 1, name: 'With default times' }); await db.insert(usersTable).values({ id: 2, name: 'Without default times', createdAt: date, }); const users = await db.select().from(usersTable).orderBy(asc(usersTable.id)); // check that the timestamps are set correctly for default times expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); // check that the timestamps are set correctly for non default times expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); }); test('transaction', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users_transactions', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); const products = singlestoreTable('products_transactions', { id: serial('id').primaryKey(), price: int('price').notNull(), stock: int('stock').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql`create table users_transactions (id serial not null primary key, balance int not null)`); await db.execute( sql`create table products_transactions (id serial not null primary key, price int not null, stock int not null)`, ); const [{ insertId: userId }] = await db.insert(users).values({ id: 1, balance: 100 }); const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); const [{ insertId: productId }] = await db.insert(products).values({ id: 1, price: 10, stock: 10 }); const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); await db.transaction(async (tx) => { await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, balance: 90 }]); await db.execute(sql`drop table ${users}`); await db.execute(sql`drop table ${products}`); }); test('transaction rollback', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users_transactions_rollback', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table users_transactions_rollback (id serial not null primary key, balance int not null)`, ); await expect((async () => { await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); tx.rollback(); }); })()).rejects.toThrowError(TransactionRollbackError); const result = await db.select().from(users); expect(result).toEqual([]); await db.execute(sql`drop table ${users}`); }); test('join subquery with join', async (ctx) => { const { db } = ctx.singlestore; const internalStaff = singlestoreTable('internal_staff', { userId: int('user_id').notNull(), }); const customUser = singlestoreTable('custom_user', { id: int('id').notNull(), }); const ticket = singlestoreTable('ticket', { staffId: int('staff_id').notNull(), }); await db.execute(sql`drop table if exists ${internalStaff}`); await db.execute(sql`drop table if exists ${customUser}`); await db.execute(sql`drop table if exists ${ticket}`); await db.execute(sql`create table internal_staff (user_id integer not null)`); await db.execute(sql`create table custom_user (id integer not null)`); await db.execute(sql`create table ticket (staff_id integer not null)`); await db.insert(internalStaff).values({ userId: 1 }); await db.insert(customUser).values({ id: 1 }); await db.insert(ticket).values({ staffId: 1 }); const subq = db .select() .from(internalStaff) .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) .as('internal_staff'); const mainQuery = await db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); expect(mainQuery).toEqual([{ ticket: { staffId: 1 }, internal_staff: { internal_staff: { userId: 1 }, custom_user: { id: 1 }, }, }]); await db.execute(sql`drop table ${internalStaff}`); await db.execute(sql`drop table ${customUser}`); await db.execute(sql`drop table ${ticket}`); }); // TODO: Unskip when views are supported /* test.skip('subquery with view', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users_subquery_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }); const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 2 }, { id: 3, name: 'Jack', cityId: 1 }, { id: 4, name: 'Jill', cityId: 2 }, ]); const sq = db.$with('sq').as(db.select().from(newYorkers)); const result = await db.with(sq).select().from(sq).orderBy(asc(sq.id)); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 3, name: 'Jack', cityId: 1 }, ]); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); }); */ // TODO: Unskip when views are supported /* test.skip('join view as subquery', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users_join_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }); const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 2 }, { id: 3, name: 'Jack', cityId: 1 }, { id: 4, name: 'Jill', cityId: 2 }, ]); const sq = db.select().from(newYorkers).as('new_yorkers_sq'); const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).orderBy(asc(users.id)); expect(result).toEqual([ { users_join_view: { id: 1, name: 'John', cityId: 1 }, new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, }, { users_join_view: { id: 2, name: 'Jane', cityId: 2 }, new_yorkers_sq: null, }, { users_join_view: { id: 3, name: 'Jack', cityId: 1 }, new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, }, { users_join_view: { id: 4, name: 'Jill', cityId: 2 }, new_yorkers_sq: null, }, ]); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); }); */ test('select iterator', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users_iterator', { id: serial('id').primaryKey(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key)`); await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); const iter = db.select().from(users) .orderBy(asc(users.id)) .iterator(); const result: typeof users.$inferSelect[] = []; for await (const row of iter) { result.push(row); } expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); test('select iterator w/ prepared statement', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users_iterator', { id: serial('id').primaryKey(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key)`); await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); const prepared = db.select().from(users) .orderBy(asc(users.id)) .prepare(); const iter = prepared.iterator(); const result: typeof users.$inferSelect[] = []; for await (const row of iter) { result.push(row); } expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); test('insert undefined', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.insert(users).values({ name: undefined }); })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); test('update undefined', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.update(users).set({ name: undefined }); })()).rejects.toThrowError(); await expect((async () => { await db.update(users).set({ id: 1, name: undefined }); })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); test('utc config for datetime', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists \`datestable\``); await db.execute( sql` create table \`datestable\` ( \`datetime_utc\` datetime(6), \`datetime\` datetime(6) ) `, ); const datesTable = singlestoreTable('datestable', { datetimeUTC: datetime('datetime_utc', { mode: 'date' }), datetime: datetime('datetime'), }); const dateObj = new Date('2022-11-11'); const dateUtc = new Date('2022-11-11T12:12:12.122Z'); await db.insert(datesTable).values({ datetimeUTC: dateUtc, datetime: dateObj, }); const res = await db.select().from(datesTable); const [rawSelect] = await db.execute(sql`select \`datetime_utc\` from \`datestable\``); const selectedRow = (rawSelect as unknown as [{ datetime_utc: string }])[0]; expect(selectedRow.datetime_utc).toBe('2022-11-11 12:12:12.122000'); expect(new Date(selectedRow.datetime_utc.replace(' ', 'T') + 'Z')).toEqual(dateUtc); expect(res[0]?.datetime).toBeInstanceOf(Date); expect(res[0]?.datetimeUTC).toBeInstanceOf(Date); expect(res).toEqual([{ datetimeUTC: dateUtc, datetime: new Date('2022-11-11'), }]); await db.execute(sql`drop table if exists \`datestable\``); }); // TODO (https://memsql.atlassian.net/browse/MCDB-63261) allow chaining limit and orderby in subquery test('set operations (union) from query builder with subquery', async (ctx) => { const { db } = ctx.singlestore; await setupSetOperationTest(db); const citiesQuery = db .select({ id: citiesTable.id, name: citiesTable.name, orderCol: sql`0`.as('orderCol'), }) .from(citiesTable); const usersQuery = db .select({ id: users2Table.id, name: users2Table.name, orderCol: sql`1`.as('orderCol'), }) .from(users2Table); const unionQuery = db .select({ id: sql`id`, name: sql`name`, }) .from( citiesQuery.union(usersQuery).as('combined'), ) .orderBy(sql`orderCol`, sql`id`) .limit(8); const result = await unionQuery; expect(result).toHaveLength(8); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, { id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jack' }, { id: 4, name: 'Peter' }, { id: 5, name: 'Ben' }, ]); // union should throw if selected fields are not in the same order await expect((async () => { db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).union( db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table), ); })()).rejects.toThrowError(); }); test('set operations (union) as function', async (ctx) => { const { db } = ctx.singlestore; await setupSetOperationTest(db); const result = await union( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 1, name: 'John' }, ]); await expect((async () => { union( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (union all) from query builder', async (ctx) => { const { db } = ctx.singlestore; await setupSetOperationTest(db); const sq = db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).orderBy(asc(sql`id`)).limit(2).unionAll( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).orderBy(asc(sql`id`)).limit(2), ).as('sq'); const result = await db.select().from(sq).orderBy(asc(sql`id`)).limit(3); expect(result).toHaveLength(3); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 1, name: 'New York' }, { id: 2, name: 'London' }, ]); await expect((async () => { db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).limit(2).unionAll( db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).limit(2), ).orderBy(asc(sql`id`)); })()).rejects.toThrowError(); }); test('set operations (union all) as function', async (ctx) => { const { db } = ctx.singlestore; await setupSetOperationTest(db); const sq = unionAll( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).as('sq'); const result = await db.select().from(sq).limit(1); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); await expect((async () => { unionAll( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).limit(1); })()).rejects.toThrowError(); }); test('set operations (intersect) from query builder', async (ctx) => { const { db } = ctx.singlestore; await setupSetOperationTest(db); const sq = db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(gt(citiesTable.id, 1)), ) .as('sq'); const result = await db.select().from(sq).orderBy(asc(sql`id`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect((async () => { db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(gt(citiesTable.id, 1)), ); })()).rejects.toThrowError(); }); test('set operations (intersect) as function', async (ctx) => { const { db } = ctx.singlestore; await setupSetOperationTest(db); const sq = await intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).as('sq'); const result = await db.select().from(sq).limit(1); expect(result).toHaveLength(0); expect(result).toEqual([]); await expect((async () => { intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), ).limit(1); })()).rejects.toThrowError(); }); test('set operations (except) from query builder', async (ctx) => { const { db } = ctx.singlestore; await setupSetOperationTest(db); const result = await db .select() .from(citiesTable).except( db .select() .from(citiesTable).where(gt(citiesTable.id, 1)), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); }); test('set operations (except) as function', async (ctx) => { const { db } = ctx.singlestore; await setupSetOperationTest(db); const sq = except( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable), db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).as('sq'); const result = await db.select().from(sq).limit(3); expect(result).toHaveLength(2); expect(result).toContainEqual({ id: 2, name: 'London' }); expect(result).toContainEqual({ id: 3, name: 'Tampa' }); await expect((async () => { except( db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable), db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).limit(3); })()).rejects.toThrowError(); }); test('define constraints as array', async (ctx) => { const { db } = ctx.singlestore; const table = singlestoreTable('name', { id: int(), }, (t) => [ index('name').on(t.id), primaryKey({ columns: [t.id], name: 'custom' }), ]); const { indexes, primaryKeys } = getTableConfig(table); expect(indexes.length).toBe(1); expect(primaryKeys.length).toBe(1); }); test('define constraints as array inside third param', async (ctx) => { const { db } = ctx.singlestore; const table = singlestoreTable('name', { id: int(), }, (t) => [ [index('name').on(t.id), primaryKey({ columns: [t.id], name: 'custom' })], ]); const { indexes, primaryKeys } = getTableConfig(table); expect(indexes.length).toBe(1); expect(primaryKeys.length).toBe(1); }); test.skip('set operations (mixed) from query builder', async (ctx) => { const { db } = ctx.singlestore; await setupSetOperationTest(db); const sq1 = unionAll( db .select() .from(citiesTable).where(gt(citiesTable.id, 1)), db.select().from(citiesTable).where(eq(citiesTable.id, 2)), ).as('sq1'); const sq2 = await db.select().from(sq1).orderBy(asc(sql`id`)).as('sq2'); const sq3 = await db.select().from(sq2).limit(1).offset(1).as('sq3'); const result = await db .select() .from(citiesTable) .except( db .select() .from(sq3), ); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 3, name: 'Tampa' }, { id: 1, name: 'New York' }, ]); await expect((async () => { db .select() .from(citiesTable).except( ({ unionAll }) => unionAll( db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).where(gt(citiesTable.id, 1)), db.select().from(citiesTable).where(eq(citiesTable.id, 2)), ), ); })()).rejects.toThrowError(); }); test('set operations (mixed all) as function with subquery', async (ctx) => { const { db } = ctx.singlestore; await setupSetOperationTest(db); const sq1 = except( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id, 5)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 7)), ).as('sq1'); const sq2 = await db.select().from(sq1).orderBy(asc(sql`id`)).as('sq2'); const sq3 = await db.select().from(sq2).limit(1).as('sq3'); const result = await union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db.select().from(sq3), db .select().from(citiesTable).where(gt(citiesTable.id, 1)), ); expect(result).toHaveLength(4); // multiple results possible as a result of the filters >= 5 and ==7 because singlestore doesn't guarantee order // dynamically validate results const hasValidEntry = (entry: { id: number; name: string }) => { if (entry.id === 1) return entry.name === 'John'; if (entry.id > 1 && entry.id < 5) return entry.name === 'Tampa' || entry.name === 'London'; if (entry.id >= 5 && entry.id !== 7) return true; // Accept any entry with id >= 5 and not 7 return false; }; for (const entry of result) { expect(hasValidEntry(entry)).toBe(true); } await expect((async () => { union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), except( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id, 5)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 7)), ).limit(1), db .select().from(citiesTable).where(gt(citiesTable.id, 1)), ); })()).rejects.toThrowError(); }); test('aggregate function: count', async (ctx) => { const { db } = ctx.singlestore; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: count() }).from(table); const result2 = await db.select({ value: count(table.a) }).from(table); const result3 = await db.select({ value: countDistinct(table.name) }).from(table); expect(result1[0]?.value).toBe(7); expect(result2[0]?.value).toBe(5); expect(result3[0]?.value).toBe(6); }); test('aggregate function: avg', async (ctx) => { const { db } = ctx.singlestore; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: avg(table.b) }).from(table); const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('33.3333'); expect(result2[0]?.value).toBe(null); expect(result3[0]?.value).toBe('42.5000'); }); test('aggregate function: sum', async (ctx) => { const { db } = ctx.singlestore; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: sum(table.b) }).from(table); const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('200'); expect(result2[0]?.value).toBe(null); expect(result3[0]?.value).toBe('170'); }); test('aggregate function: max', async (ctx) => { const { db } = ctx.singlestore; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: max(table.b) }).from(table); const result2 = await db.select({ value: max(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(90); expect(result2[0]?.value).toBe(null); }); test('aggregate function: min', async (ctx) => { const { db } = ctx.singlestore; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: min(table.b) }).from(table); const result2 = await db.select({ value: min(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(10); expect(result2[0]?.value).toBe(null); }); test('simple vector search', async (ctx) => { const { db } = ctx.singlestore; const table = vectorSearchTable; const embedding = [0.42, 0.93, 0.88, 0.57, 0.32, 0.64, 0.76, 0.52, 0.19, 0.81]; // ChatGPT's 10 dimension embedding for "dogs are cool" not sure how accurate but it works await setupVectorSearchTest(db); const withRankEuclidean = db.select({ id: table.id, text: table.text, rank: sql`row_number() over (order by ${euclideanDistance(table.embedding, embedding)})`.as('rank'), }).from(table).as('with_rank'); const withRankDotProduct = db.select({ id: table.id, text: table.text, rank: sql`row_number() over (order by ${dotProduct(table.embedding, embedding)})`.as('rank'), }).from(table).as('with_rank'); const result1 = await db.select({ id: withRankEuclidean.id, text: withRankEuclidean.text }).from( withRankEuclidean, ).where(eq(withRankEuclidean.rank, 1)); const result2 = await db.select({ id: withRankDotProduct.id, text: withRankDotProduct.text }).from( withRankDotProduct, ).where(eq(withRankDotProduct.rank, 1)); expect(result1.length).toEqual(1); expect(result1[0]).toEqual({ id: 1, text: 'I like dogs' }); expect(result2.length).toEqual(1); expect(result2[0]).toEqual({ id: 1, text: 'I like dogs' }); }); test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists ${usersOnUpdate}`); await db.execute( sql` create table ${usersOnUpdate} ( id serial not null primary key, name text not null, update_counter integer default 1 not null, updated_at datetime(6), always_null text ) `, ); await db.insert(usersOnUpdate).values([ { id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jack' }, { id: 4, name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); const justDates = await db.select({ updatedAt }).from(usersOnUpdate); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 750; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists ${usersOnUpdate}`); await db.execute( sql` create table ${usersOnUpdate} ( id serial not null primary key, name text not null, update_counter integer default 1 not null, updated_at datetime(6), always_null text ) `, ); await db.insert(usersOnUpdate).values([ { id: 1, name: 'John', alwaysNull: 'this will will be null after updating' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jack' }, { id: 4, name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); const initial = await db.select({ id: usersOnUpdate.id, updatedAt: usersOnUpdate.updatedAt }).from(usersOnUpdate); await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); const justDates = await db.select({ id: usersOnUpdate.id, updatedAt: usersOnUpdate.updatedAt }).from( usersOnUpdate, ); const response = await db.select().from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { id: 1, name: 'Angel', updateCounter: 2, updatedAt: expect.any(Date), alwaysNull: null }, { id: 2, name: 'Jane', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, { id: 3, name: 'Jack', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, { id: 4, name: 'Jill', updateCounter: 1, updatedAt: expect.any(Date), alwaysNull: null }, ]); const initialRecord = initial.find((record) => record.id === 1); const updatedRecord = justDates.find((record) => record.id === 1); expect(initialRecord?.updatedAt?.valueOf()).not.toBe(updatedRecord?.updatedAt?.valueOf()); const msDelay = 2000; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); // mySchema tests test('mySchema :: select all fields', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); const result = await db.select().from(usersMySchemaTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('mySchema :: select sql', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersMySchemaTable.name})`, }).from(usersMySchemaTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: select typed sql', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersMySchemaTable.name})`, }).from(usersMySchemaTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('mySchema :: select distinct', async (ctx) => { const { db } = ctx.singlestore; const usersDistinctTable = singlestoreTable('users_distinct', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]); const users = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ); await db.execute(sql`drop table ${usersDistinctTable}`); expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); }); test('mySchema :: insert returning sql', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); const [result, _] = await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); expect(result.insertId).toBe(1); }); test('mySchema :: delete returning sql', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ name: 'John' }); const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); expect(users[0].affectedRows).toBe(1); }); test('mySchema :: update with returning partial', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); const updatedUsers = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where( eq(usersMySchemaTable.name, 'John'), ); const users = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( usersMySchemaTable, ) .where( eq(usersMySchemaTable.id, 1), ); expect(updatedUsers[0].changedRows).toBe(1); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('mySchema :: delete with returning all fields', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersMySchemaTable).values({ name: 'John' }); const deletedUser = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('mySchema :: insert + select', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); const result = await db.select().from(usersMySchemaTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersMySchemaTable).values({ id: 2, name: 'Jane' }); const result2 = await db.select().from(usersMySchemaTable).orderBy(asc(usersMySchemaTable.id)); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('mySchema :: insert with overridden default values', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ id: 1, name: 'John', verified: true }); const result = await db.select().from(usersMySchemaTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('mySchema :: insert many', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values([ { id: 1, name: 'John' }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, { id: 3, name: 'Jane' }, { id: 4, name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name, jsonb: usersMySchemaTable.jsonb, verified: usersMySchemaTable.verified, }).from(usersMySchemaTable) .orderBy(asc(usersMySchemaTable.id)); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('mySchema :: select with group by as field', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane', }]); const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.name) .orderBy(asc(usersMySchemaTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('mySchema :: select with group by as column + sql', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane', }]); const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`) .orderBy(asc(usersMySchemaTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('mySchema :: build query', async (ctx) => { const { db } = ctx.singlestore; const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) .toSQL(); expect(query).toEqual({ sql: `select \`id\`, \`name\` from \`mySchema\`.\`userstest\` group by \`mySchema\`.\`userstest\`.\`id\`, \`mySchema\`.\`userstest\`.\`name\``, params: [], }); }); test('mySchema :: insert with spaces', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ id: 1, name: sql`'Jo h n'` }); const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( usersMySchemaTable, ); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('mySchema :: prepared statement with placeholder in .where', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.insert(usersMySchemaTable).values({ id: 1, name: 'John' }); const stmt = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name, }).from(usersMySchemaTable) .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) .prepare(); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('mySchema :: select from tables with same name from different schema using alias', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`truncate table \`mySchema\`.\`userstest\``); await db.execute(sql`drop table if exists \`userstest\``); await db.execute( sql` create table \`userstest\` ( \`id\` serial primary key, \`name\` text not null, \`verified\` boolean not null default false, \`jsonb\` json, \`created_at\` timestamp not null default now() ) `, ); await db.insert(usersMySchemaTable).values({ id: 10, name: 'Ivan' }); await db.insert(usersTable).values({ id: 11, name: 'Hans' }); const customerAlias = alias(usersTable, 'customer'); const result = await db .select().from(usersMySchemaTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersMySchemaTable.id, 10)); expect(result).toEqual([{ userstest: { id: 10, name: 'Ivan', verified: false, jsonb: null, createdAt: result[0]!.userstest.createdAt, }, customer: { id: 11, name: 'Hans', verified: false, jsonb: null, createdAt: result[0]!.customer!.createdAt, }, }]); }); test('insert $returningId: serial as id', async (ctx) => { const { db } = ctx.singlestore; const result = await db.insert(usersTable).values({ id: 1, name: 'John' }).$returningId(); expectTypeOf(result).toEqualTypeOf<{ id: number; }[]>(); expect(result).toStrictEqual([{ id: 1 }]); }); test('insert $returningId: serial as id, batch insert', async (ctx) => { const { db } = ctx.singlestore; const result = await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'John1' }]) .$returningId(); expectTypeOf(result).toEqualTypeOf<{ id: number; }[]>(); // singlestore auto increments when batch inserting, so the ids increment by one expect(result).toStrictEqual([{ id: 2 }, { id: 3 }]); }); test('insert $returningId: $default as primary key', async (ctx) => { const { db } = ctx.singlestore; const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; let iterator = 0; const usersTableDefFn = singlestoreTable('users_default_fn', { customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { const value = uniqueKeys[iterator]!; iterator++; return value; }), name: text('name').notNull(), }); await setupReturningFunctionsTest(db); const result = await db.insert(usersTableDefFn).values([{ name: 'John' }, { name: 'John1' }]) // ^? .$returningId(); expectTypeOf(result).toEqualTypeOf<{ customId: string; }[]>(); expect(result).toStrictEqual([{ customId: 'ao865jf3mcmkfkk8o5ri495z' }, { customId: 'dyqs529eom0iczo2efxzbcut', }]); }); test('insert $returningId: $default as primary key with value', async (ctx) => { const { db } = ctx.singlestore; const uniqueKeys = ['ao865jf3mcmkfkk8o5ri495z', 'dyqs529eom0iczo2efxzbcut']; let iterator = 0; const usersTableDefFn = singlestoreTable('users_default_fn', { customId: varchar('id', { length: 256 }).primaryKey().$defaultFn(() => { const value = uniqueKeys[iterator]!; iterator++; return value; }), name: text('name').notNull(), }); await setupReturningFunctionsTest(db); const result = await db.insert(usersTableDefFn).values([{ name: 'John', customId: 'test' }, { name: 'John1' }]) // ^? .$returningId(); expectTypeOf(result).toEqualTypeOf<{ customId: string; }[]>(); expect(result).toStrictEqual([{ customId: 'test' }, { customId: 'ao865jf3mcmkfkk8o5ri495z' }]); }); // TODO: Unkip this test when views are supported /* test.skip('mySchema :: view', async (ctx) => { const { db } = ctx.singlestore; const newYorkers1 = mySchema.view('new_yorkers') .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); const newYorkers2 = mySchema.view('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); const newYorkers3 = mySchema.view('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).existing(); await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); await db.insert(citiesMySchemaTable).values([{ id: 1, name: 'New York' }, { id: 2, name: 'Paris' }]); await db.insert(users2MySchemaTable).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { id: 3, name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop view ${newYorkers1}`); }); */ test('limit 0', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() .from(usersTable) .limit(0); expect(users).toEqual([]); }); test('limit -1', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() .from(usersTable) .limit(-1); expect(users.length).toBeGreaterThan(0); }); test('sql operator as cte', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); await db.insert(users).values([ { name: 'John' }, { name: 'Jane' }, ]); const sq1 = db.$with('sq', { userId: users.id, data: { name: users.name, }, }).as(sql`select * from ${users} where ${users.name} = 'John'`); const result1 = await db.with(sq1).select().from(sq1); const sq2 = db.$with('sq', { userId: users.id, data: { name: users.name, }, }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); const result2 = await db.with(sq2).select().from(sq1); expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); }); test('cross join', async (ctx) => { const { db } = ctx.singlestore; await db .insert(usersTable) .values([ { name: 'John' }, { name: 'Jane' }, ]); await db .insert(citiesTable) .values([ { name: 'Seattle' }, { name: 'New York City' }, ]); const result = await db .select({ user: usersTable.name, city: citiesTable.name, }) .from(usersTable) .crossJoin(citiesTable) .orderBy(usersTable.name, citiesTable.name); expect(result).toStrictEqual([ { city: 'New York City', user: 'Jane' }, { city: 'Seattle', user: 'Jane' }, { city: 'New York City', user: 'John' }, { city: 'Seattle', user: 'John' }, ]); }); test('left join (lateral)', async (ctx) => { const { db } = ctx.singlestore; await db .insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const sq = db .select({ userId: users2Table.id, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(eq(users2Table.cityId, citiesTable.id)) .as('sq'); const res = await db .select({ cityId: citiesTable.id, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .leftJoinLateral(sq, sql`true`) .orderBy(citiesTable.id); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, { cityId: 2, cityName: 'London', userId: null, userName: null }, ]); }); test('inner join (lateral)', async (ctx) => { const { db } = ctx.singlestore; await db .insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane' }]); const sq = db .select({ userId: users2Table.id, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(eq(users2Table.cityId, citiesTable.id)) .as('sq'); const res = await db .select({ cityId: citiesTable.id, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .innerJoinLateral(sq, sql`true`); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John' }, ]); }); test('cross join (lateral)', async (ctx) => { const { db } = ctx.singlestore; await db .insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Patrick', cityId: 2, }]); const sq = db .select({ userId: users2Table.id, userName: users2Table.name, cityId: users2Table.cityId, }) .from(users2Table) .where(eq(users2Table.cityId, citiesTable.id)) .as('sq'); const res = await db .select({ cityId: citiesTable.id, cityName: citiesTable.name, userId: sq.userId, userName: sq.userName, }) .from(citiesTable) .crossJoinLateral(sq) .orderBy(sq.userId, citiesTable.id); expect(res).toStrictEqual([ { cityId: 1, cityName: 'Paris', userId: 1, userName: 'John', }, { cityId: 2, cityName: 'London', userId: 2, userName: 'Jane', }, { cityId: 2, cityName: 'London', userId: 3, userName: 'Patrick', }, ]); }); test('test $onUpdateFn and $onUpdate works with sql value', async (ctx) => { const { db } = ctx.singlestore; const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), updatedAt: timestamp('updated_at') .notNull() .$onUpdate(() => sql`current_timestamp`), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql` create table ${users} ( \`id\` serial primary key, \`name\` text not null, \`updated_at\` timestamp not null ) `, ); await db.insert(users).values({ name: 'John', }); const insertResp = await db.select({ updatedAt: users.updatedAt }).from(users); await new Promise((resolve) => setTimeout(resolve, 1000)); const now = Date.now(); await new Promise((resolve) => setTimeout(resolve, 1000)); await db.update(users).set({ name: 'John', }); const updateResp = await db.select({ updatedAt: users.updatedAt }).from(users); expect(insertResp[0]?.updatedAt.getTime() ?? 0).lessThan(now); expect(updateResp[0]?.updatedAt.getTime() ?? 0).greaterThan(now); }); test('all types', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql` CREATE TABLE \`all_types\` ( \`scol\` serial, \`bigint53\` bigint, \`bigint64\` bigint, \`binary\` binary, \`boolean\` boolean, \`char\` char, \`date\` date, \`date_str\` date, \`datetime\` datetime, \`datetime_str\` datetime, \`decimal\` decimal, \`decimal_num\` decimal(30), \`decimal_big\` decimal(30), \`double\` double, \`float\` float, \`int\` int, \`json\` json, \`med_int\` mediumint, \`small_int\` smallint, \`real\` real, \`text\` text, \`time\` time, \`timestamp\` timestamp, \`timestamp_str\` timestamp, \`tiny_int\` tinyint, \`varbin\` varbinary(16), \`varchar\` varchar(255), \`year\` year, \`enum\` enum('enV1','enV2'), shard key(\`scol\`) ); `); await db.insert(allTypesTable).values({ serial: 1, bigint53: 9007199254740991, bigint64: 5044565289845416380n, binary: '1', boolean: true, char: 'c', date: new Date(1741743161623), dateStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), datetime: new Date(1741743161623), datetimeStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), decimal: '47521', decimalNum: 9007199254740991, decimalBig: 5044565289845416380n, double: 15.35325689124218, enum: 'enV1', float: 1.048596, real: 1.048596, text: 'C4-', int: 621, json: { str: 'strval', arr: ['str', 10], }, medInt: 560, smallInt: 14, time: '04:13:22', timestamp: new Date(1741743161623), timestampStr: new Date(1741743161623).toISOString().slice(0, 19).replace('T', ' '), tinyInt: 7, varbin: '1010110101001101', varchar: 'VCHAR', year: 2025, }); const rawRes = await db.select().from(allTypesTable); type ExpectedType = { serial: number; bigint53: number | null; bigint64: bigint | null; binary: string | null; boolean: boolean | null; char: string | null; date: Date | null; dateStr: string | null; datetime: Date | null; datetimeStr: string | null; decimal: string | null; decimalNum: number | null; decimalBig: bigint | null; double: number | null; float: number | null; int: number | null; json: unknown; medInt: number | null; smallInt: number | null; real: number | null; text: string | null; time: string | null; timestamp: Date | null; timestampStr: string | null; tinyInt: number | null; varbin: string | null; varchar: string | null; year: number | null; enum: 'enV1' | 'enV2' | null; }[]; const expectedRes: ExpectedType = [ { serial: 1, bigint53: 9007199254740991, bigint64: 5044565289845416380n, binary: '1', boolean: true, char: 'c', date: new Date('2025-03-12T00:00:00.000Z'), dateStr: '2025-03-12', datetime: new Date('2025-03-12T01:32:41.000Z'), datetimeStr: '2025-03-12 01:32:41', decimal: '47521', decimalNum: 9007199254740991, decimalBig: 5044565289845416380n, double: 15.35325689124218, float: 1.0486, int: 621, json: { arr: ['str', 10], str: 'strval' }, medInt: 560, smallInt: 14, real: 1.048596, text: 'C4-', time: '04:13:22', timestamp: new Date('2025-03-12T01:32:41.000Z'), timestampStr: '2025-03-12 01:32:41', tinyInt: 7, varbin: '1010110101001101', varchar: 'VCHAR', year: 2025, enum: 'enV1', }, ]; expectTypeOf(rawRes).toEqualTypeOf(); expect(rawRes).toStrictEqual(expectedRes); }); }); } ================================================ FILE: integration-tests/tests/singlestore/singlestore-custom.test.ts ================================================ import retry from 'async-retry'; import type Docker from 'dockerode'; import { asc, eq, Name, placeholder, sql } from 'drizzle-orm'; import type { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; import { drizzle } from 'drizzle-orm/singlestore'; import { alias, binary, customType, date, datetime, serial, singlestoreEnum, singlestoreTable, singlestoreTableCreator, text, time, varchar, year, } from 'drizzle-orm/singlestore-core'; import { migrate } from 'drizzle-orm/singlestore/migrator'; import * as mysql2 from 'mysql2/promise'; import { v4 as uuid } from 'uuid'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { toLocalDate } from '~/utils'; import { createDockerDB } from './singlestore-common'; const ENABLE_LOGGING = false; let db: SingleStoreDriverDatabase; let client: mysql2.Connection; let container: Docker.Container | undefined; beforeAll(async () => { let connectionString; if (process.env['SINGLESTORE_CONNECTION_STRING']) { connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; } else { const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); connectionString = conStr; container = contrainerObj; } client = await retry(async () => { client = await mysql2.createConnection({ uri: connectionString, supportBigNumbers: true }); await client.connect(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); await client.changeUser({ database: 'drizzle' }); db = drizzle(client, { logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end(); await container?.stop().catch(console.error); }); beforeEach((ctx) => { ctx.singlestore = { db, }; }); const customSerial = customType<{ data: number; notNull: true; default: true }>({ dataType() { return 'serial'; }, }); const customText = customType<{ data: string }>({ dataType() { return 'text'; }, }); const customBoolean = customType<{ data: boolean }>({ dataType() { return 'boolean'; }, fromDriver(value) { if (typeof value === 'boolean') { return value; } return value === 1; }, }); const customJson = (name: string) => customType<{ data: TData; driverData: string }>({ dataType() { return 'json'; }, toDriver(value: TData): string { return JSON.stringify(value); }, })(name); const customTimestamp = customType< { data: Date; driverData: string; config: { fsp: number } } >({ dataType(config) { const precision = config?.fsp === undefined ? '' : ` (${config.fsp})`; return `timestamp${precision}`; }, fromDriver(value: string): Date { return new Date(value); }, }); const customBinary = customType<{ data: string; driverData: Buffer; config: { length: number } }>({ dataType(config) { return config?.length === undefined ? `binary` : `binary(${config.length})`; }, toDriver(value) { return sql`UNHEX(${value})`; }, fromDriver(value) { return value.toString('hex'); }, }); const usersTable = singlestoreTable('userstest', { id: customSerial('id').primaryKey(), name: customText('name').notNull(), verified: customBoolean('verified').notNull().default(false), jsonb: customJson('jsonb'), createdAt: customTimestamp('created_at').notNull().default(sql`now()`), }); const datesTable = singlestoreTable('datestable', { date: date('date'), dateAsString: date('date_as_string', { mode: 'string' }), time: time('time'), datetime: datetime('datetime'), datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), year: year('year'), }); export const testTable = singlestoreTable('test_table', { id: customBinary('id', { length: 16 }).primaryKey(), sqlId: binary('sql_id', { length: 16 }), rawId: varchar('raw_id', { length: 64 }), }); const usersMigratorTable = singlestoreTable('users12', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }); beforeEach(async () => { await db.execute(sql`drop table if exists \`userstest\``); await db.execute(sql`drop table if exists \`datestable\``); await db.execute(sql`drop table if exists \`test_table\``); // await ctx.db.execute(sql`create schema public`); await db.execute( sql` create table \`userstest\` ( \`id\` serial primary key, \`name\` text not null, \`verified\` boolean not null default false, \`jsonb\` json, \`created_at\` timestamp not null default now() ) `, ); await db.execute( sql` create table \`datestable\` ( \`date\` date, \`date_as_string\` date, \`time\` time, \`datetime\` datetime, \`datetime_as_string\` datetime, \`year\` year ) `, ); await db.execute( sql` create table \`test_table\` ( \`id\` binary(16) primary key, \`sql_id\` binary(16), \`raw_id\` varchar(64) ) `, ); }); test('select all fields', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('select sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select typed sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('insert returning sql', async (ctx) => { const { db } = ctx.singlestore; const [result, _] = await db.insert(usersTable).values({ id: 1, name: 'John' }); expect(result.insertId).toBe(1); }); test('delete returning sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(users[0].affectedRows).toBe(1); }); test('update returning sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); expect(users[0].changedRows).toBe(1); }); test('update with returning all fields', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); expect(updatedUsers[0].changedRows).toBe(1); expect(users[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); test('update with returning partial', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(updatedUsers[0].changedRows).toBe(1); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('delete with returning partial', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('insert + select', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersTable).values({ id: 2, name: 'Jane' }); const result2 = await db.select().from(usersTable).orderBy(asc(usersTable.id)); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('json insert', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); test('insert with overridden default values', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('insert many', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([ { id: 1, name: 'John' }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, { id: 3, name: 'Jane' }, { id: 4, name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }).from(usersTable).orderBy(asc(usersTable.id)); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('insert many with returning', async (ctx) => { const { db } = ctx.singlestore; const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); expect(result[0].affectedRows).toBe(4); }); test('select with group by as field', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name).orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with group by as sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`).orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with group by as sql + column', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id).orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by as column + sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`).orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by complex query', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async (ctx) => { const { db } = ctx.singlestore; const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ sql: `select \`id\`, \`name\` from \`userstest\` group by \`userstest\`.\`id\`, \`userstest\`.\`name\``, params: [], }); }); test('build query insert with onDuplicate', async (ctx) => { const { db } = ctx.singlestore; const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: 'insert into `userstest` (`id`, `name`, `verified`, `jsonb`, `created_at`) values (default, ?, default, ?, default) on duplicate key update `name` = ?', params: ['John', '["foo","bar"]', 'John1'], }); }); test('insert with onDuplicate', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable) .values({ id: 1, name: 'John' }); await db.insert(usersTable) .values({ id: 1, name: 'John' }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert conflict', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable) .values({ id: 1, name: 'John' }); await expect((async () => { db.insert(usersTable).values({ id: 1, name: 'John1' }); })()).resolves.not.toThrowError(); }); test('insert conflict with ignore', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable) .values({ id: 1, name: 'John' }); await db.insert(usersTable) .ignore() .values({ id: 1, name: 'John1' }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert sql', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async (ctx) => { const { db } = ctx.singlestore; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }).from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); test('full join with alias', async (ctx) => { const { db } = ctx.singlestore; const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select().from(users) .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)) .orderBy(asc(users.id)); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('select from alias', async (ctx) => { const { db } = ctx.singlestore; const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select() .from(user) .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)) .orderBy(asc(user.id)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('insert with spaces', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const statement = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .prepare(); const result = await statement.execute(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement reuse', async (ctx) => { const { db } = ctx.singlestore; const stmt = db.insert(usersTable).values({ id: placeholder('id'), verified: true, name: placeholder('name'), }).prepare(); for (let i = 0; i < 10; i++) { await stmt.execute({ id: i + 1, name: `John ${i}` }); } const result = await db.select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified, }).from(usersTable).orderBy(asc(usersTable.id)); expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, { id: 4, name: 'John 3', verified: true }, { id: 5, name: 'John 4', verified: true }, { id: 6, name: 'John 5', verified: true }, { id: 7, name: 'John 6', verified: true }, { id: 8, name: 'John 7', verified: true }, { id: 9, name: 'John 8', verified: true }, { id: 10, name: 'John 9', verified: true }, ]); }); test('prepared statement with placeholder in .where', async (ctx) => { const { db } = ctx.singlestore; await db.insert(usersTable).values({ id: 1, name: 'John' }); const stmt = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .where(eq(usersTable.id, placeholder('id'))) .prepare(); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('migrator', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists cities_migration`); await db.execute(sql`drop table if exists users_migration`); await db.execute(sql`drop table if exists users12`); await db.execute(sql`drop table if exists __drizzle_migrations`); await migrate(db, { migrationsFolder: './drizzle2/singlestore' }); await db.insert(usersMigratorTable).values({ id: 1, name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql`drop table cities_migration`); await db.execute(sql`drop table users_migration`); await db.execute(sql`drop table users12`); await db.execute(sql`drop table __drizzle_migrations`); }); test('insert via db.execute + select via db.execute', async (ctx) => { const { db } = ctx.singlestore; await db.execute( sql`insert into ${usersTable} (${new Name(usersTable.id.name)}, ${new Name( usersTable.name.name, )}) values (1,${'John'})`, ); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); expect(result[0]).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async (ctx) => { const { db } = ctx.singlestore; const inserted = await db.execute( db.insert(usersTable).values({ name: 'John' }), ); expect(inserted[0].affectedRows).toBe(1); }); test('insert + select all possible dates', async (ctx) => { const { db } = ctx.singlestore; const date = new Date('2022-11-11'); await db.insert(datesTable).values({ date: date, dateAsString: '2022-11-11', time: '12:12:12', datetime: date, year: 22, datetimeAsString: '2022-11-11 12:12:12', }); const res = await db.select().from(datesTable); expect(res[0]?.date).toBeInstanceOf(Date); expect(res[0]?.datetime).toBeInstanceOf(Date); expect(res[0]?.dateAsString).toBeTypeOf('string'); expect(res[0]?.datetimeAsString).toBeTypeOf('string'); expect(res).toEqual([{ date: toLocalDate(new Date('2022-11-11')), dateAsString: '2022-11-11', time: '12:12:12', datetime: new Date('2022-11-11'), year: 2022, datetimeAsString: '2022-11-11 12:12:12', }]); }); const tableWithEnums = singlestoreTable('enums_test_case', { id: serial('id').primaryKey(), enum1: singlestoreEnum('enum1', ['a', 'b', 'c']).notNull(), enum2: singlestoreEnum('enum2', ['a', 'b', 'c']).default('a'), enum3: singlestoreEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), }); test('SingleStore enum test case #1', async (ctx) => { const { db } = ctx.singlestore; await db.execute(sql`drop table if exists \`enums_test_case\``); await db.execute(sql` create table \`enums_test_case\` ( \`id\` serial primary key, \`enum1\` ENUM('a', 'b', 'c') not null, \`enum2\` ENUM('a', 'b', 'c') default 'a', \`enum3\` ENUM('a', 'b', 'c') not null default 'b' ) `); await db.insert(tableWithEnums).values([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum3: 'c' }, { id: 3, enum1: 'a' }, ]); const res = await db.select().from(tableWithEnums).orderBy(asc(tableWithEnums.id)); await db.execute(sql`drop table \`enums_test_case\``); expect(res).toEqual([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, ]); }); test('custom binary', async (ctx) => { const { db } = ctx.singlestore; const id = uuid().replace(/-/g, ''); await db.insert(testTable).values({ id, sqlId: sql`UNHEX(${id})`, rawId: id, }); const res = await db.select().from(testTable); expect(res).toEqual([{ id, sqlId: Buffer.from(id, 'hex').toString(), rawId: id, }]); }); ================================================ FILE: integration-tests/tests/singlestore/singlestore-prefixed.test.ts ================================================ import retry from 'async-retry'; import type Docker from 'dockerode'; import type { Equal } from 'drizzle-orm'; import { asc, eq, getTableName, gt, inArray, Name, sql, TransactionRollbackError } from 'drizzle-orm'; import type { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; import { drizzle } from 'drizzle-orm/singlestore'; import { alias, boolean, date, datetime, int, json, serial, singlestoreEnum, singlestoreTable as singlestoreTableRaw, singlestoreTableCreator, /* singlestoreView, */ text, time, timestamp, uniqueIndex, year, } from 'drizzle-orm/singlestore-core'; import { migrate } from 'drizzle-orm/singlestore/migrator'; import * as mysql2 from 'mysql2/promise'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { Expect, toLocalDate } from '~/utils'; import { createDockerDB } from './singlestore-common'; const ENABLE_LOGGING = false; let db: SingleStoreDriverDatabase; let client: mysql2.Connection; let container: Docker.Container | undefined; beforeAll(async () => { let connectionString; if (process.env['SINGLESTORE_CONNECTION_STRING']) { connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; } else { const { connectionString: conStr, container: contrainerObj } = await createDockerDB(); connectionString = conStr; container = contrainerObj; } client = await retry(async () => { client = await mysql2.createConnection({ uri: connectionString, supportBigNumbers: true }); await client.connect(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); await client.changeUser({ database: 'drizzle' }); db = drizzle(client, { logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end(); await container?.stop().catch(console.error); }); const tablePrefix = 'drizzle_tests_'; const singlestoreTable = singlestoreTableCreator((name) => `${tablePrefix}${name}`); const usersTable = singlestoreTable('userstest', { id: serial('id').primaryKey(), name: text('name').notNull(), verified: boolean('verified').notNull().default(false), jsonb: json('jsonb').$type(), createdAt: timestamp('created_at').notNull().defaultNow(), }); const users2Table = singlestoreTable('users2', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id'), }); const citiesTable = singlestoreTable('cities', { id: serial('id').primaryKey(), name: text('name').notNull(), }); beforeEach(async () => { await db.execute(sql`drop table if exists ${usersTable}`); await db.execute(sql`drop table if exists ${users2Table}`); await db.execute(sql`drop table if exists ${citiesTable}`); await db.execute( sql` create table ${usersTable} ( \`id\` serial primary key, \`name\` text not null, \`verified\` boolean not null default false, \`jsonb\` json, \`created_at\` timestamp not null default now() ) `, ); await db.execute( sql` create table ${users2Table} ( \`id\` serial primary key, \`name\` text not null, \`city_id\` int ) `, ); await db.execute( sql` create table ${citiesTable} ( \`id\` serial primary key, \`name\` text not null ) `, ); }); test('select all fields', async () => { await db.insert(usersTable).values({ id: 1, name: 'John' }); const result = await db.select().from(usersTable); expect(result[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(result[0]!.createdAt.getTime() - now) < 2000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('select sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select typed sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select distinct', async () => { const usersDistinctTable = singlestoreTable('users_distinct', { id: int('id').notNull(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${usersDistinctTable}`); await db.execute(sql`create table ${usersDistinctTable} (id int, name text)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]); const users = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ); await db.execute(sql`drop table ${usersDistinctTable}`); expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); }); test('insert returning sql', async () => { const [result, _] = await db.insert(usersTable).values({ id: 1, name: 'John' }); expect(result.insertId).toBe(1); }); test('delete returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(users[0].affectedRows).toBe(1); }); test('update returning sql', async () => { await db.insert(usersTable).values({ name: 'John' }); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); expect(users[0].changedRows).toBe(1); }); test('update with returning all fields', async () => { await db.insert(usersTable).values({ id: 1, name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select().from(usersTable).where(eq(usersTable.id, 1)); expect(updatedUsers[0].changedRows).toBe(1); expect(users[0]!.createdAt).toBeInstanceOf(Date); // not timezone based timestamp, thats why it should not work here // t.assert(Math.abs(users[0]!.createdAt.getTime() - now) < 2000); expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); }); test('update with returning partial', async () => { await db.insert(usersTable).values({ id: 1, name: 'John' }); const updatedUsers = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')); const users = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(updatedUsers[0].changedRows).toBe(1); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async () => { await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('delete with returning partial', async () => { await db.insert(usersTable).values({ name: 'John' }); const deletedUser = await db.delete(usersTable).where(eq(usersTable.name, 'John')); expect(deletedUser[0].affectedRows).toBe(1); }); test('insert + select', async () => { await db.insert(usersTable).values({ id: 1, name: 'John' }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); await db.insert(usersTable).values({ id: 2, name: 'Jane' }); const result2 = await db.select().from(usersTable).orderBy(asc(usersTable.id)); expect(result2).toEqual([ { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, ]); }); test('json insert', async () => { await db.insert(usersTable).values({ id: 1, name: 'John', jsonb: ['foo', 'bar'] }); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); }); test('insert with overridden default values', async () => { await db.insert(usersTable).values({ id: 1, name: 'John', verified: true }); const result = await db.select().from(usersTable); expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); }); test('insert many', async () => { await db.insert(usersTable).values([ { id: 1, name: 'John' }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'] }, { id: 3, name: 'Jane' }, { id: 4, name: 'Austin', verified: true }, ]); const result = await db.select({ id: usersTable.id, name: usersTable.name, jsonb: usersTable.jsonb, verified: usersTable.verified, }).from(usersTable) .orderBy(asc(usersTable.id)); expect(result).toEqual([ { id: 1, name: 'John', jsonb: null, verified: false }, { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', jsonb: null, verified: false }, { id: 4, name: 'Austin', jsonb: null, verified: true }, ]); }); test('insert many with returning', async () => { const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]); expect(result[0].affectedRows).toBe(4); }); test('select with group by as field', async () => { await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with group by as sql', async () => { await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }]); }); test('select with group by as sql + column', async () => { await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by as column + sql', async () => { await db.insert(usersTable).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by complex query', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async () => { const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ sql: `select \`id\`, \`name\` from \`${getTableName(usersTable)}\` group by \`${ getTableName(usersTable) }\`.\`id\`, \`${getTableName(usersTable)}\`.\`name\``, params: [], }); }); test('build query insert with onDuplicate', async () => { const query = db.insert(usersTable) .values({ name: 'John', jsonb: ['foo', 'bar'] }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }) .toSQL(); expect(query).toEqual({ sql: `insert into \`${ getTableName(usersTable) }\` (\`id\`, \`name\`, \`verified\`, \`jsonb\`, \`created_at\`) values (default, ?, default, ?, default) on duplicate key update \`name\` = ?`, params: ['John', '["foo","bar"]', 'John1'], }); }); test('insert with onDuplicate', async () => { await db.insert(usersTable) .values({ id: 1, name: 'John' }); await db.insert(usersTable) .values({ id: 1, name: 'John' }) .onDuplicateKeyUpdate({ set: { name: 'John1' } }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert conflict', async () => { await db.insert(usersTable) .values({ name: 'John' }); await expect((async () => { db.insert(usersTable).values({ id: 1, name: 'John1' }); })()).resolves.not.toThrowError(); }); test('insert conflict with ignore', async () => { await db.insert(usersTable) .values({ id: 1, name: 'John' }); await db.insert(usersTable) .ignore() .values({ id: 1, name: 'John1' }); const res = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert sql', async () => { await db.insert(usersTable).values({ id: 1, name: sql`${'John'}` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('partial join with alias', async () => { const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }).from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)) .orderBy(asc(usersTable.id)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); test('full join with alias', async () => { const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select().from(users) .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)) .orderBy(asc(users.id)); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('select from alias', async () => { const singlestoreTable = singlestoreTableCreator((name) => `prefixed_${name}`); const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select() .from(user) .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)) .orderBy(asc(user.id)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.execute(sql`drop table ${users}`); }); test('insert with spaces', async () => { await db.insert(usersTable).values({ id: 1, name: sql`'Jo h n'` }); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async () => { await db.insert(usersTable).values({ id: 1, name: 'John' }); const statement = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .prepare(); const result = await statement.execute(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement reuse', async () => { const stmt = db.insert(usersTable).values({ verified: true, id: sql.placeholder('id'), name: sql.placeholder('name'), }).prepare(); for (let i = 0; i < 10; i++) { await stmt.execute({ id: i + 1, name: `John ${i}` }); } const result = await db.select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified, }).from(usersTable) .orderBy(asc(usersTable.id)); expect(result).toEqual([ { id: 1, name: 'John 0', verified: true }, { id: 2, name: 'John 1', verified: true }, { id: 3, name: 'John 2', verified: true }, { id: 4, name: 'John 3', verified: true }, { id: 5, name: 'John 4', verified: true }, { id: 6, name: 'John 5', verified: true }, { id: 7, name: 'John 6', verified: true }, { id: 8, name: 'John 7', verified: true }, { id: 9, name: 'John 8', verified: true }, { id: 10, name: 'John 9', verified: true }, ]); }); test('prepared statement with placeholder in .where', async () => { await db.insert(usersTable).values({ id: 1, name: 'John' }); const stmt = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .prepare(); const result = await stmt.execute({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('migrator', async () => { const usersMigratorTable = singlestoreTableRaw('users12', { id: serial('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }, (table) => { return { name: uniqueIndex('').on(table.name).using('btree'), }; }); await db.execute(sql.raw(`drop table if exists cities_migration`)); await db.execute(sql.raw(`drop table if exists users_migration`)); await db.execute(sql.raw(`drop table if exists users12`)); await db.execute(sql.raw(`drop table if exists __drizzle_migrations`)); await migrate(db, { migrationsFolder: './drizzle2/singlestore' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.execute(sql.raw(`drop table cities_migration`)); await db.execute(sql.raw(`drop table users_migration`)); await db.execute(sql.raw(`drop table users12`)); await db.execute(sql.raw(`drop table __drizzle_migrations`)); }); test('insert via db.execute + select via db.execute', async () => { await db.execute( sql`insert into ${usersTable} (${new Name(usersTable.id.name)}, ${new Name( usersTable.name.name, )}) values (1, ${'John'})`, ); const result = await db.execute<{ id: number; name: string }>(sql`select id, name from ${usersTable}`); expect(result[0]).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.execute w/ query builder', async () => { const inserted = await db.execute( db.insert(usersTable).values({ name: 'John' }), ); expect(inserted[0].affectedRows).toBe(1); }); test('insert + select all possible dates', async () => { const datesTable = singlestoreTable('datestable', { date: date('date'), dateAsString: date('date_as_string', { mode: 'string' }), time: time('time'), datetime: datetime('datetime'), datetimeAsString: datetime('datetime_as_string', { mode: 'string' }), year: year('year'), }); await db.execute(sql`drop table if exists ${datesTable}`); await db.execute( sql` create table ${datesTable} ( \`date\` date, \`date_as_string\` date, \`time\` time, \`datetime\` datetime, \`datetime_as_string\` datetime, \`year\` year ) `, ); const d = new Date('2022-11-11'); await db.insert(datesTable).values({ date: d, dateAsString: '2022-11-11', time: '12:12:12', datetime: d, year: 22, datetimeAsString: '2022-11-11 12:12:12', }); const res = await db.select().from(datesTable); expect(res[0]?.date).toBeInstanceOf(Date); expect(res[0]?.datetime).toBeInstanceOf(Date); expect(typeof res[0]?.dateAsString).toBe('string'); expect(typeof res[0]?.datetimeAsString).toBe('string'); expect(res).toEqual([{ date: toLocalDate(new Date('2022-11-11')), dateAsString: '2022-11-11', time: '12:12:12', datetime: new Date('2022-11-11'), year: 2022, datetimeAsString: '2022-11-11 12:12:12', }]); await db.execute(sql`drop table ${datesTable}`); }); test('SingleStore enum test case #1', async () => { const tableWithEnums = singlestoreTable('enums_test_case', { id: serial('id').primaryKey(), enum1: singlestoreEnum('enum1', ['a', 'b', 'c']).notNull(), enum2: singlestoreEnum('enum2', ['a', 'b', 'c']).default('a'), enum3: singlestoreEnum('enum3', ['a', 'b', 'c']).notNull().default('b'), }); await db.execute(sql`drop table if exists ${tableWithEnums}`); await db.execute(sql` create table ${tableWithEnums} ( \`id\` serial primary key, \`enum1\` ENUM('a', 'b', 'c') not null, \`enum2\` ENUM('a', 'b', 'c') default 'a', \`enum3\` ENUM('a', 'b', 'c') not null default 'b' ) `); await db.insert(tableWithEnums).values([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum3: 'c' }, { id: 3, enum1: 'a' }, ]); const res = await db.select().from(tableWithEnums).orderBy(asc(tableWithEnums.id)); await db.execute(sql`drop table ${tableWithEnums}`); expect(res).toEqual([ { id: 1, enum1: 'a', enum2: 'b', enum3: 'c' }, { id: 2, enum1: 'a', enum2: 'a', enum3: 'c' }, { id: 3, enum1: 'a', enum2: 'a', enum3: 'b' }, ]); }); test('left join (flat object fields)', async () => { await db.insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); const res = await db.select({ userId: users2Table.id, userName: users2Table.name, cityId: citiesTable.id, cityName: citiesTable.name, }).from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) .orderBy(asc(users2Table.id)); expect(res).toEqual([ { userId: 1, userName: 'John', cityId: 1, cityName: 'Paris' }, { userId: 2, userName: 'Jane', cityId: null, cityName: null }, ]); }); test('left join (grouped fields)', async () => { await db.insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); const res = await db.select({ id: users2Table.id, user: { name: users2Table.name, nameUpper: sql`upper(${users2Table.name})`, }, city: { id: citiesTable.id, name: citiesTable.name, nameUpper: sql`upper(${citiesTable.name})`, }, }).from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) .orderBy(asc(users2Table.id)); expect(res).toEqual([ { id: 1, user: { name: 'John', nameUpper: 'JOHN' }, city: { id: 1, name: 'Paris', nameUpper: 'PARIS' }, }, { id: 2, user: { name: 'Jane', nameUpper: 'JANE' }, city: null, }, ]); }); test('left join (all fields)', async () => { await db.insert(citiesTable) .values([{ id: 1, name: 'Paris' }, { id: 2, name: 'London' }]); await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane' }]); const res = await db.select().from(users2Table) .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)) .orderBy(asc(users2Table.id)); expect(res).toEqual([ { users2: { id: 1, name: 'John', cityId: 1, }, cities: { id: 1, name: 'Paris', }, }, { users2: { id: 2, name: 'Jane', cityId: null, }, cities: null, }, ]); }); test('join subquery', async () => { const coursesTable = singlestoreTable('courses', { id: serial('id').primaryKey(), name: text('name').notNull(), categoryId: int('category_id'), }); const courseCategoriesTable = singlestoreTable('course_categories', { id: serial('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${coursesTable}`); await db.execute(sql`drop table if exists ${courseCategoriesTable}`); await db.execute( sql` create table ${courseCategoriesTable} ( \`id\` serial primary key, \`name\` text not null ) `, ); await db.execute( sql` create table ${coursesTable} ( \`id\` serial primary key, \`name\` text not null, \`category_id\` int ) `, ); await db.insert(courseCategoriesTable).values([ { id: 1, name: 'Category 1' }, { id: 2, name: 'Category 2' }, { id: 3, name: 'Category 3' }, { id: 4, name: 'Category 4' }, ]); await db.insert(coursesTable).values([ { id: 1, name: 'Development', categoryId: 2 }, { id: 2, name: 'IT & Software', categoryId: 3 }, { id: 3, name: 'Marketing', categoryId: 4 }, { id: 4, name: 'Design', categoryId: 1 }, ]); const sq2 = db .select({ categoryId: courseCategoriesTable.id, category: courseCategoriesTable.name, total: sql`count(${courseCategoriesTable.id})`, }) .from(courseCategoriesTable) .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) .orderBy(courseCategoriesTable.id) .as('sq2'); const res = await db .select({ courseName: coursesTable.name, categoryId: sq2.categoryId, }) .from(coursesTable) .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) .orderBy(coursesTable.name); await db.execute(sql`drop table ${coursesTable}`); await db.execute(sql`drop table ${courseCategoriesTable}`); expect(res).toEqual([ { courseName: 'Design', categoryId: 1 }, { courseName: 'Development', categoryId: 2 }, { courseName: 'IT & Software', categoryId: 3 }, { courseName: 'Marketing', categoryId: 4 }, ]); }); test('with ... select', async () => { const orders = singlestoreTable('orders', { id: serial('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull(), amount: int('amount').notNull(), quantity: int('quantity').notNull(), }); await db.execute(sql`drop table if exists ${orders}`); await db.execute( sql` create table ${orders} ( \`id\` serial primary key, \`region\` text not null, \`product\` text not null, \`amount\` int not null, \`quantity\` int not null ) `, ); await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const regionalSales = db .$with('regional_sales') .as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = db .$with('top_regions') .as( db .select({ region: regionalSales.region, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`cast(sum(${orders.quantity}) as unsigned)`, productSales: sql`cast(sum(${orders.amount}) as unsigned)`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product); await db.execute(sql`drop table ${orders}`); expect(result).toEqual([ { region: 'Europe', product: 'A', productUnits: 3, productSales: 30, }, { region: 'Europe', product: 'B', productUnits: 5, productSales: 50, }, { region: 'US', product: 'A', productUnits: 7, productSales: 70, }, { region: 'US', product: 'B', productUnits: 9, productSales: 90, }, ]); }); test('select from subquery sql', async () => { await db.insert(users2Table).values([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); const sq = db .select({ name: sql`concat(${users2Table.name}, " modified")`.as('name') }) .from(users2Table) .orderBy(asc(users2Table.id)) .as('sq'); const res = await db.select({ name: sq.name }).from(sq); expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); }); test('select a field without joining its table', () => { expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); }); test('select all fields from subquery without alias', () => { const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); expect(() => db.select().from(sq).prepare()).toThrowError(); }); test('select count()', async () => { await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); const res = await db.select({ count: sql`count(*)` }).from(usersTable); expect(res).toEqual([{ count: 2 }]); }); test('select for ...', () => { { const query = db.select().from(users2Table).for('update').toSQL(); expect(query.sql).toMatch(/ for update$/); } { const query = db.select().from(users2Table).for('share', { skipLocked: true }).toSQL(); expect(query.sql).toMatch(/ for share skip locked$/); } { const query = db.select().from(users2Table).for('update', { noWait: true }).toSQL(); expect(query.sql).toMatch(/ for update nowait$/); } }); test('having', async () => { await db.insert(citiesTable).values([{ id: 1, name: 'London' }, { id: 2, name: 'Paris' }, { id: 3, name: 'New York', }]); await db.insert(users2Table).values([{ id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { id: 3, name: 'Jack', cityId: 2, }]); const result = await db .select({ id: citiesTable.id, name: sql`upper(${citiesTable.name})`.as('upper_name'), usersCount: sql`count(${users2Table.id})`.as('users_count'), }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(citiesTable.id) .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name); expect(result).toEqual([ { id: 1, name: 'LONDON', usersCount: 2, }, { id: 2, name: 'PARIS', usersCount: 1, }, ]); }); // TODO: Unskip when views are supported /* test.skip('view', async () => { const newYorkers1 = singlestoreView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); const newYorkers2 = singlestoreView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); const newYorkers3 = singlestoreView('new_yorkers', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }).existing(); await db.execute(sql`create view new_yorkers as ${getViewConfig(newYorkers1).query}`); await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); await db.insert(users2Table).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, { id: 3, name: 'Jack', cityId: 2 }, ]); { const result = await db.select().from(newYorkers1).orderBy(asc(newYorkers1.id)); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2).orderBy(asc(newYorkers2.id)); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3).orderBy(asc(newYorkers3.id)); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).orderBy(asc(newYorkers1.id)); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.execute(sql`drop view ${newYorkers1}`); }); */ test('select from raw sql', async () => { const result = await db.select({ id: sql`id`, name: sql`name`, }).from(sql`(select 1 as id, 'John' as name) as users`); Expect>; expect(result).toEqual([ { id: 1, name: 'John' }, ]); }); test('select from raw sql with joins', async () => { const result = await db .select({ id: sql`users.id`, name: sql`users.name`, userCity: sql`users.city`, cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); Expect>; expect(result).toEqual([ { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, ]); }); test('join on aliased sql from select', async () => { const result = await db .select({ userId: sql`users.id`.as('userId'), name: sql`users.name`, userCity: sql`users.city`, cityId: sql`cities.id`.as('cityId'), cityName: sql`cities.name`, }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); Expect>; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('join on aliased sql from with clause', async () => { const users = db.$with('users').as( db.select({ id: sql`id`.as('userId'), name: sql`name`.as('userName'), city: sql`city`.as('city'), }).from( sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, ), ); const cities = db.$with('cities').as( db.select({ id: sql`id`.as('cityId'), name: sql`name`.as('cityName'), }).from( sql`(select 1 as id, 'Paris' as name) as cities`, ), ); const result = await db .with(users, cities) .select({ userId: users.id, name: users.name, userCity: users.city, cityId: cities.id, cityName: cities.name, }) .from(users) .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); Expect>; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('prefixed table', async () => { const singlestoreTable = singlestoreTableCreator((name) => `myprefix_${name}`); const users = singlestoreTable('test_prefixed_table_with_unique_name', { id: int('id').primaryKey(), name: text('name').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table myprefix_test_prefixed_table_with_unique_name (id int not null primary key, name text not null)`, ); await db.insert(users).values({ id: 1, name: 'John' }); const result = await db.select().from(users); expect(result).toEqual([{ id: 1, name: 'John' }]); await db.execute(sql`drop table ${users}`); }); test('orderBy with aliased column', () => { const query = db.select({ test: sql`something`.as('test'), }).from(users2Table).orderBy((fields) => fields.test).toSQL(); expect(query.sql).toBe(`select something as \`test\` from \`${getTableName(users2Table)}\` order by \`test\``); }); test('timestamp timezone', async () => { const date = new Date(Date.parse('2020-01-01T12:34:56+07:00')); await db.insert(usersTable).values({ id: 1, name: 'With default times' }); await db.insert(usersTable).values({ id: 2, name: 'Without default times', createdAt: date, }); const users = await db.select().from(usersTable).orderBy(asc(usersTable.id)); // check that the timestamps are set correctly for default times expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); // check that the timestamps are set correctly for non default times expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); }); test('transaction', async () => { const users = singlestoreTable('users_transactions', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); const products = singlestoreTable('products_transactions', { id: serial('id').primaryKey(), price: int('price').notNull(), stock: int('stock').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop table if exists ${products}`); await db.execute(sql`create table ${users} (id serial not null primary key, balance int not null)`); await db.execute( sql`create table ${products} (id serial not null primary key, price int not null, stock int not null)`, ); const [{ insertId: userId }] = await db.insert(users).values({ id: 1, balance: 100 }); const user = await db.select().from(users).where(eq(users.id, userId)).then((rows) => rows[0]!); const [{ insertId: productId }] = await db.insert(products).values({ id: 1, price: 10, stock: 10 }); const product = await db.select().from(products).where(eq(products.id, productId)).then((rows) => rows[0]!); await db.transaction(async (tx) => { await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); }); const result = await db.select().from(users); await db.execute(sql`drop table ${users}`); await db.execute(sql`drop table ${products}`); expect(result).toEqual([{ id: 1, balance: 90 }]); }); test('transaction rollback', async () => { const users = singlestoreTable('users_transactions_rollback', { id: serial('id').primaryKey(), balance: int('balance').notNull(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, balance int not null)`, ); await expect((async () => { await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }); tx.rollback(); }); })()).rejects.toThrowError(TransactionRollbackError); const result = await db.select().from(users); await db.execute(sql`drop table ${users}`); expect(result).toEqual([]); }); test('join subquery with join', async () => { const internalStaff = singlestoreTable('internal_staff', { userId: int('user_id').notNull(), }); const customUser = singlestoreTable('custom_user', { id: int('id').notNull(), }); const ticket = singlestoreTable('ticket', { staffId: int('staff_id').notNull(), }); await db.execute(sql`drop table if exists ${internalStaff}`); await db.execute(sql`drop table if exists ${customUser}`); await db.execute(sql`drop table if exists ${ticket}`); await db.execute(sql`create table ${internalStaff} (user_id integer not null)`); await db.execute(sql`create table ${customUser} (id integer not null)`); await db.execute(sql`create table ${ticket} (staff_id integer not null)`); await db.insert(internalStaff).values({ userId: 1 }); await db.insert(customUser).values({ id: 1 }); await db.insert(ticket).values({ staffId: 1 }); const subq = db .select() .from(internalStaff) .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) .as('internal_staff'); const mainQuery = await db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); await db.execute(sql`drop table ${internalStaff}`); await db.execute(sql`drop table ${customUser}`); await db.execute(sql`drop table ${ticket}`); expect(mainQuery).toEqual([{ ticket: { staffId: 1 }, internal_staff: { internal_staff: { userId: 1 }, custom_user: { id: 1 }, }, }]); }); // TODO: Unskip when views are supported /* test.skip('subquery with view', async () => { const users = singlestoreTable('users_subquery_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }); const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 2 }, { id: 3, name: 'Jack', cityId: 1 }, { id: 4, name: 'Jill', cityId: 2 }, ]); const sq = db.$with('sq').as(db.select().from(newYorkers)); const result = await db.with(sq).select().from(sq).orderBy(asc(sq.id)); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 3, name: 'Jack', cityId: 1 }, ]); }); */ // TODO: Unskip when views are supported /* test.skip('join view as subquery', async () => { const users = singlestoreTable('users_join_view', { id: serial('id').primaryKey(), name: text('name').notNull(), cityId: int('city_id').notNull(), }); const newYorkers = singlestoreView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`drop view if exists ${newYorkers}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, ); await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); await db.insert(users).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 2 }, { id: 3, name: 'Jack', cityId: 1 }, { id: 4, name: 'Jill', cityId: 2 }, ]); const sq = db.select().from(newYorkers).as('new_yorkers_sq'); const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).orderBy(asc(users.id)); expect(result).toEqual([ { users_join_view: { id: 1, name: 'John', cityId: 1 }, new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, }, { users_join_view: { id: 2, name: 'Jane', cityId: 2 }, new_yorkers_sq: null, }, { users_join_view: { id: 3, name: 'Jack', cityId: 1 }, new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, }, { users_join_view: { id: 4, name: 'Jill', cityId: 2 }, new_yorkers_sq: null, }, ]); await db.execute(sql`drop view ${newYorkers}`); await db.execute(sql`drop table ${users}`); }); */ test('select iterator', async () => { const users = singlestoreTable('users_iterator', { id: serial('id').primaryKey(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key)`); await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); const iter = db.select().from(users) .orderBy(asc(users.id)) .iterator(); const result: typeof users.$inferSelect[] = []; for await (const row of iter) { result.push(row); } expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); test('select iterator w/ prepared statement', async () => { const users = singlestoreTable('users_iterator', { id: serial('id').primaryKey(), }); await db.execute(sql`drop table if exists ${users}`); await db.execute(sql`create table ${users} (id serial not null primary key)`); await db.insert(users).values([{ id: 1 }, { id: 2 }, { id: 3 }]); const prepared = db.select().from(users) .orderBy(asc(users.id)) .prepare(); const iter = prepared.iterator(); const result: typeof users.$inferSelect[] = []; for await (const row of iter) { result.push(row); } expect(result).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); }); test('insert undefined', async () => { const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.insert(users).values({ name: undefined }); })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); test('update undefined', async () => { const users = singlestoreTable('users', { id: serial('id').primaryKey(), name: text('name'), }); await db.execute(sql`drop table if exists ${users}`); await db.execute( sql`create table ${users} (id serial not null primary key, name text)`, ); await expect((async () => { await db.update(users).set({ name: undefined }); })()).rejects.toThrowError(); await expect((async () => { await db.update(users).set({ id: 1, name: undefined }); })()).resolves.not.toThrowError(); await db.execute(sql`drop table ${users}`); }); ================================================ FILE: integration-tests/tests/singlestore/singlestore-proxy.test.ts ================================================ import retry from 'async-retry'; import type { SingleStoreRemoteDatabase } from 'drizzle-orm/singlestore-proxy'; import { drizzle as proxyDrizzle } from 'drizzle-orm/singlestore-proxy'; import * as mysql2 from 'mysql2/promise'; import { afterAll, beforeAll, beforeEach } from 'vitest'; import { skipTests } from '~/common'; import { createDockerDB, tests } from './singlestore-common'; const ENABLE_LOGGING = false; // eslint-disable-next-line drizzle-internal/require-entity-kind class ServerSimulator { constructor(private db: mysql2.Connection) {} async query(sql: string, params: any[], method: 'all' | 'execute') { if (method === 'all') { try { const result = await this.db.query({ sql, values: params, rowsAsArray: true, typeCast: function(field: any, next: any) { if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { return field.string(); } return next(); }, }); return { data: result[0] as any }; } catch (e: any) { return { error: e }; } } else if (method === 'execute') { try { const result = await this.db.query({ sql, values: params, typeCast: function(field: any, next: any) { if (field.type === 'TIMESTAMP' || field.type === 'DATETIME' || field.type === 'DATE') { return field.string(); } return next(); }, }); return { data: result as any }; } catch (e: any) { return { error: e }; } } else { return { error: 'Unknown method value' }; } } async migrations(queries: string[]) { await this.db.query('START TRANSACTION'); try { for (const query of queries) { await this.db.query(query); } await this.db.query('COMMIT'); } catch (e) { await this.db.query('ROLLBACK'); throw e; } return {}; } } let db: SingleStoreRemoteDatabase; let client: mysql2.Connection; let serverSimulator: ServerSimulator; beforeAll(async () => { let connectionString; if (process.env['SINGLESTORE_CONNECTION_STRING']) { connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; } else { const { connectionString: conStr } = await createDockerDB(); connectionString = conStr; } client = await retry(async () => { client = await mysql2.createConnection({ uri: connectionString, supportBigNumbers: true }); await client.connect(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); await client.changeUser({ database: 'drizzle' }); serverSimulator = new ServerSimulator(client); db = proxyDrizzle(async (sql, params, method) => { try { const response = await serverSimulator.query(sql, params, method); if (response.error !== undefined) { throw response.error; } return { rows: response.data }; } catch (e: any) { console.error('Error from singlestore proxy server:', e.message); throw e; } }, { logger: ENABLE_LOGGING }); }); afterAll(async () => { await client?.end(); }); beforeEach((ctx) => { ctx.singlestore = { db, }; }); skipTests([ 'select iterator w/ prepared statement', 'select iterator', 'nested transaction rollback', 'nested transaction', 'transaction rollback', 'transaction', 'transaction with options (set isolationLevel)', 'migrator', ]); tests(); ================================================ FILE: integration-tests/tests/singlestore/singlestore.test.ts ================================================ import retry from 'async-retry'; import { drizzle } from 'drizzle-orm/singlestore'; import type { SingleStoreDriverDatabase } from 'drizzle-orm/singlestore'; import * as mysql2 from 'mysql2/promise'; import { afterAll, beforeAll, beforeEach } from 'vitest'; import { TestCache, TestGlobalCache, tests as cacheTests } from './singlestore-cache'; import { createDockerDB, tests } from './singlestore-common'; const ENABLE_LOGGING = false; let db: SingleStoreDriverDatabase; let dbGlobalCached: SingleStoreDriverDatabase; let cachedDb: SingleStoreDriverDatabase; let client: mysql2.Connection; beforeAll(async () => { let connectionString; if (process.env['SINGLESTORE_CONNECTION_STRING']) { connectionString = process.env['SINGLESTORE_CONNECTION_STRING']; } else { const { connectionString: conStr } = await createDockerDB(); connectionString = conStr; } client = await retry(async () => { client = await mysql2.createConnection({ uri: connectionString, supportBigNumbers: true }); await client.connect(); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.end(); }, }); await client.query(`CREATE DATABASE IF NOT EXISTS drizzle;`); await client.changeUser({ database: 'drizzle' }); db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); afterAll(async () => { await client?.end(); }); beforeEach((ctx) => { ctx.singlestore = { db, }; ctx.cachedSingleStore = { db: cachedDb, dbGlobalCached, }; }); cacheTests(); tests(); ================================================ FILE: integration-tests/tests/sqlite/better-sqlite.test.ts ================================================ import Database from 'better-sqlite3'; import { sql } from 'drizzle-orm'; import { type BetterSQLite3Database, drizzle } from 'drizzle-orm/better-sqlite3'; import { migrate } from 'drizzle-orm/better-sqlite3/migrator'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; const ENABLE_LOGGING = false; let db: BetterSQLite3Database; let client: Database.Database; beforeAll(async () => { const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; client = new Database(dbPath); db = drizzle(client, { logger: ENABLE_LOGGING }); }); afterAll(async () => { client?.close(); }); beforeEach((ctx) => { ctx.sqlite = { db, }; }); test('migrator', async () => { db.run(sql`drop table if exists another_users`); db.run(sql`drop table if exists users12`); db.run(sql`drop table if exists __drizzle_migrations`); migrate(db, { migrationsFolder: './drizzle2/sqlite' }); db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = db.select().from(usersMigratorTable).all(); db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result2 = db.select().from(anotherUsersMigratorTable).all(); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); db.run(sql`drop table another_users`); db.run(sql`drop table users12`); db.run(sql`drop table __drizzle_migrations`); }); skipTests([ /** * doesn't work properly: * Expect: should rollback transaction and don't insert/ update data * Received: data inserted/ updated */ 'transaction rollback', 'nested transaction rollback', ]); tests(); ================================================ FILE: integration-tests/tests/sqlite/d1-batch.test.ts ================================================ /// import 'dotenv/config'; import { D1Database, D1DatabaseAPI } from '@miniflare/d1'; import { createSQLiteDB } from '@miniflare/shared'; import { eq, relations, sql } from 'drizzle-orm'; import type { DrizzleD1Database } from 'drizzle-orm/d1'; import { drizzle } from 'drizzle-orm/d1'; import { type AnySQLiteColumn, integer, primaryKey, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; const ENABLE_LOGGING = false; export const usersTable = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), verified: integer('verified').notNull().default(0), invitedBy: integer('invited_by').references((): AnySQLiteColumn => usersTable.id), }); export const usersConfig = relations(usersTable, ({ one, many }) => ({ invitee: one(usersTable, { fields: [usersTable.invitedBy], references: [usersTable.id], }), usersToGroups: many(usersToGroupsTable), posts: many(postsTable), })); export const groupsTable = sqliteTable('groups', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), description: text('description'), }); export const groupsConfig = relations(groupsTable, ({ many }) => ({ usersToGroups: many(usersToGroupsTable), })); export const usersToGroupsTable = sqliteTable( 'users_to_groups', { id: integer('id').primaryKey({ autoIncrement: true }), userId: integer('user_id', { mode: 'number' }).notNull().references( () => usersTable.id, ), groupId: integer('group_id', { mode: 'number' }).notNull().references( () => groupsTable.id, ), }, (t) => ({ pk: primaryKey({ columns: [t.userId, t.groupId] }), }), ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { fields: [usersToGroupsTable.groupId], references: [groupsTable.id], }), user: one(usersTable, { fields: [usersToGroupsTable.userId], references: [usersTable.id], }), })); export const postsTable = sqliteTable('posts', { id: integer('id').primaryKey({ autoIncrement: true }), content: text('content').notNull(), ownerId: integer('owner_id', { mode: 'number' }).references( () => usersTable.id, ), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const postsConfig = relations(postsTable, ({ one, many }) => ({ author: one(usersTable, { fields: [postsTable.ownerId], references: [usersTable.id], }), comments: many(commentsTable), })); export const commentsTable = sqliteTable('comments', { id: integer('id').primaryKey({ autoIncrement: true }), content: text('content').notNull(), creator: integer('creator', { mode: 'number' }).references( () => usersTable.id, ), postId: integer('post_id', { mode: 'number' }).references(() => postsTable.id), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ post: one(postsTable, { fields: [commentsTable.postId], references: [postsTable.id], }), author: one(usersTable, { fields: [commentsTable.creator], references: [usersTable.id], }), likes: many(commentLikesTable), })); export const commentLikesTable = sqliteTable('comment_likes', { id: integer('id').primaryKey({ autoIncrement: true }), creator: integer('creator', { mode: 'number' }).references( () => usersTable.id, ), commentId: integer('comment_id', { mode: 'number' }).references( () => commentsTable.id, ), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ comment: one(commentsTable, { fields: [commentLikesTable.commentId], references: [commentsTable.id], }), author: one(usersTable, { fields: [commentLikesTable.creator], references: [usersTable.id], }), })); const schema = { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable, commentLikesConfig, commentsConfig, postsConfig, usersToGroupsConfig, groupsConfig, usersConfig, }; let db: DrizzleD1Database; beforeAll(async () => { const sqliteDb = await createSQLiteDB(':memory:'); const d1db = new D1Database(new D1DatabaseAPI(sqliteDb)); db = drizzle(d1db, { logger: ENABLE_LOGGING, schema }); }); beforeEach(async () => { await db.run(sql`drop table if exists \`groups\``); await db.run(sql`drop table if exists \`users\``); await db.run(sql`drop table if exists \`users_to_groups\``); await db.run(sql`drop table if exists \`posts\``); await db.run(sql`drop table if exists \`comments\``); await db.run(sql`drop table if exists \`comment_likes\``); await db.run( sql` CREATE TABLE \`users\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`verified\` integer DEFAULT 0 NOT NULL, \`invited_by\` integer ); `, ); await db.run( sql` CREATE TABLE \`groups\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`description\` text ); `, ); await db.run( sql` CREATE TABLE \`users_to_groups\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`user_id\` integer NOT NULL, \`group_id\` integer NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`posts\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`content\` text NOT NULL, \`owner_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`comments\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`content\` text NOT NULL, \`creator\` integer, \`post_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`comment_likes\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`creator\` integer, \`comment_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); }); afterAll(async () => { await db.run(sql`drop table if exists \`groups\``); await db.run(sql`drop table if exists \`users\``); await db.run(sql`drop table if exists \`users_to_groups\``); await db.run(sql`drop table if exists \`posts\``); await db.run(sql`drop table if exists \`comments\``); await db.run(sql`drop table if exists \`comment_likes\``); }); test('batch api example', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id, invitedBy: usersTable.invitedBy, }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.select().from(usersTable), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; invitedBy: number | null; }[], D1Result, { id: number; name: string; verified: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(3); expect(batchResponse[0]).toEqual([{ id: 1, invitedBy: null, }]); // expect(batchResponse[1]).toEqual({ // results: [], // success: true, // meta: { // duration: 0.027083873748779297, // last_row_id: 2, // changes: 1, // served_by: 'miniflare.db', // internal_stats: null, // }, // }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); }); // batch api only relational many test('insert + findMany', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], D1Result, { id: number; name: string; verified: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(3); expect(batchResponse[0]).toEqual([{ id: 1, }]); // expect(batchResponse[1]).toEqual({ columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 2n }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); }); // batch api relational many + one test('insert + findMany + findFirst', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), db.query.usersTable.findFirst({}), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], D1Result, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; } | undefined, ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); // expect(batchResponse[1]).toEqual({ columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 2n }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toEqual( { id: 1, name: 'John', verified: 0, invitedBy: null }, ); }); test('insert + db.all + db.get + db.values + db.run', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.run(sql`insert into users (id, name) values (2, 'Dan')`), db.all(sql`select * from users`), db.values(sql`select * from users`), db.get(sql`select * from users`), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], D1Result, { id: number; name: string; verified: number; invitedBy: number | null; }[], unknown[][], { id: number; name: string; verified: number; invitedBy: number | null; }, ]>(); expect(batchResponse.length).eq(5); expect(batchResponse[0], 'insert').toEqual([{ id: 1, }]); // expect(batchResponse[1]).toEqual({ columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 2n }); expect(batchResponse[2], 'all').toEqual([ { id: 1, name: 'John', verified: 0, invited_by: null }, { id: 2, name: 'Dan', verified: 0, invited_by: null }, ]); expect(batchResponse[3], 'values').toEqual([[1, 'John', 0, null], [2, 'Dan', 0, null]]); expect(batchResponse[4], 'get').toEqual( { id: 1, name: 'John', verified: 0, invited_by: null }, ); }); // batch api combined rqb + raw call test('insert + findManyWith + db.all', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), db.all(sql`select * from users`), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], D1Result, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); // expect(batchResponse[1]).toEqual({ columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 2n }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toEqual([ { id: 1, name: 'John', verified: 0, invited_by: null }, { id: 2, name: 'Dan', verified: 0, invited_by: null }, ]); }); // batch api for insert + update + select test('insert + update + select + select partial', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)), db.query.usersTable.findMany({}), db.select().from(usersTable).where(eq(usersTable.id, 1)), db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from(usersTable), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], D1Result, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(5); expect(batchResponse[0]).toEqual([{ id: 1, }]); // expect(batchResponse[1]).toEqual({ columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 1n }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toEqual([ { id: 1, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[4]).toEqual([ { id: 1, invitedBy: null }, ]); }); // batch api for insert + delete + select test('insert + delete + select + select partial', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.delete(usersTable).where(eq(usersTable.id, 1)).returning({ id: usersTable.id, invitedBy: usersTable.invitedBy }), db.query.usersTable.findFirst({ columns: { id: true, invitedBy: true, }, }), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], D1Result, { id: number; invitedBy: number | null; }[], { id: number; invitedBy: number | null; } | undefined, ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); // expect(batchResponse[1]).toEqual({ columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 2n }); expect(batchResponse[2]).toEqual([ { id: 1, invitedBy: null }, ]); expect(batchResponse[3]).toEqual( { id: 2, invitedBy: null }, ); }); ================================================ FILE: integration-tests/tests/sqlite/d1.test.ts ================================================ import { D1Database, D1DatabaseAPI } from '@miniflare/d1'; import { createSQLiteDB } from '@miniflare/shared'; import { sql } from 'drizzle-orm'; import type { DrizzleD1Database } from 'drizzle-orm/d1'; import { drizzle } from 'drizzle-orm/d1'; import { migrate } from 'drizzle-orm/d1/migrator'; import { beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; import { TestCache, TestGlobalCache, tests as cacheTests } from './sqlite-common-cache'; const ENABLE_LOGGING = false; let db: DrizzleD1Database; let dbGlobalCached: DrizzleD1Database; let cachedDb: DrizzleD1Database; beforeAll(async () => { const sqliteDb = await createSQLiteDB(':memory:'); const d1db = new D1Database(new D1DatabaseAPI(sqliteDb)); db = drizzle(d1db, { logger: ENABLE_LOGGING }); cachedDb = drizzle(d1db, { logger: ENABLE_LOGGING, cache: new TestCache() }); dbGlobalCached = drizzle(d1db, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); beforeEach((ctx) => { ctx.sqlite = { db, }; ctx.cachedSqlite = { db: cachedDb, dbGlobalCached, }; }); test('migrator', async () => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result2 = await db.select().from(anotherUsersMigratorTable).all(); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table __drizzle_migrations`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); expect(res.length > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table ${sql.identifier(customTable)}`); }); skipTests([ // Cannot convert 49,50,55 to a BigInt 'insert bigint values', // SyntaxError: Unexpected token , in JSON at position 2 'json insert', 'insert many', 'insert many with returning', /** * TODO: Fix Bug! The objects should be equal * * See #528 for more details. * Tldr the D1 driver does not execute joins successfully */ 'partial join with alias', 'full join with alias', 'select from alias', 'join view as subquery', 'cross join', ]); cacheTests(); tests(); ================================================ FILE: integration-tests/tests/sqlite/durable-objects/drizzle/0000_cuddly_black_bolt.sql ================================================ CREATE TABLE `another_users` ( `id` integer PRIMARY KEY NOT NULL, `name` text NOT NULL, `email` text NOT NULL ); --> statement-breakpoint CREATE TABLE `users12` ( `id` integer PRIMARY KEY NOT NULL, `name` text NOT NULL, `email` text NOT NULL ); ================================================ FILE: integration-tests/tests/sqlite/durable-objects/drizzle/meta/0000_snapshot.json ================================================ { "version": "6", "dialect": "sqlite", "id": "66be869a-d55d-4790-a382-de654dff1506", "prevId": "00000000-0000-0000-0000-000000000000", "tables": { "another_users": { "name": "another_users", "columns": { "id": { "name": "id", "type": "integer", "primaryKey": true, "notNull": true, "autoincrement": false }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, "email": { "name": "email", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {}, "uniqueConstraints": {}, "checkConstraints": {} }, "users12": { "name": "users12", "columns": { "id": { "name": "id", "type": "integer", "primaryKey": true, "notNull": true, "autoincrement": false }, "name": { "name": "name", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false }, "email": { "name": "email", "type": "text", "primaryKey": false, "notNull": true, "autoincrement": false } }, "indexes": {}, "foreignKeys": {}, "compositePrimaryKeys": {}, "uniqueConstraints": {}, "checkConstraints": {} } }, "views": {}, "enums": {}, "_meta": { "schemas": {}, "tables": {}, "columns": {} }, "internal": { "indexes": {} } } ================================================ FILE: integration-tests/tests/sqlite/durable-objects/drizzle/meta/_journal.json ================================================ { "version": "7", "dialect": "sqlite", "entries": [ { "idx": 0, "version": "6", "when": 1732696446109, "tag": "0000_cuddly_black_bolt", "breakpoints": true } ] } ================================================ FILE: integration-tests/tests/sqlite/durable-objects/drizzle/migrations.js ================================================ import m0000 from './0000_cuddly_black_bolt.sql'; import journal from './meta/_journal.json'; export default { journal, migrations: { m0000, }, }; ================================================ FILE: integration-tests/tests/sqlite/durable-objects/index.ts ================================================ /// import { expect } from 'chai'; import { DurableObject } from 'cloudflare:workers'; import { and, asc, avg, avgDistinct, count, countDistinct, eq, exists, getTableColumns, gt, gte, inArray, lt, max, min, Name, notInArray, sql, sum, sumDistinct, } from 'drizzle-orm'; import { drizzle, type DrizzleSqliteDODatabase } from 'drizzle-orm/durable-sqlite'; import { migrate } from 'drizzle-orm/durable-sqlite/migrator'; import { alias, type BaseSQLiteDatabase, blob, except, getViewConfig, int, integer, intersect, numeric, primaryKey, sqliteTable, sqliteTableCreator, sqliteView, text, union, unionAll, } from 'drizzle-orm/sqlite-core'; import { type Equal, Expect } from '~/utils'; import migrations from './drizzle/migrations'; export const usersTable = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').notNull(), verified: integer('verified', { mode: 'boolean' }).notNull().default(false), json: blob('json', { mode: 'json' }).$type(), createdAt: integer('created_at', { mode: 'timestamp' }).notNull().default(sql`strftime('%s', 'now')`), }); export const usersOnUpdate = sqliteTable('users_on_update', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), updatedAt: integer('updated_at', { mode: 'timestamp_ms' }).$onUpdate(() => new Date()), alwaysNull: text('always_null').$type().$onUpdate(() => null), }); export const users2Table = sqliteTable('users2', { id: integer('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').references(() => citiesTable.id), }); export const citiesTable = sqliteTable('cities', { id: integer('id').primaryKey(), name: text('name').notNull(), }); export const coursesTable = sqliteTable('courses', { id: integer('id').primaryKey(), name: text('name').notNull(), categoryId: integer('category_id').references(() => courseCategoriesTable.id), }); export const courseCategoriesTable = sqliteTable('course_categories', { id: integer('id').primaryKey(), name: text('name').notNull(), }); export const orders = sqliteTable('orders', { id: integer('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull().$default(() => 'random_string'), amount: integer('amount').notNull(), quantity: integer('quantity').notNull(), }); export const usersMigratorTable = sqliteTable('users12', { id: integer('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }); export const anotherUsersMigratorTable = sqliteTable('another_users', { id: integer('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }); export const pkExampleTable = sqliteTable('pk_example', { id: integer('id').notNull(), name: text('name').notNull(), email: text('email').notNull(), }, (table) => ({ compositePk: primaryKey({ columns: [table.id, table.name] }), })); export const bigIntExample = sqliteTable('big_int_example', { id: integer('id').primaryKey(), name: text('name').notNull(), bigInt: blob('big_int', { mode: 'bigint' }).notNull(), }); // To test aggregate functions export const aggregateTable = sqliteTable('aggregate_table', { id: integer('id').primaryKey({ autoIncrement: true }).notNull(), name: text('name').notNull(), a: integer('a'), b: integer('b'), c: integer('c'), nullOnly: integer('null_only'), }); async function setupSetOperationTest(db: BaseSQLiteDatabase) { await db.run(sql`drop table if exists users2`); await db.run(sql`drop table if exists cities`); await db.run(sql` create table \`cities\` ( id integer primary key, name text not null ) `); await db.run(sql` create table \`users2\` ( id integer primary key, name text not null, city_id integer references ${citiesTable}(${sql.identifier(citiesTable.id.name)}) ) `); await db.insert(citiesTable).values([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await db.insert(users2Table).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 2 }, { id: 3, name: 'Jack', cityId: 3 }, { id: 4, name: 'Peter', cityId: 3 }, { id: 5, name: 'Ben', cityId: 2 }, { id: 6, name: 'Jill', cityId: 1 }, { id: 7, name: 'Mary', cityId: 2 }, { id: 8, name: 'Sally', cityId: 1 }, ]); } async function setupAggregateFunctionsTest(db: BaseSQLiteDatabase) { await db.run(sql`drop table if exists "aggregate_table"`); await db.run( sql` create table "aggregate_table" ( "id" integer primary key autoincrement not null, "name" text not null, "a" integer, "b" integer, "c" integer, "null_only" integer ); `, ); await db.insert(aggregateTable).values([ { name: 'value 1', a: 5, b: 10, c: 20 }, { name: 'value 1', a: 5, b: 20, c: 30 }, { name: 'value 2', a: 10, b: 50, c: 60 }, { name: 'value 3', a: 20, b: 20, c: null }, { name: 'value 4', a: null, b: 90, c: 120 }, { name: 'value 5', a: 80, b: 10, c: null }, { name: 'value 6', a: null, b: null, c: 150 }, ]); } // eslint-disable-next-line drizzle-internal/require-entity-kind export class MyDurableObject extends DurableObject { storage: DurableObjectStorage; db: DrizzleSqliteDODatabase; constructor(ctx: DurableObjectState, env: Env) { super(ctx, env); this.storage = ctx.storage; this.db = drizzle(this.storage, { logger: false }); } async migrate1(): Promise { try { this.db.run(sql`drop table if exists another_users`); this.db.run(sql`drop table if exists users12`); this.db.run(sql`drop table if exists __drizzle_migrations`); migrate(this.db, migrations); this.db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = this.db.select().from(usersMigratorTable).all(); this.db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result2 = this.db.select().from(anotherUsersMigratorTable).all(); expect(result).deep.equal([{ id: 1, name: 'John', email: 'email' }]); expect(result2).deep.equal([{ id: 1, name: 'John', email: 'email' }]); this.db.run(sql`drop table another_users`); this.db.run(sql`drop table users12`); this.db.run(sql`drop table __drizzle_migrations`); } catch { throw new Error('migrate1 has broken'); } } async beforeEach(): Promise { this.db.run(sql`drop table if exists ${usersTable}`); this.db.run(sql`drop table if exists ${users2Table}`); this.db.run(sql`drop table if exists ${citiesTable}`); this.db.run(sql`drop table if exists ${coursesTable}`); this.db.run(sql`drop table if exists ${courseCategoriesTable}`); this.db.run(sql`drop table if exists ${orders}`); this.db.run(sql`drop table if exists ${bigIntExample}`); this.db.run(sql`drop table if exists ${pkExampleTable}`); this.db.run(sql`drop table if exists user_notifications_insert_into`); this.db.run(sql`drop table if exists users_insert_into`); this.db.run(sql`drop table if exists notifications_insert_into`); this.db.run(sql` create table ${usersTable} ( id integer primary key, name text not null, verified integer not null default 0, json blob, created_at integer not null default (strftime('%s', 'now')) ) `); this.db.run(sql` create table ${citiesTable} ( id integer primary key, name text not null ) `); this.db.run(sql` create table ${courseCategoriesTable} ( id integer primary key, name text not null ) `); this.db.run(sql` create table ${users2Table} ( id integer primary key, name text not null, city_id integer references ${citiesTable}(${sql.identifier(citiesTable.id.name)}) ) `); this.db.run(sql` create table ${coursesTable} ( id integer primary key, name text not null, category_id integer references ${courseCategoriesTable}(${sql.identifier(courseCategoriesTable.id.name)}) ) `); this.db.run(sql` create table ${orders} ( id integer primary key, region text not null, product text not null, amount integer not null, quantity integer not null ) `); this.db.run(sql` create table ${pkExampleTable} ( id integer not null, name text not null, email text not null, primary key (id, name) ) `); this.db.run(sql` create table ${bigIntExample} ( id integer primary key, name text not null, big_int blob not null ) `); } async insertBigIntValues(): Promise { try { await this.beforeEach(); this.db .insert(bigIntExample) .values({ name: 'one', bigInt: BigInt('0') }) .run(); this.db .insert(bigIntExample) .values({ name: 'two', bigInt: BigInt('127') }) .run(); this.db .insert(bigIntExample) .values({ name: 'three', bigInt: BigInt('32767') }) .run(); this.db .insert(bigIntExample) .values({ name: 'four', bigInt: BigInt('1234567890') }) .run(); this.db .insert(bigIntExample) .values({ name: 'five', bigInt: BigInt('12345678900987654321') }) .run(); const result = this.db.select().from(bigIntExample).all(); expect(result).deep.equal([ { id: 1, name: 'one', bigInt: BigInt('0') }, { id: 2, name: 'two', bigInt: BigInt('127') }, { id: 3, name: 'three', bigInt: BigInt('32767') }, { id: 4, name: 'four', bigInt: BigInt('1234567890') }, { id: 5, name: 'five', bigInt: BigInt('12345678900987654321') }, ]); } catch (error: any) { console.log(error); throw new Error('insertBigIntValues has broken'); } } async selectAllFields(): Promise { try { await this.beforeEach(); const now = Date.now(); this.db.insert(usersTable).values({ name: 'John' }).run(); const result = this.db.select().from(usersTable).all(); expect(result[0]!.createdAt).instanceOf(Date); expect(Math.abs(result[0]!.createdAt.getTime() - now)).lessThan(5000); expect(result).deep.equal([{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt, }]); } catch { throw new Error('selectAllFields has broken'); } } async selectPartial(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const result = this.db.select({ name: usersTable.name }).from(usersTable).all(); expect(result).deep.equal([{ name: 'John' }]); } catch (error: any) { console.error(error); throw new Error(`selectPartial error`); } } async selectSql(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const users = this.db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .all(); expect(users).deep.equal([{ name: 'JOHN' }]); } catch { throw new Error('selectSql has broken'); } } async selectTypedSql(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const users = this.db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .all(); expect(users).deep.equal([{ name: 'JOHN' }]); } catch { throw new Error('selectTypedSql has broken'); } } async selectWithEmptyArrayInInArray(): Promise { try { await this.beforeEach(); await this.db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await this.db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(inArray(usersTable.id, [])); expect(result).deep.equal([]); } catch (error: any) { console.error(error); throw new Error('selectWithEmptyArrayInInArray has broken'); } } async selectWithEmptyArrayInNotInArray(): Promise { try { await this.beforeEach(); await this.db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await this.db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(notInArray(usersTable.id, [])); expect(result).deep.equal([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); } catch (error: any) { console.error(error); throw new Error('selectWithEmptyArrayInNotInArray has broken'); } } async selectDistinct(): Promise { try { await this.beforeEach(); const usersDistinctTable = sqliteTable('users_distinct', { id: integer('id').notNull(), name: text('name').notNull(), }); this.db.run(sql`drop table if exists ${usersDistinctTable}`); this.db.run(sql`create table ${usersDistinctTable} (id integer, name text)`); this.db .insert(usersDistinctTable) .values([ { id: 1, name: 'John' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]) .run(); const users = this.db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ).all(); this.db.run(sql`drop table ${usersDistinctTable}`); expect(users).deep.equal([ { id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, ]); } catch (error: any) { console.error(error); throw new Error('selectDistinct has broken'); } } async returingSql(): Promise { try { await this.beforeEach(); const users = this.db .insert(usersTable) .values({ name: 'John' }) .returning({ name: sql`upper(${usersTable.name})`, }) .all(); expect(users).deep.equal([{ name: 'JOHN' }]); } catch (error: any) { console.error(error); throw new Error('returingSql has broken'); } } async $defaultFunction(): Promise { try { await this.beforeEach(); await this.db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); const selectedOrder = await this.db.select().from(orders); expect(selectedOrder).deep.equal([ { id: 1, amount: 1, quantity: 1, region: 'Ukraine', product: 'random_string', }, ]); } catch (error: any) { console.error(error); throw new Error('defaultFunction has broken'); } } async deleteReturningSql(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const users = this.db .delete(usersTable) .where(eq(usersTable.name, 'John')) .returning({ name: sql`upper(${usersTable.name})`, }) .all(); expect(users).deep.equal([{ name: 'JOHN' }]); } catch (error: any) { console.error(error); throw new Error('deleteReturningSql has broken'); } } async queryCheckInsertSingleEmptyRow(): Promise { try { await this.beforeEach(); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); const query = this.db.insert(users).values({}).toSQL(); expect(query).deep.equal({ sql: 'insert into "users" ("id", "name", "state") values (null, ?, null)', params: ['Dan'], }); } catch (error: any) { console.error(error); throw new Error('queryCheckInsertSingleEmptyRow has broken'); } } async queryCheckInsertMultipleEmptyRow(): Promise { try { await this.beforeEach(); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); const query = this.db.insert(users).values([{}, {}]).toSQL(); expect(query).deep.equal({ sql: 'insert into "users" ("id", "name", "state") values (null, ?, null), (null, ?, null)', params: ['Dan', 'Dan'], }); } catch (error: any) { console.error(error); throw new Error('queryCheckInsertMultipleEmptyRow has broken'); } } async insertAllDefaultsIn1Row(): Promise { try { await this.beforeEach(); const users = sqliteTable('empty_insert_single', { id: integer('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); this.db.run(sql`drop table if exists ${users}`); this.db.run(sql`create table ${users} (id integer primary key, name text default 'Dan', state text)`); this.db.insert(users).values({}).run(); const res = this.db.select().from(users).all(); expect(res).deep.equal([{ id: 1, name: 'Dan', state: null }]); } catch (error: any) { console.error(error); throw new Error('insertAllDefaultsIn1Row has broken'); } } async insertAllDefaultsInMultipleRows(): Promise { try { await this.beforeEach(); const users = sqliteTable('empty_insert_multiple', { id: integer('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); this.db.run(sql`drop table if exists ${users}`); this.db.run(sql`create table ${users} (id integer primary key, name text default 'Dan', state text)`); this.db.insert(users).values([{}, {}]).run(); const res = this.db.select().from(users).all(); expect(res).deep.equal([ { id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }, ]); } catch (error: any) { console.error(error); throw new Error('insertAllDefaultsInMultipleRows has broken'); } } async updateReturningSql(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const users = this.db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning({ name: sql`upper(${usersTable.name})`, }) .all(); expect(users).deep.equal([{ name: 'JANE' }]); } catch (error: any) { console.error(error); throw new Error('updateReturningSql has broken'); } } async insertWithAutoIncrement(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'George' }, { name: 'Austin' }]) .run(); const result = this.db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); expect(result).deep.equal([ { id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'George' }, { id: 4, name: 'Austin' }, ]); } catch (error: any) { console.error(error); throw new Error('insertWithAutoIncrement has broken'); } } async insertDataWithDefaultValues(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const result = this.db.select().from(usersTable).all(); expect(result).deep.equal([{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt, }]); } catch (error: any) { console.error(error); throw new Error('insertDataWithDefaultValues has broken'); } } async insertDataWithOverridenDefaultValues(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John', verified: true }).run(); const result = this.db.select().from(usersTable).all(); expect(result).deep.equal([{ id: 1, name: 'John', verified: true, json: null, createdAt: result[0]!.createdAt }]); } catch (error: any) { console.error(error); throw new Error('insertDataWithOverridenDefaultValues has broken'); } } async updateWithReturningFields(): Promise { try { await this.beforeEach(); const now = Date.now(); this.db.insert(usersTable).values({ name: 'John' }).run(); const users = this.db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning() .all(); expect(users[0]!.createdAt).instanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).lessThan(5000); expect(users).deep.equal([{ id: 1, name: 'Jane', verified: false, json: null, createdAt: users[0]!.createdAt }]); } catch (error: any) { console.error(error); throw new Error('updateWithReturningFields has broken'); } } async updateWithReturningPartial(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const users = this.db .update(usersTable) .set({ name: 'Jane' }) .where(eq(usersTable.name, 'John')) .returning({ id: usersTable.id, name: usersTable.name, }) .all(); expect(users).deep.equal([{ id: 1, name: 'Jane' }]); } catch (error: any) { console.error(error); throw new Error('updateWithReturningFields has broken'); } } async updateWithReturningAllFields(): Promise { try { await this.beforeEach(); const now = Date.now(); this.db.insert(usersTable).values({ name: 'John' }).run(); const users = this.db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().all(); expect(users[0]!.createdAt).instanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).lessThan(5000); expect(users).deep.equal([{ id: 1, name: 'John', verified: false, json: null, createdAt: users[0]!.createdAt }]); } catch (error: any) { console.error(error); throw new Error('updateWithReturningFields has broken'); } } async deleteWithReturningPartial(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const users = this.db .delete(usersTable) .where(eq(usersTable.name, 'John')) .returning({ id: usersTable.id, name: usersTable.name, }) .all(); expect(users).deep.equal([{ id: 1, name: 'John' }]); } catch (error: any) { console.error(error); throw new Error(`deleteWithReturningPartial error`); } } async insertAndSelect(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const result = this.db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); expect(result).deep.equal([{ id: 1, name: 'John' }]); this.db.insert(usersTable).values({ name: 'Jane' }).run(); const result2 = this.db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); expect(result2).deep.equal([ { id: 1, name: 'John' }, { id: 2, name: 'Jane' }, ]); } catch (error: any) { console.error(error); throw new Error(`insertAndSelect error`); } } async jsonInsert(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values({ name: 'John', json: ['foo', 'bar'] }) .run(); const result = this.db .select({ id: usersTable.id, name: usersTable.name, json: usersTable.json, }) .from(usersTable) .all(); expect(result).deep.equal([{ id: 1, name: 'John', json: ['foo', 'bar'] }]); } catch (error: any) { console.error(error); throw new Error(`jsonInsert error`); } } async insertMany(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'Bruce', json: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true, }]) .run(); const result = this.db .select({ id: usersTable.id, name: usersTable.name, json: usersTable.json, verified: usersTable.verified, }) .from(usersTable) .all(); expect(result).deep.equal([ { id: 1, name: 'John', json: null, verified: false }, { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', json: null, verified: false }, { id: 4, name: 'Austin', json: null, verified: true }, ]); } catch (error: any) { console.error(error); throw new Error(`insertMany error`); } } async insertManyWithReturning(): Promise { try { await this.beforeEach(); const result = this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'Bruce', json: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true, }]) .returning({ id: usersTable.id, name: usersTable.name, json: usersTable.json, verified: usersTable.verified, }) .all(); expect(result).deep.equal([ { id: 1, name: 'John', json: null, verified: false }, { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', json: null, verified: false }, { id: 4, name: 'Austin', json: null, verified: true }, ]); } catch (error: any) { console.error(error); throw new Error(`insertManyWithReturning error`); } } async partialJoinWithAlias(): Promise { try { await this.beforeEach(); const customerAlias = alias(usersTable, 'customer'); await this.db.insert(usersTable).values([ { id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }, ]); const result = await this.db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }) .from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); expect(result).deep.equal([ { user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }, ]); } catch (error: any) { console.error(error); throw new Error(`partialJoinWithAlias error`); } } async fullJoinWithAlias(): Promise { try { await this.beforeEach(); const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').notNull(), }); this.db.run(sql`drop table if exists ${users}`); this.db.run(sql`create table ${users} (id integer primary key, name text not null)`); const customers = alias(users, 'customer'); this.db .insert(users) .values([ { id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }, ]) .run(); const result = this.db.select().from(users).leftJoin(customers, eq(customers.id, 11)).where(eq(users.id, 10)) .all(); expect(result).deep.equal([ { users: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }, ]); this.db.run(sql`drop table ${users}`); } catch (error: any) { console.error(error); throw new Error(`fullJoinWithAlias error`); } } async selectFromAlias(): Promise { try { await this.beforeEach(); const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').notNull(), }); this.db.run(sql`drop table if exists ${users}`); this.db.run(sql`create table ${users} (id integer primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); this.db .insert(users) .values([ { id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }, ]) .run(); const result = this.db.select().from(user).leftJoin(customers, eq(customers.id, 11)).where(eq(user.id, 10)).all(); expect(result).deep.equal([ { user: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }, ]); this.db.run(sql`drop table ${users}`); } catch (error: any) { console.error(error); throw new Error(`selectFromAlias error`); } } async insertWithSpaces(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values({ name: sql`'Jo h n'` }) .run(); const result = await this.db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); expect(result).deep.equal([{ id: 1, name: 'Jo h n' }]); } catch (error: any) { console.error(error); throw new Error(`insertWithSpaces error`); } } async preparedStatement(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const statement = this.db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).prepare(); const result = statement.all(); expect(result).deep.equal([{ id: 1, name: 'John' }]); } catch (error: any) { console.error(error); throw new Error(`preparedStatement error`); } } async preparedStatementReuse(): Promise { try { await this.beforeEach(); const stmt = this.db .insert(usersTable) .values({ name: sql.placeholder('name') }) .prepare(); for (let i = 0; i < 10; i++) { stmt.run({ name: `John ${i}` }); } const result = this.db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .all(); expect(result).deep.equal([ { id: 1, name: 'John 0' }, { id: 2, name: 'John 1' }, { id: 3, name: 'John 2' }, { id: 4, name: 'John 3' }, { id: 5, name: 'John 4' }, { id: 6, name: 'John 5' }, { id: 7, name: 'John 6' }, { id: 8, name: 'John 7' }, { id: 9, name: 'John 8' }, { id: 10, name: 'John 9' }, ]); } catch (error: any) { console.error(error); throw new Error(`preparedStatementReuse error`); } } async insertPlaceholdersOnColumnsWithEncoder(): Promise { try { await this.beforeEach(); const stmt = this.db .insert(usersTable) .values({ name: 'John', verified: sql.placeholder('verified'), }) .prepare(); stmt.run({ verified: true }); stmt.run({ verified: false }); const result = this.db .select({ id: usersTable.id, verified: usersTable.verified, }) .from(usersTable) .all(); expect(result).deep.equal([ { id: 1, verified: true }, { id: 2, verified: false }, ]); } catch (error: any) { console.error(error); throw new Error(`insertPlaceholdersOnColumnsWithEncoder error`); } } async preparedStatementWithPlaceholderInWhere(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const stmt = this.db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .prepare(); const result = stmt.all({ id: 1 }); expect(result).deep.equal([{ id: 1, name: 'John' }]); } catch (error: any) { console.error(error); throw new Error(`preparedStatementWithPlaceholderInWhere error`); } } async preparedStatementWithPlaceholderInLimit(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ name: 'John' }).run(); const stmt = this.db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .limit(sql.placeholder('limit')) .prepare(); const result = await stmt.all({ id: 1, limit: 1 }); expect(result).deep.equal([{ id: 1, name: 'John' }]); expect(result).length(1); } catch (error: any) { console.error(error); throw new Error(`preparedStatementWithPlaceholderInLimit error`); } } async preparedStatementWithPlaceholderInOffset(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'John1' }]) .run(); const stmt = this.db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .limit(sql.placeholder('limit')) .offset(sql.placeholder('offset')) .prepare(); const result = stmt.all({ limit: 1, offset: 1 }); expect(result).deep.equal([{ id: 2, name: 'John1' }]); } catch (error: any) { console.error(error); throw new Error(`preparedStatementWithPlaceholderInOffset error`); } } async preparedStatementBuiltUsing$dynamic(): Promise { try { await this.beforeEach(); function withLimitOffset(qb: any) { return qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); } this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'John1' }]) .run(); const stmt = this.db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .$dynamic(); withLimitOffset(stmt).prepare('stmt_limit'); const result = await stmt.all({ limit: 1, offset: 1 }); expect(result).deep.equal([{ id: 2, name: 'John1' }]); expect(result).length(1); } catch (error: any) { console.error(error); throw new Error(`preparedStatementBuiltUsing error`); } } async selectWithGroupByAsField(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]) .run(); const result = this.db.select({ name: usersTable.name }).from(usersTable).groupBy(usersTable.name).all(); expect(result).deep.equal([{ name: 'Jane' }, { name: 'John' }]); } catch (error: any) { console.error(error); throw new Error(`selectWithGroupByAsField error`); } } async selectWithExists(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]) .run(); const user = alias(usersTable, 'user'); const result = this.db .select({ name: usersTable.name }) .from(usersTable) .where( exists( this.db .select({ one: sql`1` }) .from(user) .where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), ), ) .all(); expect(result).deep.equal([{ name: 'John' }]); } catch (error: any) { console.error(error); throw new Error(`selectWithExists error`); } } async selectWithGroupByAsSql(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]) .run(); const result = this.db .select({ name: usersTable.name }) .from(usersTable) .groupBy(sql`${usersTable.name}`) .all(); expect(result).deep.equal([{ name: 'Jane' }, { name: 'John' }]); } catch (error: any) { console.error(error); throw new Error(`selectWithGroupByAsSql error`); } } async selectWithGroupByAsSqlPlusColumn(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]) .run(); const result = this.db .select({ name: usersTable.name }) .from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id) .all(); expect(result).deep.equal([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); } catch (error: any) { console.error(error); throw new Error(`selectWithGroupByAsSqlPlusColumn error`); } } async selectWithGroupByAsColumnPlusSql(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]) .run(); const result = this.db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .all(); expect(result).deep.equal([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); } catch (error: any) { console.error(error); throw new Error(`selectWithGroupByAsColumnPlusSql error`); } } async selectWithGroupByComplexQuery(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]) .run(); const result = this.db .select({ name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1) .all(); expect(result).deep.equal([{ name: 'Jane' }]); } catch (error: any) { console.error(error); throw new Error(`selectWithGroupByComplexQuery error`); } } async buildQuery(): Promise { try { await this.beforeEach(); const query = this.db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).deep.equal({ sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', params: [], }); } catch (error: any) { console.error(error); throw new Error(`buildQuery error`); } } async insertViaDbRunPlusSelectViaDbAll(): Promise { try { await this.beforeEach(); this.db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = this.db.all<{ id: number; name: string }>(sql`select id, name from "users"`); expect(result).deep.equal([{ id: 1, name: 'John' }]); } catch (error: any) { console.error(error); throw new Error(`insertViaDbRunPlusSelectViaDbAll error`); } } async insertViaDbGet(): Promise { try { await this.beforeEach(); const inserted = this.db.get<{ id: number; name: string }>( sql`insert into ${usersTable} (${new Name( usersTable.name.name, )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted).deep.equal({ id: 1, name: 'John' }); } catch (error: any) { console.error(error); throw new Error(`insertViaDbGet error`); } } async insertViaDbRunPlusSelectViaDbGet(): Promise { try { await this.beforeEach(); this.db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = this.db.get<{ id: number; name: string }>( sql`select ${usersTable.id}, ${usersTable.name} from ${usersTable}`, ); expect(result).deep.equal({ id: 1, name: 'John' }); } catch (error: any) { console.error(error); throw new Error(`insertViaDbRunPlusSelectViaDbGet error`); } } async insertViaDbGetQueryBuilder(): Promise { try { await this.beforeEach(); const inserted = this.db.get>( this.db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted).deep.equal({ id: 1, name: 'John' }); } catch (error: any) { console.error(error); throw new Error(`insertViaDbGetQueryBuilder error`); } } async joinSubquery(): Promise { try { await this.beforeEach(); this.db .insert(courseCategoriesTable) .values([{ name: 'Category 1' }, { name: 'Category 2' }, { name: 'Category 3' }, { name: 'Category 4' }]) .run(); this.db .insert(coursesTable) .values([ { name: 'Development', categoryId: 2 }, { name: 'IT & Software', categoryId: 3 }, { name: 'Marketing', categoryId: 4 }, { name: 'Design', categoryId: 1 }, ]) .run(); const sq2 = this.db .select({ categoryId: courseCategoriesTable.id, category: courseCategoriesTable.name, total: sql`count(${courseCategoriesTable.id})`, }) .from(courseCategoriesTable) .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) .as('sq2'); const res = await this.db .select({ courseName: coursesTable.name, categoryId: sq2.categoryId, }) .from(coursesTable) .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) .orderBy(coursesTable.name) .all(); expect(res).deep.equal([ { courseName: 'Design', categoryId: 1 }, { courseName: 'Development', categoryId: 2 }, { courseName: 'IT & Software', categoryId: 3 }, { courseName: 'Marketing', categoryId: 4 }, ]); } catch (error: any) { console.error(error); throw new Error(`joinSubquery error`); } } async withSelect(): Promise { try { await this.beforeEach(); this.db .insert(orders) .values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]) .run(); const regionalSales = this.db.$with('regional_sales').as( this.db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = this.db.$with('top_regions').as( this.db .select({ region: regionalSales.region, }) .from(regionalSales) .where( gt( regionalSales.totalSales, this.db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result = this.db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`cast(sum(${orders.quantity}) as int)`, productSales: sql`cast(sum(${orders.amount}) as int)`, }) .from(orders) .where(inArray(orders.region, this.db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product) .all(); expect(result).deep.equal([ { region: 'Europe', product: 'A', productUnits: 3, productSales: 30, }, { region: 'Europe', product: 'B', productUnits: 5, productSales: 50, }, { region: 'US', product: 'A', productUnits: 7, productSales: 70, }, { region: 'US', product: 'B', productUnits: 9, productSales: 90, }, ]); } catch (error: any) { console.error(error); throw new Error(`withSelect error`); } } async withUpdate(): Promise { try { await this.beforeEach(); const products = sqliteTable('products', { id: integer('id').primaryKey(), price: numeric('price').notNull(), cheap: integer('cheap', { mode: 'boolean' }).notNull().default(false), }); this.db.run(sql`drop table if exists ${products}`); this.db.run(sql` create table ${products} ( id integer primary key, price numeric not null, cheap integer not null default 0 ) `); await this.db .insert(products) .values([{ price: '10.99' }, { price: '25.85' }, { price: '32.99' }, { price: '2.50' }, { price: '4.59' }]); const averagePrice = this.db.$with('average_price').as( this.db .select({ value: sql`avg(${products.price})`.as('value'), }) .from(products), ); const result = await this.db .with(averagePrice) .update(products) .set({ cheap: true, }) .where(lt(products.price, sql`(select * from ${averagePrice})`)) .returning({ id: products.id, }); expect(result).deep.equal([{ id: 1 }, { id: 4 }, { id: 5 }]); } catch (error: any) { console.error(error); throw new Error(`withUpdate error`); } } async withInsert(): Promise { try { await this.beforeEach(); const users = sqliteTable('users', { username: text('username').notNull(), admin: integer('admin', { mode: 'boolean' }).notNull(), }); this.db.run(sql`drop table if exists ${users}`); this.db.run(sql`create table ${users} (username text not null, admin integer not null default 0)`); const userCount = this.db.$with('user_count').as( this.db .select({ value: sql`count(*)`.as('value'), }) .from(users), ); const result = await this.db .with(userCount) .insert(users) .values([{ username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }]) .returning({ admin: users.admin, }); expect(result).deep.equal([{ admin: true }]); } catch (error: any) { console.error(error); throw new Error(`withInsert error`); } } async withDelete(): Promise { try { await this.beforeEach(); await this.db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const averageAmount = this.db.$with('average_amount').as( this.db .select({ value: sql`avg(${orders.amount})`.as('value'), }) .from(orders), ); const result = this.db .with(averageAmount) .delete(orders) .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) .returning({ id: orders.id, }) .all(); expect(result).deep.equal([{ id: 6 }, { id: 7 }, { id: 8 }]); } catch (error: any) { console.error(error); throw new Error(`withDelete error`); } } async selectFromSubquerySql(): Promise { try { await this.beforeEach(); this.db .insert(users2Table) .values([{ name: 'John' }, { name: 'Jane' }]) .run(); const sq = this.db .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) .from(users2Table) .as('sq'); const res = this.db.select({ name: sq.name }).from(sq).all(); expect(res).deep.equal([{ name: 'John modified' }, { name: 'Jane modified' }]); } catch (error: any) { console.error(error); throw new Error(`selectFromSubquerySql error`); } } async selectAFieldWithoutJoiningItsTable(): Promise { try { await this.beforeEach(); expect(() => this.db.select({ name: users2Table.name }).from(usersTable).prepare()).throw(); } catch (error: any) { console.error(error); throw new Error(`selectAFieldWithoutJoiningItsTable error`); } } async selectAllFieldsFromSubqueryWithoutAlias(): Promise { try { const sq = this.db.$with('sq').as( this.db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table), ); expect(() => this.db.select().from(sq).prepare()).throw(); } catch (error: any) { console.error(error); throw new Error(`selectAllFieldsFromSubqueryWithoutAlias error`); } } async selectCount(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values([{ name: 'John' }, { name: 'Jane' }]) .run(); const res = this.db .select({ count: sql`count(*)` }) .from(usersTable) .all(); expect(res).deep.equal([{ count: 2 }]); } catch (error: any) { console.error(error); throw new Error(`selectCount error`); } } async having(): Promise { try { await this.beforeEach(); this.db .insert(citiesTable) .values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]) .run(); this.db .insert(users2Table) .values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]) .run(); const result = this.db .select({ id: citiesTable.id, name: sql`upper(${citiesTable.name})`.as('upper_name'), usersCount: sql`count(${users2Table.id})`.as('users_count'), }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(citiesTable.id) .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name) .all(); expect(result).deep.equal([ { id: 1, name: 'LONDON', usersCount: 2, }, { id: 2, name: 'PARIS', usersCount: 1, }, ]); } catch (error: any) { console.error(error); throw new Error(`having error`); } } async view(): Promise { try { await this.beforeEach(); const newYorkers1 = sqliteView('new_yorkers').as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1)) ); const newYorkers2 = sqliteView('new_yorkers', { id: integer('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); const newYorkers3 = sqliteView('new_yorkers', { id: integer('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).existing(); this.db.run(sql`create view if not exists new_yorkers as ${getViewConfig(newYorkers1).query}`); this.db .insert(citiesTable) .values([{ name: 'New York' }, { name: 'Paris' }]) .run(); this.db .insert(users2Table) .values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]) .run(); { const result = this.db.select().from(newYorkers1).all(); expect(result).deep.equal([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = this.db.select().from(newYorkers2).all(); expect(result).deep.equal([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = this.db.select().from(newYorkers3).all(); expect(result).deep.equal([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = this.db.select({ name: newYorkers1.name }).from(newYorkers1).all(); expect(result).deep.equal([{ name: 'John' }, { name: 'Jane' }]); } this.db.run(sql`drop view ${newYorkers1}`); } catch (error: any) { console.error(error); throw new Error(`view error`); } } async insertNullTimestamp(): Promise { try { await this.beforeEach(); const test = sqliteTable('test', { t: integer('t', { mode: 'timestamp' }), }); this.db.run(sql`create table ${test} (t timestamp)`); this.db.insert(test).values({ t: null }).run(); const res = await this.db.select().from(test).all(); expect(res).deep.equal([{ t: null }]); this.db.run(sql`drop table ${test}`); } catch (error: any) { console.error(error); throw new Error(`insertNullTimestamp error`); } } async selectFromRawSql(): Promise { try { const result = this.db .select({ id: sql`id`, name: sql`name`, }) .from(sql`(select 1 as id, 'John' as name) as users`) .all(); Expect>; expect(result).deep.equal([{ id: 1, name: 'John' }]); } catch (error: any) { console.error(error); throw new Error(`selectFromRawSql error`); } } async selectFromRawSqlWithJoins(): Promise { try { await this.beforeEach(); const result = this.db .select({ id: sql`users.id`, name: sql`users.name`.as('userName'), userCity: sql`users.city`, cityName: sql`cities.name`.as('cityName'), }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`) .all(); Expect>; expect(result).deep.equal([{ id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }]); } catch (error: any) { console.error(error); throw new Error(`selectFromRawSqlWithJoins error`); } } async joinOnAliasedSqlFromSelect(): Promise { try { await this.beforeEach(); const result = this.db .select({ userId: sql`users.id`.as('userId'), name: sql`users.name`.as('userName'), userCity: sql`users.city`, cityId: sql`cities.id`.as('cityId'), cityName: sql`cities.name`.as('cityName'), }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)) .all(); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).deep.equal([{ userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }]); } catch (error: any) { console.error(error); throw new Error(`joinOnAliasedSqlFromSelect error`); } } async joinOnAliasedSqlFromWithClause(): Promise { try { await this.beforeEach(); const users = this.db.$with('users').as( this.db .select({ id: sql`id`.as('userId'), name: sql`name`.as('userName'), city: sql`city`.as('city'), }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`), ); const cities = this.db.$with('cities').as( this.db .select({ id: sql`id`.as('cityId'), name: sql`name`.as('cityName'), }) .from(sql`(select 1 as id, 'Paris' as name) as cities`), ); const result = this.db .with(users, cities) .select({ userId: users.id, name: users.name, userCity: users.city, cityId: cities.id, cityName: cities.name, }) .from(users) .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)) .all(); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).deep.equal([{ userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }]); } catch (error: any) { console.error(error); throw new Error(`joinOnAliasedSqlFromWithClause error`); } } async prefixedTable(): Promise { try { await this.beforeEach(); const sqliteTable = sqliteTableCreator((name) => `myprefix_${name}`); const users = sqliteTable('test_prefixed_table_with_unique_name', { id: integer('id').primaryKey(), name: text('name').notNull(), }); this.db.run(sql`drop table if exists ${users}`); this.db.run( sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, ); this.db.insert(users).values({ id: 1, name: 'John' }).run(); const result = this.db.select().from(users).all(); expect(result).deep.equal([{ id: 1, name: 'John' }]); this.db.run(sql`drop table ${users}`); } catch (error: any) { console.error(error); throw new Error(`prefixedTable error`); } } async orderByWithAliasedColumn(): Promise { try { await this.beforeEach(); const query = this.db .select({ test: sql`something`.as('test'), }) .from(users2Table) .orderBy((fields) => fields.test) .toSQL(); expect(query.sql).equal('select something as "test" from "users2" order by "test"'); } catch (error: any) { console.error(error); throw new Error(`orderByWithAliasedColumn error`); } } async transaction(): Promise { try { await this.beforeEach(); const users = sqliteTable('users_transactions', { id: integer('id').primaryKey(), balance: integer('balance').notNull(), }); const products = sqliteTable('products_transactions', { id: integer('id').primaryKey(), price: integer('price').notNull(), stock: integer('stock').notNull(), }); this.db.run(sql`drop table if exists ${users}`); this.db.run(sql`drop table if exists ${products}`); this.db.run(sql`create table users_transactions (id integer not null primary key, balance integer not null)`); this.db.run( sql`create table products_transactions (id integer not null primary key, price integer not null, stock integer not null)`, ); const user = this.db.insert(users).values({ balance: 100 }).returning().get(); const product = this.db.insert(products).values({ price: 10, stock: 10 }).returning().get(); this.db.transaction(async (tx) => { tx.update(users) .set({ balance: user.balance - product.price }) .where(eq(users.id, user.id)) .run(); tx.update(products) .set({ stock: product.stock - 1 }) .where(eq(products.id, product.id)) .run(); }); const result = this.db.select().from(users).all(); expect(result).deep.equal([{ id: 1, balance: 90 }]); this.db.run(sql`drop table ${users}`); this.db.run(sql`drop table ${products}`); } catch (error: any) { console.error(error); throw new Error(`transaction error`); } } // async transactionRollback(): Promise{ // const users = sqliteTable('users_transactions_rollback', { // id: integer('id').primaryKey(), // balance: integer('balance').notNull(), // }); // this.db.run(sql`drop table if exists ${users}`); // this.db.run( // sql`create table users_transactions_rollback (id integer not null primary key, balance integer not null)`, // ); // await expect(async () => { // this.db.transaction(async (tx) => { // tx.insert(users).values({ balance: 100 }).run(); // tx.rollback(); // }); // }).re(TransactionRollbackError); // const result = await db.select().from(users).all(); // expect(result).toEqual([]); // await db.run(sql`drop table ${users}`); // }; async nestedTransaction(): Promise { try { await this.beforeEach(); const users = sqliteTable('users_nested_transactions', { id: integer('id').primaryKey(), balance: integer('balance').notNull(), }); this.db.run(sql`drop table if exists ${users}`); this.db.run( sql`create table users_nested_transactions (id integer not null primary key, balance integer not null)`, ); this.db.transaction((tx) => { tx.insert(users).values({ balance: 100 }).run(); tx.transaction((tx) => { tx.update(users).set({ balance: 200 }).run(); }); }); const result = this.db.select().from(users).all(); expect(result).deep.equal([{ id: 1, balance: 200 }]); this.db.run(sql`drop table ${users}`); } catch (error: any) { console.error(error); throw new Error(`nestedTransaction error`); } } // async nestedTransactionRollback(): Promise{ // const users = sqliteTable('users_nested_transactions_rollback', { // id: integer('id').primaryKey(), // balance: integer('balance').notNull(), // }); // this.db.run(sql`drop table if exists ${users}`); // this.db.run( // sql`create table users_nested_transactions_rollback (id integer not null primary key, balance integer not null)`, // ); // this.db.transaction((tx) => { // this.tx.insert(users).values({ balance: 100 }).run(); // expect(async () => { // await tx.transaction(async (tx) => { // await tx.update(users).set({ balance: 200 }).run(); // tx.rollback(); // }); // }).rejects.toThrowError(TransactionRollbackError); // }); // const result = await db.select().from(users).all(); // expect(result).toEqual([{ id: 1, balance: 100 }]); // await db.run(sql`drop table ${users}`); // }; async joinSubqueryWithJoin(): Promise { try { await this.beforeEach(); const internalStaff = sqliteTable('internal_staff', { userId: integer('user_id').notNull(), }); const customUser = sqliteTable('custom_user', { id: integer('id').notNull(), }); const ticket = sqliteTable('ticket', { staffId: integer('staff_id').notNull(), }); this.db.run(sql`drop table if exists ${internalStaff}`); this.db.run(sql`drop table if exists ${customUser}`); this.db.run(sql`drop table if exists ${ticket}`); this.db.run(sql`create table internal_staff (user_id integer not null)`); this.db.run(sql`create table custom_user (id integer not null)`); this.db.run(sql`create table ticket (staff_id integer not null)`); this.db.insert(internalStaff).values({ userId: 1 }).run(); this.db.insert(customUser).values({ id: 1 }).run(); this.db.insert(ticket).values({ staffId: 1 }).run(); const subq = this.db.select().from(internalStaff).leftJoin(customUser, eq(internalStaff.userId, customUser.id)) .as('internal_staff'); const mainQuery = this.db.select().from(ticket).leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)) .all(); expect(mainQuery).deep.equal([ { ticket: { staffId: 1 }, internal_staff: { internal_staff: { userId: 1 }, custom_user: { id: 1 }, }, }, ]); this.db.run(sql`drop table ${internalStaff}`); this.db.run(sql`drop table ${customUser}`); this.db.run(sql`drop table ${ticket}`); } catch (error: any) { console.error(error); throw new Error(`joinSubqueryWithJoin error`); } } async joinViewAsSubquery(): Promise { try { await this.beforeEach(); const users = sqliteTable('users_join_view', { id: integer('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }); const newYorkers = sqliteView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); this.db.run(sql`drop table if exists ${users}`); this.db.run(sql`drop view if exists ${newYorkers}`); this.db.run( sql`create table ${users} (id integer not null primary key, name text not null, city_id integer not null)`, ); this.db.run(sql`create view if not exists ${newYorkers} as ${getViewConfig(newYorkers).query}`); this.db .insert(users) .values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 1 }, { name: 'Jill', cityId: 2 }, ]) .run(); const sq = this.db.select().from(newYorkers).as('new_yorkers_sq'); const result = await this.db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).all(); expect(result).deep.equal([ { users_join_view: { id: 1, name: 'John', cityId: 1 }, new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, }, { users_join_view: { id: 2, name: 'Jane', cityId: 2 }, new_yorkers_sq: null, }, { users_join_view: { id: 3, name: 'Jack', cityId: 1 }, new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, }, { users_join_view: { id: 4, name: 'Jill', cityId: 2 }, new_yorkers_sq: null, }, ]); this.db.run(sql`drop view ${newYorkers}`); this.db.run(sql`drop table ${users}`); } catch (error: any) { console.error(error); throw new Error(`joinViewAsSubquery error`); } } async insertWithOnConflictDoNothing(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ id: 1, name: 'John' }).run(); this.db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing().run(); const res = this.db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ).all(); expect(res).deep.equal([{ id: 1, name: 'John' }]); } catch (error: any) { console.error(error); throw new Error(`insertWithOnConflictDoNothing error`); } } async insertWithOnConflictDoNothinUsingCompositePk(): Promise { try { await this.beforeEach(); this.db.insert(pkExampleTable).values({ id: 1, name: 'John', email: 'john@example.com' }).run(); this.db.insert(pkExampleTable).values({ id: 1, name: 'John', email: 'john1@example.com' }).onConflictDoNothing() .run(); const res = await this.db .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) .from(pkExampleTable) .where(eq(pkExampleTable.id, 1)) .all(); expect(res).deep.equal([{ id: 1, name: 'John', email: 'john@example.com' }]); } catch (error: any) { console.error(error); throw new Error(`insertWithOnConflictDoNothinUsingCompositePk error`); } } async insertWithOnConflictDoNothingUsingTarget(): Promise { try { this.db.insert(usersTable).values({ id: 1, name: 'John' }).run(); this.db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing({ target: usersTable.id }).run(); const res = this.db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ).all(); expect(res).deep.equal([{ id: 1, name: 'John' }]); } catch (error: any) { console.error(error); throw new Error(`insertWithOnConflictDoNothingUsingTarget error`); } } async insertWithOnConflictDoNothingUsingCompositePkAsTarget(): Promise { try { await this.beforeEach(); this.db.insert(pkExampleTable).values({ id: 1, name: 'John', email: 'john@example.com' }).run(); this.db .insert(pkExampleTable) .values({ id: 1, name: 'John', email: 'john1@example.com' }) .onConflictDoNothing({ target: [pkExampleTable.id, pkExampleTable.name] }) .run(); const res = this.db .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) .from(pkExampleTable) .where(eq(pkExampleTable.id, 1)) .all(); expect(res).deep.equal([{ id: 1, name: 'John', email: 'john@example.com' }]); } catch (error: any) { console.error(error); throw new Error(`insertWithOnConflictDoNothingUsingCompositePkAsTarget error`); } } async insertWithOnConflictDoUpdate(): Promise { try { await this.beforeEach(); this.db.insert(usersTable).values({ id: 1, name: 'John' }).run(); this.db .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) .run(); const res = this.db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).where( eq(usersTable.id, 1), ).all(); expect(res).deep.equal([{ id: 1, name: 'John1' }]); } catch (error: any) { console.error(error); throw new Error(`insertWithOnConflictDoUpdate error`); } } async insertWithOnConflictDoUpdateWhere(): Promise { try { await this.beforeEach(); this.db .insert(usersTable) .values([{ id: 1, name: 'John', verified: false }]) .run(); this.db .insert(usersTable) .values({ id: 1, name: 'John1', verified: true }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1', verified: true }, where: eq(usersTable.verified, false), }) .run(); const res = this.db .select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified }) .from(usersTable) .where(eq(usersTable.id, 1)) .all(); expect(res).deep.equal([{ id: 1, name: 'John1', verified: true }]); } catch (error: any) { console.error(error); throw new Error(`insertWithOnConflictDoUpdateWhere error`); } } async insertWithOnConflictDoUpdateUsingCompositePk(): Promise { try { await this.beforeEach(); this.db.insert(pkExampleTable).values({ id: 1, name: 'John', email: 'john@example.com' }).run(); this.db .insert(pkExampleTable) .values({ id: 1, name: 'John', email: 'john@example.com' }) .onConflictDoUpdate({ target: [pkExampleTable.id, pkExampleTable.name], set: { email: 'john1@example.com' } }) .run(); const res = this.db .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) .from(pkExampleTable) .where(eq(pkExampleTable.id, 1)) .all(); expect(res).deep.equal([{ id: 1, name: 'John', email: 'john1@example.com' }]); } catch (error: any) { console.error(error); throw new Error(`insertWithOnConflictDoUpdateUsingCompositePk error`); } } async insertUndefined(): Promise { const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); this.db.run(sql`drop table if exists ${users}`); this.db.run( sql`create table ${users} (id integer primary key, name text)`, ); expect((() => { this.db.insert(users).values({ name: undefined }).run(); })()).not.throw(); this.db.run(sql`drop table ${users}`); } async updateUndefined(): Promise { const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); this.db.run(sql`drop table if exists ${users}`); this.db.run( sql`create table ${users} (id integer primary key, name text)`, ); expect((() => { this.db.update(users).set({ name: undefined }).run(); })()).throw(); expect((() => { this.db.update(users).set({ id: 1, name: undefined }).run(); })()).not.throw(); this.db.run(sql`drop table ${users}`); } async apiCRUD(): Promise { try { await this.beforeEach(); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); this.db.run(sql`drop table if exists ${users}`); this.db.run(sql`create table ${users} (id integer primary key, name text)`); this.db.insert(users).values({ id: 1, name: 'John' }).run(); const res = this.db.select().from(users).all(); expect(res).deep.equal([{ id: 1, name: 'John' }]); this.db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).all(); const res1 = this.db.select().from(users).all(); expect(res1).deep.equal([{ id: 1, name: 'John1' }]); this.db.delete(users).where(eq(users.id, 1)).run(); const res2 = this.db.select().from(users).all(); expect(res2).deep.equal([]); this.db.run(sql`drop table ${users}`); } catch (error: any) { console.error(error); throw new Error(`apiCRUD error`); } } async apiInsertPlusSelectPreparePlusAsyncExecute(): Promise { try { await this.beforeEach(); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); this.db.run(sql`drop table if exists ${users}`); this.db.run(sql`create table ${users} (id integer primary key, name text)`); const insertStmt = this.db.insert(users).values({ id: 1, name: 'John' }).prepare(); insertStmt.execute().sync(); const selectStmt = this.db.select().from(users).prepare(); const res = selectStmt.execute().sync(); expect(res).deep.equal([{ id: 1, name: 'John' }]); const updateStmt = this.db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); updateStmt.execute().sync(); const res1 = selectStmt.execute().sync(); expect(res1).deep.equal([{ id: 1, name: 'John1' }]); const deleteStmt = this.db.delete(users).where(eq(users.id, 1)).prepare(); deleteStmt.execute().sync(); const res2 = selectStmt.execute().sync(); expect(res2).deep.equal([]); this.db.run(sql`drop table ${users}`); } catch (error: any) { console.error(error); throw new Error(`apiInsertPlusSelectPreparePlusAsyncExecute error`); } } async apiInsertSelectPreparePlusSyncExecute(): Promise { try { await this.beforeEach(); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); this.db.run(sql`drop table if exists ${users}`); this.db.run(sql`create table ${users} (id integer primary key, name text)`); const insertStmt = this.db.insert(users).values({ id: 1, name: 'John' }).prepare(); insertStmt.execute().sync(); const selectStmt = this.db.select().from(users).prepare(); const res = selectStmt.execute().sync(); expect(res).deep.equal([{ id: 1, name: 'John' }]); const updateStmt = this.db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); updateStmt.execute().sync(); const res1 = selectStmt.execute().sync(); expect(res1).deep.equal([{ id: 1, name: 'John1' }]); const deleteStmt = this.db.delete(users).where(eq(users.id, 1)).prepare(); deleteStmt.execute().sync(); const res2 = selectStmt.execute().sync(); expect(res2).deep.equal([]); this.db.run(sql`drop table ${users}`); } catch (error: any) { console.error(error); throw new Error(`apiInsertSelectPreparePlusSyncExecute error`); } } async selectPlusGetForEmptyResult(): Promise { try { await this.beforeEach(); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); this.db.run(sql`drop table if exists ${users}`); this.db.run(sql`create table ${users} (id integer primary key, name text)`); const res = this.db.select().from(users).where(eq(users.id, 1)).get(); expect(res).eq(undefined); this.db.run(sql`drop table ${users}`); } catch (error: any) { console.error(error); throw new Error(`selectPlusGetForEmptyResult error`); } } async setOperationsUnionFromQueryBuilderWithSubquery(): Promise { try { await this.beforeEach(); await setupSetOperationTest(this.db); const sq = this.db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable) .union(this.db.select({ id: users2Table.id, name: users2Table.name }).from(users2Table)) .orderBy(asc(sql`name`)) .as('sq'); const result = await this.db.select().from(sq).limit(5).offset(5); expect(result).length(5); expect(result).deep.equal([ { id: 2, name: 'London' }, { id: 7, name: 'Mary' }, { id: 1, name: 'New York' }, { id: 4, name: 'Peter' }, { id: 8, name: 'Sally' }, ]); expect(() => { this.db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).union( this.db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table), ).orderBy(asc(sql`name`)).all(); }).throw(); } catch (error: any) { console.error(error); throw new Error(`setOperationsUnionFromQueryBuilderWithSubquery error`); } } async setOperationsUnionAsFunction(): Promise { try { await this.beforeEach(); await setupSetOperationTest(this.db); const result = union( this.db.select({ id: citiesTable.id, name: citiesTable.name }).from(citiesTable).where(eq(citiesTable.id, 1)), this.db.select({ id: users2Table.id, name: users2Table.name }).from(users2Table).where(eq(users2Table.id, 1)), this.db.select({ id: users2Table.id, name: users2Table.name }).from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`name`)).all(); expect(result).length(2); expect(result).deep.equal([ { id: 1, name: 'John' }, { id: 1, name: 'New York' }, ]); expect(() => { union( this.db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), this.db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), this.db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`name`)).run(); }).throw(); } catch (error: any) { console.error(error); throw new Error(`setOperationsUnionAsFunction error`); } } async setOperationsUnionAllFromQueryBuilder(): Promise { try { await this.beforeEach(); await setupSetOperationTest(this.db); const result = this.db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable) .unionAll(this.db.select({ id: citiesTable.id, name: citiesTable.name }).from(citiesTable)) .orderBy(asc(citiesTable.id)) .limit(5) .offset(1).all(); expect(result).length(5); expect(result).deep.equal([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, { id: 3, name: 'Tampa' }, ]); expect(() => { this.db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).unionAll( this.db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable), ).orderBy(asc(citiesTable.id)).limit(5).offset(1).run(); }).throw(); } catch (error: any) { console.error(error); throw new Error(`setOperationsUnionAllFromQueryBuilder error`); } } async setOperationsUnionAllAsFunction(): Promise { try { await this.beforeEach(); await setupSetOperationTest(this.db); const result = unionAll( this.db.select({ id: citiesTable.id, name: citiesTable.name }).from(citiesTable).where(eq(citiesTable.id, 1)), this.db.select({ id: users2Table.id, name: users2Table.name }).from(users2Table).where(eq(users2Table.id, 1)), this.db.select({ id: users2Table.id, name: users2Table.name }).from(users2Table).where(eq(users2Table.id, 1)), ).all(); expect(result).length(3); expect(result).deep.equal([ { id: 1, name: 'New York' }, { id: 1, name: 'John' }, { id: 1, name: 'John' }, ]); expect(() => { unionAll( this.db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), this.db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), this.db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), ).run(); }).throw(); } catch (error: any) { console.error(error); throw new Error(`setOperationsUnionAllAsFunction error`); } } async setOperationsIntersectFromQueryBuilder(): Promise { try { await this.beforeEach(); await setupSetOperationTest(this.db); const result = this.db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable) .intersect( this.db.select({ id: citiesTable.id, name: citiesTable.name }).from(citiesTable).where(gt(citiesTable.id, 1)), ) .orderBy(asc(sql`name`)).all(); expect(result).length(2); expect(result).deep.equal([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); expect(() => { this.db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).intersect( this.db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`name`)).run(); }).throw(); } catch (error: any) { console.error(error); throw new Error(`setOperationsIntersectFromQueryBuilder error`); } } async setOperationsIntersectAsFunction(): Promise { try { await this.beforeEach(); await setupSetOperationTest(this.db); const result = intersect( this.db.select({ id: citiesTable.id, name: citiesTable.name }).from(citiesTable).where(eq(citiesTable.id, 1)), this.db.select({ id: users2Table.id, name: users2Table.name }).from(users2Table).where(eq(users2Table.id, 1)), this.db.select({ id: users2Table.id, name: users2Table.name }).from(users2Table).where(eq(users2Table.id, 1)), ).all(); expect(result).length(0); expect(result).deep.equal([]); expect(() => { intersect( this.db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), this.db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), this.db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).run(); }).throw(); } catch (error: any) { console.error(error); throw new Error(`setOperationsIntersectAsFunction error`); } } async setOperationsExceptFromQueryBuilder(): Promise { try { await this.beforeEach(); await setupSetOperationTest(this.db); const result = this.db .select() .from(citiesTable) .except(this.db.select().from(citiesTable).where(gt(citiesTable.id, 1))).all(); expect(result).length(1); expect(result).deep.equal([{ id: 1, name: 'New York' }]); expect(() => { this.db .select() .from(citiesTable).except( this.db .select({ name: users2Table.name, id: users2Table.id }) .from(citiesTable).where(gt(citiesTable.id, 1)), ); }).throw(); } catch (error: any) { console.error(error); throw new Error(`setOperationsExceptFromQueryBuilder error`); } } async setOperationsExceptAsFunction(): Promise { try { await this.beforeEach(); await setupSetOperationTest(this.db); const result = except( this.db.select({ id: citiesTable.id, name: citiesTable.name }).from(citiesTable), this.db.select({ id: citiesTable.id, name: citiesTable.name }).from(citiesTable).where(eq(citiesTable.id, 1)), this.db.select({ id: users2Table.id, name: users2Table.name }).from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)).all(); expect(result).length(2); expect(result).deep.equal([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); expect(() => { except( this.db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable), this.db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), this.db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)).run(); }).throw(); } catch (error: any) { console.error(error); throw new Error(`setOperationsExceptAsFunction error`); } } async setOperationsMixedFromQueryBuilder(): Promise { try { await this.beforeEach(); await setupSetOperationTest(this.db); const result = this.db .select() .from(citiesTable) .except(({ unionAll }) => unionAll( this.db.select().from(citiesTable).where(gt(citiesTable.id, 1)), this.db.select().from(citiesTable).where(eq(citiesTable.id, 2)), ) ).all(); expect(result).length(2); expect(result).deep.equal([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, ]); expect(() => { this.db .select() .from(citiesTable).except( ({ unionAll }) => unionAll( this.db .select() .from(citiesTable).where(gt(citiesTable.id, 1)), this.db.select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).where(eq(citiesTable.id, 2)), ), ).run(); }).throw(); } catch (error: any) { console.error(error); throw new Error(`setOperationsMixedFromQueryBuilder error`); } } async setOperationsMixedAllAsFunctionWithSubquery(): Promise { try { await this.beforeEach(); await setupSetOperationTest(this.db); const sq = union( this.db.select({ id: users2Table.id, name: users2Table.name }).from(users2Table).where(eq(users2Table.id, 1)), except( this.db.select({ id: users2Table.id, name: users2Table.name }).from(users2Table).where( gte(users2Table.id, 5), ), this.db.select({ id: users2Table.id, name: users2Table.name }).from(users2Table).where(eq(users2Table.id, 7)), ), this.db.select().from(citiesTable).where(gt(citiesTable.id, 1)), ) .orderBy(asc(sql`id`)) .as('sq'); const result = await this.db.select().from(sq).limit(4).offset(1); expect(result).length(4); expect(result).deep.equal([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, { id: 5, name: 'Ben' }, { id: 6, name: 'Jill' }, ]); expect(() => { union( this.db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), except( this.db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id, 5)), this.db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 7)), ), this.db .select({ name: users2Table.name, id: users2Table.id }) .from(citiesTable).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`id`)).run(); }).throw(); } catch (error: any) { console.error(error); throw new Error(`setOperationsMixedAllAsFunctionWithSubquery error`); } } async aggregateFunctionCount(): Promise { try { await this.beforeEach(); const table = aggregateTable; await setupAggregateFunctionsTest(this.db); const result1 = await this.db.select({ value: count() }).from(table); const result2 = await this.db.select({ value: count(table.a) }).from(table); const result3 = await this.db.select({ value: countDistinct(table.name) }).from(table); expect(result1[0]?.value).eq(7); expect(result2[0]?.value).eq(5); expect(result3[0]?.value).eq(6); } catch (error: any) { console.error(error); throw new Error(`aggregateFunctionCount error`); } } async aggregatFunctionAvg(): Promise { try { await this.beforeEach(); const table = aggregateTable; await setupAggregateFunctionsTest(this.db); const result1 = await this.db.select({ value: avg(table.a) }).from(table); const result2 = await this.db.select({ value: avg(table.nullOnly) }).from(table); const result3 = await this.db.select({ value: avgDistinct(table.b) }).from(table); expect(result1[0]?.value).eq('24'); expect(result2[0]?.value).eq(null); expect(result3[0]?.value).eq('42.5'); } catch (error: any) { console.error(error); throw new Error(`aggregatFunctionAvg error`); } } async aggregateFunctionSum(): Promise { try { await this.beforeEach(); const table = aggregateTable; await setupAggregateFunctionsTest(this.db); const result1 = await this.db.select({ value: sum(table.b) }).from(table); const result2 = await this.db.select({ value: sum(table.nullOnly) }).from(table); const result3 = await this.db.select({ value: sumDistinct(table.b) }).from(table); expect(result1[0]?.value).eq('200'); expect(result2[0]?.value).eq(null); expect(result3[0]?.value).eq('170'); } catch (error: any) { console.error(error); throw new Error(`aggregateFunctionSum error`); } } async aggregateFunctionMax(): Promise { try { await this.beforeEach(); const table = aggregateTable; await setupAggregateFunctionsTest(this.db); const result1 = await this.db.select({ value: max(table.b) }).from(table); const result2 = await this.db.select({ value: max(table.nullOnly) }).from(table); expect(result1[0]?.value).eq(90); expect(result2[0]?.value).eq(null); } catch (error: any) { console.error(error); throw new Error(`aggregateFunctionMax error`); } } async aggregateFunctionMin(): Promise { try { await this.beforeEach(); const table = aggregateTable; await setupAggregateFunctionsTest(this.db); const result1 = await this.db.select({ value: min(table.b) }).from(table); const result2 = await this.db.select({ value: min(table.nullOnly) }).from(table); expect(result1[0]?.value).eq(10); expect(result2[0]?.value).eq(null); } catch (error: any) { console.error(error); throw new Error(`aggregateFunctionMin error`); } } async test$onUpdateFnAnd$onUpdateWorksAs$default(): Promise { try { await this.beforeEach(); this.db.run(sql`drop table if exists ${usersOnUpdate}`); this.db.run( sql` create table ${usersOnUpdate} ( id integer primary key autoincrement, name text not null, update_counter integer default 1 not null, updated_at integer, always_null text ) `, ); this.db .insert(usersOnUpdate) .values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }]) .run(); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); const justDates = await this.db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await this.db .select({ ...rest }) .from(usersOnUpdate) .orderBy(asc(usersOnUpdate.id)); expect(response).deep.equal([ { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 250; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).greaterThan(Date.now() - msDelay); } } catch (error: any) { console.error(error); throw new Error(`test$onUpdateFnAnd$onUpdateWorksAs$default error`); } } async test$onUpdateFnAnd$onUpdateWorksUpdating(): Promise { try { await this.beforeEach(); this.db.run(sql`drop table if exists ${usersOnUpdate}`); this.db.run( sql` create table ${usersOnUpdate} ( id integer primary key autoincrement, name text not null, update_counter integer default 1, updated_at integer, always_null text ) `, ); await this.db .insert(usersOnUpdate) .values([{ name: 'John', alwaysNull: 'this will be null after updating' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill', }]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); await this.db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); await this.db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); const justDates = await this.db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await this.db .select({ ...rest }) .from(usersOnUpdate) .orderBy(asc(usersOnUpdate.id)); expect(response).deep.equal([ { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 250; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).greaterThan(Date.now() - msDelay); } } catch (error: any) { console.error(error); throw new Error(`test$onUpdateFnAnd$onUpdateWorksUpdating error`); } } async $countSeparate(): Promise { try { await this.beforeEach(); const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); this.db.run(sql`drop table if exists ${countTestTable}`); this.db.run(sql`create table ${countTestTable} (id int, name text)`); await this.db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await this.db.$count(countTestTable); this.db.run(sql`drop table ${countTestTable}`); expect(count).eq(4); } catch (error: any) { console.error(error); throw new Error(`$countSeparate error`); } } async $countEmbedded(): Promise { try { await this.beforeEach(); const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); this.db.run(sql`drop table if exists ${countTestTable}`); this.db.run(sql`create table ${countTestTable} (id int, name text)`); await this.db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await this.db .select({ count: this.db.$count(countTestTable), }) .from(countTestTable); this.db.run(sql`drop table ${countTestTable}`); expect(count).deep.equal([{ count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }]); } catch (error: any) { console.error(error); throw new Error(`$countEmbedded error`); } } async $countSeparateReuse(): Promise { try { await this.beforeEach(); const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); this.db.run(sql`drop table if exists ${countTestTable}`); this.db.run(sql`create table ${countTestTable} (id int, name text)`); await this.db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = this.db.$count(countTestTable); const count1 = await count; await this.db.insert(countTestTable).values({ id: 5, name: 'fifth' }); const count2 = await count; await this.db.insert(countTestTable).values({ id: 6, name: 'sixth' }); const count3 = await count; this.db.run(sql`drop table ${countTestTable}`); expect(count1).eq(4); expect(count2).eq(5); expect(count3).eq(6); } catch (error: any) { console.error(error); throw new Error(`$countSeparateReuse error`); } } async $countEmbeddedReuse(): Promise { try { await this.beforeEach(); const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); this.db.run(sql`drop table if exists ${countTestTable}`); this.db.run(sql`create table ${countTestTable} (id int, name text)`); await this.db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = this.db .select({ count: this.db.$count(countTestTable), }) .from(countTestTable); const count1 = await count; await this.db.insert(countTestTable).values({ id: 5, name: 'fifth' }); const count2 = await count; await this.db.insert(countTestTable).values({ id: 6, name: 'sixth' }); const count3 = await count; this.db.run(sql`drop table ${countTestTable}`); expect(count1).deep.equal([{ count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }]); expect(count2).deep.equal([{ count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }]); expect(count3).deep.equal([{ count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }]); } catch (error: any) { console.error(error); throw new Error(`$countEmbeddedReuse error`); } } async $countSeparateWithFilters(): Promise { try { await this.beforeEach(); const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); this.db.run(sql`drop table if exists ${countTestTable}`); this.db.run(sql`create table ${countTestTable} (id int, name text)`); await this.db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await this.db.$count(countTestTable, gt(countTestTable.id, 1)); this.db.run(sql`drop table ${countTestTable}`); expect(count).deep.equal(3); } catch (error: any) { console.error(error); throw new Error(`$countSeparateWithFilters error`); } } async $countEmbeddedWithFilters(): Promise { try { await this.beforeEach(); const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); this.db.run(sql`drop table if exists ${countTestTable}`); this.db.run(sql`create table ${countTestTable} (id int, name text)`); await this.db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await this.db .select({ count: this.db.$count(countTestTable, gt(countTestTable.id, 1)), }) .from(countTestTable); await this.db.run(sql`drop table ${countTestTable}`); expect(count).deep.equal([{ count: 3 }, { count: 3 }, { count: 3 }, { count: 3 }]); } catch (error: any) { console.error(error); throw new Error(`$countEmbeddedWithFilters error`); } } async updateWithLimitAndOrderBy(): Promise { try { await this.beforeEach(); await this.db.insert(usersTable).values([ { name: 'Barry', verified: false }, { name: 'Alan', verified: false }, { name: 'Carl', verified: false }, ]); await this.db.update(usersTable).set({ verified: true }).limit(2).orderBy(asc(usersTable.name)); const result = await this.db .select({ name: usersTable.name, verified: usersTable.verified }) .from(usersTable) .orderBy(asc(usersTable.name)); expect(result).deep.equal([ { name: 'Alan', verified: true }, { name: 'Barry', verified: true }, { name: 'Carl', verified: false }, ]); } catch (error: any) { console.error(error); throw new Error(`updateWithLimitAndOrderBy error`); } } async deleteWithLimitAndOrderBy(): Promise { try { await this.beforeEach(); await this.db.insert(usersTable).values([ { name: 'Barry', verified: false }, { name: 'Alan', verified: false }, { name: 'Carl', verified: false }, ]); await this.db.delete(usersTable).where(eq(usersTable.verified, false)).limit(1).orderBy(asc(usersTable.name)); const result = await this.db .select({ name: usersTable.name, verified: usersTable.verified }) .from(usersTable) .orderBy(asc(usersTable.name)); expect(result).deep.equal([ { name: 'Barry', verified: false }, { name: 'Carl', verified: false }, ]); } catch (error: any) { console.error(error); throw new Error(`deleteWithLimitAndOrderBy error`); } } } export default { /** * This is the standard fetch handler for a Cloudflare Worker * * @param request - The request submitted to the Worker from the client * @param env - The interface to reference bindings declared in wrangler.toml * @param ctx - The execution context of the Worker * @returns The response to be sent back to the client */ async fetch(request, env): Promise { try { const id: DurableObjectId = env.MY_DURABLE_OBJECT.idFromName('durable-object'); const stub = env.MY_DURABLE_OBJECT.get(id); await stub.migrate1(); await stub.insertBigIntValues(); await stub.selectAllFields(); await stub.selectPartial(); await stub.selectSql(); await stub.selectTypedSql(); await stub.selectWithEmptyArrayInInArray(); await stub.selectWithEmptyArrayInNotInArray(); await stub.selectDistinct(); await stub.returingSql(); await stub.$defaultFunction(); await stub.deleteReturningSql(); await stub.queryCheckInsertSingleEmptyRow(); await stub.queryCheckInsertMultipleEmptyRow(); await stub.insertAllDefaultsIn1Row(); await stub.insertAllDefaultsInMultipleRows(); await stub.updateReturningSql(); await stub.insertWithAutoIncrement(); await stub.insertDataWithDefaultValues(); await stub.insertDataWithOverridenDefaultValues(); await stub.updateWithReturningFields(); await stub.updateWithReturningPartial(); await stub.updateWithReturningAllFields(); await stub.deleteWithReturningPartial(); await stub.insertAndSelect(); await stub.jsonInsert(); await stub.insertMany(); await stub.insertManyWithReturning(); await stub.partialJoinWithAlias(); await stub.fullJoinWithAlias(); await stub.selectFromAlias(); await stub.insertWithSpaces(); await stub.preparedStatement(); await stub.preparedStatementReuse(); await stub.insertPlaceholdersOnColumnsWithEncoder(); await stub.preparedStatementWithPlaceholderInWhere(); await stub.preparedStatementWithPlaceholderInLimit(); await stub.preparedStatementWithPlaceholderInOffset(); await stub.preparedStatementBuiltUsing$dynamic(); await stub.selectWithGroupByAsField(); await stub.selectWithExists(); await stub.selectWithGroupByAsSql(); await stub.selectWithGroupByAsSqlPlusColumn(); await stub.selectWithGroupByAsColumnPlusSql(); await stub.selectWithGroupByComplexQuery(); await stub.buildQuery(); await stub.insertViaDbRunPlusSelectViaDbAll(); await stub.insertViaDbGet(); await stub.insertViaDbRunPlusSelectViaDbGet(); await stub.insertViaDbGetQueryBuilder(); await stub.joinSubquery(); await stub.withSelect(); await stub.withUpdate(); await stub.withInsert(); await stub.withDelete(); await stub.selectFromSubquerySql(); await stub.selectAFieldWithoutJoiningItsTable(); await stub.selectCount(); await stub.having(); await stub.insertNullTimestamp(); await stub.selectFromRawSql(); await stub.selectFromRawSqlWithJoins(); await stub.joinOnAliasedSqlFromSelect(); await stub.joinOnAliasedSqlFromWithClause(); await stub.prefixedTable(); await stub.orderByWithAliasedColumn(); await stub.transaction(); await stub.nestedTransaction(); await stub.joinSubqueryWithJoin(); await stub.joinViewAsSubquery(); await stub.insertWithOnConflictDoNothing(); await stub.insertWithOnConflictDoNothinUsingCompositePk(); await stub.insertWithOnConflictDoNothingUsingTarget(); await stub.insertWithOnConflictDoNothingUsingCompositePkAsTarget(); await stub.insertWithOnConflictDoUpdate(); await stub.insertWithOnConflictDoUpdateWhere(); await stub.insertWithOnConflictDoUpdateUsingCompositePk(); await stub.apiCRUD(); await stub.apiInsertPlusSelectPreparePlusAsyncExecute(); await stub.apiInsertSelectPreparePlusSyncExecute(); await stub.selectPlusGetForEmptyResult(); await stub.setOperationsUnionFromQueryBuilderWithSubquery(); await stub.setOperationsUnionAsFunction(); await stub.setOperationsUnionAllFromQueryBuilder(); await stub.setOperationsUnionAllAsFunction(); await stub.setOperationsIntersectFromQueryBuilder(); await stub.setOperationsIntersectAsFunction(); await stub.setOperationsExceptFromQueryBuilder(); await stub.setOperationsExceptAsFunction(); await stub.setOperationsMixedFromQueryBuilder(); await stub.setOperationsMixedAllAsFunctionWithSubquery(); await stub.aggregateFunctionCount(); await stub.aggregatFunctionAvg(); await stub.aggregateFunctionSum(); await stub.aggregateFunctionMax(); await stub.aggregateFunctionMin(); await stub.test$onUpdateFnAnd$onUpdateWorksAs$default(); await stub.test$onUpdateFnAnd$onUpdateWorksUpdating(); await stub.$countSeparate(); await stub.$countEmbedded(); await stub.$countEmbeddedReuse(); await stub.$countSeparateWithFilters(); await stub.$countEmbeddedWithFilters(); await stub.updateWithLimitAndOrderBy(); await stub.deleteWithLimitAndOrderBy(); await stub.updateUndefined(); await stub.insertUndefined(); return new Response(); } catch (error: any) { return new Response(error.message); } }, } satisfies ExportedHandler; ================================================ FILE: integration-tests/tests/sqlite/durable-objects/worker-configuration.d.ts ================================================ // Generated by Wrangler by running `wrangler types` interface Env { MY_DURABLE_OBJECT: DurableObjectNamespace; } ================================================ FILE: integration-tests/tests/sqlite/durable-objects/wrangler.toml ================================================ #:schema node_modules/wrangler/config-schema.json name = "sqlite-durable-objects" main = "index.ts" compatibility_date = "2024-11-12" compatibility_flags = [ "nodejs_compat" ] # Bind a Durable Object. Durable objects are a scale-to-zero compute primitive based on the actor model. # Durable Objects can live for as long as needed. Use these when you need a long-running "server", such as in realtime apps. # Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#durable-objects [[durable_objects.bindings]] name = "MY_DURABLE_OBJECT" class_name = "MyDurableObject" # Durable Object migrations. # Docs: https://developers.cloudflare.com/workers/wrangler/configuration/#migrations [[migrations]] tag = "v1" new_sqlite_classes = ["MyDurableObject"] [[rules]] type = "Text" globs = ["**/*.sql"] fallthrough = true ================================================ FILE: integration-tests/tests/sqlite/libsql-batch.test.ts ================================================ import { createClient } from '@libsql/client'; import type { Client, ResultSet } from '@libsql/client'; import retry from 'async-retry'; import { eq, relations, sql } from 'drizzle-orm'; import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'; import type { AnySQLiteColumn } from 'drizzle-orm/sqlite-core'; import { integer, primaryKey, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; const ENABLE_LOGGING = false; export const usersTable = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), verified: integer('verified').notNull().default(0), invitedBy: integer('invited_by').references((): AnySQLiteColumn => usersTable.id), }); export const usersConfig = relations(usersTable, ({ one, many }) => ({ invitee: one(usersTable, { fields: [usersTable.invitedBy], references: [usersTable.id], }), usersToGroups: many(usersToGroupsTable), posts: many(postsTable), })); export const groupsTable = sqliteTable('groups', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), description: text('description'), }); export const groupsConfig = relations(groupsTable, ({ many }) => ({ usersToGroups: many(usersToGroupsTable), })); export const usersToGroupsTable = sqliteTable( 'users_to_groups', { id: integer('id').primaryKey({ autoIncrement: true }), userId: integer('user_id', { mode: 'number' }).notNull().references( () => usersTable.id, ), groupId: integer('group_id', { mode: 'number' }).notNull().references( () => groupsTable.id, ), }, (t) => ({ pk: primaryKey({ columns: [t.userId, t.groupId] }), }), ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { fields: [usersToGroupsTable.groupId], references: [groupsTable.id], }), user: one(usersTable, { fields: [usersToGroupsTable.userId], references: [usersTable.id], }), })); export const postsTable = sqliteTable('posts', { id: integer('id').primaryKey({ autoIncrement: true }), content: text('content').notNull(), ownerId: integer('owner_id', { mode: 'number' }).references( () => usersTable.id, ), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const postsConfig = relations(postsTable, ({ one, many }) => ({ author: one(usersTable, { fields: [postsTable.ownerId], references: [usersTable.id], }), comments: many(commentsTable), })); export const commentsTable = sqliteTable('comments', { id: integer('id').primaryKey({ autoIncrement: true }), content: text('content').notNull(), creator: integer('creator', { mode: 'number' }).references( () => usersTable.id, ), postId: integer('post_id', { mode: 'number' }).references(() => postsTable.id), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ post: one(postsTable, { fields: [commentsTable.postId], references: [postsTable.id], }), author: one(usersTable, { fields: [commentsTable.creator], references: [usersTable.id], }), likes: many(commentLikesTable), })); export const commentLikesTable = sqliteTable('comment_likes', { id: integer('id').primaryKey({ autoIncrement: true }), creator: integer('creator', { mode: 'number' }).references( () => usersTable.id, ), commentId: integer('comment_id', { mode: 'number' }).references( () => commentsTable.id, ), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ comment: one(commentsTable, { fields: [commentLikesTable.commentId], references: [commentsTable.id], }), author: one(usersTable, { fields: [commentLikesTable.creator], references: [usersTable.id], }), })); const schema = { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable, commentLikesConfig, commentsConfig, postsConfig, usersToGroupsConfig, groupsConfig, usersConfig, }; let db: LibSQLDatabase; let client: Client; beforeAll(async () => { const url = process.env['LIBSQL_URL']; const authToken = process.env['LIBSQL_AUTH_TOKEN']; if (!url) { throw new Error('LIBSQL_URL is not set'); } client = await retry(async () => { client = createClient({ url, authToken }); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.close(); }, }); db = drizzle(client, { schema, logger: ENABLE_LOGGING }); }); afterAll(async () => { // client?.close(); }); beforeEach(async () => { await db.run(sql`drop table if exists \`groups\``); await db.run(sql`drop table if exists \`users\``); await db.run(sql`drop table if exists \`users_to_groups\``); await db.run(sql`drop table if exists \`posts\``); await db.run(sql`drop table if exists \`comments\``); await db.run(sql`drop table if exists \`comment_likes\``); await db.run( sql` CREATE TABLE \`users\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`verified\` integer DEFAULT 0 NOT NULL, \`invited_by\` integer ); `, ); await db.run( sql` CREATE TABLE \`groups\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`description\` text ); `, ); await db.run( sql` CREATE TABLE \`users_to_groups\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`user_id\` integer NOT NULL, \`group_id\` integer NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`posts\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`content\` text NOT NULL, \`owner_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`comments\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`content\` text NOT NULL, \`creator\` integer, \`post_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`comment_likes\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`creator\` integer, \`comment_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); }); afterAll(async () => { await db.run(sql`drop table if exists \`groups\``); await db.run(sql`drop table if exists \`users\``); await db.run(sql`drop table if exists \`users_to_groups\``); await db.run(sql`drop table if exists \`posts\``); await db.run(sql`drop table if exists \`comments\``); await db.run(sql`drop table if exists \`comment_likes\``); client.close(); }); test('batch api example', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id, invitedBy: usersTable.invitedBy, }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.select().from(usersTable), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; invitedBy: number | null; }[], ResultSet, { id: number; name: string; verified: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(3); expect(batchResponse[0]).toEqual([{ id: 1, invitedBy: null, }]); expect(batchResponse[1]).toEqual({ columnTypes: [], columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 2n }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); }); // batch api only relational many test('insert + findMany', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], ResultSet, { id: number; name: string; verified: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(3); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ columnTypes: [], columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 2n }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); }); // batch api relational many + one test('insert + findMany + findFirst', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), db.query.usersTable.findFirst({}), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], ResultSet, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; } | undefined, ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ columnTypes: [], columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 2n }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toEqual( { id: 1, name: 'John', verified: 0, invitedBy: null }, ); }); test('insert + db.all + db.get + db.values + db.run', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.run(sql`insert into users (id, name) values (2, 'Dan')`), db.all(sql`select * from users`), db.values(sql`select * from users`), db.get(sql`select * from users`), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], ResultSet, { id: number; name: string; verified: number; invitedBy: number | null; }[], unknown[][], { id: number; name: string; verified: number; invitedBy: number | null; }, ]>(); expect(batchResponse.length).eq(5); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ columnTypes: [], columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 2n }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invited_by: null }, { id: 2, name: 'Dan', verified: 0, invited_by: null }, ]); expect(batchResponse[3].map((row) => Array.prototype.slice.call(row))).toEqual([ [1, 'John', 0, null], [2, 'Dan', 0, null], ]); expect(batchResponse[4]).toEqual( { id: 1, name: 'John', verified: 0, invited_by: null }, ); }); // batch api combined rqb + raw call test('insert + findManyWith + db.all', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), db.all(sql`select * from users`), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], ResultSet, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ columnTypes: [], columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 2n }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toEqual([ { id: 1, name: 'John', verified: 0, invited_by: null }, { id: 2, name: 'Dan', verified: 0, invited_by: null }, ]); }); // batch api for insert + update + select test('insert + update + select + select partial', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)), db.query.usersTable.findMany({}), db.select().from(usersTable).where(eq(usersTable.id, 1)), db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from(usersTable), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], ResultSet, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(5); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ columnTypes: [], columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 1n }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toEqual([ { id: 1, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[4]).toEqual([ { id: 1, invitedBy: null }, ]); }); // batch api for insert + delete + select test('insert + delete + select + select partial', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.delete(usersTable).where(eq(usersTable.id, 1)).returning({ id: usersTable.id, invitedBy: usersTable.invitedBy }), db.query.usersTable.findFirst({ columns: { id: true, invitedBy: true, }, }), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], ResultSet, { id: number; invitedBy: number | null; }[], { id: number; invitedBy: number | null; } | undefined, ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ columnTypes: [], columns: [], rows: [], rowsAffected: 1, lastInsertRowid: 2n }); expect(batchResponse[2]).toEqual([ { id: 1, invitedBy: null }, ]); expect(batchResponse[3]).toEqual( { id: 2, invitedBy: null }, ); }); ================================================ FILE: integration-tests/tests/sqlite/libsql-http.test.ts ================================================ import { type Client, createClient } from '@libsql/client/http'; import retry from 'async-retry'; import { asc, eq, getTableColumns, sql } from 'drizzle-orm'; import type { LibSQLDatabase } from 'drizzle-orm/libsql'; import { drizzle } from 'drizzle-orm/libsql/http'; import { migrate } from 'drizzle-orm/libsql/migrator'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { anotherUsersMigratorTable, tests, usersMigratorTable, usersOnUpdate } from './sqlite-common'; const ENABLE_LOGGING = false; let db: LibSQLDatabase; let client: Client; beforeAll(async () => { const url = process.env['LIBSQL_REMOTE_URL']; const authToken = process.env['LIBSQL_REMOTE_TOKEN']; if (!url) { throw new Error('LIBSQL_REMOTE_URL is not set'); } client = await retry(async () => { client = createClient({ url, authToken }); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.close(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); }); afterAll(async () => { client?.close(); }); beforeEach((ctx) => { ctx.sqlite = { db, }; }); test('migrator', async () => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result2 = await db.select().from(anotherUsersMigratorTable).all(); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table __drizzle_migrations`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); expect(res.length > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table ${sql.identifier(customTable)}`); }); test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( sql` create table ${usersOnUpdate} ( id integer primary key autoincrement, name text not null, update_counter integer default 1 not null, updated_at integer, always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 1750; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( sql` create table ${usersOnUpdate} ( id integer primary key autoincrement, name text not null, update_counter integer default 1, updated_at integer, always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John', alwaysNull: 'this will be null after updating' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 1750; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); skipTests([ 'delete with limit and order by', 'update with limit and order by', 'test $onUpdateFn and $onUpdate works as $default', 'test $onUpdateFn and $onUpdate works updating', ]); tests(); ================================================ FILE: integration-tests/tests/sqlite/libsql-node.test.ts ================================================ import { type Client, createClient } from '@libsql/client/node'; import retry from 'async-retry'; import { sql } from 'drizzle-orm'; import type { LibSQLDatabase } from 'drizzle-orm/libsql'; import { migrate } from 'drizzle-orm/libsql/migrator'; import { drizzle } from 'drizzle-orm/libsql/node'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; const ENABLE_LOGGING = false; let db: LibSQLDatabase; let client: Client; beforeAll(async () => { const url = process.env['LIBSQL_URL']; const authToken = process.env['LIBSQL_AUTH_TOKEN']; if (!url) { throw new Error('LIBSQL_URL is not set'); } client = await retry(async () => { client = createClient({ url, authToken }); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.close(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); }); afterAll(async () => { client?.close(); }); beforeEach((ctx) => { ctx.sqlite = { db, }; }); test('migrator', async () => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result2 = await db.select().from(anotherUsersMigratorTable).all(); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table __drizzle_migrations`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); expect(res.length > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table ${sql.identifier(customTable)}`); }); skipTests([ 'delete with limit and order by', 'update with limit and order by', ]); tests(); ================================================ FILE: integration-tests/tests/sqlite/libsql-sqlite3.test.ts ================================================ import { type Client, createClient } from '@libsql/client/sqlite3'; import retry from 'async-retry'; import { sql } from 'drizzle-orm'; import type { LibSQLDatabase } from 'drizzle-orm/libsql'; import { migrate } from 'drizzle-orm/libsql/migrator'; import { drizzle } from 'drizzle-orm/libsql/sqlite3'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; const ENABLE_LOGGING = false; let db: LibSQLDatabase; let client: Client; beforeAll(async () => { const url = ':memory:'; client = await retry(async () => { client = createClient({ url }); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.close(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); }); afterAll(async () => { client?.close(); }); beforeEach((ctx) => { ctx.sqlite = { db, }; }); test('migrator', async () => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result2 = await db.select().from(anotherUsersMigratorTable).all(); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table __drizzle_migrations`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); expect(res.length > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table ${sql.identifier(customTable)}`); }); skipTests([ 'delete with limit and order by', 'update with limit and order by', 'transaction', 'transaction rollback', 'nested transaction', 'nested transaction rollback', ]); tests(); ================================================ FILE: integration-tests/tests/sqlite/libsql-ws.test.ts ================================================ import { type Client, createClient } from '@libsql/client/ws'; import retry from 'async-retry'; import { asc, eq, getTableColumns, sql } from 'drizzle-orm'; import type { LibSQLDatabase } from 'drizzle-orm/libsql'; import { migrate } from 'drizzle-orm/libsql/migrator'; import { drizzle } from 'drizzle-orm/libsql/ws'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { anotherUsersMigratorTable, tests, usersMigratorTable, usersOnUpdate } from './sqlite-common'; const ENABLE_LOGGING = false; let db: LibSQLDatabase; let client: Client; beforeAll(async () => { const url = process.env['LIBSQL_REMOTE_URL']; const authToken = process.env['LIBSQL_REMOTE_TOKEN']; if (!url) { throw new Error('LIBSQL_REMOTE_URL is not set'); } client = await retry(async () => { client = createClient({ url, authToken }); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.close(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); }); afterAll(async () => { client?.close(); }); beforeEach((ctx) => { ctx.sqlite = { db, }; }); test('migrator', async () => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result2 = await db.select().from(anotherUsersMigratorTable).all(); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table __drizzle_migrations`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); expect(res.length > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table ${sql.identifier(customTable)}`); }); test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( sql` create table ${usersOnUpdate} ( id integer primary key autoincrement, name text not null, update_counter integer default 1 not null, updated_at integer, always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 1250; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( sql` create table ${usersOnUpdate} ( id integer primary key autoincrement, name text not null, update_counter integer default 1, updated_at integer, always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John', alwaysNull: 'this will be null after updating' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 1250; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); skipTests([ 'delete with limit and order by', 'update with limit and order by', 'join view as subquery', 'test $onUpdateFn and $onUpdate works as $default', 'test $onUpdateFn and $onUpdate works updating', 'prepared statement reuse', ]); tests(); ================================================ FILE: integration-tests/tests/sqlite/libsql.test.ts ================================================ import { type Client, createClient } from '@libsql/client'; import retry from 'async-retry'; import { sql } from 'drizzle-orm'; import { drizzle, type LibSQLDatabase } from 'drizzle-orm/libsql'; import { migrate } from 'drizzle-orm/libsql/migrator'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { randomString } from '~/utils'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; import { TestCache, TestGlobalCache, tests as cacheTests } from './sqlite-common-cache'; const ENABLE_LOGGING = false; let db: LibSQLDatabase; let dbGlobalCached: LibSQLDatabase; let cachedDb: LibSQLDatabase; let client: Client; beforeAll(async () => { const url = process.env['LIBSQL_URL']; const authToken = process.env['LIBSQL_AUTH_TOKEN']; if (!url) { throw new Error('LIBSQL_URL is not set'); } client = await retry(async () => { client = createClient({ url, authToken }); return client; }, { retries: 20, factor: 1, minTimeout: 250, maxTimeout: 250, randomize: false, onRetry() { client?.close(); }, }); db = drizzle(client, { logger: ENABLE_LOGGING }); cachedDb = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestCache() }); dbGlobalCached = drizzle(client, { logger: ENABLE_LOGGING, cache: new TestGlobalCache() }); }); afterAll(async () => { client?.close(); }); beforeEach((ctx) => { ctx.sqlite = { db, }; ctx.cachedSqlite = { db: cachedDb, dbGlobalCached, }; }); test('migrator', async () => { await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists __drizzle_migrations`); await migrate(db, { migrationsFolder: './drizzle2/sqlite' }); await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = await db.select().from(usersMigratorTable).all(); await db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result2 = await db.select().from(anotherUsersMigratorTable).all(); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table __drizzle_migrations`); }); test('migrator : migrate with custom table', async () => { const customTable = randomString(); await db.run(sql`drop table if exists another_users`); await db.run(sql`drop table if exists users12`); await db.run(sql`drop table if exists ${sql.identifier(customTable)}`); await migrate(db, { migrationsFolder: './drizzle2/sqlite', migrationsTable: customTable }); // test if the custom migrations table was created const res = await db.all(sql`select * from ${sql.identifier(customTable)};`); expect(res.length > 0).toBeTruthy(); // test if the migrated table are working as expected await db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }); const result = await db.select().from(usersMigratorTable); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); await db.run(sql`drop table another_users`); await db.run(sql`drop table users12`); await db.run(sql`drop table ${sql.identifier(customTable)}`); }); skipTests([ 'delete with limit and order by', 'update with limit and order by', ]); cacheTests(); tests(); ================================================ FILE: integration-tests/tests/sqlite/sql-js.test.ts ================================================ import { sql } from 'drizzle-orm'; import type { SQLJsDatabase } from 'drizzle-orm/sql-js'; import { drizzle } from 'drizzle-orm/sql-js'; import { migrate } from 'drizzle-orm/sql-js/migrator'; import type { Database } from 'sql.js'; import initSqlJs from 'sql.js'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { anotherUsersMigratorTable, tests, usersMigratorTable } from './sqlite-common'; const ENABLE_LOGGING = false; let db: SQLJsDatabase; let client: Database; beforeAll(async () => { const SQL = await initSqlJs(); client = new SQL.Database(); db = drizzle(client, { logger: ENABLE_LOGGING }); }); beforeEach((ctx) => { ctx.sqlite = { db, }; }); afterAll(async () => { client?.close(); }); test('migrator', async () => { db.run(sql`drop table if exists another_users`); db.run(sql`drop table if exists users12`); db.run(sql`drop table if exists __drizzle_migrations`); migrate(db, { migrationsFolder: './drizzle2/sqlite' }); db.insert(usersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result = db.select().from(usersMigratorTable).all(); db.insert(anotherUsersMigratorTable).values({ name: 'John', email: 'email' }).run(); const result2 = db.select().from(anotherUsersMigratorTable).all(); expect(result).toEqual([{ id: 1, name: 'John', email: 'email' }]); expect(result2).toEqual([{ id: 1, name: 'John', email: 'email' }]); db.run(sql`drop table another_users`); db.run(sql`drop table users12`); db.run(sql`drop table __drizzle_migrations`); }); skipTests([ /** * doesn't work properly: * Expect: should rollback transaction and don't insert/ update data * Received: data inserted/ updated */ 'transaction rollback', 'nested transaction rollback', 'delete with limit and order by', 'update with limit and order by', ]); tests(); ================================================ FILE: integration-tests/tests/sqlite/sqlite-common-cache.ts ================================================ import { eq, getTableName, is, sql, Table } from 'drizzle-orm'; import type { MutationOption } from 'drizzle-orm/cache/core'; import { Cache } from 'drizzle-orm/cache/core'; import type { CacheConfig } from 'drizzle-orm/cache/core/types'; import { alias, type BaseSQLiteDatabase, integer, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import Keyv from 'keyv'; import { beforeEach, describe, expect, test, vi } from 'vitest'; // eslint-disable-next-line drizzle-internal/require-entity-kind export class TestGlobalCache extends Cache { private globalTtl: number = 1000; private usedTablesPerKey: Record = {}; constructor(private kv: Keyv = new Keyv()) { super(); } override strategy(): 'explicit' | 'all' { return 'all'; } override async get(key: string, _tables: string[], _isTag: boolean): Promise { const res = await this.kv.get(key) ?? undefined; return res; } override async put( key: string, response: any, tables: string[], isTag: boolean, config?: CacheConfig, ): Promise { await this.kv.set(key, response, config ? config.ex : this.globalTtl); for (const table of tables) { const keys = this.usedTablesPerKey[table]; if (keys === undefined) { this.usedTablesPerKey[table] = [key]; } else { keys.push(key); } } } override async onMutate(params: MutationOption): Promise { const tagsArray = params.tags ? Array.isArray(params.tags) ? params.tags : [params.tags] : []; const tablesArray = params.tables ? Array.isArray(params.tables) ? params.tables : [params.tables] : []; const keysToDelete = new Set(); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; const keys = this.usedTablesPerKey[tableName] ?? []; for (const key of keys) keysToDelete.add(key); } if (keysToDelete.size > 0 || tagsArray.length > 0) { for (const tag of tagsArray) { await this.kv.delete(tag); } for (const key of keysToDelete) { await this.kv.delete(key); for (const table of tablesArray) { const tableName = is(table, Table) ? getTableName(table) : table as string; this.usedTablesPerKey[tableName] = []; } } } } } // eslint-disable-next-line drizzle-internal/require-entity-kind export class TestCache extends TestGlobalCache { override strategy(): 'explicit' | 'all' { return 'explicit'; } } declare module 'vitest' { interface TestContext { cachedSqlite: { db: BaseSQLiteDatabase; dbGlobalCached: BaseSQLiteDatabase; }; sqlite: { db: BaseSQLiteDatabase<'async' | 'sync', any, Record>; }; } } const usersTable = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), verified: integer('verified', { mode: 'boolean' }).notNull().default(false), jsonb: text('jsonb', { mode: 'json' }).$type(), createdAt: integer('created_at', { mode: 'timestamp' }), }); const postsTable = sqliteTable('posts', { id: integer().primaryKey({ autoIncrement: true }), description: text().notNull(), userId: integer('user_id').references(() => usersTable.id), }); export function tests() { describe('common_cache', () => { beforeEach(async (ctx) => { const { db, dbGlobalCached } = ctx.cachedSqlite; await db.run(sql`drop table if exists users`); await db.run(sql`drop table if exists posts`); await db.$cache?.invalidate({ tables: 'users' }); await dbGlobalCached.$cache?.invalidate({ tables: 'users' }); // public users await db.run( sql` create table users ( id integer primary key AUTOINCREMENT, name text not null, verified integer not null default 0, jsonb text, created_at integer ) `, ); await db.run( sql` create table posts ( id integer primary key AUTOINCREMENT, description text not null, user_id int ) `, ); }); test('test force invalidate', async (ctx) => { const { db } = ctx.cachedSqlite; const spyInvalidate = vi.spyOn(db.$cache, 'invalidate'); await db.$cache?.invalidate({ tables: 'users' }); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('default global config - no cache should be hit', async (ctx) => { const { db } = ctx.cachedSqlite; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('default global config + enable cache on select: get, put', async (ctx) => { const { db } = ctx.cachedSqlite; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('default global config + enable cache on select + write: get, put, onMutate', async (ctx) => { const { db } = ctx.cachedSqlite; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ config: { ex: 1 } }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); spyPut.mockClear(); spyGet.mockClear(); spyInvalidate.mockClear(); await db.insert(usersTable).values({ name: 'John' }); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('default global config + enable cache on select + disable invalidate: get, put', async (ctx) => { const { db } = ctx.cachedSqlite; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false, config: { ex: 1 } }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); }); test('global: true + disable cache', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedSqlite; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - cache should be hit', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedSqlite; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - cache: false on select - no cache hit', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedSqlite; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache(false); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(0); }); test('global: true - disable invalidate - cache hit + no invalidate', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedSqlite; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ autoInvalidate: false }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); spyPut.mockClear(); spyGet.mockClear(); spyInvalidate.mockClear(); await db.insert(usersTable).values({ name: 'John' }); expect(spyPut).toHaveBeenCalledTimes(0); expect(spyGet).toHaveBeenCalledTimes(0); expect(spyInvalidate).toHaveBeenCalledTimes(1); }); test('global: true - with custom tag', async (ctx) => { const { dbGlobalCached: db } = ctx.cachedSqlite; // @ts-expect-error const spyPut = vi.spyOn(db.$cache, 'put'); // @ts-expect-error const spyGet = vi.spyOn(db.$cache, 'get'); // @ts-expect-error const spyInvalidate = vi.spyOn(db.$cache, 'onMutate'); await db.select().from(usersTable).$withCache({ tag: 'custom', autoInvalidate: false }); expect(spyPut).toHaveBeenCalledTimes(1); expect(spyGet).toHaveBeenCalledTimes(1); expect(spyInvalidate).toHaveBeenCalledTimes(0); await db.insert(usersTable).values({ name: 'John' }); // invalidate force await db.$cache?.invalidate({ tags: ['custom'] }); }); // check select used tables test('check simple select used tables', (ctx) => { const { db } = ctx.cachedSqlite; // @ts-expect-error expect(db.select().from(usersTable).getUsedTables()).toStrictEqual(['users']); // @ts-expect-error expect(db.select().from(sql`${usersTable}`).getUsedTables()).toStrictEqual(['users']); }); // check select+join used tables test('select+join', (ctx) => { const { db } = ctx.cachedSqlite; // @ts-expect-error expect(db.select().from(usersTable).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables()) .toStrictEqual(['users', 'posts']); expect( // @ts-expect-error db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).getUsedTables(), ).toStrictEqual(['users', 'posts']); }); // check select+2join used tables test('select+2joins', (ctx) => { const { db } = ctx.cachedSqlite; expect( db.select().from(usersTable).leftJoin( postsTable, eq(usersTable.id, postsTable.userId), ).leftJoin( alias(postsTable, 'post2'), eq(usersTable.id, postsTable.userId), ) // @ts-expect-error .getUsedTables(), ) .toStrictEqual(['users', 'posts']); expect( db.select().from(sql`${usersTable}`).leftJoin(postsTable, eq(usersTable.id, postsTable.userId)).leftJoin( alias(postsTable, 'post2'), eq(usersTable.id, postsTable.userId), // @ts-expect-error ).getUsedTables(), ).toStrictEqual(['users', 'posts']); }); // select subquery used tables test('select+join', (ctx) => { const { db } = ctx.cachedSqlite; const sq = db.select().from(usersTable).where(eq(usersTable.id, 42)).as('sq'); // @ts-expect-error expect(db.select().from(sq).getUsedTables()).toStrictEqual(['users']); }); }); } ================================================ FILE: integration-tests/tests/sqlite/sqlite-common.ts ================================================ import { and, asc, avg, avgDistinct, count, countDistinct, eq, exists, getTableColumns, gt, gte, inArray, lt, max, min, Name, notInArray, sql, sum, sumDistinct, TransactionRollbackError, } from 'drizzle-orm'; import { alias, type BaseSQLiteDatabase, blob, except, foreignKey, getTableConfig, getViewConfig, index, int, integer, intersect, numeric, primaryKey, real, sqliteTable, sqliteTableCreator, sqliteView, text, union, unionAll, unique, uniqueKeyName, } from 'drizzle-orm/sqlite-core'; import { beforeEach, describe, expect, expectTypeOf, test } from 'vitest'; import type { Equal } from '~/utils'; import { Expect } from '~/utils'; declare module 'vitest' { interface TestContext { sqlite: { db: BaseSQLiteDatabase<'async' | 'sync', any, Record>; }; } } const allTypesTable = sqliteTable('all_types', { int: integer('int', { mode: 'number', }), bool: integer('bool', { mode: 'boolean', }), time: integer('time', { mode: 'timestamp', }), timeMs: integer('time_ms', { mode: 'timestamp_ms', }), bigint: blob('bigint', { mode: 'bigint', }), buffer: blob('buffer', { mode: 'buffer', }), json: blob('json', { mode: 'json', }), numeric: numeric('numeric'), numericNum: numeric('numeric_num', { mode: 'number', }), numericBig: numeric('numeric_big', { mode: 'bigint', }), real: real('real'), text: text('text', { mode: 'text', }), jsonText: text('json_text', { mode: 'json', }), }); export const usersTable = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').notNull(), verified: integer('verified', { mode: 'boolean' }).notNull().default(false), json: blob('json', { mode: 'json' }).$type(), createdAt: integer('created_at', { mode: 'timestamp' }).notNull().default(sql`strftime('%s', 'now')`), }); export const usersOnUpdate = sqliteTable('users_on_update', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), updatedAt: integer('updated_at', { mode: 'timestamp_ms' }).$onUpdate(() => new Date()), alwaysNull: text('always_null').$type().$onUpdate(() => null), // uppercaseName: text('uppercase_name').$onUpdateFn(() => // sql`upper(s.name)` // ), This doesn't seem to be supported in sqlite }); export const users2Table = sqliteTable('users2', { id: integer('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').references(() => citiesTable.id), }); export const citiesTable = sqliteTable('cities', { id: integer('id').primaryKey(), name: text('name').notNull(), }); const coursesTable = sqliteTable('courses', { id: integer('id').primaryKey(), name: text('name').notNull(), categoryId: integer('category_id').references(() => courseCategoriesTable.id), }); const courseCategoriesTable = sqliteTable('course_categories', { id: integer('id').primaryKey(), name: text('name').notNull(), }); const orders = sqliteTable('orders', { id: integer('id').primaryKey(), region: text('region').notNull(), product: text('product').notNull().$default(() => 'random_string'), amount: integer('amount').notNull(), quantity: integer('quantity').notNull(), }); export const usersMigratorTable = sqliteTable('users12', { id: integer('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }); export const anotherUsersMigratorTable = sqliteTable('another_users', { id: integer('id').primaryKey(), name: text('name').notNull(), email: text('email').notNull(), }); const pkExampleTable = sqliteTable('pk_example', { id: integer('id').notNull(), name: text('name').notNull(), email: text('email').notNull(), }, (table) => ({ compositePk: primaryKey({ columns: [table.id, table.name] }), })); const conflictChainExampleTable = sqliteTable('conflict_chain_example', { id: integer('id').notNull().unique(), name: text('name').notNull(), email: text('email').notNull(), }, (table) => ({ compositePk: primaryKey({ columns: [table.id, table.name] }), })); const bigIntExample = sqliteTable('big_int_example', { id: integer('id').primaryKey(), name: text('name').notNull(), bigInt: blob('big_int', { mode: 'bigint' }).notNull(), }); // To test aggregate functions const aggregateTable = sqliteTable('aggregate_table', { id: integer('id').primaryKey({ autoIncrement: true }).notNull(), name: text('name').notNull(), a: integer('a'), b: integer('b'), c: integer('c'), nullOnly: integer('null_only'), }); export function tests() { describe('common', () => { beforeEach(async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`drop table if exists ${usersTable}`); await db.run(sql`drop table if exists ${users2Table}`); await db.run(sql`drop table if exists ${citiesTable}`); await db.run(sql`drop table if exists ${coursesTable}`); await db.run(sql`drop table if exists ${courseCategoriesTable}`); await db.run(sql`drop table if exists ${orders}`); await db.run(sql`drop table if exists ${bigIntExample}`); await db.run(sql`drop table if exists ${pkExampleTable}`); await db.run(sql`drop table if exists ${conflictChainExampleTable}`); await db.run(sql`drop table if exists ${allTypesTable}`); await db.run(sql`drop table if exists user_notifications_insert_into`); await db.run(sql`drop table if exists users_insert_into`); await db.run(sql`drop table if exists notifications_insert_into`); await db.run(sql` create table ${usersTable} ( id integer primary key, name text not null, verified integer not null default 0, json blob, created_at integer not null default (strftime('%s', 'now')) ) `); await db.run(sql` create table ${citiesTable} ( id integer primary key, name text not null ) `); await db.run(sql` create table ${courseCategoriesTable} ( id integer primary key, name text not null ) `); await db.run(sql` create table ${users2Table} ( id integer primary key, name text not null, city_id integer references ${citiesTable}(${sql.identifier(citiesTable.id.name)}) ) `); await db.run(sql` create table ${coursesTable} ( id integer primary key, name text not null, category_id integer references ${courseCategoriesTable}(${sql.identifier(courseCategoriesTable.id.name)}) ) `); await db.run(sql` create table ${orders} ( id integer primary key, region text not null, product text not null, amount integer not null, quantity integer not null ) `); await db.run(sql` create table ${pkExampleTable} ( id integer not null, name text not null, email text not null, primary key (id, name) ) `); await db.run(sql` create table ${conflictChainExampleTable} ( id integer not null unique, name text not null, email text not null, primary key (id, name) ) `); await db.run(sql` create table ${bigIntExample} ( id integer primary key, name text not null, big_int blob not null ) `); }); async function setupSetOperationTest(db: BaseSQLiteDatabase) { await db.run(sql`drop table if exists users2`); await db.run(sql`drop table if exists cities`); await db.run(sql` create table \`cities\` ( id integer primary key, name text not null ) `); await db.run(sql` create table \`users2\` ( id integer primary key, name text not null, city_id integer references ${citiesTable}(${sql.identifier(citiesTable.id.name)}) ) `); await db.insert(citiesTable).values([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await db.insert(users2Table).values([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 2 }, { id: 3, name: 'Jack', cityId: 3 }, { id: 4, name: 'Peter', cityId: 3 }, { id: 5, name: 'Ben', cityId: 2 }, { id: 6, name: 'Jill', cityId: 1 }, { id: 7, name: 'Mary', cityId: 2 }, { id: 8, name: 'Sally', cityId: 1 }, ]); } async function setupAggregateFunctionsTest(db: BaseSQLiteDatabase) { await db.run(sql`drop table if exists "aggregate_table"`); await db.run( sql` create table "aggregate_table" ( "id" integer primary key autoincrement not null, "name" text not null, "a" integer, "b" integer, "c" integer, "null_only" integer ); `, ); await db.insert(aggregateTable).values([ { name: 'value 1', a: 5, b: 10, c: 20 }, { name: 'value 1', a: 5, b: 20, c: 30 }, { name: 'value 2', a: 10, b: 50, c: 60 }, { name: 'value 3', a: 20, b: 20, c: null }, { name: 'value 4', a: null, b: 90, c: 120 }, { name: 'value 5', a: 80, b: 10, c: null }, { name: 'value 6', a: null, b: null, c: 150 }, ]); } test('table config: foreign keys name', async () => { const table = sqliteTable('cities', { id: int('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), f1: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk_deprecated' }), })); const tableConfig = getTableConfig(table); expect(tableConfig.foreignKeys).toHaveLength(2); expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); expect(tableConfig.foreignKeys[1]!.getName()).toBe('custom_fk_deprecated'); }); test('table config: primary keys name', async () => { const table = sqliteTable('cities', { id: int('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), })); const tableConfig = getTableConfig(table); expect(tableConfig.primaryKeys).toHaveLength(1); expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); }); test('insert bigint values', async (ctx) => { const { db } = ctx.sqlite; await db.insert(bigIntExample).values({ name: 'one', bigInt: BigInt('0') }).run(); await db.insert(bigIntExample).values({ name: 'two', bigInt: BigInt('127') }).run(); await db.insert(bigIntExample).values({ name: 'three', bigInt: BigInt('32767') }).run(); await db.insert(bigIntExample).values({ name: 'four', bigInt: BigInt('1234567890') }).run(); await db.insert(bigIntExample).values({ name: 'five', bigInt: BigInt('12345678900987654321') }).run(); const result = await db.select().from(bigIntExample).all(); expect(result).toEqual([ { id: 1, name: 'one', bigInt: BigInt('0') }, { id: 2, name: 'two', bigInt: BigInt('127') }, { id: 3, name: 'three', bigInt: BigInt('32767') }, { id: 4, name: 'four', bigInt: BigInt('1234567890') }, { id: 5, name: 'five', bigInt: BigInt('12345678900987654321') }, ]); }); test('select all fields', async (ctx) => { const { db } = ctx.sqlite; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }).run(); const result = await db.select().from(usersTable).all(); expect(result[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(5000); expect(result).toEqual([{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); }); test('select partial', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const result = await db.select({ name: usersTable.name }).from(usersTable).all(); expect(result).toEqual([{ name: 'John' }]); }); test('select sql', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable).all(); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select typed sql', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.select({ name: sql`upper(${usersTable.name})`, }).from(usersTable).all(); expect(users).toEqual([{ name: 'JOHN' }]); }); test('select with empty array in inArray', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(inArray(usersTable.id, [])); expect(result).toEqual([]); }); test('select with empty array in notInArray', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); const result = await db .select({ name: sql`upper(${usersTable.name})`, }) .from(usersTable) .where(notInArray(usersTable.id, [])); expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); }); test('select distinct', async (ctx) => { const { db } = ctx.sqlite; const usersDistinctTable = sqliteTable('users_distinct', { id: integer('id').notNull(), name: text('name').notNull(), }); await db.run(sql`drop table if exists ${usersDistinctTable}`); await db.run(sql`create table ${usersDistinctTable} (id integer, name text)`); await db.insert(usersDistinctTable).values([ { id: 1, name: 'John' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }, { id: 1, name: 'Jane' }, ]).run(); const users = await db.selectDistinct().from(usersDistinctTable).orderBy( usersDistinctTable.id, usersDistinctTable.name, ).all(); await db.run(sql`drop table ${usersDistinctTable}`); expect(users).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); }); test('insert returning sql', async (ctx) => { const { db } = ctx.sqlite; const users = await db.insert(usersTable).values({ name: 'John' }).returning({ name: sql`upper(${usersTable.name})`, }).all(); expect(users).toEqual([{ name: 'JOHN' }]); }); test('$default function', async (ctx) => { const { db } = ctx.sqlite; await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }); const selectedOrder = await db.select().from(orders); expect(selectedOrder).toEqual([{ id: 1, amount: 1, quantity: 1, region: 'Ukraine', product: 'random_string', }]); }); test('delete returning sql', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ name: sql`upper(${usersTable.name})`, }).all(); expect(users).toEqual([{ name: 'JOHN' }]); }); test('query check: insert single empty row', (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); const query = db .insert(users) .values({}) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "state") values (null, ?, null)', params: ['Dan'], }); }); test('query check: insert multiple empty rows', (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); const query = db .insert(users) .values([{}, {}]) .toSQL(); expect(query).toEqual({ sql: 'insert into "users" ("id", "name", "state") values (null, ?, null), (null, ?, null)', params: ['Dan', 'Dan'], }); }); test('Insert all defaults in 1 row', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('empty_insert_single', { id: integer('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); await db.run(sql`drop table if exists ${users}`); await db.run( sql`create table ${users} (id integer primary key, name text default 'Dan', state text)`, ); await db.insert(users).values({}).run(); const res = await db.select().from(users).all(); expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); }); test('Insert all defaults in multiple rows', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('empty_insert_multiple', { id: integer('id').primaryKey(), name: text('name').default('Dan'), state: text('state'), }); await db.run(sql`drop table if exists ${users}`); await db.run( sql`create table ${users} (id integer primary key, name text default 'Dan', state text)`, ); await db.insert(users).values([{}, {}]).run(); const res = await db.select().from(users).all(); expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); }); test('update returning sql', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ name: sql`upper(${usersTable.name})`, }).all(); expect(users).toEqual([{ name: 'JANE' }]); }); test('insert with auto increment', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([ { name: 'John' }, { name: 'Jane' }, { name: 'George' }, { name: 'Austin' }, ]).run(); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); expect(result).toEqual([ { id: 1, name: 'John' }, { id: 2, name: 'Jane' }, { id: 3, name: 'George' }, { id: 4, name: 'Austin' }, ]); }); test('insert with default values', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const result = await db.select().from(usersTable).all(); expect(result).toEqual([{ id: 1, name: 'John', verified: false, json: null, createdAt: result[0]!.createdAt }]); }); test('insert with overridden default values', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John', verified: true }).run(); const result = await db.select().from(usersTable).all(); expect(result).toEqual([{ id: 1, name: 'John', verified: true, json: null, createdAt: result[0]!.createdAt }]); }); test('update with returning all fields', async (ctx) => { const { db } = ctx.sqlite; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning() .all(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(5000); expect(users).toEqual([{ id: 1, name: 'Jane', verified: false, json: null, createdAt: users[0]!.createdAt }]); }); test('update with returning partial', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.update(usersTable).set({ name: 'Jane' }).where(eq(usersTable.name, 'John')).returning({ id: usersTable.id, name: usersTable.name, }).all(); expect(users).toEqual([{ id: 1, name: 'Jane' }]); }); test('delete with returning all fields', async (ctx) => { const { db } = ctx.sqlite; const now = Date.now(); await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning().all(); expect(users[0]!.createdAt).toBeInstanceOf(Date); expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(5000); expect(users).toEqual([{ id: 1, name: 'John', verified: false, json: null, createdAt: users[0]!.createdAt }]); }); test('delete with returning partial', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ id: usersTable.id, name: usersTable.name, }).all(); expect(users).toEqual([{ id: 1, name: 'John' }]); }); test('insert + select', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); expect(result).toEqual([{ id: 1, name: 'John' }]); await db.insert(usersTable).values({ name: 'Jane' }).run(); const result2 = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); expect(result2).toEqual([{ id: 1, name: 'John' }, { id: 2, name: 'Jane' }]); }); test('json insert', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John', json: ['foo', 'bar'] }).run(); const result = await db.select({ id: usersTable.id, name: usersTable.name, json: usersTable.json, }).from(usersTable).all(); expect(result).toEqual([{ id: 1, name: 'John', json: ['foo', 'bar'] }]); }); test('insert many', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', json: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]).run(); const result = await db.select({ id: usersTable.id, name: usersTable.name, json: usersTable.json, verified: usersTable.verified, }).from(usersTable).all(); expect(result).toEqual([ { id: 1, name: 'John', json: null, verified: false }, { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', json: null, verified: false }, { id: 4, name: 'Austin', json: null, verified: true }, ]); }); test('insert many with returning', async (ctx) => { const { db } = ctx.sqlite; const result = await db.insert(usersTable).values([ { name: 'John' }, { name: 'Bruce', json: ['foo', 'bar'] }, { name: 'Jane' }, { name: 'Austin', verified: true }, ]) .returning({ id: usersTable.id, name: usersTable.name, json: usersTable.json, verified: usersTable.verified, }) .all(); expect(result).toEqual([ { id: 1, name: 'John', json: null, verified: false }, { id: 2, name: 'Bruce', json: ['foo', 'bar'], verified: false }, { id: 3, name: 'Jane', json: null, verified: false }, { id: 4, name: 'Austin', json: null, verified: true }, ]); }); test('partial join with alias', async (ctx) => { const { db } = ctx.sqlite; const customerAlias = alias(usersTable, 'customer'); await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); const result = await db .select({ user: { id: usersTable.id, name: usersTable.name, }, customer: { id: customerAlias.id, name: customerAlias.name, }, }).from(usersTable) .leftJoin(customerAlias, eq(customerAlias.id, 11)) .where(eq(usersTable.id, 10)); expect(result).toEqual([{ user: { id: 10, name: 'Ivan' }, customer: { id: 11, name: 'Hans' }, }]); }); test('full join with alias', async (ctx) => { const { db } = ctx.sqlite; const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').notNull(), }); await db.run(sql`drop table if exists ${users}`); await db.run(sql`create table ${users} (id integer primary key, name text not null)`); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); const result = await db .select().from(users) .leftJoin(customers, eq(customers.id, 11)) .where(eq(users.id, 10)) .all(); expect(result).toEqual([{ users: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.run(sql`drop table ${users}`); }); test('select from alias', async (ctx) => { const { db } = ctx.sqlite; const sqliteTable = sqliteTableCreator((name) => `prefixed_${name}`); const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name').notNull(), }); await db.run(sql`drop table if exists ${users}`); await db.run(sql`create table ${users} (id integer primary key, name text not null)`); const user = alias(users, 'user'); const customers = alias(users, 'customer'); await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]).run(); const result = await db .select() .from(user) .leftJoin(customers, eq(customers.id, 11)) .where(eq(user.id, 10)) .all(); expect(result).toEqual([{ user: { id: 10, name: 'Ivan', }, customer: { id: 11, name: 'Hans', }, }]); await db.run(sql`drop table ${users}`); }); test('insert with spaces', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: sql`'Jo h n'` }).run(); const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).all(); expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); }); test('prepared statement', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const statement = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable).prepare(); const result = await statement.all(); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement reuse', async (ctx) => { const { db } = ctx.sqlite; const stmt = db.insert(usersTable).values({ name: sql.placeholder('name') }).prepare(); for (let i = 0; i < 10; i++) { await stmt.run({ name: `John ${i}` }); } const result = await db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable).all(); expect(result).toEqual([ { id: 1, name: 'John 0' }, { id: 2, name: 'John 1' }, { id: 3, name: 'John 2' }, { id: 4, name: 'John 3' }, { id: 5, name: 'John 4' }, { id: 6, name: 'John 5' }, { id: 7, name: 'John 6' }, { id: 8, name: 'John 7' }, { id: 9, name: 'John 8' }, { id: 10, name: 'John 9' }, ]); }); test('insert: placeholders on columns with encoder', async (ctx) => { const { db } = ctx.sqlite; const stmt = db.insert(usersTable).values({ name: 'John', verified: sql.placeholder('verified'), }).prepare(); await stmt.run({ verified: true }); await stmt.run({ verified: false }); const result = await db.select({ id: usersTable.id, verified: usersTable.verified, }).from(usersTable).all(); expect(result).toEqual([ { id: 1, verified: true }, { id: 2, verified: false }, ]); }); test('prepared statement with placeholder in .where', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const stmt = db.select({ id: usersTable.id, name: usersTable.name, }).from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .prepare(); const result = await stmt.all({ id: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('prepared statement with placeholder in .limit', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }).run(); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .where(eq(usersTable.id, sql.placeholder('id'))) .limit(sql.placeholder('limit')) .prepare(); const result = await stmt.all({ id: 1, limit: 1 }); expect(result).toEqual([{ id: 1, name: 'John' }]); expect(result).toHaveLength(1); }); test('prepared statement with placeholder in .offset', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]).run(); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .limit(sql.placeholder('limit')) .offset(sql.placeholder('offset')) .prepare(); const result = await stmt.all({ limit: 1, offset: 1 }); expect(result).toEqual([{ id: 2, name: 'John1' }]); }); test('prepared statement built using $dynamic', async (ctx) => { const { db } = ctx.sqlite; function withLimitOffset(qb: any) { return qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); } await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]).run(); const stmt = db .select({ id: usersTable.id, name: usersTable.name, }) .from(usersTable) .$dynamic(); withLimitOffset(stmt).prepare('stmt_limit'); const result = await stmt.all({ limit: 1, offset: 1 }); expect(result).toEqual([{ id: 2, name: 'John1' }]); expect(result).toHaveLength(1); }); test('select with group by as field', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.name) .all(); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with exists', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const user = alias(usersTable, 'user'); const result = await db.select({ name: usersTable.name }).from(usersTable).where( exists( db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), ), ).all(); expect(result).toEqual([{ name: 'John' }]); }); test('select with group by as sql', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`) .all(); expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); }); test('select with group by as sql + column', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(sql`${usersTable.name}`, usersTable.id) .all(); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by as column + sql', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .all(); expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); }); test('select with group by complex query', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]).run(); const result = await db.select({ name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, sql`${usersTable.name}`) .orderBy(asc(usersTable.name)) .limit(1) .all(); expect(result).toEqual([{ name: 'Jane' }]); }); test('build query', async (ctx) => { const { db } = ctx.sqlite; const query = db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable) .groupBy(usersTable.id, usersTable.name) .toSQL(); expect(query).toEqual({ sql: 'select "id", "name" from "users" group by "users"."id", "users"."name"', params: [], }); }); test('insert via db.run + select via db.all', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.all<{ id: number; name: string }>(sql`select id, name from "users"`); expect(result).toEqual([{ id: 1, name: 'John' }]); }); test('insert via db.get', async (ctx) => { const { db } = ctx.sqlite; const inserted = await db.get<{ id: number; name: string }>( sql`insert into ${usersTable} (${new Name( usersTable.name.name, )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted).toEqual({ id: 1, name: 'John' }); }); test('insert via db.run + select via db.get', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.get<{ id: number; name: string }>( sql`select ${usersTable.id}, ${usersTable.name} from ${usersTable}`, ); expect(result).toEqual({ id: 1, name: 'John' }); }); test('insert via db.get w/ query builder', async (ctx) => { const { db } = ctx.sqlite; const inserted = await db.get>( db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted).toEqual({ id: 1, name: 'John' }); }); test('select from a many subquery', async (ctx) => { const { db } = ctx.sqlite; await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 2 }, ]); const res = await db.select({ population: db.select({ count: count().as('count') }).from(users2Table).where( eq(users2Table.cityId, citiesTable.id), ).as( 'population', ), name: citiesTable.name, }).from(citiesTable); expectTypeOf(res).toEqualTypeOf<{ population: number; name: string; }[]>(); expect(res).toStrictEqual([{ population: 1, name: 'Paris', }, { population: 2, name: 'London', }]); }); test('select from a one subquery', async (ctx) => { const { db } = ctx.sqlite; await db.insert(citiesTable) .values([{ name: 'Paris' }, { name: 'London' }]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 2 }, ]); const res = await db.select({ cityName: db.select({ name: citiesTable.name }).from(citiesTable).where(eq(users2Table.cityId, citiesTable.id)) .as( 'cityName', ), name: users2Table.name, }).from(users2Table); expectTypeOf(res).toEqualTypeOf<{ cityName: string; name: string; }[]>(); expect(res).toStrictEqual([{ cityName: 'Paris', name: 'John', }, { cityName: 'London', name: 'Jane', }, { cityName: 'London', name: 'Jack', }]); }); test('join subquery', async (ctx) => { const { db } = ctx.sqlite; await db.insert(courseCategoriesTable).values([ { name: 'Category 1' }, { name: 'Category 2' }, { name: 'Category 3' }, { name: 'Category 4' }, ]).run(); await db.insert(coursesTable).values([ { name: 'Development', categoryId: 2 }, { name: 'IT & Software', categoryId: 3 }, { name: 'Marketing', categoryId: 4 }, { name: 'Design', categoryId: 1 }, ]).run(); const sq2 = db .select({ categoryId: courseCategoriesTable.id, category: courseCategoriesTable.name, total: sql`count(${courseCategoriesTable.id})`, }) .from(courseCategoriesTable) .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) .as('sq2'); const res = await db .select({ courseName: coursesTable.name, categoryId: sq2.categoryId, }) .from(coursesTable) .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) .orderBy(coursesTable.name) .all(); expect(res).toEqual([ { courseName: 'Design', categoryId: 1 }, { courseName: 'Development', categoryId: 2 }, { courseName: 'IT & Software', categoryId: 3 }, { courseName: 'Marketing', categoryId: 4 }, ]); }); test('with ... select', async (ctx) => { const { db } = ctx.sqlite; await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]).run(); const regionalSales = await db .$with('regional_sales') .as( db .select({ region: orders.region, totalSales: sql`sum(${orders.amount})`.as('total_sales'), }) .from(orders) .groupBy(orders.region), ); const topRegions = await db .$with('top_regions') .as( db .select({ region: regionalSales.region, }) .from(regionalSales) .where( gt( regionalSales.totalSales, db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), ), ), ); const result = await db .with(regionalSales, topRegions) .select({ region: orders.region, product: orders.product, productUnits: sql`cast(sum(${orders.quantity}) as int)`, productSales: sql`cast(sum(${orders.amount}) as int)`, }) .from(orders) .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) .groupBy(orders.region, orders.product) .orderBy(orders.region, orders.product) .all(); expect(result).toEqual([ { region: 'Europe', product: 'A', productUnits: 3, productSales: 30, }, { region: 'Europe', product: 'B', productUnits: 5, productSales: 50, }, { region: 'US', product: 'A', productUnits: 7, productSales: 70, }, { region: 'US', product: 'B', productUnits: 9, productSales: 90, }, ]); }); test('with ... update', async (ctx) => { const { db } = ctx.sqlite; const products = sqliteTable('products', { id: integer('id').primaryKey(), price: numeric('price').notNull(), cheap: integer('cheap', { mode: 'boolean' }).notNull().default(false), }); await db.run(sql`drop table if exists ${products}`); await db.run(sql` create table ${products} ( id integer primary key, price numeric not null, cheap integer not null default 0 ) `); await db.insert(products).values([ { price: '10.99' }, { price: '25.85' }, { price: '32.99' }, { price: '2.50' }, { price: '4.59' }, ]); const averagePrice = db .$with('average_price') .as( db .select({ value: sql`avg(${products.price})`.as('value'), }) .from(products), ); const result = await db .with(averagePrice) .update(products) .set({ cheap: true, }) .where(lt(products.price, sql`(select * from ${averagePrice})`)) .returning({ id: products.id, }); expect(result).toEqual([ { id: 1 }, { id: 4 }, { id: 5 }, ]); }); test('with ... insert', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users', { username: text('username').notNull(), admin: integer('admin', { mode: 'boolean' }).notNull(), }); await db.run(sql`drop table if exists ${users}`); await db.run(sql`create table ${users} (username text not null, admin integer not null default 0)`); const userCount = db .$with('user_count') .as( db .select({ value: sql`count(*)`.as('value'), }) .from(users), ); const result = await db .with(userCount) .insert(users) .values([ { username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }, ]) .returning({ admin: users.admin, }); expect(result).toEqual([{ admin: true }]); }); test('with ... delete', async (ctx) => { const { db } = ctx.sqlite; await db.insert(orders).values([ { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 30, quantity: 3 }, { region: 'US', product: 'A', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 40, quantity: 4 }, { region: 'US', product: 'B', amount: 50, quantity: 5 }, ]); const averageAmount = db .$with('average_amount') .as( db .select({ value: sql`avg(${orders.amount})`.as('value'), }) .from(orders), ); const result = await db .with(averageAmount) .delete(orders) .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) .returning({ id: orders.id, }); expect(result).toEqual([ { id: 6 }, { id: 7 }, { id: 8 }, ]); }); test('select from subquery sql', async (ctx) => { const { db } = ctx.sqlite; await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]).run(); const sq = db .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) .from(users2Table) .as('sq'); const res = await db.select({ name: sq.name }).from(sq).all(); expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); }); test('select a field without joining its table', (ctx) => { const { db } = ctx.sqlite; expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare()).toThrowError(); }); test('select all fields from subquery without alias', (ctx) => { const { db } = ctx.sqlite; const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); expect(() => db.select().from(sq).prepare()).toThrowError(); }); test('select count()', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]).run(); const res = await db.select({ count: sql`count(*)` }).from(usersTable).all(); expect(res).toEqual([{ count: 2 }]); }); test('having', async (ctx) => { const { db } = ctx.sqlite; await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]).run(); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]).run(); const result = await db .select({ id: citiesTable.id, name: sql`upper(${citiesTable.name})`.as('upper_name'), usersCount: sql`count(${users2Table.id})`.as('users_count'), }) .from(citiesTable) .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) .where(({ name }) => sql`length(${name}) >= 3`) .groupBy(citiesTable.id) .having(({ usersCount }) => sql`${usersCount} > 0`) .orderBy(({ name }) => name) .all(); expect(result).toEqual([ { id: 1, name: 'LONDON', usersCount: 2, }, { id: 2, name: 'PARIS', usersCount: 1, }, ]); }); test('view', async (ctx) => { const { db } = ctx.sqlite; const newYorkers1 = sqliteView('new_yorkers') .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); const newYorkers2 = sqliteView('new_yorkers', { id: integer('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); const newYorkers3 = sqliteView('new_yorkers', { id: integer('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }).existing(); await db.run(sql`create view if not exists new_yorkers as ${getViewConfig(newYorkers1).query}`); await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]).run(); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { name: 'Jack', cityId: 2 }, ]).run(); { const result = await db.select().from(newYorkers1).all(); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers2).all(); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select().from(newYorkers3).all(); expect(result).toEqual([ { id: 1, name: 'John', cityId: 1 }, { id: 2, name: 'Jane', cityId: 1 }, ]); } { const result = await db.select({ name: newYorkers1.name }).from(newYorkers1).all(); expect(result).toEqual([ { name: 'John' }, { name: 'Jane' }, ]); } await db.run(sql`drop view ${newYorkers1}`); }); test('insert null timestamp', async (ctx) => { const { db } = ctx.sqlite; const test = sqliteTable('test', { t: integer('t', { mode: 'timestamp' }), }); await db.run(sql`create table ${test} (t timestamp)`); await db.insert(test).values({ t: null }).run(); const res = await db.select().from(test).all(); expect(res).toEqual([{ t: null }]); await db.run(sql`drop table ${test}`); }); test('select from raw sql', async (ctx) => { const { db } = ctx.sqlite; const result = await db.select({ id: sql`id`, name: sql`name`, }).from(sql`(select 1 as id, 'John' as name) as users`).all(); Expect>; expect(result).toEqual([ { id: 1, name: 'John' }, ]); }); test('select from raw sql with joins', async (ctx) => { const { db } = ctx.sqlite; const result = await db .select({ id: sql`users.id`, name: sql`users.name`.as('userName'), userCity: sql`users.city`, cityName: sql`cities.name`.as('cityName'), }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`) .all(); Expect>; expect(result).toEqual([ { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, ]); }); test('join on aliased sql from select', async (ctx) => { const { db } = ctx.sqlite; const result = await db .select({ userId: sql`users.id`.as('userId'), name: sql`users.name`.as('userName'), userCity: sql`users.city`, cityId: sql`cities.id`.as('cityId'), cityName: sql`cities.name`.as('cityName'), }) .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)) .all(); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('join on aliased sql from with clause', async (ctx) => { const { db } = ctx.sqlite; const users = db.$with('users').as( db.select({ id: sql`id`.as('userId'), name: sql`name`.as('userName'), city: sql`city`.as('city'), }).from( sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, ), ); const cities = db.$with('cities').as( db.select({ id: sql`id`.as('cityId'), name: sql`name`.as('cityName'), }).from( sql`(select 1 as id, 'Paris' as name) as cities`, ), ); const result = await db .with(users, cities) .select({ userId: users.id, name: users.name, userCity: users.city, cityId: cities.id, cityName: cities.name, }) .from(users) .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)) .all(); Expect< Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> >; expect(result).toEqual([ { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, ]); }); test('prefixed table', async (ctx) => { const { db } = ctx.sqlite; const sqliteTable = sqliteTableCreator((name) => `myprefix_${name}`); const users = sqliteTable('test_prefixed_table_with_unique_name', { id: integer('id').primaryKey(), name: text('name').notNull(), }); await db.run(sql`drop table if exists ${users}`); await db.run( sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, ); await db.insert(users).values({ id: 1, name: 'John' }).run(); const result = await db.select().from(users).all(); expect(result).toEqual([{ id: 1, name: 'John' }]); await db.run(sql`drop table ${users}`); }); test('orderBy with aliased column', (ctx) => { const { db } = ctx.sqlite; const query = db.select({ test: sql`something`.as('test'), }).from(users2Table).orderBy((fields) => fields.test).toSQL(); expect(query.sql).toBe('select something as "test" from "users2" order by "test"'); }); test('transaction', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users_transactions', { id: integer('id').primaryKey(), balance: integer('balance').notNull(), }); const products = sqliteTable('products_transactions', { id: integer('id').primaryKey(), price: integer('price').notNull(), stock: integer('stock').notNull(), }); await db.run(sql`drop table if exists ${users}`); await db.run(sql`drop table if exists ${products}`); await db.run(sql`create table users_transactions (id integer not null primary key, balance integer not null)`); await db.run( sql`create table products_transactions (id integer not null primary key, price integer not null, stock integer not null)`, ); const user = await db.insert(users).values({ balance: 100 }).returning().get(); const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().get(); await db.transaction(async (tx) => { await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)).run(); await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)).run(); }); const result = await db.select().from(users).all(); expect(result).toEqual([{ id: 1, balance: 90 }]); await db.run(sql`drop table ${users}`); await db.run(sql`drop table ${products}`); }); test('transaction rollback', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users_transactions_rollback', { id: integer('id').primaryKey(), balance: integer('balance').notNull(), }); await db.run(sql`drop table if exists ${users}`); await db.run( sql`create table users_transactions_rollback (id integer not null primary key, balance integer not null)`, ); await expect(async () => { await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }).run(); tx.rollback(); }); }).rejects.toThrowError(TransactionRollbackError); const result = await db.select().from(users).all(); expect(result).toEqual([]); await db.run(sql`drop table ${users}`); }); test('nested transaction', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users_nested_transactions', { id: integer('id').primaryKey(), balance: integer('balance').notNull(), }); await db.run(sql`drop table if exists ${users}`); await db.run( sql`create table users_nested_transactions (id integer not null primary key, balance integer not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }).run(); await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }).run(); }); }); const result = await db.select().from(users).all(); expect(result).toEqual([{ id: 1, balance: 200 }]); await db.run(sql`drop table ${users}`); }); test('nested transaction rollback', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users_nested_transactions_rollback', { id: integer('id').primaryKey(), balance: integer('balance').notNull(), }); await db.run(sql`drop table if exists ${users}`); await db.run( sql`create table users_nested_transactions_rollback (id integer not null primary key, balance integer not null)`, ); await db.transaction(async (tx) => { await tx.insert(users).values({ balance: 100 }).run(); await expect(async () => { await tx.transaction(async (tx) => { await tx.update(users).set({ balance: 200 }).run(); tx.rollback(); }); }).rejects.toThrowError(TransactionRollbackError); }); const result = await db.select().from(users).all(); expect(result).toEqual([{ id: 1, balance: 100 }]); await db.run(sql`drop table ${users}`); }); test('join subquery with join', async (ctx) => { const { db } = ctx.sqlite; const internalStaff = sqliteTable('internal_staff', { userId: integer('user_id').notNull(), }); const customUser = sqliteTable('custom_user', { id: integer('id').notNull(), }); const ticket = sqliteTable('ticket', { staffId: integer('staff_id').notNull(), }); await db.run(sql`drop table if exists ${internalStaff}`); await db.run(sql`drop table if exists ${customUser}`); await db.run(sql`drop table if exists ${ticket}`); await db.run(sql`create table internal_staff (user_id integer not null)`); await db.run(sql`create table custom_user (id integer not null)`); await db.run(sql`create table ticket (staff_id integer not null)`); await db.insert(internalStaff).values({ userId: 1 }).run(); await db.insert(customUser).values({ id: 1 }).run(); await db.insert(ticket).values({ staffId: 1 }).run(); const subq = await db .select() .from(internalStaff) .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) .as('internal_staff'); const mainQuery = await db .select() .from(ticket) .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)) .all(); expect(mainQuery).toEqual([{ ticket: { staffId: 1 }, internal_staff: { internal_staff: { userId: 1 }, custom_user: { id: 1 }, }, }]); await db.run(sql`drop table ${internalStaff}`); await db.run(sql`drop table ${customUser}`); await db.run(sql`drop table ${ticket}`); }); test('join view as subquery', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users_join_view', { id: integer('id').primaryKey(), name: text('name').notNull(), cityId: integer('city_id').notNull(), }); const newYorkers = sqliteView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); await db.run(sql`drop table if exists ${users}`); await db.run(sql`drop view if exists ${newYorkers}`); await db.run( sql`create table ${users} (id integer not null primary key, name text not null, city_id integer not null)`, ); await db.run(sql`create view if not exists ${newYorkers} as ${getViewConfig(newYorkers).query}`); db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 1 }, { name: 'Jill', cityId: 2 }, ]).run(); const sq = db.select().from(newYorkers).as('new_yorkers_sq'); const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)).all(); expect(result).toEqual([ { users_join_view: { id: 1, name: 'John', cityId: 1 }, new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, }, { users_join_view: { id: 2, name: 'Jane', cityId: 2 }, new_yorkers_sq: null, }, { users_join_view: { id: 3, name: 'Jack', cityId: 1 }, new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, }, { users_join_view: { id: 4, name: 'Jill', cityId: 2 }, new_yorkers_sq: null, }, ]); await db.run(sql`drop view ${newYorkers}`); await db.run(sql`drop table ${users}`); }); test('insert with onConflict do nothing', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); await db .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoNothing() .run(); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)) .all(); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert with onConflict do nothing using composite pk', async (ctx) => { const { db } = ctx.sqlite; await db .insert(pkExampleTable) .values({ id: 1, name: 'John', email: 'john@example.com' }) .run(); await db .insert(pkExampleTable) .values({ id: 1, name: 'John', email: 'john1@example.com' }) .onConflictDoNothing() .run(); const res = await db .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) .from(pkExampleTable) .where(eq(pkExampleTable.id, 1)) .all(); expect(res).toEqual([{ id: 1, name: 'John', email: 'john@example.com' }]); }); test('insert with onConflict do nothing using target', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); await db .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoNothing({ target: usersTable.id }) .run(); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)) .all(); expect(res).toEqual([{ id: 1, name: 'John' }]); }); test('insert with onConflict do nothing using composite pk as target', async (ctx) => { const { db } = ctx.sqlite; await db .insert(pkExampleTable) .values({ id: 1, name: 'John', email: 'john@example.com' }) .run(); await db .insert(pkExampleTable) .values({ id: 1, name: 'John', email: 'john1@example.com' }) .onConflictDoNothing({ target: [pkExampleTable.id, pkExampleTable.name] }) .run(); const res = await db .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) .from(pkExampleTable) .where(eq(pkExampleTable.id, 1)) .all(); expect(res).toEqual([{ id: 1, name: 'John', email: 'john@example.com' }]); }); test('insert with onConflict do update', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ id: 1, name: 'John' }).run(); await db .insert(usersTable) .values({ id: 1, name: 'John' }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) .run(); const res = await db .select({ id: usersTable.id, name: usersTable.name }) .from(usersTable) .where(eq(usersTable.id, 1)) .all(); expect(res).toEqual([{ id: 1, name: 'John1' }]); }); test('insert with onConflict do update where', async (ctx) => { const { db } = ctx.sqlite; await db .insert(usersTable) .values([{ id: 1, name: 'John', verified: false }]) .run(); await db .insert(usersTable) .values({ id: 1, name: 'John1', verified: true }) .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1', verified: true }, where: eq(usersTable.verified, false), }) .run(); const res = await db .select({ id: usersTable.id, name: usersTable.name, verified: usersTable.verified }) .from(usersTable) .where(eq(usersTable.id, 1)) .all(); expect(res).toEqual([{ id: 1, name: 'John1', verified: true }]); }); test('insert with onConflict do update using composite pk', async (ctx) => { const { db } = ctx.sqlite; await db.insert(pkExampleTable).values({ id: 1, name: 'John', email: 'john@example.com' }).run(); await db .insert(pkExampleTable) .values({ id: 1, name: 'John', email: 'john@example.com' }) .onConflictDoUpdate({ target: [pkExampleTable.id, pkExampleTable.name], set: { email: 'john1@example.com' } }) .run(); const res = await db .select({ id: pkExampleTable.id, name: pkExampleTable.name, email: pkExampleTable.email }) .from(pkExampleTable) .where(eq(pkExampleTable.id, 1)) .all(); expect(res).toEqual([{ id: 1, name: 'John', email: 'john1@example.com' }]); }); test('insert with onConflict chained (.update -> .nothing)', async (ctx) => { const { db } = ctx.sqlite; await db.insert(conflictChainExampleTable).values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'John Second', email: '2john@example.com', }]).run(); await db .insert(conflictChainExampleTable) .values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'Anthony', email: 'idthief@example.com', }]) .onConflictDoUpdate({ target: [conflictChainExampleTable.id, conflictChainExampleTable.name], set: { email: 'john1@example.com' }, }) .onConflictDoNothing({ target: conflictChainExampleTable.id }) .run(); const res = await db .select({ id: conflictChainExampleTable.id, name: conflictChainExampleTable.name, email: conflictChainExampleTable.email, }) .from(conflictChainExampleTable) .orderBy(conflictChainExampleTable.id) .all(); expect(res).toEqual([{ id: 1, name: 'John', email: 'john1@example.com' }, { id: 2, name: 'John Second', email: '2john@example.com', }]); }); test('insert with onConflict chained (.nothing -> .update)', async (ctx) => { const { db } = ctx.sqlite; await db.insert(conflictChainExampleTable).values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'John Second', email: '2john@example.com', }]).run(); await db .insert(conflictChainExampleTable) .values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'Anthony', email: 'idthief@example.com', }]) .onConflictDoUpdate({ target: [conflictChainExampleTable.id, conflictChainExampleTable.name], set: { email: 'john1@example.com' }, }) .onConflictDoNothing({ target: conflictChainExampleTable.id }) .run(); const res = await db .select({ id: conflictChainExampleTable.id, name: conflictChainExampleTable.name, email: conflictChainExampleTable.email, }) .from(conflictChainExampleTable) .orderBy(conflictChainExampleTable.id) .all(); expect(res).toEqual([{ id: 1, name: 'John', email: 'john1@example.com' }, { id: 2, name: 'John Second', email: '2john@example.com', }]); }); test('insert with onConflict chained (.update -> .update)', async (ctx) => { const { db } = ctx.sqlite; await db.insert(conflictChainExampleTable).values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'John Second', email: '2john@example.com', }]).run(); await db .insert(conflictChainExampleTable) .values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'Anthony', email: 'idthief@example.com', }]) .onConflictDoUpdate({ target: [conflictChainExampleTable.id, conflictChainExampleTable.name], set: { email: 'john1@example.com' }, }) .onConflictDoUpdate({ target: conflictChainExampleTable.id, set: { email: 'john2@example.com' } }) .run(); const res = await db .select({ id: conflictChainExampleTable.id, name: conflictChainExampleTable.name, email: conflictChainExampleTable.email, }) .from(conflictChainExampleTable) .orderBy(conflictChainExampleTable.id) .all(); expect(res).toEqual([{ id: 1, name: 'John', email: 'john1@example.com' }, { id: 2, name: 'John Second', email: 'john2@example.com', }]); }); test('insert with onConflict chained (.nothing -> .nothing)', async (ctx) => { const { db } = ctx.sqlite; await db.insert(conflictChainExampleTable).values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'John Second', email: '2john@example.com', }]).run(); await db .insert(conflictChainExampleTable) .values([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'Anthony', email: 'idthief@example.com', }]) .onConflictDoNothing({ target: [conflictChainExampleTable.id, conflictChainExampleTable.name], }) .onConflictDoNothing({ target: conflictChainExampleTable.id }) .run(); const res = await db .select({ id: conflictChainExampleTable.id, name: conflictChainExampleTable.name, email: conflictChainExampleTable.email, }) .from(conflictChainExampleTable) .orderBy(conflictChainExampleTable.id) .all(); expect(res).toEqual([{ id: 1, name: 'John', email: 'john@example.com' }, { id: 2, name: 'John Second', email: '2john@example.com', }]); }); test('insert undefined', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); await db.run(sql`drop table if exists ${users}`); await db.run( sql`create table ${users} (id integer primary key, name text)`, ); await expect((async () => { await db.insert(users).values({ name: undefined }).run(); })()).resolves.not.toThrowError(); await db.run(sql`drop table ${users}`); }); test('update undefined', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); await db.run(sql`drop table if exists ${users}`); await db.run( sql`create table ${users} (id integer primary key, name text)`, ); await expect((async () => { await db.update(users).set({ name: undefined }).run(); })()).rejects.toThrowError(); await expect((async () => { await db.update(users).set({ id: 1, name: undefined }).run(); })()).resolves.not.toThrowError(); await db.run(sql`drop table ${users}`); }); test('async api - CRUD', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); db.run(sql`drop table if exists ${users}`); db.run( sql`create table ${users} (id integer primary key, name text)`, ); await db.insert(users).values({ id: 1, name: 'John' }); const res = await db.select().from(users); expect(res).toEqual([{ id: 1, name: 'John' }]); await db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)); const res1 = await db.select().from(users); expect(res1).toEqual([{ id: 1, name: 'John1' }]); await db.delete(users).where(eq(users.id, 1)); const res2 = await db.select().from(users); expect(res2).toEqual([]); await db.run(sql`drop table ${users}`); }); test('async api - insert + select w/ prepare + async execute', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); db.run(sql`drop table if exists ${users}`); db.run( sql`create table ${users} (id integer primary key, name text)`, ); const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); await insertStmt.execute(); const selectStmt = db.select().from(users).prepare(); const res = await selectStmt.execute(); expect(res).toEqual([{ id: 1, name: 'John' }]); const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); await updateStmt.execute(); const res1 = await selectStmt.execute(); expect(res1).toEqual([{ id: 1, name: 'John1' }]); const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); await deleteStmt.execute(); const res2 = await selectStmt.execute(); expect(res2).toEqual([]); await db.run(sql`drop table ${users}`); }); test('async api - insert + select w/ prepare + sync execute', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); db.run(sql`drop table if exists ${users}`); db.run( sql`create table ${users} (id integer primary key, name text)`, ); const insertStmt = db.insert(users).values({ id: 1, name: 'John' }).prepare(); await insertStmt.execute(); const selectStmt = db.select().from(users).prepare(); const res = await selectStmt.execute(); expect(res).toEqual([{ id: 1, name: 'John' }]); const updateStmt = db.update(users).set({ name: 'John1' }).where(eq(users.id, 1)).prepare(); await updateStmt.execute(); const res1 = await selectStmt.execute(); expect(res1).toEqual([{ id: 1, name: 'John1' }]); const deleteStmt = db.delete(users).where(eq(users.id, 1)).prepare(); await deleteStmt.execute(); const res2 = await selectStmt.execute(); expect(res2).toEqual([]); await db.run(sql`drop table ${users}`); }); test('select + .get() for empty result', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users', { id: integer('id').primaryKey(), name: text('name'), }); db.run(sql`drop table if exists ${users}`); db.run( sql`create table ${users} (id integer primary key, name text)`, ); const res = await db.select().from(users).where(eq(users.id, 1)).get(); expect(res).toBeUndefined(); await db.run(sql`drop table ${users}`); }); test('set operations (union) from query builder with subquery', async (ctx) => { const { db } = ctx.sqlite; await setupSetOperationTest(db); const sq = db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table), ).orderBy(asc(sql`name`)).as('sq'); const result = await db.select().from(sq).limit(5).offset(5); expect(result).toHaveLength(5); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 7, name: 'Mary' }, { id: 1, name: 'New York' }, { id: 4, name: 'Peter' }, { id: 8, name: 'Sally' }, ]); await expect(async () => { db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table), ).orderBy(asc(sql`name`)); }).rejects.toThrowError(); }); test('set operations (union) as function', async (ctx) => { const { db } = ctx.sqlite; await setupSetOperationTest(db); const result = await union( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`name`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 1, name: 'John' }, { id: 1, name: 'New York' }, ]); await expect(async () => { union( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`name`)); }).rejects.toThrowError(); }); test('set operations (union all) from query builder', async (ctx) => { const { db } = ctx.sqlite; await setupSetOperationTest(db); const result = await db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).unionAll( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable), ).orderBy(asc(citiesTable.id)).limit(5).offset(1); expect(result).toHaveLength(5); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, { id: 3, name: 'Tampa' }, ]); await expect(async () => { db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).unionAll( db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable), ).orderBy(asc(citiesTable.id)).limit(5).offset(1); }).rejects.toThrowError(); }); test('set operations (union all) as function', async (ctx) => { const { db } = ctx.sqlite; await setupSetOperationTest(db); const result = await unionAll( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); expect(result).toHaveLength(3); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 1, name: 'John' }, { id: 1, name: 'John' }, ]); await expect(async () => { unionAll( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), ); }).rejects.toThrowError(); }); test('set operations (intersect) from query builder', async (ctx) => { const { db } = ctx.sqlite; await setupSetOperationTest(db); const result = await db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`name`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect(async () => { db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`name`)); }).rejects.toThrowError(); }); test('set operations (intersect) as function', async (ctx) => { const { db } = ctx.sqlite; await setupSetOperationTest(db); const result = await intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); expect(result).toHaveLength(0); expect(result).toEqual([]); await expect(async () => { intersect( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ name: users2Table.name, id: users2Table.id }) .from(users2Table).where(eq(users2Table.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ); }).rejects.toThrowError(); }); test('set operations (except) from query builder', async (ctx) => { const { db } = ctx.sqlite; await setupSetOperationTest(db); const result = await db .select() .from(citiesTable).except( db .select() .from(citiesTable).where(gt(citiesTable.id, 1)), ); expect(result).toHaveLength(1); expect(result).toEqual([ { id: 1, name: 'New York' }, ]); await expect(async () => { db .select() .from(citiesTable).except( db .select({ name: users2Table.name, id: users2Table.id }) .from(citiesTable).where(gt(citiesTable.id, 1)), ); }).rejects.toThrowError(); }); test('set operations (except) as function', async (ctx) => { const { db } = ctx.sqlite; await setupSetOperationTest(db); const result = await except( db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable), db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, ]); await expect(async () => { except( db .select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable), db .select({ id: citiesTable.id, name: citiesTable.name }) .from(citiesTable).where(eq(citiesTable.id, 1)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), ).orderBy(asc(sql`id`)); }).rejects.toThrowError(); }); test('set operations (mixed) from query builder', async (ctx) => { const { db } = ctx.sqlite; await setupSetOperationTest(db); const result = await db .select() .from(citiesTable).except( ({ unionAll }) => unionAll( db .select() .from(citiesTable).where(gt(citiesTable.id, 1)), db.select().from(citiesTable).where(eq(citiesTable.id, 2)), ), ); expect(result).toHaveLength(2); expect(result).toEqual([ { id: 1, name: 'New York' }, { id: 2, name: 'London' }, ]); await expect(async () => { db .select() .from(citiesTable).except( ({ unionAll }) => unionAll( db .select() .from(citiesTable).where(gt(citiesTable.id, 1)), db.select({ name: citiesTable.name, id: citiesTable.id }) .from(citiesTable).where(eq(citiesTable.id, 2)), ), ); }).rejects.toThrowError(); }); test('set operations (mixed all) as function with subquery', async (ctx) => { const { db } = ctx.sqlite; await setupSetOperationTest(db); const sq = union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), except( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id, 5)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 7)), ), db .select().from(citiesTable).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`id`)).as('sq'); const result = await db.select().from(sq).limit(4).offset(1); expect(result).toHaveLength(4); expect(result).toEqual([ { id: 2, name: 'London' }, { id: 3, name: 'Tampa' }, { id: 5, name: 'Ben' }, { id: 6, name: 'Jill' }, ]); await expect(async () => { union( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 1)), except( db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(gte(users2Table.id, 5)), db .select({ id: users2Table.id, name: users2Table.name }) .from(users2Table).where(eq(users2Table.id, 7)), ), db .select({ name: users2Table.name, id: users2Table.id }) .from(citiesTable).where(gt(citiesTable.id, 1)), ).orderBy(asc(sql`id`)); }).rejects.toThrowError(); }); test('define constraints as array', async (_ctx) => { const table = sqliteTable('name', { id: int(), }, (t) => [ index('name').on(t.id), primaryKey({ columns: [t.id], name: 'custom' }), ]); const { indexes, primaryKeys } = getTableConfig(table); expect(indexes.length).toBe(1); expect(primaryKeys.length).toBe(1); }); test('define constraints as array inside third param', async (_ctx) => { const table = sqliteTable('name', { id: int(), }, (t) => [ index('name').on(t.id), primaryKey({ columns: [t.id], name: 'custom' }), ]); const { indexes, primaryKeys } = getTableConfig(table); expect(indexes.length).toBe(1); expect(primaryKeys.length).toBe(1); }); test('aggregate function: count', async (ctx) => { const { db } = ctx.sqlite; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: count() }).from(table); const result2 = await db.select({ value: count(table.a) }).from(table); const result3 = await db.select({ value: countDistinct(table.name) }).from(table); expect(result1[0]?.value).toBe(7); expect(result2[0]?.value).toBe(5); expect(result3[0]?.value).toBe(6); }); test('aggregate function: avg', async (ctx) => { const { db } = ctx.sqlite; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: avg(table.a) }).from(table); const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('24'); expect(result2[0]?.value).toBeNull(); expect(result3[0]?.value).toBe('42.5'); }); test('aggregate function: sum', async (ctx) => { const { db } = ctx.sqlite; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: sum(table.b) }).from(table); const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); expect(result1[0]?.value).toBe('200'); expect(result2[0]?.value).toBeNull(); expect(result3[0]?.value).toBe('170'); }); test('aggregate function: max', async (ctx) => { const { db } = ctx.sqlite; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: max(table.b) }).from(table); const result2 = await db.select({ value: max(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(90); expect(result2[0]?.value).toBeNull(); }); test('aggregate function: min', async (ctx) => { const { db } = ctx.sqlite; const table = aggregateTable; await setupAggregateFunctionsTest(db); const result1 = await db.select({ value: min(table.b) }).from(table); const result2 = await db.select({ value: min(table.nullOnly) }).from(table); expect(result1[0]?.value).toBe(10); expect(result2[0]?.value).toBeNull(); }); test('test $onUpdateFn and $onUpdate works as $default', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( sql` create table ${usersOnUpdate} ( id integer primary key autoincrement, name text not null, update_counter integer default 1 not null, updated_at integer, always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 250; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); test('test $onUpdateFn and $onUpdate works updating', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`drop table if exists ${usersOnUpdate}`); await db.run( sql` create table ${usersOnUpdate} ( id integer primary key autoincrement, name text not null, update_counter integer default 1, updated_at integer, always_null text ) `, ); await db.insert(usersOnUpdate).values([ { name: 'John', alwaysNull: 'this will be null after updating' }, { name: 'Jane' }, { name: 'Jack' }, { name: 'Jill' }, ]); const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); expect(response).toEqual([ { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, ]); const msDelay = 250; for (const eachUser of justDates) { expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); } }); test('test $onUpdateFn and $onUpdate works with sql value', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), updatedAt: integer('updated_at') .notNull() .$onUpdate(() => sql`(strftime('%s', 'now') * 1000) + (strftime('%f', 'now') - strftime('%S', 'now')) * 1000` ), }); await db.run(sql`drop table if exists ${users}`); await db.run( sql` create table ${users} ( \`id\` integer primary key autoincrement, \`name\` text not null, \`updated_at\` integer not null ) `, ); const insertResp = await db.insert(users).values({ name: 'John', }).returning({ updatedAt: users.updatedAt, }); await new Promise((resolve) => setTimeout(resolve, 1000)); const now = Date.now(); await new Promise((resolve) => setTimeout(resolve, 1000)); const updateResp = await db.update(users).set({ name: 'John', }).returning({ updatedAt: users.updatedAt, }); expect(insertResp[0]?.updatedAt ?? 0).lessThan(now); expect(updateResp[0]?.updatedAt ?? 0).greaterThan(now); }); test('$count separate', async (ctx) => { const { db } = ctx.sqlite; const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.run(sql`drop table if exists ${countTestTable}`); await db.run(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.$count(countTestTable); await db.run(sql`drop table ${countTestTable}`); expect(count).toStrictEqual(4); }); test('$count embedded', async (ctx) => { const { db } = ctx.sqlite; const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.run(sql`drop table if exists ${countTestTable}`); await db.run(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.select({ count: db.$count(countTestTable), }).from(countTestTable); await db.run(sql`drop table ${countTestTable}`); expect(count).toStrictEqual([ { count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }, ]); }); test('$count separate reuse', async (ctx) => { const { db } = ctx.sqlite; const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.run(sql`drop table if exists ${countTestTable}`); await db.run(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = db.$count(countTestTable); const count1 = await count; await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); const count2 = await count; await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); const count3 = await count; await db.run(sql`drop table ${countTestTable}`); expect(count1).toStrictEqual(4); expect(count2).toStrictEqual(5); expect(count3).toStrictEqual(6); }); test('$count embedded reuse', async (ctx) => { const { db } = ctx.sqlite; const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.run(sql`drop table if exists ${countTestTable}`); await db.run(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = db.select({ count: db.$count(countTestTable), }).from(countTestTable); const count1 = await count; await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); const count2 = await count; await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); const count3 = await count; await db.run(sql`drop table ${countTestTable}`); expect(count1).toStrictEqual([ { count: 4 }, { count: 4 }, { count: 4 }, { count: 4 }, ]); expect(count2).toStrictEqual([ { count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }, { count: 5 }, ]); expect(count3).toStrictEqual([ { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, { count: 6 }, ]); }); test('$count separate with filters', async (ctx) => { const { db } = ctx.sqlite; const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.run(sql`drop table if exists ${countTestTable}`); await db.run(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.$count(countTestTable, gt(countTestTable.id, 1)); await db.run(sql`drop table ${countTestTable}`); expect(count).toStrictEqual(3); }); test('$count embedded with filters', async (ctx) => { const { db } = ctx.sqlite; const countTestTable = sqliteTable('count_test', { id: int('id').notNull(), name: text('name').notNull(), }); await db.run(sql`drop table if exists ${countTestTable}`); await db.run(sql`create table ${countTestTable} (id int, name text)`); await db.insert(countTestTable).values([ { id: 1, name: 'First' }, { id: 2, name: 'Second' }, { id: 3, name: 'Third' }, { id: 4, name: 'Fourth' }, ]); const count = await db.select({ count: db.$count(countTestTable, gt(countTestTable.id, 1)), }).from(countTestTable); await db.run(sql`drop table ${countTestTable}`); expect(count).toStrictEqual([ { count: 3 }, { count: 3 }, { count: 3 }, { count: 3 }, ]); }); test('update with limit and order by', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([ { name: 'Barry', verified: false }, { name: 'Alan', verified: false }, { name: 'Carl', verified: false }, ]); await db.update(usersTable).set({ verified: true }).limit(2).orderBy(asc(usersTable.name)); const result = await db.select({ name: usersTable.name, verified: usersTable.verified }).from(usersTable).orderBy( asc(usersTable.name), ); expect(result).toStrictEqual([ { name: 'Alan', verified: true }, { name: 'Barry', verified: true }, { name: 'Carl', verified: false }, ]); }); test('delete with limit and order by', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values([ { name: 'Barry', verified: false }, { name: 'Alan', verified: false }, { name: 'Carl', verified: false }, ]); await db.delete(usersTable).where(eq(usersTable.verified, false)).limit(1).orderBy(asc(usersTable.name)); const result = await db.select({ name: usersTable.name, verified: usersTable.verified }).from(usersTable).orderBy( asc(usersTable.name), ); expect(result).toStrictEqual([ { name: 'Barry', verified: false }, { name: 'Carl', verified: false }, ]); }); test('cross join', async (ctx) => { const { db } = ctx.sqlite; await db .insert(usersTable) .values([ { name: 'John' }, { name: 'Jane' }, ]); await db .insert(citiesTable) .values([ { name: 'Seattle' }, { name: 'New York City' }, ]); const result = await db .select({ user: usersTable.name, city: citiesTable.name, }) .from(usersTable) .crossJoin(citiesTable) .orderBy(usersTable.name, citiesTable.name); expect(result).toStrictEqual([ { city: 'New York City', user: 'Jane' }, { city: 'Seattle', user: 'Jane' }, { city: 'New York City', user: 'John' }, { city: 'Seattle', user: 'John' }, ]); }); test('all types', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql` CREATE TABLE \`all_types\`( \`int\` integer, \`bool\` integer, \`time\` integer, \`time_ms\` integer, \`bigint\` blob, \`buffer\` blob, \`json\` blob, \`numeric\` numeric, \`numeric_num\` numeric, \`numeric_big\` numeric, \`real\` real, \`text\` text, \`json_text\` text ); `); await db.insert(allTypesTable).values({ int: 1, bool: true, bigint: 5044565289845416380n, buffer: Buffer.from([ 0x44, 0x65, 0x73, 0x70, 0x61, 0x69, 0x72, 0x20, 0x6F, 0x20, 0x64, 0x65, 0x73, 0x70, 0x61, 0x69, 0x72, 0x2E, 0x2E, 0x2E, ]), json: { str: 'strval', arr: ['str', 10], }, jsonText: { str: 'strvalb', arr: ['strb', 11], }, numeric: '475452353476', numericNum: 9007199254740991, numericBig: 5044565289845416380n, real: 1.048596, text: 'TEXT STRING', time: new Date(1741743161623), timeMs: new Date(1741743161623), }); const rawRes = await db.select().from(allTypesTable); expect(typeof rawRes[0]?.numericBig).toStrictEqual('bigint'); type ExpectedType = { int: number | null; bool: boolean | null; time: Date | null; timeMs: Date | null; bigint: bigint | null; buffer: Buffer | null; json: unknown; numeric: string | null; numericNum: number | null; numericBig: bigint | null; real: number | null; text: string | null; jsonText: unknown; }[]; const expectedRes: ExpectedType = [ { int: 1, bool: true, time: new Date('2025-03-12T01:32:41.000Z'), timeMs: new Date('2025-03-12T01:32:41.623Z'), bigint: 5044565289845416380n, buffer: Buffer.from([ 0x44, 0x65, 0x73, 0x70, 0x61, 0x69, 0x72, 0x20, 0x6F, 0x20, 0x64, 0x65, 0x73, 0x70, 0x61, 0x69, 0x72, 0x2E, 0x2E, 0x2E, ]), json: { str: 'strval', arr: ['str', 10] }, numeric: '475452353476', numericNum: 9007199254740991, numericBig: 5044565289845416380n, real: 1.048596, text: 'TEXT STRING', jsonText: { str: 'strvalb', arr: ['strb', 11] }, }, ]; expectTypeOf(rawRes).toEqualTypeOf(); expect(rawRes).toStrictEqual(expectedRes); }); }); test('table configs: unique third param', () => { const cities1Table = sqliteTable('cities1', { id: int('id').primaryKey(), name: text('name').notNull(), state: text('state'), }, (t) => ({ f: unique().on(t.name, t.state), f1: unique('custom').on(t.name, t.state), })); const tableConfig = getTableConfig(cities1Table); expect(tableConfig.uniqueConstraints).toHaveLength(2); expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); expect( tableConfig.uniqueConstraints[0]?.name, ).toEqual( uniqueKeyName(cities1Table, tableConfig.uniqueConstraints[0]?.columns?.map((column) => column.name) ?? []), ); expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom'); }); test('table configs: unique in column', () => { const cities1Table = sqliteTable('cities1', { id: int('id').primaryKey(), name: text('name').notNull().unique(), state: text('state').unique('custom'), field: text('field').unique(), }); const tableConfig = getTableConfig(cities1Table); const columnName = tableConfig.columns.find((it) => it.name === 'name'); expect(columnName?.isUnique).toBeTruthy(); expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); const columnState = tableConfig.columns.find((it) => it.name === 'state'); expect(columnState?.isUnique).toBeTruthy(); expect(columnState?.uniqueName).toBe('custom'); const columnField = tableConfig.columns.find((it) => it.name === 'field'); expect(columnField?.isUnique).toBeTruthy(); expect(columnField?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnField!.name])); }); test('limit 0', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() .from(usersTable) .limit(0); expect(users).toEqual([]); }); test('limit -1', async (ctx) => { const { db } = ctx.sqlite; await db.insert(usersTable).values({ name: 'John' }); const users = await db .select() .from(usersTable) .limit(-1); expect(users.length).toBeGreaterThan(0); }); test('update ... from', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`drop table if exists \`cities\``); await db.run(sql`drop table if exists \`users2\``); await db.run(sql` create table \`cities\` ( \`id\` integer primary key autoincrement, \`name\` text not null ) `); await db.run(sql` create table \`users2\` ( \`id\` integer primary key autoincrement, \`name\` text not null, \`city_id\` integer references \`cities\`(\`id\`) ) `); await db.insert(citiesTable).values([ { name: 'New York City' }, { name: 'Seattle' }, ]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, ]); const result = await db .update(users2Table) .set({ cityId: citiesTable.id, }) .from(citiesTable) .where(and(eq(citiesTable.name, 'Seattle'), eq(users2Table.name, 'John'))) .returning(); expect(result).toStrictEqual([{ id: 1, name: 'John', cityId: 2, }]); }); test('update ... from with alias', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`drop table if exists \`users2\``); await db.run(sql`drop table if exists \`cities\``); await db.run(sql` create table \`cities\` ( \`id\` integer primary key autoincrement, \`name\` text not null ) `); await db.run(sql` create table \`users2\` ( \`id\` integer primary key autoincrement, \`name\` text not null, \`city_id\` integer references \`cities\`(\`id\`) ) `); await db.insert(citiesTable).values([ { name: 'New York City' }, { name: 'Seattle' }, ]); await db.insert(users2Table).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, ]); const cities = alias(citiesTable, 'c'); const result = await db .update(users2Table) .set({ cityId: cities.id, }) .from(cities) .where(and(eq(cities.name, 'Seattle'), eq(users2Table.name, 'John'))) .returning(); expect(result).toStrictEqual([{ id: 1, name: 'John', cityId: 2, }]); await db.run(sql`drop table if exists \`users2\``); }); test('update ... from with join', async (ctx) => { const { db } = ctx.sqlite; const states = sqliteTable('states', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }); const cities = sqliteTable('cities', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), stateId: integer('state_id').references(() => states.id), }); const users = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), cityId: integer('city_id').notNull().references(() => cities.id), }); await db.run(sql`drop table if exists \`states\``); await db.run(sql`drop table if exists \`cities\``); await db.run(sql`drop table if exists \`users\``); await db.run(sql` create table \`states\` ( \`id\` integer primary key autoincrement, \`name\` text not null ) `); await db.run(sql` create table \`cities\` ( \`id\` integer primary key autoincrement, \`name\` text not null, \`state_id\` integer references \`states\`(\`id\`) ) `); await db.run(sql` create table \`users\` ( \`id\` integer primary key autoincrement, \`name\` text not null, \`city_id\` integer not null references \`cities\`(\`id\`) ) `); await db.insert(states).values([ { name: 'New York' }, { name: 'Washington' }, ]); await db.insert(cities).values([ { name: 'New York City', stateId: 1 }, { name: 'Seattle', stateId: 2 }, { name: 'London' }, ]); await db.insert(users).values([ { name: 'John', cityId: 1 }, { name: 'Jane', cityId: 2 }, { name: 'Jack', cityId: 3 }, ]); const result1 = await db .update(users) .set({ cityId: cities.id, }) .from(cities) .leftJoin(states, eq(cities.stateId, states.id)) .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) .returning(); const result2 = await db .update(users) .set({ cityId: cities.id, }) .from(cities) .leftJoin(states, eq(cities.stateId, states.id)) .where(and(eq(cities.name, 'London'), eq(users.name, 'Jack'))) .returning(); expect(result1).toStrictEqual([{ id: 1, name: 'John', cityId: 2, }]); expect(result2).toStrictEqual([{ id: 3, name: 'Jack', cityId: 3, }]); }); test('insert into ... select', async (ctx) => { const { db } = ctx.sqlite; const notifications = sqliteTable('notifications_insert_into', { id: integer('id').primaryKey({ autoIncrement: true }), sentAt: integer('sent_at', { mode: 'timestamp' }).notNull().default(sql`current_timestamp`), message: text('message').notNull(), }); const users = sqliteTable('users_insert_into', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }); const userNotications = sqliteTable('user_notifications_insert_into', { userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), notificationId: integer('notification_id').notNull().references(() => notifications.id, { onDelete: 'cascade' }), }, (t) => ({ pk: primaryKey({ columns: [t.userId, t.notificationId] }), })); await db.run(sql`drop table if exists notifications_insert_into`); await db.run(sql`drop table if exists users_insert_into`); await db.run(sql`drop table if exists user_notifications_insert_into`); await db.run(sql` create table notifications_insert_into ( id integer primary key autoincrement, sent_at integer not null default (current_timestamp), message text not null ) `); await db.run(sql` create table users_insert_into ( id integer primary key autoincrement, name text not null ) `); await db.run(sql` create table user_notifications_insert_into ( user_id integer references users_insert_into(id) on delete cascade, notification_id integer references notifications_insert_into(id) on delete cascade, primary key (user_id, notification_id) ) `); const newNotification = await db .insert(notifications) .values({ message: 'You are one of the 3 lucky winners!' }) .returning({ id: notifications.id }) .then((result) => result[0]); await db.insert(users).values([ { name: 'Alice' }, { name: 'Bob' }, { name: 'Charlie' }, { name: 'David' }, { name: 'Eve' }, ]); const sentNotifications = await db .insert(userNotications) .select( db .select({ userId: users.id, notificationId: sql`${newNotification!.id}`.as('notification_id'), }) .from(users) .where(inArray(users.name, ['Alice', 'Charlie', 'Eve'])) .orderBy(asc(users.id)), ) .returning(); expect(sentNotifications).toStrictEqual([ { userId: 1, notificationId: newNotification!.id }, { userId: 3, notificationId: newNotification!.id }, { userId: 5, notificationId: newNotification!.id }, ]); }); test('insert into ... select with keys in different order', async (ctx) => { const { db } = ctx.sqlite; const users1 = sqliteTable('users1', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }); const users2 = sqliteTable('users2', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }); await db.run(sql`drop table if exists users1`); await db.run(sql`drop table if exists users2`); await db.run(sql` create table users1 ( id integer primary key autoincrement, name text not null ) `); await db.run(sql` create table users2 ( id integer primary key autoincrement, name text not null ) `); await expect(async () => { db .insert(users1) .select( db .select({ name: users2.name, id: users2.id, }) .from(users2), ); }).rejects.toThrowError(); }); test('Object keys as column names', async (ctx) => { const { db } = ctx.sqlite; // Tests the following: // Column with optional config without providing a value // Column with optional config providing a value // Column without config const users = sqliteTable('users', { id: integer().primaryKey({ autoIncrement: true }), createdAt: integer({ mode: 'timestamp' }), name: text(), }); await db.run(sql`drop table if exists users`); await db.run( sql` create table users ( \`id\` integer primary key autoincrement, \`createdAt\` integer, \`name\` text ) `, ); await db.insert(users).values([ { createdAt: new Date(Date.now() - 2592000000), name: 'John' }, { createdAt: new Date(Date.now() - 86400000), name: 'Jane' }, ]); const result = await db .select({ id: users.id, name: users.name }) .from(users) .where(gt(users.createdAt, new Date(Date.now() - 2592000000))); expect(result).toEqual([ { id: 2, name: 'Jane' }, ]); await db.run(sql`drop table users`); }); test('sql operator as cte', async (ctx) => { const { db } = ctx.sqlite; const users = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), }); await db.run(sql`drop table if exists ${users}`); await db.run(sql`create table ${users} (id integer not null primary key autoincrement, name text not null)`); await db.insert(users).values([ { name: 'John' }, { name: 'Jane' }, ]); const sq1 = db.$with('sq', { userId: users.id, data: { name: users.name, }, }).as(sql`select * from ${users} where ${users.name} = 'John'`); const result1 = await db.with(sq1).select().from(sq1); const sq2 = db.$with('sq', { userId: users.id, data: { name: users.name, }, }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); const result2 = await db.with(sq2).select().from(sq1); expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); }); } ================================================ FILE: integration-tests/tests/sqlite/sqlite-proxy-batch.test.ts ================================================ /* eslint-disable drizzle-internal/require-entity-kind */ import type BetterSqlite3 from 'better-sqlite3'; import Database from 'better-sqlite3'; import { eq, relations, sql } from 'drizzle-orm'; import type { AnySQLiteColumn } from 'drizzle-orm/sqlite-core'; import { integer, primaryKey, sqliteTable, text } from 'drizzle-orm/sqlite-core'; import type { SqliteRemoteDatabase, SqliteRemoteResult } from 'drizzle-orm/sqlite-proxy'; import { drizzle as proxyDrizzle } from 'drizzle-orm/sqlite-proxy'; import { afterAll, beforeAll, beforeEach, expect, expectTypeOf, test } from 'vitest'; export const usersTable = sqliteTable('users', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), verified: integer('verified').notNull().default(0), invitedBy: integer('invited_by').references((): AnySQLiteColumn => usersTable.id), }); export const usersConfig = relations(usersTable, ({ one, many }) => ({ invitee: one(usersTable, { fields: [usersTable.invitedBy], references: [usersTable.id], }), usersToGroups: many(usersToGroupsTable), posts: many(postsTable), })); export const groupsTable = sqliteTable('groups', { id: integer('id').primaryKey({ autoIncrement: true }), name: text('name').notNull(), description: text('description'), }); export const groupsConfig = relations(groupsTable, ({ many }) => ({ usersToGroups: many(usersToGroupsTable), })); export const usersToGroupsTable = sqliteTable( 'users_to_groups', { id: integer('id').primaryKey({ autoIncrement: true }), userId: integer('user_id', { mode: 'number' }).notNull().references( () => usersTable.id, ), groupId: integer('group_id', { mode: 'number' }).notNull().references( () => groupsTable.id, ), }, (t) => ({ pk: primaryKey({ columns: [t.userId, t.groupId] }), }), ); export const usersToGroupsConfig = relations(usersToGroupsTable, ({ one }) => ({ group: one(groupsTable, { fields: [usersToGroupsTable.groupId], references: [groupsTable.id], }), user: one(usersTable, { fields: [usersToGroupsTable.userId], references: [usersTable.id], }), })); export const postsTable = sqliteTable('posts', { id: integer('id').primaryKey({ autoIncrement: true }), content: text('content').notNull(), ownerId: integer('owner_id', { mode: 'number' }).references( () => usersTable.id, ), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const postsConfig = relations(postsTable, ({ one, many }) => ({ author: one(usersTable, { fields: [postsTable.ownerId], references: [usersTable.id], }), comments: many(commentsTable), })); export const commentsTable = sqliteTable('comments', { id: integer('id').primaryKey({ autoIncrement: true }), content: text('content').notNull(), creator: integer('creator', { mode: 'number' }).references( () => usersTable.id, ), postId: integer('post_id', { mode: 'number' }).references(() => postsTable.id), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const commentsConfig = relations(commentsTable, ({ one, many }) => ({ post: one(postsTable, { fields: [commentsTable.postId], references: [postsTable.id], }), author: one(usersTable, { fields: [commentsTable.creator], references: [usersTable.id], }), likes: many(commentLikesTable), })); export const commentLikesTable = sqliteTable('comment_likes', { id: integer('id').primaryKey({ autoIncrement: true }), creator: integer('creator', { mode: 'number' }).references( () => usersTable.id, ), commentId: integer('comment_id', { mode: 'number' }).references( () => commentsTable.id, ), createdAt: integer('created_at', { mode: 'timestamp_ms' }) .notNull().default(sql`current_timestamp`), }); export const commentLikesConfig = relations(commentLikesTable, ({ one }) => ({ comment: one(commentsTable, { fields: [commentLikesTable.commentId], references: [commentsTable.id], }), author: one(usersTable, { fields: [commentLikesTable.creator], references: [usersTable.id], }), })); const schema = { usersTable, postsTable, commentsTable, usersToGroupsTable, groupsTable, commentLikesConfig, commentsConfig, postsConfig, usersToGroupsConfig, groupsConfig, usersConfig, }; class ServerSimulator { constructor(private db: BetterSqlite3.Database) {} async batch(queries: { sql: string; params: any[]; method: string }[]) { const results: { rows: any }[] = []; for (const query of queries) { const { method, sql, params } = query; if (method === 'run') { try { const result = this.db.prepare(sql).run(params); results.push(result as any); } catch (e: any) { return { error: e.message }; } } else if (method === 'all' || method === 'values') { try { const rows = this.db.prepare(sql).raw().all(params); results.push({ rows: rows }); } catch (e: any) { return { error: e.message }; } } else if (method === 'get') { try { const row = this.db.prepare(sql).raw().get(params); results.push({ rows: row }); } catch (e: any) { return { error: e.message }; } } else { return { error: 'Unknown method value' }; } } return results; } async query(sql: string, params: any[], method: string) { if (method === 'run') { try { const result = this.db.prepare(sql).run(params); return { data: result as any }; } catch (e: any) { return { error: e.message }; } } else if (method === 'all' || method === 'values') { try { const rows = this.db.prepare(sql).raw().all(params); return { data: rows }; } catch (e: any) { return { error: e.message }; } } else if (method === 'get') { try { const row = this.db.prepare(sql).raw().get(params); return { data: row }; } catch (e: any) { return { error: e.message }; } } else { return { error: 'Unknown method value' }; } } migrations(queries: string[]) { this.db.exec('BEGIN'); try { for (const query of queries) { this.db.exec(query); } this.db.exec('COMMIT'); } catch { this.db.exec('ROLLBACK'); } return {}; } } let db: SqliteRemoteDatabase; let client: Database.Database; let serverSimulator: ServerSimulator; beforeAll(async () => { const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; client = new Database(dbPath); serverSimulator = new ServerSimulator(client); db = proxyDrizzle(async (sql, params, method) => { try { // console.log(sql, params, method); const rows = await serverSimulator.query(sql, params, method); // console.log('rowsTest', rows); if (rows.error !== undefined) { throw new Error(rows.error); } return { rows: rows.data }; } catch (e: any) { console.error('Error from sqlite proxy server:', e.response.data); throw e; } }, async (queries) => { try { const result = await serverSimulator.batch(queries); if ((result as any).error !== undefined) { throw new Error((result as any).error); } return result as { rows: any }[]; } catch (e: any) { console.error('Error from sqlite proxy server:', e); throw e; } }, { schema }); }); beforeEach(async () => { await db.run(sql`drop table if exists \`groups\``); await db.run(sql`drop table if exists \`users\``); await db.run(sql`drop table if exists \`users_to_groups\``); await db.run(sql`drop table if exists \`posts\``); await db.run(sql`drop table if exists \`comments\``); await db.run(sql`drop table if exists \`comment_likes\``); await db.run( sql` CREATE TABLE \`users\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`verified\` integer DEFAULT 0 NOT NULL, \`invited_by\` integer ); `, ); await db.run( sql` CREATE TABLE \`groups\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`name\` text NOT NULL, \`description\` text ); `, ); await db.run( sql` CREATE TABLE \`users_to_groups\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`user_id\` integer NOT NULL, \`group_id\` integer NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`posts\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`content\` text NOT NULL, \`owner_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`comments\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`content\` text NOT NULL, \`creator\` integer, \`post_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); await db.run( sql` CREATE TABLE \`comment_likes\` ( \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL, \`creator\` integer, \`comment_id\` integer, \`created_at\` integer DEFAULT current_timestamp NOT NULL ); `, ); }); afterAll(async () => { await db.run(sql`drop table if exists \`groups\``); await db.run(sql`drop table if exists \`users\``); await db.run(sql`drop table if exists \`users_to_groups\``); await db.run(sql`drop table if exists \`posts\``); await db.run(sql`drop table if exists \`comments\``); await db.run(sql`drop table if exists \`comment_likes\``); client.close(); }); test('findMany + findOne api example', async () => { const user = await db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }); const insertRes = await db.insert(usersTable).values({ id: 2, name: 'Dan' }); const manyUsers = await db.query.usersTable.findMany({}); const oneUser = await db.query.usersTable.findFirst({}); expectTypeOf(user).toEqualTypeOf< { id: number; }[] >; expectTypeOf(insertRes).toEqualTypeOf; expectTypeOf(manyUsers).toEqualTypeOf<{ id: number; name: string; verified: number; invitedBy: number | null; }[]>; expectTypeOf(oneUser).toEqualTypeOf< { id: number; name: string; verified: number; invitedBy: number | null; } | undefined >; expect(user).toEqual([{ id: 1, }]); expect(insertRes).toEqual({ rows: { changes: 1, lastInsertRowid: 2 } }); expect(manyUsers).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(oneUser).toEqual( { id: 1, name: 'John', verified: 0, invitedBy: null }, ); }); test('batch api example', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id, invitedBy: usersTable.invitedBy, }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.select().from(usersTable), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; invitedBy: number | null; }[], SqliteRemoteResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(3); expect(batchResponse[0]).toEqual([{ id: 1, invitedBy: null, }]); expect(batchResponse[1]).toEqual({ changes: 1, lastInsertRowid: 2 }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); }); // batch api only relational many test('insert + findMany', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], SqliteRemoteResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(3); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ changes: 1, lastInsertRowid: 2 }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); }); // batch api relational many + one test('insert + findMany + findFirst', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), db.query.usersTable.findFirst({}), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], SqliteRemoteResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; } | undefined, ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ changes: 1, lastInsertRowid: 2 }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toEqual( { id: 1, name: 'John', verified: 0, invitedBy: null }, ); }); test.skip('insert + db.all + db.get + db.values + db.run', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.run(sql`insert into users (id, name) values (2, 'Dan')`), db.all(sql`select * from users`), db.values(sql`select * from users`), db.get(sql`select * from users`), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], SqliteRemoteResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], unknown[][], { id: number; name: string; verified: number; invitedBy: number | null; }, ]>(); expect(batchResponse.length).eq(5); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ changes: 1, lastInsertRowid: 2 }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invited_by: null }, { id: 2, name: 'Dan', verified: 0, invited_by: null }, ]); expect(batchResponse[3].map((row) => Array.prototype.slice.call(row))).toEqual([ [1, 'John', 0, null], [2, 'Dan', 0, null], ]); expect(batchResponse[4]).toEqual( { id: 1, name: 'John', verified: 0, invited_by: null }, ); }); // batch api combined rqb + raw call test('insert + findManyWith + db.all', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.query.usersTable.findMany({}), db.all(sql`select * from users`), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], SqliteRemoteResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ changes: 1, lastInsertRowid: 2 }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'John', verified: 0, invitedBy: null }, { id: 2, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toEqual([ [1, 'John', 0, null], [2, 'Dan', 0, null], // { id: 1, name: 'John', verified: 0, invited_by: null }, // { id: 2, name: 'Dan', verified: 0, invited_by: null }, ]); }); // batch api for insert + update + select test('insert + update + select + select partial', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.update(usersTable).set({ name: 'Dan' }).where(eq(usersTable.id, 1)), db.query.usersTable.findMany({}), db.select().from(usersTable).where(eq(usersTable.id, 1)), db.select({ id: usersTable.id, invitedBy: usersTable.invitedBy }).from(usersTable), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], SqliteRemoteResult, { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; name: string; verified: number; invitedBy: number | null; }[], { id: number; invitedBy: number | null; }[], ]>(); expect(batchResponse.length).eq(5); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ changes: 1, lastInsertRowid: 1 }); expect(batchResponse[2]).toEqual([ { id: 1, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[3]).toEqual([ { id: 1, name: 'Dan', verified: 0, invitedBy: null }, ]); expect(batchResponse[4]).toEqual([ { id: 1, invitedBy: null }, ]); }); // batch api for insert + delete + select test('insert + delete + select + select partial', async () => { const batchResponse = await db.batch([ db.insert(usersTable).values({ id: 1, name: 'John' }).returning({ id: usersTable.id }), db.insert(usersTable).values({ id: 2, name: 'Dan' }), db.delete(usersTable).where(eq(usersTable.id, 1)).returning({ id: usersTable.id, invitedBy: usersTable.invitedBy }), db.query.usersTable.findFirst({ columns: { id: true, invitedBy: true, }, }), ]); expectTypeOf(batchResponse).toEqualTypeOf<[ { id: number; }[], SqliteRemoteResult, { id: number; invitedBy: number | null; }[], { id: number; invitedBy: number | null; } | undefined, ]>(); expect(batchResponse.length).eq(4); expect(batchResponse[0]).toEqual([{ id: 1, }]); expect(batchResponse[1]).toEqual({ changes: 1, lastInsertRowid: 2 }); expect(batchResponse[2]).toEqual([ { id: 1, invitedBy: null }, ]); expect(batchResponse[3]).toEqual( { id: 2, invitedBy: null }, ); }); ================================================ FILE: integration-tests/tests/sqlite/sqlite-proxy.test.ts ================================================ /* eslint-disable drizzle-internal/require-entity-kind */ import type BetterSqlite3 from 'better-sqlite3'; import Database from 'better-sqlite3'; import { Name, sql } from 'drizzle-orm'; import type { SqliteRemoteDatabase } from 'drizzle-orm/sqlite-proxy'; import { drizzle as proxyDrizzle } from 'drizzle-orm/sqlite-proxy'; import { afterAll, beforeAll, beforeEach, expect, test } from 'vitest'; import { skipTests } from '~/common'; import { tests, usersTable } from './sqlite-common'; import { TestCache, TestGlobalCache, tests as cacheTests } from './sqlite-common-cache'; class ServerSimulator { constructor(private db: BetterSqlite3.Database) {} async query(sql: string, params: any[], method: string) { if (method === 'run') { try { const result = this.db.prepare(sql).run(params); return { data: result as any }; } catch (e: any) { return { error: e.message }; } } else if (method === 'all' || method === 'values') { try { const rows = this.db.prepare(sql).raw().all(params); return { data: rows }; } catch (e: any) { return { error: e.message }; } } else if (method === 'get') { try { const row = this.db.prepare(sql).raw().get(params); return { data: row }; } catch (e: any) { return { error: e.message }; } } else { return { error: 'Unknown method value' }; } } migrations(queries: string[]) { this.db.exec('BEGIN'); try { for (const query of queries) { this.db.exec(query); } this.db.exec('COMMIT'); } catch { this.db.exec('ROLLBACK'); } return {}; } } let db: SqliteRemoteDatabase; let dbGlobalCached: SqliteRemoteDatabase; let cachedDb: SqliteRemoteDatabase; let client: Database.Database; let serverSimulator: ServerSimulator; beforeAll(async () => { const dbPath = process.env['SQLITE_DB_PATH'] ?? ':memory:'; client = new Database(dbPath); serverSimulator = new ServerSimulator(client); const callback = async (sql: string, params: any[], method: string) => { try { const rows = await serverSimulator.query(sql, params, method); if (rows.error !== undefined) { throw new Error(rows.error); } return { rows: rows.data }; } catch (e: any) { console.error('Error from sqlite proxy server:', e.response?.data ?? e.message); throw e; } }; db = proxyDrizzle(callback); cachedDb = proxyDrizzle(callback, { cache: new TestCache() }); dbGlobalCached = proxyDrizzle(callback, { cache: new TestGlobalCache() }); }); beforeEach((ctx) => { ctx.sqlite = { db, }; ctx.cachedSqlite = { db: cachedDb, dbGlobalCached, }; }); afterAll(async () => { client?.close(); }); skipTests([ // Different driver respond 'insert via db.get w/ query builder', 'insert via db.run + select via db.get', 'insert via db.get', 'insert via db.run + select via db.all', ]); cacheTests(); tests(); beforeEach(async () => { await db.run(sql`drop table if exists ${usersTable}`); await db.run(sql` create table ${usersTable} ( id integer primary key, name text not null, verified integer not null default 0, json blob, created_at integer not null default (strftime('%s', 'now')) ) `); }); test('insert via db.get w/ query builder', async () => { const inserted = await db.get>( db.insert(usersTable).values({ name: 'John' }).returning({ id: usersTable.id, name: usersTable.name }), ); expect(inserted).toEqual([1, 'John']); }); test('insert via db.run + select via db.get', async () => { await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.get<{ id: number; name: string }>( sql`select ${usersTable.id}, ${usersTable.name} from ${usersTable}`, ); expect(result).toEqual([1, 'John']); }); test('insert via db.get', async () => { const inserted = await db.get<{ id: number; name: string }>( sql`insert into ${usersTable} (${new Name( usersTable.name.name, )}) values (${'John'}) returning ${usersTable.id}, ${usersTable.name}`, ); expect(inserted).toEqual([1, 'John']); }); test('insert via db.run + select via db.all', async (ctx) => { const { db } = ctx.sqlite; await db.run(sql`insert into ${usersTable} (${new Name(usersTable.name.name)}) values (${'John'})`); const result = await db.all<{ id: number; name: string }>(sql`select id, name from "users"`); expect(result).toEqual([[1, 'John']]); }); ================================================ FILE: integration-tests/tests/utils/is-config.test.ts ================================================ import 'dotenv/config'; import { PGlite as pglite } from '@electric-sql/pglite'; import { createClient as libsql } from '@libsql/client'; import { Client as neonClient, neon, neonConfig, Pool as neonPool } from '@neondatabase/serverless'; import { connect as planetscale } from '@planetscale/database'; import { connect as tidb } from '@tidbcloud/serverless'; import { createClient as vcClient, sql as vcSql } from '@vercel/postgres'; import betterSqlite3 from 'better-sqlite3'; import { type DrizzleConfig, isConfig } from 'drizzle-orm'; import { createConnection as ms2Connection, createPool as ms2Pool } from 'mysql2'; import { createConnection as ms2pConnection, createPool as ms2pPool } from 'mysql2/promise'; import pg from 'pg'; import postgres from 'postgres'; import { describe, expect } from 'vitest'; import ws from 'ws'; neonConfig.webSocketConstructor = ws; if ( !process.env['PG_CONNECTION_STRING'] || !process.env['MYSQL_CONNECTION_STRING'] || !process.env['PLANETSCALE_CONNECTION_STRING'] || !process.env['TIDB_CONNECTION_STRING'] || !process.env['NEON_CONNECTION_STRING'] // todo get back after we will have a pool for vercel // || !process.env['VERCEL_CONNECTION_STRING'] ) { throw new Error('process.env is missing some connection strings!'); } // process.env['POSTGRES_URL'] = process.env['VERCEL_CONNECTION_STRING']; describe('Objects', (it) => { it('Passes configs', () => { expect(isConfig({} as DrizzleConfig)).toEqual(true); expect( isConfig({ casing: 'camelCase', } as DrizzleConfig), ).toEqual(true); expect( isConfig({ logger: true, } as DrizzleConfig), ).toEqual(true); expect( isConfig({ logger: { logQuery: () => {}, }, } as DrizzleConfig), ).toEqual(true); expect( isConfig({ schema: { any: true, }, } as DrizzleConfig), ).toEqual(true); expect( isConfig({ casing: 'camelCase', logger: true, schema: { any: true, }, } as DrizzleConfig), ).toEqual(true); expect( isConfig({ casing: 'camelCase', trash: true, } as DrizzleConfig), ).toEqual(true); }); it('Rejects non-configs', () => { expect(isConfig('')).toEqual(false); expect(isConfig('data')).toEqual(false); expect(isConfig(true)).toEqual(false); expect(isConfig(false)).toEqual(false); expect(isConfig(null)).toEqual(false); expect(isConfig(undefined)).toEqual(false); expect(isConfig(5)).toEqual(false); expect(isConfig(BigInt(5))).toEqual(false); expect(isConfig(new Date())).toEqual(false); expect( isConfig({ trash: true, } as DrizzleConfig), ).toEqual(false); }); }); describe('Rejects drivers', (it) => { it('libsql', () => { const cl = libsql({ url: ':memory:', }); expect(isConfig(cl)).toEqual(false); }); it('better-sqlite3', () => { const cl = new betterSqlite3(':memory:'); expect(isConfig(cl)).toEqual(false); }); it('pglite', () => { const cl = new pglite('memory://'); expect(isConfig(cl)).toEqual(false); }); it('node-postgres:Pool', () => { const cl = new pg.Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); expect(isConfig(cl)).toEqual(false); }); it('node-postgres:Client', async () => { const cl = new pg.Client({ connectionString: process.env['PG_CONNECTION_STRING'], }); const res = isConfig(cl); await cl.end(); expect(res).toEqual(false); }); it('node-postgres:PoolClient', async () => { const cl = new pg.Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const con = await cl.connect(); const res = isConfig(con); con.release(); expect(res).toEqual(false); }); it('postgres-js', () => { const cl = postgres(process.env['PG_CONNECTION_STRING']!); expect(isConfig(cl)).toEqual(false); }); it('vercel:sql', () => { expect(isConfig(vcSql)).toEqual(false); }); // it('vercel:Pool', () => { // const cl = vcPool({ // connectionString: process.env['VERCEL_CONNECTION_STRING'], // }); // expect(isConfig(cl)).toEqual(false); // }); it('vercel:Client', async () => { const cl = vcClient({ connectionString: process.env['NEON_CONNECTION_STRING']?.replace('-pooler', ''), }); const res = isConfig(cl); expect(res).toEqual(false); }); // it('vercel:PoolClient', async () => { // const cl = vcPool({ // connectionString: process.env['VERCEL_CONNECTION_STRING'], // }); // const con = await cl.connect(); // const res = isConfig(con); // con.release(); // expect(res).toEqual(false); // }); it('neon-serverless:Pool', async () => { const cl = new neonPool({ connectionString: process.env['NEON_CONNECTION_STRING']!, }); expect(isConfig(cl)).toEqual(false); }); it('neon-serverless:Client', async () => { const cl = new neonClient({ connectionString: process.env['NEON_CONNECTION_STRING']!, }); const res = isConfig(cl); await cl.end(); expect(res).toEqual(false); }); it('neon-serverless:PoolClient', async () => { const cl = new neonPool({ connectionString: process.env['NEON_CONNECTION_STRING']!, }); const con = await cl.connect(); const res = isConfig(con); con.release(); expect(res).toEqual(false); }); it('neon-http', async () => { const cl = neon(process.env['NEON_CONNECTION_STRING']!); expect(isConfig(cl)).toEqual(false); }); it('planetscale', async () => { const cl = planetscale({ url: process.env['PLANETSCALE_CONNECTION_STRING'], }); expect(isConfig(cl)).toEqual(false); }); it('mysql2:Pool', async () => { const cl = ms2Pool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); expect(isConfig(cl)).toEqual(false); }); it('mysql2:Connection', async () => { const cl = ms2Connection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); expect(isConfig(cl)).toEqual(false); }); it('mysql2/promise:Pool', async () => { const cl = await ms2pPool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const res = isConfig(cl); await cl.end(); expect(res).toEqual(false); }); it('mysql2/promise:Connection', async () => { const cl = await ms2pConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const res = isConfig(cl); await cl.end(); expect(res).toEqual(false); }); it('tidb', async () => { const cl = tidb({ url: process.env['TIDB_CONNECTION_STRING'], }); expect(isConfig(cl)).toEqual(false); }); }); describe('Accepts drivers in .client', (it) => { it('libsql', () => { const cl = libsql({ url: ':memory:', }); expect(isConfig({ client: cl })).toEqual(true); }); it('better-sqlite3', () => { const cl = new betterSqlite3(':memory:'); expect(isConfig({ client: cl })).toEqual(true); }); it('pglite', () => { const cl = new pglite('memory://'); expect(isConfig({ client: cl })).toEqual(true); }); it('node-postgres:Pool', () => { const cl = new pg.Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); expect(isConfig({ client: cl })).toEqual(true); }); it('node-postgres:Client', async () => { const cl = new pg.Client({ connectionString: process.env['PG_CONNECTION_STRING'], }); const res = isConfig({ client: cl }); await cl.end(); expect(res).toEqual(true); }); it('node-postgres:PoolClient', async () => { const cl = new pg.Pool({ connectionString: process.env['PG_CONNECTION_STRING'], }); const con = await cl.connect(); const res = isConfig({ client: con }); con.release(); expect(res).toEqual(true); }); it('postgres-js', () => { const cl = postgres(process.env['PG_CONNECTION_STRING']!); expect(isConfig({ client: cl })).toEqual(true); }); it('vercel:sql', () => { expect(isConfig({ client: vcSql })).toEqual(true); }); // it('vercel:Pool', () => { // const cl = vcPool({ // connectionString: process.env['VERCEL_CONNECTION_STRING'], // }); // expect(isConfig({client:cl})).toEqual(true); // }); it('vercel:Client', async () => { const cl = vcClient({ connectionString: process.env['NEON_CONNECTION_STRING']?.replace('-pooler', ''), }); const res = isConfig({ client: cl }); expect(res).toEqual(true); }); // it('vercel:PoolClient', async () => { // const cl = vcPool({ // connectionString: process.env['VERCEL_CONNECTION_STRING'], // }); // const con = await cl.connect(); // const res = isConfig({ client: con }); // con.release(); // expect(res).toEqual(true); // }); it('neon-serverless:Pool', async () => { const cl = new neonPool({ connectionString: process.env['NEON_CONNECTION_STRING']!, }); expect(isConfig({ client: cl })).toEqual(true); }); it('neon-serverless:Client', async () => { const cl = new neonClient({ connectionString: process.env['NEON_CONNECTION_STRING']!, }); const res = isConfig({ client: cl }); await cl.end(); expect(res).toEqual(true); }); it('neon-serverless:PoolClient', async () => { const cl = new neonPool({ connectionString: process.env['NEON_CONNECTION_STRING']!, }); const con = await cl.connect(); const res = isConfig({ client: con }); con.release(); expect(res).toEqual(true); }); it('neon-http', async () => { const cl = neon(process.env['NEON_CONNECTION_STRING']!); expect(isConfig({ client: cl })).toEqual(true); }); it('planetscale', async () => { const cl = planetscale({ url: process.env['PLANETSCALE_CONNECTION_STRING'], }); expect(isConfig({ client: cl })).toEqual(true); }); it('mysql2:Pool', async () => { const cl = ms2Pool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); expect(isConfig({ client: cl })).toEqual(true); }); it('mysql2:Connection', async () => { const cl = ms2Connection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); expect(isConfig({ client: cl })).toEqual(true); }); it('mysql2/promise:Pool', async () => { const cl = await ms2pPool({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const res = isConfig({ client: cl }); await cl.end(); expect(res).toEqual(true); }); it('mysql2/promise:Connection', async () => { const cl = await ms2pConnection({ uri: process.env['MYSQL_CONNECTION_STRING'], }); const res = isConfig({ client: cl }); await cl.end(); expect(res).toEqual(true); }); it('tidb', async () => { const cl = tidb({ url: process.env['TIDB_CONNECTION_STRING'], }); expect(isConfig({ client: cl })).toEqual(true); }); }); ================================================ FILE: integration-tests/tests/utils.ts ================================================ // shut up eslint you cannot possibly comprehend what's happening here // eslint-disable-next-line @typescript-eslint/no-empty-function, @typescript-eslint/no-unused-vars export function Expect() {} export type Equal = (() => T extends X ? 1 : 2) extends (() => T extends Y ? 1 : 2) ? true : false; export function toLocalDate(date: Date) { const localTime = new Date(date.getTime() - date.getTimezoneOffset() * 60000); localTime.setUTCHours(0); return localTime; } export const randomString = () => Array.from({ length: 10 }, () => 'abcdefghijklmnopqrstuvwxyz'[Math.floor(Math.random() * 26)]).join(''); ================================================ FILE: integration-tests/tests/version.test.ts ================================================ import 'dotenv/config'; import * as version from 'drizzle-orm/version'; import { expect, test } from 'vitest'; import { z } from 'zod'; test('shape', () => { const shape = z.object({ compatibilityVersion: z.number(), npmVersion: z.string(), }); expect(() => shape.parse(version)).not.toThrowError(); }); ================================================ FILE: integration-tests/tests/xata/xata.ts ================================================ // Generated by Xata Codegen 0.29.3. Please do not edit. import { buildClient } from '@xata.io/client'; import type { BaseClientOptions, SchemaInference } from '@xata.io/client'; const tables = [] as const; export type SchemaTables = typeof tables; export type InferredTypes = SchemaInference; export type DatabaseSchema = {}; const DatabaseClient = buildClient(); const defaultOptions = { databaseURL: 'https://Andrii-Sherman-s-workspace-2r5ujp.us-east-1.xata.sh/db/integration-tests', }; // eslint-disable-next-line drizzle-internal/require-entity-kind export class XataClient extends DatabaseClient { constructor(options?: BaseClientOptions) { super({ ...defaultOptions, ...options }, tables); } } let instance: XataClient | undefined; export const getXataClient = () => { if (instance) return instance; instance = new XataClient(); return instance; }; ================================================ FILE: integration-tests/tsconfig.json ================================================ { "extends": "../tsconfig.json", "compilerOptions": { "checkJs": false, "noEmit": true, "paths": { "~/*": ["./tests/*"] } }, "include": ["tests", "type-tests"], "exclude": ["**/playground", "**/.sst", "tests/prisma/*/client/**/*.js"] } ================================================ FILE: integration-tests/type-tests/join-nodenext/gel.ts ================================================ import { eq } from 'drizzle-orm'; import { drizzle } from 'drizzle-orm/gel'; import { gelTable, text, uuid } from 'drizzle-orm/gel-core'; import { expectTypeOf } from 'vitest'; const account = gelTable('accounts', { id: uuid('id').primaryKey().notNull(), userId: uuid('user_id') .notNull() .references(() => users.id), meta: text().notNull(), }); const users = gelTable('users', { id: uuid('id').primaryKey(), name: text('name').notNull(), username: text('username').notNull().unique(), }); const db = drizzle.mock(); (async () => { const res = await db.select() .from(users) .innerJoin(account, eq(users.id, account.id)); expectTypeOf(res).toEqualTypeOf<{ accounts: { id: string; userId: string; meta: string; }; users: { id: string; name: string; username: string; }; }[]>(); }); ================================================ FILE: integration-tests/type-tests/join-nodenext/mysql.ts ================================================ import { eq } from 'drizzle-orm'; import { mysqlTable, text } from 'drizzle-orm/mysql-core'; import { drizzle } from 'drizzle-orm/mysql2'; import { expectTypeOf } from 'vitest'; const account = mysqlTable('accounts', { id: text('id').primaryKey().notNull(), userId: text('user_id') .notNull() .references(() => users.id), meta: text().notNull(), }); const users = mysqlTable('users', { id: text('id').primaryKey(), name: text('name').notNull(), username: text('username').notNull().unique(), }); const db = drizzle.mock(); (async () => { const res = await db.select() .from(users) .innerJoin(account, eq(users.id, account.id)); expectTypeOf(res).toEqualTypeOf<{ accounts: { id: string; userId: string; meta: string; }; users: { id: string; name: string; username: string; }; }[]>(); }); ================================================ FILE: integration-tests/type-tests/join-nodenext/package.json ================================================ { "type": "commonjs" } ================================================ FILE: integration-tests/type-tests/join-nodenext/pg.ts ================================================ import { eq } from 'drizzle-orm'; import { pgTable, text, uuid } from 'drizzle-orm/pg-core'; import { drizzle } from 'drizzle-orm/pglite'; import { expectTypeOf } from 'vitest'; const account = pgTable('accounts', { id: uuid('id').primaryKey().notNull(), userId: uuid('user_id') .notNull() .references(() => users.id), meta: text().notNull(), }); const users = pgTable('users', { id: uuid('id').primaryKey(), name: text('name').notNull(), username: text('username').notNull().unique(), }); const db = drizzle.mock(); (async () => { const res = await db.select() .from(users) .innerJoin(account, eq(users.id, account.id)); expectTypeOf(res).toEqualTypeOf<{ accounts: { id: string; userId: string; meta: string; }; users: { id: string; name: string; username: string; }; }[]>(); }); ================================================ FILE: integration-tests/type-tests/join-nodenext/singlestore.ts ================================================ import { eq } from 'drizzle-orm'; import { drizzle } from 'drizzle-orm/singlestore'; import { singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import { expectTypeOf } from 'vitest'; const account = singlestoreTable('accounts', { id: text('id').primaryKey().notNull(), userId: text('user_id') .notNull(), meta: text().notNull(), }); const users = singlestoreTable('users', { id: text('id').primaryKey(), name: text('name').notNull(), username: text('username').notNull().unique(), }); const db = drizzle.mock(); (async () => { const res = await db.select() .from(users) .innerJoin(account, eq(users.id, account.id)); expectTypeOf(res).toEqualTypeOf<{ accounts: { id: string; userId: string; meta: string; }; users: { id: string; name: string; username: string; }; }[]>(); }); ================================================ FILE: integration-tests/type-tests/join-nodenext/sqlite.ts ================================================ import { eq } from 'drizzle-orm'; import { drizzle } from 'drizzle-orm/libsql'; import { sqliteTable, text } from 'drizzle-orm/sqlite-core'; import { expectTypeOf } from 'vitest'; const account = sqliteTable('accounts', { id: text('id').primaryKey().notNull(), userId: text('user_id') .notNull() .references(() => users.id), meta: text().notNull(), }); const users = sqliteTable('users', { id: text('id').primaryKey(), name: text('name').notNull(), username: text('username').notNull().unique(), }); const db = drizzle.mock(); (async () => { const res = await db.select() .from(users) .innerJoin(account, eq(users.id, account.id)); expectTypeOf(res).toEqualTypeOf<{ accounts: { id: string; userId: string; meta: string; }; users: { id: string; name: string; username: string; }; }[]>(); }); ================================================ FILE: integration-tests/type-tests/join-nodenext/tsconfig.json ================================================ { "compilerOptions": { "useDefineForClassFields": true, "module": "nodenext", "target": "es2022", "moduleResolution": "node16", "skipLibCheck": true, "allowImportingTsExtensions": true, "moduleDetection": "force", "noEmit": true, "strict": true, "noUnusedLocals": true, "noUnusedParameters": true, "noFallthroughCasesInSwitch": true, "noUncheckedSideEffectImports": true }, "noEmit": true, "include": ["./*.ts"] } ================================================ FILE: integration-tests/vitest-ci.config.ts ================================================ import 'dotenv/config'; import tsconfigPaths from 'vite-tsconfig-paths'; import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { typecheck: { tsconfig: 'tsconfig.json', }, testTimeout: 100000, hookTimeout: 200000, isolate: true, poolOptions: { threads: { singleThread: true, }, }, maxWorkers: 1, fileParallelism: false, }, plugins: [tsconfigPaths()], }); ================================================ FILE: integration-tests/vitest.config.ts ================================================ import 'dotenv/config'; import tsconfigPaths from 'vite-tsconfig-paths'; import { defineConfig } from 'vitest/config'; export default defineConfig({ test: { include: [ 'tests/seeder/**/*.test.ts', 'tests/extensions/postgis/**/*', 'tests/relational/**/*.test.ts', 'tests/pg/**/*.test.ts', 'tests/mysql/**/*.test.ts', 'tests/singlestore/**/*.test.ts', 'tests/sqlite/**/*.test.ts', 'tests/replicas/**/*', 'tests/imports/**/*', 'tests/extensions/vectors/**/*', 'tests/version.test.ts', 'tests/pg/node-postgres.test.ts', 'tests/utils/is-config.test.ts', 'js-tests/driver-init/commonjs/*.test.cjs', 'js-tests/driver-init/module/*.test.mjs', 'tests/gel/**/*.test.ts', ], exclude: [ ...(process.env.SKIP_EXTERNAL_DB_TESTS ? [ 'tests/relational/mysql.planetscale.test.ts', 'tests/pg/neon-serverless.test.ts', 'tests/mysql/tidb-serverless.test.ts', 'tests/mysql/mysql-planetscale.test.ts', 'tests/sqlite/libsql.test.ts', 'tests/sqlite/libsql-batch.test.ts', 'tests/pg/neon-http.test.ts', 'tests/pg/neon-http-batch.test.ts', 'tests/utils/is-config.test.ts', // Uses external DBs in some cases 'js-tests/driver-init/commonjs/neon-http.test.cjs', 'js-tests/driver-init/commonjs/neon-ws.test.cjs', 'js-tests/driver-init/commonjs/planetscale.test.cjs', 'js-tests/driver-init/commonjs/tidb.test.cjs', 'js-tests/driver-init/commonjs/vercel.test.cjs', 'js-tests/driver-init/module/neon-http.test.mjs', 'js-tests/driver-init/module/neon-ws.test.mjs', 'js-tests/driver-init/module/planetscale.test.mjs', 'js-tests/driver-init/module/tidb.test.mjs', 'js-tests/driver-init/module/vercel.test.mjs', ] : []), 'tests/pg/awsdatapi.test.ts', 'tests/awsdatapi.alltypes.test.ts', 'tests/pg/vercel-pg.test.ts', 'tests/relational/vercel.test.ts', // Have a strange "invalid SQL: ERROR: must be owner of schema public" error. Will need to check with xata team 'tests/pg/xata-http.test.ts', 'tests/pg/neon-http-batch.ts', // todo: remove 'js-tests/driver-init/module/vercel.test.mjs', 'js-tests/driver-init/commonjs/vercel.test.cjs', // move back after decide on speed 'tests/sqlite/libsql-ws.test.ts', 'tests/sqlite/libsql-http.test.ts', 'tests/mysql/tidb-serverless.test.ts', // waiting for json_array from singlestore team 'tests/relational/singlestore.test.ts', 'js-tests/driver-init/module/planetscale.test.mjs', 'js-tests/driver-init/module/planetscale.test.cjs', 'js-tests/driver-init/commonjs/planetscale.test.cjs', ], typecheck: { tsconfig: 'tsconfig.json', }, testTimeout: 100000, hookTimeout: 200000, isolate: true, poolOptions: { threads: { singleThread: true, }, }, maxWorkers: 1, fileParallelism: false, }, plugins: [tsconfigPaths()], }); ================================================ FILE: package.json ================================================ { "name": "drizzle-root", "private": true, "scripts": { "build:orm": "turbo run build --filter drizzle-orm --color", "build": "turbo run build test:types //#lint --color", "b": "pnpm build", "pack": "turbo run pack --color", "test": "turbo run test --color", "t": "pnpm test", "test:types": "turbo run test:types --color", "lint": "dprint check --list-different", "lint:fix": "dprint fmt" }, "devDependencies": { "@arethetypeswrong/cli": "0.15.3", "@trivago/prettier-plugin-sort-imports": "^5.2.2", "@typescript-eslint/eslint-plugin": "^6.7.3", "@typescript-eslint/experimental-utils": "^5.62.0", "@typescript-eslint/parser": "^6.7.3", "bun-types": "^1.2.0", "concurrently": "^8.2.1", "dprint": "^0.46.2", "drizzle-kit": "^0.19.13", "drizzle-orm": "workspace:./drizzle-orm/dist", "drizzle-orm-old": "npm:drizzle-orm@^0.27.2", "eslint": "^8.50.0", "eslint-plugin-drizzle-internal": "link:eslint/eslint-plugin-drizzle-internal", "eslint-plugin-import": "^2.28.1", "eslint-plugin-no-instanceof": "^1.0.1", "eslint-plugin-unicorn": "^48.0.1", "eslint-plugin-unused-imports": "^3.0.0", "glob": "^10.3.10", "prettier": "^3.0.3", "recast": "^0.23.9", "resolve-tspaths": "^0.8.16", "tsup": "^8.3.5", "tsx": "^4.10.5", "turbo": "^2.2.3", "typescript": "5.6.3" }, "packageManager": "pnpm@10.6.3" } ================================================ FILE: patches/typescript@5.6.3.patch ================================================ diff --git a/lib/tsserver.js b/lib/tsserver.js index 3f1ce62663e3c32aa487f0fc7dcb3dd940e7cd24..559f2f70531180c5d54d98b18ae54a67eab54e1d 100644 --- a/lib/tsserver.js +++ b/lib/tsserver.js @@ -15203,7 +15203,7 @@ function isInternalDeclaration(node, sourceFile) { // src/compiler/utilities.ts var resolvingEmptyArray = []; var externalHelpersModuleNameText = "tslib"; -var defaultMaximumTruncationLength = 160; +var defaultMaximumTruncationLength = 1e6; var noTruncationMaximumTruncationLength = 1e6; function getDeclarationOfKind(symbol, kind) { const declarations = symbol.declarations; ================================================ FILE: pnpm-workspace.yaml ================================================ packages: - drizzle-orm - drizzle-kit - drizzle-zod - drizzle-typebox - drizzle-valibot - drizzle-arktype - drizzle-seed - integration-tests - eslint-plugin-drizzle onlyBuiltDependencies: - '@contrast/fn-inspect' - '@newrelic/native-metrics' - '@prisma/client' - '@prisma/engines' - better-sqlite3 - bufferutil - cpu-features - dprint - es5-ext - esbuild - prisma - protobufjs - sqlite3 - ssh2 - utf-8-validate - workerd ================================================ FILE: tsconfig.json ================================================ { "compilerOptions": { "isolatedModules": true, "composite": false, "target": "esnext", "module": "esnext", "moduleResolution": "bundler", "lib": ["es2022", "es2020", "es2018", "es2017", "es7", "es6", "es5"], "declaration": false, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ "declarationMap": false, "sourceMap": true, "allowJs": true, "incremental": false, "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ "strict": true, /* Enable all strict type-checking options. */ "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ "exactOptionalPropertyTypes": false, /* Interpret optional property types as written, rather than adding 'undefined'. */ "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ "allowUnusedLabels": false, /* Disable error reporting for unused labels. */ "allowUnreachableCode": false, /* Disable error reporting for unreachable code. */ "skipLibCheck": true, /* Skip type checking all .d.ts files. */ "noErrorTruncation": true, /* Disable truncating types in error messages. */ "checkJs": true, "allowImportingTsExtensions": true }, "exclude": ["**/dist"] } ================================================ FILE: turbo.json ================================================ { "$schema": "https://turbo.build/schema.json", "tasks": { "//#lint": { "dependsOn": [ "^test:types", "drizzle-orm#build" ], "inputs": [ "**/*.ts", "!**/node_modules", "!**/dist", "!**/dist-dts" ], "outputLogs": "new-only" }, "test:types": { "dependsOn": [ "^test:types", "drizzle-orm#build", "drizzle-seed#build" ], "inputs": [ "src/**/*.ts", "tests/**/*.ts", "tsconfig.json", "tests/tsconfig.json", "../tsconfig.json" ], "outputLogs": "new-only" }, "drizzle-orm#build": { "inputs": [ "src/**/*.ts", "package.json", "README.md", "../README.md", "tsconfig.json", "tsconfig.*.json", "tsup.config.ts", "scripts/build.ts", "scripts/fix-imports.ts", "../tsconfig.json" ], "outputs": [ "dist/**", "dist-dts/**" ], "outputLogs": "new-only" }, "drizzle-kit#build": { "dependsOn": [ "drizzle-orm#build" ], "inputs": [ "src/**/*.ts", "package.json", "README.md", "../README.md", "tsconfig.json", "tsconfig.*.json", "tsup.config.ts", "scripts/build.ts", "scripts/fix-imports.ts", "../tsconfig.json" ], "outputs": [ "dist/**", "dist-dts/**" ], "outputLogs": "new-only" }, "drizzle-zod#build": { "dependsOn": [ "drizzle-orm#build" ], "inputs": [ "src/**/*.ts", "package.json", "README.md", "../README.md", "tsconfig.json", "tsconfig.*.json", "tsup.config.ts", "scripts/build.ts", "scripts/fix-imports.ts", "../tsconfig.json" ], "outputs": [ "dist/**", "dist-dts/**" ], "outputLogs": "new-only" }, "drizzle-typebox#build": { "dependsOn": [ "drizzle-orm#build" ], "inputs": [ "src/**/*.ts", "package.json", "README.md", "../README.md", "tsconfig.json", "tsconfig.*.json", "tsup.config.ts", "scripts/build.ts", "scripts/fix-imports.ts", "../tsconfig.json" ], "outputs": [ "dist/**", "dist-dts/**" ], "outputLogs": "new-only" }, "drizzle-valibot#build": { "dependsOn": [ "drizzle-orm#build" ], "inputs": [ "src/**/*.ts", "package.json", "README.md", "../README.md", "tsconfig.json", "tsconfig.*.json", "tsup.config.ts", "scripts/build.ts", "scripts/fix-imports.ts", "../tsconfig.json" ], "outputs": [ "dist/**", "dist-dts/**" ], "outputLogs": "new-only" }, "drizzle-arktype#build": { "dependsOn": [ "drizzle-orm#build" ], "inputs": [ "src/**/*.ts", "package.json", "README.md", "../README.md", "tsconfig.json", "tsconfig.*.json", "tsup.config.ts", "scripts/build.ts", "scripts/fix-imports.ts", "../tsconfig.json" ], "outputs": [ "dist/**", "dist-dts/**" ], "outputLogs": "new-only" }, "eslint-plugin-drizzle#build": { "dependsOn": [ "drizzle-orm#build" ], "inputs": [ "src/**/*.ts", "package.json", "README.md", "../README.md", "tsconfig.json", "tsconfig.*.json", "tsup.config.ts", "scripts/build.ts", "scripts/fix-imports.ts", "../tsconfig.json" ], "outputs": [ "dist/**", "dist-dts/**" ], "outputLogs": "new-only" }, "drizzle-seed#build": { "dependsOn": [ "drizzle-orm#build" ], "inputs": [ "src/**/*.ts", "package.json", "README.md", "../README.md", "tsconfig.json", "tsconfig.*.json", "tsup.config.ts", "scripts/build.ts", "scripts/fix-imports.ts", "../tsconfig.json" ], "outputs": [ "dist/**", "dist-dts/**" ], "outputLogs": "new-only" }, "integration-tests#build": { "dependsOn": [ "drizzle-orm#build", "drizzle-seed#build" ], "inputs": [ "src/**/*.ts", "package.json", "README.md", "../README.md", "tsconfig.json", "tsconfig.*.json", "tsup.config.ts", "scripts/build.ts", "scripts/fix-imports.ts", "../tsconfig.json" ], "outputs": [ "dist/**", "dist-dts/**" ], "outputLogs": "new-only" }, "pack": { "dependsOn": [ "build", "test:types" ], "inputs": [ "dist/**" ], "outputs": [ "package.tgz" ], "outputLogs": "new-only" }, "test": { "dependsOn": [ "build", "test:types" ], "inputs": [ "tests/**/*.test.ts", "tests/**/*.test.cjs", "tests/**/*.test.mjs" ], "outputLogs": "new-only" } } }